code stringlengths 4-4.48k | docstring stringlengths 1-6.45k | _id stringlengths 24 |
---|---|---|
def is_bounded(board, y_end, x_end, length, d_y, d_x): <NEW_LINE> <INDENT> end_status = "" <NEW_LINE> start_status = "" <NEW_LINE> if (max(y_end, x_end) >= len(board)) or (min(y_end, x_end) < 0): <NEW_LINE> <INDENT> return "CLOSED" <NEW_LINE> <DEDENT> if (min(y_end + d_y, x_end + d_x) < 0) or (max(y_end + d_y, x_end + d_x) >= len(board)): <NEW_LINE> <INDENT> end_status = "CLOSED" <NEW_LINE> <DEDENT> elif board[y_end + d_y][x_end + d_x] == " ": <NEW_LINE> <INDENT> end_status = "OPEN" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> end_status = "CLOSED" <NEW_LINE> <DEDENT> if (min(y_end - (d_y * length), x_end - (d_x * length)) < 0) or (max(y_end - (d_y * length), x_end - (d_x * length)) >= len(board)): <NEW_LINE> <INDENT> start_status = "CLOSED" <NEW_LINE> <DEDENT> elif board[y_end - (d_y * length)][x_end - (d_x * length)] == " ": <NEW_LINE> <INDENT> start_status = "OPEN" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> start_status = "CLOSED" <NEW_LINE> <DEDENT> if end_status != start_status: <NEW_LINE> <INDENT> return "SEMIOPEN" <NEW_LINE> <DEDENT> elif start_status == "OPEN" and end_status == "OPEN": <NEW_LINE> <INDENT> return "OPEN" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "CLOSED" | Return "OPEN", "SEMIOPEN", or "CLOSED" depending on the status of sequence length length ending at
[y_end][x_end] on board board.
Board is an n x n matrix stored as a list of lists; y_end, x_end, and length are positive ints and length
is greater than one.
(d_y, d_x) is one of: (1, 0), (0, 1), or (1, ±1) | 625941b7507cdc57c6306b12 |
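A minimal usage sketch for is_bounded above, assuming a 4x4 board where " " marks an empty cell (the board and stone character are made up for illustration):

board = [
    [" ", "y", "y", " "],
    [" ", " ", " ", " "],
    [" ", " ", " ", " "],
    [" ", " ", " ", " "],
]
# Horizontal run of length 2 ending at row 0, column 2; both flanking cells
# are empty, so the run is reported as "OPEN".
print(is_bounded(board, 0, 2, 2, 0, 1))  # -> "OPEN"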
def addVariant(xml,subType): <NEW_LINE> <INDENT> for verse in xml.find_all("verse"): <NEW_LINE> <INDENT> copyVerse=copy.copy(verse) <NEW_LINE> origContent=copyVerse.children <NEW_LINE> verse.clear() <NEW_LINE> seg=xml.new_tag("seg", type="x-variant", subType="%s"%subType) <NEW_LINE> verse.append(seg) <NEW_LINE> for c in copyVerse.children: <NEW_LINE> <INDENT> seg.append(copy.copy(c)) <NEW_LINE> <DEDENT> <DEDENT> return xml | wrap the content of verses in some <seg> nodes:
<verse osisID="Josh.1.1"><seg type="x-variant" subType="x-1">blabla</seg></verse> | 625941b7627d3e7fe0d68c8d |
def __init__(self, parent, channel): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.parent = parent <NEW_LINE> self.name = channel | Create a new queue for the channel
Parameters :
- parent : communicator parent
- channel : string for the name of the channel | 625941b707f4c71912b112c6 |
def connect(self): <NEW_LINE> <INDENT> ioswitch_pins = [self.intf_spec['traceable_outputs'][ins[1]] for ins in self.fsm_spec['inputs']] <NEW_LINE> ioswitch_pins.extend([self.intf_spec['traceable_outputs'][outs[1]] for outs in self.fsm_spec['outputs']]) <NEW_LINE> self.logictools_controller.config_ioswitch(ioswitch_pins, IOSWITCH_FSM_SELECT) | Method to configure the IO switch.
Usually this method should only be used internally. Users only need
to use `run()` method. | 625941b724f1403a926009a9 |
def add_friends(self, usernames): <NEW_LINE> <INDENT> existing_user_friends = UserFriend.query.filter( UserFriend.user_id == self.id, UserFriend.username.in_(usernames) ).all() <NEW_LINE> existing_usernames = [existing_user_friend.username for existing_user_friend in existing_user_friends] <NEW_LINE> usernames = [username.upper() for username in usernames if username not in existing_usernames] <NEW_LINE> user_friends = [] <NEW_LINE> for username in usernames: <NEW_LINE> <INDENT> user_friend = UserFriend() <NEW_LINE> user_friend.user = self <NEW_LINE> user_friend.username = username <NEW_LINE> user_friends.append(user_friend) <NEW_LINE> <DEDENT> db.session.bulk_save_objects(user_friends) <NEW_LINE> return user_friends | Add multiple friends to this User's friends list in one go. Committing the DB operation is needed after calling this method. | 625941b7a05bb46b383ec66c |
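The committing note in add_friends' docstring in action; a hypothetical Flask-SQLAlchemy snippet where user and db are assumed to be in scope:

new_friends = user.add_friends(["ALICE", "BOB"])
db.session.commit()  # bulk_save_objects() stages the rows but does not commit them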
def send_activation_email(self, site): <NEW_LINE> <INDENT> ctx_dict = {'activation_key': self.activation_key, 'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS, 'site': site} <NEW_LINE> subject = render_to_string('cr_registration/activation_email_subject.txt', ctx_dict) <NEW_LINE> subject = ''.join(subject.splitlines()) <NEW_LINE> message_text = render_to_string('cr_registration/activation_email.txt', ctx_dict) <NEW_LINE> message_html = render_to_string('cr_registration/activation_email.html', ctx_dict) <NEW_LINE> msg = EmailMultiAlternatives(subject, message_text, settings.DEFAULT_FROM_EMAIL, [self.user.email]) <NEW_LINE> msg.attach_alternative(message_html, "text/html") <NEW_LINE> msg.send() | Send an activation email to the user associated with this
``RegistrationProfile``.
The activation email will make use of two templates:
``registration/activation_email_subject.txt``
This template will be used for the subject line of the
email. Because it is used as the subject line of an email,
this template's output **must** be only a single line of
text; output longer than one line will be forcibly joined
into only a single line.
``registration/activation_email.txt``
This template will be used for the body of the email.
These templates will each receive the following context
variables:
``activation_key``
The activation key for the new account.
``expiration_days``
The number of days remaining during which the account may
be activated.
``site``
An object representing the site on which the user
registered; depending on whether ``django.contrib.sites``
is installed, this may be an instance of either
``django.contrib.sites.models.Site`` (if the sites
application is installed) or
``django.contrib.sites.models.RequestSite`` (if
not). Consult the documentation for the Django sites
framework for details regarding these objects' interfaces. | 625941b7a17c0f6771cbde93 |
def project_multiple_points_woorld_coord_into_camera_image_as_image_coord(self, points_world_coord): <NEW_LINE> <INDENT> image_points = [] <NEW_LINE> visibility_flags = [] <NEW_LINE> for point in points_world_coord: <NEW_LINE> <INDENT> image_point, visible = self.project_single_point_woorld_coord_into_camera_image_as_image_coord(point) <NEW_LINE> image_points.append(image_point) <NEW_LINE> visibility_flags.append(visible) <NEW_LINE> <DEDENT> return image_points, visibility_flags | $ >>> point_world_coordinates.shape
$ >>> (3,)
:param point_world_coord:
:return: | 625941b7925a0f43d2549cb2 |
def request_method(method): <NEW_LINE> <INDENT> method = method.upper() <NEW_LINE> assert method in ('POST', 'GET', 'PUT', 'DELETE') <NEW_LINE> def wrapper(func): <NEW_LINE> <INDENT> def method_filter(request, *args, **kwargs): <NEW_LINE> <INDENT> if request.method == method: <NEW_LINE> <INDENT> return func(request, *args, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if request.method == 'GET': <NEW_LINE> <INDENT> raise Http404 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise HiHttp404 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return method_filter <NEW_LINE> <DEDENT> return wrapper | A decorator to ensure the request runs with a designated HTTP method.
You must pass a string argument that is one of 'POST', 'GET', 'PUT', 'DELETE'.
All methods are 'get', 'post', 'put', 'patch', 'delete', 'head',
'options', 'trace' | 625941b7fbf16365ca6f5ffb |
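A sketch of request_method on a Django-style view (the view itself is hypothetical):

@request_method('POST')
def create_item(request):
    # Only POST requests reach this body; a GET raises Http404, other methods HiHttp404.
    ...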
def science_meta(self, contact, email, description, reference): <NEW_LINE> <INDENT> self.logger.info("adding science related metadata to database.") <NEW_LINE> self.db['meta'].update( {'_id': 'science'}, { '_id' : 'science', 'contact': contact, 'email': email, 'description': description, 'ref': reference, }, upsert = True) | Add metadata associated to the catalog as a tool for science.
Parameters:
----------
contact/email: `str`
name and email of contact person.
description: `str`
brief description of the catalog content
reference: `str`
publication, web address documenting the catalog. | 625941b7090684286d50eb1f |
def draw_shell(G, **kwargs): <NEW_LINE> <INDENT> nlist = kwargs.get('nlist', None) <NEW_LINE> if nlist is not None: <NEW_LINE> <INDENT> del(kwargs['nlist']) <NEW_LINE> <DEDENT> draw(G, shell_layout(G, nlist=nlist), **kwargs) | Draw networkx graph with shell layout.
Parameters
----------
G : graph
A networkx graph
**kwargs : optional keywords
See networkx.draw_networkx() for a description of optional keywords,
with the exception of the pos parameter which is not used by this
function. | 625941b7e5267d203edcdae0 |
def _update_variable_in_string(self, fdata: str = "", tag: str = "", tag_str: str = "", variable: str = ""): <NEW_LINE> <INDENT> self.logger.debug(f"Replaces variable of tag {tag} from {tag_str} to {variable}") <NEW_LINE> ret = re.sub(rf"{tag}: {tag_str}", f"{tag}: {variable}", fdata) <NEW_LINE> return ret | Updates variable in string. Mainly used for updating test-openshift.yaml file.
It replaces VERSION: VERSION_NUMBER -> VERSION: variable and
It replaces OS: OS_VERSION -> OS: <os_name>. | 625941b726068e7796caeb17 |
def test_s(self): <NEW_LINE> <INDENT> qc_A = QCirc().s(0) <NEW_LINE> str_A = qc_A.to_qasm() <NEW_LINE> qc_B = QCirc.from_qasm(str_A) <NEW_LINE> str_B = qc_B.to_qasm() <NEW_LINE> self.assertEqual(str_A, str_B) | test s
| 625941b7a4f1c619b28afe81 |
def GetPartitions(options, image_type, layout_filename): <NEW_LINE> <INDENT> partitions = GetPartitionTableFromConfig(options, layout_filename, image_type) <NEW_LINE> return ' '.join(str(p['num']) for p in partitions if 'num' in p and p['num'] != 'metadata') | Returns the partition numbers for the image_type.
Args:
options: Flags passed to the script
image_type: Type of image eg base/test/dev/factory_install
layout_filename: Path to partition configuration file
Returns:
A space delimited string of partition numbers. | 625941b78c3a8732951581fd |
@login_required <NEW_LINE> @scientist_required <NEW_LINE> def scientist(request, template='scientist/scientist.html', extra_context=None): <NEW_LINE> <INDENT> context = { } <NEW_LINE> if extra_context: <NEW_LINE> <INDENT> context.update(extra_context) <NEW_LINE> <DEDENT> return render_to_response(template, context, context_instance=RequestContext(request)) | Scientist homepage, login required.
**Context**
``RequestContext``
**Template:**
:template:`scientist/home.html` | 625941b78c0ade5d55d3e7ff |
def check_sphere2d(self): <NEW_LINE> <INDENT> sphere2d_doc = [doc for doc in self.cat_db["meta"].find({"type": "sphere2d"})] <NEW_LINE> if len(sphere2d_doc) == 0: <NEW_LINE> <INDENT> self.has_2dsphere = False <NEW_LINE> self.sphere2d_index = False <NEW_LINE> self.s2d_key = None <NEW_LINE> self.logger.debug("no 2d sphere key found in catalog %s"%self.cat_db.name) <NEW_LINE> <DEDENT> elif len(sphere2d_doc)==1 and sphere2d_doc[0]["is_indexed"]: <NEW_LINE> <INDENT> self.has_2dsphere = True <NEW_LINE> self.sphere2d_index = True <NEW_LINE> self.s2d_key = sphere2d_doc[0]["key"] <NEW_LINE> self.logger.info("set 2dsphere key '%s' with format %s. Indexed: %s"%( self.s2d_key, sphere2d_doc[0]["pos_format"], self.sphere2d_index)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.logger.warning("mongo collections can have at most one 2dsphere key indexed.") | reads metadata collection and figures out if the catalog support queries
on spherical geometry. There should be at most one indexed 2dsphere-type
of key (mongodb rule). | 625941b763f4b57ef0000f62 |
def b1(self, b1_p1): <NEW_LINE> <INDENT> SeqDiagBuilder.recordFlow() | :param b1_p1:
:seqdiag_return Bb1Return
:return: | 625941b799fddb7c1c9de1d2 |
def solve_cl(self, cl): <NEW_LINE> <INDENT> problem = self._solve(value=cl, mode=SolverMode.CL) <NEW_LINE> return problem | Solve the aerodynamic problem for a given lift coefficient.
Parameters
----------
cl : float
Return
------
aerosandbox.vlm3 | 625941b7cb5e8a47e48b78ef |
def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> self.child.close() | Close the connection when leaving the context manager
:param exc_type: exc_type
:param exc_val: exc_val
:param exc_tb: exc_tb
:return: None | 625941b71f5feb6acb0c4994 |
def __init__(self, genome): <NEW_LINE> <INDENT> self.genome = genome <NEW_LINE> self.fitness = Individual.DEFAULT_FITNESS | Constructor
:param genome: genome of the individual
:type genome: Tree | 625941b77047854f462a124c |
def connect(self): <NEW_LINE> <INDENT> self.ssh_session.connect( self.network_name, username=self.username, password=self.password ) | Connects to the given ESXi host using SSH. | 625941b7adb09d7d5db6c5d2 |
def draw_to_canvas(self, gc, canvas): <NEW_LINE> <INDENT> self.canvas = canvas <NEW_LINE> self.UpdateValues(canvas) | Draws to a wx.GraphicsContext.
Parameters:
gc: is a wx.GraphicsContext
canvas: the canvas it's being drawn on. | 625941b796565a6dacc8f514 |
def _wrap_item(self, item): <NEW_LINE> <INDENT> if isinstance(item, Proxy): <NEW_LINE> <INDENT> return True, Placeholder.from_proxy(item) <NEW_LINE> <DEDENT> elif hasattr(item, '__dict__'): <NEW_LINE> <INDENT> self._add_ref(item) <NEW_LINE> return Placeholder(item) <NEW_LINE> <DEDENT> elif getattr(item, '__class__', None) is tuple: <NEW_LINE> <INDENT> return tuple(self._wrap_item(value) for value in item) <NEW_LINE> <DEDENT> elif getattr(item, '__class__', None) is dict: <NEW_LINE> <INDENT> return {key: self._wrap_item(value) for key, value in item.items()} <NEW_LINE> <DEDENT> elif getattr(item, '__class__', None) is list: <NEW_LINE> <INDENT> return [self._wrap_item(value) for value in item] <NEW_LINE> <DEDENT> return item | Replace objects by Placeholders before sending them through pipe. | 625941b7046cf37aa974cb8a |
def game_creator(request, game_id): <NEW_LINE> <INDENT> game = get_object_or_404(Game, pk=int(game_id)) <NEW_LINE> player_id = Player.get_player_id(game.creator.id) <NEW_LINE> return HttpResponse(player_id) | Respond with the game creator's player_id. | 625941b7796e427e537b0402 |
def update_item(self, context, table_name, key_attribute_map, attribute_action_map, expected_condition_map=None): <NEW_LINE> <INDENT> raise NotImplementedError() | :param context: current request context
:param table_name: String, name of table to update the item in
:param key_attribute_map: key attribute name to
AttributeValue mapping. It defines the row of the item to update
:param attribute_action_map: attribute name to UpdateItemAction
instance mapping. It defines actions to perform for each
given attribute
:param expected_condition_map: expected attribute name to
ExpectedCondition instance mapping. It provides
preconditions used to decide whether the item
should be updated or not
:returns: True if operation performed, otherwise False
:raises: BackendInteractionException | 625941b7adb09d7d5db6c5d3 |
def kill_jobs(self, worker: str, ) -> None: <NEW_LINE> <INDENT> assert self.project_name.lower() == self.src_name <NEW_LINE> if not self.project_path.exists(): <NEW_LINE> <INDENT> self.project_path.mkdir() <NEW_LINE> <DEDENT> if not self.runs_path.exists(): <NEW_LINE> <INDENT> self.runs_path.mkdir(parents=True) <NEW_LINE> <DEDENT> print_ludwig(f'Killing jobs on {worker} with address={configs.Constants.worker2ip[worker]}') <NEW_LINE> ludwig_data_path = self.project_path.parent <NEW_LINE> private_key_path = ludwig_data_path / '.ludwig' / 'id_rsa' <NEW_LINE> if not private_key_path.exists(): <NEW_LINE> <INDENT> raise OSError(f'Did not find private key in {private_key_path}') <NEW_LINE> <DEDENT> sftp = pysftp.Connection(username='ludwig', host=configs.Constants.worker2ip[worker], private_key=str(private_key_path), cnopts=self.cnopts) <NEW_LINE> run_file_name = f'run_{self.project_name}.py' <NEW_LINE> sftp.put(localpath=run.__file__, remotepath=f'{configs.WorkerDirs.watched.name}/{run_file_name}') <NEW_LINE> print_ludwig(f'Killed any active jobs with src_name={self.src_name} on {worker}') | First kill all job descriptions for the worker (pickle files saved on the server).
Then, run.py is uploaded to the worker, which triggers killing of existing jobs,
and executes run.py.
Because no job descriptions for the worker exist on the server, run.py will exit. | 625941b723e79379d52ee3a7 |
def test_shouldnt_add_lines(): <NEW_LINE> <INDENT> test_input = ('"""Text"""\n' '# This is a comment\n' 'import pkg_resources\n') <NEW_LINE> assert SortImports(file_contents=test_input).output == test_input | Ensure that isort doesn't add a blank line when a top of import comment is present, issue #316 | 625941b7a8ecb033257d2f16 |
def getbyvals(self, values, namemapping={}): <NEW_LINE> <INDENT> res = self._before_getbyvals(values, namemapping) <NEW_LINE> if res is not None: <NEW_LINE> <INDENT> return res <NEW_LINE> <DEDENT> attstouse = [a for a in self.attributes if a in values or a in namemapping] <NEW_LINE> sql = "SELECT " + ", ".join(self.quotelist(self.all)) + " FROM " + self.name + " WHERE " + " AND ".join(["%s = %%(%s)s" % (self.quote(att), att) for att in attstouse]) <NEW_LINE> self.targetconnection.execute(sql, values, namemapping) <NEW_LINE> res = [r for r in self.targetconnection.rowfactory(self.all)] <NEW_LINE> self._after_getbyvals(values, namemapping, res) <NEW_LINE> return res | Return a list of all rows with values identical to the given.
Arguments:
- values: a dict which must hold a subset of the table's attributes.
All rows that have identical values for all attributes in this
dict are returned.
- namemapping: an optional namemapping (see module's documentation) | 625941b721a7993f00bc7b29 |
def match_tasks(task_dict, query): <NEW_LINE> <INDENT> matches = set([tn for tn in task_dict.keys() if query in tn]) <NEW_LINE> return matches | Returns a set of keys in task_dict which match the query. | 625941b771ff763f4b5494ce |
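A quick sketch of match_tasks with made-up task names (matching is plain substring containment on the keys):

tasks = {"build:api": 1, "build:web": 2, "test": 3}
print(match_tasks(tasks, "build"))  # {'build:api', 'build:web'}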
def test_insert_200(): <NEW_LINE> <INDENT> inserted_coins = [] <NEW_LINE> insert_coin(200, inserted_coins) <NEW_LINE> assert inserted_coins == [200] | Given 200 to an empty list of coins, 200 should be
appended. | 625941b7091ae35668666da5 |
def prepare(filepath): <NEW_LINE> <INDENT> IMG_SIZE = 70 <NEW_LINE> img_array = cv2.imread(filepath) <NEW_LINE> new_array = cv2.resize(img_array, (IMG_SIZE, IMG_SIZE)) <NEW_LINE> return new_array.reshape(-1, IMG_SIZE, IMG_SIZE, 3) | Prepares test image to run through model. Resizes and converts to array
:param filepath:
:return: | 625941b72c8b7c6e89b35603 |
def get_table(source): <NEW_LINE> <INDENT> with open(source, 'r') as f: <NEW_LINE> <INDENT> src = f.read() <NEW_LINE> <DEDENT> start = src.find(START) <NEW_LINE> end = src.find(END) <NEW_LINE> if start == -1 or end == -1: <NEW_LINE> <INDENT> sys.exit("Please check your source\n" "No table found.") <NEW_LINE> <DEDENT> before = src[:start] <NEW_LINE> after = src[end + len(END):] <NEW_LINE> table = src[start + len(START):end] <NEW_LINE> return table, before, after | Get table from file | 625941b70a50d4780f666ccf |
def _set_rsi(self,n_rsi): <NEW_LINE> <INDENT> print("Attention veut modifier l'att '{}', donc {} demenage à {}".format(self._rsi,self.prenom,n_rsi)) <NEW_LINE> self._rsi =n_rsi | Method that will be called when we want to change the value of the
'_rsi' attribute; this is what is known as a 'mutator' (setter). It takes as a parameter
the new object that will be assigned to the attribute as its new value | 625941b74428ac0f6e5ba632 |
def parseData(self, message): <NEW_LINE> <INDENT> if len(message) > 10: <NEW_LINE> <INDENT> message = message[3:len(message)-4] <NEW_LINE> board, coords = message.split("]], [[[") <NEW_LINE> rows = board.split("], [") <NEW_LINE> j = 0 <NEW_LINE> for element in rows: <NEW_LINE> <INDENT> vals = element.split(", ") <NEW_LINE> for i in range(10): <NEW_LINE> <INDENT> self.oBOARD[j][i] = int(vals[i]) <NEW_LINE> <DEDENT> j+=1 <NEW_LINE> <DEDENT> ships = coords.split("]], [[") <NEW_LINE> oCoords = [] <NEW_LINE> for ship in ships: <NEW_LINE> <INDENT> shipCoords = [] <NEW_LINE> elems = ship.split("], [") <NEW_LINE> for elem in elems: <NEW_LINE> <INDENT> r,c = elem.split(", ") <NEW_LINE> shipCoords.append([int(r),int(c)]) <NEW_LINE> <DEDENT> oCoords.append(shipCoords) <NEW_LINE> <DEDENT> self.oFleet = Fleet(oCoords) <NEW_LINE> self.setupDone = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> message = message[1:len(message)-1] <NEW_LINE> coords = message.split(", ") <NEW_LINE> r = int(coords[0]) <NEW_LINE> c = int(coords[1]) <NEW_LINE> self.oGuess = (r, c) | parses the data returned from playTurn() or setup() after it has been sent as a string | 625941b7498bea3a759b98f1 |
def close(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.fd.close() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.fd = None | Close the file descriptor and dereference it
This method attempts to close the file descriptor and sets the
internal reference to the file descriptor object to ``None``. If the
call to file descriptor's ``close()`` method fails due to file
descriptor not providing the ``close`` attribute, this condition is
silently ignored. In all other cases, exception is propagated to the
caller. | 625941b7460517430c393fcf |
def products_of_4(ar): <NEW_LINE> <INDENT> step = 4 <NEW_LINE> w = len(ar[0]) <NEW_LINE> h = len(ar) <NEW_LINE> for col in xrange(w): <NEW_LINE> <INDENT> for row in xrange(h - step + 1): <NEW_LINE> <INDENT> if col <= 16: <NEW_LINE> <INDENT> yield reduce(mul, ar[row][col:col+step]) <NEW_LINE> yield reduce(mul, [x[col] for x in ar[row:row+step]]) <NEW_LINE> yield reduce(mul, [ar[row+i][col+i] for i in xrange(step)]) <NEW_LINE> <DEDENT> if col >= 3: <NEW_LINE> <INDENT> yield reduce(mul, [ar[row+i][col-i] for i in xrange(step)]) | Generator of all products of 4 adjacent numbers in an array
Here, adjacency is left-right, up-down, and both diagonals.
Accepts: an m x n rectangular array of int | 625941b73c8af77a43ae35de |
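A sanity check for products_of_4 on a synthetic 20x20 grid (Python 2, since the generator uses xrange; reduce is the builtin and mul is assumed imported from operator):

grid = [[1] * 20 for _ in range(20)]
grid[0][:4] = [8, 9, 7, 6]       # plant one strong horizontal run
print(max(products_of_4(grid)))  # 8*9*7*6 = 3024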
def serve(model_fn, preprocess_fn=None, postprocess_fn=None): <NEW_LINE> <INDENT> server = EstimatorServer( estimator(model_fn, FLAGS.output_dir), preprocess_fn, postprocess_fn) <NEW_LINE> class Handler(http.server.BaseHTTPRequestHandler): <NEW_LINE> <INDENT> def do_POST(self): <NEW_LINE> <INDENT> self.send_response(200) <NEW_LINE> self.send_header('Content-type', 'application/json') <NEW_LINE> self.end_headers() <NEW_LINE> inputs = json.loads(self.rfile.read( int(self.headers['Content-Length']))) <NEW_LINE> outputs = server.predict(inputs) <NEW_LINE> logging.info('Prediction results: {}'.format(outputs)) <NEW_LINE> self.wfile.write(json.dumps(outputs).encode()) <NEW_LINE> <DEDENT> <DEDENT> http.server.HTTPServer((FLAGS.ip_address, FLAGS.port), Handler) .serve_forever() | Serve as a HTTP server.
- Args
- `model_fn`: Same as `train_and_evaluate()`'s.
- `preprocess_fn`: A function to preprocess server request bodies
in JSON. Its first argument is a function which returns the
JSON input. You may need to use `tf.py_func` to create this
function.
- `postprocess_fn`: A function to postprocess server responses of
JSON serializable objects. | 625941b7b57a9660fec336c0 |
def test_edit_poll_pleb(self): <NEW_LINE> <INDENT> self.client.force_authenticate(self.pleb) <NEW_LINE> response = self.client.post(_get_detail_url(1), self.poll_update_data) <NEW_LINE> self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) | A user without permissions should not be able to edit a poll | 625941b75f7d997b871748db |
def propagate(enqueue_condition_fn, csp, queue=None) : <NEW_LINE> <INDENT> if queue==None: <NEW_LINE> <INDENT> queue=csp.get_all_variables() <NEW_LINE> <DEDENT> dequeued=[] <NEW_LINE> while len(queue)!=0: <NEW_LINE> <INDENT> current_var=queue.pop(0) <NEW_LINE> dequeued.append(current_var) <NEW_LINE> eliminated=eliminate_from_neighbors(csp,current_var) <NEW_LINE> if(eliminated==None): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> pre_add_list=[] <NEW_LINE> add_list=[] <NEW_LINE> for var in eliminated: <NEW_LINE> <INDENT> exist=False <NEW_LINE> for varr in queue: <NEW_LINE> <INDENT> if var == varr: <NEW_LINE> <INDENT> exist=True <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> if not exist: <NEW_LINE> <INDENT> pre_add_list.append(var) <NEW_LINE> <DEDENT> <DEDENT> for var in pre_add_list: <NEW_LINE> <INDENT> if enqueue_condition_fn(csp,var): <NEW_LINE> <INDENT> add_list.append(var) <NEW_LINE> <DEDENT> <DEDENT> queue=queue+add_list <NEW_LINE> <DEDENT> return dequeued | Uses constraints to reduce domains, modifying the original csp.
Uses enqueue_condition_fn to determine whether to enqueue a variable whose
domain has been reduced. Same return type as domain_reduction. | 625941b7ec188e330fd5a5e6 |
def isInitializationCaseAvailable(self): <NEW_LINE> <INDENT> return self._server_proxy.isInitializationCaseAvailable() | Returns True if the initialization case is prepared and ready to run simulations.
@rtype: bool | 625941b77b25080760e3929b |
def get_vm_snapshot_info(self, vm_moid, snap_moid): <NEW_LINE> <INDENT> result = DataResult() <NEW_LINE> try: <NEW_LINE> <INDENT> snap_mor = self._get_vm_snapshot_mor(vm_moid, snap_moid) <NEW_LINE> snap_info = s_utils.snaptree_obj_to_json(snap_mor) <NEW_LINE> result.data = {"snap_info": snap_info} <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> LOG.exception(ex) <NEW_LINE> result.status = False <NEW_LINE> result.message = "Get a snapshot info of the vm error: %s" % str( ex) <NEW_LINE> <DEDENT> return result | Get a snapshot info of the VM. | 625941b750812a4eaa59c166 |
def add_section(section, issuer=None): <NEW_LINE> <INDENT> kwargs = {'issuer': issuer, 'section': section} <NEW_LINE> if not permission.has_permission(issuer=issuer, action='config_add_section', kwargs=kwargs): <NEW_LINE> <INDENT> raise exception.AccessDenied('%s cannot add section %s' % (issuer, section)) <NEW_LINE> <DEDENT> return config.add_section(section) | Add a section to the configuration.
:param section: The name of the section.
:param issuer: The issuer account. | 625941b7187af65679ca4f5d |
def complex(name: str) -> bool: <NEW_LINE> <INDENT> if "." in name: <NEW_LINE> <INDENT> left, right = name.split(".") <NEW_LINE> return polym(right) and target(left) <NEW_LINE> <DEDENT> return False | complex compounds are the aggregation of a target and a polymer
{abcd}:efgh | 625941b75510c4643540f238 |
def autorestart(delay_start=None, pause=None, restart_check=None): <NEW_LINE> <INDENT> if not (delay_start is None or callable(delay_start)): <NEW_LINE> <INDENT> raise TypeError("delay_start must be a callable") <NEW_LINE> <DEDENT> if not (pause is None or callable(pause)): <NEW_LINE> <INDENT> raise TypeError("pause must be a callable") <NEW_LINE> <DEDENT> if not (restart_check is None or callable(restart_check)): <NEW_LINE> <INDENT> raise TypeError("restart_check must be a callable") <NEW_LINE> <DEDENT> def wrapper(coro): <NEW_LINE> <INDENT> if not asyncio.iscoroutinefunction(coro): <NEW_LINE> <INDENT> raise TypeError("decorated function must be a coroutine function") <NEW_LINE> <DEDENT> @functools.wraps(coro) <NEW_LINE> @asyncio.coroutine <NEW_LINE> def wrapped(*args, **kwargs): <NEW_LINE> <INDENT> if delay_start is not None: <NEW_LINE> <INDENT> yield from maybe_coroutine(delay_start) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if pause is not None: <NEW_LINE> <INDENT> yield from maybe_coroutine(pause) <NEW_LINE> <DEDENT> return (yield from coro(*args, **kwargs)) <NEW_LINE> <DEDENT> except asyncio.CancelledError: <NEW_LINE> <INDENT> if restart_check is not None and (yield from maybe_coroutine(restart_check)): <NEW_LINE> <INDENT> yield from wrapped(*args, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> except (OSError, HTTPException, GatewayNotFound, ConnectionClosed, aiohttp.ClientError, asyncio.TimeoutError, websockets.InvalidHandshake, websockets.WebSocketProtocolError) as ex: <NEW_LINE> <INDENT> if any((isinstance(ex, ConnectionClosed) and ex.code == 1000, not isinstance(ex, ConnectionClosed))): <NEW_LINE> <INDENT> yield from wrapped(*args, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return wrapped <NEW_LINE> <DEDENT> return wrapper | Decorator that automatically restarts the decorated
coroutine function when a connection issue occurs.
Parameters
----------
delay_start : Callable
Will be yielded from before starting the
execution of the decorated coroutine function.
pause : Callable
Will be yielded from before restarting the
execution of the decorated coroutine function.
restart_check : Callable
A callable that checks whether the decorated
coroutine function should be restarted if it
has been cancelled. Should return a truth value.
May be a coroutine function. | 625941b7b5575c28eb68de3d |
def fit( self, data, n_epochs=30, batch_size=100, shuffle=True, verbose=True, ): <NEW_LINE> <INDENT> assert n_epochs > 0 <NEW_LINE> data_cpy = {} <NEW_LINE> for label in data: <NEW_LINE> <INDENT> data_cpy[label] = np.copy(np.transpose(data[label])) <NEW_LINE> <DEDENT> errs = [] <NEW_LINE> for e in range(n_epochs): <NEW_LINE> <INDENT> if verbose: <NEW_LINE> <INDENT> print('Epoch: {:d}'.format(e + 1)) <NEW_LINE> <DEDENT> epoch_errs = np.array([]) <NEW_LINE> for label, features in data_cpy.items(): <NEW_LINE> <INDENT> if shuffle: <NEW_LINE> <INDENT> np.random.shuffle(features) <NEW_LINE> <DEDENT> onehot = np.zeros([self.n_adaptive], dtype=float) <NEW_LINE> onehot[label] = 1 <NEW_LINE> for batch_nr in range(batch_size, features.shape[0], batch_size): <NEW_LINE> <INDENT> batch = features[batch_nr - batch_size:batch_nr] <NEW_LINE> self._partial_fit(batch, onehot) <NEW_LINE> batch_err = self._get_error(batch, onehot) <NEW_LINE> assert np.isnan(batch_err).any() == False <NEW_LINE> epoch_errs = np.append(epoch_errs, batch_err) <NEW_LINE> <DEDENT> if verbose: <NEW_LINE> <INDENT> err_mean = epoch_errs.mean() <NEW_LINE> print('Train error: {:.4f}'.format(err_mean)) <NEW_LINE> print() <NEW_LINE> sys.stdout.flush() <NEW_LINE> <DEDENT> <DEDENT> errs = np.hstack([errs, epoch_errs]) <NEW_LINE> <DEDENT> return errs | Fits the given data into the model of the RBM.
Args:
data: The training data to be used for learning
n_epochs: The number of epochs. Defaults to 30.
batch_size: The size of the data batch per epoch. Defaults to 100.
shuffle: True if the data should be shuffled before learning.
False otherwise. Defaults to True.
verbose: True if the progress should be displayed on
the standard output during training.
Returns: An array of the mean square errors of each batch. | 625941b7de87d2750b85fbce |
@lm.user_loader <NEW_LINE> def user_loader(id): <NEW_LINE> <INDENT> return User.query.filter(User.id==id).first() | Given *id*, return the associated User object. | 625941b74527f215b584c29b |
def inject_fixture(fixture_name=None): <NEW_LINE> <INDENT> return InjectedFixture(fixture_name) | Inject a fixture into a suite. If no fixture name is specified then the name of the variable holding
the injected fixture will be used. | 625941b766656f66f7cbbfea |
def doc_vector(self, doc, terms): <NEW_LINE> <INDENT> vector = [] <NEW_LINE> for t in terms: <NEW_LINE> <INDENT> vector.append(self.weight(t, doc)) <NEW_LINE> <DEDENT> return vector | Build a document vector against terms set | 625941b77d847024c06be100 |
def triplets(sequence): <NEW_LINE> <INDENT> it = iter(sequence) <NEW_LINE> l = first = next(it) <NEW_LINE> c = second = next(it) <NEW_LINE> for r in it: <NEW_LINE> <INDENT> yield l, c, r <NEW_LINE> l, c = c, r <NEW_LINE> <DEDENT> yield l, c, first <NEW_LINE> yield c, first, second | Generate consecutive triplets throughout the given sequence | 625941b7f9cc0f698b140446 |
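The wrap-around at the end of triplets is the easy part to miss; a quick check:

print(list(triplets([1, 2, 3, 4])))
# [(1, 2, 3), (2, 3, 4), (3, 4, 1), (4, 1, 2)] -- the last two triples wrap past the end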
def payment_in_pounds(self): <NEW_LINE> <INDENT> return "£{:.2f}".format(self.amount/100) | Return the payment amount in GBP format | 625941b7d53ae8145f87a0b7 |
def view_subjects(search_query=None): <NEW_LINE> <INDENT> if search_query is not None: <NEW_LINE> <INDENT> subjects = Subject.select().where(Subject.subjectname == search_query).order_by(Subject.subjectname.asc()) <NEW_LINE> subjects = subjects.where(Subject.subjectname.contains(search_query)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> subjects = Subject.select().order_by(Subject.subjectname.asc()) <NEW_LINE> <DEDENT> return subjects | View previous subjects | 625941b7b5575c28eb68de3e |
def get_generator_return_type(self, return_type: Type, is_coroutine: bool) -> Type: <NEW_LINE> <INDENT> if isinstance(return_type, AnyType): <NEW_LINE> <INDENT> return AnyType() <NEW_LINE> <DEDENT> elif not self.is_generator_return_type(return_type, is_coroutine): <NEW_LINE> <INDENT> return AnyType() <NEW_LINE> <DEDENT> elif not isinstance(return_type, Instance): <NEW_LINE> <INDENT> return AnyType() <NEW_LINE> <DEDENT> elif return_type.type.fullname() == 'typing.Awaitable' and len(return_type.args) == 1: <NEW_LINE> <INDENT> return return_type.args[0] <NEW_LINE> <DEDENT> elif (return_type.type.fullname() in ('typing.Generator', 'typing.AwaitableGenerator') and len(return_type.args) >= 3): <NEW_LINE> <INDENT> return return_type.args[2] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return AnyType() | Given the declared return type of a generator (t), return the type it returns (tr). | 625941b75fdd1c0f98dc0072 |
def test_results_view_with_a_future_question(self): <NEW_LINE> <INDENT> future_question = create_question(question_text="Future question", days=30) <NEW_LINE> response = self.client.get(reverse('polls:results', args=(future_question.id,))) <NEW_LINE> self.assertEqual(response.status_code, 404) | The results view of a question with a pub_date in the
future should return a 404 not found. | 625941b738b623060ff0ac2f |
@app.route("/view") <NEW_LINE> @login_required <NEW_LINE> def view(): <NEW_LINE> <INDENT> hikes = db.execute( "SELECT * FROM hikes WHERE user_id = :user_id", user_id=session["user_id"]) <NEW_LINE> for hike in hikes: <NEW_LINE> <INDENT> hike["edit"] = "http://ide50-whl24.cs50.io:8080/edit" + "?id=" + str(hike["id"]) <NEW_LINE> <DEDENT> return render_template("view.html", hikes=hikes) | View Repository of Hikes | 625941b7091ae35668666da6 |
def __init__(self, datafile, n_sample, steps, phase, **kw): <NEW_LINE> <INDENT> super().__init__(datafile, n_sample, steps, phase,**kw) | batch x 45 x features.
features including n_sample speed fc in time seg, road attrs, etc. | 625941b7e8904600ed9f1d69 |
def __init__(self, guid, text_a, text_b=None, label=None): <NEW_LINE> <INDENT> self.guid = guid <NEW_LINE> self.text_a = text_a <NEW_LINE> self.text_b = text_b <NEW_LINE> self.label = label | Constructs an input Example.
Args:
guid: unique id for the example.
text_a: string, the untokenized text of the first sequence. For single sequence
tasks, only this sequence must be specified.
text_b: (Optional) string, the untokenized text of the second sequence.
label: (Optional) string, the label of the example. This should be specified
for train and dev examples, but not for test examples. | 625941b78a349b6b435e7fb5 |
def remove(self, args=None, path=None, fragment=None, query=None, query_params=None, port=None, fragment_path=None, fragment_args=None): <NEW_LINE> <INDENT> if port: <NEW_LINE> <INDENT> self.port = None <NEW_LINE> <DEDENT> self.path.remove(path) <NEW_LINE> self.query.remove(args) <NEW_LINE> self.query.remove(query) <NEW_LINE> self.fragment.remove(fragment) <NEW_LINE> self.query.remove(query_params) <NEW_LINE> self.fragment.path.remove(fragment_path) <NEW_LINE> self.fragment.query.remove(fragment_args) <NEW_LINE> return self | Remove components of url and return this furl instance, <self>.
Parameters:
args: Shortcut for query_params.
path: A list of path segments to remove from the end of the existing path
segments list, or a path string to remove from the end of the existing
path string, or True to remove the path entirely.
query: If True, remove the query portion of the URL entirely.
query_params: A list of query keys to remove from the query, if they
exist.
port: If True, remove the port from the network location string, if it
exists.
fragment: If True, remove the fragment portion of the URL entirely.
fragment_path: A list of path segments to remove from the end of the
fragment's path segments or a path string to remove from the end of the
fragment's path string.
fragment_args: A list of query keys to remove from the fragment's query,
if they exist.
Returns: <self>. | 625941b76aa9bd52df036be3 |
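A minimal sketch against the furl package this docstring belongs to (the URL is made up):

from furl import furl
f = furl('http://host/a/b?x=1&y=2#frag')
f.remove(args=['x'], fragment=True)
print(f.url)  # http://host/a/b?y=2 -- the x parameter and the fragment are gone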
def authenticated_user(request): <NEW_LINE> <INDENT> userid = authenticated_userid(request) <NEW_LINE> if userid is not None: <NEW_LINE> <INDENT> return DBSession().query(User).filter_by(username=userid).first() | Returns the currently authenticated User instance or None. | 625941b7d8ef3951e324337e |
def split_instances(clazz: type, values: Iterable[T]) -> Tuple[Set[Any], Set[T]]: <NEW_LINE> <INDENT> instances = set() <NEW_LINE> rest = set() <NEW_LINE> for value in values: <NEW_LINE> <INDENT> if isinstance(value, clazz): <NEW_LINE> <INDENT> instances.add(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rest.add(value) <NEW_LINE> <DEDENT> <DEDENT> return instances, rest | Split an iterable into instances of a type and other values. | 625941b72ae34c7f2600cf72 |
def letterCasePermutation(self, S): <NEW_LINE> <INDENT> result = list() <NEW_LINE> def dfs(S, i): <NEW_LINE> <INDENT> if i == len(S): <NEW_LINE> <INDENT> result.append("".join(S)) <NEW_LINE> return <NEW_LINE> <DEDENT> S[i] = S[i].lower() <NEW_LINE> dfs(S, i + 1) <NEW_LINE> if not S[i].isalpha(): return <NEW_LINE> S[i] = S[i].upper() <NEW_LINE> dfs(S, i + 1) <NEW_LINE> <DEDENT> dfs(list(S), 0) <NEW_LINE> return result | :type S: str
:rtype: List[str] | 625941b7d164cc6175782b8e |
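Assuming the method sits on a LeetCode-style Solution class, the DFS branches on every letter and skips digits:

print(Solution().letterCasePermutation("a1b"))
# ['a1b', 'a1B', 'A1b', 'A1B']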
def age(self, request, pk): <NEW_LINE> <INDENT> actor = self.get_object() <NEW_LINE> actor.age = request.data.get('age') <NEW_LINE> actor.save() <NEW_LINE> serializer = self.get_serializer(actor) <NEW_LINE> return Response(serializer.data) | Modify the actor's age. | 625941b799cbb53fe6792a28 |
def add_error(name=None, code=None, status=None): <NEW_LINE> <INDENT> if not name or not status or not code: <NEW_LINE> <INDENT> raise Exception("Can't create Exception class %s: you must set both name, status and code" % name) <NEW_LINE> <DEDENT> myexception = type(name, (PyMacaronException, ), {"code": code, "status": status}) <NEW_LINE> globals()[name] = myexception <NEW_LINE> if code in code_to_class: <NEW_LINE> <INDENT> raise Exception("ERROR! Exception %s is already defined." % code) <NEW_LINE> <DEDENT> code_to_class[code] = myexception <NEW_LINE> return myexception | Create a new Exception class | 625941b7d486a94d0b98df8e |
def prepare_data(self): <NEW_LINE> <INDENT> self.generic_dataset = LIDCNodulesDataset(**self.dataset_params.params) <NEW_LINE> log.info(f"DATASET SIZE: {len(self.generic_dataset)}") <NEW_LINE> tensor_dataset_path = self.__prepare_tensor_dataset() <NEW_LINE> self.dataset = DatasetFolder(tensor_dataset_path, torch.load, ("pt")) <NEW_LINE> self.dataset.norm = self.generic_dataset.norm <NEW_LINE> train_inds, val_inds, test_inds = H.train_val_holdout_split(self.dataset) <NEW_LINE> self.train_sampler = SubsetRandomSampler(train_inds) <NEW_LINE> self.val_sampler = SubsetRandomSampler(val_inds) <NEW_LINE> self.test_subset = Subset(self.dataset, test_inds) | Prepare and save dataset as TensorDataset to improve training speed.
| 625941b7aad79263cf39087c |
def on_exchange_declareok(self, _unused_frame, userdata): <NEW_LINE> <INDENT> logger.info("Exchange declared: %s", userdata) <NEW_LINE> self.setup_queue(self._queue) | Invoked by pika when RabbitMQ has finished the Exchange.Declare RPC
command.
:param pika.Frame.Method unused_frame: Exchange.DeclareOk response frame
:param str|unicode userdata: Extra user data (exchange name) | 625941b738b623060ff0ac30 |
def get_error_handler( request: web.Request, config: Optional[Config] ) -> Optional[Handler]: <NEW_LINE> <INDENT> if not config: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> path = request.rel_url.path <NEW_LINE> for item, handler in config.items(): <NEW_LINE> <INDENT> if match_path(item, path): <NEW_LINE> <INDENT> return handler <NEW_LINE> <DEDENT> <DEDENT> return None | Find error handler matching current request path if any. | 625941b75fc7496912cc37c6 |
def test_set_breakdown(self): <NEW_LINE> <INDENT> self._test_set_bool(self.api_request.set_breakdown, APIFilters.BREAKDOWN, False) | Tests set_breakdown function exception and filter results | 625941b7507cdc57c6306b14 |
def reconstruction(self, stop_cri = 'max_iter', max_ini = 100): <NEW_LINE> <INDENT> xhat_recon, min_error, c_opt, a = dirac_recon_time(self.G, self.yl, self.K, self.noise_lvl, max_ini, stop_cri) <NEW_LINE> z = np.roots(c_opt) <NEW_LINE> z = z / np.abs(z) <NEW_LINE> self.tk_recon = np.real(self.tau * 1j / (2 * np.pi) * np.log(z)) <NEW_LINE> self.tk_recon = np.sort(self.tk_recon - np.floor(self.tk_recon / self.tau) * self.tau) <NEW_LINE> Phi_recon = periodicSinc(np.pi * self.B * (np.reshape(self.t_samp, (-1, 1), order='F') - np.reshape(self.tk_recon, (1, -1), order='F')), self.B * self.tau) <NEW_LINE> self.ak_recon = np.real(linalg.lstsq(Phi_recon, self.yl)[0]) <NEW_LINE> if self.original: <NEW_LINE> <INDENT> self.t_error = distance(self.tk_recon, self.tk)[0] | Main method of the class. This reconstructs the signal according to the algorithm.
Parameters
----------
stop_cri: str
the stopping criteria of the algorithm, can either be *mse* or *max_iter*
max_ini: int
maximum number of random initialisations | 625941b74a966d76dd550e4d |
def test_create_file(self): <NEW_LINE> <INDENT> workbook = Workbook(self.got_filename) <NEW_LINE> worksheet = workbook.add_worksheet() <NEW_LINE> chartsheet = workbook.add_chartsheet() <NEW_LINE> chart = workbook.add_chart({'type': 'bar'}) <NEW_LINE> chart.axis_ids = [43913216, 43914752] <NEW_LINE> data = [ [1, 2, 3, 4, 5], [2, 4, 6, 8, 10], [3, 6, 9, 12, 15], ] <NEW_LINE> worksheet.write_column('A1', data[0]) <NEW_LINE> worksheet.write_column('B1', data[1]) <NEW_LINE> worksheet.write_column('C1', data[2]) <NEW_LINE> chart.add_series({'values': '=Sheet1!$A$1:$A$5'}) <NEW_LINE> chart.add_series({'values': '=Sheet1!$B$1:$B$5'}) <NEW_LINE> chart.add_series({'values': '=Sheet1!$C$1:$C$5'}) <NEW_LINE> chartsheet.protect() <NEW_LINE> chartsheet.set_chart(chart) <NEW_LINE> workbook.close() <NEW_LINE> self.assertExcelEqual() | Test the worksheet properties of an XlsxWriter chartsheet file. | 625941b724f1403a926009ab |
def connect_event(self): <NEW_LINE> <INDENT> pass | Event occurs when a new USB-CANmodul has been connected to the host.
.. note:: To be overridden by subclassing. | 625941b7a05bb46b383ec66e |
def get_appliances(self): <NEW_LINE> <INDENT> response = self._get(url=self._url(path='flow', query={'orgId': self.config.org_id})) <NEW_LINE> if self._verify(response): <NEW_LINE> <INDENT> return response.json() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._apm_http_error(sys._getframe().f_code.co_name, response) | Returns a list of appliances with configured capture interfaces in the configured org | 625941b70a50d4780f666cd0 |
def detect_geometry(): <NEW_LINE> <INDENT> p = subprocess.Popen(['stty', 'size'], stdout=subprocess.PIPE) <NEW_LINE> stdout = p.communicate()[0] <NEW_LINE> rows, cols = map(int, stdout.split()) <NEW_LINE> return rows, cols | Determine the console geometry from the current console. | 625941b7fff4ab517eb2f27a |
def support(request): <NEW_LINE> <INDENT> if not request.user.is_authenticated(): <NEW_LINE> <INDENT> return HttpResponseRedirect('/login') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> is_user = Deligation.objects.get(user=request.user) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> is_user = None <NEW_LINE> return HttpResponseRedirect('/home/') <NEW_LINE> <DEDENT> if request.method == 'POST': <NEW_LINE> <INDENT> if 'send' in request.POST: <NEW_LINE> <INDENT> subject = request.POST.get('subject', '') <NEW_LINE> text = request.POST.get('text', '') <NEW_LINE> user_id = request.POST.get('user_id', '') <NEW_LINE> user = User.objects.get(id=user_id) <NEW_LINE> from_user = user.username + ' <' + user.email + '>' <NEW_LINE> adm_user = User.objects.get(id=is_user.admin_id) <NEW_LINE> mailsend(from_user, adm_user.email, subject, text) <NEW_LINE> messages = [] <NEW_LINE> messages.append('Email send seccesfyl to your cloud administrator') <NEW_LINE> <DEDENT> <DEDENT> return render_to_response('support.html', locals(), context_instance=RequestContext(request)) | Support page | 625941b7e1aae11d1e749af5 |
def lengthOfLongestSubstring_2(self, s): <NEW_LINE> <INDENT> record = collections.Counter() <NEW_LINE> i = 0 <NEW_LINE> maxlth = 0 <NEW_LINE> for j in range(len(s)): <NEW_LINE> <INDENT> record[s[j]] += 1 <NEW_LINE> while 2 in record.values(): <NEW_LINE> <INDENT> record[s[i]] -= 1 <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> maxlth = max(maxlth, j - i + 1) <NEW_LINE> <DEDENT> return maxlth | :type s: str
:rtype: int | 625941b7d7e4931a7ee9dd5d |
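The standard sliding-window check (collections is assumed imported; the Solution wrapper class is hypothetical):

print(Solution().lengthOfLongestSubstring_2("abcabcbb"))  # 3, for the window "abc"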
def test_primer_pair_from_sequence(self): <NEW_LINE> <INDENT> forward_entry = self.sequence_store.add_from_reference(Seq('ATGCCCCCCCATAGA')) <NEW_LINE> reverse_entry = self.sequence_store.add_from_reference(Seq('TTAGACACATGGGAC')) <NEW_LINE> result = PrimerPair.from_sequence_entries(forward_entry, reverse_entry) <NEW_LINE> self.assertEqual(result.forward.sequence, forward_entry.sequence) <NEW_LINE> self.assertEqual(result.reverse.sequence, reverse_entry.sequence) | Test that PrimerPair type can be initialized from a pair of sequences. | 625941b70c0af96317bb802a |
def set_ivs_seed_geometry(seed_table): <NEW_LINE> <INDENT> for i in seed_table.index.values: <NEW_LINE> <INDENT> if _jGUI.MajorAxis == "X": <NEW_LINE> <INDENT> if seed_table.at[i, 'Placement'].startswith('Vert'): <NEW_LINE> <INDENT> seed_table.at[i, 'Orientation'] = 0 <NEW_LINE> seed_table.at[i, 'Inclination'] = 270 <NEW_LINE> <DEDENT> if seed_table.at[i, 'Placement'].startswith('In'): <NEW_LINE> <INDENT> seed_table.at[i, 'Orientation'] = 270 <NEW_LINE> seed_table.at[i, 'Inclination'] = 0 <NEW_LINE> <DEDENT> if seed_table.at[i, 'Placement'].startswith('Cross'): <NEW_LINE> <INDENT> seed_table.at[i, 'Orientation'] = 0 <NEW_LINE> seed_table.at[i, 'Inclination'] = 0 <NEW_LINE> <DEDENT> return seed_table <NEW_LINE> <DEDENT> if seed_table.at[i, 'Placement'].startswith('Vert'): <NEW_LINE> <INDENT> seed_table.at[i, 'Orientation'] = 0 <NEW_LINE> seed_table.at[i, 'Inclination'] = 270 <NEW_LINE> <DEDENT> if seed_table.at[i, 'Placement'].startswith('In'): <NEW_LINE> <INDENT> seed_table.at[i, 'Orientation'] = 180 <NEW_LINE> seed_table.at[i, 'Inclination'] = 0 <NEW_LINE> <DEDENT> if seed_table.at[i, 'Placement'].startswith('Cross'): <NEW_LINE> <INDENT> seed_table.at[i, 'Orientation'] = 90 <NEW_LINE> seed_table.at[i, 'Inclination'] = 0 <NEW_LINE> <DEDENT> return seed_table | Used for formatting into MS Access table. Creates columns 'Orientation'
and 'Inclination' from strings in 'Placement' of the ``seed_table``
DataFrame returned by 'import_seed_data_csv'.
Parameters
----------
seed_table : df
df returned by 'import_seed_data_csv'
Returns
-------
pd.DataFrame : ``seed_table`` with 'Orientation' and 'Inclination'.
Notes
-----
Orientation of nose, degrees (0/180 is N/S, 90/270 is W/E).
Inclination of nose, degrees (0/360 is flat, 90/270 is down/up). | 625941b7ab23a570cc24ffc1 |
def get(self, key=None, block=True, timeout=None): <NEW_LINE> <INDENT> key = key or self.key <NEW_LINE> if block: <NEW_LINE> <INDENT> item = self.client.blpop(key, timeout=timeout) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> item = (None, self.client.lpop(key)) <NEW_LINE> <DEDENT> if item: <NEW_LINE> <INDENT> item = item[1] <NEW_LINE> <DEDENT> return json.loads(item) | Remove and return an item from the queue.
Args:
key(string): specific redis queue key
block(boolean): if block is true and timeout is None (the default),
block if necessary until an item is available
timeout(float): time out setting. | 625941b77c178a314d6ef29a |
def Eq(a, b=0): <NEW_LINE> <INDENT> return Relational(a,b,'==') | A handy wrapper around the Relational class.
Eq(a,b)
Example:
>>> from sympy import Eq
>>> from sympy.abc import x, y
>>> Eq(y, x+x**2)
y == x + x**2 | 625941b763f4b57ef0000f63 |
@blueprint.route('/<int:protocol_id>/opentrons', methods=['GET']) <NEW_LINE> def opentrons_protocol(protocol_id): <NEW_LINE> <INDENT> current_protocol = Protocol.query.filter_by(id=protocol_id).first() <NEW_LINE> if not current_protocol: <NEW_LINE> <INDENT> flash('No such specification!', 'danger') <NEW_LINE> return redirect('.') <NEW_LINE> <DEDENT> if current_protocol.user != current_user and not current_protocol.public: <NEW_LINE> <INDENT> flash('Not your project!', 'danger') <NEW_LINE> return redirect('.') <NEW_LINE> <DEDENT> if not current_protocol.protocol: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> protocol_object = json.loads(current_protocol.protocol) <NEW_LINE> converter = OpenTrons() <NEW_LINE> resp = make_response(converter.convert(protocol_object, current_protocol.name, current_protocol.description)) <NEW_LINE> resp.headers['Content-Type'] = "text" <NEW_LINE> resp.headers['Content-Disposition'] = "attachment; filename=" + current_protocol.name + "-opentrons.py" <NEW_LINE> return resp | Get OpenTrons representation of a protocol. | 625941b71f5feb6acb0c4996 |
def erase_metadata_objects(self, prefix): <NEW_LINE> <INDENT> all_objects = self.radosmo(["ls"], stdout=StringIO()).strip().split("\n") <NEW_LINE> matching_objects = [o for o in all_objects if o.startswith(prefix)] <NEW_LINE> for o in matching_objects: <NEW_LINE> <INDENT> self.radosm(["rm", o]) | For all objects in the metadata pool matching the prefix,
erase them.
This is O(N) in the number of objects in the pool, so it is only suitable
for use on toy test filesystems. | 625941b799fddb7c1c9de1d4 |
def get(self, key): <NEW_LINE> <INDENT> return self._stats[key].value if key in self._stats else None | return saved data or None in case of error | 625941b7d6c5a10208143e88 |
def prepare(self, context, databases, memory_mb, users, device_path=None, mount_point=None): <NEW_LINE> <INDENT> dbaas.MySqlAppStatus.get().begin_mysql_install() <NEW_LINE> app = dbaas.MySqlApp(dbaas.MySqlAppStatus.get()) <NEW_LINE> restart_mysql = False <NEW_LINE> if device_path: <NEW_LINE> <INDENT> device = volume.VolumeDevice(device_path) <NEW_LINE> device.format() <NEW_LINE> if app.is_installed(): <NEW_LINE> <INDENT> app.stop_mysql() <NEW_LINE> restart_mysql = True <NEW_LINE> device.migrate_data(MYSQL_BASE_DIR) <NEW_LINE> <DEDENT> device.mount(mount_point) <NEW_LINE> LOG.debug(_("Mounted the volume.")) <NEW_LINE> if restart_mysql: <NEW_LINE> <INDENT> app.start_mysql() <NEW_LINE> <DEDENT> <DEDENT> app.install_and_secure(memory_mb) <NEW_LINE> LOG.info("Creating initial databases and users following successful " "prepare.") <NEW_LINE> self.create_database(context, databases) <NEW_LINE> self.create_user(context, users) <NEW_LINE> LOG.info('"prepare" call has finished.') | Makes ready DBAAS on a Guest container. | 625941b7796e427e537b0403 |
def create_article(title=ARTICLE_TITLE, body=ARTICLE_BODY, time_published=None, category=None, draft=None): <NEW_LINE> <INDENT> data = { 'title': title, 'body': body, } <NEW_LINE> if time_published is not None: <NEW_LINE> <INDENT> data['time_published'] = time_published <NEW_LINE> <DEDENT> if category is not None: <NEW_LINE> <INDENT> data['category'] = category <NEW_LINE> <DEDENT> if draft is not None: <NEW_LINE> <INDENT> data['draft'] = draft <NEW_LINE> <DEDENT> return models.Article.objects.create(**data) | Create an article for testing with default values | 625941b7097d151d1a222c9d |
def get_nb_corners(self): <NEW_LINE> <INDENT> return len(self.corners) | Returns the number of corners. | 625941b79c8ee82313fbb5b6 |
def cas2tas( cas, altitude, temp='std', speed_units=default_speed_units, alt_units=default_alt_units, temp_units=default_temp_units, ): <NEW_LINE> <INDENT> if temp == 'std': <NEW_LINE> <INDENT> temp = SA.alt2temp(altitude, temp_units=temp_units, alt_units=alt_units) <NEW_LINE> <DEDENT> dp = cas2dp(cas, speed_units) <NEW_LINE> tas = dp2tas( dp, altitude, temp, speed_units=speed_units, alt_units=alt_units, temp_units=temp_units, ) <NEW_LINE> return tas | Return the TAS for a given CAS, pressure altitude and temperature.
The speed units may be 'kt', 'mph', 'km/h', 'm/s' and 'ft/s'.
The altitude may be in feet ('ft'), metres ('m'), kilometres ('km'),
statute miles, ('sm') or nautical miles ('nm').
The temperature may be in deg C, F, K or R. The temperature defaults to std
temperature if it is not input.
If the units are not specified, the units in default_units.py are used. | 625941b7377c676e91271fec |
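A hypothetical call converting 150 kt CAS at 8,000 ft, letting temperature default to standard:

tas = cas2tas(150, 8000)  # default units: kt and ft
print(tas)                # roughly 169 kt; TAS exceeds CAS as density drops with altitude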
def process_string_code(code): <NEW_LINE> <INDENT> processed_code = "" <NEW_LINE> digit = "" <NEW_LINE> for index in range(NUMBER_OF_DIGITS): <NEW_LINE> <INDENT> digit = "" <NEW_LINE> starter_column_of_digit = index * DIGIT_CHARACTER_COLUMN <NEW_LINE> for row in range(NUMBER_OF_DIGIT_PRINT_LINE): <NEW_LINE> <INDENT> row_starter_index = starter_column_of_digit + (row * NUMBER_OF_CHARACTERS_IN_LINE) <NEW_LINE> digit += code[row_starter_index: row_starter_index + DIGIT_CHARACTER_COLUMN] <NEW_LINE> <DEDENT> if digit in DICT_OF_STRING_DIGITS.keys(): <NEW_LINE> <INDENT> processed_code += str(DICT_OF_STRING_DIGITS[digit]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> processed_code += "?" <NEW_LINE> <DEDENT> <DEDENT> return processed_code | Processes the bank code. It handles the string code like a matrix. Each code is a 3x3 string. For example
The inner for loop goes from 0:0 -> 0:3, then steps one line and concatenates 1:0 -> 1:3, then steps one line
and concatenates 2:0 -> 2:3. Then the outer for loop steps to the next digit and the inner loop runs again.
Then it looks the string digit up in the dictionary of digits and concatenates the numbers. If a digit is not found
in the dict, it will add a "?".
:param code: String of a bank code that broken into lines and concatenated.
Returns:
String of the code in number format. | 625941b7046cf37aa974cb8c |
def get_session(self, website): <NEW_LINE> <INDENT> current = self.session_set.filter(website=website).last() <NEW_LINE> if current: <NEW_LINE> <INDENT> if timezone.now() - current.timestamp < datetime.timedelta(seconds=60 * settings.MAX_PAGE_VIEW_DURATION): <NEW_LINE> <INDENT> return current <NEW_LINE> <DEDENT> <DEDENT> new = Session(user=self, website=website) <NEW_LINE> new.save() <NEW_LINE> return new | Get current session or create and return new session if previous expired | 625941b763b5f9789fde6f27 |
def manage_edit(self, REQUEST): <NEW_LINE> <INDENT> self.title = REQUEST.form['title'] <NEW_LINE> self.update_script = REQUEST.form['update_script'] <NEW_LINE> REQUEST.SESSION['messages'] = ["Saved changes. (%s)" % (datetime.now())] <NEW_LINE> REQUEST.RESPONSE.redirect(self.absolute_url() + '/manage_workspace') | Edit this value box | 625941b771ff763f4b5494d0 |
def test_create_endpoint_enabled_str_false(self): <NEW_LINE> <INDENT> ref = self.new_endpoint_ref(service_id=self.service_id, enabled='False') <NEW_LINE> self.post( '/endpoints', body={'endpoint': ref}, expected_status=400) | Call ``POST /endpoints`` with enabled: 'False'. | 625941b7091ae35668666da7 |
def change_band(self): <NEW_LINE> <INDENT> pass | Combo box to choose band.
Returns
-------
None. | 625941b76e29344779a62457 |
def register_signal(self, signum, callback): <NEW_LINE> <INDENT> if signum not in self._signal_handlers: <NEW_LINE> <INDENT> self._signal_handlers[signum] = [callback] <NEW_LINE> signal.signal(signum, self._generic_signal_handler) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._signal_handlers[signum].append(callback) | callback signature: (signum) | 625941b78e7ae83300e4ae0d |
def add_other_cols(row, line, config): <NEW_LINE> <INDENT> n = int(line[-1]) <NEW_LINE> category = ['DBA', 'DTGBA'] <NEW_LINE> dba_t = [] <NEW_LINE> dtgba_t = [] <NEW_LINE> dba_st = int(line[13]) <NEW_LINE> all_dba_st_eq = check_all_st_are_eq(line, 'minDBA') <NEW_LINE> all_dtgba_st_eq = check_all_st_are_eq(line, 'minDTGBA') <NEW_LINE> len_l = len(config.l) <NEW_LINE> for i in range(0, len_l): <NEW_LINE> <INDENT> for elt in category: <NEW_LINE> <INDENT> if 'DBA' in elt: <NEW_LINE> <INDENT> width = 3 <NEW_LINE> <DEDENT> elif 'DTGBA' in elt: <NEW_LINE> <INDENT> width = 4 <NEW_LINE> <DEDENT> st_id = get_st_index_of('min' + elt + '.' + config.l[i].code, line) <NEW_LINE> if '-' in line[st_id]: <NEW_LINE> <INDENT> s = ne(get_last_successful(n, elt, config.l[i].code)) <NEW_LINE> row.append(MultiColumn(width, align='c|', data=ne('(killed') + s + ne(')'))) <NEW_LINE> <DEDENT> elif '!' in line[st_id]: <NEW_LINE> <INDENT> s = ne(get_last_successful(n, elt, config.l[i].code)) <NEW_LINE> row.append(MultiColumn(width, align='c|', data=ne('(intmax') + s + ne(')'))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cur_st = int(line[st_id]) <NEW_LINE> if 'DBA' in elt and dba_st_cmp_dtgba_st('min' + elt + '.' + config.l[i].code, line): <NEW_LINE> <INDENT> row.append(SetColor( arguments=ColorArgument('Purpl', str(cur_st)))) <NEW_LINE> <DEDENT> elif ((not all_dba_st_eq) and 'DBA' in elt) or ((not all_dtgba_st_eq) and 'DTGBA' in elt) or cur_st > dba_st: <NEW_LINE> <INDENT> row.append(SetColor( arguments=ColorArgument('Red', str(cur_st)))) <NEW_LINE> <DEDENT> elif cur_st < dba_st: <NEW_LINE> <INDENT> row.append(SetColor( arguments=ColorArgument('Gray', str(cur_st)))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> row.append(str(cur_st)) <NEW_LINE> <DEDENT> row.append(line[st_id + 2]) <NEW_LINE> time = '%.2f' % round(float(line[st_id + 7]), 2) <NEW_LINE> if width > 3: <NEW_LINE> <INDENT> row.append(line[st_id + 3]) <NEW_LINE> dtgba_t.append(time) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dba_t.append(time) <NEW_LINE> <DEDENT> row.append(time) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> dba = min(float(x) for x in dba_t) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> dba = -1 <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> dtgba = min(float(x) for x in dtgba_t) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> dtgba = -1 <NEW_LINE> <DEDENT> return dba, dtgba | A function used to add all columns that dynamically depend on the
config.bench file. | 625941b7a8370b77170526e2 |
def delete(self, **kwargs): <NEW_LINE> <INDENT> return self._esi.delete(self._path, self._token, params=kwargs) | Do an HTTP DELETE request for the built query. | 625941b7656771135c3eb6b4 |
def form_factor_squared_isotropic(self, q, num_phi=50, num_theta=50): <NEW_LINE> <INDENT> return self.dist_sum( 'form_factor_squared_isotropic', q.shape, float, q, num_phi=num_phi, num_theta=num_theta) | Returns the isotropic form factor
< <|F|^2>_iso >_d | 625941b7283ffb24f3c5574e |
def test_transfer_syntax_not_added(self): <NEW_LINE> <INDENT> ds = read_file(rtplan_name) <NEW_LINE> del ds.file_meta.TransferSyntaxUID <NEW_LINE> ds.is_implicit_VR = False <NEW_LINE> ds.is_little_endian = True <NEW_LINE> with pytest.raises(ValueError): <NEW_LINE> <INDENT> ds.save_as(DicomBytesIO(), write_like_original=False) <NEW_LINE> <DEDENT> assert 'TransferSyntaxUID' not in ds.file_meta | Test TransferSyntaxUID is not added if ExplVRLE. | 625941b73c8af77a43ae35e0 |
def run(self): <NEW_LINE> <INDENT> (options, args) = self.parse_args() <NEW_LINE> if options.filterclass is None: <NEW_LINE> <INDENT> checkerclasses = [checks.StandardChecker, checks.StandardUnitChecker] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> checkerclasses = [options.filterclass, checks.StandardUnitChecker] <NEW_LINE> <DEDENT> checkerconfig = build_checkerconfig(options) <NEW_LINE> options.checkfilter = pocheckfilter(options, checkerclasses, checkerconfig) <NEW_LINE> if not options.checkfilter.checker.combinedfilters: <NEW_LINE> <INDENT> self.error("No valid filters were specified") <NEW_LINE> <DEDENT> options.inputformats = self.inputformats <NEW_LINE> options.outputoptions = self.outputoptions <NEW_LINE> if options.listfilters: <NEW_LINE> <INDENT> print(options.checkfilter.getfilterdocs()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.recursiveprocess(options) | Parses the arguments, and runs recursiveprocess with the
resulting options. | 625941b75166f23b2e1a4f9b |
def _read_space_separated_file(self, file_path): <NEW_LINE> <INDENT> with open(file_path) as f: <NEW_LINE> <INDENT> content = f.readlines() <NEW_LINE> content = [x.strip('\n').strip() for x in content] <NEW_LINE> content = [x.split(' ') for x in content] <NEW_LINE> X = [x[:-1] for x in content] <NEW_LINE> Y = [x[-1:] for x in content] <NEW_LINE> self.action_mapping_ = self._create_action_mapping(Y) <NEW_LINE> Y = self._apply_action_mapping(Y) <NEW_LINE> content = numpy.append(X, Y, axis = 1) <NEW_LINE> content = [[float(y) for y in x]for x in content] <NEW_LINE> <DEDENT> return content | Read files separated by space (example: 0.015 0.12 0.082 0.146 3) | 625941b794891a1f4081b8ea |
def load_pickle_file(filename): <NEW_LINE> <INDENT> _validate_filename_is_string(filename) <NEW_LINE> try: <NEW_LINE> <INDENT> with open(filename, 'rb') as open_file: <NEW_LINE> <INDENT> return pickle.load(open_file) <NEW_LINE> <DEDENT> <DEDENT> except FileNotFoundError: <NEW_LINE> <INDENT> raise HealthcareAIError( 'No file named \'{}\' was found. Please verify the file you intend to load'.format(filename)) | Loads a python object of any type from a pickle file with the given filename
Args:
filename (str): File name to load
Returns:
(object): A python object | 625941b75f7d997b871748dd |
def map(self, function, sequence): <NEW_LINE> <INDENT> sequence = mpiBCast(sequence) <NEW_LINE> getLogger().debug("Rank: %s, pid: %s MpiPool: starts processing iteration" %(self.rank, os.getpid())) <NEW_LINE> mergedList = mergeList(MPI.COMM_WORLD.allgather( self.mapFunction(function, splitList(sequence,self.size)[self.rank]))) <NEW_LINE> getLogger().debug("Rank: %s, pid: %s MpiPool: done processing iteration"%(self.rank, os.getpid())) <NEW_LINE> return mergedList | Emulates a pool map function using Mpi.
Retrieves the number of MPI processes and splits the sequence of walker positions
so that each process works on its own block.
:param function: the function to apply on the items of the sequence
:param sequence: a sequence of items
:returns sequence: sequence of results | 625941b750812a4eaa59c167 |
def get(self, CachableItem): <NEW_LINE> <INDENT> cached_item = self.mapper.get(CachableItem) <NEW_LINE> r = self.request('get', self.url+"storage/collections/data/"+self.collname+'/'+cached_item.getId(), data={'output_mode': 'json'}) <NEW_LINE> if r.ok: <NEW_LINE> <INDENT> data = r.json() <NEW_LINE> for name in self.mapper.mapper: <NEW_LINE> <INDENT> setattr(cached_item, name, data[name]) <NEW_LINE> <DEDENT> return cached_item <NEW_LINE> <DEDENT> return None | Returns current ICachedItem for ICachableItem or None if not cached | 625941b7fbf16365ca6f5ffe |
def test_template_with_variable(self): <NEW_LINE> <INDENT> val = { 'success': False, 'test_value': 'a test value' } <NEW_LINE> Variable.set("a_variable", val['test_value']) <NEW_LINE> def verify_templated_field(context): <NEW_LINE> <INDENT> self.assertEqual(context['ti'].task.some_templated_field, val['test_value']) <NEW_LINE> val['success'] = True <NEW_LINE> <DEDENT> t = OperatorSubclass( task_id='test_complex_template', some_templated_field='{{ var.value.a_variable }}', on_success_callback=verify_templated_field, dag=self.dag) <NEW_LINE> t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) <NEW_LINE> self.assertTrue(val['success']) | Test the availability of variables in templates | 625941b7d53ae8145f87a0b8 |
def method_argument_types(symbol, sign=True): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> args = method_arguments_string(symbol) <NEW_LINE> if args is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if not args or args == 'void': <NEW_LINE> <INDENT> return set() <NEW_LINE> <DEDENT> args = re.sub(r"[*&]|const", ' ', args) <NEW_LINE> if not sign: <NEW_LINE> <INDENT> args = re.sub(r"unsigned", ' ', args) <NEW_LINE> <DEDENT> args = re.sub(r" +", ' ', args) <NEW_LINE> argtypes = set(arg.strip() for arg in re.split(r"[,()]", args)) <NEW_LINE> argtypes.discard('') <NEW_LINE> return argtypes <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return None | Get the base types used in the arguments to a C++ method. | 625941b77cff6e4e811177c8 |