Columns: code (string, lengths 4 to 4.48k), docstring (string, lengths 1 to 6.45k), _id (string, length 24)
def get_xrefs(curie:str) -> List[str]: <NEW_LINE> <INDENT> if ':' not in curie: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> curie = curie.upper() <NEW_LINE> prefix, local_id = curie.split(':', 1) <NEW_LINE> df = load_df() <NEW_LINE> rheaIds = load_rheaIds() <NEW_LINE> if prefix == 'RHEA': <NEW_LINE> <INDENT> df = df[(df.RHEA_ID == local_id) | (df.MASTER_ID == local_id)] <NEW_LINE> xrefs = set() <NEW_LINE> if local_id in rheaIds: <NEW_LINE> <INDENT> xrefs.add(curie) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> for index, row in df.iterrows(): <NEW_LINE> <INDENT> if row.DB == 'KEGG_REACTION': <NEW_LINE> <INDENT> xrefs.add(f'KEGG:{row.ID}') <NEW_LINE> xrefs.add(f'KEGG.REACTION:{row.ID}') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> xrefs.add(f'{row.DB}:{row.ID}') <NEW_LINE> <DEDENT> xrefs.add(f'RHEA:{row.RHEA_ID}') <NEW_LINE> xrefs.add(f'RHEA:{row.MASTER_ID}') <NEW_LINE> <DEDENT> return list(xrefs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if prefix == 'KEGG' or prefix == 'KEGG.REACTION': <NEW_LINE> <INDENT> df = df[(df.DB == 'KEGG_REACTION') & (df.ID == local_id)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> df = df[(df.DB == prefix) & (df.ID == local_id)] <NEW_LINE> <DEDENT> xrefs = set() <NEW_LINE> for index, row in df.iterrows(): <NEW_LINE> <INDENT> xrefs.update(get_xrefs(f'RHEA:{row.RHEA_ID}')) <NEW_LINE> xrefs.add(f'RHEA:{row.RHEA_ID}') <NEW_LINE> xrefs.add(f'RHEA:{row.MASTER_ID}') <NEW_LINE> <DEDENT> return list(xrefs)
Returns a list of CURIEs that exactly match the given CURIE. The given CURIE is contained in the returned list as long as it is recognized. If the returned list is empty, then Rhea is not aware of that CURIE.
625941bacc40096d615957df
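A minimal, self-contained sketch of the CURIE parsing and normalisation that get_xrefs performs before any lookup; the CURIE value is purely illustrative and the Rhea cross-reference data loaded by load_df()/load_rheaIds() is not needed for this part.

```python
# Sketch of the prefix/local-id handling in get_xrefs (illustrative input only).
curie = "rhea:10000"
if ':' not in curie:
    xrefs = []                      # get_xrefs returns [] for strings without a prefix
else:
    curie = curie.upper()           # normalised to "RHEA:10000"
    prefix, local_id = curie.split(':', 1)
    print(prefix, local_id)         # -> RHEA 10000
```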
def moveChild(self, child, position): <NEW_LINE> <INDENT> if position < 0 or position > len(self.children): <NEW_LINE> <INDENT> self.log.info("Wrong position given, nothing is done: the position {0} is without the boundaries [0, {1}]".format(str(position), str(len(self.children)))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.removeChild(child) <NEW_LINE> self.insertChild(position, child)
moveChild: move a child to a given position @type child: netzob.Common.MMSTD.Dictionary.Variable.AbstractVariable.AbstractVariable @param child: the variable that is being moved. @type position: integer @param position: the position where the child is being moved.
625941ba2ae34c7f2600cfbe
def destroy_test_db(self, old_database_name=None, verbosity=1, keepdb=False, suffix=None): <NEW_LINE> <INDENT> self.connection.close() <NEW_LINE> if suffix is None: <NEW_LINE> <INDENT> test_database_name = self.connection.settings_dict['NAME'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> test_database_name = self.get_test_db_clone_settings(suffix)['NAME'] <NEW_LINE> <DEDENT> if verbosity >= 1: <NEW_LINE> <INDENT> action = 'Destroying' <NEW_LINE> if keepdb: <NEW_LINE> <INDENT> action = 'Preserving' <NEW_LINE> <DEDENT> self.log('%s test database for alias %s…' % ( action, self._get_database_display_str(verbosity, test_database_name), )) <NEW_LINE> <DEDENT> if not keepdb: <NEW_LINE> <INDENT> self._destroy_test_db(test_database_name, verbosity) <NEW_LINE> <DEDENT> if old_database_name is not None: <NEW_LINE> <INDENT> settings.DATABASES[self.connection.alias]["NAME"] = old_database_name <NEW_LINE> self.connection.settings_dict["NAME"] = old_database_name
Destroy a test database, prompting the user for confirmation if the database already exists.
625941ba7c178a314d6ef2e5
def connect(self): <NEW_LINE> <INDENT> self.__connection_socket = None <NEW_LINE> try: <NEW_LINE> <INDENT> self.__connection_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> <DEDENT> except socket.error as msg: <NEW_LINE> <INDENT> self.logger.error(f'Error {msg[0]}: {msg[1]}') <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__connection_socket.settimeout(self.connection_timeout) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.__connection_socket.connect((self.connection_host, self.connection_port)) <NEW_LINE> <DEDENT> except socket.timeout: <NEW_LINE> <INDENT> self.logger.error(f'Connection timeout: {self.connection_host}:{self.connection_port}') <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> except socket.error: <NEW_LINE> <INDENT> self.logger.error(f'Error connecting to {self.connection_host}:{self.connection_port}') <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> self.logger.debug(f'Connected to {self.connection_host}:{self.connection_port}')
Connect a socket.
625941ba187af65679ca4faa
def get_metadata(self): <NEW_LINE> <INDENT> self.doi2json() <NEW_LINE> self.title = self.meta['title'] <NEW_LINE> self.author_surnames = [] <NEW_LINE> self.author_givennames = [] <NEW_LINE> for i in self.meta['author']: <NEW_LINE> <INDENT> self.author_surnames.append(i['family']) <NEW_LINE> self.author_givennames.append(i['given']) <NEW_LINE> <DEDENT> self.journal = self.meta['container-title'] <NEW_LINE> if 'published-print' in self.meta.keys(): <NEW_LINE> <INDENT> self.year = str(self.meta['published-print']['date-parts'][0][0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.year = str(self.meta['published-online']['date-parts'][0][0]) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.volume = str(self.meta['volume']) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.volume = '' <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.pages = str(self.meta['page']) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.pages = ''
Extract metadata from DOI
625941ba796e427e537b044f
def click_coroutine(f): <NEW_LINE> <INDENT> f = asyncio.coroutine(f) <NEW_LINE> def wrapper(*args, **kwargs): <NEW_LINE> <INDENT> loop = asyncio.get_event_loop() <NEW_LINE> return loop.run_until_complete(f(*args, **kwargs)) <NEW_LINE> <DEDENT> return update_wrapper(wrapper, f)
A wrapper allowing asyncio to be used with click. https://github.com/pallets/click/issues/85
625941bad7e4931a7ee9dda8
@login_required <NEW_LINE> @permission_required("aa_bulletin_board.basic_access") <NEW_LINE> def view_bulletin(request, slug): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> bulletin = Bulletin.objects.user_has_access(request.user).get(slug=slug) <NEW_LINE> context = {"bulletin": bulletin, "slug": slug} <NEW_LINE> return render(request, "aa_bulletin_board/bulletin.html", context) <NEW_LINE> <DEDENT> except Bulletin.DoesNotExist: <NEW_LINE> <INDENT> messages.warning( request, _( "The bulletin you are looking for does not exist, " "or you don't have access to it." ), ) <NEW_LINE> return redirect("aa_bulletin_board:dashboard")
View a bulletin
625941ba8e7ae83300e4ae58
def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V1alpha1WebhookThrottleConfig): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
Returns true if both objects are not equal
625941baab23a570cc25000c
def get_extra_delete_msg(self, vc_room, event_vc_room): <NEW_LINE> <INDENT> return ''
Return a custom message to show in the confirmation dialog when deleting a VC room. :param vc_room: the VC room object :param event_vc_room: the association of an event and a VC room :return: a string (may contain HTML) with the message to display
625941ba6e29344779a624a2
def __init__(self, date, day_open, day_high, day_low, day_close, volume): <NEW_LINE> <INDENT> self._date = date <NEW_LINE> self._open = day_open <NEW_LINE> self._high = day_high <NEW_LINE> self._low = day_low <NEW_LINE> self._close = day_close <NEW_LINE> self._volume = volume
Parameters: date (str): Date in yyyymmdd format. day_open (float): Dollar value of the first trade of the day. day_high (float): Dollar value of the highest trade of the day. day_low (float): Dollar value of the lowest trade of the day. day_close (float): Dollar value of the last trade of the day. volume (int): The number of shares traded on this day.
625941ba30bbd722463cbc4f
def on_btnGenerarFactura_clicked(self, witget, data=None): <NEW_LINE> <INDENT> model1, iter1 = self.treeClientes.get_selection().get_selected() <NEW_LINE> model2, iter2 = self.treeFactuMesas.get_selection().get_selected() <NEW_LINE> if iter2 == None: <NEW_LINE> <INDENT> self.winErrores.show() <NEW_LINE> self.lblError.set_text("Selecciona una factura.") <NEW_LINE> <DEDENT> if iter1 == None: <NEW_LINE> <INDENT> self.winErrores.show() <NEW_LINE> self.lblError.set_text("Selecciona un cliente.") <NEW_LINE> <DEDENT> if iter1 != None and iter2 != None: <NEW_LINE> <INDENT> pagado = model2.get_value(iter2, 3) <NEW_LINE> if pagado == "No": <NEW_LINE> <INDENT> idMesa = self.CMBMesaFactura.get_active() <NEW_LINE> dni = model1.get_value(iter1, 0) <NEW_LINE> idfactu = model2.get_value(iter2, 1) <NEW_LINE> BBDD.AñadirClienteFactura(dni, idfactu) <NEW_LINE> BBDD.CargarFacturasMesa(self.listFactuMesa, self.treeFactuMesas, idMesa) <NEW_LINE> Restaurante.VaciarMesaPagada(self, witget) <NEW_LINE> Informe.factura(idfactu,dni) <NEW_LINE> <DEDENT> if pagado == "Si": <NEW_LINE> <INDENT> idfactu = model2.get_value(iter2, 1) <NEW_LINE> dni = model1.get_value(iter1, 0) <NEW_LINE> Informe.factura(idfactu,dni)
# Action that generates the ticket for a selected invoice of a selected client, marks the invoice as paid and frees the table.
625941baec188e330fd5a632
def __init__(self, inplanes, planes, bn_norm, with_ibn, baseWidth, cardinality, stride=1, downsample=None): <NEW_LINE> <INDENT> super(Bottleneck, self).__init__() <NEW_LINE> D = int(math.floor(planes * (baseWidth / 64))) <NEW_LINE> C = cardinality <NEW_LINE> self.conv1 = nn.Conv2d(inplanes, D * C, kernel_size=1, stride=1, padding=0, bias=False) <NEW_LINE> if with_ibn: <NEW_LINE> <INDENT> self.bn1 = IBN(D * C, bn_norm) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.bn1 = get_norm(bn_norm, D * C) <NEW_LINE> <DEDENT> self.conv2 = nn.Conv2d(D * C, D * C, kernel_size=3, stride=stride, padding=1, groups=C, bias=False) <NEW_LINE> self.bn2 = get_norm(bn_norm, D * C) <NEW_LINE> self.conv3 = nn.Conv2d(D * C, planes * 4, kernel_size=1, stride=1, padding=0, bias=False) <NEW_LINE> self.bn3 = get_norm(bn_norm, planes * 4) <NEW_LINE> self.relu = nn.ReLU(inplace=True) <NEW_LINE> self.downsample = downsample
Constructor Args: inplanes: input channel dimensionality planes: output channel dimensionality baseWidth: base width. cardinality: num of convolution groups. stride: conv stride. Replaces pooling layer.
625941ba1f037a2d8b94608b
def load_snb_parms(snb_file, num_snow_bands): <NEW_LINE> <INDENT> def assign_dummy_band_elevations(elevs): <NEW_LINE> <INDENT> left_pads = 0 <NEW_LINE> leftmost_floor = 0 <NEW_LINE> right_pads = 0 <NEW_LINE> rightmost_floor = 0 <NEW_LINE> for count1, elev in enumerate(elevs): <NEW_LINE> <INDENT> if elev != 0: <NEW_LINE> <INDENT> left_pads = count1 <NEW_LINE> leftmost_floor = elev - elev % Band.band_size <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> elevs.reverse() <NEW_LINE> for count2, elev in enumerate(elevs): <NEW_LINE> <INDENT> if elev != 0: <NEW_LINE> <INDENT> right_pads = count2 <NEW_LINE> rightmost_floor = elev - elev % Band.band_size <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> elevs.reverse() <NEW_LINE> left_fills = list(range((leftmost_floor - left_pads*Band.band_size), leftmost_floor, Band.band_size )) <NEW_LINE> right_fills = list(range((rightmost_floor + Band.band_size), (rightmost_floor + right_pads*Band.band_size + Band.band_size), Band.band_size )) <NEW_LINE> elevs[0:len(left_fills)] = left_fills <NEW_LINE> elevs[elevs.index(0):] = right_fills <NEW_LINE> return elevs <NEW_LINE> <DEDENT> with open(snb_file, 'r') as f: <NEW_LINE> <INDENT> cells = OrderedDict() <NEW_LINE> for line in f: <NEW_LINE> <INDENT> split_line = line.split() <NEW_LINE> cell_id = split_line[0] <NEW_LINE> elevs = [ int(z) for z in split_line[num_snow_bands+1:2*num_snow_bands+1] ] <NEW_LINE> elevs = assign_dummy_band_elevations(elevs) <NEW_LINE> cell = [ Band(z) for z in elevs ] <NEW_LINE> cells[cell_id] = cell <NEW_LINE> <DEDENT> <DEDENT> return cells
Reads in a Snow Band Parameter File and populates the median elevation property for each band within an existing set of VIC cells. Creates a band map to keep track of the lower bounds of each band (each spanning an elevation of band_size) and any zero pads provided by the user in the Snow Band Parameter File (zero pads are required by VIC, to allow for glacier growth/slide into previously non-existent elevations between iterations).
625941badd821e528d63b038
def get_newest_key_id(self): <NEW_LINE> <INDENT> return self._run_and_fetch_one(sql.select_newest_in_regkey)[0]
Gets the ID of the newest key in the database by finding the highest ID. :return: int - the ID
625941ba30bbd722463cbc50
def __str__(self): <NEW_LINE> <INDENT> ss = '{} summary info\n'.format(type(self).__name__) <NEW_LINE> for axis, axname in zip(self.axes, self.axis_names): <NEW_LINE> <INDENT> ss += array_stats_str(axis.data, axname) <NEW_LINE> <DEDENT> ss += array_stats_str(self.data, 'Data') <NEW_LINE> return ss
String representation
625941ba627d3e7fe0d68cdb
def test_get_documentation_str(self): <NEW_LINE> <INDENT> self.assertEqual("Number threads", get_documentation_str({"doc": "Number threads", "id": "123"})) <NEW_LINE> self.assertEqual("123", get_documentation_str({"id": "123"})) <NEW_LINE> self.assertEqual(None, get_documentation_str({}))
get_documentation_str should return "doc", else "id", else None.
625941ba23849d37ff7b2f1e
def delete_playlist(settings) -> None: <NEW_LINE> <INDENT> playlist_to_delete = int(input('Enter playlist number that you wish to delete (Enter to skip): ')) <NEW_LINE> for index, key in enumerate(settings['playlists'], start=1): <NEW_LINE> <INDENT> if index == playlist_to_delete: <NEW_LINE> <INDENT> settings.remove_option('playlists', key)
Deletes a playlist from the settings
625941badc8b845886cb53c1
def getfields(self): <NEW_LINE> <INDENT> with xlrd.open_workbook(self.filename, on_demand=True) as book: <NEW_LINE> <INDENT> sheet = book.sheet_by_name(self.tablename) <NEW_LINE> fieldnames = sheet.row_values(0) <NEW_LINE> fieldlist = [] <NEW_LINE> for fieldindex in xrange(len(fieldnames)): <NEW_LINE> <INDENT> fieldname = fieldnames[fieldindex] <NEW_LINE> attributes = {'type': self.fieldtypes[fieldindex].upper()} <NEW_LINE> newfield = field.Field(fieldname, attributes, namelen=self.namelenlimit) <NEW_LINE> fieldlist.append(newfield) <NEW_LINE> <DEDENT> return fieldlist
Get the fields from the Excel worksheet as a list of Field objects
625941baac7a0e7691ed3f66
def assertTraitChanges(self, obj, trait, count=None): <NEW_LINE> <INDENT> return _AssertTraitChangesContext(obj, trait, count, self)
Assert that the class trait changes exactly n times. Used in a with statement to assert that a class trait has changed during the execution of the code inside the with context block (similar to the assertRaises method). Please note that the context manager returns itself and the user can introspect the information of the fired events by accessing the ``events`` attribute of the context object. The attribute is a list with tuple elements containing the arguments of an `on_trait_change` event signature (<object>, <name>, <old>, <new>). **Example**:: class MyClass(HasTraits): number = Float(2.0) my_class = MyClass() with self.assertTraitChanges(my_class, 'number') as ctx: my_class.number = 3.0 self.assertEqual(ctx.events, [(my_class, 'number', 2.0, 3.0)]) Parameters ---------- obj : HasTraits The HasTraits class instance whose class trait will change. xname : str The extended trait name of trait changes to listen to. count : int, optional The expected number of times the event should be fired. When None (default value) there is no check for the number of times the change event was fired. Notes ----- - Checking if the provided xname corresponds to valid traits in the class is not implemented yet.
625941ba0383005118ecf471
def remove_hardpts(self): <NEW_LINE> <INDENT> self._mhard.clear_members()
Clear all hardpoints from this model.
625941bad6c5a10208143ed4
def unmold_mask(masks, bboxes, image_shape): <NEW_LINE> <INDENT> assert masks.shape[0] == bboxes.shape[0] <NEW_LINE> full_mask_add = np.zeros((image_shape[1], image_shape[2], image_shape[3], masks.shape[-1]), dtype=np.float32) <NEW_LINE> full_mask_count = np.zeros((image_shape[1], image_shape[2], image_shape[3], masks.shape[-1]), dtype=np.float32) <NEW_LINE> masks = torch.from_numpy(masks).float().cuda() <NEW_LINE> for i in range(masks.shape[0]): <NEW_LINE> <INDENT> z1, y1, x1, z2, y2, x2 = bboxes[i] <NEW_LINE> mask = masks[i] <NEW_LINE> mask = mask.permute(3, 0, 1, 2).unsqueeze(0) <NEW_LINE> mask = F.interpolate(mask, size=(z2 - z1, y2 - y1, x2 - x1), mode='trilinear', align_corners=False) <NEW_LINE> mask = mask.squeeze(0).detach().cpu().numpy().transpose(1, 2, 3, 0) <NEW_LINE> full_mask_add[z1:z2, y1:y2, x1:x2, :] += mask <NEW_LINE> full_mask_count[z1:z2, y1:y2, x1:x2, :] += 1. <NEW_LINE> <DEDENT> full_mask = full_mask_add / (full_mask_count + 1e-6) <NEW_LINE> return full_mask.clip(min=0., max=1.)
Converts a mask generated by the neural network into a format similar to its original shape, using the overlap-tile strategy here. masks: [detect_num, depth, height, width, num_instances] of type float. A small, typically 28x28 mask. bboxes: [detect_num, z1, y1, x1, z2, y2, x2]. The box to fit the mask in. image_shape: [channels, depth, height, width] Returns a float32 mask with the same size as the original image.
625941ba21a7993f00bc7b77
def _initKeyboard(self): <NEW_LINE> <INDENT> kaleidoscope.clearAllKeys() <NEW_LINE> hid.initializeKeyboard()
Resets the keyboard to initial state.
625941ba16aa5153ce362305
@app.route("/user/<ident>/online") <NEW_LINE> def user_is_online(ident): <NEW_LINE> <INDENT> ident = ident.lower() <NEW_LINE> hashedId = hashlib.md5(ident).hexdigest() <NEW_LINE> if not r.sismember(NS+"all",hashedId): abort(404) <NEW_LINE> return json.dumps(r.exists((NSL+hashedId+".online")))
TODO generate hash from uid (default is currently RFID-UID)
625941bad53ae8145f87a103
def edit_insert(self, c): <NEW_LINE> <INDENT> self.buf.insert(self.pos, c) <NEW_LINE> self.pos += 1 <NEW_LINE> self.refresh_line()
insert a character at the current cursor position
625941ba24f1403a926009f6
def get_all_backups_weekly(self, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> if kwargs.get('async_req'): <NEW_LINE> <INDENT> return self.get_all_backups_weekly_with_http_info(**kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (data) = self.get_all_backups_weekly_with_http_info(**kwargs) <NEW_LINE> return data
List filenames of available weekly backups # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_all_backups_weekly(async_req=True) >>> result = thread.get() :param async_req bool :return: list[str] If the method is called asynchronously, returns the request thread.
625941bafbf16365ca6f604a
def test_invalid_extension(self): <NEW_LINE> <INDENT> url_iterator = ListFileURLGenerator("examples/test_invalid_extension.list", "*.jpg") <NEW_LINE> assert len([_ for _ in url_iterator]) == 2, repr([_ for _ in url_iterator])
Test an invalid extension in the list file.
625941ba379a373c97cfa9d7
def test_denoise_tv_chambolle_3d(): <NEW_LINE> <INDENT> x, y, z = np.ogrid[0:40, 0:40, 0:40] <NEW_LINE> mask = (x - 22)**2 + (y - 20)**2 + (z - 17)**2 < 8**2 <NEW_LINE> mask = 100 * mask.astype(np.float) <NEW_LINE> mask += 60 <NEW_LINE> mask += 20 * np.random.rand(*mask.shape) <NEW_LINE> mask[mask < 0] = 0 <NEW_LINE> mask[mask > 255] = 255 <NEW_LINE> res = restoration.denoise_tv_chambolle(mask.astype(np.uint8), weight=0.1) <NEW_LINE> assert_(res.dtype == np.float) <NEW_LINE> assert_(res.std() * 255 < mask.std())
Apply the TV denoising algorithm on a 3D image representing a sphere.
625941babe7bc26dc91cd492
def get_RasterResolution(self): <NEW_LINE> <INDENT> return super(IRasterOutputSettings, self).get_RasterResolution()
Method IRasterOutputSettings.get_RasterResolution OUTPUT dpi : double*
625941bab57a9660fec3370e
def decode(signals, tree): <NEW_LINE> <INDENT> for signal in signals: <NEW_LINE> <INDENT> tree = [b for b in tree.branches if b.label == signal][0] <NEW_LINE> <DEDENT> leaves = [b for b in tree.branches if b.is_leaf()] <NEW_LINE> assert len(leaves) == 1 <NEW_LINE> return leaves[0].label
Decode signals into a letter according to tree, assuming signals correctly represents a letter. tree has the format returned by morse(). >>> t = morse(abcde) >>> [decode(s, t) for s in ['-..', '.', '-.-.', '.-', '-..', '.']] ['d', 'e', 'c', 'a', 'd', 'e']
625941baec188e330fd5a633
def rowCount(self, parent: QModelIndex = ...) -> Any: <NEW_LINE> <INDENT> return len(self.__data_list)
returns the current row count of the table model :param parent: redundant parameter as this derived class isn't a tree model :return: returns the current row count of the table model
625941bab5575c28eb68de8b
def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(VideoCommentListResponseDataList, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result
Returns the model properties as a dict
625941baa05bb46b383ec6b9
def resize(self, w, h): <NEW_LINE> <INDENT> pass
Set the canvas size in pixels.
625941bad7e4931a7ee9dda9
def iter_attribute_fields(self): <NEW_LINE> <INDENT> for attr in self.available_attributes: <NEW_LINE> <INDENT> yield self[attr.get_formfield_name()]
Used in templates to retrieve the attributes' input fields.
625941ba3d592f4c4ed1cf0c
def test_update_after_delete(self): <NEW_LINE> <INDENT> users_list = User.objects.all() <NEW_LINE> initial_count = users_list.count() <NEW_LINE> first_user = users_list[0] <NEW_LINE> first_user.delete() <NEW_LINE> assert (initial_count-1) == User.objects.all().count()
Test invalidation after performing a delete and fetching the list of the model again
625941ba26238365f5f0ecf7
def tpl1(request): <NEW_LINE> <INDENT> u = [1, 2, 3, 4] <NEW_LINE> return render(request, 'tpl1.html', {'u': u})
Template inheritance: base (master) template
625941ba5166f23b2e1a4fe6
@cache('run') <NEW_LINE> def sf_bathtub_demand(): <NEW_LINE> <INDENT> return 25
Real Name: SF Bathtub Demand Original Eqn: 25 Units: L/(Day*cap) Limits: (None, None) Type: constant According to Toronto's Design Criteria for Sewers and Watermains and City of Toronto Water User Breakdown Information, Keating Channel Precinct Env. Study Report
625941baa17c0f6771cbdee1
def exists_vnfd(vnfdId, version=None): <NEW_LINE> <INDENT> vnfd_json = None <NEW_LINE> if version is None: <NEW_LINE> <INDENT> vnfd_json = vnfd_coll.find_one({"vnfdId": vnfdId}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> vnfd_json = vnfd_coll.find_one({"vnfdId": vnfdId, "vnfdVersion": version}) <NEW_LINE> <DEDENT> if vnfd_json is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True
Function to check if a VNFD with identifier "vnfdId" exists. Parameters ---------- vnfdId: string Identifier of the Virtual Network Function Descriptor Returns ------- boolean returns True if a VNFD with Id "vnfdId" exists in "vnfd_coll". Returns False otherwise.
625941bad99f1b3c44c67424
def buildItemDataString(self): <NEW_LINE> <INDENT> return ""
Public method to build a string to persist the specific item data. This string must start with ", " and should be built like "attribute=value" with pairs separated by ", ". value must not contain ", " or newlines. @return persistence data (string)
625941ba4527f215b584c2e8
def init_extra_module(self, component_instance, function, mw_data): <NEW_LINE> <INDENT> component_name = component_instance.blender_obj.name <NEW_LINE> parent_name = component_instance.robot_parent.blender_obj.name <NEW_LINE> component_instance.output_functions.append(function) <NEW_LINE> logger.info('######## GPS-SENSOR INITIALIZED ########')
Set up the middleware connection with this data. Prepare the middleware to handle the serialised data as necessary.
625941bad58c6744b4257aee
def get_db(): <NEW_LINE> <INDENT> if not hasattr(g, 'mongodb'): <NEW_LINE> <INDENT> g.mongodb = connect_db() <NEW_LINE> <DEDENT> return g.mongodb
Opens a new database connection if there is none yet for the current application context.
625941bad268445f265b4cfc
def _split_notes(notes, max_len_notes_line = 40): <NEW_LINE> <INDENT> l = len(notes) <NEW_LINE> n = l//max_len_notes_line + 1 <NEW_LINE> notes = notes.split() <NEW_LINE> line = '' <NEW_LINE> notes_ = '' <NEW_LINE> i = 0 <NEW_LINE> while (i < len(notes)): <NEW_LINE> <INDENT> if (len(line) + len(notes[i]) + 1) > max_len_notes_line: <NEW_LINE> <INDENT> notes_ = notes_ + line + '\n' <NEW_LINE> line = notes[i] + ' ' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> line = line + notes[i] + ' ' <NEW_LINE> <DEDENT> i += 1 <NEW_LINE> <DEDENT> notes_ = notes_ + line[:-1] <NEW_LINE> return notes_
Split a string on white spaces over multiple lines, such that the line length doesn't exceed a specified value. Args: :notes: | string to be split :max_len_notes_line: | 40, optional | Maximum length of a single line when splitting the string. Returns: :notes_: | string with '\n' added at the right places so that no line exceeds the specified width.
625941ba56b00c62f0f144ec
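For comparison, the standard library's textwrap module performs the same word-wise wrapping that _split_notes implements by hand; a minimal sketch (the sample string and width are illustrative):

```python
import textwrap

notes = "a fairly long free-text note that should be wrapped onto short lines"
# textwrap.wrap splits on whitespace and keeps each line under the given width;
# joining with '\n' yields a single wrapped string, like _split_notes returns.
print('\n'.join(textwrap.wrap(notes, width=40)))
```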
def map_min_of_link_nodes_to_link(grid, var_name, out=None): <NEW_LINE> <INDENT> if out is None: <NEW_LINE> <INDENT> out = grid.empty(at='link') <NEW_LINE> <DEDENT> if type(var_name) is str: <NEW_LINE> <INDENT> var_name = grid.at_node[var_name] <NEW_LINE> <DEDENT> np.minimum(var_name[grid.node_at_link_head], var_name[grid.node_at_link_tail], out=out) <NEW_LINE> return out
Map the minimum of a link's nodes to the link. map_min_of_link_nodes_to_link iterates across the grid and identifies the node values at both the "head" and "tail" of a given link. This function evaluates the value of 'var_name' at both the "to" and "from" node. The minimum value of the two node values is then mapped to the link. Construction:: map_min_of_link_nodes_to_link(grid, var_name, out=None) Parameters ---------- grid : ModelGrid A landlab ModelGrid. var_name : array or field name Values defined at nodes. out : ndarray, optional Buffer to place mapped values into or `None` to create a new array. Returns ------- ndarray Mapped values at links. Examples -------- >>> import numpy as np >>> from landlab.grid.mappers import map_min_of_link_nodes_to_link >>> from landlab import RasterModelGrid >>> rmg = RasterModelGrid((3, 4)) >>> _ = rmg.add_field('node', 'z', ... [[ 0, 1, 2, 3], ... [ 7, 6, 5, 4], ... [ 8, 9, 10, 11]]) >>> map_min_of_link_nodes_to_link(rmg, 'z') array([ 0., 1., 2., 0., 1., 2., 3., 6., 5., 4., 7., 6., 5., 4., 8., 9., 10.]) >>> values_at_links = rmg.empty(at='link') >>> rtn = map_min_of_link_nodes_to_link(rmg, 'z', out=values_at_links) >>> values_at_links array([ 0., 1., 2., 0., 1., 2., 3., 6., 5., 4., 7., 6., 5., 4., 8., 9., 10.]) >>> rtn is values_at_links True
625941ba82261d6c526ab330
def start(self, chat_id, send_message, back_btn=True, **kwargs): <NEW_LINE> <INDENT> assert callable(send_message) <NEW_LINE> assert isinstance(chat_id, int) <NEW_LINE> self.init_defer() <NEW_LINE> self.chat_id = chat_id <NEW_LINE> self._send_message = send_message <NEW_LINE> self.kwargs = kwargs <NEW_LINE> self.back_btn = back_btn <NEW_LINE> self.render() <NEW_LINE> return self.deferred
Start activity :param chat_id: Id of the chat where activity running :param send_message: Method to send messages to telegram (chat_id, text, keyboard) :param back_btn: Show back button :param kwargs: Activity params :return: Deferred, which called with ActivityReturn
625941ba796e427e537b0450
def _get_general_info(self): <NEW_LINE> <INDENT> general_info = [ ur.AttrMap(ur.Edit('Recipe Name: ', self.recipe.name, wrap='clip'), 'name' ), ur.AttrMap(ur.IntEdit('Prep Time: ', self.recipe.prep_time), 'prep_time' ), ur.AttrMap(ur.IntEdit('Cook Time: ', self.recipe.cook_time), 'cook_time' ), ur.AttrMap(ur.Edit('Source URL: ', self.recipe.source_url, wrap='clip'), 'source_url' ), ur.AttrMap(ur.Edit('Author: ', self.recipe.author), 'author' ) ] <NEW_LINE> return general_info
General info editors.
625941ba73bcbd0ca4b2bf0b
def ftp_upload(self): <NEW_LINE> <INDENT> file_remote = 'exp_ebao_uat_dm20190914.log' <NEW_LINE> file_local = 'D:\\exp_ebao_uat_dm20190914.log' <NEW_LINE> bufsize = 1024 <NEW_LINE> fp = open(file_local, 'rb') <NEW_LINE> self.__ftp.storbinary('STOR ' + file_remote, fp, bufsize) <NEW_LINE> fp.close()
Upload the file in binary mode
625941babe8e80087fb20ad6
def _emit_log_message(self, handler, record): <NEW_LINE> <INDENT> pass
Called by the engine to log messages in Alias Terminal. All log messages from the toolkit logging namespace will be passed to this method. :param handler: Log handler that this message was dispatched from. Its default format is "[levelname basename] message". :type handler: :class:`~python.logging.LogHandler` :param record: Standard python logging record. :type record: :class:`~python.logging.LogRecord`
625941bacc40096d615957e0
def find_next(self): <NEW_LINE> <INDENT> assert self.index + 1 < len(self.chords), "No more chords to walk" <NEW_LINE> current_chord = self.chords[self.index] <NEW_LINE> current_rn = roman.romanNumeralFromChord(current_chord, self.key) <NEW_LINE> expected_chord_labels = get_expected_next(current_rn.figure, self.key.type == "major") <NEW_LINE> assert len(expected_chord_labels) > 0, "No expected chords given" <NEW_LINE> for i, c in enumerate(self.chords[self.index + 1:]): <NEW_LINE> <INDENT> rn = roman.romanNumeralFromChord(c, self.key) <NEW_LINE> if rn.figure in expected_chord_labels: <NEW_LINE> <INDENT> return self.index + i + 1
Returns the index of the next chord in the chord list that matches one of the expected chords
625941bacc0a2c11143dcd26
def __init__(self, file_name): <NEW_LINE> <INDENT> self.sprite_sheet = pygame.image.load(file_name).convert()
Class constructor. The path to the sprite sheet must be passed in
625941bae5267d203edcdb2e
def __init__(self, expression, variables, **kwargs): <NEW_LINE> <INDENT> self._number_components = 1 <NEW_LINE> self._variables = variables <NEW_LINE> self._dimension = len(variables) <NEW_LINE> if kwargs.has_key("try_simplification"): <NEW_LINE> <INDENT> self._try_simplify = kwargs["try_simplification"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._try_simplify = GlobalDefaults.__dict__["try_simplification"] <NEW_LINE> <DEDENT> assert expression.shape == (1,1) <NEW_LINE> self._potential_s = expression <NEW_LINE> self._potential_n = sympy.lambdify(self._variables, self._potential_s[0,0], "numpy") <NEW_LINE> self._eigenvalues_s = None <NEW_LINE> self._eigenvalues_n = None <NEW_LINE> self._eigenvectors_s = None <NEW_LINE> self._eigenvectors_n = None <NEW_LINE> self._exponential_s = None <NEW_LINE> self._exponential_n = None <NEW_LINE> self._jacobian_s = None <NEW_LINE> self._jacobian_n = None <NEW_LINE> self._hessian_s = None <NEW_LINE> self._hessian_n = None
Create a new :py:class:`MatrixPotential1S` instance for a given potential matrix :math:`V(x)`. :param expression: The mathematical expression representing the potential. :type expression: A `Sympy` matrix type. :param variables: The variables corresponding to the space dimensions. :type variables: A list of `Sympy` symbols.
625941ba293b9510aa2c3126
def test_convert(self): <NEW_LINE> <INDENT> self.assertEqual("1", convert(1))
test method for "passing 1 returns the string '1'"
625941ba07f4c71912b11315
def build_model(name, num_classes, loss='softmax', pretrained=True, use_gpu=True, backbone='resnet50'): <NEW_LINE> <INDENT> avai_models = list(__model_factory.keys()) <NEW_LINE> if name not in avai_models: <NEW_LINE> <INDENT> raise KeyError('Unknown model: {}. Must be one of {}'.format(name, avai_models)) <NEW_LINE> <DEDENT> return __model_factory[name]( num_classes=num_classes, loss=loss, pretrained=pretrained, use_gpu=use_gpu, backbone=backbone, )
A function wrapper for building a model. Args: name (str): model name. num_classes (int): number of training identities. loss (str, optional): loss function to optimize the model. Currently supports "softmax" and "triplet". Default is "softmax". pretrained (bool, optional): whether to load ImageNet-pretrained weights. Default is True. use_gpu (bool, optional): whether to use gpu. Default is True. Returns: nn.Module Examples:: >> from deepreid import models >> model = models.build_model('resnet50', 751, loss='softmax')
625941bafbf16365ca6f604b
def remove_root_node(graph): <NEW_LINE> <INDENT> if 'root' in graph: <NEW_LINE> <INDENT> graph.remove_node('root')
Remove a root node from a scaffoldgraph. Parameters ---------- graph : Scaffoldgraph Graph from which to remove root node.
625941ba9b70327d1c4e0c62
def getWebDriverInstance(self): <NEW_LINE> <INDENT> baseURL = "https://letskodeit.teachable.com/" <NEW_LINE> if self.browser == "iexplorer": <NEW_LINE> <INDENT> driver = webdriver.Ie() <NEW_LINE> <DEDENT> elif self.browser == "firefox": <NEW_LINE> <INDENT> driver = webdriver.Firefox(executable_path="D:/PythonSeleniumFW/configfiles/geckodriver.exe") <NEW_LINE> <DEDENT> elif self.browser == "chrome": <NEW_LINE> <INDENT> driver=webdriver.Chrome(executable_path="D:/PythonSeleniumFW/configfiles/chromedriver.exe") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> driver=webdriver.Chrome(executable_path="D:/PythonSeleniumFW/configfiles/chromedriver.exe") <NEW_LINE> <DEDENT> driver.implicitly_wait(3) <NEW_LINE> driver.maximize_window() <NEW_LINE> driver.get(baseURL) <NEW_LINE> return driver
Get WebDriver Instance based on the browser configuration Returns: 'WebDriver Instance'
625941ba63f4b57ef0000faf
def get_longest_state(data): <NEW_LINE> <INDENT> max_length = 0 <NEW_LINE> for item in data: <NEW_LINE> <INDENT> if len(item) >= max_length: <NEW_LINE> <INDENT> max_length = len(item) <NEW_LINE> <DEDENT> <DEDENT> for item in data: <NEW_LINE> <INDENT> if len(item) == max_length: <NEW_LINE> <INDENT> return item
Receives data, which can be the us_state_abbrev dict or the states list (see above). It returns the longest state measured by the length of the string
625941ba96565a6dacc8f563
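The same result as get_longest_state can be obtained with the built-in max using the string length as the key; a small sketch with made-up data:

```python
# Equivalent one-liner for picking the longest entry (sample data only).
states = ["Iowa", "Ohio", "Massachusetts", "Utah"]
print(max(states, key=len))  # -> Massachusetts
```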
@click.command('delete_group') <NEW_LINE> @click.argument("group", type=str) <NEW_LINE> @pass_context <NEW_LINE> @custom_exception <NEW_LINE> @dict_output <NEW_LINE> def cli(ctx, group): <NEW_LINE> <INDENT> return ctx.gi.groups.delete_group(group)
Delete a group Output: an empty dictionary
625941ba66656f66f7cbc038
def GetTthAzmG(x,y,data): <NEW_LINE> <INDENT> tilt = data['tilt'] <NEW_LINE> dist = data['distance']/npcosd(tilt) <NEW_LINE> x0 = data['distance']*nptand(tilt) <NEW_LINE> MN = -np.inner(makeMat(data['rotation'],2),makeMat(tilt,0)) <NEW_LINE> distsq = data['distance']**2 <NEW_LINE> dx = x-data['center'][0] <NEW_LINE> dy = y-data['center'][1] <NEW_LINE> G = ((dx-x0)**2+dy**2+distsq)/distsq <NEW_LINE> Z = np.dot(np.dstack([dx.T,dy.T,np.zeros_like(dx.T)]),MN).T[2] <NEW_LINE> xyZ = dx**2+dy**2-Z**2 <NEW_LINE> tth = npatand(np.sqrt(xyZ)/(dist-Z)) <NEW_LINE> dxy = peneCorr(tth,data['DetDepth'],dist,tilt,npatan2d(dy,dx)) <NEW_LINE> tth = npatan2d(np.sqrt(xyZ),dist-Z+dxy) <NEW_LINE> azm = (npatan2d(dy,dx)+data['azmthOff']+720.)%360. <NEW_LINE> return tth,azm,G
Give 2-theta, azimuth & geometric corr. values for detector x,y position; calibration info in data - only used in integration
625941bae64d504609d746ce
def calc_diff_S2_te_d2jw_dGjdS2(data, params, j, k): <NEW_LINE> <INDENT> return 0.4 * sum(data.ci * data.dti[k] * (data.fact_ti_djw_dti - data.fact_te_djw_dti), axis=2)
Spectral density Hessian. Calculate the spectral desity values for the Gj - S2 double partial derivative of the original model-free formula with the parameters {S2, te} together with diffusion tensor parameters. The model-free Hessian is:: _k_ d2J(w) 2 \ dti / 1 - (w.ti)^2 (te + ti)^2 - (w.te.ti)^2 \ ------- = - > ci . --- | ---------------- - te^2 ----------------------------- |. dGj.dS2 5 /__ dGj \ (1 + (w.ti)^2)^2 ((te + ti)^2 + (w.te.ti)^2)^2 / i=-k
625941bade87d2750b85fc1c
def __ne__(self, other): <NEW_LINE> <INDENT> return ( not isinstance(self, type(other)) or self._permutation != other._permutation or self._lengths != other._lengths)
Tests difference TESTS:: sage: t = iet.IntervalExchangeTransformation(('a b','b a'),[1,1]) sage: t != t False
625941ba4a966d76dd550e9a
def __init__(self, id, init_name, init_rotation=0, eng_state=0, max_limit=0, obj_type=False): <NEW_LINE> <INDENT> if not isinstance(id, (int, long)) or id == 0: <NEW_LINE> <INDENT> raise ValueError('ERROR:', 'id should be a number > 0') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._id = id <NEW_LINE> self.name = init_name <NEW_LINE> self.rotation_point = init_rotation <NEW_LINE> self.engine_state = eng_state <NEW_LINE> self.max = max_limit <NEW_LINE> self.e_type = obj_type
name: str rotation_point: integer from 0 to max -> indicates the position of the engine rotation engine_state: integer value from -1 to 1 where -> -1: engine on left rotation +-0: engine off +1: engine on right rotation max: integer max value to avoid over-rotation and a future disaster type: str type of object
625941ba8e71fb1e9831d63b
def test_lowercase_filtering(): <NEW_LINE> <INDENT> val = SMSShell.filters.LowerCase(1) <NEW_LINE> assert val('Abcdef') == 'abcdef'
Simple test of the LowerCase filter
625941bad8ef3951e32433cb
def _add_thermal_element_object(self, elem): <NEW_LINE> <INDENT> self._add_element_object(elem)
same as add_element at the moment...
625941ba0fa83653e4656e4b
def test_tf_column_filter(self): <NEW_LINE> <INDENT> trainDF = self.spark.createDataFrame(self.train_examples, ['col1', 'col2']) <NEW_LINE> df = trainDF.withColumn('extra1', trainDF.col1 ) <NEW_LINE> df = df.withColumn('extra2', trainDF.col2) <NEW_LINE> self.assertEquals(len(df.columns), 4) <NEW_LINE> args = {} <NEW_LINE> estimator = TFEstimator(self.get_function('tf/train'), args, export_fn=self.get_function('tf/export')) .setInputMapping( { 'col1': 'x', 'col2': 'y_' }) .setInputMode(TFCluster.InputMode.TENSORFLOW) .setModelDir(self.model_dir) .setExportDir(self.export_dir) .setTFRecordDir(self.tfrecord_dir) .setClusterSize(self.num_workers) .setNumPS(1) .setBatchSize(10) <NEW_LINE> model = estimator.fit(df) <NEW_LINE> self.assertTrue(os.path.isdir(self.model_dir)) <NEW_LINE> self.assertTrue(os.path.isdir(self.tfrecord_dir)) <NEW_LINE> df_tmp = dfutil.loadTFRecords(self.sc, self.tfrecord_dir) <NEW_LINE> self.assertEquals(df_tmp.columns, ['col1', 'col2'])
InputMode.TENSORFLOW TFEstimator saving temporary TFRecords, filtered by input_mapping columns
625941ba30dc7b76659017f8
def evalf(self, a, prec=None, **args): <NEW_LINE> <INDENT> if prec is None: <NEW_LINE> <INDENT> return self.to_sympy(a).evalf(**args) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.to_sympy(a).evalf(prec, **args)
Returns numerical approximation of ``a``.
625941ba45492302aab5e14e
def read_csv_table(csvFile): <NEW_LINE> <INDENT> table = [] <NEW_LINE> with open(csvFile, 'rt', encoding='utf-8-sig') as file_in: <NEW_LINE> <INDENT> csv_reader = csv.reader(file_in, delimiter=',', quoting=csv.QUOTE_ALL) <NEW_LINE> for r, row in enumerate(csv_reader): <NEW_LINE> <INDENT> table.append(row) <NEW_LINE> <DEDENT> <DEDENT> return table
Read a CSV file, export as a table: table[row][column]
625941ba796e427e537b0451
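A self-contained illustration of the same csv.reader pattern used by read_csv_table, with an in-memory buffer standing in for the file path (the sample rows are made up):

```python
import csv
import io

buf = io.StringIO("name,qty\napples,3\npears,5\n")
# Each row becomes a list of strings; index as table[row][column].
table = [row for row in csv.reader(buf, delimiter=',', quoting=csv.QUOTE_ALL)]
print(table[1][0], table[2][1])  # -> apples 5
```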
def fetch(self, *, no_strip=False, numeric=False) -> Union[str, int]: <NEW_LINE> <INDENT> if os.path.exists(self.input_path): <NEW_LINE> <INDENT> with open(self.input_path) as f: <NEW_LINE> <INDENT> if no_strip: <NEW_LINE> <INDENT> return f.read() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return f.read().strip() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> response = requests.get(self.base_url + '/input', cookies={'session': self.token}).text <NEW_LINE> if 'Puzzle inputs differ by user. Please log in to get your puzzle input.' in response: <NEW_LINE> <INDENT> raise ValueError('Token has expired. Please go to Applications -> Cookies and get the new token.') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> with open(self.input_path, 'w') as f: <NEW_LINE> <INDENT> f.write(response) <NEW_LINE> <DEDENT> <DEDENT> if numeric: <NEW_LINE> <INDENT> return int(response) <NEW_LINE> <DEDENT> if no_strip: <NEW_LINE> <INDENT> return response <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return response.strip()
Retrieves the puzzle input as a single string.
625941bad7e4931a7ee9ddaa
def main(): <NEW_LINE> <INDENT> os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'meiduo.configs.dev') <NEW_LINE> try: <NEW_LINE> <INDENT> from django.core.management import execute_from_command_line <NEW_LINE> <DEDENT> except ImportError as exc: <NEW_LINE> <INDENT> raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc <NEW_LINE> <DEDENT> execute_from_command_line(sys.argv)
Run administrative tasks.
625941ba004d5f362079a1c5
def draw_thetas(N): <NEW_LINE> <INDENT> cos_thetas = np.random.uniform(low=-1, high=1, size=N) <NEW_LINE> cos_incs = np.random.uniform(low=-1, high=1, size=N) <NEW_LINE> phis = np.random.uniform(low=0, high=2*np.pi, size=N) <NEW_LINE> zetas = np.random.uniform(low=0, high=2*np.pi, size=N) <NEW_LINE> Fps = 0.5*cos(2*zetas)*(1 + square(cos_thetas))*cos(2*phis) - sin(2*zetas)*cos_thetas*sin(2*phis) <NEW_LINE> Fxs = 0.5*sin(2*zetas)*(1 + square(cos_thetas))*cos(2*phis) + cos(2*zetas)*cos_thetas*sin(2*phis) <NEW_LINE> return np.sqrt(0.25*square(Fps)*square(1 + square(cos_incs)) + square(Fxs)*square(cos_incs))
Draw `N` random angular factors for the SNR. Theta is as defined in [Finn & Chernoff (1993)](https://ui.adsabs.harvard.edu/#abs/1993PhRvD..47.2198F/abstract).
625941ba4e696a04525c92db
def alert_accept(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ele=self.alert_is_pressent() <NEW_LINE> ele.accept() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> alert = Alert(self.driver) <NEW_LINE> alert.accept() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise("not find alert")
Accept (confirm) the alert popup
625941bad8ef3951e32433cc
def clean(text): <NEW_LINE> <INDENT> remove_punctuation = ''.join(i for i in text if not i in string.punctuation) <NEW_LINE> seperated_lines = remove_punctuation.splitlines() <NEW_LINE> for sentence in range(len(seperated_lines)): <NEW_LINE> <INDENT> seperated_lines[sentence] = seperated_lines[sentence].split() <NEW_LINE> <DEDENT> return seperated_lines
(str) -> 2d list Extra function that removes all of the punctuation in a given text. Precondition: A string is given. Post: A 2d list is returned.
625941ba7047854f462a129b
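A worked example of the two steps clean performs, using a made-up input string:

```python
import string

sample = "Hello, world!\nSecond line."
# Step 1: drop punctuation; Step 2: split into lines, then each line into words.
no_punct = ''.join(ch for ch in sample if ch not in string.punctuation)
print([line.split() for line in no_punct.splitlines()])
# -> [['Hello', 'world'], ['Second', 'line']]
```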
def get_variable_name(data_var): <NEW_LINE> <INDENT> name = '' <NEW_LINE> keys = locals().keys() <NEW_LINE> for key, val in locals().items(): <NEW_LINE> <INDENT> print(key, id(key), id(data_var), key is data_var) <NEW_LINE> if val == data_var: <NEW_LINE> <INDENT> name = key <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> return name
get variable name as string :param data_var: :return:
625941ba6e29344779a624a4
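As written, get_variable_name iterates over its own locals(), so at best it returns the literal name 'data_var' rather than the caller's variable name. A common alternative, sketched here under the assumption that inspecting the caller's frame is acceptable, walks the caller's locals instead (the helper name is hypothetical):

```python
import inspect

def caller_variable_name(data_var):
    # Hypothetical helper: look the object up in the *caller's* local namespace.
    caller_locals = inspect.currentframe().f_back.f_locals
    for name, val in caller_locals.items():
        if val is data_var:
            return name
    return ''

payload = {"a": 1}
print(caller_variable_name(payload))  # -> 'payload'
```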
def create_office(self, office_type, office_name): <NEW_LINE> <INDENT> new_office = { "office_id": len(self.all_offices)+1, "office_name": office_name, "office_type": office_type } <NEW_LINE> self.all_offices.append(new_office) <NEW_LINE> return self.all_offices
Args: name: New office name. type : New office type.
625941ba7c178a314d6ef2e8
def test_object_creation_table(self): <NEW_LINE> <INDENT> for i, vto in enumerate(self.validate_table_objects): <NEW_LINE> <INDENT> self.assertEqual(self.sbtabs[i].header_row, vto.sbtab.header_row) <NEW_LINE> self.assertEqual(self.sbtabs[i].columns, vto.sbtab.columns) <NEW_LINE> self.assertEqual(self.sbtabs[i].columns_dict, vto.sbtab.columns_dict) <NEW_LINE> self.assertEqual(self.sbtabs[i].value_rows, vto.sbtab.value_rows) <NEW_LINE> self.assertEqual(self.sbtabs[i].table_type, vto.sbtab.table_type) <NEW_LINE> self.assertNotEqual(len(vto.allowed_table_types), 0) <NEW_LINE> self.assertNotEqual(len(vto.allowed_columns), 0) <NEW_LINE> self.assertNotEqual(len(vto.column2format), 0) <NEW_LINE> self.assertEqual(sorted(vto.allowed_columns.keys()), sorted(vto.allowed_table_types)) <NEW_LINE> test_entries = [] <NEW_LINE> for entry in vto.allowed_columns[vto.sbtab.table_type]: <NEW_LINE> <INDENT> test_entries.append(entry) <NEW_LINE> <DEDENT> self.assertEqual(sorted(vto.column2format.keys()), sorted(test_entries)) <NEW_LINE> for column in vto.allowed_columns: <NEW_LINE> <INDENT> self.assertNotEqual(len(column), 0) <NEW_LINE> <DEDENT> self.assertNotIn('',vto.sbtab.columns)
test if the SBtabs can be used as input for the validatorSBtab
625941bafff4ab517eb2f2c8
def hook_record_stop(self, key): <NEW_LINE> <INDENT> if (key == getattr(Key, self.stop_record_key) and self.record_status == 'start') or key == 'force_stop': <NEW_LINE> <INDENT> self.record_status = 'stop' <NEW_LINE> if self.keyboard_listen_thread: self.keyboard_listen_thread.stop() <NEW_LINE> if self.mouse_listen_thread: self.mouse_listen_thread.stop() <NEW_LINE> if self.debug: print('{} record stop.'.format(key)) <NEW_LINE> return True
force_stop handles the case where repeat is started before recording has finished; it can forcibly stop the recording.
625941ba23849d37ff7b2f20
def OperatorControl(self): <NEW_LINE> <INDENT> dog = self.GetWatchdog() <NEW_LINE> dog.SetEnabled(True) <NEW_LINE> dog.SetExpiration(0.25) <NEW_LINE> while self.IsOperatorControl() and self.IsEnabled(): <NEW_LINE> <INDENT> dog.Feed() <NEW_LINE> self.robot_drive.ArcadeDrive(self.stick) <NEW_LINE> wpilib.Wait(0.04)
Called when operator control mode is enabled
625941ba66656f66f7cbc039
def _Response_Finite(self, vmin, Eee1, Eee2, mx, fp, fn, delta): <NEW_LINE> <INDENT> self.count_response_calls += 1 <NEW_LINE> if delta == 0: <NEW_LINE> <INDENT> branches = [1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> branches = [1, -1] <NEW_LINE> <DEDENT> result = 0 <NEW_LINE> for sign in branches: <NEW_LINE> <INDENT> (ER, qER, const_factor) = self.ConstFactor(vmin, mx, fp, fn, delta, sign) <NEW_LINE> result += integrate.quad(self.DifferentialResponse, Eee1, Eee2, args=(qER, const_factor), epsrel=PRECISSION, epsabs=0)[0] <NEW_LINE> <DEDENT> if result >= 0: <NEW_LINE> <INDENT> return result <NEW_LINE> <DEDENT> return 0
Response function integral d**2 R / (d Eee d ER) between measured energies Eee1 and Eee2. NOT including eta0. For any finite resolution function (i.e. other than Dirac Delta).
625941ba8da39b475bd64e06
def trim(self, shape='None', value='None'): <NEW_LINE> <INDENT> pass
a.trim(shape) Performs in-place trimming of array a to given shape. An optional value argument can be passed and will be used to fill the newly created components if the resize results in a size increase. Note : a is modified in-place >>> A = Array(range(1, 10), shape=(3, 3)) >>> print A.formated() [[1, 2, 3], [4, 5, 6], [7, 8, 9]] >>> S = A[0] >>> A.trim(shape=(4, 3)) >>> print A.formated() [[1, 2, 3], [4, 5, 6], [7, 8, 9], [0, 0, 0]] >>> S == A[0] True >>> S is A[0] True >>> A.trim(shape=(4, 4)) >>> print A.formated() [[1, 2, 3, 0], [4, 5, 6, 0], [7, 8, 9, 0], [0, 0, 0, 0]] >>> A.trim(shape=(2, 2)) >>> print A.formated() [[1, 2], [4, 5]]
625941ba9c8ee82313fbb604
def normalize(eval_data, batch_size): <NEW_LINE> <INDENT> if not FLAGS.data_dir: <NEW_LINE> <INDENT> raise ValueError('Please supply a data_dir') <NEW_LINE> <DEDENT> data_dir = os.path.join(FLAGS.data_dir, 'cifar-10-batches-bin') <NEW_LINE> images, labels = cifar10_input.normalize_inputs(eval_data=eval_data, data_dir=data_dir, batch_size=batch_size) <NEW_LINE> if FLAGS.use_fp16: <NEW_LINE> <INDENT> images = tf.cast(images, tf.float16) <NEW_LINE> labels = tf.cast(labels, tf.float16) <NEW_LINE> <DEDENT> return images, labels
Construct input for CIFAR evaluation using the Reader ops. Args: eval_data: bool, indicating if one should use the train or eval data set. Returns: images: Images. 4D tensor of [batch_size, IMAGE_SIZE, IMAGE_SIZE, 3] size. labels: Labels. 1D tensor of [batch_size] size. Raises: ValueError: If no data_dir
625941ba15fb5d323cde0999
def norm_spaces(s): <NEW_LINE> <INDENT> return ' '.join(s.split())
Normalize spaces: splits on whitespace (\s) and rejoins (faster than a s/\s+/ / regexp)
625941ba851cf427c661a3a1
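A quick worked example of the whitespace normalisation (the input string is illustrative):

```python
# Runs of spaces, tabs and newlines collapse to single spaces.
print(' '.join("  a \t b\n  c ".split()))  # -> 'a b c'
```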
def add_args(self, *names): <NEW_LINE> <INDENT> for n in names: <NEW_LINE> <INDENT> self.kwds[n] = self.parser.get(n)
Add the values of the named options to the pending Bot constructor arguments. E.g., if add_args('foo') is called, the Bot constructor will receive the keyword argument foo= with the value of the option --foo (or the argument <foo>).
625941ba16aa5153ce362307
@register.filter("startswith") <NEW_LINE> def startswith(text: str, starts: str) -> bool: <NEW_LINE> <INDENT> if isinstance(text, str): <NEW_LINE> <INDENT> return text.startswith(starts) <NEW_LINE> <DEDENT> return False
Template implementation of `str.startswith()`.
625941ba24f1403a926009f8
def compute_update(self, printing=False): <NEW_LINE> <INDENT> err = self.net.error() <NEW_LINE> grad = self.net.calc_grad() <NEW_LINE> if printing: <NEW_LINE> <INDENT> print("initial err", err) <NEW_LINE> print("grad norm", np.linalg.norm(grad)) <NEW_LINE> <DEDENT> if self.init_delta is None: <NEW_LINE> <INDENT> self.init_delta = np.zeros_like(self.net.W) <NEW_LINE> <DEDENT> deltas = self.conjugate_gradient(self.init_delta * 0.95, grad, iters=self.CG_iter, printing=printing and self.net.debug) <NEW_LINE> if printing: <NEW_LINE> <INDENT> print("CG steps", deltas[-1][0]) <NEW_LINE> <DEDENT> self.init_delta = deltas[-1][1] <NEW_LINE> new_err = np.inf <NEW_LINE> for j in range(len(deltas) - 1, -1, -1): <NEW_LINE> <INDENT> prev_err = self.net.error(self.net.W + deltas[j][1]) <NEW_LINE> if prev_err > new_err: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> delta = deltas[j][1] <NEW_LINE> new_err = prev_err <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> j -= 1 <NEW_LINE> <DEDENT> if printing: <NEW_LINE> <INDENT> print("using iteration", deltas[j + 1][0]) <NEW_LINE> print("backtracked err", new_err) <NEW_LINE> <DEDENT> quad = (0.5 * np.dot(self.calc_G(delta, damping=self.damping), delta) + np.dot(grad, delta)) <NEW_LINE> improvement_ratio = ((new_err - err) / quad) if quad != 0 else 1 <NEW_LINE> if improvement_ratio < 0.25: <NEW_LINE> <INDENT> self.damping *= 1.5 <NEW_LINE> <DEDENT> elif improvement_ratio > 0.75: <NEW_LINE> <INDENT> self.damping *= 0.66 <NEW_LINE> <DEDENT> if printing: <NEW_LINE> <INDENT> print("improvement_ratio", improvement_ratio) <NEW_LINE> print("damping", self.damping) <NEW_LINE> <DEDENT> l_rate = 1.0 <NEW_LINE> min_improv = min(1e-2 * np.dot(grad, delta), 0) <NEW_LINE> for _ in range(60): <NEW_LINE> <INDENT> if new_err <= err + l_rate * min_improv: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> l_rate *= 0.8 <NEW_LINE> new_err = self.net.error(self.net.W + l_rate * delta) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> l_rate = 0.0 <NEW_LINE> new_err = err <NEW_LINE> <DEDENT> if printing: <NEW_LINE> <INDENT> print("min_improv", min_improv) <NEW_LINE> print("l_rate", l_rate) <NEW_LINE> print("l_rate err", new_err) <NEW_LINE> print("improvement", new_err - err) <NEW_LINE> <DEDENT> if self.plotting: <NEW_LINE> <INDENT> self.plots["training error (log)"] += [new_err] <NEW_LINE> self.plots["learning rate"] += [l_rate] <NEW_LINE> self.plots["damping (log)"] += [self.damping] <NEW_LINE> self.plots["CG iterations"] += [deltas[-1][0]] <NEW_LINE> self.plots["backtracked steps"] += [deltas[-1][0] - deltas[j + 1][0]] <NEW_LINE> <DEDENT> return l_rate * delta
Compute a weight update for the current batch. :param bool printing: if True, print out data about the optimization calc_G(.): Compute Gauss-Newton matrix-vector product, sec 4.2, defined at hessianfree/hessianfree/ffnet.py
625941ba925a0f43d2549d03
def check_int(ask): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> num = int(input(ask)) <NEW_LINE> return num <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> print('input is not integer') <NEW_LINE> print()
check for an integer input, asking again when the input is not an integer, and return the value once it is an integer
625941ba9f2886367277a720
def mobility_user_list(self): <NEW_LINE> <INDENT> return self.method('robots/mobility?user')
List user mobilities.
625941babe7bc26dc91cd494
def boletin_detalle(request,slug): <NEW_LINE> <INDENT> boletin = get_object_or_404(Boletin, slug=slug) <NEW_LINE> dicc = {'boletin': boletin, } <NEW_LINE> return direct_to_template(request, 'boletines/boletin_detalle.html',dicc)
Shows the detail of a bulletin
625941ba0fa83653e4656e4c
def quat_to_rot(quat): <NEW_LINE> <INDENT> b, c, d = quat[..., 0], quat[..., 1], quat[..., 2] <NEW_LINE> bb, cc, dd = b * b, c * c, d * d <NEW_LINE> aa = np.maximum(1. - bb - cc - dd, 0.) <NEW_LINE> a = np.sqrt(aa) <NEW_LINE> ab_2 = 2 * a * b <NEW_LINE> ac_2 = 2 * a * c <NEW_LINE> ad_2 = 2 * a * d <NEW_LINE> bc_2 = 2 * b * c <NEW_LINE> bd_2 = 2 * b * d <NEW_LINE> cd_2 = 2 * c * d <NEW_LINE> rotation = np.array([(aa + bb - cc - dd, bc_2 - ad_2, bd_2 + ac_2), (bc_2 + ad_2, aa + cc - bb - dd, cd_2 - ab_2), (bd_2 - ac_2, cd_2 + ab_2, aa + dd - bb - cc), ]) <NEW_LINE> if quat.ndim > 1: <NEW_LINE> <INDENT> rotation = np.rollaxis(np.rollaxis(rotation, 1, quat.ndim + 1), 0, quat.ndim) <NEW_LINE> <DEDENT> return rotation
Convert a set of quaternions to rotations. Parameters ---------- quat : array, shape (..., 3) q1, q2, and q3 (x, y, z) parameters of a unit quaternion. Returns ------- rot : array, shape (..., 3, 3) The corresponding rotation matrices. See Also -------- rot_to_quat
625941bacb5e8a47e48b793e
def interpretSpecialEventSeriousness(value): <NEW_LINE> <INDENT> codes = { 0 : "Aucune sévérité", 1 : "Événement d’information", 2 : "Événement de mise en garde", 3 : "Évènement relatif à une faute" } <NEW_LINE> return matchWithCode(codes, value)
Field: SpecialEventSeriousness. (p32)
625941ba71ff763f4b54951e
def get_colour_name(requested_colour): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> closest_name = webcolors.rgb_to_name(requested_colour) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> closest_name = closest_colour(requested_colour) <NEW_LINE> <DEDENT> return closest_name
Return the name of the color
625941ba0c0af96317bb8078
def get_product_summary(self): <NEW_LINE> <INDENT> products = defaultdict(lambda: defaultdict(lambda: Decimal(0))) <NEW_LINE> lines = ( self.lines.filter(type=OrderLineType.PRODUCT) .values_list("product_id", "quantity")) <NEW_LINE> for product_id, quantity in lines: <NEW_LINE> <INDENT> products[product_id]['ordered'] += quantity <NEW_LINE> products[product_id]['unshipped'] += quantity <NEW_LINE> <DEDENT> from .shipments import ShipmentProduct <NEW_LINE> shipment_prods = ( ShipmentProduct.objects .filter(shipment__order=self) .values_list("product_id", "quantity")) <NEW_LINE> for product_id, quantity in shipment_prods: <NEW_LINE> <INDENT> products[product_id]['shipped'] += quantity <NEW_LINE> products[product_id]['unshipped'] -= quantity <NEW_LINE> <DEDENT> return products
Return a dict of product IDs -> {ordered, unshipped, shipped}
625941ba167d2b6e31218a2c
def layout(self): <NEW_LINE> <INDENT> self.dispatcher._setSessionId() <NEW_LINE> return self.dispatcher._checkResult(Indigo._lib.indigoLayout(self.id))
Molecule or reaction method calculates layout for the structure Returns: int: 1 if there are no errors
625941ba26238365f5f0ecf9
def send_tab_key(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.web_element.send_keys(Keys.TAB) <NEW_LINE> Log.log_step("元素上按TAB键.(元素位置:%s)"%(self.locator)) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> Log.log_error_info("fail to send tab key for %s\n" % (self.class_name))
Press the Tab key on the element
625941bad99f1b3c44c67425
def test_K_utf8(self): <NEW_LINE> <INDENT> with sftp.open(FOLDER + '/something', 'w') as f: <NEW_LINE> <INDENT> f.write('okay') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> sftp.rename(FOLDER + '/something', FOLDER + '/' + unicode_folder) <NEW_LINE> sftp.open(b(FOLDER) + utf8_folder, 'r') <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.fail('exception ' + str(e)) <NEW_LINE> <DEDENT> sftp.unlink(b(FOLDER) + utf8_folder)
verify that unicode strings are encoded into utf8 correctly.
625941ba3d592f4c4ed1cf0e
def test_retrieve_recipes(self): <NEW_LINE> <INDENT> sample_recipe(user=self.user) <NEW_LINE> sample_recipe(user=self.user) <NEW_LINE> res = self.client.get(RECIPES_URL) <NEW_LINE> recipes = Recipe.objects.all().order_by('-id') <NEW_LINE> serializer = RecipeSerializer(recipes, many=True) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_200_OK) <NEW_LINE> self.assertEqual(res.data, serializer.data)
Test retrieving a list of recipes
625941bad18da76e23532362
def msg_contents(self, message, exclude=None, from_obj=None, **kwargs): <NEW_LINE> <INDENT> contents = self.contents <NEW_LINE> if exclude: <NEW_LINE> <INDENT> exclude = make_iter(exclude) <NEW_LINE> contents = [obj for obj in contents if obj not in exclude] <NEW_LINE> <DEDENT> for obj in contents: <NEW_LINE> <INDENT> obj.msg(message, from_obj=from_obj, **kwargs)
Emits a message to all objects inside this object. Args: message (str): Message to send. exclude (list, optional): A list of objects not to send to. from_obj (Object, optional): An object designated as the "sender" of the message. See `DefaultObject.msg()` for more info. Kwargs: Keyword arguments will be passed on to `obj.msg()` for all messaged objects.
625941ba5510c4643540f286
def get_instruments_on_search(self, exchange, key_word): <NEW_LINE> <INDENT> if self.is_authenticated: <NEW_LINE> <INDENT> str_message = {} <NEW_LINE> str_message["MessageType"] = Constants.GET_INSTRUMENTS_ON_SEARCH <NEW_LINE> str_message["Exchange"] = exchange <NEW_LINE> str_message["Search"] = key_word <NEW_LINE> payload = (json.dumps(str_message)).encode('utf8') <NEW_LINE> self.base_client.sendMessage(payload, isBinary=False)
Call this method to get the list of instruments using search key word. On successful execution **on_message_instruments_on_search(list_instruments)** callback is fired. **list_instruments** : The list of instruments matching the search word. :param exchange: The exchange(required) :param key_word: The search word(required).
625941ba090684286d50eb70
@register.filter(name='documents') <NEW_LINE> def getDocuments(matricula): <NEW_LINE> <INDENT> return Document.objects.filter(matricula=matricula).order_by('pk')
Documents for the given matricula (enrolment)
625941ba0c0af96317bb8079
def test_srid(self): <NEW_LINE> <INDENT> pnt = Point(5, 23, srid=4326) <NEW_LINE> self.assertEqual(4326, pnt.srid) <NEW_LINE> pnt.srid = 3084 <NEW_LINE> self.assertEqual(3084, pnt.srid) <NEW_LINE> self.assertRaises(ctypes.ArgumentError, pnt.set_srid, '4326') <NEW_LINE> poly = fromstr(self.geometries.polygons[1].wkt, srid=4269) <NEW_LINE> self.assertEqual(4269, poly.srid) <NEW_LINE> for ring in poly: <NEW_LINE> <INDENT> self.assertEqual(4269, ring.srid) <NEW_LINE> <DEDENT> poly.srid = 4326 <NEW_LINE> self.assertEqual(4326, poly.shell.srid) <NEW_LINE> gc = GeometryCollection(Point(5, 23), LineString((0, 0), (1.5, 1.5), (3, 3)), srid=32021) <NEW_LINE> self.assertEqual(32021, gc.srid) <NEW_LINE> for i in range(len(gc)): <NEW_LINE> <INDENT> self.assertEqual(32021, gc[i].srid) <NEW_LINE> <DEDENT> hex = '0101000020E610000000000000000014400000000000003740' <NEW_LINE> p1 = fromstr(hex) <NEW_LINE> self.assertEqual(4326, p1.srid) <NEW_LINE> p2 = fromstr(p1.hex) <NEW_LINE> self.assertIsNone(p2.srid) <NEW_LINE> p3 = fromstr(p1.hex, srid=-1) <NEW_LINE> self.assertEqual(-1, p3.srid) <NEW_LINE> pnt_wo_srid = Point(1, 1) <NEW_LINE> pnt_wo_srid.srid = pnt_wo_srid.srid
Testing the SRID property and keyword.
625941bad99f1b3c44c67426
def make_number(): <NEW_LINE> <INDENT> import random <NEW_LINE> number = random.randrange(0,101) <NEW_LINE> return number
Thinks of a random number
625941ba956e5f7376d70d08
def encode(self, vecs): <NEW_LINE> <INDENT> assert vecs.dtype == np.float32 <NEW_LINE> assert vecs.ndim == 2 <NEW_LINE> N, D = vecs.shape <NEW_LINE> assert D == self.Ds * self.M, "input dimension must be Ds * M" <NEW_LINE> codes = np.empty((N, self.M), dtype=self.code_dtype) <NEW_LINE> for m in range(self.M): <NEW_LINE> <INDENT> if self.verbose: <NEW_LINE> <INDENT> print("Encoding the subspace: {} / {}".format(m, self.M)) <NEW_LINE> <DEDENT> vecs_sub = vecs[:, m * self.Ds : (m+1) * self.Ds] <NEW_LINE> codes[:, m], _ = vq(vecs_sub, self.codewords[m]) <NEW_LINE> <DEDENT> return codes
Encode input vectors into PQ-codes. Args: vecs (np.ndarray): Input vectors with shape=(N, D) and dtype=np.float32. Returns: np.ndarray: PQ codes with shape=(N, M) and dtype=self.code_dtype
625941ba099cdd3c635f0aec
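A minimal, self-contained sketch of the per-subspace quantisation that encode performs, trained on toy random data with scipy's kmeans2 (sizes and data are illustrative; this is not the class's own fitting code):

```python
import numpy as np
from scipy.cluster.vq import kmeans2, vq

rng = np.random.default_rng(0)
N, M, Ds, Ks = 100, 2, 4, 8            # vectors, subspaces, sub-dim, codewords
vecs = rng.random((N, M * Ds)).astype(np.float32)

codes = np.empty((N, M), dtype=np.uint8)
for m in range(M):
    sub = vecs[:, m * Ds:(m + 1) * Ds]
    codewords, _ = kmeans2(sub, Ks, minit='points')  # tiny codebook per subspace
    codes[:, m], _ = vq(sub, codewords)              # assign each sub-vector to its nearest codeword
print(codes.shape)  # -> (100, 2)
```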
def resize_activations_avg(v, so): <NEW_LINE> <INDENT> si = list(v.size()) <NEW_LINE> so = list(so) <NEW_LINE> assert len(si) == len(so) <NEW_LINE> if si[1] > so[1]: <NEW_LINE> <INDENT> v = v[:, :so[1]] <NEW_LINE> <DEDENT> if si[0] > so[0]: <NEW_LINE> <INDENT> v = v[:so[0], :] <NEW_LINE> <DEDENT> if len(si) == 4 and (si[2] > so[2] or si[3] > so[3]): <NEW_LINE> <INDENT> assert si[2] % so[2] == 0 and si[3] % so[3] == 0 <NEW_LINE> ks = (si[2] // so[2], si[3] // so[3]) <NEW_LINE> v = F.avg_pool2d(v, kernel_size=ks, stride=ks, ceil_mode=False, padding=0, count_include_pad=False) <NEW_LINE> <DEDENT> if si[2] != so[2]: <NEW_LINE> <INDENT> assert so[2] / si[2] == so[3] / si[3] <NEW_LINE> v = F.interpolate(v, size=so[2], mode='nearest') <NEW_LINE> <DEDENT> if si[1] < so[1]: <NEW_LINE> <INDENT> z = torch.zeros([v.shape[0], so[1] - si[1]] + so[2:]) <NEW_LINE> v = torch.cat([v, z], 1) <NEW_LINE> <DEDENT> if si[0] < so[0]: <NEW_LINE> <INDENT> z = torch.zeros([so[0] - si[0], v.shape[1]] + so[2:]) <NEW_LINE> v = torch.cat([v, z], 0) <NEW_LINE> <DEDENT> return v
Resize activation tensor 'v' of shape 'si' to match shape 'so'. :param v: :param so: :return:
625941bad268445f265b4cfe
def argmax(input, dim=None, keepdim=False): <NEW_LINE> <INDENT> if dim is None: <NEW_LINE> <INDENT> return torch._argmax(input.contiguous().view(-1), dim=0, keepdim=False) <NEW_LINE> <DEDENT> return torch._argmax(input, dim, keepdim)
Returns the indices of the maximum values of a tensor across a dimension. This is the second value returned by :meth:`torch.max`. See its documentation for the exact semantics of this method. Args: input (Tensor): the input tensor dim (int): the dimension to reduce. If ``None``, the argmax of the flattened input is returned. keepdim (bool): whether the output tensors have :attr:`dim` retained or not. Ignored if ``dim=None``. Example:: >>> a = torch.randn(4, 4) >>> a tensor([[ 1.3398, 0.2663, -0.2686, 0.2450], [-0.7401, -0.8805, -0.3402, -1.1936], [ 0.4907, -1.3948, -1.0691, -0.3132], [-1.6092, 0.5419, -0.2993, 0.3195]]) >>> torch.argmax(a, dim=1) tensor([ 0, 2, 0, 1])
625941ba4e696a04525c92dc