code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def __init__(self, width, val, *, trunc=False):
    """Create a fixed-width word from an unsigned integer value.

    :param width: bit width of the word; must be non-negative.
    :param val: unsigned integer value of the word.
    :param trunc: when True, out-of-range values are masked to fit
        instead of raising.
    :raises ValueError: if ``width`` is negative, or if ``val`` does not
        fit in ``width`` bits and ``trunc`` is False.
    """
    w = operator.index(width)
    if w < 0:
        raise ValueError('width must not be negative')
    # self.mask is derived from the width, so the width is stored first.
    self._width = w
    value = BinInt(operator.index(val))
    if trunc:
        value &= self.mask
    else:
        if value & ~self.mask:
            raise ValueError('value does not fit in the given bit width')
    assert isinstance(value, BinInt)
    self._val = value
Creates a word with a given width corresponding to a given unsigned integer value. If ``trunc`` is True, values out of range are masked to fit. Otherwise, it is an error to pass a value that doesn't fit in the given width.
625941b591f36d47f21ac2dd
def send_location(self, chat_id: int or str, latitude: float, longitude: float, disable_notification: bool = None, reply_to_message_id: int = None, reply_markup=None):
    """Send a point on the map.

    :param chat_id: unique identifier (int) or username (str) of the target chat.
    :param latitude: latitude of the location.
    :param longitude: longitude of the location.
    :param disable_notification: send the message silently (notification with no sound).
    :param reply_to_message_id: if the message is a reply, id of the original message.
    :param reply_markup: additional interface options (inline/reply keyboard etc.).
    :return: the parsed sent message, or None if no new-message update is found.
    """
    # Fire the low-level SendMedia request with a bare geo-point media.
    r = self.send(
        functions.messages.SendMedia(
            peer=self.resolve_peer(chat_id),
            media=types.InputMediaGeoPoint(
                types.InputGeoPoint(
                    latitude,
                    longitude
                )
            ),
            message="",
            # The API expects None (absent), not False, for "not silent".
            silent=disable_notification or None,
            reply_to_msg_id=reply_to_message_id,
            random_id=self.rnd_id(),
            reply_markup=reply_markup.write() if reply_markup else None
        )
    )
    # Find the new-message update in the result and parse it into a
    # high-level Message, resolving users/chats by id.
    for i in r.updates:
        if isinstance(i, (types.UpdateNewMessage, types.UpdateNewChannelMessage)):
            return utils.parse_messages(
                self,
                i.message,
                {i.id: i for i in r.users},
                {i.id: i for i in r.chats}
            )
Use this method to send points on the map. Args: chat_id (``int`` | ``str``): Unique identifier (int) or username (str) of the target chat. For your personal cloud (Saved Messages) you can simply use "me" or "self". For a contact that exists in your Telegram address book you can use his phone number (str). For a private channel/supergroup you can use its *t.me/joinchat/* link. latitude (``float``): Latitude of the location. longitude (``float``): Longitude of the location. disable_notification (``bool``, *optional*): Sends the message silently. Users will receive a notification with no sound. reply_to_message_id (``int``, *optional*): If the message is a reply, ID of the original message reply_markup (:obj:`InlineKeyboardMarkup` | :obj:`ReplyKeyboardMarkup` | :obj:`ReplyKeyboardRemove` | :obj:`ForceReply`, *optional*): Additional interface options. An object for an inline keyboard, custom reply keyboard, instructions to remove reply keyboard or to force a reply from the user. Returns: On success, the sent :obj:`Message <pyrogram.Message>` is returned. Raises: :class:`Error <pyrogram.Error>`
625941b5be7bc26dc91cd3ee
def GetParametersForImport(self, request):
    """Obtain the parameters needed to import key material into a CMK.

    The returned ``Token`` is passed to ``ImportKeyMaterial`` and the
    returned ``PublicKey`` encrypts the key material; both are valid for
    24 hours.

    :param request: Request instance for GetParametersForImport.
    :type request: :class:`tencentcloud.kms.v20190118.models.GetParametersForImportRequest`
    :rtype: :class:`tencentcloud.kms.v20190118.models.GetParametersForImportResponse`
    :raises TencentCloudSDKException: on any API or transport error.
    """
    try:
        params = request._serialize()
        body = self.call("GetParametersForImport", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.GetParametersForImportResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # BUGFIX: Python 3 exceptions have no `.message` attribute; the old
        # `TencentCloudSDKException(e.message, e.message)` raised an
        # AttributeError that masked the real error.
        raise TencentCloudSDKException(type(e).__name__, str(e))
This API is used to obtain the parameters of the material to be imported into a CMK. The returned `Token` is used as one of the parameters to execute `ImportKeyMaterial`, and the returned `PublicKey` is used to encrypt the key material. The `Token` and `PublicKey` are valid for 24 hours. If they are expired, you will need to call the API again to get a new `Token` and `PublicKey`. :param request: Request instance for GetParametersForImport. :type request: :class:`tencentcloud.kms.v20190118.models.GetParametersForImportRequest` :rtype: :class:`tencentcloud.kms.v20190118.models.GetParametersForImportResponse`
625941b54e696a04525c923c
def resnet152_cub(classes=200, **kwargs):
    """ResNet-152 model for CUB-200-2011, from 'Deep Residual Learning for
    Image Recognition' (https://arxiv.org/abs/1512.03385).

    Parameters
    ----------
    classes : int, default 200
        Number of classification classes.
    **kwargs
        Forwarded to ``get_resnet`` (e.g. pretrained weights, model root).
    """
    return get_resnet(classes=classes, blocks=152, model_name="resnet152_cub", **kwargs)
ResNet-152 model for CUB-200-2011 from 'Deep Residual Learning for Image Recognition,' https://arxiv.org/abs/1512.03385. Parameters: ---------- classes : int, default 200 Number of classification classes. pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.chainer/models' Location for keeping the model parameters.
625941b5d99f1b3c44c67387
def at_file(infile, outfile):
    """Write the A/T fraction of each sequence in a FASTQ file to *outfile*.

    :param infile: path to a .fastq file (4 lines per record; the second
        line of each record is the sequence).
    :param outfile: path to a text file; receives one AT fraction per line.
    """
    # Both files are context-managed: the original never closed outfile.
    with open(infile) as fastq_file, open(outfile, "w") as outhandle:
        for i, line in enumerate(fastq_file):
            if i % 4 == 1:  # sequence line of the 4-line FASTQ record
                # BUGFIX: strip the newline so it is not counted in the
                # sequence length, which skewed the fraction.
                seq = line.rstrip("\n")
                at_pct = at(seq) / len(seq)
                outhandle.write("{}\n".format(at_pct))
count a and t in dna sequence in any fastq file infile is a .fastq file, outfile is a .txt file outfile will contain the AT percentage of each DNA sequence in the fastq file
625941b5f9cc0f698b1403ee
def __getattr__(self, name):
    """Answer the requested profile feature, converted to its declared type.

    :param name: feature name; must appear in the class definition's
        'features' mapping.
    :raises AttributeError: if *name* is not a known feature.
    """
    feature_list = self._class_definition()['features']
    if name not in feature_list:
        raise AttributeError("'Profile' object has no attribute '{0}'".format(name))
    feature_string = self.get_mythtv_feature(name)
    # Strip Python-2 style long-integer 'L' suffixes from large numbers.
    feature_string = re.sub(r'([0-9]{6,})L', r'\1', feature_string)
    feature_type = feature_list[name]
    # SECURITY NOTE(review): eval() builds '<type>("<string>")' from data
    # returned by get_mythtv_feature(); if that data is not fully trusted
    # this is code injection — consider a whitelist of constructors.
    feature = eval('{0}("{1}")'.format(feature_type, feature_string))
    return feature
Answer the requested feature
625941b57c178a314d6ef23f
def fractional_knapsack(capacity, weights, values):
    """Solve the fractional knapsack problem greedily.

    Items may be taken fractionally, so picking items in decreasing
    value-per-weight order is optimal.

    :param capacity: capacity of the knapsack.
    :param weights: weight of each item.
    :param values: value of each item (parallel to ``weights``).
    :return: the maximum total value that fits into the knapsack (float).
    """
    # BUGFIX: keep (ratio, weight) pairs in a list — the original dict
    # keyed on the ratio silently dropped items sharing a value/weight
    # ratio, undercounting the achievable value.
    items = [(float(v) / float(w), float(w)) for w, v in zip(weights, values)]
    items.sort(key=lambda item: item[0], reverse=True)
    value = 0.
    for ratio, weight in items:
        if capacity == 0:
            return value
        # Take the whole item if it fits, otherwise just fill the rest.
        take = weight if capacity > weight else capacity
        capacity -= take
        value += take * ratio
    return value
@type capacity: int @param capacity: The capacity of the knapsack @type weights: list[int] @param weights: Weight of an item - this is what fills the capacity of the knapsack @type values: list[int] @param values: The value of each item - values[i] corresponds to weights[i] @rtype float @return Maximum value of items that can fit into the knapsack. The difference between the returned value and the true answer must be at most 10^-3
625941b5cdde0d52a9e52e16
def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir, native_sysroot, pseudo):
    """Prepare content for a btrfs rootfs partition.

    Measures the rootfs directory, creates a sparse image file of the
    computed size, and formats it with mkfs.btrfs populated from
    *rootfs_dir*.
    """
    # Size the partition from the on-disk footprint of the rootfs tree (KiB).
    du_cmd = "du -ks %s" % rootfs_dir
    out = exec_cmd(du_cmd)
    actual_rootfs_size = int(out.split()[0])
    rootfs_size = self.get_rootfs_size(actual_rootfs_size)
    # Create a sparse file of the target size (bytes = KiB * 1024).
    with open(rootfs, 'w') as sparse:
        os.ftruncate(sparse.fileno(), rootfs_size * 1024)
    label_str = ""
    if self.label:
        label_str = "-L %s" % self.label
    # -r populates the filesystem from rootfs_dir at mkfs time.
    mkfs_cmd = "mkfs.%s -b %d -r %s %s %s -U %s %s" % (self.fstype, rootfs_size * 1024, rootfs_dir, label_str, self.mkfs_extraopts, self.fsuuid, rootfs)
    exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
Prepare content for a btrfs rootfs partition.
625941b55fdd1c0f98dc0019
def SetModule3DModel(self):
    """Set a 3D model for the module.

    Default is a no-op; override this in subclasses that have a model
    to set.
    """
    pass
Set a 3D model for the module Default is to do nothing, you need to implement this if you have a model to set FIXME: This doesn't seem to be enabled yet?
625941b523e79379d52ee350
def __init__(self, f, θs, α=0.001, β1=0.9, β2=0.999, ε=1e-8, dec=0.):
    """Build Adam update rules for loss *f* over shared parameters *θs*.

    :param f: tensor representing the loss function.
    :param θs: list of theano shared variables (the parameters).
    :param α: learning rate; β1/β2: moment decay rates; ε: numerical
        stabilizer — all as in the Adam paper.
    :param dec: extra per-step learning-rate decay factor.
    """
    # Cast hyper-parameters to the configured float width once.
    # NOTE: `one`, `floatX`, `theano`, `T`, `np` come from module scope.
    α, β1, β2, ε, dec = [np.cast[floatX](h) for h in [α, β1, β2, ε, dec]]
    # t counts update steps; ms/vs are per-parameter first/second moments.
    t = theano.shared(0, name="t")
    self.ms = [theano.shared(np.zeros(θ.shape.eval(), dtype=floatX), borrow=True, name="m") for θ in θs]
    self.vs = [theano.shared(np.zeros(θ.shape.eval(), dtype=floatX), borrow=True, name="v") for θ in θs]
    gs = T.grad(f, θs)
    t_u = (t, t + 1)
    # Exponential moving averages of gradients and squared gradients.
    m_us = [(m, β1 * m + (one - β1) * g) for m, g in zip(self.ms, gs)]
    v_us = [(v, β2 * v + (one - β2) * T.sqr(g)) for v, g in zip(self.vs, gs)]
    # Bias-corrected step size, with the additional `dec` decay applied.
    α_hat = α * T.sqrt(one - T.cast(T.pow(β2, t_u[1]), floatX)) / (one - T.cast(T.pow(β1, t_u[1]), floatX))
    α_hat = α_hat / (one + (t_u[1] * dec))
    θ_us = [(θ, θ - α_hat * m_u[1] / (T.sqrt(v_u[1]) + ε)) for θ, m_u, v_u in zip(θs, m_us, v_us)]
    # Update list consumed by theano.function(updates=...).
    self.updates = m_us + v_us + [t_u] + θ_us
f: tensor representing the loss function. θs: list of shared variables representing the parameters. The remaining parameters are the same as in the paper.
625941b5adb09d7d5db6c57b
def SetLJTickVoltage(self, name, voltage):
    """Set the output voltage of the LJTick DAC.

    Values outside the 0-10 V range are rejected with a warning and the
    DAC is left untouched.

    :param name: register name of the DAC channel.
    :param voltage: desired output voltage in volts (0-10).
    """
    # Guard clause: refuse out-of-range requests.
    if not 0 <= voltage <= 10:
        logging.warning("labjackT7 error: Set voltage LJTick out of range 0-10V")
        return
    logging.debug("Setting LJTICK to {0:.3f}V".format(voltage))
    eWriteName(self.handle, name, voltage)
Set the voltage output of the LTTick DAC
625941b53eb6a72ae02ec2c2
def update(self, S21):
    """Redraw the IQ plot with new complex S21 data.

    Args:
        S21: numpy array of complex S21 samples; the real part is plotted
            on x and the imaginary part on y.
    """
    self.line.set_xdata(np.real(S21))
    self.line.set_ydata(np.imag(S21))
    # Keep the aspect ratio square and rescale the axes to the new data.
    self.ax.axis('equal')
    self.ax.relim()
    self.ax.autoscale()
    # Force an immediate redraw (interactive/live-update use).
    self.fig.canvas.draw()
    self.fig.canvas.flush_events()
Args: S21: numpy array of S21
625941b5d4950a0f3b08c142
def run_rpc(self, tree, devices):
    """Wrap *tree* in an <rpc> element, run it on *devices*, and yield
    each result with the rpc-reply wrapper stripped from its output.

    :param tree: an etree element or an XML string to parse.
    :param devices: devices to run the RPC on, forwarded to ``run``.
    """
    element = tree if etree.iselement(tree) else etree.fromstring(tree)
    wrapper = etree.Element('rpc')
    wrapper.append(element)
    for result in self.run(wrapper, devices):
        # Unwrap the single rpc-reply child from the output.
        result['Output'] = result['Output'][0]
        yield result
This simply appends the rpc tag to the supplied tree, and removes the rpc-reply tag from the response.
625941b530bbd722463cbbaa
def __init__(self, **kwargs):
    """Initialize the thread object.

    Sets up a stop event and a re-entrant lock, then forwards all keyword
    arguments to the subclass hook ``initialize``.
    """
    threading.Thread.__init__(self)
    # Event used to request the thread to stop; RLock guards shared state.
    self._stop_thread = threading.Event()
    self._lock_thread = threading.RLock()
    self.initialize(**kwargs)
initializer #1
625941b5d6c5a10208143e2f
@app.route('/login')
def showLogin():
    """Render the login page with a fresh anti-forgery state token."""
    # Random 32-character token, stored in the session and echoed to the
    # template so the OAuth callback can be validated against it.
    state = ''.join(random.choice(
        string.ascii_uppercase + string.digits
    ) for x in range(32))
    login_session['state'] = state
    return render_template('login.html', STATE=state)
Login Page
625941b52c8b7c6e89b355ac
def receive(self):
    """Receive a reply from the server. Should only be used internally.

    Reads length-prefixed RCON packets until a complete (possibly split)
    response is assembled. Returns True for an auth response, or the
    accumulated response body string otherwise.

    :raises SourceRconError: on protocol violations, bad password, or
        unexpected disconnects.
    """
    packetsize = False
    requestid = False
    response = False
    message = ''
    message2 = ''
    while 1:
        # Read the 4-byte little-endian packet size prefix.
        buf = b''
        while len(buf) < 4:
            try:
                recv = self.tcp.recv(4 - len(buf))
                if not len(recv):
                    raise SourceRconError('RCON connection unexpectedly closed by remote host')
                buf += recv
            except SourceRconError:
                raise
            except:
                # Socket timeout/error: fall through and handle the short read.
                break
        if len(buf) != 4:
            break
        packetsize = struct.unpack('<l', buf)[0]
        if packetsize < MIN_MESSAGE_LENGTH or packetsize > MAX_MESSAGE_LENGTH:
            raise SourceRconError('RCON packet claims to have illegal size: %d bytes' % (packetsize,))
        # Read the packet body of the announced size.
        buf = b''
        while len(buf) < packetsize:
            try:
                recv = self.tcp.recv(packetsize - len(buf))
                if not len(recv):
                    raise SourceRconError('RCON connection unexpectedly closed by remote host')
                buf += recv
            except SourceRconError:
                raise
            except:
                break
        if len(buf) != packetsize:
            raise SourceRconError('Received RCON packet with bad length (%d of %d bytes)' % (len(buf),packetsize,))
        # First body field: request id; -1 signals authentication failure.
        requestid = struct.unpack('<l', buf[:4])[0]
        if requestid == -1:
            self.disconnect()
            raise SourceRconError('Bad RCON password')
        elif requestid != self.reqid:
            raise SourceRconError('RCON request id error: %d, expected %d' % (requestid,self.reqid,))
        # Second body field: response type.
        response = struct.unpack('<l', buf[4:8])[0]
        if response == SERVERDATA_AUTH_RESPONSE:
            return True
        elif response != SERVERDATA_RESPONSE_VALUE:
            raise SourceRconError('Invalid RCON command response: %d' % (response,))
        # Body carries two NUL-terminated strings; anything after is junk.
        buf = buf.decode("utf-8", errors="replace")
        str1 = buf[8:]
        pos1 = str1.index('\x00')
        str2 = str1[pos1+1:]
        pos2 = str2.index('\x00')
        crap = str2[pos2+1:]
        if crap:
            raise SourceRconError('RCON response contains %d superfluous bytes' % (len(crap),))
        message += str1[:pos1]
        message2 += str2[:pos2]
        # Large responses may be split across packets: keep reading while
        # more data is already queued, or the packet was suspiciously big.
        poll = select.select([self.tcp], [], [], 0)
        if not len(poll[0]) and packetsize < PROBABLY_SPLIT_IF_LARGER_THAN:
            break
    if response is False:
        raise SourceRconError('Timed out while waiting for reply')
    elif message2:
        raise SourceRconError('Invalid response message: %s' % (repr(message2),))
    return message
Receive a reply from the server. Should only be used internally.
625941b521a7993f00bc7ad0
def __init__(self, chemin: str, nom: Optional[str] = None) -> None:
    """Instantiate a map (carte) from a file.

    ``contenu`` holds the raw text extracted from the file and
    ``est_valide`` is True once the content has been validated.

    :param chemin: path of the file containing the map.
    :param nom: optional identifier for the map.
    """
    # Defaults first; extraire() fills contenu and verifier_contenu()
    # updates est_valide.
    self.contenu = ""
    self.nom = nom
    self.est_valide = False
    self.extraire(chemin)
    self.verifier_contenu()
Instancie une carte. ``contenu`` est la chaîne de caractère extraite du fichier. ``est_valide`` est à True si le fichier est valide, False sinon. :param chemin: chemin d'accès au fichier contenant la carte. :param nom: identifie la carte.
625941b5091ae35668666d4e
def __init__(self, app, local_conf):
    """Initialize the turnstile middleware.

    Saves the configuration, builds the lists of preprocessors and
    postprocessors, selects the delay formatter, and starts the control
    daemon thread (local or remote).
    """
    self.app = app
    self.limits = []
    self.limit_sum = None
    self.mapper = None
    self.mapper_lock = eventlet.semaphore.Semaphore()
    self.conf = config.Config(conf_dict=local_conf)
    self._db = None
    self.preprocessors = []
    self.postprocessors = []
    # 'enable' lists entrypoint names for which both pre- and post-
    # processors are looked up; postprocessors run in reverse order
    # (hence insert(0, ...)).
    enable = self.conf.get('enable')
    if enable is not None:
        for proc in enable.split():
            preproc = utils.find_entrypoint('turnstile.preprocessor', proc, compat=False)
            if preproc:
                self.preprocessors.append(preproc)
            postproc = utils.find_entrypoint('turnstile.postprocessor', proc, compat=False)
            if postproc:
                self.postprocessors.insert(0, postproc)
    else:
        # Legacy config style: explicit 'preprocess'/'postprocess' lists;
        # missing entrypoints are an error here (required=True).
        for preproc in self.conf.get('preprocess', '').split():
            klass = utils.find_entrypoint('turnstile.preprocessor', preproc, required=True)
            self.preprocessors.append(klass)
        for postproc in self.conf.get('postprocess', '').split():
            klass = utils.find_entrypoint('turnstile.postprocessor', postproc, required=True)
            self.postprocessors.append(klass)
    # Custom formatter gets the configured status prepended; otherwise the
    # built-in format_delay is used.
    formatter = self.conf.get('formatter')
    if formatter:
        formatter = utils.find_entrypoint('turnstile.formatter', formatter, required=True)
        self.formatter = lambda a, b, c, d, e: formatter(
            self.conf.status, a, b, c, d, e)
    else:
        self.formatter = self.format_delay
    # Start the control daemon; the remote variant is used when the
    # 'control.remote' option is truthy.
    if self.conf.to_bool(self.conf['control'].get('remote', 'no'), False):
        self.control_daemon = remote.RemoteControlDaemon(self, self.conf)
    else:
        self.control_daemon = control.ControlDaemon(self, self.conf)
    self.control_daemon.start()
    LOG.info("Turnstile middleware initialized")
Initialize the turnstile middleware. Saves the configuration and sets up the list of preprocessors, connects to the database, and initiates the control daemon thread.
625941b5d10714528d5ffac7
def dedupe_images(image_dir: Path) -> int:
    """Delete duplicate images (by content hash) from *image_dir*.

    The first file seen with a given hash is kept; every later file with
    the same hash is removed.

    :param image_dir: directory whose entries are hashed and compared.
    :return: the number of files deleted.
    """
    seen = set()
    duplicates = []
    for entry in Path(image_dir).iterdir():
        digest = hashfile(entry)
        if digest in seen:
            duplicates.append(entry)
        else:
            seen.add(digest)
    for entry in duplicates:
        entry.unlink()
    return len(duplicates)
Delete duplicate images from image_dir
625941b556b00c62f0f14444
def is_valid_shlvl(x):
    """Check whether *x* is a proper $SHLVL integer."""
    if not isinstance(x, int):
        return False
    # A valid level round-trips unchanged through the normalizer.
    return to_shlvl(x) == x
Checks whether a variable is a proper $SHLVL integer.
625941b5a4f1c619b28afe2b
@alias(func_alias="i", _type="COMMON")
def run():
    """Show website information (command alias: ``i``).

    Delegates to print_webshell_info().
    """
    print_webshell_info()
info Show website information.
625941b51b99ca400220a898
def clicked(self, p):
    """Return True if the button is active and point *p* lies inside it."""
    if not self.active:
        return False
    within_x = self.xmin <= p.getX() <= self.xmax
    within_y = self.ymin <= p.getY() <= self.ymax
    return within_x and within_y
Returns true if button is active and p is inside.
625941b523849d37ff7b2e7a
def _get_new_seqnr(self): <NEW_LINE> <INDENT> self.last_seqnr = self.last_seqnr + 1 <NEW_LINE> return str(self.last_seqnr)
Returns a unique sequence number (unique across threads)
625941b5925a0f43d2549c5b
def searchPossibleEntries(self, return_fields=None, **kw):
    """List all the possible entry ids.

    Delegates to the underlying content object; implemented in Indirect
    Directory only.

    :raises NotImplementedError: if the content object does not support
        the operation.
    """
    content = self._getContent()
    try:
        return content.searchPossibleEntries(return_fields, **kw)
    except AttributeError:
        # Missing method on the content object means the feature is
        # simply not available for this directory type.
        raise NotImplementedError
List all the possible entry ids. Implemented in Indirect Directory only.
625941b56fece00bbac2d523
def idx4(i, j, k, l):
    """Map orbital indices (i, j, k, l) to the (ij|kl) position.

    Returns the 2-tuple (idx2(i, j), idx2(k, l)) indexing the square ERI
    array of compound pair indices.
    """
    row = idx2(i, j)
    col = idx2(k, l)
    return row, col
idx4(i,j,k,l) returns 2-tuple corresponding to (ij|kl) in square ERI array (size n*(n-1)/2 square) (4-fold symmetry?)
625941b555399d3f0558849c
def has_permission(self, method, endpoint, user=None):
    """Return whether the user may access *endpoint* with *method*.

    :param method: the HTTP method to check.
    :param endpoint: the application endpoint.
    :param user: user to check; defaults to the current user from the
        configured user loader.
    """
    self.get_app()  # keep the original app lookup (and any side effects)
    subject = user or self._user_loader()
    if hasattr(subject, 'get_roles'):
        roles = subject.get_roles()
    else:
        # Users without roles (e.g. not logged in) get the anonymous role.
        roles = [anonymous]
    return self._check_permission(roles, method, endpoint)
Return does the current user can access the resource. Example:: @app.route('/some_url', methods=['GET', 'POST']) @rbac.allow(['anonymous'], ['GET']) def a_view_func(): return Response('Blah Blah...') If you are not logged. `rbac.has_permission('GET', 'a_view_func')` return True. `rbac.has_permission('POST', 'a_view_func')` return False. :param method: The method wait to check. :param endpoint: The application endpoint. :param user: user who you need to check. Current user by default.
625941b5be8e80087fb20a37
def urlwrite(site_url, url=''):
    """Join *site_url* and *url* into one URL, normalizing the slash
    between them.

    :param site_url: base site URL, with or without a trailing slash.
    :param url: optional path; a leading slash is added when missing.
    :return: the combined URL.
    """
    base = site_url.rstrip('/') if site_url.endswith('/') else site_url
    if url and not url.startswith('/'):
        url = '/' + url
    return base + url
Join site_url and url into a single URL, normalizing the slash between them.
625941b5bde94217f3682be5
def get_related_admin(self, cls):
    """Get the default :class:`camelot.admin.object_admin.ObjectAdmin`
    class for *cls*; returns None if not known.

    The admin should either be registered through :meth:`register` or be
    defined as an inner class named ``Admin`` of the entity.

    :param cls: the class whose admin is looked up; resolution is
        delegated to :meth:`get_entity_admin`.
    """
    return self.get_entity_admin(cls)
Get the default :class:`camelot.admin.object_admin.ObjectAdmin` class for a specific class, return None, if not known. The ObjectAdmin should either be registered through the :meth:`register` method or be defined as an inner class with name :keyword:`Admin` of the entity. :param entity: a :class:`class`
625941b53617ad0b5ed67ce7
def upgradeUsr(anaconda):
    """Handle the upgrade of /bin, /sbin, /lib, /lib64 to symlinks into /usr/.

    Uses dracut's convertfs module; exits the installer if the merge fails.
    """
    dirs = ["/bin", "/sbin", "/lib", "/lib64"]
    dirs = [ROOT_PATH+d for d in dirs]
    # Nothing to do if a previous run already converted the directories.
    if all(map(os.path.islink, dirs)):
        log.info("upgradeusr dirs are already symlinks")
        return
    if anaconda.intf is not None:
        w = anaconda.intf.waitWindow(_("Upgrade /usr symlinks"),
                                     _("Running /usr merge script"))
    # execWithRedirect returns non-zero when convertfs.sh fails.
    if iutil.execWithRedirect("/usr/lib/dracut/modules.d/30convertfs/convertfs.sh",
                              [ROOT_PATH],
                              stdout="/dev/tty5", stderr="/dev/tty5"):
        log.error("convertfs failed")
        if anaconda.intf is not None:
            w.pop()
            rc = anaconda.intf.messageWindow(_("/usr merge failed"),
                                             _("The /usr merge script failed. This is required"
                                               " for Fedora 17 to work. The upgrade cannot continue."
                                               "\n\n"))
        # The merge is mandatory; abort the upgrade entirely.
        sys.exit(0)
    log.info("convertfs was successful")
    if anaconda.intf is not None:
        w.pop()
Handle the upgrade of /bin, /sbin, /lib, /lib64 to symlinks into /usr/ This uses dracut's convertfs module
625941b5d486a94d0b98df37
def quick_sort(elements):
    """Quick sort with Lomuto partition (sorts *elements* in place).

    NOTE(review): relies on the module-level ``partition`` helper; the
    sweep of ``partition(elements, pi - i)`` over decreasing pivot
    positions differs from textbook recursive quicksort — confirm
    ``partition``'s contract before reusing this elsewhere.
    """
    if elements:
        # Partition the whole list around the last element first.
        pi = partition(elements, len(elements) - 1)
        for i in range(pi):
            partition(elements, pi - i)
Quick sort with lomuto partition.
625941b5046cf37aa974cb33
@application.route('/')
def elixir_callback():
    """Receive callback from ELIXIR AAI, set auth cookies and redirect to
    the Beacon UI.

    Validates the anti-forgery ``state``, exchanges the ``code`` for an
    access token, and sets the ``access_token`` (and, for bona fide
    researchers, ``bona_fide_status``) cookies on the redirect response.
    """
    error = request.args.get('error', '')
    if error:
        return "Error: " + error
    state = request.args.get('state', '')
    if not is_valid_state(state):
        abort(403)
    code = request.args.get('code')
    access_token = get_token(code)
    # Shared cookie attributes, read from the environment once.
    cookie_kwargs = {
        'max_age': int(os.environ.get('COOKIE_AGE', 3600)),
        'secure': os.environ.get('COOKIE_SECURE', True),
        'domain': os.environ.get('COOKIE_DOMAIN', None),
    }
    try:
        response = application.make_response(redirect(os.environ.get('REDIRECT_URL', None)))
        response.set_cookie('access_token', access_token, **cookie_kwargs)
        if get_bona_fide_status(access_token):
            response.set_cookie('bona_fide_status', BONA_FIDE_URL, **cookie_kwargs)
    except Exception as e:
        # BUGFIX: previously `response` could be unbound here, turning any
        # failure into an UnboundLocalError on return — fail with a clean
        # 500 instead.
        LOG.error(str(e))
        abort(500)
    return response
Receive callback from ELIXIR AAI, create cookies and redirect to Beacon UI.
625941b5a79ad161976cbf2e
def selectionTop10(self):
    """Select the ten chromosomes with the highest fitness.

    :return: list of the top-10 chromosomes, best first.
    """
    ranked = sorted(self.population, key=self.fitness, reverse=True)
    return ranked[:10]
Perform selection using top 10 best fitness chromosome approach
625941b5e5267d203edcda8a
def _assert_workflow_status(self, submission_uuid, num_completed, num_required):
    """Check that the training workflow is on the expected step.

    Args:
        submission_uuid (str): Submission UUID of the student being trained.
        num_completed (int): Expected number of examples assessed correctly.
        num_required (int): Required number of examples to assess.

    Raises:
        AssertionError
    """
    actual_num_completed = training_api.get_num_completed(submission_uuid)
    self.assertEqual(actual_num_completed, num_completed)
    requirements = {'num_required': num_required}
    is_finished = training_api.submitter_is_finished(submission_uuid, requirements)
    # The submitter is finished exactly when the requirement is met.
    self.assertEqual(is_finished, bool(num_completed >= num_required))
Check that the training workflow is on the expected step. Args: submission_uuid (str): Submission UUID of the student being trained. num_completed (int): The expected number of examples assessed correctly. num_total (int): The required number of examples to assess. Returns: None Raises: AssertionError
625941b51b99ca400220a899
def patch_my_account(self, email=None, password=None):
    """Update the current account.

    :param email: new email for the account, if given.
    :param password: new password for the account, if given.
    :return: the updated account.
    :raise APIError: if the server returns an error.
    """
    # Field names are camelized for the remote API.
    payload = camelize_dict({
        'email': email,
        'password': password,
    })
    response = self._patch(self._url('/my-account'), json=payload)
    return self._result(response, json=True)
Update current account. :param email: the email of account. :param password: the password of account. :return: the account. :raise APIError: if server returns an error.
625941b532920d7e50b27fb4
def p_generic_items(t):
    """generic_items : generic_item
                     | generic_item COMMA generic_items"""
    # NOTE: the docstring above is the yacc grammar rule for this action
    # (ply parses it) — it is not free-form documentation.
    global active_lang, LANG_TABLE
    # Delegate the reduction to the handler of the currently active language.
    t[0]= LANG_TABLE[active_lang].process_generic_items(t)
generic_items : generic_item | generic_item COMMA generic_items
625941b573bcbd0ca4b2be65
def test_to_pbuf(self):
    """Test protobuf conversion of LayerSpecification."""
    import barrista.design as design
    layerspec = design.LayerSpecification()
    # A layer without a type must be rejected.
    with self.assertRaises(AssertionError):
        _ = layerspec.to_pbuf_message(0, None, ['data'])
    layerspec.type = 'convolution'
    # First layer: auto-named from its index, bottom wired to 'data'.
    pbmessage = layerspec.to_pbuf_message(0, None, ['data'])
    self.assertEqual(pbmessage.name, '_layer_0')
    self.assertEqual(pbmessage.top[0], '_layer_0')
    self.assertEqual(pbmessage.bottom[0], 'data')
    layerspec2 = design.LayerSpecification()
    layerspec2.type = 'convolution'
    # Second layer: bottom is wired to the predecessor's top.
    pbmessage2 = layerspec2.to_pbuf_message(1, layerspec, ['data'])
    self.assertEqual(pbmessage2.name, '_layer_1')
    self.assertEqual(pbmessage2.top[0], '_layer_1')
    self.assertEqual(pbmessage2.bottom[0], '_layer_0')
Test protobuf conversion.
625941b5ff9c53063f47bfe7
def parse_charset(charset):
    """Expand ranges like 'a-d' in *charset* and drop duplicate characters.

    For example, 'ABC Da-d' becomes 'ABC Dabcd'.

    :return: the expanded, de-duplicated charset string.
    :raises SwarmParseException: if the charset cannot be parsed.
    """
    try:
        # Expand every 'x-y' range in place until none remain.
        while '-' in charset:
            dash = charset.index('-')
            lo, hi = charset[dash - 1], charset[dash + 1]
            expansion = ''.join(chr(c) for c in range(ord(lo), ord(hi) + 1))
            charset = charset.replace(lo + '-' + hi, expansion)
        # Keep only the first occurrence of each character, in order.
        return ''.join(ch for pos, ch in enumerate(charset)
                       if charset.index(ch) == pos)
    except Exception:
        raise SwarmParseException('charset')
Parse charset like 'ABC Da-d' into 'ABC Dabcd'. Returns: String like 'ABCDabcd'. Raises: SwarmParseException: An error occurred when parse charset.
625941b5462c4b4f79d1d4b9
def save(self):
    """Persist an Agreement for every page for the current user.

    Existing agreements are fetched via get_or_create rather than
    duplicated, then saved again (matching the original behavior).
    """
    for document in self._pages:
        agreement, _created = Agreement.objects.get_or_create(
            user=self._user,
            document=document,
        )
        agreement.save()
Save user agreements.
625941b51f037a2d8b945fe8
def t_parameter_tuning():
    """Plot heatmaps showing how mutation rate and mutation strength (sd)
    combinations affect an algorithm's performance.
    """
    num_runs = 25  # runs averaged per parameter combination
    alg_params = solver.AlgoParams(generations=500, pop_size=20, num_children=15, mutation_rate=0.1, mutation_sd=0.1, crossover_rate=0.2, minimising=True)
    algs = [algo_hc]
    gens = [gen_schwefel]
    # Grid of mutation rates / strengths to sweep.
    m_rates = [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5, 0.75, 1]
    m_sds = [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5, 0.75, 1]
    for gen in gens:
        print('\n\n', gen.__name__, ' D:', gen.n, ' Av over:', num_runs, ' minimising:', alg_params.minimising)
        for alg in algs:
            print('\n', alg.__name__)
            # Build the rate x sd performance matrix, then render it.
            m = build_parameter_matrix(m_sds, m_rates, gen, alg, alg_params, num_runs)
            plot_params_from_existing_matrix(m, alg_params, m_rates, m_sds, gen, alg)
Plot heatmap showing the effects of different combinations of mutation rate and mutation strength on an algorithm's performance
625941b5a17c0f6771cbde3d
def render_graphics(self):
    """Default renderer: draw lines along iter_world_points(), if present."""
    # Only objects that expose world-space points can be drawn here.
    if hasattr(self, "iter_world_points"):
        if self.graphic_settings.draw_lines:
            batch: pyglet.graphics.Batch = self.engine.renderer.get_batch(self.graphic_settings.line_batch_name)
            if batch:
                self.engine.renderer.draw_lines(batch, self.iter_world_points())
Default function renders lines along iter_world_points(), if present
625941b5b57a9660fec33668
def greet_user():
    """Greet the user by name, remembering new users for next time.

    If a stored username exists it is confirmed interactively; otherwise
    (or when the stored name is rejected) a new username is recorded.
    """
    username = get_stored_username()
    if username:
        response = input('Are you %s? \nPlease enter yes or no. ' % username)
        if response.lower() == 'yes':
            print("Welcome back, " + username + "!")
            return
    # Either no stored username, or the stored one was rejected:
    # register a new user (deduplicates the two identical branches of the
    # original).
    username = get_new_username()
    print("We'll remember you when you come back, " + username + "!")
Greet the user by name.
625941b58a43f66fc4b53e53
def humansize(num, suffix='B'):
    """Render a byte count as a human-readable string, e.g. 1024 -> '1.0KiB'.

    Based on https://stackoverflow.com/a/1094933

    :param num: size in bytes (may be negative).
    :param suffix: unit suffix appended after the binary prefix.
    """
    value = float(num)
    for prefix in ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi'):
        if abs(value) < 1024.0:
            return "%3.1f%s%s" % (value, prefix, suffix)
        value /= 1024.0
    # Anything past zebibytes collapses to 'Yi'.
    return "%.1f%s%s" % (value, 'Yi', suffix)
human readable output of size, takes in bytes https://stackoverflow.com/a/1094933
625941b51d351010ab85590e
def testExport(self):
    """Tests standard usdz package export."""
    usdFile = os.path.abspath('MyAwesomePackage.usdz')
    cmds.usdExport(
        file=usdFile,
        mergeTransformAndShape=True,
        shadingMode='none')
    # The usdz archive is expected to list the root layer first, then the
    # referenced layers and the texture.
    zipFile = Usd.ZipFile.Open(usdFile)
    fileNames = zipFile.GetFileNames()
    self.assertEqual(fileNames, [
        "MyAwesomePackage.usd",
        "ReferenceModel.usda",
        "BaseModel.usda",
        "card.png"
    ])
    # The packaged stage must open and reference the texture in-package.
    stage = Usd.Stage.Open(usdFile)
    self._AssertExpectedStage(stage, "./card.png")
    self._AssertNoTempFiles(usdFile)
Tests standard usdz package export.
625941b52ae34c7f2600cf1a
def csv_dict_reader(fileobj, **kw):
    """Read a CSV file yielding dictionaries with normalized field names.

    Alternative constructor for csv.DictReader that passes the header
    through ``csv_clean_header``.

    Args:
        fileobj: A file object to be read.
        **kw: Optional arguments forwarded to csv.DictReader.

    Returns:
        A csv.DictReader object.
    """
    reader = csv.DictReader(fileobj, **kw)
    reader.fieldnames = csv_clean_header(reader.fieldnames)
    return reader
Read a CSV file yielding normalized dictionary fields. This is basically an alternative constructor for csv.DictReader that normalized the field names. Args: fileobj: A file object to be read. **kw: Optional arguments forwarded to csv.DictReader. Returns: A csv.DictReader object.
625941b54a966d76dd550df4
def Like(self, userLogin, pageUrl):
    """Like the page at *pageUrl* on behalf of *userLogin*.

    Only the login step is implemented so far; the like action itself is
    still a stub.
    """
    self._Login(userLogin)
    pass
Like the given page on behalf of the given user (currently only the login step is performed).
625941b56aa9bd52df036b8b
def _update_routing_table(self, node):
    """Record *node* in the routing table.

    The node must have contacted us or responded. Nodes carrying our own
    key are ignored; when the target bucket is full, the full-bucket
    policy decides which node to keep.
    """
    if node.key == self._key:
        _logger.debug('Ignore node %s with our id on routing table update', node)
        return
    try:
        self._routing_table.node_update(node)
    except BucketFullError as err:
        # Bucket is at capacity: let maintenance arbitrate between the
        # existing node and the newcomer.
        self._update_full_bucket(err.bucket, err.node, node)
Update the routing table with this node. The node must have contacted us or it has responded.
625941b5d8ef3951e3243326
def add_rest_resource(config, cls_or_dotted): <NEW_LINE> <INDENT> resolver = DottedNameResolver() <NEW_LINE> cls = resolver.maybe_resolve(cls_or_dotted) <NEW_LINE> resource_name = cls.get_route_prefix() <NEW_LINE> register_resource(cls) <NEW_LINE> for route_type, route_info in REST_ROUTE_INFO.items(): <NEW_LINE> <INDENT> match_param = "member={}".format(resource_name) <NEW_LINE> if 'action' in route_info: <NEW_LINE> <INDENT> action_info_list = cls.get_actions_by_type(route_type) <NEW_LINE> for action_info in action_info_list: <NEW_LINE> <INDENT> action_name = action_info['name'] <NEW_LINE> request_method = action_info['request_method'] <NEW_LINE> permission_name = action_info.get('permission') <NEW_LINE> if permission_name: <NEW_LINE> <INDENT> permission = permission_name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> permission = cls.model.get_permission('action') <NEW_LINE> <DEDENT> custom_predicates = (resource_action_predicate(action_name), ) <NEW_LINE> config.add_view( cls, attr=action_info['attr_name'], match_param=match_param, route_name="{}_action".format(route_info['route_name']), decorator=allow_request_methods(request_method), custom_predicates=custom_predicates, renderer='json', request_method=REQUEST_METHODS, permission=permission) <NEW_LINE> <DEDENT> <DEDENT> for request_method in route_info['methods']: <NEW_LINE> <INDENT> attr_name = '{0}_{1}'.format(request_method.lower(), route_type) <NEW_LINE> view_handler = getattr(cls, attr_name, None) <NEW_LINE> if not hasattr(view_handler, '__call__'): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> permission_name = PERMISSION_SUFFIX[request_method] <NEW_LINE> permission = cls.model.get_permission(permission_name) <NEW_LINE> config.add_view( cls, attr=attr_name, match_param=match_param, route_name=route_info['route_name'], renderer='json', request_method=request_method, permission=permission)
Add routes and views for a `RestResource` class.
625941b5fff4ab517eb2f222
def findTarget(root, k): <NEW_LINE> <INDENT> if root is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> arr = [] <NEW_LINE> traverseInOrder(root, arr) <NEW_LINE> n = len(arr) <NEW_LINE> i, j = 0, n-1 <NEW_LINE> while i < j: <NEW_LINE> <INDENT> s = arr[i] + arr[j] <NEW_LINE> if s == k: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif s < k: <NEW_LINE> <INDENT> i += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> j -= 1 <NEW_LINE> <DEDENT> <DEDENT> return False
:type root: TreeNode :type k: int :rtype: bool
625941b55166f23b2e1a4f42
def my_func(param1 = "default"): <NEW_LINE> <INDENT> print("A python function with param value {}".format(param1));
This is a DOCSTRING :param param1: :return:
625941b5a4f1c619b28afe2c
def __lt__(self,G): <NEW_LINE> <INDENT> r <NEW_LINE> return self.is_subgroup(G)
Test if self is a subgroup of G
625941b53346ee7daa2b2b52
def existing_url(module): <NEW_LINE> <INDENT> url_base = "/axapi/v3/system" <NEW_LINE> f_dict = {} <NEW_LINE> return url_base.format(**f_dict)
Return the URL for an existing resource
625941b5aad79263cf390824
def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ResponseContainerDashboard): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict()
Returns true if both objects are equal
625941b563b5f9789fde6ecf
def comments(request, app=False, **kwargs): <NEW_LINE> <INDENT> from conescy.apps.comments.models import Comment <NEW_LINE> if app == False: app = kwargs.pop("app") <NEW_LINE> comments = Comment.objects.filter(ref__startswith="%s." % app, status='ok') <NEW_LINE> if not kwargs.get("extra_context", False): kwargs["extra_context"] = {} <NEW_LINE> kwargs["extra_context"]["special"] = _('Comments in "%(app)s"') % {'app': app.capitalize()} <NEW_LINE> if kwargs.pop("rss", False) == True: <NEW_LINE> <INDENT> kwargs["extra_context"]["title"] = kwargs["extra_context"].get("name", "") + _('Comments in "%(app)s"') % {'app': app.capitalize()} <NEW_LINE> kwargs["extra_context"]["description"] = kwargs["extra_context"]["title"] <NEW_LINE> kwargs["extra_context"]["link"] = reverse("%s-home" % app) <NEW_LINE> kwargs["extra_context"]["site"] = Site.objects.get_current().domain <NEW_LINE> <DEDENT> return object_list(request, comments, **kwargs)
This view renders a page (e.g. feed) with all (approved) comments for a specific app. This depends on Conescy's comments app, so be sure to have it in your ``INSTALLED_APPS``. It uses the Generic View ``object_list`` and accepts all of its options. The only required argument is the Everything instance's name, which can also be given as keyword argument (both called ``app``). To use this for a feed template, just add ``rss: True`` as keyword argument and the feed's title, description, link and site will be set automatically. The view also passes one variable as ``extra_context`` to the template, ``special``, which is usable as a heading ("Comments in <app>").
625941b54a966d76dd550df5
def cmd_version(self, cmdict): <NEW_LINE> <INDENT> return self.db.get_globals(only='version')
Return librarian version In (dict)--- None Out (dict) --- librarian version
625941b591af0d3eaac9b7fd
def processClientKeyExchange(self, clientKeyExchange): <NEW_LINE> <INDENT> ecdhYc = clientKeyExchange.ecdh_Yc <NEW_LINE> if not ecdhYc: <NEW_LINE> <INDENT> raise TLSDecodeError("No key share") <NEW_LINE> <DEDENT> kex = ECDHKeyExchange(self.group_id, self.serverHello.server_version) <NEW_LINE> return kex.calc_shared_key(self.ecdhXs, ecdhYc)
Calculate premaster secret from previously generated SKE and CKE
625941b5d8ef3951e3243327
def main(): <NEW_LINE> <INDENT> os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'educationApp.settings') <NEW_LINE> try: <NEW_LINE> <INDENT> from django.core.management import execute_from_command_line <NEW_LINE> <DEDENT> except ImportError as exc: <NEW_LINE> <INDENT> raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc <NEW_LINE> <DEDENT> execute_from_command_line(sys.argv)
Run administrative tasks.
625941b54e4d5625662d41c7
def thinfilm_color_vs_thickness_plot (n1, n2, n3, thickness_nm_list, illuminant, title, filename): <NEW_LINE> <INDENT> films = create_thin_films(n1, n2, n3, thickness_nm_list) <NEW_LINE> num_films = len (films) <NEW_LINE> rgb_list = numpy.empty ((num_films, 3)) <NEW_LINE> for i in range (0, num_films): <NEW_LINE> <INDENT> film = films[i] <NEW_LINE> xyz = film.illuminated_color (illuminant) <NEW_LINE> rgb_list [i] = colormodels.rgb_from_xyz (xyz) <NEW_LINE> <DEDENT> plots.color_vs_param_plot ( thickness_nm_list, rgb_list, title, filename, xlabel = r'Thickness (nm)', ylabel = r'RGB Color')
Plot the color of the thin film for the specfied thicknesses [nm].
625941b56aa9bd52df036b8c
def rad_to_dmsstr(rads): <NEW_LINE> <INDENT> signs = np.atleast_1d(np.sign(rads)) <NEW_LINE> degs = np.atleast_1d(rad_to_deg(np.abs(rads))) <NEW_LINE> strs = [] <NEW_LINE> for sign, deg in zip(signs, degs): <NEW_LINE> <INDENT> deg += 1e-12 <NEW_LINE> d = int(deg) <NEW_LINE> min = (deg-d)*60.0 <NEW_LINE> m = int(min) <NEW_LINE> s = (min-m)*60.0 <NEW_LINE> if sign == -1: <NEW_LINE> <INDENT> sign = "-" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sign = "" <NEW_LINE> <DEDENT> if (s >= 9.9995): <NEW_LINE> <INDENT> strs.append("%s%.2d:%.2d:%.4f" % (sign, d, m, s)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> strs.append("%s%.2d:%.2d:0%.4f" % (sign, d, m, s)) <NEW_LINE> <DEDENT> <DEDENT> return strs
Convert radians to DD:MM:SS.SS sexigesimal string.
625941b5a17c0f6771cbde3e
def run_profile(self, profile, names, vm_overrides=None): <NEW_LINE> <INDENT> if profile not in self.opts['profiles']: <NEW_LINE> <INDENT> msg = 'Profile {0} is not defined'.format(profile) <NEW_LINE> log.error(msg) <NEW_LINE> return {'Error': msg} <NEW_LINE> <DEDENT> ret = {} <NEW_LINE> if not vm_overrides: <NEW_LINE> <INDENT> vm_overrides = {} <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> with salt.utils.fopen(self.opts['conf_file'], 'r') as mcc: <NEW_LINE> <INDENT> main_cloud_config = yaml.safe_load(mcc) <NEW_LINE> <DEDENT> if not main_cloud_config: <NEW_LINE> <INDENT> main_cloud_config = {} <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> main_cloud_config = {} <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> main_cloud_config = {} <NEW_LINE> <DEDENT> profile_details = self.opts['profiles'][profile] <NEW_LINE> alias, driver = profile_details['provider'].split(':') <NEW_LINE> mapped_providers = self.map_providers_parallel() <NEW_LINE> alias_data = mapped_providers.setdefault(alias, {}) <NEW_LINE> vms = alias_data.setdefault(driver, {}) <NEW_LINE> provider_details = self.opts['providers'][alias][driver].copy() <NEW_LINE> del provider_details['profiles'] <NEW_LINE> for name in names: <NEW_LINE> <INDENT> name_exists = False <NEW_LINE> if name in vms: <NEW_LINE> <INDENT> if 'state' in vms[name]: <NEW_LINE> <INDENT> if vms[name]['state'].lower() != 'terminated': <NEW_LINE> <INDENT> name_exists = True <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> name_exists = True <NEW_LINE> <DEDENT> <DEDENT> if name_exists: <NEW_LINE> <INDENT> msg = '{0} already exists under {1}:{2}'.format( name, alias, driver ) <NEW_LINE> log.error(msg) <NEW_LINE> ret[name] = {'Error': msg} <NEW_LINE> continue <NEW_LINE> <DEDENT> vm_ = main_cloud_config.copy() <NEW_LINE> vm_.update(provider_details) <NEW_LINE> vm_.update(profile_details) <NEW_LINE> vm_.update(vm_overrides) <NEW_LINE> vm_['name'] = name <NEW_LINE> if self.opts['parallel']: <NEW_LINE> <INDENT> process 
= multiprocessing.Process( target=self.create, args=(vm_,) ) <NEW_LINE> process.start() <NEW_LINE> ret[name] = { 'Provisioning': 'VM being provisioned in parallel. ' 'PID: {0}'.format(process.pid) } <NEW_LINE> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> ret[name] = self.create(vm_) <NEW_LINE> if not ret[name]: <NEW_LINE> <INDENT> ret[name] = {'Error': 'Failed to deploy VM'} <NEW_LINE> if len(names) == 1: <NEW_LINE> <INDENT> raise SaltCloudSystemExit('Failed to deploy VM') <NEW_LINE> <DEDENT> continue <NEW_LINE> <DEDENT> if self.opts.get('show_deploy_args', False) is False: <NEW_LINE> <INDENT> ret[name].pop('deploy_kwargs', None) <NEW_LINE> <DEDENT> <DEDENT> except (SaltCloudSystemExit, SaltCloudConfigError) as exc: <NEW_LINE> <INDENT> if len(names) == 1: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> ret[name] = {'Error': str(exc)} <NEW_LINE> <DEDENT> <DEDENT> return ret
Parse over the options passed on the command line and determine how to handle them
625941b5d10714528d5ffac8
def use_smartio(self): <NEW_LINE> <INDENT> if not self.allocated: <NEW_LINE> <INDENT> raise RuntimeError("You must call flow.allocate before invoking flow.use_smartio") <NEW_LINE> <DEDENT> for task in self.iflat_tasks(): <NEW_LINE> <INDENT> children = task.get_children() <NEW_LINE> if not children: <NEW_LINE> <INDENT> task.history.info("Will disable IO for task") <NEW_LINE> task._set_inpvars(prtwf=-1, prtden=0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> must_produce_abiexts = [] <NEW_LINE> for child in children: <NEW_LINE> <INDENT> for d in child.deps: <NEW_LINE> <INDENT> must_produce_abiexts.extend(d.exts) <NEW_LINE> <DEDENT> <DEDENT> must_produce_abiexts = set(must_produce_abiexts) <NEW_LINE> smart_prtvars = { "prtwf": "WFK", } <NEW_LINE> for varname, abiext in smart_prtvars.items(): <NEW_LINE> <INDENT> if abiext not in must_produce_abiexts: <NEW_LINE> <INDENT> print("%s: setting %s to -1" % (task, varname)) <NEW_LINE> task._set_inpvars({varname: -1})
This function should be called when the entire `Flow` has been built. It tries to reduce the pressure on the hard disk by using Abinit smart-io capabilities for those files that are not needed by other nodes. Smart-io means that big files (e.g. WFK) are written only if the calculation is unconverged so that we can restart from it. No output is produced if convergence is achieved.
625941b5d18da76e235322bb
def set_diluciju(self, naziv, jedinica): <NEW_LINE> <INDENT> self.dilucijskeJedinice[naziv] = jedinica
Setter dilucijske jedinice u dokument
625941b532920d7e50b27fb5
@auth.route('/token') <NEW_LINE> @httpauth.login_required <NEW_LINE> def get_token(): <NEW_LINE> <INDENT> token = g.current_user.generate_auth_token( current_app.config.get('SESSION_TTL') ) <NEW_LINE> response = { 'data': { 'token': token.decode('utf-8'), 'expiration': current_app.config.get('SESSION_TTL'), } } <NEW_LINE> return jsonify(response)
View function which returns a valid token for the user. :return: a JSON with user `token` & `expiration` value.
625941b57d43ff24873a2a8d
def __getitem__(self, key): <NEW_LINE> <INDENT> position = self.hash(key) <NEW_LINE> for i in range(self.table_size): <NEW_LINE> <INDENT> if self.array[position] is None: <NEW_LINE> <INDENT> raise KeyError(key) <NEW_LINE> <DEDENT> elif self.array[position][0] == key: <NEW_LINE> <INDENT> return self.array[position][1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> position = (position + 1) % self.table_size <NEW_LINE> <DEDENT> <DEDENT> raise KeyError(key)
To get item in the SeparateChainingTable instance according to the given key. :precondition: The instance of Table exists. :postcondition: The target item is returned if it is in the list. :param key: An index to mark the position of an item. :return: Returns the item with matching key in the table :complexity: O(1) for best cases, O(key) for worst cases
625941b5507cdc57c6306abc
def entrop(self): <NEW_LINE> <INDENT> return sum([sbs.entrop() for sbs in self.__subst_list])
Расчет энтропии реакции при заданной температуре
625941b521bff66bcd68473f
@click.group() <NEW_LINE> @click.pass_context <NEW_LINE> def client(*args, **kwargs) -> None: <NEW_LINE> <INDENT> pass
Contains operations which should be executed by a content owner
625941b510dbd63aa1bd299a
@log("Доставили за {}с!") <NEW_LINE> def delivery(pizza): <NEW_LINE> <INDENT> pass
Доставляет пиццу
625941b5d164cc6175782b37
@requires_key <NEW_LINE> def add_inventory(inventoryItem, userIndex, upfile=None, public=False): <NEW_LINE> <INDENT> item = {} <NEW_LINE> item['inventoryItem'] = json.loads(inventoryItem) <NEW_LINE> fileHash = None <NEW_LINE> if (upfile): <NEW_LINE> <INDENT> filename = secure_filename(upfile.filename) <NEW_LINE> upfile.save(filename) <NEW_LINE> fileHash = ipfsAdd(filename) <NEW_LINE> <DEDENT> if (fileHash): <NEW_LINE> <INDENT> item['fileHash'] = fileHash <NEW_LINE> <DEDENT> item['ethId'] = eth_latestItemID() <NEW_LINE> if (public): <NEW_LINE> <INDENT> item['public'] = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> item['public'] = False <NEW_LINE> <DEDENT> i = ipfsAdd(item, False) <NEW_LINE> e = ethAdd(bytes(i, 'utf-8'), userIndex) <NEW_LINE> if (i and e): <NEW_LINE> <INDENT> result = {"status": "success", "itemHash": i, "txHash": (e['transactionHash']).hex(), "ethId": item['ethId']} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = {"status": "failed"} <NEW_LINE> <DEDENT> return result
adds an inventory item Adds an item to the system # noqa: E501 :param inventoryItem: JSON-encoded item detail :type inventoryItem: str :param userIndex: User's ETH index :type userIndex: str :param upfile: The file to upload :type upfile: werkzeug.datastructures.FileStorage :rtype: str
625941b576d4e153a657e91a
@cli.command('validate', short_help='validate a nordic file') <NEW_LINE> @click.argument('filenames', required=True, nargs=-1, type=click.Path(exists=True, readable=True)) <NEW_LINE> @click.pass_obj <NEW_LINE> def validate(repo, filenames): <NEW_LINE> <INDENT> valid = True <NEW_LINE> for filename in filenames: <NEW_LINE> <INDENT> click.echo("reading {0}".format(filename.split("/")[len(filename.split("/")) - 1])) <NEW_LINE> f_nordic = open(filename, 'r') <NEW_LINE> try: <NEW_LINE> <INDENT> nordic_strings = nordicRead.readNordicFile(f_nordic) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> valid = False <NEW_LINE> click.echo("Error reading nordic file: {0}".format(e)) <NEW_LINE> continue <NEW_LINE> <DEDENT> for n_string in nordic_strings: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> nordic.readNordic(n_string, True, -1, -1, 'O') <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> valid = False <NEW_LINE> click.echo("Error reading nordic: {0}".format(e)) <NEW_LINE> click.echo(n_string[0]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if valid: <NEW_LINE> <INDENT> click.echo('All nordic files are valid')
Command for validating a nordic files
625941b530bbd722463cbbac
def get_idf(self, term): <NEW_LINE> <INDENT> if term in self.stopwords: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> if not term in self.term_num_docs: <NEW_LINE> <INDENT> return self.idf_default <NEW_LINE> <DEDENT> return math.log(float(1 + self.get_num_docs()) / (1 + self.term_num_docs[term]))
Retrieve the IDF for the specified term. This is computed by taking the logarithm of ((number of documents in corpus) divided by (number of documents containing this term) ). Args: term : term to get idf Returns: Return the idf value.
625941b5d6c5a10208143e30
def make_field(self, record=None, model_view=None): <NEW_LINE> <INDENT> ret = self.kolumne.make_field(self) <NEW_LINE> if self.widget: <NEW_LINE> <INDENT> ret.kwargs['widget'] = self.widget <NEW_LINE> <DEDENT> return ret
convert to field
625941b5baa26c4b54cb0f0e
def __neg__(self): <NEW_LINE> <INDENT> self.val = -self.val <NEW_LINE> return self
negate without creating new instance
625941b5a79ad161976cbf2f
def find_translation_in_title(title): <NEW_LINE> <INDENT> return findall(r'\((.+)\)', title)[0]
Finds the translation abbreviation in a verse quotation title.
625941b5d4950a0f3b08c144
def check_dataset_location(product_name="", uri_pattern="", **kwargs): <NEW_LINE> <INDENT> pg_hook = PostgresHook(postgres_conn_id=DB_ODC_READER_CONN) <NEW_LINE> connection = pg_hook.get_conn() <NEW_LINE> cursor = connection.cursor() <NEW_LINE> check_if_only_one_location = CONFIRM_DATASET_HAS_MORE_THAN_ONE_LOCATION.format( product_name=product_name ) <NEW_LINE> cursor.execute(check_if_only_one_location) <NEW_LINE> count_dataset_location_more_than_one = cursor.fetchone() <NEW_LINE> print("check the datasets for the product all contain more than 1 location") <NEW_LINE> print(count_dataset_location_more_than_one) <NEW_LINE> if ( not count_dataset_location_more_than_one or count_dataset_location_more_than_one[0] == 0 ): <NEW_LINE> <INDENT> raise AirflowException( "The datasets for the product only has one location, exiting" ) <NEW_LINE> <DEDENT> check_uri_pattern = SELECT_ALL_PRODUCTS_MATCHING_URI_PATTERNS.format( uri_pattern=uri_pattern ) <NEW_LINE> cursor.execute(check_uri_pattern) <NEW_LINE> count_product_match_uri_pattern = cursor.fetchone() <NEW_LINE> print("check uri pattern matches for only one product") <NEW_LINE> print(count_product_match_uri_pattern) <NEW_LINE> if not count_product_match_uri_pattern or count_product_match_uri_pattern[0] > 1: <NEW_LINE> <INDENT> raise AirflowException( "There are more than one product match the uri pattern, the uri pattern can be better refined, exiting" )
return sql query result
625941b5aad79263cf390825
def deserialise_job_command(command): <NEW_LINE> <INDENT> return base64.b64decode(command).decode("utf-8")
Deserialise job commands received through REST API.
625941b5cc40096d6159573e
def has_migs(self, local=True): <NEW_LINE> <INDENT> consecutive_good = 0 <NEW_LINE> try_num = 0 <NEW_LINE> skip_reason = list() <NEW_LINE> while try_num < int(self.module.params['tries_limit']) and consecutive_good < int(self.module.params['consecutive_good_checks']): <NEW_LINE> <INDENT> self._update_nodes_list() <NEW_LINE> self._update_cluster_statistics() <NEW_LINE> stable, reason = self._cluster_good_state() <NEW_LINE> if stable is not True: <NEW_LINE> <INDENT> skip_reason.append( "Skipping on try#" + str(try_num) + " for reason:" + reason ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self._can_use_cluster_stable(): <NEW_LINE> <INDENT> if self._cluster_stable(): <NEW_LINE> <INDENT> consecutive_good += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> consecutive_good = 0 <NEW_LINE> skip_reason.append( "Skipping on try#" + str(try_num) + " for reason:" + " cluster_stable" ) <NEW_LINE> <DEDENT> <DEDENT> elif self._has_migs(local): <NEW_LINE> <INDENT> skip_reason.append( "Skipping on try#" + str(try_num) + " for reason:" + " migrations" ) <NEW_LINE> consecutive_good = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> consecutive_good += 1 <NEW_LINE> if consecutive_good == self.module.params[ 'consecutive_good_checks']: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> try_num += 1 <NEW_LINE> sleep(self.module.params['sleep_between_checks']) <NEW_LINE> <DEDENT> if consecutive_good == self.module.params['consecutive_good_checks']: <NEW_LINE> <INDENT> return False, None <NEW_LINE> <DEDENT> return True, skip_reason
returns a boolean, False if no migrations otherwise True
625941b521a7993f00bc7ad2
def _livereload(host, port, config, builder, site_dir): <NEW_LINE> <INDENT> from livereload import Server <NEW_LINE> watcher = MooseDocsWatcher() <NEW_LINE> server = livereload.Server(None, watcher) <NEW_LINE> server.watch(config['docs_dir'], builder) <NEW_LINE> server.watch(config['config_file_path'], builder) <NEW_LINE> for d in config['theme_dir']: <NEW_LINE> <INDENT> server.watch(d, builder) <NEW_LINE> <DEDENT> server.serve(root=site_dir, host=host, port=int(port), restart_delay=0)
Mimics the mkdocs.commands.serve._livereload function. @TODO: When the mkdocs plugin system allows for custom Watcher this should be removed.
625941b5287bf620b61d385a
def __init__(self, filename=None, options={}): <NEW_LINE> <INDENT> super(Workbook, self).__init__() <NEW_LINE> self.filename = filename <NEW_LINE> self.tmpdir = options.get('tmpdir', None) <NEW_LINE> self.date_1904 = options.get('date_1904', False) <NEW_LINE> self.strings_to_numbers = options.get('strings_to_numbers', False) <NEW_LINE> self.strings_to_formulas = options.get('strings_to_formulas', True) <NEW_LINE> self.strings_to_urls = options.get('strings_to_urls', True) <NEW_LINE> self.default_date_format = options.get('default_date_format', None) <NEW_LINE> self.optimization = options.get('constant_memory', False) <NEW_LINE> self.in_memory = options.get('in_memory', False) <NEW_LINE> self.excel2003_style = options.get('excel2003_style', False) <NEW_LINE> self.default_format_properties = options.get('default_format_properties', {}) <NEW_LINE> self.worksheet_meta = WorksheetMeta() <NEW_LINE> self.selected = 0 <NEW_LINE> self.fileclosed = 0 <NEW_LINE> self.filehandle = None <NEW_LINE> self.internal_fh = 0 <NEW_LINE> self.sheet_name = 'Sheet' <NEW_LINE> self.chart_name = 'Chart' <NEW_LINE> self.sheetname_count = 0 <NEW_LINE> self.chartname_count = 0 <NEW_LINE> self.worksheets_objs = [] <NEW_LINE> self.charts = [] <NEW_LINE> self.drawings = [] <NEW_LINE> self.sheetnames = [] <NEW_LINE> self.formats = [] <NEW_LINE> self.xf_formats = [] <NEW_LINE> self.xf_format_indices = {} <NEW_LINE> self.dxf_formats = [] <NEW_LINE> self.dxf_format_indices = {} <NEW_LINE> self.palette = [] <NEW_LINE> self.font_count = 0 <NEW_LINE> self.num_format_count = 0 <NEW_LINE> self.defined_names = [] <NEW_LINE> self.named_ranges = [] <NEW_LINE> self.custom_colors = [] <NEW_LINE> self.doc_properties = {} <NEW_LINE> self.localtime = datetime.now() <NEW_LINE> self.num_vml_files = 0 <NEW_LINE> self.num_comment_files = 0 <NEW_LINE> self.x_window = 240 <NEW_LINE> self.y_window = 15 <NEW_LINE> self.window_width = 16095 <NEW_LINE> self.window_height = 9660 <NEW_LINE> self.tab_ratio = 500 <NEW_LINE> 
self.str_table = SharedStringTable() <NEW_LINE> self.vba_project = None <NEW_LINE> self.vba_codename = None <NEW_LINE> self.image_types = {} <NEW_LINE> self.images = [] <NEW_LINE> self.border_count = 0 <NEW_LINE> self.fill_count = 0 <NEW_LINE> self.drawing_count = 0 <NEW_LINE> self.calc_mode = "auto" <NEW_LINE> self.calc_on_load = True <NEW_LINE> self.allow_zip64 = False <NEW_LINE> self.calc_id = 124519 <NEW_LINE> if self.in_memory: <NEW_LINE> <INDENT> self.optimization = False <NEW_LINE> <DEDENT> if self.excel2003_style: <NEW_LINE> <INDENT> self.add_format({'xf_index': 0, 'font_family': 0}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.add_format({'xf_index': 0}) <NEW_LINE> <DEDENT> self.default_url_format = self.add_format({'color': 'blue', 'underline': 1}) <NEW_LINE> if self.default_date_format is not None: <NEW_LINE> <INDENT> self.default_date_format = self.add_format({'num_format': self.default_date_format})
Constructor.
625941b5167d2b6e31218988
def execute(self): <NEW_LINE> <INDENT> if any(help_indicator in self._global_args for help_indicator in ['-h', '--help', 'help']) or any(help_indicator in self._command_args for help_indicator in ['-h', '--help', 'help']): <NEW_LINE> <INDENT> self.help() <NEW_LINE> return <NEW_LINE> <DEDENT> if not self._command: <NEW_LINE> <INDENT> self._status = compat.EX_USAGE <NEW_LINE> self.help() <NEW_LINE> return <NEW_LINE> <DEDENT> global_parser = argparse.ArgumentParser(prog=self._prog, usage=self.global_usage()[7:], add_help=False) <NEW_LINE> for arg in self._arguments: <NEW_LINE> <INDENT> global_parser.add_argument(*arg[0], **arg[1]) <NEW_LINE> <DEDENT> global_args = global_parser.parse_args(self._global_args) <NEW_LINE> command_parser = argparse.ArgumentParser(prog=self._prog, usage=self.command_usage()[7:], add_help=False) <NEW_LINE> for arg in self._commands[self._command].arguments: <NEW_LINE> <INDENT> command_parser.add_argument(*arg[0], **arg[1]) <NEW_LINE> <DEDENT> command_args = command_parser.parse_args(self._command_args) <NEW_LINE> if 'file' in global_args and global_args.file: <NEW_LINE> <INDENT> self._logger.file = global_args.file <NEW_LINE> self._logger.type = 'file' <NEW_LINE> <DEDENT> if 'loglevel' in global_args and global_args.loglevel: <NEW_LINE> <INDENT> self._logger.level = global_args.loglevel <NEW_LINE> <DEDENT> if 'no_color' in global_args and global_args.no_color: <NEW_LINE> <INDENT> self._logger.color = False <NEW_LINE> <DEDENT> if 'conf' in global_args and global_args.conf: <NEW_LINE> <INDENT> conf = ApplicationConf.get_instance() <NEW_LINE> conf['CONF_FILE'] = os.path.abspath(global_args.conf) <NEW_LINE> <DEDENT> command_class = self._commands[self._command] <NEW_LINE> command = command_class(global_args=global_args, cmd_args=command_args, logger=self._logger) <NEW_LINE> command.execute() <NEW_LINE> self._status = command.status
Based on the given command line arguments and the available subcommands, this method creates the appropriate command line parser and starts the corresponding subcommand.
625941b5435de62698dfda3f
def reg_test_baystat_sim(self): <NEW_LINE> <INDENT> self._do_test('baystat_sim')
Regression test baystat_sim.
625941b5e5267d203edcda8b
def tqdm_terminal(it, *args, **kwargs): <NEW_LINE> <INDENT> return tqdm(it, *args, dynamic_ncols=True, ascii=True, leave=(innermost_tqdm() is not None), **kwargs)
Some settings for tqdm that make it run better in resizable terminals.
625941b56e29344779a62400
def get_data(self, task): <NEW_LINE> <INDENT> token = atlassian_jwt.encode_token('GET', self.__apiPath + task, self.key, self.secure) <NEW_LINE> data = self.__make_request(task, token) <NEW_LINE> return self.parse_response(data.json())
Make request and get task info Args: task (string): task id Returns: string: task full text
625941b59f2886367277a67c
def previous(self): <NEW_LINE> <INDENT> if self._firstQuestion: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> self._currentQuestion = self._already_asked.pop() <NEW_LINE> return self._currentQuestion
We get here when the user clicks on the "Previous" button in the GTK user interface. @return: The previous question, with the answers the user selected.
625941b5a4f1c619b28afe2d
def on_register_button_clicked(self, widget = None): <NEW_LINE> <INDENT> model, iter_ = self.window.services_treeview.get_selection().get_selected() <NEW_LINE> if not iter_: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> jid = model[iter_][0] <NEW_LINE> if jid: <NEW_LINE> <INDENT> ServiceRegistration(self.account, jid) <NEW_LINE> self.window.destroy(chain=True)
When we want to register an agent: request information about registering with the agent and close the window
625941b53c8af77a43ae3588
@login_required <NEW_LINE> def user_settings(request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> meta = request.user.usermeta <NEW_LINE> <DEDENT> except UserMeta.DoesNotExist: <NEW_LINE> <INDENT> meta = UserMeta() <NEW_LINE> request.user.usermeta = meta <NEW_LINE> meta.save() <NEW_LINE> request.user.save() <NEW_LINE> <DEDENT> if request.method == 'POST': <NEW_LINE> <INDENT> form = SettingsForm(request.POST, instance=meta) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> form.save() <NEW_LINE> return render(request, "base.html", { "Message": 'Settings saved!', 'return': 'index:profile', }) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> form = SettingsForm(instance=meta) <NEW_LINE> <DEDENT> return render(request, "GenericForm.html", { "form": form, "formtitle": "Change user settings", "buttontext": "Save" })
Let a user change its settings, like email preferences. :param request: :return:
625941b5925a0f43d2549c5d
def mailPassword(self, login, REQUEST, immediate=False): <NEW_LINE> <INDENT> membership = getToolByName(self, 'portal_membership') <NEW_LINE> if not membership.checkPermission('Mail forgotten password', self): <NEW_LINE> <INDENT> raise Unauthorized( _(u"Mailing forgotten passwords has been disabled.")) <NEW_LINE> <DEDENT> utils = getToolByName(self, 'plone_utils') <NEW_LINE> member = get_member_by_login_name(self, login, raise_exceptions=False) <NEW_LINE> if member is None: <NEW_LINE> <INDENT> raise ValueError( _(u'The username you entered could not be found.')) <NEW_LINE> <DEDENT> email = member.getProperty('email') <NEW_LINE> if not email: <NEW_LINE> <INDENT> raise ValueError(_(u'That user does not have an email address.')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not utils.validateSingleEmailAddress(email): <NEW_LINE> <INDENT> raise ValueError(_(u'The email address did not validate.')) <NEW_LINE> <DEDENT> <DEDENT> check, msg = _checkEmail(email) <NEW_LINE> if not check: <NEW_LINE> <INDENT> raise ValueError(msg) <NEW_LINE> <DEDENT> reset_tool = getToolByName(self, 'portal_password_reset') <NEW_LINE> reset = reset_tool.requestReset(member.getId()) <NEW_LINE> encoding = getUtility(ISiteRoot).getProperty('email_charset', 'utf-8') <NEW_LINE> mail_text = self.mail_password_template( self, REQUEST, member=member, reset=reset, password=member.getPassword(), charset=encoding) <NEW_LINE> if isinstance(mail_text, unicode): <NEW_LINE> <INDENT> mail_text = mail_text.encode(encoding) <NEW_LINE> <DEDENT> message_obj = message_from_string(mail_text.strip()) <NEW_LINE> subject = message_obj['Subject'] <NEW_LINE> m_to = message_obj['To'] <NEW_LINE> m_from = message_obj['From'] <NEW_LINE> host = getToolByName(self, 'MailHost') <NEW_LINE> try: <NEW_LINE> <INDENT> host.send(mail_text, m_to, m_from, subject=subject, charset=encoding, immediate=immediate) <NEW_LINE> <DEDENT> except SMTPRecipientsRefused: <NEW_LINE> <INDENT> raise SMTPRecipientsRefused( _(u'Recipient address 
rejected by server.')) <NEW_LINE> <DEDENT> except SMTPException as e: <NEW_LINE> <INDENT> raise(e) <NEW_LINE> <DEDENT> return self.mail_password_response(self, REQUEST)
Wrapper around mailPassword
625941b599fddb7c1c9de17e
def __init__(self, *args): <NEW_LINE> <INDENT> super().__init__(*args, category=CATEGORY_GARAGE_DOOR_OPENER) <NEW_LINE> self._flag_state = False <NEW_LINE> serv_garage_door = self.add_preload_service(SERV_GARAGE_DOOR_OPENER) <NEW_LINE> self.char_current_state = serv_garage_door.configure_char( CHAR_CURRENT_DOOR_STATE, value=0) <NEW_LINE> self.char_target_state = serv_garage_door.configure_char( CHAR_TARGET_DOOR_STATE, value=0, setter_callback=self.set_state)
Initialize a GarageDoorOpener accessory object.
625941b51f5feb6acb0c4940
def active_thread_priority(self): <NEW_LINE> <INDENT> return _blocks_swig1.vector_sink_i_sptr_active_thread_priority(self)
active_thread_priority(vector_sink_i_sptr self) -> int
625941b563f4b57ef0000f0e
def is_286(self): <NEW_LINE> <INDENT> return self.num == 286
Verify whether num property is 286
625941b538b623060ff0abd9
def main(): <NEW_LINE> <INDENT> download_dataset() <NEW_LINE> u.initialize_process_group() <NEW_LINE> if u.is_parameter_server(): <NEW_LINE> <INDENT> process = ParameterServer() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> process = Worker() <NEW_LINE> <DEDENT> process.run() <NEW_LINE> process.cleanup() <NEW_LINE> del process
Main entry point of the optimization procedure.
625941b53c8af77a43ae3589
def _quote_field(data, field): <NEW_LINE> <INDENT> if data is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> fieldBuf = "" <NEW_LINE> escape = False <NEW_LINE> for c in data[field]: <NEW_LINE> <INDENT> if c == '"': <NEW_LINE> <INDENT> fieldBuf += '\\"' <NEW_LINE> escape = False <NEW_LINE> <DEDENT> elif c == '\\': <NEW_LINE> <INDENT> if escape: <NEW_LINE> <INDENT> fieldBuf += '\\\\' <NEW_LINE> escape = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> escape = True <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if escape: <NEW_LINE> <INDENT> fieldBuf += '\\' <NEW_LINE> <DEDENT> fieldBuf += c <NEW_LINE> escape = False <NEW_LINE> <DEDENT> <DEDENT> data[field] = '"%s"' % fieldBuf <NEW_LINE> return data
Quote a field in a list of DNS records. Return the new data records.
625941b5f8510a7c17cf94ef
@base_blueprint.route('/updateState/', methods=['GET', 'POST']) <NEW_LINE> @is_login <NEW_LINE> def update_state(): <NEW_LINE> <INDENT> user_id = session.get('user_id') <NEW_LINE> if request.method == 'POST': <NEW_LINE> <INDENT> data_json = request.get_data().decode('utf-8') <NEW_LINE> data_dict = json.loads(data_json) <NEW_LINE> state = State.query.filter_by(s_value=data_dict['s_value'], s_item_code=data_dict['s_item_code']).first() <NEW_LINE> if state: <NEW_LINE> <INDENT> result = {"flag": False, "value": "状态已经存在!"} <NEW_LINE> return result <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> state = State.query.filter_by(s_id=data_dict['s_id']).first() <NEW_LINE> state.s_id = data_dict['s_id'] <NEW_LINE> state.s_item_code = data_dict['s_item_code'] <NEW_LINE> state.s_value = data_dict['s_value'] <NEW_LINE> state.s_desc = data_dict['s_desc'] <NEW_LINE> state.s_create_user_id = user_id <NEW_LINE> state.s_create_time = datetime.now() <NEW_LINE> state.save() <NEW_LINE> result = {"flag": True, "value": "状态修改完成!"} <NEW_LINE> return result
确认修改状态
625941b5e64d504609d7462b
def current_action_distribution(self) -> torch.Tensor: <NEW_LINE> <INDENT> with torch.no_grad(): <NEW_LINE> <INDENT> action_scores = self.p_net(self.curr_state) <NEW_LINE> actions_proba = torch.softmax(action_scores, 0) <NEW_LINE> <DEDENT> return actions_proba
Returns the probability distribution over the next actions based on the current state and the policy model. :return: A tensor of shape (n_actions,) containing the probability of each action according to the policy.
625941b53d592f4c4ed1ce6c
def get_activated_devices(nm_client): <NEW_LINE> <INDENT> activated_devices = [] <NEW_LINE> for ac in nm_client.get_active_connections(): <NEW_LINE> <INDENT> if ac.get_state() != NM.ActiveConnectionState.ACTIVATED: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for device in ac.get_devices(): <NEW_LINE> <INDENT> activated_devices.append(device) <NEW_LINE> <DEDENT> <DEDENT> return activated_devices
Get activated NetworkManager devices.
625941b56e29344779a62401
def note(self): <NEW_LINE> <INDENT> print('chapter13.2 note as follow') <NEW_LINE> print('13.2 旋转') <NEW_LINE> print('当在含n个关键字的红黑树上运行时,查找树操作TREE-INSERT和TREE-DELETE的时间为O(lgn)') <NEW_LINE> print('由于这两个操作对树作了修改,结果可能违反13.1节中给出的红黑性质。', '为保持这些性质,就要改变树中某些结点的颜色以及指针结构') <NEW_LINE> print('指针结构的修改是通过旋转来完成的,这是一种能保持二叉查找树性质的查找树局部操作') <NEW_LINE> print('给出左旋和右旋。当某个结点x上做左旋时,假设它的右孩子不是nil[T],', 'x可以为树内任意右孩子不是nil[T]的结点') <NEW_LINE> print('左旋以x到y之间的链为\"支轴\"进行,它使y成为该该子树新的根,x成为y的左孩子,而y的左孩子则成为x的右孩子') <NEW_LINE> print('在LEFT-ROTATE的代码中,必须保证right[x]!=None,且根的父结点为None') <NEW_LINE> print('练习13.2-1 RIGHT-ROTATE的代码已经给出') <NEW_LINE> print('练习13.2-2 二查查找树性质:在一棵有n个结点的二叉查找树中,刚好有n-1种可能的旋转') <NEW_LINE> print('练习13.2-3 属于x结点的子孙结点,当结点x左旋时,x的子孙结点的深度加1') <NEW_LINE> print('练习13.2-4 二查查找树性质:任何一棵含有n个结点的二叉查找树,可以通过O(n)次旋转,', '转变为另外一棵含n个结点的二叉查找树') <NEW_LINE> print('练习13.2-5 如果二叉查找树T1可以右转成二叉查找树T2,则可以调用O(n^2)次RIGHT-ROTATE来右转')
Summary ==== Print chapter13.2 note Example ==== ```python Chapter13_2().note() ```
625941b5be7bc26dc91cd3f1
def compress(self): <NEW_LINE> <INDENT> rams = [[],[]] <NEW_LINE> datas = [] <NEW_LINE> for mode in range(3): <NEW_LINE> <INDENT> for order in range(2): <NEW_LINE> <INDENT> if mode == 0 and order == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for i in range(2): <NEW_LINE> <INDENT> rams[i] = self.image[i::2] <NEW_LINE> <DEDENT> self._interpret_compress(rams, mode, order) <NEW_LINE> datas += [(self.data[:], int(self.which_bit))] <NEW_LINE> <DEDENT> <DEDENT> datas = sorted(datas, key=lambda data_bit: (len(data_bit[0]), -data_bit[1])) <NEW_LINE> self.data, self.which_bit = datas[0]
Compress the image five times (twice for each mode, except 0) and use the smallest one (in bits).
625941b56fb2d068a760ee8c
def D(self): <NEW_LINE> <INDENT> d = np.zeros((3,3), dtype=np.float32) <NEW_LINE> for layer in self._layers: <NEW_LINE> <INDENT> za = self.get_z(layer, relpos=0.0) <NEW_LINE> ze = self.get_z(layer, relpos=1.0) <NEW_LINE> d += layer.Qbar() * (ze**3 - za**3) / 3.0 <NEW_LINE> <DEDENT> return d
Return the bending stiffness matrix D (3x3) of the laminate.
625941b5d53ae8145f87a061
@menu_app.post("/addOrUpMenu", apply=use_kwargs(add_menu_args)) <NEW_LINE> @utils.permissions_auth("post:/v1/sys/menu") <NEW_LINE> def add_menu(parent_id, menu_name, menu_url, menu_icon, sort_number, authority): <NEW_LINE> <INDENT> menu_name = utils.encode_utf8(menu_name) <NEW_LINE> res = SysMenuService.add_menu(parent_id, menu_name, menu_url, menu_icon, sort_number, authority) <NEW_LINE> if res: <NEW_LINE> <INDENT> return error_result(msg=res) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return success_result()
新增菜单 :param parent_id: :param menu_name: :param menu_url: :param menu_icon: :param sort_number: :param authority: :return:
625941b5a8370b771705268c
def calc_F(H, G, m=None): <NEW_LINE> <INDENT> utils.assert_square(H) <NEW_LINE> utils.assert_square(G) <NEW_LINE> n = H.shape[0] <NEW_LINE> I = np.identity(n) <NEW_LINE> IG = I-G <NEW_LINE> if m: <NEW_LINE> <INDENT> F = (np.trace(H.dot(G).dot(H)) / (m-1)) / (np.trace(IG.dot(G).dot(IG)) / (n-m)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> F = (np.trace(H.dot(G).dot(H))) / np.trace(IG.dot(G).dot(IG)) <NEW_LINE> <DEDENT> return F
Calculate the F statistic when comparing two matricies.
625941b530dc7b7665901756
def get_chart_data(symbol='BTC_ETH', timeframe=5, period=120): <NEW_LINE> <INDENT> timeframe_seconds = timeframe * 60 <NEW_LINE> start_time = time.time() - period * timeframe_seconds <NEW_LINE> req = 'https://poloniex.com/public?command=returnChartData&currencyPair=' + symbol + '&start=' + str(start_time) + '&end=9999999999&period=' + str(timeframe_seconds) <NEW_LINE> try: <NEW_LINE> <INDENT> r = requests.get(req) <NEW_LINE> res = r.json() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise ValueError(str(res) + ' e.g. 5,15,30,60 etc...') <NEW_LINE> return res <NEW_LINE> <DEDENT> return res
Get OHLC data of selected symbol :param symbol: :param timeframe: Bars timeframe (5,15,30, etc) :param period: Depth. Number of bars back to history :return: Returns JSON formatted data
625941b591af0d3eaac9b7fe
def predict_top_k_recomm(self, user_idx, k): <NEW_LINE> <INDENT> rated_items = self.util_mat[user_idx].nonzero()[1] <NEW_LINE> candidate_items = [] <NEW_LINE> user_list = [] <NEW_LINE> for item_idx in rated_items: <NEW_LINE> <INDENT> user_list = np.union1d( user_list, self.util_mat[:, item_idx].nonzero()[0]) <NEW_LINE> <DEDENT> for user in user_list: <NEW_LINE> <INDENT> candidate_items = np.union1d( candidate_items, self.util_mat[user].nonzero()[1]) <NEW_LINE> <DEDENT> candidate_items = np.setdiff1d(candidate_items, rated_items) <NEW_LINE> predictions = [] <NEW_LINE> for item_idx in candidate_items: <NEW_LINE> <INDENT> rating = self.predict_rating(user_idx, item_idx) <NEW_LINE> if rating > 0: <NEW_LINE> <INDENT> predictions.append((item_idx, rating)) <NEW_LINE> <DEDENT> <DEDENT> predictions.sort(key=lambda tup: tup[1], reverse=True) <NEW_LINE> return predictions[0: k] if len(predictions) > k else predictions
Return top k pairs of (item_idx, predicted_rating) according to the order of the predicted_rating. >>> recsys = RecSysBaseLine() >>> recsys.load_ratings("testcase_ratings.csv") >>> user_idx = recsys.get_user_idx("U1") >>> predictions = recsys.predict_top_k_recomm(user_idx, 2) >>> [(item, "%.2f" %sim) for (item, sim) in predictions] [(15.0, '4.00'), (16.0, '4.00')]
625941b531939e2706e4cc5c