Columns: code, docstring, _id
import sqlite3
import pandas as pd

def check_if_table_exists(name):
    conn = sqlite3.connect(db)  # db: module-level path to the SQLite database file
    sql = "SELECT name FROM sqlite_master WHERE type='table' AND name='{}'".format(name)
    table = pd.read_sql_query(sql=sql, con=conn)
    conn.close()
    # An empty result means the table does not exist
    return not table.empty
Pass the name of the table you want to check (not the symbol). If the table already exists the function returns True; if the table does not exist it returns False.
625941b5796e427e537b03c0
def createProcess(self, **kwargs):
    allParams = ['body']
    params = locals()
    for (key, val) in params['kwargs'].iteritems():
        if key not in allParams:
            raise TypeError("Got an unexpected keyword argument '%s' to method createProcess" % key)
        params[key] = val
    del params['kwargs']
    resourcePath = '/processes'
    resourcePath = resourcePath.replace('{format}', 'json')
    method = 'POST'
    queryParams = {}
    headerParams = {}
    formParams = {}
    bodyParam = None
    if formParams:
        headerParams['Content-type'] = 'application/x-www-form-urlencoded'
    if ('' in params):
        bodyParam = params['']
    postData = params['body'] if 'body' in params else None
    response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams)
    if not response:
        return None
    responseObject = self.apiClient.deserialize(response, 'Process')
    return responseObject
Create a new Process

Args:
    body, Process: Message-Body (optional)

Returns: Process
625941b566656f66f7cbbfa8
def call(self, url, getparams, postparams):
    if getparams:
        url += "/?%s" % urllib.urlencode(getparams)
    post = json.dumps(postparams)
    try:
        if postparams:
            req = urllib2.Request(url)
            req.add_header('Content-Type', 'application/json')
            return json.load(urllib2.urlopen(req, post))
        else:
            return json.load(urllib2.urlopen(url))
    except Exception as e:
        if isinstance(e, urllib2.URLError):
            return "Error: %s (readlines not possible)" % str(e)
        else:
            return "Error: %s (%s)" % (str(e), str(e.readlines()))
Make a GET or POST call, depending on the supplied parameters, and return the JSON-parsed result. If an error is thrown, that is returned instead.
625941b55166f23b2e1a4f57
def talk_m10_29_x14(lot1=_, flag1=_, text1=_, z1=_, z2=_):
    CreateKeyGuideArea(9, 600)
    assert ConversationRequest() != 0
    DeleteKeyGuideArea()
    call = talk_m10_29_x1(text1=text1, z17=0, z19=-1, z20=0)
    if call.Done() and GetEventFlag(flag1) != 0:
        SetEventFlag(z1, 1)
    elif call.Done() and CanGetItemLot(lot1, 1) != 1:
        assert talk_m10_29_x22(z3=1011, lot1=lot1)
    elif call.Done():
        assert talk_m10_29_x15(lot1=lot1, flag1=flag1, z1=z1, z2=z2, z9=0, z10=0)
    """State 9: End state"""
    return 0
[Lib] Conversation: Item transfer: Mes⇒Item: Key
lot1: Item lottery ID
flag1: Item transfer: Global event flag
text1: Conversation ID
z1: Conversation: Global conversation flag
z2: Trophy acquisition: Area and other flags
625941b51b99ca400220a8af
def test_filehandler_level(self, temporary_log_directory: local, patch_logging_filehandler: MagicMock,
                           test_input_name: str, test_input_filename: str) -> None:
    if test_input_filename:
        temporary_log_file = temporary_log_directory.join(test_input_filename)
        temporary_log_file_path = os.path.join(temporary_log_file.dirname, temporary_log_file.basename)
    else:
        temporary_log_file_path = None
    _ = create_logger(test_input_name, temporary_log_file_path)
    if test_input_filename:
        patch_logging_filehandler.return_value.setLevel.assert_called_once_with(logging.INFO)
    else:
        assert not patch_logging_filehandler.called
Test the file handler is set with the correct logging level.
625941b563f4b57ef0000f22
def create_indexs(self, dataset, max_length):
    words = ['<PAD>', '<UNK>']
    prefixes = ['<PAD>', '<UNK>']
    suffixes = ['<PAD>', '<UNK>']
    labels = ['<PAD>']
    positions = ['<PAD>', '<UNK>']
    prevword = ['<PAD>', '<UNK>']
    nextword = ['<PAD>', '<UNK>']
    class_rules = ['<PAD>', 'brand', 'drug', 'drug_n', 'group', 'none']
    for data in list(dataset.values()):
        pos = 0
        w_pack_prev = '<START>'
        for w_pack in data:
            if w_pack[0] not in words:
                words.append(w_pack[0])
            if w_pack[3] not in labels:
                labels.append(w_pack[3])
            if w_pack[0][:3] not in prefixes:
                prefixes.append(w_pack[0][:3])
            if w_pack[0][-3:] not in suffixes:
                suffixes.append(w_pack[0][-3:])
            if pos not in positions:
                positions.append(pos)
            if w_pack_prev not in prevword:
                prevword.append(w_pack_prev)
            if w_pack[0] not in nextword:
                nextword.append(w_pack[0])
            w_pack_prev = w_pack[0]
            pos += 1
        if '<END>' not in nextword:
            nextword.append('<END>')
    words = {k: v for v, k in enumerate(words)}
    labels = {k: v for v, k in enumerate(labels)}
    prefixes = {k: v for v, k in enumerate(prefixes)}
    suffixes = {k: v for v, k in enumerate(suffixes)}
    positions = {k: v for v, k in enumerate(positions)}
    prevword = {k: v for v, k in enumerate(prevword)}
    nextword = {k: v for v, k in enumerate(nextword)}
    class_rules = {k: v for v, k in enumerate(class_rules)}
    result = {}
    result['words'] = words
    result['labels'] = labels
    result['maxlen'] = max_length
    result['prev'] = prevword
    result['next'] = nextword
    result["pref"] = prefixes
    result["suff"] = suffixes
    result["position"] = positions
    result["class_rules"] = class_rules
    return result
Create index dictionaries both for input (words) and output (labels) from the given dataset.
625941b567a9b606de4a7cbc
def __init__(self):
    Gtk.Window.__init__(self, title="Xerar Pedido")
    bbdd = dbapi2.connect("bbdd.dat")
    self.cursor = bbdd.cursor()
    self.empresas = []
    cursorEmpresas = self.cursor.execute("select NombreEmpresa from Clientes")
    for row in cursorEmpresas:
        self.empresas.append(row[0])
    self.productos = []
    cursorProductos = self.cursor.execute("select producto from Productos")
    for row in cursorProductos:
        self.productos.append(row[0])
    builder = Gtk.Builder()
    builder.add_from_file("NovoPedido.glade")
    fiestra = builder.get_object("window")
    self.cmbEmpresas = builder.get_object("cmbEmpresas")
    self.cmbProductos = builder.get_object("cmbProductos")
    self.txtCantidade = builder.get_object("txtCantidade")
    self.txtFecha = builder.get_object("txtFecha")
    self.btnFacerPedido = builder.get_object("btnFacerPedido")
    self.txtTotal = builder.get_object("txtTotal")
    for empresa in self.empresas:
        self.cmbEmpresas.append_text(empresa)
    for producto in self.productos:
        self.cmbProductos.append_text(producto)
    sinais = {
        "on_btnFacerPedido_clicked": self.on_btnFacerPedido_clicked,
        "on_btnNovo_clicked": self.on_btnNovo_clicked
    }
    builder.connect_signals(sinais)
    fiestra.show_all()
Constructor of the XerarPedido class (Window). Two ComboBoxes are provided to select the company and the product, specifying the quantity and the date to place the order. :param None Exceptions: - None
625941b516aa5153ce362276
@reports.route('/report/<int:report_id>/update', methods=['GET', 'POST'])
@requires_access_level('admin')
def update_report(report_id):
    report = Report.query.get_or_404(report_id)
    form = ReportForm()
    if form.validate_on_submit():
        report.title = form.title.data
        report.date_published = form.date_published.data
        report.summary = form.summary.data.replace('\r', '<br>')
        report.table = form.table.data.replace('\r', '<br>')
        report.price = form.price.data
        report.status = form.status.data
        if form.pdf.data:
            report.pdf_file = save_pdf(form.pdf.data)
        db.session.commit()
        flash('Your report has been updated!', 'success')
        return redirect(url_for('reports.report', report_id=report_id))
    elif request.method == 'GET':
        form.title.data = report.title
        form.date_published.data = report.date_published
        form.summary.data = report.summary.replace('<br>', '\r')
        form.table.data = report.table.replace('<br>', '\r')
        form.price.data = report.price
        form.status.data = report.status
        if report.pdf_file:
            pdf_file = url_for('static', filename='report_pdfs/' + report.pdf_file)
        else:
            pdf_file = None
    return render_template('create_report.html', pdf_file=pdf_file, title='Update Report',
                           form=form, legend='Update Report')
Update a report.
:param report_id: unique id of the report
:return: on successful submission, redirect to reports.report for this report id;
         otherwise render create_report.html with pdf_file, title, form and legend
625941b5e5267d203edcdaa0
import datetime

def time_elapsed(nb_mins):
    nb_mins = max(1, int(nb_mins / 5)) * 5
    now = datetime.datetime.now()
    nb_mins_since_beginning_of_the_day = now.hour * 60 + now.minute
    # Round down to a multiple of 5 to match the watchdog cronjob cadence
    nb_mins_since_beginning_of_the_day = int(nb_mins_since_beginning_of_the_day / 5) * 5
    return nb_mins_since_beginning_of_the_day % nb_mins == 0
This function roughly returns True every nb_mins minutes (i.e. it returns True if the number of minutes elapsed since the beginning of the day, modulo nb_mins, is 0). nb_mins must be a multiple of 5 (because the watchdog cronjob runs every 5 mins). Some approximation is necessary because the cronjob cannot be guaranteed to run at a precise time; this is handled by rounding the number of minutes since the beginning of the day to a multiple of 5.
625941b5462c4b4f79d1d4cf
def load_chord_labels(directory="\\BPS_FH_Dataset\\"):
    dt = [('onset', 'float'), ('end', 'float'), ('key', '<U10'), ('degree', '<U10'),
          ('quality', '<U10'), ('inversion', 'int'), ('rchord', '<U10')]
    chord_labels = [None for _ in range(32)]
    for i in range(32):
        fileDir = directory + str(i + 1).zfill(2) + "\\chords.xlsx"
        workbook = xlrd.open_workbook(fileDir)
        sheet = workbook.sheet_by_index(0)
        chords = []
        for rowx in range(sheet.nrows):
            cols = sheet.row_values(rowx)
            if isinstance(cols[3], float):
                cols[3] = int(cols[3])
            chords.append(tuple(cols))
        chords = np.array(chords, dtype=dt)
        chord_labels[i] = r2tconvert(chords)
    return chord_labels
Load chords of each piece and add chord symbols into the labels. :param directory: the path of the dataset :return: chord_labels
625941b58e71fb1e9831d5ac
def relative_inner(x, y, z, transpose):
    xy_matmul = tf.matmul(x, y, transpose_b=transpose)
    x_t = tf.transpose(x, [1, 0, 2])
    x_tz_matmul = tf.matmul(x_t, z, transpose_b=transpose)
    x_tz_matmul_t = tf.transpose(x_tz_matmul, [1, 0, 2])
    return xy_matmul + x_tz_matmul_t
:param x: Tensor with shape [h*N, T_q, C/h]
:param y: Tensor with shape [h*N, T_k, C/h]
:param z: Tensor with shape [max_len, max_len, C/h]
:param transpose:
:return:
625941b515fb5d323cde0907
def PUpdateTables(inImage, err):
    if ('myClass' in inImage.__dict__) and (inImage.myClass == 'AIPSImage'):
        raise TypeError("Function unavailable for " + inImage.myClass)
    inCast = inImage.cast('ObitImage')
    inCast.UpdateTables(err)
Update any disk resident structures about the current tables

* inImage = Python Image object
* err     = Python Obit Error/message stack
625941b58a43f66fc4b53e69
def year_projections(self, url, stats_from_row):
    today = date.today()
    date_string = str(today)
    LOGGER.debug(f"Using date_str: {date_string}")
    cache_key = f"fantasypros/{FantasyProsApi._path_segment(url)}/{date_string}.p"
    LOGGER.debug(f"Looking in path {cache_key}")
    cache_location = Path(cache_key)
    if cache_location.exists():
        f = cache_location.open("rb")
        return pickle.load(f)
    results = {}
    for tr in self.table_rows(url):
        name = FantasyProsApi.name_from_row(tr)
        if name is None:
            continue
        stats = stats_from_row(tr)
        results[name] = stats
    f = cache_location.open("wb+")
    pickle.dump(results, f)
    return results
Scrape the given url for year-long projections, converting with the given function :return dict: dictionary of name to Stats object
625941b53317a56b86939a69
def send_email(to, subject, html_content, files=None, dryrun=False, cc=None, bcc=None,
               mime_subtype='mixed', mime_charset='utf-8', **kwargs):
    path, attr = conf.get('email', 'EMAIL_BACKEND').rsplit('.', 1)
    module = importlib.import_module(path)
    backend = getattr(module, attr)
    to = get_email_address_list(to)
    to = ", ".join(to)
    return backend(to, subject, html_content, files=files, dryrun=dryrun, cc=cc, bcc=bcc,
                   mime_subtype=mime_subtype, mime_charset=mime_charset, **kwargs)
Send email using backend specified in EMAIL_BACKEND.
625941b5bde94217f3682bfb
def blur_image(img: np.ndarray, ksize: Tuple[int, int] = (10, 10)) -> np.ndarray:
    if len(img.shape) == 2:
        img = np.expand_dims(img, axis=-1)
    h, w, c = img.shape
    assert h % ksize[1] == 0 and w % ksize[0] == 0, 'Image sizes must be divisible by blur kernel sizes.'
    ny = h // ksize[1]
    nx = w // ksize[0]
    # Reshape so each pooling region becomes its own axis pair, then replace
    # every region with its median
    new_shape = (ny, ksize[1], nx, ksize[0]) + (c,)
    img = img.copy().reshape(new_shape)
    img[...] = np.median(img, axis=(1, 3), keepdims=True)
    if c > 1:
        img = img.reshape(h, w, c)
    else:
        img = img.reshape(h, w)
    return img
Median pooling blur.

Parameters:
    img: image to blur.
    ksize: (width, height) pooling kernel; defines the size of each blurring region.
625941b5377c676e91271fa9
def setKeyPair(self, key):
    pass
SETTER
625941b530bbd722463cbbc1
def isLink(self, segments):
    if self._isVirtualPath(segments):
        return False
    path = self.getRealPathFromSegments(segments)
    path_encoded = path.encode('utf-8')
    with self._impersonateUser():
        try:
            stats = os.lstat(path_encoded)
            return bool(stat.S_ISLNK(stats.st_mode))
        except OSError:
            return False
See `ILocalFilesystem`.
625941b50c0af96317bb7fe8
def __init__(self, input_dataset, count, name=None):
    self._input_dataset = input_dataset
    self._count = ops.convert_to_tensor(count, dtype=dtypes.int64, name="count")
    self._metadata = dataset_metadata_pb2.Metadata()
    if name:
        self._metadata.name = _validate_and_encode(name)
    kwargs = self._flat_structure
    if name or compat.forward_compatible(2021, 9, 30):
        kwargs["metadata"] = self._metadata.SerializeToString()
    variant_tensor = gen_dataset_ops.skip_dataset(
        input_dataset._variant_tensor, count=self._count, **kwargs)
    super(SkipDataset, self).__init__(input_dataset, variant_tensor)
See `Dataset.skip()` for details.
625941b5ec188e330fd5a5a7
def test_nested_info():
    with DebugInherit(a=1):
        with DebugInherit(b=2):
            with DebugInherit(c=3):
                info = DebugInfo(d=4)
                assert info.a == 1
                assert info.b == 2
                assert info.c == 3
                assert info.d == 4
    info = DebugInfo(d=4)
    for attr in "abc":
        assert not hasattr(info, attr)
    assert info.d == 4
Test DebugInherit as context manager.
625941b563d6d428bbe442ee
def output_files(self):
    base = self.output_name
    files = [base] + [base + a for a in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ']
    return filter(os.path.exists, files)
Return all output files produced, in alphabetical order.
625941b5796e427e537b03c1
def add_bias(self, X):
    X = check_type(X)
    return np.c_[X, np.ones(X.shape[0])]
Add a bias feature to the matrix.

Returns
-------
An array-like object of size (n_sample, n_feature + 1)
625941b53346ee7daa2b2b68
@cli.group()
def code():
    pass
Code extraction and testing
625941b58e7ae83300e4adcb
def main():
    args = parse_args(None)
    command = args.command(args)
    command.execute()
This is the main method of the application. :return: None
625941b523849d37ff7b2e91
def solution2(linked_list, target):
    prev = linked_list.head
    node = linked_list.head.next
    have_met = False
    append_list = []
    prepend_list = []
    while node is not None:
        if node.data == target:
            have_met = True
        if not have_met and node.data > target:
            append_list.append(node.data)
            prev.next = node.next
            linked_list.count -= 1
        elif have_met and node.data < target:
            prepend_list.append(node.data)
            prev.next = node.next
            linked_list.count -= 1
        else:
            prev = node
        node = node.next
    node = prev
    while append_list:
        node.next = Node(append_list.pop())
        node = node.next
        linked_list.count += 1
    prev = linked_list.head
    node = linked_list.head.next
    while prepend_list:
        node = prev.next = Node(prepend_list.pop(), node)
        linked_list.count += 1
- input: 6->3->8->1->5->9 and 5 (a list and the value 5 are given)
- output: 3->1->5->6->8->9 (values smaller than 5 end up to the left of 5, larger values to its right)
625941b563b5f9789fde6ee5
def test_ChainSet_range_regexp_relative_fail_both(self):
    result = self.chains.get("*6-#8")
    self.assertEquals(result, ChainSet())
Test the range regexp-relative fail-both case, as used in chains.get(regexp-relative)
625941b5377c676e91271faa
import numpy as np
from numpy import mat, zeros, inf, nonzero, mean

def kMeans(data, k, dist=distEclud, createCent=randCent):
    m = np.shape(data)[0]
    clusterAssment = mat(zeros((m, 2)))
    centroids = createCent(data, k)
    clusterChanged = True
    while clusterChanged:
        clusterChanged = False
        # Assign each point to its nearest centroid
        for i in range(m):
            minDist = inf
            minIndex = -1
            for j in range(k):
                distJI = dist(centroids[j], data[i])
                if distJI < minDist:
                    minDist = distJI
                    minIndex = j
            if clusterAssment[i, 0] != minIndex:
                clusterChanged = True
            clusterAssment[i, :] = minIndex, minDist ** 2
        # Recompute each centroid as the mean of its assigned points
        for cent in range(k):
            ptsInClust = data[nonzero(clusterAssment[:, 0].A == cent)[0]]
            centroids[cent, :] = mean(ptsInClust, axis=0)
    return centroids, clusterAssment
basic k-means algorithm

:param data: data matrix
:param k: number of clusters
:param dist: distance function
:param createCent: random centroid creation function
:return: centroids and clusterAssment (line_id to cluster_id map)
625941b591af0d3eaac9b813
def dequeue(self):
    if len(self._items) != 0:
        return self._items.pop(0)
    else:
        print("You have popped all of the items!")
Removes item from the queue
625941b5e1aae11d1e749ab3
def get_auction_ids(self):
    return self.db.get_single_column('id', 'auction', 'house_id = {}'.format(self.id))
Return list of ids for all auctions in this auction house.
625941b550485f2cf553cb98
def tmConnectedNotify(self):
    self.tmStatusField.set("CONNECTED")
    self.tmStatusField.setBackground(COLOR_CONNECTED)
Called when the TM connect function is successfully processed
625941b507d97122c417868a
def grant_role2(args, role, user, project):
    ui.info_section('== Grant Role ==\n')
    try:
        # Look up the ids directly instead of parsing the objects' repr strings,
        # which is what the original string-splitting code was doing.
        id_role = ks.roles.list(name=role)[0].id
        id_user = ks.users.list(name=user)[0].id
        id_project = ks.projects.list(name=project)[0].id
        ex = ks.roles.grant(role=id_role, user=id_user, system=None, project=id_project)
        ui.info_3(ex)
        ui.info_3('\n Done ', ui.check)
    except Exception as e:
        ui.info(ui.red, ui.bold, 'Error: ', ui.reset, e)
Grant a role by name
625941b5cdde0d52a9e52e2e
def __getParagraphParent(self):
    pDelims = self.textobject.paragraphDelims
    parent = pDelims[0]
    for pSlice in pDelims:
        if pSlice.start <= self.start:
            parent = pSlice
        else:
            break
    return parent.start
returns the start delim of the parent paragraph
625941b5a8370b77170526a1
def get_parallels(self):
    parallelstring = self.get_tool_result_parallels(self.tool, key='parallels')
    return utils.check_int_list(parallelstring)
get parallels @return: list
625941b5d10714528d5ffade
def find_or_generate_new_torv3_keys(args):
    secret_key_path = os.path.join(args.ansible_path, "tor_v3_keys.json")
    if os.path.exists(secret_key_path):
        print('Tor v3 onion service keys already exist in: {}'.format(secret_key_path))
        return 0
    app_journalist_public_key, app_journalist_private_key = generate_new_v3_keys()
    app_ssh_public_key, app_ssh_private_key = generate_new_v3_keys()
    mon_ssh_public_key, mon_ssh_private_key = generate_new_v3_keys()
    tor_v3_service_info = {
        "app_journalist_public_key": app_journalist_public_key,
        "app_journalist_private_key": app_journalist_private_key,
        "app_ssh_public_key": app_ssh_public_key,
        "app_ssh_private_key": app_ssh_private_key,
        "mon_ssh_public_key": mon_ssh_public_key,
        "mon_ssh_private_key": mon_ssh_private_key,
    }
    with open(secret_key_path, 'w') as fobj:
        json.dump(tor_v3_service_info, fobj, indent=4)
    print('Tor v3 onion service keys generated and stored in: {}'.format(secret_key_path))
    return 0
This method will either read v3 Tor onion service keys if found or generate a new public/private keypair.
625941b50383005118ecf3e4
def get_signature(self):
    raise NotImplementedError
Get the signature of the current state of the repository

Returns:
    str
625941b58da39b475bd64d76
def OnDeleteUnit(self, event):
    msg = 'Are you sure you wish to delete %s' % self.displayUnit.name
    dlg = wxMessageDialog(self, msg, "Confirm Delete", wxYES_NO)
    dlg.CentreOnParent()
    if dlg.ShowModal() == wxID_YES:
        self.units.DeleteUnit(self.displayUnit.id)
        self.displayUnit = None
        self.Changed()
        self.UpdateItemsList()
        self.UpdateTypeList()
    dlg.Destroy()
delete displayUnit - ask for confirmation
625941b5e64d504609d74640
def _get_hyperparameters() -> kerastuner.HyperParameters:
    hp = kerastuner.HyperParameters()
    hp.Float('learning_rate', min_value=1e-4, max_value=1e-2, sampling='log',
             default=0.0009167702421017742)
    hp.Int('n_layers', 1, 2, default=2)
    with hp.conditional_scope('n_layers', 1):
        hp.Int('n_units_1', min_value=8, max_value=128, step=8, default=72)
    with hp.conditional_scope('n_layers', 2):
        hp.Int('n_units_1', min_value=8, max_value=128, step=8, default=128)
        hp.Int('n_units_2', min_value=8, max_value=128, step=8, default=80)
    return hp
Returns hyperparameters for building Keras model. This function defines a conditional hyperparameter space and default values that are used to build the model. Args: None. Returns: A kerastuner HyperParameters object.
625941b5d6c5a10208143e46
def after_all(context):
    dbutils.close_cn(context.cn)
    dbutils.drop_db(context.conf['host'], context.conf['port'], context.conf['user'],
                    context.conf['pass'], context.conf['dbname'])
Tear down: close the connection and drop the test database.
625941b5a8ecb033257d2ed5
def get_links(html, url=None, local=True, external=True):
    def normalize_link(link):
        if urlsplit(link).scheme in ('http', 'https', ''):
            if '#' in link:
                link = link[:link.index('#')]
            if url:
                link = urljoin(url, link)
                if not local and common.same_domain(url, link):
                    link = None
                if not external and not common.same_domain(url, link):
                    link = None
        else:
            link = None
        return link
    # findall, not search: a list is needed for the concatenation below
    a_links = a_re.findall(html)
    js_links = js_re.findall(html)
    links = []
    for link in a_links + js_links:
        try:
            link = normalize_link(link)
        except UnicodeError:
            pass
        else:
            if link and link not in links:
                links.append(link)
    return links
Return all links from html and convert relative to absolute if a source url is provided. local determines whether to include links from the same domain; external determines whether to include links from other domains.
625941b5656771135c3eb672
def __rdiv__(self, other):
    return divide(other, self)
Return divide(other, self)
625941b5187af65679ca4f24
def successors(self, vertex):
    assert self.has_vertex(vertex)
    return list(self._edges[vertex])
Return a list of the vertices a vertex has outgoing edges to.
625941b530c21e258bdfa29d
def Trupke2007(il_imag, Uil, ec_imag, Uec, Jsc, Ap, ilsc_imag=0, ecsc_imag=0, temp=300):
    # Parameter names aligned with the docstring; the original signature
    # (li_imag, Ut, ce_imag, ...) did not match the names used in the body,
    # and Jsc was used without being defined.
    img_cal = il_imag - ilsc_imag
    Ci = util.get_Ci_negliableRs(img_cal, Uil)
    img_rs = ec_imag - ecsc_imag
    DU = Uec - util.voltage_map(img_rs, Ci, temp=temp)
    # Diode law with thermal voltage kT/e; the original was missing the parentheses.
    J0 = Jsc / (np.exp(Uec / (const.k * temp / const.e)) - 1.)
    Ji = Jsc - J0 * img_rs / Ci
    Rs = DU * Ap / Ji
    return Rs
Takes two photoluminescence input images, each corrected by its short circuit image. The first image minus its short circuit image is used to calibrate the optical constant. This assumes:

    1. The terminal voltage is equal to the local voltage ($U_t = U_i$), i.e. series resistance has no impact.
    2. The calibration $C_i$ is not injection dependent. This is only true if the sample is in low injection.
    3. The short circuit image represents the voltage independent carriers of the open circuit image.

This calibration makes the method robust against $J_0$ variation. With this calibration, any image can be converted into a voltage, and hence a voltage drop map. The second image is then used to determine a Delta V map. The voltage drop is calculated as

    $\Delta U = U_t - V_t \ln\left(\frac{I_{PL}}{C_i}\right)$

The lateral current is determined from the same image:

    $J_i = J_{sc} - J_0 \frac{I_{PL}}{C_i}$

where $J_0$ is taken as the ratio of $J_{sc}$ to $\exp(U_{ec}/V_t)$. This estimation of the current assumes that the change in the PL signal goes to the fingers; it does not account for changes in lateral currents. Finally:

    $R_{s,i} = \frac{\Delta U_{R_s,i} A_i}{J_i}$

Inputs:
    il_imag: (ndarray, representing counts per unit time) The first PL image, at low intensity (~0.1 sun).
    Uil: (float) Terminal voltage measurement of il_imag.
    ec_imag: (ndarray, representing counts per unit time) The second PL image, at high intensity with current extraction.
    Uec: (float) Terminal voltage measurement of the ec image.
    Jsc: (float) Short circuit current density, used to estimate J_0.
    Ap: (float) The area of a pixel.
    ilsc_imag: (ndarray, representing counts per unit time, optional) The short circuit image of il_imag.
    ecsc_imag: (ndarray, representing counts per unit time, optional) The short circuit image of ec_imag.

Output:
    Rs: (ndarray) A series resistance image.

doi: 10.1002/pssr.200903175
Example that it can be used for inhomogeneous J0 (DOI: 10.1002/pssr.200903175).
625941b5091ae35668666d66
def __str__(self):
    return "Precision('%s %s')" % (self.getPrecision(), self.__typeid.getType())
String representation.
625941b5a17c0f6771cbde54
def get_entries(self):
    return self.find_by_device_description({
        "deviceType": "urn:dial-multiscreen-org:device:dial:1",
        "friendlyName": "[LG] webOS TV"
    })
Get all the LG WebOS TV device uPnP entries.
625941b597e22403b379cd98
def _has_access_descriptor(user, action, descriptor, course_key=None):
    def can_load():
        if descriptor.visible_to_staff_only and not _has_staff_access_to_descriptor(user, descriptor, course_key):
            return False
        if not _has_group_access(descriptor, user, course_key):
            return _has_staff_access_to_descriptor(user, descriptor, course_key)
        if settings.FEATURES['DISABLE_START_DATES'] and not is_masquerading_as_student(user, course_key):
            debug("Allow: DISABLE_START_DATES")
            return True
        if 'detached' not in descriptor._class_tags and descriptor.start is not None:
            now = datetime.now(UTC())
            effective_start = _adjust_start_date_for_beta_testers(
                user, descriptor, course_key=course_key
            )
            if in_preview_mode() or now > effective_start:
                debug("Allow: now > effective start date")
                return True
            return _has_staff_access_to_descriptor(user, descriptor, course_key)
        debug("Allow: no start date")
        return True
    checkers = {
        'load': can_load,
        'staff': lambda: _has_staff_access_to_descriptor(user, descriptor, course_key),
        'instructor': lambda: _has_instructor_access_to_descriptor(user, descriptor, course_key)
    }
    return _dispatch(checkers, action, user, descriptor)
Check if user has access to this descriptor.

Valid actions:
'load' -- load this descriptor, showing it to the user.
'staff' -- staff access to descriptor.

NOTE: This is the fallback logic for descriptors that don't have custom policy (e.g. courses). If you call this method directly instead of going through has_access(), it will not do the right thing.
625941b5aad79263cf39083b
def _pairwise_distances(embeddings, squared=False):
    dot_product = tf.matmul(embeddings, tf.transpose(embeddings))
    square_norm = tf.diag_part(dot_product)
    distances = tf.expand_dims(square_norm, 1) - 2.0 * dot_product + tf.expand_dims(square_norm, 0)
    distances = tf.maximum(distances, 0.0)
    if not squared:
        mask = tf.to_float(tf.equal(distances, 0.0))
        distances = distances + mask * 1e-16
        distances = tf.sqrt(distances)
        distances = distances * (1.0 - mask)
    return distances
Compute the 2D matrix of distances between all the embeddings.

Args:
    embeddings: tensor of shape (batch_size, embed_dim)
    squared: Boolean. If true, output is the pairwise squared euclidean distance matrix.
             If false, output is the pairwise euclidean distance matrix.

Returns:
    pairwise_distances: tensor of shape (batch_size, batch_size)
625941b5cad5886f8bd26de2
def _validate_alpn_challenge(self, challenge_name, fqdn, token, jwk_thumbprint):
    self.logger.debug('Challenge._validate_alpn_challenge({0}:{1}:{2})'.format(challenge_name, fqdn, token))
    (response, invalid) = fqdn_resolve(fqdn, self.dns_server_list)
    self.logger.debug('fqdn_resolve() ended with: {0}/{1}'.format(response, invalid))
    sha256_digest = sha256_hash_hex(self.logger, '{0}.{1}'.format(token, jwk_thumbprint))
    extension_value = b64_encode(self.logger, bytearray.fromhex('0420{0}'.format(sha256_digest)))
    self.logger.debug('computed value: {0}'.format(extension_value))
    if not invalid:
        if self.proxy_server_list:
            proxy_server = proxy_check(self.logger, fqdn, self.proxy_server_list)
        else:
            proxy_server = None
        cert = servercert_get(self.logger, fqdn, 443, proxy_server)
        if cert:
            san_list = cert_san_get(self.logger, cert, recode=False)
            fqdn_in_san = fqdn_in_san_check(self.logger, san_list, fqdn)
            if fqdn_in_san:
                extension_list = cert_extensions_get(self.logger, cert, recode=False)
                if extension_value in extension_list:
                    self.logger.debug('alpn validation successful')
                    result = True
                else:
                    self.logger.debug('alpn validation not successful')
                    result = False
            else:
                self.logger.debug('fqdn check against san failed')
                result = False
        else:
            self.logger.debug('no cert returned...')
            result = False
    else:
        result = False
    self.logger.debug('Challenge._validate_alpn_challenge() ended with: {0}/{1}'.format(result, invalid))
    return (result, invalid)
validate tls-alpn-01 challenge
625941b5dd821e528d63afab
def size(self):
    with self.lock:
        length = len(self.search_requests)
    return length
Returns the number of search requests queued to be processed.
625941b550812a4eaa59c126
def __setitem__(self, key, value):
    if key == 'path':
        if isinstance(value, str):
            value = bytestring_path(value)
        elif isinstance(value, BLOB_TYPE):
            value = bytes(value)
    elif key == 'album_id':
        self._cached_album = None
    changed = super()._setitem(key, value)
    if changed and key in MediaFile.fields():
        self.mtime = 0
Set the item's value for a standard field or a flexattr.
625941b54e4d5625662d41de
def logout(self, deletepw=False):
    if (hasattr(self, 'username') and hasattr(self, 'password') and hasattr(self, 'session')):
        if deletepw is True:
            try:
                keyring.delete_password("astroquery:www.cosmosim.org", self.username)
                warnings.warn("Removed password for {0} in the keychain."
                              .format(self.username))
            except keyring.errors.PasswordDeleteError:
                warnings.warn("Password for {0} was never stored in the "
                              "keychain.".format(self.username))
        del self.session
        del self.username
        del self.password
    else:
        logging.error("You must log in before attempting to logout.")
Public function which allows the user to logout of their cosmosim credentials.

Parameters
----------
deletepw : bool
    A hard logout - delete the password for the associated username from the keychain. The default is False.

Returns
-------
None
625941b5fbf16365ca6f5fbb
def cmd_create(self, player, msg, channel):
    if len(msg) == 1:
        return minqlx.RET_USAGE
    nick = ' '.join(msg[1:])
    self.set_cvar("qlx_cleverbotNick", nick)
    self.post_data("create", channel=channel)
Creates the bot with the nick given. API Doc: https://docs.cleverbot.io/docs/getting-started
625941b53617ad0b5ed67cff
def get_posterior_distribution(self, *, coef_ols=None, sigma=None, sse_ols=None):
    self.normal_parameters = self._get_normal_posterior_parameters(coef_ols, sigma)
    self.wishart_parameters = self._get_wishart_posterior_parameters(coef_ols, sse_ols)
    return self
:param coef_ols: ndarray, estimated by OLS
:param sigma: ndarray, drawn sigma
:param sse_ols: ndarray, computed sum of squared errors from OLS
:return:
    mean: ndarray vector, mean of the posterior Normal distribution
    variance: ndarray, variance-covariance of the posterior Normal distribution
625941b510dbd63aa1bd29b0
def test_close() -> None:
    res = lambda_function.close("state", {"content": "message"})
    action = res["dialogAction"]
    assert action["type"] == "Close"
    assert action["fulfillmentState"] == "state"
    assert action["message"] == {"content": "message"}
Test lambda_function.close method
625941b54527f215b584c25c
@cli.command()
@click.option('--state', default=None, help='Filter results by state.')
@click.option('--id', help='Display ids.', is_flag=True)
@click.option('--vhosts', help='Display vhosts.', default=True, is_flag=True)
@click.option('--type', help='Display types.', is_flag=True)
@click.option('--limit', help='Limit number of results.', default=100, show_default=True)
@pass_gandi
def list(gandi, state, id, vhosts, type, limit):
    options = {
        'items_per_page': limit,
    }
    if state:
        options['state'] = state
    output_keys = ['name', 'state']
    if id:
        output_keys.append('id')
    if vhosts:
        output_keys.append('vhost')
    if type:
        output_keys.append('type')
    paas_hosts = {}
    result = gandi.paas.list(options)
    for paas in result:
        paas_hosts[paas['id']] = []
        if vhosts:
            list_vhost = gandi.vhost.list({'paas_id': paas['id']})
            for host in list_vhost:
                paas_hosts[paas['id']].append(host['name'])
        gandi.separator_line()
        output_paas(gandi, paas, [], paas_hosts[paas['id']], output_keys)
    return result
List PaaS instances.
625941b51f5feb6acb0c4956
def run_game(self):
    error = ''
    try:
        self.handle_game_logic()
    except Exception as e:
        error = traceback.format_exc()
        sys.stderr.write('Error Occurred\n')
        if self.show_traceback:
            error_desc = str(error)
        else:
            error_desc = type(e).__name__ + ': ' + str(e)
        sys.stderr.write(error_desc + '\n')
        if self.verbose_log:
            self.verbose_log.write(error)
    finally:
        if self.end_wait:
            for runner in self.runners:
                runner.resume()
            if self.verbose_log and self.end_wait > 1:
                self.verbose_log.write('waiting {0} seconds for bots to process end turn\n'.format(self.end_wait))
            time.sleep(self.end_wait)
        for runner in self.runners:
            if runner.is_alive:
                runner.kill()
            runner.release()
    game_result = self.get_game_results(error)
    if self.replay_log:
        json.dump(game_result, self.replay_log, sort_keys=True)
    return game_result
runs the game

:return: the replay data
:rtype: dict
625941b5b7558d58953c4d1c
def turn_on(self, **kwargs):
    self.wink.set_state(True)
Turn the device on.
625941b5a79ad161976cbf46
def Stop(self):
    pass
Stop.
625941b5f7d966606f6a9e09
def middleNode(self, head):
    currentNode = head
    listLength = 0
    while currentNode is not None:
        listLength += 1
        currentNode = currentNode.next
    currentNode = head
    for i in range(listLength // 2):
        currentNode = currentNode.next
    return currentNode
:type head: ListNode
:rtype: ListNode
625941b5f548e778e58cd37c
def error(_bot, update, error_):
    logger.warning('Update "%s" caused error "%s"', update, error_)
Log Errors caused by Updates.
625941b59b70327d1c4e0bd4
def fields(jira, args):
    print("Available JIRA fields (name, id):")
    pprint.pprint([(field['name'], field['id']) for field in jira.fields()])
List available JIRA field names and IDs
625941b53cc13d1c6d3c7185
def _create_dvr_gateway(self, ex_gw_port, gw_interface_name, snat_ports):
    snat_ns = self.create_snat_namespace()
    for port in snat_ports:
        self._set_subnet_info(port)
        interface_name = self.get_snat_int_device_name(port['id'])
        self._internal_network_added(snat_ns.name, port['network_id'], port['id'],
                                     port['ip_cidr'], port['mac_address'], interface_name,
                                     dvr_snat_ns.SNAT_INT_DEV_PREFIX)
    self._external_gateway_added(ex_gw_port, gw_interface_name, snat_ns.name, preserve_ips=[])
    self.snat_iptables_manager = iptables_manager.IptablesManager(
        namespace=snat_ns.name, use_ipv6=self.use_ipv6)
    self.agent.process_router_add(self)
Create SNAT namespace.
625941b591af0d3eaac9b814
def __init__(self, hass, name, host, port, tcp_port, encryption=False, username=None,
             password=None, turn_on_action=None, turn_off_action=None,
             timeout=DEFAULT_TIMEOUT, websocket=True):
    import jsonrpc_async
    import jsonrpc_websocket
    self.hass = hass
    self._name = name
    kwargs = {
        'timeout': timeout,
        'session': async_get_clientsession(hass),
    }
    if username is not None:
        kwargs['auth'] = aiohttp.BasicAuth(username, password)
        image_auth_string = "{}:{}@".format(username, password)
    else:
        image_auth_string = ""
    http_protocol = 'https' if encryption else 'http'
    ws_protocol = 'wss' if encryption else 'ws'
    self._http_url = '{}://{}:{}/jsonrpc'.format(http_protocol, host, port)
    self._image_url = '{}://{}{}:{}/image'.format(
        http_protocol, image_auth_string, host, port)
    self._ws_url = '{}://{}:{}/jsonrpc'.format(ws_protocol, host, tcp_port)
    self._http_server = jsonrpc_async.Server(self._http_url, **kwargs)
    if websocket:
        self._ws_server = jsonrpc_websocket.Server(self._ws_url, **kwargs)
        self._ws_server.Player.OnPause = self.async_on_speed_event
        self._ws_server.Player.OnPlay = self.async_on_speed_event
        self._ws_server.Player.OnResume = self.async_on_speed_event
        self._ws_server.Player.OnSpeedChanged = self.async_on_speed_event
        self._ws_server.Player.OnStop = self.async_on_stop
        self._ws_server.Application.OnVolumeChanged = self.async_on_volume_changed
        self._ws_server.System.OnQuit = self.async_on_quit
        self._ws_server.System.OnRestart = self.async_on_quit
        self._ws_server.System.OnSleep = self.async_on_quit

        def on_hass_stop(event):
            self.hass.async_add_job(self._ws_server.close())

        self.hass.bus.async_listen_once(
            EVENT_HOMEASSISTANT_STOP, on_hass_stop)
    else:
        self._ws_server = None
    if turn_on_action is not None:
        turn_on_action = script.Script(
            self.hass, turn_on_action,
            "{} turn ON script".format(self.name),
            self.async_update_ha_state(True))
    if turn_off_action is not None:
        turn_off_action = script.Script(
            self.hass, _check_deprecated_turn_off(hass, turn_off_action),
            "{} turn OFF script".format(self.name))
    self._turn_on_action = turn_on_action
    self._turn_off_action = turn_off_action
    self._enable_websocket = websocket
    self._players = list()
    self._properties = {}
    self._item = {}
    self._app_properties = {}
Initialize the Kodi device.
625941b556ac1b37e6263fe1
def findSuid(self):
    print('[*] Scanning file system for suid binaries')
    paths = []
    bins = []
    temp_file = subprocess.check_output("mktemp", shell=True).decode('utf-8')[:-1]
    cmd1 = f"find / -type f -perm -u=s 2>/dev/null | tee {temp_file}"
    cmd2 = f"cat {temp_file} | rev | cut -f 1 -d \"/\" | rev"
    paths = subprocess.getoutput(cmd1).split('\n')[:-1]
    bins = subprocess.check_output(cmd2, shell=True).decode('utf-8').split('\n')[:-1]
    return bins, dict(zip(bins, paths))
This method searches for suid binaries on the current file system. It currently uses find and stores the result in a temp file; this needs to be fixed.
625941b5adb09d7d5db6c594
def get_news(url, n_pages=1):
    news = []
    while n_pages:
        print("Collecting data from page: {}".format(url))
        response = requests.get(url)
        soup = BeautifulSoup(response.text, "html.parser")
        news_list = extract_news(soup)
        next_page = extract_next_page(soup)
        url = "https://news.ycombinator.com/" + next_page
        news.extend(news_list)
        n_pages -= 1
        if n_pages > 0:
            sleep(30)
    return news
Collect news from a given web page
625941b567a9b606de4a7cbe
def test_gwas_start_longer_remark(self):
    Log.debug('test_gwas_start_longer_remark start')
    para_id = 15
    data_id = 15007
    res = self.cal_gwas.base_gwas_start(para_id=para_id, data_id=data_id,
                                        cookies=self.admin_cookies, isChange=1, maxFile=0)
    self.assertTrue(res, msg='result check fail')
    Log.debug('test_gwas_start_longer_remark end')
HK remark is too long; starting the GWAS fails
625941b5d164cc6175782b4e
def test_get_colourmap(self):
    self.failUnlessRaises(RuntimeError, util.get_colourmap, 'xyzzy')
    expected = 'cool'
    got = util.get_colourmap('cool')
    self.failUnlessEqual(got, expected)
    expected = 'cool'
    got = util.get_colourmap('COOL')
    self.failUnlessEqual(got, expected)
    expected = 'hazmap.cpt'
    got = util.get_colourmap('hazmap')
    self.failUnlessEqual(os.path.basename(got), expected)
    expected = 'hazmap.cpt'
    got = util.get_colourmap('HazMap')
    self.failUnlessEqual(os.path.basename(got), expected)
Test the get_colourmap() function.
625941b5925a0f43d2549c74
def filter_info(self):
    tbl = astropy.table.Table.read(self.params["filename"], format="ascii", header_start=-1)
    meta = tbl.meta
    if not meta:
        cmts_dict = {"comments": ""}
    else:
        cmts_list = meta["comments"]
        cmts_str = "\n".join(cmts_list)
        cmts_dict = yaml.full_load(cmts_str)
        if type(cmts_dict) is str:
            cmts_dict = {"comments": cmts_dict}
    cmts_dict["filename"] = self.params["filename"]
    return cmts_dict
Returns the filter properties as a dictionary

Examples:
---------
Creating a Table with fake values::

    >>> meta_dict = {"comments": {"author : me", "source : me", "date : today", "status : ready", "center : 0.0", "width : 1.1"}}
    >>> T = astropy.table.Table(data=[[1.1, 1.2, 1.3], [0.1, 0.2, 0.3]], names=("wavelength", "transmission"), meta=meta_dict, copy=True)
    >>> T.write("tmp_table.dat", format="ascii", fast_writer=False)

Reading the transmission curve::

    >>> Tc = TransmissionCurve("tmp_table.dat")
    >>> Tc.filter_info()
    {'author': 'me', 'width': 1.1, 'status': 'ready', 'date': 'today', 'source': 'me', 'center': 0.0, 'filename': 'tmp_table.dat'}

Deleting the table::

    >>> os.remove('tmp_table.dat')
625941b573bcbd0ca4b2be7e
def ownerDocument(self):
    return QDomDocument
QDomNode.ownerDocument() -> QDomDocument
625941b544b2445a33931ea1
def AddSkimHeader(fp):
    # pandas.DataFrame.from_csv was removed in modern pandas; read_csv with
    # index_col=0 is the equivalent call.
    skim = pd.read_csv(fp, header=None, index_col=0)
    skim.columns = skim.index
    skim.to_csv(fp)
Adds skim headers!
625941b531939e2706e4cc72
def consider_duration(segments_, segment_gap_ratio, absolute_max_dist=None, mode='mean'):
    assert segment_gap_ratio > 0
    if hasattr(segment_gap_ratio, 'dimensionality'):
        assert segment_gap_ratio.dimensionality == U_.dimensionless.dimensionality
    durations = segments_.durations
    if mode == 'mean':
        reference_duration_for_each_gap = 0.5 * (durations[:-1] + durations[1:])
    elif mode == 'min':
        reference_duration_for_each_gap = pint_extension.minimum(durations[:-1], durations[1:])
    elif mode == 'max':
        raise NotImplementedError
        reference_duration_for_each_gap = pint_extension.maximum(durations[:-1], durations[1:])
    max_distance_due_to_duration = reference_duration_for_each_gap * segment_gap_ratio
    if absolute_max_dist is not None:
        assert absolute_max_dist.dimensionality == segments_.starts.dimensionality
        max_distance = pint_extension.minimum(max_distance_due_to_duration, absolute_max_dist)
    else:
        max_distance = max_distance_due_to_duration
    adjoined_segments = max_dist(segments_, max_distance)
    return adjoined_segments
To determine whether to adjoin two nearby segments, we consider their durations and the gap duration. We calculate a reference_duration for each gap, for comparison.

parameters:
-----------
segments_ : Segments
segment_gap_ratio : float, positive
    the ratio between the segment duration and the max gap
absolute_max_dist : float with units like the segments' domain
    when the segments are very small, we want a big "reach", so the segments stick together. when they are big, we want to prevent them from all sticking together.
mode : str
    how the reference_duration is determined from the durations before and after the gap: 'mean', 'min' or 'max'

returns:
--------
adjoined_segments : Segments

TODO:
-----
an obvious problem with 'min' mode - when there are two big pulses close together, and another small one in the middle.
625941b54a966d76dd550e0c
def test_testargs_class_1():
    testArgs = TestArgs()
    testArgs.set_base_arg(('base1', True))
    testArgs.set_base_arg(('base2', False))
    testArgs.add_test(('test_1', ('test_arg1', True)))
    testArgs.add_test(('test_2', ('base1', False)))
    data = testArgs.get_test_data()
    test_1 = data.get('test_1', None)
    assert test_1 is not None
    test_2 = data.get('test_2', None)
    assert test_2 is not None
    assert test_2.get('base1', None) is not None
    assert test_2.get('base2', None) is not None
    assert test_2.get('test_arg1', None) is None
    assert test_1.get('test_arg1', None) is not None
    assert test_1.get('base1', None) == True
    assert test_2.get('base1', None) == False
    test_1['base2'] = 99
    assert test_2['base2'] == False
    data2 = testArgs.get_test_data()
    test_3 = data2.get('test_2', None)
    assert test_3.get('base2', None) is not None
    assert test_3.get('base2', None) == False
Testing methods of TestArgs class
625941b52ae34c7f2600cf33
def to_dict(self):
    result = {}
    for attr, _ in six.iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = list(map(
                lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                value
            ))
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = dict(map(
                lambda item: (item[0], item[1].to_dict())
                if hasattr(item[1], "to_dict") else item,
                value.items()
            ))
        else:
            result[attr] = value
    if issubclass(DeviceClusterAlertConfigPaginationResponse, dict):
        for key, value in self.items():
            result[key] = value
    return result
Returns the model properties as a dict
625941b5e1aae11d1e749ab4
import array
import random

def one_point_crossover(child1, child2):
    size = min(len(child1), len(child2))
    cxpoint = random.randint(1, size - 1)
    of1 = child1[:cxpoint]
    of2 = child2[:cxpoint]
    # Fill the remainder of each offspring with the other parent's elements,
    # skipping duplicates so each element appears once
    temp1 = array.array('B', [element for element in child2 if element not in of1])
    temp2 = array.array('B', [element for element in child1 if element not in of2])
    of1 = of1 + temp1
    of2 = of2 + temp2
    child1[:], child2[:] = of1[:], of2[:]
    return child1, child2
Executes a one point crossover on the input :term:`sequence` individuals. The two individuals are modified in place. The resulting individuals will respectively have the length of the other. :param child1: The first individual participating in the crossover. :param child2: The second individual participating in the crossover. :returns: A tuple of two individuals. This function uses the :func:`~random.randint` function from the python base :mod:`random` module.
625941b55510c4643540f1fa
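A hypothetical usage of the crossover above, assuming the flattened function has been restored to ordinary Python; both parents must be permutations of the same element set for the duplicate-skipping rebuild to preserve length.

import array
import random

random.seed(0)  # pin the crossover point so the demo is reproducible
p1 = array.array('B', [0, 1, 2, 3, 4])
p2 = array.array('B', [4, 3, 2, 1, 0])
c1, c2 = one_point_crossover(p1, p2)
print(list(c1), list(c2))  # both offspring are still permutations of 0..4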
def minimize(self): <NEW_LINE> <INDENT> self.determinize() <NEW_LINE> self.remove_unreachable_states() <NEW_LINE> self.remove_dead_states() <NEW_LINE> self.remove_equivalent_states()
Minimizes the current automaton
625941b566656f66f7cbbfab
def test_format_without_config_path(parser, commandline, json_config, tmpdir, json_converter): <NEW_LINE> <INDENT> cmd_args = json_config <NEW_LINE> cmd_args.extend(commandline) <NEW_LINE> parser.parse(cmd_args) <NEW_LINE> trial = Trial(params=[ {'name': '/lr', 'type': 'real', 'value': -2.4}, {'name': '/prior', 'type': 'categorical', 'value': 'sgd'}, {'name': '/layers/1/width', 'type': 'integer', 'value': 100}, {'name': '/layers/1/type', 'type': 'categorical', 'value': 'relu'}, {'name': '/layers/2/type', 'type': 'categorical', 'value': 'sigmoid'}, {'name': '/training/lr0', 'type': 'real', 'value': 0.032}, {'name': '/training/mbs', 'type': 'integer', 'value': 64}, {'name': '/something-same', 'type': 'categorical', 'value': '3'}]) <NEW_LINE> with pytest.raises(ValueError) as exc_info: <NEW_LINE> <INDENT> parser.format(trial=trial) <NEW_LINE> <DEDENT> assert "Cannot format without a `config_path` argument." in str(exc_info.value)
Verify that parser.format() raises ValueError when config path not passed.
625941b59c8ee82313fbb576
def test_children_attribute_accepts_correct_class_instances_only(self): <NEW_LINE> <INDENT> kwargs = copy.copy(self.kwargs) <NEW_LINE> d = DAGMixinFooMixedInClass(**kwargs) <NEW_LINE> with pytest.raises(TypeError) as cm: <NEW_LINE> <INDENT> d.children = ['not', 1, '', 'of', 'correct', 'instances'] <NEW_LINE> <DEDENT> assert str(cm.value) == 'DAGMixinFooMixedInClass.children should be a list of ' 'DAGMixinFooMixedInClass (or derivative) instances, not str'
testing if the children attribute accepts only correct class instances
625941b5b830903b967e9719
@login_required <NEW_LINE> def excluir_itemhorario(request, id_itemhorario, id_horario): <NEW_LINE> <INDENT> itemhorario = ItemHorario.objects.get(id=id_itemhorario) <NEW_LINE> try: <NEW_LINE> <INDENT> itemhorario.delete() <NEW_LINE> messages.success(request, 'A exclusão foi realizada!') <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> messages.error(request, 'Houve algum problema técnico e a exclusão ' + 'não foi realizada!') <NEW_LINE> <DEDENT> return redirect('cadd:lista_itenshorario', id_horario)
Function for deleting an item from a planned schedule
625941b515fb5d323cde090a
def write_segmented_images(imbw, imc, settings, timestamp): <NEW_LINE> <INDENT> if (settings.General.loglevel == 'DEBUG') and settings.ExportParticles.export_images: <NEW_LINE> <INDENT> fname = os.path.join(settings.ExportParticles.outputpath, timestamp.strftime('D%Y%m%dT%H%M%S.%f-SEG.bmp')) <NEW_LINE> imbw_ = np.uint8(255 * imbw) <NEW_LINE> imsave(fname, imbw_) <NEW_LINE> fname = os.path.join(settings.ExportParticles.outputpath, timestamp.strftime('D%Y%m%dT%H%M%S.%f-IMC.bmp')) <NEW_LINE> imsave(fname, imc)
Writes binary images as bmp files to the same place as the hdf5 files if loglevel is DEBUG. Useful for checking thresholding and segmentation. Args: imbw : segmented (binary) image imc : corrected image that was segmented, also written to disk settings : PySilCam settings timestamp : timestamp of image collection
625941b5796e427e537b03c3
def cmd_dettach(self, cmd, args): <NEW_LINE> <INDENT> unused = cmd <NEW_LINE> if self.inferior is None: <NEW_LINE> <INDENT> self.console_print('The inferior progam was not attached.\n') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.inferior.close() <NEW_LINE> <DEDENT> self.print_prompt()
Detach from the Node.js debugger.
625941b5de87d2750b85fb8f
def __init__(self, iterable=None): <NEW_LINE> <INDENT> self._linkedlist = LinkedList() <NEW_LINE> if iterable and hasattr(iterable, "__iter__"): <NEW_LINE> <INDENT> for item in iterable: <NEW_LINE> <INDENT> self.push(item) <NEW_LINE> <DEDENT> <DEDENT> elif iterable: <NEW_LINE> <INDENT> raise TypeError
Create a new stack, from LinkedList using composition.
625941b530dc7b766590176c
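Hypothetical usage of the stack; push and the LinkedList it composes are assumed to be defined elsewhere in the same module. Composition keeps the stack's public surface small: it forwards storage to the linked list instead of inheriting its whole API.

s = Stack([1, 2, 3])  # pushes 1, then 2, then 3
s.push(4)             # 4 is now on top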
def _extend_types(self, data): <NEW_LINE> <INDENT> pass
Extend the type-converter
625941b50fa83653e4656dbf
def _update_job(self, c, id_, crabid=None, command=None, time=None, timezone=None): <NEW_LINE> <INDENT> fields = ['installed=CURRENT_TIMESTAMP', 'deleted=NULL'] <NEW_LINE> params = [] <NEW_LINE> if crabid is not None: <NEW_LINE> <INDENT> fields.append('crabid=?') <NEW_LINE> params.append(crabid) <NEW_LINE> <DEDENT> if command is not None: <NEW_LINE> <INDENT> fields.append('command=?') <NEW_LINE> params.append(command) <NEW_LINE> <DEDENT> if time is not None: <NEW_LINE> <INDENT> fields.append('time=?') <NEW_LINE> params.append(time) <NEW_LINE> <DEDENT> if timezone is not None: <NEW_LINE> <INDENT> fields.append('timezone=?') <NEW_LINE> params.append(timezone) <NEW_LINE> <DEDENT> params.append(id_) <NEW_LINE> c.execute('UPDATE job SET ' + ', '.join(fields) + ' ' 'WHERE id=?', params)
Marks a job as not deleted, and updates its information. Only fields not given as None are updated.
625941b5379a373c97cfa94c
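A self-contained illustration of the dynamic UPDATE the method assembles; the job schema below is a minimal stand-in, not the project's real table.

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE job (id INTEGER PRIMARY KEY, crabid TEXT, '
             'command TEXT, time TEXT, timezone TEXT, installed TEXT, deleted TEXT)')
conn.execute("INSERT INTO job (id, deleted) VALUES (1, 'yes')")

fields = ['installed=CURRENT_TIMESTAMP', 'deleted=NULL']
params = []
for column, value in [('crabid', 'nightly'), ('command', 'backup.sh')]:
    if value is not None:          # only fields that were actually given
        fields.append(column + '=?')
        params.append(value)
params.append(1)                   # job id for the WHERE clause
conn.execute('UPDATE job SET ' + ', '.join(fields) + ' WHERE id=?', params)
print(conn.execute('SELECT crabid, command, deleted FROM job').fetchone())
# -> ('nightly', 'backup.sh', None)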
def prepare_row(self): <NEW_LINE> <INDENT> pass
Prepare row to be parsed
625941b591af0d3eaac9b815
def glInitDrawBuffersATI(): <NEW_LINE> <INDENT> from OpenGL import extensions <NEW_LINE> return extensions.hasGLExtension( EXTENSION_NAME )
Return boolean indicating whether this extension is available
625941b5004d5f362079a139
def del_folder(folder_name, worked_dir): <NEW_LINE> <INDENT> folder_path = os.path.join(worked_dir, folder_name) <NEW_LINE> if os.path.isfile(folder_path): <NEW_LINE> <INDENT> os.remove(folder_path) <NEW_LINE> print('Файл успешно удален.') <NEW_LINE> <DEDENT> elif os.path.isdir(folder_path): <NEW_LINE> <INDENT> os.rmdir(folder_path) <NEW_LINE> print('Папка успешно удалена.') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('Нет такой папки\файла.')
Deletes a folder or file in the working directory :param folder_name: name of the folder/file, str :param worked_dir: full path to the working directory, str
625941b58e71fb1e9831d5af
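The os.rmdir call above fails on non-empty directories; a hedged variant using shutil covers that case (a sketch, not the author's code).

import os
import shutil

def delete_path(name, worked_dir):
    path = os.path.join(worked_dir, name)
    if os.path.isfile(path):
        os.remove(path)
    elif os.path.isdir(path):
        shutil.rmtree(path)  # removes the directory tree, empty or not
    else:
        print('No such file or folder.')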
def __init__(self, model): <NEW_LINE> <INDENT> self._model = model <NEW_LINE> if self.ALLOWED_SETTINGS: <NEW_LINE> <INDENT> self.update_settings({setting: self.ALLOWED_SETTINGS[setting][0] for setting in self.ALLOWED_SETTINGS})
Create a new instance of this visualization. `BaseVisualization` is an interface and should only be instantiated via a subclass. Args: model (:obj:`.models.model.BaseModel`): NN model to be visualized.
625941b5d268445f265b4c76
def get_pos(self, param_list=None): <NEW_LINE> <INDENT> pos_dict = self._magnet_device.get_pos(param_list) <NEW_LINE> return pos_dict
Gets current position of the stage. @param list param_list: optional, if a specific position of an axis is desired, then the labels of the needed axis should be passed as the param_list. If nothing is passed, then from each axis the position is asked. @return dict: with keys being the axis labels and item the current position.
625941b5cdde0d52a9e52e30
def binary_search_iterative(an_array, key): <NEW_LINE> <INDENT> left = 0 <NEW_LINE> right = len(an_array) - 1 <NEW_LINE> while left <= right: <NEW_LINE> <INDENT> mid = (left + right) // 2 <NEW_LINE> if an_array[mid] == key: <NEW_LINE> <INDENT> return mid <NEW_LINE> <DEDENT> elif key > an_array[mid]: <NEW_LINE> <INDENT> left = mid + 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> right = mid - 1 <NEW_LINE> <DEDENT> <DEDENT> return -1
>>> binary_search_iterative([2, 4, 6, 8, 10], 10) 4 >>> binary_search_iterative([2, 4, 6, 8, 10], 9) -1
625941b58c3a8732951581bf
def collate(data, tokenizer, block_size, device): <NEW_LINE> <INDENT> data = [x for x in data if not len(x[1]) == 0] <NEW_LINE> names = [name for name, _, _ in data] <NEW_LINE> summaries = [" ".join(summary_list) for _, _, summary_list in data] <NEW_LINE> encoded_text = [encode_for_summarization(story, summary, tokenizer) for _, story, summary in data] <NEW_LINE> encoded_stories = torch.tensor( [truncate_or_pad(story, block_size, tokenizer.pad_token_id) for story, _ in encoded_text] ) <NEW_LINE> encoder_token_type_ids = compute_token_type_ids(encoded_stories, tokenizer.cls_token_id) <NEW_LINE> encoder_mask = build_mask(encoded_stories, tokenizer.pad_token_id) <NEW_LINE> batch = Batch( document_names=names, batch_size=len(encoded_stories), src=encoded_stories.to(device), segs=encoder_token_type_ids.to(device), mask_src=encoder_mask.to(device), tgt_str=summaries, ) <NEW_LINE> return batch
Collate formats the data passed to the data loader. In particular we tokenize the data batch after batch to avoid keeping them all in memory. We output the data as a namedtuple to fit the original BertAbs's API.
625941b5d10714528d5ffae1
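The BertAbs example defines the truncate_or_pad helper used above elsewhere; this is a plausible reconstruction, not the verified original: clip token ids to block_size or right-pad with the pad token id.

def truncate_or_pad(sequence, block_size, pad_token_id):
    if len(sequence) > block_size:
        return sequence[:block_size]
    return sequence + [pad_token_id] * (block_size - len(sequence))

print(truncate_or_pad([5, 6, 7], 5, 0))           # [5, 6, 7, 0, 0]
print(truncate_or_pad([5, 6, 7, 8, 9, 1], 5, 0))  # [5, 6, 7, 8, 9]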
def mul(x: np.ndarray, y: Union[np.ndarray, float, int]) -> np.ndarray: <NEW_LINE> <INDENT> return calculate(x, y, 'mul')
Apply multiply with an array of PaillierEncryptedNumber. One of x and y should be np.ndarray. If both x and y are arrays, they should have the same shape. Args: x, np.ndarray of PaillierEncryptedNumber y, np.ndarray, float, int. !!! Attention: y may only contain scalar types; PaillierEncryptedNumber is not allowed. !!! Returns: x*y, result with the same shape as x. Example: >>> x = np.random.randint(0, 1000, (100,)) >>> y = np.random.randint(0, 10000, (100,)) >>> en_x = pe.encrypt(x) >>> result = pd.decrypt(parallel_ops.mul(en_x, y)) >>> assertAlmostEqual(x*y, result)
625941b5a219f33f34628777
def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.save(user=self.request.user)
Create a new object
625941b50a50d4780f666c91
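A sketch of the Django REST Framework viewset this hook typically lives in; the Recipe name and the auth classes are placeholders, not taken from the source.

from rest_framework import authentication, permissions, viewsets

class RecipeViewSet(viewsets.ModelViewSet):
    """Attach the authenticated user to every object created via POST."""
    authentication_classes = (authentication.TokenAuthentication,)
    permission_classes = (permissions.IsAuthenticated,)
    # queryset and serializer_class would be set on the real viewset

    def perform_create(self, serializer):
        serializer.save(user=self.request.user)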
def authenhandler(request): <NEW_LINE> <INDENT> request.user = '' <NEW_LINE> if isValidSession(request): <NEW_LINE> <INDENT> return apache.OK <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> options = request.get_options() <NEW_LINE> login_url = options.get('PLONEPROXY_LOGIN_URL', '/ext/login') <NEW_LINE> util.redirect(request, "%s?next=%s" % (login_url, request.unparsed_uri)) <NEW_LINE> return apache.HTTP_UNAUTHORIZED
a very simple PythonAuthenHandler based on Django's default Session management.
625941b5d6c5a10208143e49
def rotateRight(self, head, k): <NEW_LINE> <INDENT> if not head or not head.next: <NEW_LINE> <INDENT> return head <NEW_LINE> <DEDENT> h, size = head, 1 <NEW_LINE> while h.next: <NEW_LINE> <INDENT> h = h.next <NEW_LINE> size += 1 <NEW_LINE> <DEDENT> h.next = head <NEW_LINE> h = head <NEW_LINE> for _ in range(size-k % size-1): <NEW_LINE> <INDENT> h = h.next <NEW_LINE> <DEDENT> head = h.next <NEW_LINE> h.next = None <NEW_LINE> return head
:type head: ListNode :type k: int :rtype: ListNode
625941b545492302aab5e0c1
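A minimal driver for the method above, assuming it sits in the usual LeetCode-style Solution class; ListNode and the list builder below are the standard scaffolding, not from the source.

class ListNode:
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next

def build(values):
    # build a singly linked list from a Python list
    head = node = ListNode(values[0])
    for v in values[1:]:
        node.next = ListNode(v)
        node = node.next
    return head

head = Solution().rotateRight(build([1, 2, 3, 4, 5]), 2)
out = []
while head:
    out.append(head.val)
    head = head.next
print(out)  # [4, 5, 1, 2, 3]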
@pr.command('get') <NEW_LINE> @click.argument('number', type=click.types.INT, metavar='<number>') <NEW_LINE> @files_opt <NEW_LINE> @commits_opt <NEW_LINE> @open_opt <NEW_LINE> @click.pass_context <NEW_LINE> def get_pr(ctx, number, files, commits, open): <NEW_LINE> <INDENT> client = ctx.obj.client <NEW_LINE> pull = client.get(number) <NEW_LINE> print_pull(pull) <NEW_LINE> if files: <NEW_LINE> <INDENT> click.echo('\n Files:') <NEW_LINE> for f in pull.files: <NEW_LINE> <INDENT> click.echo(colored('{0:4}{1}'.format('', f.name), 'green')) <NEW_LINE> <DEDENT> <DEDENT> if commits: <NEW_LINE> <INDENT> click.echo('\n Commits:') <NEW_LINE> for c in pull.commits: <NEW_LINE> <INDENT> line = colored('{0:4}{1:8}'.format('', c.sha[:7]), 'yellow') <NEW_LINE> line += colored(c.message, 'blue') <NEW_LINE> click.echo(line) <NEW_LINE> <DEDENT> <DEDENT> if open: <NEW_LINE> <INDENT> webbrowser.open(pull.url)
Retrieve info about PR number <number>, optionally including the list of files and commits associated with the PR.
625941b5091ae35668666d68
def imagecollection(self, image_collection_id) -> 'ImageCollection': <NEW_LINE> <INDENT> if self._isVersion040(): <NEW_LINE> <INDENT> return self._image_040(image_collection_id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> from .imagery import RestImagery <NEW_LINE> collection = RestImagery({'collection_id': image_collection_id}, self) <NEW_LINE> self.fetch_metadata(image_collection_id, collection) <NEW_LINE> return collection
Get an image collection by id. :param image_collection_id: String image collection identifier :return: collection: RestImageCollection the image collection with the given id
625941b5fff4ab517eb2f23b
def _getallplugins(self): <NEW_LINE> <INDENT> msg = [] <NEW_LINE> plugins = sorted([i['plugin'] for i in self.loadedpluginsd.values()], key=operator.attrgetter('package')) <NEW_LINE> packageheader = [] <NEW_LINE> msg.append("%-10s : %-25s %-10s %-5s %s@w" % ('Short Name', 'Name', 'Author', 'Vers', 'Purpose')) <NEW_LINE> msg.append('-' * 75) <NEW_LINE> for tpl in plugins: <NEW_LINE> <INDENT> if tpl.package not in packageheader: <NEW_LINE> <INDENT> if packageheader: <NEW_LINE> <INDENT> msg.append('') <NEW_LINE> <DEDENT> packageheader.append(tpl.package) <NEW_LINE> limp = 'plugins.%s' % tpl.package <NEW_LINE> mod = __import__(limp) <NEW_LINE> try: <NEW_LINE> <INDENT> desc = getattr(mod, tpl.package).DESCRIPTION <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> desc = '' <NEW_LINE> <DEDENT> msg.append('@GPackage: %s%s@w' % (tpl.package, ' - ' + desc if desc else '')) <NEW_LINE> msg.append('@G' + '-' * 75 + '@w') <NEW_LINE> <DEDENT> msg.append("%-10s : %-25s %-10s %-5s %s@w" % (tpl.sname, tpl.name, tpl.author, tpl.version, tpl.purpose)) <NEW_LINE> <DEDENT> return msg
Create a message listing all loaded plugins
625941b52c8b7c6e89b355c6
def __str__(self): <NEW_LINE> <INDENT> sp = round(self.staying_power(), 3) <NEW_LINE> cf = round(self.continuous_fire(), 3) <NEW_LINE> pf = round(self.pulse_fire(), 3) <NEW_LINE> sideString = "{:<10s} – SP: {:<6} | CF: {:<6} | PF: {:<6}".format(self.name, sp, cf, pf) <NEW_LINE> return sideString
String override.
625941b5a79ad161976cbf47
def tenants_delete_with_http_info(self, id, **kwargs): <NEW_LINE> <INDENT> all_params = ['id'] <NEW_LINE> all_params.append('callback') <NEW_LINE> all_params.append('_return_http_data_only') <NEW_LINE> all_params.append('_preload_content') <NEW_LINE> all_params.append('_request_timeout') <NEW_LINE> params = locals() <NEW_LINE> for key, val in iteritems(params['kwargs']): <NEW_LINE> <INDENT> if key not in all_params: <NEW_LINE> <INDENT> raise TypeError( "Got an unexpected keyword argument '%s'" " to method tenants_delete" % key ) <NEW_LINE> <DEDENT> params[key] = val <NEW_LINE> <DEDENT> del params['kwargs'] <NEW_LINE> if ('id' not in params) or (params['id'] is None): <NEW_LINE> <INDENT> raise ValueError("Missing the required parameter `id` when calling `tenants_delete`") <NEW_LINE> <DEDENT> collection_formats = {} <NEW_LINE> path_params = {} <NEW_LINE> if 'id' in params: <NEW_LINE> <INDENT> path_params['id'] = params['id'] <NEW_LINE> <DEDENT> query_params = [] <NEW_LINE> header_params = {} <NEW_LINE> form_params = [] <NEW_LINE> local_var_files = {} <NEW_LINE> body_params = None <NEW_LINE> auth_settings = [] <NEW_LINE> return self.api_client.call_api('/api/tenancy/tenants/{id}/', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.tenants_delete_with_http_info(id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int id: A unique integer value identifying this tenant. (required) :return: None If the method is called asynchronously, returns the request thread.
625941b582261d6c526ab2a5
def test_no_update_in_minimum(self): <NEW_LINE> <INDENT> def myloss(circuit_output, X): <NEW_LINE> <INDENT> return (circuit_output - 1.) ** 2 <NEW_LINE> <DEDENT> def myregularizer(regularized_params): <NEW_LINE> <INDENT> return 0. <NEW_LINE> <DEDENT> self.hyperp['loss'] = myloss <NEW_LINE> self.hyperp['regularizer'] = myregularizer <NEW_LINE> for opt in ALLOWED_OPTIMIZERS_TF: <NEW_LINE> <INDENT> with self.subTest(i=opt): <NEW_LINE> <INDENT> self.hyperp['optimizer'] = opt <NEW_LINE> after = self.get_circuit_params(steps=3) <NEW_LINE> self.assertAlmostEqual(1., after)
Test if the parameter does not change if we start in the minimum of a quadratic loss centered at 1.
625941b523849d37ff7b2e94
def _check_sparse_coo_members_individually( check_tensors: Callable[..., Optional[_TestingErrorMeta]] ) -> Callable[..., Optional[_TestingErrorMeta]]: <NEW_LINE> <INDENT> @functools.wraps(check_tensors) <NEW_LINE> def wrapper(actual: Tensor, expected: Tensor, **kwargs: Any) -> Optional[_TestingErrorMeta]: <NEW_LINE> <INDENT> if not actual.is_sparse: <NEW_LINE> <INDENT> return check_tensors(actual, expected, **kwargs) <NEW_LINE> <DEDENT> if actual._nnz() != expected._nnz(): <NEW_LINE> <INDENT> return _TestingErrorMeta( AssertionError, f"The number of specified values does not match: {actual._nnz()} != {expected._nnz()}" ) <NEW_LINE> <DEDENT> kwargs_equal = dict(kwargs, rtol=0, atol=0) <NEW_LINE> error_meta = check_tensors(actual._indices(), expected._indices(), **kwargs_equal) <NEW_LINE> if error_meta: <NEW_LINE> <INDENT> return error_meta.amend_msg(postfix="\n\nThe failure occurred for the indices.") <NEW_LINE> <DEDENT> error_meta = check_tensors(actual._values(), expected._values(), **kwargs) <NEW_LINE> if error_meta: <NEW_LINE> <INDENT> return error_meta.amend_msg(postfix="\n\nThe failure occurred for the values.") <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> return wrapper
Decorates strided tensor check functions to individually handle sparse COO members. If the inputs are not sparse COO, this decorator is a no-op. Args: check_tensors (Callable[[Tensor, Tensor], Optional[Exception]]): Tensor check function for strided tensors.
625941b58c0ade5d55d3e7c1
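How the decorator is meant to be applied — the check function below is hypothetical, and _TestingErrorMeta comes from the same private torch.testing module as the decorator.

import torch

@_check_sparse_coo_members_individually
def check_close(actual, expected, rtol=1e-5, atol=1e-8):
    if not torch.allclose(actual, expected, rtol=rtol, atol=atol):
        return _TestingErrorMeta(AssertionError, "values are not close")
    return None

i = torch.tensor([[0, 1], [1, 0]])
v = torch.tensor([1.0, 2.0])
a = torch.sparse_coo_tensor(i, v, (2, 2)).coalesce()
b = torch.sparse_coo_tensor(i, v + 1e-9, (2, 2)).coalesce()
print(check_close(a, b))  # None: indices match exactly, values are close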
def test_peek(): <NEW_LINE> <INDENT> q = Queue() <NEW_LINE> q.enqueue(1) <NEW_LINE> q.enqueue(2) <NEW_LINE> # assumes this Queue's peek() looks at the most recently enqueued value <NEW_LINE> actual = q.peek() <NEW_LINE> expected = 2 <NEW_LINE> assert actual == expected
Can successfully peek into a queue, seeing the expected value
625941b526238365f5f0ec6b