Dataset columns: code (string, 4 to 4.48k characters), docstring (string, 1 to 6.45k characters), _id (string, 24 characters).
def test_Lobby(self): <NEW_LINE> <INDENT> driver = self.driver <NEW_LINE> self.test_LoginSuccess() <NEW_LINE> driver.find_element_by_link_text("Lobby").click() <NEW_LINE> widget = driver.find_element_by_xpath(".//*[@id='content']/div[1]/h3") <NEW_LINE> mapsettings = widget.text <NEW_LINE> assert 'Map Settings' in mapsettings <NEW_LINE> popup = driver.find_element_by_xpath(".//*[@id='alert-div']/div") <NEW_LINE> xmpperr = popup.text <NEW_LINE> assert 'Xmpp is not up or xmlrpc module is not loaded' not in xmpperr
Author: Dusty Stokes ([email protected])
625941b6e1aae11d1e749ac4
def liberate(target): <NEW_LINE> <INDENT> UpdateState() <NEW_LINE> importlib.reload(State) <NEW_LINE> if (target.isPerson): <NEW_LINE> <INDENT> if(State.CanLiberate()): <NEW_LINE> <INDENT> for line in target.lastWords.split('\n'): <NEW_LINE> <INDENT> finalWords = "" <NEW_LINE> for c in line: <NEW_LINE> <INDENT> finalWords+=chr((ord(c)-42)%95+32) <NEW_LINE> <DEDENT> Say(target.__name__,finalWords) <NEW_LINE> <DEDENT> Update(State.__file__, target.__name__ + "_liberated","True") <NEW_LINE> os.remove(target.__file__) <NEW_LINE> importlib.reload(State) <NEW_LINE> if(State.allLiberated()): <NEW_LINE> <INDENT> Wait(3) <NEW_LINE> Pause() <NEW_LINE> print("!!!!!!GAME OVER!!!!!!!\n") <NEW_LINE> for line in GameOverMsg.split('\n'): <NEW_LINE> <INDENT> finalWords = "" <NEW_LINE> for c in line: <NEW_LINE> <INDENT> finalWords+=chr((ord(c)-42)%95+32) <NEW_LINE> <DEDENT> Say("Developers",finalWords) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print("\nIt seems there is still unease in the world, and you are unable to liberate the "+ target.__name__+".\n") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print("\n"+target.__name__+" is not a person and cannot be liberated.\n")
COMMAND: LIBERATE For liberating persons that no longer have woes in the world. Enter "liberate(<name>)" to liberate a person.
625941b6507cdc57c6306ae3
def gather_binaries(): <NEW_LINE> <INDENT> tags = [tag.decode() for tag in subprocess.check_output(["git", "tag"]).split()] <NEW_LINE> tags.remove("PRERELEASE") <NEW_LINE> rust_tags = list(filter(lambda tag: not tag.startswith("0"), tags)) <NEW_LINE> rust_tags = list(filter(lambda tag: not tag.startswith("1"), rust_tags)) <NEW_LINE> rust_tags = list(sorted(rust_tags, key=natural_keys))[-3:] <NEW_LINE> with tempfile.TemporaryDirectory(prefix="riff-benchmark") as clonedir: <NEW_LINE> <INDENT> subprocess.run(["git", "clone", "-b", "master", ".", clonedir], check=True) <NEW_LINE> build_latest_commit(clonedir) <NEW_LINE> for tag in rust_tags: <NEW_LINE> <INDENT> binary_name = os.path.join(BINDIR, f"riff-{tag}") <NEW_LINE> if os.path.isfile(binary_name): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> print() <NEW_LINE> print(f"Building missing binary: {binary_name}") <NEW_LINE> build_binary(clonedir, tag, binary_name) <NEW_LINE> <DEDENT> <DEDENT> print() <NEW_LINE> print("Building current sources...") <NEW_LINE> cargo_build() <NEW_LINE> shutil.copy("target/release/riff", os.path.join(BINDIR, "riff-current"))
Gather binaries to benchmark in BINDIR
625941b6046cf37aa974cb5c
def __extract_part_price(self, part): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> distributor = part["distributors"] <NEW_LINE> if len(distributor) == 1: <NEW_LINE> <INDENT> distributor = distributor[0] <NEW_LINE> return distributor['price'] <NEW_LINE> <DEDENT> return 0 <NEW_LINE> <DEDENT> except (KeyError, IndexError): <NEW_LINE> <INDENT> return 0
:param part: PartKeepr API response, content of ["hydra:member"] for a particular part :return: int price if available, otherwise 0
625941b63346ee7daa2b2b7a
def _init_subjects(self): <NEW_LINE> <INDENT> self.subject_data = {} <NEW_LINE> for s in self.project.subjects: <NEW_LINE> <INDENT> if s.has_file(self.niftii) and s.has_file(self.behavioral): <NEW_LINE> <INDENT> nii_path = s.file_path(self.niftii) <NEW_LINE> beh_path = s.file_path(self.behavioral) <NEW_LINE> subdata = SubjectData(nii_path, beh_path, self.trs, self.lag) <NEW_LINE> self.subject_data[s.name] = subdata <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(s.name, "is missing data at", s.file_path(self.niftii), ", excluding...") <NEW_LINE> if s.name == self.test_subj: <NEW_LINE> <INDENT> raise ValueError("test_subj data not found, exiting...") <NEW_LINE> <DEDENT> self.training_subjects.remove(s.name) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.nii_shape = subdata.nii_shape <NEW_LINE> <DEDENT> except UnboundLocalError: <NEW_LINE> <INDENT> print("No subjects found!")
Create a dictionary of SubjectData objects for every subject
625941b67b180e01f3dc4618
def validate(number): <NEW_LINE> <INDENT> number = compact(number) <NEW_LINE> if len(number) not in (10, 13): <NEW_LINE> <INDENT> raise InvalidLength() <NEW_LINE> <DEDENT> if not isdigits(number): <NEW_LINE> <INDENT> raise InvalidFormat() <NEW_LINE> <DEDENT> if number[2:9] == '0000000': <NEW_LINE> <INDENT> raise InvalidComponent() <NEW_LINE> <DEDENT> if len(number) == 13 and number[-3:] == '000': <NEW_LINE> <INDENT> raise InvalidComponent() <NEW_LINE> <DEDENT> if number[9] != calc_check_digit(number): <NEW_LINE> <INDENT> raise InvalidChecksum() <NEW_LINE> <DEDENT> return number
Check if the number is a valid Vietnam MST number. This checks the length, formatting and check digit.
625941b663d6d428bbe44300
def threeSumClosest(self, nums, target): <NEW_LINE> <INDENT> nums = sorted(nums) <NEW_LINE> dist, closest = float('inf'), 0 <NEW_LINE> for i in range(len(nums) - 2): <NEW_LINE> <INDENT> j = i + 1 <NEW_LINE> k = len(nums) - 1 <NEW_LINE> while (j < k): <NEW_LINE> <INDENT> if nums[j] + nums[k] == target - nums[i]: <NEW_LINE> <INDENT> return target <NEW_LINE> <DEDENT> curr_dist = abs(nums[i] + nums[j] + nums[k] - target) <NEW_LINE> if curr_dist < dist: <NEW_LINE> <INDENT> dist = curr_dist <NEW_LINE> closest = nums[i] + nums[j] + nums[k] <NEW_LINE> <DEDENT> if nums[j] + nums[k] < target - nums[i]: <NEW_LINE> <INDENT> j += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> k -=1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return closest
:type nums: List[int] :type target: int :rtype: int
625941b6b7558d58953c4d2d
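A quick usage sketch for the threeSumClosest method above, assuming it sits on a Solution class as is usual for such snippets:
solver = Solution()
# the closest achievable sum to target=1 is -1 + 2 + 1 = 2
assert solver.threeSumClosest([-1, 2, 1, -4], 1) == 2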
def compress_with_gs(input_file_path, output_file_path, power=3): <NEW_LINE> <INDENT> quality = { 0: '/default', 1: '/prepress', 2: '/printer', 3: '/ebook', 4: '/screen' } <NEW_LINE> if not os.path.isfile(input_file_path): <NEW_LINE> <INDENT> logging.fatal("Error: invalid path for input PDF file") <NEW_LINE> return <NEW_LINE> <DEDENT> if input_file_path.split('.')[-1].lower() != 'pdf': <NEW_LINE> <INDENT> logging.fatal("Error: input file is not a PDF") <NEW_LINE> return <NEW_LINE> <DEDENT> logging.info("Compress PDF...") <NEW_LINE> initial_size = os.path.getsize(input_file_path) <NEW_LINE> try: <NEW_LINE> <INDENT> subprocess.call(['gs', '-sDEVICE=pdfwrite', '-dCompatibilityLevel=1.4', '-dPDFSETTINGS={}'.format(quality[power]), '-dFILTERTEXT', '-dNOPAUSE', '-dQUIET', '-dBATCH', '-sOutputFile={}'.format(output_file_path), input_file_path] ) <NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> if e.errno == errno.ENOENT: <NEW_LINE> <INDENT> logging.error("ghostscript not found. Proceeding without compression.") <NEW_LINE> shutil.copyfile(input_file_path, output_file_path) <NEW_LINE> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> final_size = os.path.getsize(output_file_path) <NEW_LINE> ratio = 1 - (final_size / initial_size) <NEW_LINE> logging.info("Compression by {0:.0%}.".format(ratio)) <NEW_LINE> logging.info("Final file size is {0:.1f}MB".format(final_size / 1000000)) <NEW_LINE> return ratio
Function to compress PDF and remove text via Ghostscript command line interface :param power: 0,1,2,3,4
625941b6ac7a0e7691ed3eeb
def write(self, positions, topology, unitcell_lengths=None, unitcell_angles=None): <NEW_LINE> <INDENT> if not self._mode == 'w': <NEW_LINE> <INDENT> raise ValueError('file not opened for writing') <NEW_LINE> <DEDENT> from openmm.app import PDBxFile <NEW_LINE> from openmm.unit import nanometers <NEW_LINE> if self._next_model == 0: <NEW_LINE> <INDENT> self._openmm_topology = topology.to_openmm() <NEW_LINE> if unitcell_lengths is None: <NEW_LINE> <INDENT> self._openmm_topology.setPeriodicBoxVectors(None) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> vectors = lengths_and_angles_to_box_vectors(*unitcell_lengths[0], *unitcell_angles[0]) <NEW_LINE> self._openmm_topology.setPeriodicBoxVectors(vectors*nanometers) <NEW_LINE> <DEDENT> PDBxFile.writeHeader(self._openmm_topology, self._file) <NEW_LINE> self._next_model = 1 <NEW_LINE> <DEDENT> if len(positions.shape) == 3: <NEW_LINE> <INDENT> positions = positions[0] <NEW_LINE> <DEDENT> PDBxFile.writeModel(self._openmm_topology, positions*nanometers, self._file, self._next_model) <NEW_LINE> self._next_model += 1
Write one frame of a molecular dynamics trajectory to disk in PDBx/mmCIF format. Parameters ---------- positions : array_like The list of atomic positions to write. topology : mdtraj.Topology The Topology defining the model to write. unitcell_lengths : {tuple, None} Lengths of the three unit cell vectors, or None for a non-periodic system unitcell_angles : {tuple, None} Angles between the three unit cell vectors, or None for a non-periodic system
625941b6fff4ab517eb2f24a
def process(self): <NEW_LINE> <INDENT> data_list = list() <NEW_LINE> self.seqs = list() <NEW_LINE> for file in self.raw_file_names: <NEW_LINE> <INDENT> m = re.search(r'(\d{3}_)?PF\d{5}_\d+', file) <NEW_LINE> if m is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> aln = osp.join(self.alns_dir, m.group(0) + '.fasta') <NEW_LINE> tree = Phylo.read(file, 'newick') <NEW_LINE> with open(aln) as fin: <NEW_LINE> <INDENT> seqs = SeqIO.parse(fin, 'fasta') <NEW_LINE> seq_dict = {seq.name: seq.seq for seq in seqs} <NEW_LINE> <DEDENT> features, ei, el, y, lud = to_coo(tree, self.target_tree, seq_dict) <NEW_LINE> self.seqs.append((aln, lud)) <NEW_LINE> data_list.append(Data(x=features, edge_index=ei, edge_attr=el, y=y, filename=file)) <NEW_LINE> <DEDENT> if self.pre_filter is not None: <NEW_LINE> <INDENT> data_list = [data for data in data_list if self.pre_filter(data)] <NEW_LINE> <DEDENT> if self.pre_transform is not None: <NEW_LINE> <INDENT> data_list = [self.pre_transform(data) for data in data_list] <NEW_LINE> <DEDENT> self.data, self.slices = self.collate(data_list) <NEW_LINE> torch.save((self.data, self.slices), self.processed_paths[0]) <NEW_LINE> with open(self.processed_paths[1], 'wb') as fout: <NEW_LINE> <INDENT> pickle.dump(self.seqs, fout)
Loads files from the disk and processes them :return: None
625941b685dfad0860c3ac6a
def contact(request): <NEW_LINE> <INDENT> text = Text.objects.get(textblock__shortname='contact_ain7') <NEW_LINE> from django import forms <NEW_LINE> class ContactForm(forms.Form): <NEW_LINE> <INDENT> name = forms.CharField(max_length=200) <NEW_LINE> email = forms.EmailField(max_length=200, required=True) <NEW_LINE> message = forms.CharField(widget=forms.Textarea, required=True) <NEW_LINE> <DEDENT> initials = {} <NEW_LINE> if request.user.is_authenticated(): <NEW_LINE> <INDENT> initials = {'name': request.user.person.complete_name, 'email': request.user.person.mail} <NEW_LINE> <DEDENT> form = ContactForm(request.POST or initials) <NEW_LINE> if request.method == 'POST' and form.is_valid(): <NEW_LINE> <INDENT> msg = form.cleaned_data['message'] <NEW_LINE> msg += "\n\n--\n" <NEW_LINE> msg += u"Message généré par la page de contact AIn7" <NEW_LINE> send_mail( (u"Question de {name} reçue sur le site web").format(name=form.cleaned_data['name']), msg, ("{name} <{email}>").format(email=form.cleaned_data['email'], name=form.cleaned_data['name']), ['[email protected]'], fail_silently=True, ) <NEW_LINE> form = None <NEW_LINE> messages.success(request, _("Your message have successfully been sent to the association")) <NEW_LINE> <DEDENT> return render(request, 'association/contact.html', { 'count_members': count_members(), 'form': form, 'text': text, } )
contact page
625941b6aad79263cf39084c
def encode_token(self, user_id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> payload = { 'exp': datetime.utcnow() + timedelta(days=14), 'iat': datetime.utcnow(), 'sub': user_id } <NEW_LINE> return jwt.encode(payload, current_app.config.get('SECRET'), algorithm='HS256') <NEW_LINE> <DEDENT> except Exception as error: <NEW_LINE> <INDENT> return str(error)
Generate user token
625941b660cbc95b062c635a
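A hedged sketch of the decoding counterpart to encode_token above, assuming the same PyJWT library and SECRET config value; the function name is illustrative:
import jwt
def decode_token(token, secret):
    # return the user id stored in 'sub', or an error message on failure
    try:
        payload = jwt.decode(token, secret, algorithms=['HS256'])
        return payload['sub']
    except jwt.ExpiredSignatureError:
        return 'Token expired. Please log in again.'
    except jwt.InvalidTokenError:
        return 'Invalid token. Please log in again.'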
def registerHandler(self, p_int, QObject): <NEW_LINE> <INDENT> pass
registerHandler(self, int, QObject)
625941b68e05c05ec3eea182
def cube_to_xyz(cube, ztop=-1): <NEW_LINE> <INDENT> assert len(cube.shape)==3, "cube is not 3-D" <NEW_LINE> data = cube[:ztop,:,:].data <NEW_LINE> if np.ma.is_masked(data): <NEW_LINE> <INDENT> data=data.data <NEW_LINE> <DEDENT> xyz = np.moveaxis(np.moveaxis(data,0,2),0,1) <NEW_LINE> return xyz
Take an iris cube [lev, lat, lon], pull out the data and reshape it to [lon, lat, lev]
625941b6ec188e330fd5a5b8
def prepare(card, browser): <NEW_LINE> <INDENT> if card.did != defaultID: <NEW_LINE> <INDENT> noteToDeck(card.nid, browser, did=card.did)
Record that the card's note's deck is the card's deck, if it is not the default deck
625941b6498bea3a759b98c3
def test_invalids(self): <NEW_LINE> <INDENT> for number in self.invalid_numbers: <NEW_LINE> <INDENT> self.assertRaises(forms.ValidationError, validate_dutchbanknumber, number)
Tests that the validate_dutchbanknumber function raises the right error on invalid input
625941b607d97122c417869c
def maxCoins(self, nums): <NEW_LINE> <INDENT> if len(nums) == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> nums = [1] + nums + [1] <NEW_LINE> n = len(nums) <NEW_LINE> dp = [[0 for _ in range(n)] for _ in range(n)] <NEW_LINE> for l in range(2, n): <NEW_LINE> <INDENT> for i in range(n - l): <NEW_LINE> <INDENT> j = i + l <NEW_LINE> for k in range(i + 1, j): <NEW_LINE> <INDENT> dp[i][j] = max(dp[i][j], nums[i] * nums[k] * nums[j] + dp[i][k] + dp[k][j]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return dp[0][-1]
:type nums: List[int] :rtype: int
625941b691af0d3eaac9b825
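A sanity check for the maxCoins interval DP above, assuming the method is bound to a Solution class:
# bursting 1, then 5, then 3, then 8 yields 3*1*5 + 3*5*8 + 1*3*8 + 1*8*1 = 167
assert Solution().maxCoins([3, 1, 5, 8]) == 167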
def fillContent(self, context): <NEW_LINE> <INDENT> entry = context.findEntry() <NEW_LINE> for unassignedSense in [e.getUnassignedSense() for e in context.getPage().entries if e.getWordLanguage() == entry.getWordLanguage()]: <NEW_LINE> <INDENT> for reference in self.references: <NEW_LINE> <INDENT> unassignedSense.addReference(reference) <NEW_LINE> <DEDENT> <DEDENT> return
* Add external links to WordEntry
625941b67d43ff24873a2ab5
def getLayerFromLegendByTableProps( project: QgsProject, table_name: str, geom_col: str = 'geom', sql: str = '' ) -> Union[None, QgsMapLayer]: <NEW_LINE> <INDENT> _ = sql <NEW_LINE> for _, layer in list(project.mapLayers().items()): <NEW_LINE> <INDENT> if not hasattr(layer, 'providerType'): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if hasattr(layer, 'type') and layer.type() != QgsMapLayer.VectorLayer: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if not layer.providerType() in ('postgres', 'spatialite'): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> connection_params = getConnectionParameterFromDbLayer(layer) <NEW_LINE> reg = r'(\.| )?(%s)' % table_name <NEW_LINE> if connection_params and ( connection_params['table'] == table_name or (re.findall(reg, '%s' % connection_params['table']) and re.findall(reg, '%s' % connection_params['table'])[0]) ) and connection_params['geocol'] == geom_col: <NEW_LINE> <INDENT> return layer <NEW_LINE> <DEDENT> <DEDENT> return None
Get the layer from QGIS legend corresponding to a database table name (postgis or sqlite)
625941b691f36d47f21ac307
@pytest.fixture <NEW_LINE> def visited_links_good(): <NEW_LINE> <INDENT> url = "/api/visited_links/" <NEW_LINE> data = { "links": [ "https://ya.ru", "https://ya.ru?q=123", "funbox.ru", "https://stackoverflow.com/questions/11828270/how-to-exit-the-vim-editor", "https://www.youtube.com/" ] } <NEW_LINE> request_method = factory.post(url, json.dumps(data), content_type='application/json') <NEW_LINE> yield post_visited_links(request_method) <NEW_LINE> REDIS_CONNECTION.delete(STREAM)
:return: returns the expected POST request object passed to the POST request handler
625941b6507cdc57c6306ae4
def extract(chr_rom, tile): <NEW_LINE> <INDENT> address = tile * 16 <NEW_LINE> plane1 = numpy.unpackbits(chr_rom[address:address + 8]) <NEW_LINE> plane2 = numpy.unpackbits(chr_rom[address + 8:address + 16]) <NEW_LINE> plane2 = numpy.left_shift(plane2, 1) <NEW_LINE> return numpy.bitwise_or(plane1, plane2).reshape((8, 8))
Extracts a single 8x8 image from CHR ROM as a 8x8 numpy array. Args: chr_rom (array): The 4096 numpy array representing CHR ROM tile (int): The tile number to extract Returns: array: The 8x8 image from CHR ROM.
625941b66e29344779a62427
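A small illustration of how the two bit planes combine in extract above, using a synthetic CHR ROM (the values are only for demonstration):
import numpy
chr_rom = numpy.zeros(4096, dtype=numpy.uint8)
chr_rom[0:8] = 0xFF   # plane 1: every low bit set
chr_rom[8:16] = 0x00  # plane 2: every high bit clear
tile = extract(chr_rom, 0)
# each pixel ORs one bit from each plane, giving colour index 1 here
assert tile.shape == (8, 8) and (tile == 1).all()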
def load_webtool(self, tool_name): <NEW_LINE> <INDENT> webtool = self.webtools.get(tool_name) <NEW_LINE> if webtool: <NEW_LINE> <INDENT> return webtool()
TODO :param tool_name: the tool name
625941b621a7993f00bc7afa
def test_fresh_install_no_cell_mappings(self): <NEW_LINE> <INDENT> result = self.cmd._check_ironic_flavor_migration() <NEW_LINE> self.assertEqual(upgradecheck.Code.WARNING, result.code) <NEW_LINE> self.assertIn('Unable to determine ironic flavor migration without ' 'cell mappings', result.details)
Tests the scenario where we don't have any cell mappings (no cells v2 setup yet) so we don't know what state we're in and we return a warning.
625941b6c432627299f04a56
def peak_begin_dates(start="01/01/1972", end=datetime.now()): <NEW_LINE> <INDENT> rec_dates = DataReader("USRECQ", "fred", start=start) <NEW_LINE> one_vals = np.where(rec_dates == 1)[0] <NEW_LINE> rec_start = [one_vals[0]] <NEW_LINE> for d in one_vals: <NEW_LINE> <INDENT> if d > max(rec_start) + 12: <NEW_LINE> <INDENT> rec_start.append(d) <NEW_LINE> <DEDENT> <DEDENT> rec_startind = rec_dates.index[rec_start] <NEW_LINE> return rec_startind
Use the fred dataset `USRECQ` to determine the beginning of the peaks before all recessions between dates start and end
625941b68e7ae83300e4addd
def load_from_base64(self, b64_msg): <NEW_LINE> <INDENT> return self.load_from_yaml(base64.decodebytes(b64_msg))
Deserializes a base64-encoded YAML message
625941b6e8904600ed9f1d3a
def flatten_flags(self): <NEW_LINE> <INDENT> mro_tree = defaultdict(list) <NEW_LINE> for cls in self._help_classes: <NEW_LINE> <INDENT> clsname = cls.__name__ <NEW_LINE> for parent in cls.mro()[1:-3]: <NEW_LINE> <INDENT> mro_tree[parent.__name__].append(clsname) <NEW_LINE> <DEDENT> <DEDENT> aliases = {} <NEW_LINE> for alias, cls_trait in iteritems(self.aliases): <NEW_LINE> <INDENT> cls,trait = cls_trait.split('.',1) <NEW_LINE> children = mro_tree[cls] <NEW_LINE> if len(children) == 1: <NEW_LINE> <INDENT> cls = children[0] <NEW_LINE> <DEDENT> aliases[alias] = '.'.join([cls,trait]) <NEW_LINE> <DEDENT> flags = {} <NEW_LINE> for key, (flagdict, help) in iteritems(self.flags): <NEW_LINE> <INDENT> newflag = {} <NEW_LINE> for cls, subdict in iteritems(flagdict): <NEW_LINE> <INDENT> children = mro_tree[cls] <NEW_LINE> if len(children) == 1: <NEW_LINE> <INDENT> cls = children[0] <NEW_LINE> <DEDENT> newflag[cls] = subdict <NEW_LINE> <DEDENT> flags[key] = (newflag, help) <NEW_LINE> <DEDENT> return flags, aliases
flatten flags and aliases, so cl-args override as expected. This prevents issues such as an alias pointing to InteractiveShell, but a config file setting the same trait in TerminalInteractiveShell getting inappropriate priority over the command-line arg. Only aliases with exactly one descendant in the class list will be promoted.
625941b67c178a314d6ef269
def predict(self,image): <NEW_LINE> <INDENT> x_rcnn, _ = data.transforms.presets.rcnn.transform_test(mx.nd.array(image)) <NEW_LINE> box_ids, scores, bboxes = self.model(x_rcnn.as_in_context(self.ctx)) <NEW_LINE> scores = scores.asnumpy().squeeze() <NEW_LINE> class_ids = box_ids.asnumpy().squeeze()[np.where(scores>self.threshold)].astype(int) <NEW_LINE> classes = np.array(fasterrcnn_coco_net.classes)[class_ids].tolist() <NEW_LINE> bboxes = bboxes.asnumpy().squeeze()[np.where(scores>self.threshold)].astype(int).tolist() <NEW_LINE> return bboxes,classes
function that takes an image, runs predictions on it and returns a list of classes and bounding boxes args: image (numpy.ndarray) : image to run predictions on return: boxes (list) : list of bounding boxes in the form [[bb1_xmin,bb1_ymin,bb1_xmax,bb1_ymax],[bb2_xmin,bb2_ymin,bb2_xmax,bb2_ymax]] predicted from the image classes (list): list of classes predicted from the image
625941b6e64d504609d74652
def compare_md5(self): <NEW_LINE> <INDENT> if self.direction == "put": <NEW_LINE> <INDENT> remote_md5 = self.remote_md5() <NEW_LINE> return self.source_md5 == remote_md5 <NEW_LINE> <DEDENT> elif self.direction == "get": <NEW_LINE> <INDENT> local_md5 = self.file_md5(self.dest_file) <NEW_LINE> return self.source_md5 == local_md5
Compare md5 of file on network device to md5 of local file.
625941b6097d151d1a222c6e
def __init__(self, chat_id=None): <NEW_LINE> <INDENT> super(BotCommandScopeChatAdministrators, self).__init__(type='chat_administrators', chat_id=chat_id)
Represents the scope of bot commands, covering all administrators of a specific group or supergroup chat. @param chat_id: Unique identifier for the target chat
625941b60383005118ecf3f6
def has_data(self, model, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.instance = model.objects.get(**kwargs) <NEW_LINE> return True <NEW_LINE> <DEDENT> except model.DoesNotExist: <NEW_LINE> <INDENT> return False
:param model: the model class to query :param kwargs: filter fields :return: True (also sets self.instance to the matching model object) / False
625941b6d18da76e235322e3
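An illustrative call for has_data above; 'checker' stands for an instance of the class that defines it, and the User model and username filter are hypothetical:
if checker.has_data(User, username='alice'):
    print(checker.instance)  # the matching User object is cached on the instance
else:
    print('no matching row')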
def RestoreOldDC(dc, oldPen, oldBrush, oldMode): <NEW_LINE> <INDENT> dc.SetPen(oldPen) <NEW_LINE> dc.SetBrush(oldBrush) <NEW_LINE> dc.SetLogicalFunction(oldMode)
Restores the old settings for a :class:`wx.DC`. :param `dc`: an instance of :class:`wx.DC`; :param `oldPen`: an instance of :class:`wx.Pen`; :param `oldBrush`: an instance of :class:`wx.Brush`; :param `oldMode`: the :class:`wx.DC` drawing mode bit.
625941b6d10714528d5ffaf1
def rot_scale(v_co, scale, directions, rot_z): <NEW_LINE> <INDENT> (x, y, z) = directions <NEW_LINE> directions = Vector((-x, -y, z)) <NEW_LINE> q = Vector((0, 0, 1)).rotation_difference(directions) <NEW_LINE> mat_rot = q.to_matrix() <NEW_LINE> mat_rot.resize_4x4() <NEW_LINE> mc = Matrix.Rotation(rot_z, 4, 'Z') <NEW_LINE> v_co = [((v * scale) * mc) * mat_rot for v in v_co] <NEW_LINE> return v_co
Rotates and scales a set of vectors Args: v_co - (list of (float, float, float)) The coordinates of the vectors scale - (float) The scalar by which each vector is multiplied directions - (tuple) A vector that would be collinear with a former (0,0,1) vector after the rotation rot_z - (float) The rotation of the set of vectors around directions Returns: A set of coordinates representing all vectors of v_co after rotation and scaling
625941b630c21e258bdfa2af
def test_10(self): <NEW_LINE> <INDENT> product = self.create_product() <NEW_LINE> self.assertEqual(product.state2, 'draft') <NEW_LINE> self.assertEqual(product.qty_available, 0.0) <NEW_LINE> self.assertEqual(product.purchase_incoming_qty, 0.0) <NEW_LINE> product.state2 = 'obsolete' <NEW_LINE> self.assertEqual(product.state2, 'obsolete') <NEW_LINE> product.update_product_state() <NEW_LINE> self.assertEqual(product.state2, 'obsolete') <NEW_LINE> purchase_order = self.create_po() <NEW_LINE> self.create_pol(purchase_order, product) <NEW_LINE> purchase_order.signal_workflow('purchase_confirm') <NEW_LINE> self.assertEqual(purchase_order.state, 'approved') <NEW_LINE> self.assertEqual(product.qty_available, 0.0) <NEW_LINE> self.assertEqual(product.purchase_incoming_qty, 10.0) <NEW_LINE> product.update_product_state() <NEW_LINE> self.assertEqual(product.state2, 'end') <NEW_LINE> purchase_order.picking_ids.do_transfer() <NEW_LINE> self.assertEqual(product.qty_available, 10.0) <NEW_LINE> self.assertEqual(product.purchase_incoming_qty, 0.0) <NEW_LINE> product.update_product_state() <NEW_LINE> self.assertEqual(product.state2, 'end')
Test method used in ir.cron to update product state. [On hand, incoming] 1 00 = 0 2 01 = 1 3 10 = 1 4 11 = 1
625941b6adb09d7d5db6c5a5
def get_process_template(self, template_id): <NEW_LINE> <INDENT> url = '/applications/templates/{0}/'.format(template_id) <NEW_LINE> return self._make_api_request(url)
Returns a dictionary of details for a specific template matching template_id
625941b6d6c5a10208143e59
def write_pid_file(pid_file, pid): <NEW_LINE> <INDENT> import fcntl <NEW_LINE> import stat <NEW_LINE> try: <NEW_LINE> <INDENT> fd = os.open(pid_file, os.O_RDWR | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR) <NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> logging.error(e) <NEW_LINE> return -1 <NEW_LINE> <DEDENT> flags = fcntl.fcntl(fd, fcntl.F_GETFD) <NEW_LINE> assert flags != -1 <NEW_LINE> flags |= fcntl.FD_CLOEXEC <NEW_LINE> r = fcntl.fcntl(fd, fcntl.F_SETFD, flags) <NEW_LINE> assert r != -1 <NEW_LINE> try: <NEW_LINE> <INDENT> fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB, 0, 0, os.SEEK_SET) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> r = os.read(fd, 32) <NEW_LINE> if r: <NEW_LINE> <INDENT> logging.error('already started at pid %s' % common.to_str(r)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.error('already started') <NEW_LINE> <DEDENT> os.close(fd) <NEW_LINE> return -1 <NEW_LINE> <DEDENT> os.ftruncate(fd, 0) <NEW_LINE> os.write(fd, common.to_bytes(str(pid))) <NEW_LINE> return 0
Use the pid file to ensure that only one instance of the daemon is running. Open the pid file and set the close-on-exec flag first, then try to acquire an exclusive lock on the pid file: if that succeeds, return 0 to start the daemon process; otherwise a daemon process is already running, so return -1.
625941b6656771135c3eb684
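A hedged usage sketch for write_pid_file above; the pid-file path is illustrative:
import os, sys
# refuse to start a second daemon instance if the lock is already held
if write_pid_file('/var/run/mydaemon.pid', os.getpid()) != 0:
    sys.exit(1)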
def probe_local_ports(addr, max_num): <NEW_LINE> <INDENT> suc_list = [] <NEW_LINE> for idx in range(max_num): <NEW_LINE> <INDENT> c_sock = socket.socket() <NEW_LINE> try: <NEW_LINE> <INDENT> c_sock.connect(addr) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> suc_list.append((c_sock, c_sock.getsockname())) <NEW_LINE> <DEDENT> <DEDENT> print('%d connections succeed with %d attempts.\n' % (len(suc_list), max_num))
Probe the maximum port num.
625941b623e79379d52ee37a
def ask(self, message): <NEW_LINE> <INDENT> self.lock() <NEW_LINE> try: <NEW_LINE> <INDENT> return get_raw_input(message) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.unlock()
This wraps the built-in raw_input function to ensure that only 1 thread is asking for input from the user at a given time. Any process that tries to log output to the terminal will block while the user is being prompted.
625941b699fddb7c1c9de1a5
def has_override(self, name): <NEW_LINE> <INDENT> return name in self.get_overrides()
Check to see if a variable has an override.
625941b60a366e3fb873e628
def update_parameters(self) -> None: <NEW_LINE> <INDENT> self.update_pmin()
Updates p_min parameter
625941b6187af65679ca4f2f
def filter_function(): <NEW_LINE> <INDENT> print('#####################################') <NEW_LINE> print('######## filter function ############') <NEW_LINE> print('#####################################') <NEW_LINE> my_list = [1, 2, 3, 4, 5] <NEW_LINE> print(list(filter(check_even, my_list))) <NEW_LINE> for n in filter(check_even, my_list): <NEW_LINE> <INDENT> print(n)
filter function
625941b682261d6c526ab2b5
def nested_cv(X, Y): <NEW_LINE> <INDENT> metric_list_nca = [] <NEW_LINE> metric_list_no_nca = [] <NEW_LINE> model_list = [] <NEW_LINE> parameter_list_nca = [] <NEW_LINE> parameter_list_no_nca = [] <NEW_LINE> random_state = [20, 40, 70, 80, 90] <NEW_LINE> scaling = MinMaxScaler() <NEW_LINE> esterase = ['EH51(22)', 'EH75(16)', 'EH46(23)', 'EH98(11)', 'EH49(23)'] <NEW_LINE> for states in random_state: <NEW_LINE> <INDENT> X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.20, random_state=states, stratify=Y) <NEW_LINE> X_train = X_train.loc[[x for x in X_train.index if x not in esterase]] <NEW_LINE> X_test = X_test.loc[[x for x in X_test.index if x not in esterase]] <NEW_LINE> Y_train = Y_train.loc[[x for x in Y_train.index if x not in esterase]] <NEW_LINE> Y_test = Y_test.loc[[x for x in Y_test.index if x not in esterase]] <NEW_LINE> transformed_x = scaling.fit_transform(X_train) <NEW_LINE> transformed_x = pd.DataFrame(transformed_x) <NEW_LINE> transformed_x.index = X_train.index <NEW_LINE> transformed_x.columns = X_train.columns <NEW_LINE> test_x = scaling.transform(X_test) <NEW_LINE> test_x = pd.DataFrame(test_x) <NEW_LINE> test_x.index = X_test.index <NEW_LINE> test_x.columns = X_test.columns <NEW_LINE> nca_model_list = knn_classification_nca(transformed_x, Y_train, test_x) <NEW_LINE> nca_score = print_score(nca_model_list, Y_train, Y_test) <NEW_LINE> metric_list_nca.append([nca_score.grid_score, nca_score.train_mat, nca_score.grid_matthews]) <NEW_LINE> parameter_list_nca.append( [nca_score.grid_params, nca_score.grid_confusion, nca_score.tr_report, nca_score.te_report, nca_score.grid_train_confusion]) <NEW_LINE> no_nca_model_list = knn_classification(transformed_x, Y_train, test_x) <NEW_LINE> no_nca_score = print_score(no_nca_model_list, Y_train, Y_test) <NEW_LINE> metric_list_no_nca.append([no_nca_score.grid_score, no_nca_score.train_mat, no_nca_score.grid_matthews]) <NEW_LINE> parameter_list_no_nca.append( [no_nca_score.grid_params, no_nca_score.grid_confusion, no_nca_score.tr_report, no_nca_score.te_report, no_nca_score.grid_train_confusion]) <NEW_LINE> model_list.append([nca_model_list.fitted_grid, nca_model_list.y_grid, no_nca_model_list.fitted_grid, no_nca_model_list.y_grid]) <NEW_LINE> <DEDENT> return model_list, metric_list_nca, metric_list_no_nca, parameter_list_nca, parameter_list_no_nca, random_state
Performs something similar to a nested cross-validation
625941b65fdd1c0f98dc0043
def get_multi_list(self): <NEW_LINE> <INDENT> return self.multi_list
Batch operation list :return:
625941b6a79ad161976cbf57
def go_to_page(self, page_name, css_selector='section.xblock--drag-and-drop'): <NEW_LINE> <INDENT> self.browser.get(self.live_server_url) <NEW_LINE> self.browser.find_element_by_link_text(page_name).click() <NEW_LINE> return self.browser.find_element_by_css_selector(css_selector)
Navigate to the page `page_name`, as listed on the workbench home Returns the DOM element on the visited page located by the `css_selector`
625941b6fb3f5b602dac34a0
def copy_or_paste(self, widget, event): <NEW_LINE> <INDENT> control_key = Gdk.ModifierType.CONTROL_MASK <NEW_LINE> shift_key = Gdk.ModifierType.SHIFT_MASK <NEW_LINE> if event.type == Gdk.EventMask.KEY_PRESS_MASK: <NEW_LINE> <INDENT> if event.state & (shift_key | control_key) == shift_key | control_key: <NEW_LINE> <INDENT> if event.keyval == 67: <NEW_LINE> <INDENT> widget.copy_clipboard() <NEW_LINE> <DEDENT> elif event.keyval == 86: <NEW_LINE> <INDENT> widget.paste_clipboard() <NEW_LINE> <DEDENT> return True
Decides whether Ctrl+Shift is pressed, in which case it returns True. If Ctrl+Shift+C or Ctrl+Shift+V are pressed, copies or pastes, accordingly. The return value is necessary so no other action, like killing the process, is performed on Ctrl+C.
625941b650812a4eaa59c138
def ping(self, sensor_name: str, angle: float): <NEW_LINE> <INDENT> if sensor_name not in self.sensors: <NEW_LINE> <INDENT> error_msg = "ERROR - Ping failed. Sensor '{}' not found".format(sensor_name) <NEW_LINE> logger.error(error_msg) <NEW_LINE> return None <NEW_LINE> <DEDENT> logger.debug("Ping sensor '{}' at angle {}°".format(sensor_name, angle)) <NEW_LINE> return self.sensors[sensor_name].ping(angle)
Return single sensor reading in a given direction
625941b6be7bc26dc91cd418
@override_style() <NEW_LINE> def plot_multi_cmfs( cmfs: Union[ MultiSpectralDistributions, str, Sequence[Union[MultiSpectralDistributions, str]], ], **kwargs: Any, ) -> Tuple[plt.Figure, plt.Axes]: <NEW_LINE> <INDENT> cmfs = cast( List[MultiSpectralDistributions], list(filter_cmfs(cmfs).values()) ) <NEW_LINE> _figure, axes = artist(**kwargs) <NEW_LINE> axes.axhline( color=CONSTANTS_COLOUR_STYLE.colour.dark, linestyle="--", zorder=CONSTANTS_COLOUR_STYLE.zorder.foreground_line, ) <NEW_LINE> x_limit_min, x_limit_max, y_limit_min, y_limit_max = [], [], [], [] <NEW_LINE> for i, cmfs_i in enumerate(cmfs): <NEW_LINE> <INDENT> for j, RGB in enumerate( as_float_array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) ): <NEW_LINE> <INDENT> RGB = [reduce(lambda y, _: y * 0.5, range(i), x) for x in RGB] <NEW_LINE> values = cmfs_i.values[:, j] <NEW_LINE> shape = cmfs_i.shape <NEW_LINE> x_limit_min.append(shape.start) <NEW_LINE> x_limit_max.append(shape.end) <NEW_LINE> y_limit_min.append(np.min(values)) <NEW_LINE> y_limit_max.append(np.max(values)) <NEW_LINE> axes.plot( cmfs_i.wavelengths, values, color=RGB, label=f"{cmfs_i.strict_labels[j]} - {cmfs_i.strict_name}", zorder=CONSTANTS_COLOUR_STYLE.zorder.midground_line, ) <NEW_LINE> <DEDENT> <DEDENT> bounding_box = ( min(x_limit_min), max(x_limit_max), min(y_limit_min) - np.abs(np.min(y_limit_min)) * 0.05, max(y_limit_max) + np.abs(np.max(y_limit_max)) * 0.05, ) <NEW_LINE> cmfs_strict_names = ", ".join([cmfs_i.strict_name for cmfs_i in cmfs]) <NEW_LINE> title = f"{cmfs_strict_names} - Colour Matching Functions" <NEW_LINE> settings: Dict[str, Any] = { "axes": axes, "bounding_box": bounding_box, "legend": True, "title": title, "x_label": "Wavelength $\\lambda$ (nm)", "y_label": "Tristimulus Values", } <NEW_LINE> settings.update(kwargs) <NEW_LINE> return render(**settings)
Plot given colour matching functions. Parameters ---------- cmfs Colour matching functions to plot. ``cmfs`` elements can be of any type or form supported by the :func:`colour.plotting.filter_cmfs` definition. Other Parameters ---------------- kwargs {:func:`colour.plotting.artist`, :func:`colour.plotting.render`}, See the documentation of the previously listed definitions. Returns ------- :class:`tuple` Current figure and axes. Examples -------- >>> cmfs = [ ... 'CIE 1931 2 Degree Standard Observer', ... 'CIE 1964 10 Degree Standard Observer', ... ] >>> plot_multi_cmfs(cmfs) # doctest: +ELLIPSIS (<Figure size ... with 1 Axes>, <...AxesSubplot...>) .. image:: ../_static/Plotting_Plot_Multi_CMFS.png :align: center :alt: plot_multi_cmfs
625941b6e5267d203edcdab3
def __init__(self,tup): <NEW_LINE> <INDENT> if len(tup) == 1 : <NEW_LINE> <INDENT> self.pubkey = tup[0] <NEW_LINE> self.privkey = None <NEW_LINE> <DEDENT> elif len(tup) == 3 : <NEW_LINE> <INDENT> n, p, q = tup <NEW_LINE> assert n == pow(p,2)*q, 'Input private key raise error.' <NEW_LINE> self.pubkey = n <NEW_LINE> self.privkey = (inverse(self.pubkey, lcm(p-1,q-1)), p*q) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> raise ValueError('SchmidtSamoa(tup) : \n\t@tup : \n\t\t(n) for public key \n\t\t(n,p,q) for private key')
Choose two large distinct primes p and q and compute N = (p**2) * q
625941b699cbb53fe67929f9
def test_token_endpoint_autherror(self): <NEW_LINE> <INDENT> testurl = ('/token?grant_type=client_credentials&' 'client_id=test_client&client_secret=invalidsecret') <NEW_LINE> resp = self.testapp.post(testurl, status=400) <NEW_LINE> self.assertEqual(resp.content_type, 'application/json') <NEW_LINE> testurl = ('/token?grant_type=client_credentials&' 'client_id=invalid_client&client_secret=test_secret') <NEW_LINE> resp = self.testapp.post(testurl, status=400) <NEW_LINE> self.assertEqual(resp.content_type, 'application/json') <NEW_LINE> payload = {"grant_type": "client_credentials", "client_id": "test_client", "client_secret": "invalid_secret"} <NEW_LINE> resp = self.testapp.post('/token', payload, status=400) <NEW_LINE> self.assertEqual(resp.content_type, 'application/json') <NEW_LINE> payload = {"grant_type": "client_credentials", "client_id": "invalid_client", "client_secret": "test_secret"} <NEW_LINE> resp = self.testapp.post('/token', payload, status=400) <NEW_LINE> self.assertEqual(resp.content_type, 'application/json')
On auth errors MUST return Bad Request (400)
625941b66fece00bbac2d54d
def test_gg_filters_env_correctly(self): <NEW_LINE> <INDENT> good_groups = [ self.mock_group_dictionary("mhcfoo"), self.mock_group_dictionary("mhcbar"), self.mock_group_dictionary("mhcfoobar") ] <NEW_LINE> bad_groups = [ self.mock_group_dictionary("mhcnoncomformist", name="foo-mhcnoncomformist-123141231123") ] <NEW_LINE> with patch("disco_aws_automation.disco_autoscale.get_boto3_paged_results", MagicMock(return_value=(good_groups + bad_groups))): <NEW_LINE> <INDENT> good_group_ids = [group['AutoScalingGroupName'] for group in good_groups] <NEW_LINE> actual_group_ids = [group['name'] for group in self._autoscale.get_existing_groups()] <NEW_LINE> self.assertEqual(sorted(good_group_ids), sorted(actual_group_ids))
group_generator correctly filters based on the environment
625941b6167d2b6e312189b0
def test_minimal_rj(self): <NEW_LINE> <INDENT> user = ServiceUser(self.assoc, mode="acceptor") <NEW_LINE> user.primitive = self.primitive_rj <NEW_LINE> assert user.writeable is False <NEW_LINE> assert user.primitive == self.primitive_rj <NEW_LINE> assert user.mode == "acceptor" <NEW_LINE> assert user.maximum_length is None <NEW_LINE> assert user.implementation_class_uid is None <NEW_LINE> assert user.implementation_version_name is None <NEW_LINE> assert user.asynchronous_operations == (1, 1) <NEW_LINE> assert user.role_selection == {} <NEW_LINE> assert user.sop_class_common_extended == {} <NEW_LINE> assert user.sop_class_extended == {} <NEW_LINE> assert user.user_identity is None
Test access with a minimal allowed A-ASSOCIATE (rj) primitive.
625941b630bbd722463cbbd4
def uniques(elts): <NEW_LINE> <INDENT> if elts is None: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> unique_elts = list(set(elts)) <NEW_LINE> unique_elts.sort() <NEW_LINE> return unique_elts
Unique elements, sorted Arguments: elts {list} -- List of elements Returns: list -- List of sorted unique elements
625941b69c8ee82313fbb587
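Two illustrative calls for uniques above:
assert uniques([3, 1, 2, 3, 1]) == [1, 2, 3]
assert uniques(None) == []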
def gettext(): <NEW_LINE> <INDENT> win32clipboard.OpenClipboard() <NEW_LINE> t = win32clipboard.GetClipboardData(win32con.CF_TEXT) <NEW_LINE> win32clipboard.CloseClipboard() <NEW_LINE> return t
Get the clipboard contents
625941b64e4d5625662d41f0
def leaves(t): <NEW_LINE> <INDENT> if is_leaf(t): <NEW_LINE> <INDENT> return [root(t)] <NEW_LINE> <DEDENT> _leaves = [] <NEW_LINE> for b in branches(t): <NEW_LINE> <INDENT> _leaves += leaves(b) <NEW_LINE> <DEDENT> return _leaves
>>> leaves(tree('D', [tree('B', [tree('A'), tree('C')]), tree('F', [tree('E'), tree('H', [tree('G'), tree('I')])])])) ['A', 'C', 'E', 'G', 'I']
625941b655399d3f055884c6
def teardown(self): <NEW_LINE> <INDENT> print("[SCD] Teardown {}".format(self.automated_test.name)) <NEW_LINE> cleanup_test_step = 0 <NEW_LINE> for role, target in self.targets.items(): <NEW_LINE> <INDENT> flight_names = target.managed_flights() <NEW_LINE> for flight_name in flight_names: <NEW_LINE> <INDENT> print("[SCD] - Deleting {} flights for target {}.".format(len(flight_names), target.name)) <NEW_LINE> step_ref = TestStepReference( name="Clean up flight {} in {}".format(flight_name, target.name), index=cleanup_test_step, phase=TestPhase.Cleanup ) <NEW_LINE> try: <NEW_LINE> <INDENT> resp, query = target.delete_flight(flight_name) <NEW_LINE> self.report_recorder.capture_interaction(step_ref, query, 'Remove flight during test cleanup') <NEW_LINE> <DEDENT> except QueryError as e: <NEW_LINE> <INDENT> interaction_id = self.report_recorder.capture_interaction(step_ref, e.query, 'Remove flight during test cleanup') <NEW_LINE> self.report_recorder.capture_deletion_unknown_issue( interaction_id=interaction_id, summary="Deletion request for flight {} was unsuccessful".format(flight_name), details="Deletion attempt failed with status {}.".format(e.query.status_code), flight_name=flight_name, target_name=target.name, uss_role=role ) <NEW_LINE> print("[SCD] Error: Unable to delete flight {} during teardown".format(flight_name)) <NEW_LINE> <DEDENT> cleanup_test_step = cleanup_test_step + 1
Delete resources created by this test runner.
625941b6d58c6744b4257a73
def __lt__(self, other): <NEW_LINE> <INDENT> if self.votes == other.votes: <NEW_LINE> <INDENT> return self._time_created > other._time_created <NEW_LINE> <DEDENT> return self.votes > other.votes
Because we are using a min-heap queue, the 'smallest' track will be played first
625941b6be8e80087fb20a61
def branchpoint(name=None,templates=None): <NEW_LINE> <INDENT> queryDiagnostic("BP",name,templates)
Print the ``branch-point function''. Type FUNC(x) to list the value of the ``branch-point function'' in the diagnostics of the bifurcation diagram object x. This function vanishes at a branch point. Type FUNC() to list the value of the ``branch-point function'' in the output-file fort.9. Type FUNC('xxx') to list the value of the ``branch-point function'' in the info file 'd.xxx'.
625941b6d4950a0f3b08c16d
def test(self): <NEW_LINE> <INDENT> connection = self._connect() <NEW_LINE> start_time = time_now() <NEW_LINE> connection.process_data_events(time_limit=0) <NEW_LINE> elapsed = time_now() - start_time <NEW_LINE> self.assertLess(elapsed, 0.25) <NEW_LINE> start_time = time_now() <NEW_LINE> connection.process_data_events(time_limit=0.005) <NEW_LINE> elapsed = time_now() - start_time <NEW_LINE> self.assertGreaterEqual(elapsed, 0.005) <NEW_LINE> self.assertLess(elapsed, 0.25)
BlockingConnection.process_data_events
625941b6fb3f5b602dac34a1
def init_engine(self): <NEW_LINE> <INDENT> interface_type = "site" <NEW_LINE> bootstrap_data = getattr(self.sgtk, "_desktop_data", None) <NEW_LINE> if bootstrap_data is not None: <NEW_LINE> <INDENT> if "proxy_pipe" in bootstrap_data and "proxy_auth" in bootstrap_data: <NEW_LINE> <INDENT> interface_type = "project" <NEW_LINE> <DEDENT> <DEDENT> self._is_site_engine = interface_type == "site" <NEW_LINE> from sgtk.platform import qt <NEW_LINE> base_def = self._define_qt_base() <NEW_LINE> qt.QtCore = base_def.get("qt_core") <NEW_LINE> qt.QtGui = base_def.get("qt_gui") <NEW_LINE> qt.TankDialogBase = base_def.get("dialog_base") <NEW_LINE> tk_desktop = self.import_module("tk_desktop") <NEW_LINE> self.__impl = tk_desktop.get_engine_implementation(interface_type)(self) <NEW_LINE> if hasattr(self.__impl, "init_engine"): <NEW_LINE> <INDENT> self.__impl.init_engine()
Initialize the engine
625941b6498bea3a759b98c4
@app.route('/api/remove', methods=['DELETE']) <NEW_LINE> @require_login <NEW_LINE> @require_group_str_id <NEW_LINE> def remove(user=None, group=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with handle_exceptions(): <NEW_LINE> <INDENT> if group.owner.google_id == user.google_id: <NEW_LINE> <INDENT> for g_user in list(group.users): <NEW_LINE> <INDENT> group.users.remove(g_user) <NEW_LINE> attempt_delete_user(g_user) <NEW_LINE> <DEDENT> db.session.delete(group) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> group.users.remove(user) <NEW_LINE> attempt_delete_user(user) <NEW_LINE> <DEDENT> db.session.commit() <NEW_LINE> return jsonify({}), 202 <NEW_LINE> <DEDENT> <DEDENT> except APIError as e: <NEW_LINE> <INDENT> return e.response, e.code
If the user is the owner of the group remove all users from the group, delete users if they don't belong to any other group, and then delete the group. If the user is just a member, remove the user from the group and try to delete the user.
625941b6be383301e01b52a0
def send_to_all_ml(obj): <NEW_LINE> <INDENT> base.send_to_all_ml(obj)
Send an object to all ml processes @param obj The object to be sent
625941b69b70327d1c4e0be6
def save(self): <NEW_LINE> <INDENT> filename = os.path.join(self.directory, 'experiment.json') <NEW_LINE> with open(filename, 'w') as f: <NEW_LINE> <INDENT> json.dump(self.report, f, indent=2, sort_keys=True) <NEW_LINE> <DEDENT> filename = os.path.join(self.directory, 'training_progress.csv') <NEW_LINE> with open(filename, 'w') as csvfile: <NEW_LINE> <INDENT> csv.writer(csvfile).writerows(self.history) <NEW_LINE> <DEDENT> filename = os.path.join(self.directory, 'learned_parameters.npy') <NEW_LINE> parameters = lasagne.layers.get_all_param_values(self.__network) <NEW_LINE> numpy.save(filename, parameters)
Save the results of the experiment to self.directory.
625941b69f2886367277a6a4
def eval(self, expr): <NEW_LINE> <INDENT> self.expr = expr <NEW_LINE> return self._eval(ast.parse(expr.strip()).body[0].value)
evaluate an expression, using the operators, functions and names previously set up.
625941b65f7d997b871748ad
def _mset_gen_iter(self): <NEW_LINE> <INDENT> return MSetIter(self)
Return an iterator over the MSet. The iterator will return MSetItem objects, which will be evaluated lazily where appropriate.
625941b644b2445a33931eb3
def make_aggregate_graphs(data, filename, ignore_initial_rounds): <NEW_LINE> <INDENT> data = clean_nans(data) <NEW_LINE> print('make_aggregate_graphs', filename) <NEW_LINE> columns = [col.replace('_ttl', '') for col in data.columns if col.endswith('_ttl')] <NEW_LINE> index = data['round'] <NEW_LINE> titles = [] <NEW_LINE> plots = [] <NEW_LINE> for col in columns: <NEW_LINE> <INDENT> if col == 'index': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> title = make_title(filename, col) <NEW_LINE> plot = abcEconomics_figure(title) <NEW_LINE> plot.yaxis.visible = False <NEW_LINE> plot.legend.orientation = "top_left" <NEW_LINE> try: <NEW_LINE> <INDENT> plot.extra_y_ranges['std'] = y_range(col, 'std', data, ignore_initial_rounds) <NEW_LINE> plot.line(index, data[col + '_std'], legend='std', line_width=2, line_color='red', y_range_name="std") <NEW_LINE> plot.add_layout(LinearAxis(y_range_name="std"), 'right') <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> plot.extra_y_ranges['ttl'] = y_range(col, 'ttl', data, ignore_initial_rounds) <NEW_LINE> plot.line(index, data[col + '_ttl'], legend='mean/total', line_width=2, line_color='blue', y_range_name="ttl") <NEW_LINE> plot.add_layout(LinearAxis(y_range_name="ttl"), 'left') <NEW_LINE> try: <NEW_LINE> <INDENT> plot.extra_y_ranges['mean'] = y_range(col, 'mean', data, ignore_initial_rounds) <NEW_LINE> plot.add_layout(LinearAxis(y_range_name="mean"), 'left') <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> titles.append(title + ' (agg)') <NEW_LINE> plots.append(plot) <NEW_LINE> <DEDENT> return titles, plots
Make timeseries graphs from aggregate or aggregate_ files, which contain _ttl, _mean and _std suffixes, to denote total, mean and standard deviation columns
625941b616aa5153ce36228b
def _get_usage_dict(self, object_or_dict, **updates): <NEW_LINE> <INDENT> usage = {} <NEW_LINE> if isinstance(object_or_dict, objects.Instance): <NEW_LINE> <INDENT> usage = instance_obj.compat_instance(object_or_dict) <NEW_LINE> <DEDENT> elif isinstance(object_or_dict, objects.Flavor): <NEW_LINE> <INDENT> usage = obj_base.obj_to_primitive(object_or_dict) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> usage.update(object_or_dict) <NEW_LINE> <DEDENT> for key in ('numa_topology',): <NEW_LINE> <INDENT> if key in updates: <NEW_LINE> <INDENT> usage[key] = updates[key] <NEW_LINE> <DEDENT> <DEDENT> return usage
Make a usage dict _update methods expect. Accepts a dict or an Instance or Flavor object, and a set of updates. Converts the object to a dict and applies the updates. :param object_or_dict: instance or flavor as an object or just a dict :param updates: key-value pairs to update the passed object. Currently only considers 'numa_topology', all other keys are ignored. :returns: a dict with all the information from object_or_dict updated with updates
625941b6ff9c53063f47c011
def _evalf_(self, x, parent=None): <NEW_LINE> <INDENT> if parent is float: <NEW_LINE> <INDENT> return math.asin(1/x) <NEW_LINE> <DEDENT> return (1/x).arcsin()
EXAMPLES:: sage: arccsc(2).n(100) 0.52359877559829887307710723055 sage: float(arccsc(2)) 0.52359877559829...
625941b61f037a2d8b946012
def remove_unfinished(self): <NEW_LINE> <INDENT> for trial_num in self.unfinished_trial_nums: <NEW_LINE> <INDENT> del self.trials[trial_num] <NEW_LINE> <DEDENT> self.unfinished_trial_nums = set()
remove any unfinished trials. This is useful for still being able to plot after interrupting an Optimiser before it finished
625941b64a966d76dd550e1e
def await_future(future): <NEW_LINE> <INDENT> interval = 0.02 <NEW_LINE> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return future.get(timeout=interval) <NEW_LINE> <DEDENT> except Queue.Empty as e: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> logger.debug("Awaiting future...") <NEW_LINE> if QT_AVAILABLE and is_mainthread(): <NEW_LINE> <INDENT> flush_qt_events()
Wait for a queue (future) message without blocking the main (Qt) thread. This is effectively a technique I use to get around completely blocking IDA's mainthread while waiting for a threaded result that may need to make use of the execute_sync operators. Waiting for a 'future' thread result to come through via this function lets other execute_sync actions slip through (at least Read, Fast).
625941b6cc40096d61595766
def get_item_id_sold_last_from_table(table): <NEW_LINE> <INDENT> date_table = [[int(line[year_index]), int(line[month_index]), int(line[day_index])] for line in table] <NEW_LINE> data_max = max(date_table) <NEW_LINE> for line in table: <NEW_LINE> <INDENT> if ( data_max[0] == int(line[year_index]) and data_max[1] == int(line[month_index]) and data_max[2] == int(line[day_index]) ): <NEW_LINE> <INDENT> return str(line[id_index])
Returns the _id_ of the item that was sold most recently. Args: table (list of lists): the sales table Returns: str: the _id_ of the item that was sold most recently.
625941b699fddb7c1c9de1a6
def predict(self, data): <NEW_LINE> <INDENT> labels = [] <NEW_LINE> for row in data: <NEW_LINE> <INDENT> dists = [] <NEW_LINE> for c in self.centroids_: <NEW_LINE> <INDENT> dist = float(0) <NEW_LINE> for i in range(len(row)): <NEW_LINE> <INDENT> dist += abs(row[i]-c[i]) ** 2 <NEW_LINE> <DEDENT> dist = math.sqrt(dist) <NEW_LINE> dists.append(dist) <NEW_LINE> <DEDENT> for i in range(self.n_clusters): <NEW_LINE> <INDENT> if dists[i] == min(dists): <NEW_LINE> <INDENT> labels.append(i) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return labels
Takes data and assigns clusters to the points based on current centroids stored in centroids_ input ----- data 2D numpy array (convert DataFrames via df.to_numpy()) output ------ labels ndarray of the labels each point is closest to
625941b6a4f1c619b28afe55
def condition_stat(start_date, end_date, index_code, condition_num): <NEW_LINE> <INDENT> conn = connect_data_source() <NEW_LINE> doom_data = find_condition_date_usa(start_date, end_date, condition_num) <NEW_LINE> select_date_time_list = doom_data.index <NEW_LINE> open_price_change_list = [] <NEW_LINE> day_price_change_list = [] <NEW_LINE> for selected_date in select_date_time_list: <NEW_LINE> <INDENT> open_price_change, day_price_change = trading_day_state(index_code, selected_date, conn) <NEW_LINE> open_price_change_list.append(open_price_change) <NEW_LINE> day_price_change_list.append(day_price_change) <NEW_LINE> <DEDENT> open_price_change_series = Series(open_price_change_list) <NEW_LINE> day_price_change_series = Series(day_price_change_list) <NEW_LINE> open_price_change_series.hist() <NEW_LINE> day_price_change_series.hist() <NEW_LINE> print (open_price_change_series.describe()) <NEW_LINE> print (day_price_change_series.describe()) <NEW_LINE> print (sum(day_price_change_series>0)) <NEW_LINE> return open_price_change_series, day_price_change_series
Given the specified dates and a condition value for changes in the US stock market; index_code specifies the domestic (Chinese) stock market index change to evaluate
625941b6cb5e8a47e48b78c3
def _check_match_box(self, data): <NEW_LINE> <INDENT> _return = False <NEW_LINE> tag_element_end_position = self._find_tag_end_position(data, 4) <NEW_LINE> if (tag_element_end_position > 5): <NEW_LINE> <INDENT> tag_params = FormTagsEncoder.parse_tag_parameters("box", data, 0, tag_element_end_position) <NEW_LINE> _return = ("width" in tag_params and re.match("^\\d+$", tag_params['width']) is not None) <NEW_LINE> <DEDENT> return _return
Check if a possible tag match is a valid "box" tag that needs to be changed. :param data: Data starting with the possible tag :return: (bool) True if change required :since: v1.0.0
625941b68a349b6b435e7f87
@zope.component.adapter(ICustomUpdatingDict, interfaces.IFormLayer) <NEW_LINE> @zope.interface.implementer(interfaces.IFieldWidget) <NEW_LINE> def ColorDictInputFieldWidget(field, request): <NEW_LINE> <INDENT> return FieldWidget(field, ColorDictInputWidget(request))
IFieldWidget factory for ColorDictInputWidget.
625941b6a8370b77170526b4
def testBoardArch(self): <NEW_LINE> <INDENT> self.assertEqual(self.boards.SelectBoards(['arm']), {'all': 2, 'arm': 2})
Test single board selection
625941b6d8ef3951e3243350
def perfect_pairing(list_of_teams): <NEW_LINE> <INDENT> graph_edges = [] <NEW_LINE> weights = get_weights() <NEW_LINE> for i, team1 in enumerate(list_of_teams): <NEW_LINE> <INDENT> for j, team2 in enumerate(list_of_teams): <NEW_LINE> <INDENT> if i > j: <NEW_LINE> <INDENT> weight = calc_weight(team1, team2, i, j, list_of_teams[len(list_of_teams) - i - 1], list_of_teams[len(list_of_teams) - j - 1], len(list_of_teams) - i - 1, len(list_of_teams) - j - 1, weights, TabSettings.get("cur_round", 1), TabSettings.get("tot_rounds", 5)) <NEW_LINE> graph_edges += [(i, j, weight)] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> pairings_num = mwmatching.maxWeightMatching(graph_edges, maxcardinality=True) <NEW_LINE> all_pairs = [] <NEW_LINE> for pair in pairings_num: <NEW_LINE> <INDENT> if pair < len(list_of_teams): <NEW_LINE> <INDENT> team = list_of_teams[pair] <NEW_LINE> matched_team = list_of_teams[pairings_num.index(pair)] <NEW_LINE> pairing = set([team, matched_team]) <NEW_LINE> if pairing not in all_pairs: <NEW_LINE> <INDENT> all_pairs.append(pairing) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return determine_gov_opp(all_pairs)
Uses the mwmatching library to assign teams in a pairing
625941b66aa9bd52df036bb5
def __str__(self): <NEW_LINE> <INDENT> return ', '.join( [key + '=' + repr(value) for key, value in self.items()] )
Format this object to 'key1=value1, key2=value2' style.
625941b631939e2706e4cc84
def get_body_parameters(self): <NEW_LINE> <INDENT> p = dict() <NEW_LINE> p['count'] = self.count <NEW_LINE> return p
Values of body parameters as a dictionary (name of parameter: value of the parameter).
625941b6cdde0d52a9e52e41
def update_status(self, context, domain_id, status, serial): <NEW_LINE> <INDENT> self._update_domain_status(context, domain_id, status, serial) <NEW_LINE> self._update_record_status(context, domain_id, status, serial)
:param context: Security context information. :param domain_id: The ID of the designate domain. :param status: The status, 'SUCCESS' or 'ERROR'. :param serial: The consensus serial number for the domain. :return: None
625941b638b623060ff0ac02
def test_encoding5(self): <NEW_LINE> <INDENT> self.assertEqual(self.identifier.encode(123123125, 'base36'), '21AYET')
Encode 123123125 to '21AYET'
625941b68a43f66fc4b53e7d
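As a quick check of the expected value in the test above: 123123125 = 2*36^5 + 1*36^4 + 10*36^3 + 34*36^2 + 14*36 + 29, and the digit values 2, 1, 10, 34, 14, 29 map to '21AYET' in base 36.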
def unlock(self, interact): <NEW_LINE> <INDENT> print('Q: ' + self.question) <NEW_LINE> answer = interact([self.answer], self.choices) <NEW_LINE> assert len(answer) == 1 <NEW_LINE> answer = answer[0] <NEW_LINE> if answer != self.answer: <NEW_LINE> <INDENT> self.locked = False <NEW_LINE> self.answer = answer
Unlocks the conceptual test case.
625941b62ae34c7f2600cf45
def _get_cert(self, uri): <NEW_LINE> <INDENT> content_type = DER_CONTENT_TYPE <NEW_LINE> response = self.net.get(uri, headers={'Accept': content_type}, content_type=content_type) <NEW_LINE> return response, jose.ComparableX509(OpenSSL.crypto.load_certificate( OpenSSL.crypto.FILETYPE_ASN1, response.content))
Returns certificate from URI. :param str uri: URI of certificate :returns: tuple of the form (response, :class:`josepy.util.ComparableX509`) :rtype: tuple
625941b68e71fb1e9831d5c0
def loadFixed_slot(self, fnameToLoad=False): <NEW_LINE> <INDENT> self.lasagna.removeIngredientByType('imagestack') <NEW_LINE> if not fnameToLoad: <NEW_LINE> <INDENT> self.lasagna.showStackLoadDialog(fileFilter="Images (*.mhd *.mha *.tiff *tif)") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.lasagna.loadImageStack(fnameToLoad) <NEW_LINE> <DEDENT> if not isinstance(self.lasagna.stacksInTreeList(),list): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> fixed_name = self.lasagna.stacksInTreeList()[0] <NEW_LINE> self.referenceStackName.setText(fixed_name) <NEW_LINE> self.fixedStackPath = self.lasagna.returnIngredientByName(fixed_name).fnameAbsPath <NEW_LINE> self.loadMoving.setEnabled(True) <NEW_LINE> self.updateWidgets_slot() <NEW_LINE> self.movingStackName.setText('') <NEW_LINE> self.elastix_cmd['f'] = self.absToRelPath(self.fixedStackPath)
Clear all stacks and load a fixed image. Can optionally load a specific file name (used for debugging)
625941b6a05bb46b383ec640
def test_convert_from_currency_not_found(): <NEW_LINE> <INDENT> with pytest.raises(IdexCurrencyNotFoundException): <NEW_LINE> <INDENT> with requests_mock.mock() as m: <NEW_LINE> <INDENT> m.post('https://api.idex.market/returnCurrencies', json=currencies_json, status_code=200) <NEW_LINE> for e in not_found_examples: <NEW_LINE> <INDENT> q = client.parse_from_currency_quantity(e[0], e[1])
Test when currency is not found
625941b663d6d428bbe44302
def test_filter_by_wrong_title(self): <NEW_LINE> <INDENT> response = self.create_article( self.login_verified_user(self.user_data), self.article_data) <NEW_LINE> response = self.client.get( '/api/articles?title=Wrong' ) <NEW_LINE> self.assertNotIn("Django", response.content.decode())
Test a user cannot filter articles by wrong title
625941b68e7ae83300e4adde
def move(self): <NEW_LINE> <INDENT> pass
Returns ------- direction: Directions Which direction to move
625941b676e4537e8c35148a
def traverseDir(self): <NEW_LINE> <INDENT> for f in os.listdir(os.getcwd()): <NEW_LINE> <INDENT> if f.endswith(".py"): <NEW_LINE> <INDENT> module = f[:-3] <NEW_LINE> try: <NEW_LINE> <INDENT> mod = __import__(module) <NEW_LINE> self.methodMap(mod) <NEW_LINE> <DEDENT> except BaseException: <NEW_LINE> <INDENT> continue
traverse only current folder -> subfolders not implemented, but look at os.walk()
625941b6d18da76e235322e4
def isAttached(self): <NEW_LINE> <INDENT> return _soxt.SoXtMaterialEditor_isAttached(self)
isAttached(self) -> SbBool
625941b696565a6dacc8f4e8
def load_bom_from_url(self, url): <NEW_LINE> <INDENT> logging.debug('Loading %s', url) <NEW_LINE> try: <NEW_LINE> <INDENT> text = check_subprocess('gsutil cat ' + url) <NEW_LINE> return yaml.load(text) <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> self.__bad_files[self.url_to_bom_name(url)] = ex.message <NEW_LINE> maybe_log_exception('load_from_from_url', ex, action_msg='Skipping %s' % url) <NEW_LINE> return None
Returns the bom specification dict from a gcs url.
625941b685dfad0860c3ac6c
def fetch_annthyroid(data_home=None, download_if_missing=True, random_state=None, shuffle=False): <NEW_LINE> <INDENT> URL1 = ('http://archive.ics.uci.edu/ml/' 'machine-learning-databases/thyroid-disease/ann-train.data') <NEW_LINE> URL2 = ('http://archive.ics.uci.edu/ml/' 'machine-learning-databases/thyroid-disease/ann-test.data') <NEW_LINE> data_home = get_data_home(data_home=data_home) <NEW_LINE> annthyroid_dir = join(data_home, "annthyroid") <NEW_LINE> samples_path = _pkl_filepath(annthyroid_dir, "samples") <NEW_LINE> targets_path = _pkl_filepath(annthyroid_dir, "targets") <NEW_LINE> available = exists(samples_path) <NEW_LINE> if download_if_missing and not available: <NEW_LINE> <INDENT> makedirs(annthyroid_dir, exist_ok=True) <NEW_LINE> logger.warning("Downloading %s" % URL1) <NEW_LINE> f = BytesIO(urlopen(URL1).read()) <NEW_LINE> Xy1 = np.genfromtxt(f, delimiter=' ') <NEW_LINE> logger.warning("Downloading %s" % URL2) <NEW_LINE> f = BytesIO(urlopen(URL2).read()) <NEW_LINE> Xy2 = np.genfromtxt(f, delimiter=' ') <NEW_LINE> Xy = np.r_[Xy1, Xy2] <NEW_LINE> X = Xy[:, :-1] <NEW_LINE> y = Xy[:, -1].astype(np.int32) <NEW_LINE> joblib.dump(X, samples_path, compress=9) <NEW_LINE> joblib.dump(y, targets_path, compress=9) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> X, y <NEW_LINE> <DEDENT> except NameError: <NEW_LINE> <INDENT> X = joblib.load(samples_path) <NEW_LINE> y = joblib.load(targets_path) <NEW_LINE> <DEDENT> if shuffle: <NEW_LINE> <INDENT> ind = np.arange(X.shape[0]) <NEW_LINE> rng = check_random_state(random_state) <NEW_LINE> rng.shuffle(ind) <NEW_LINE> X = X[ind] <NEW_LINE> y = y[ind] <NEW_LINE> <DEDENT> return Bunch(data=X, target=y, DESCR=__doc__)
Load the annthyroid dataset, downloading it if necessary.
625941b6099cdd3c635f0a70
def __mkbox__(self): <NEW_LINE> <INDENT> corp = Corporation.by_uuid(self.get_argument('corporation_uuid')) <NEW_LINE> level = GameLevel.by_number(int(self.get_argument('game_level'))) <NEW_LINE> box = Box( name=unicode(self.get_argument('box_name')), description=unicode(self.get_argument('description')), difficulty=unicode(self.get_argument('difficulty')), corporation_id=corp.id, game_level_id=level.id, avatar=avatar, ) <NEW_LINE> dbsession.add(box) <NEW_LINE> dbsession.flush() <NEW_LINE> return box
Creates a box in the database
625941b607d97122c417869e
def tear_down(self): <NEW_LINE> <INDENT> print("\nDisconnecting...") <NEW_LINE> self.mygarage.disconnect()
Tear down functions
625941b6d7e4931a7ee9dd2f
def selection_sort(array): <NEW_LINE> <INDENT> for i in range (0 , len(array)-1): <NEW_LINE> <INDENT> for j in range (i + 1, len(array)): <NEW_LINE> <INDENT> if array[i] > array[j]: <NEW_LINE> <INDENT> swap(array,i,j)
Sorts using the Selection Sort algorithm Parameters: array: An array to sort Side effect: The input array is sorted in ascending order
625941b6507cdc57c6306ae6
def wordBreak(self, s, wordDict): <NEW_LINE> <INDENT> d = [False] * len(s) <NEW_LINE> for i in range(len(s)): <NEW_LINE> <INDENT> for word in wordDict: <NEW_LINE> <INDENT> if word == s[i-len(word)+1:i+1] and (d[i-len(word)] or i-len(word) == -1): <NEW_LINE> <INDENT> d[i] = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return d[-1]
:type s: str :type wordDict: List[str] :rtype: bool
625941b6379a373c97cfa95e
def test_toCSR(self): <NEW_LINE> <INDENT> pass
implicitly tested in test_convert_* functions
625941b607f4c71912b1129a
def point_arrays(self): <NEW_LINE> <INDENT> assert self.number_of_loops() == 1, "Point access is only available for single-loop paths" <NEW_LINE> return (self._xvalues[1:], self._yvalues[1:])
:returns: a two-tuple with x and y vectors Raises an exception if this path has subpaths, so iterate if you have multiple loops.
625941b60a50d4780f666ca2
def test_input_types(): <NEW_LINE> <INDENT> with pytest.raises(TypeError): <NEW_LINE> <INDENT> cb.plot_lines(x,nonlist_y,alpha =1.0, labels =['a','b','c','d','e','f','g','h','i'], palette = 'deutera', title = "Deutera Line Example", x_lab = "X label" , y_lab = "Y label" , legend_title = "Legend") <NEW_LINE> cb.plot_lines(nonlist_x,y_list,alpha ="1.0", labels =['a','b','c','d','e','f','g','h','i'], palette = 'deutera', title = "Deutera Line Example", x_lab = "X label" , y_lab = "Y label" , legend_title = "Legend")
Tests that input parameters are of the correct types
625941b66e29344779a62429
def horBar(fatness=1, median=MEDIAN, buttL=BUTT, buttR=BUTT): <NEW_LINE> <INDENT> horLine( boxPen, (0, median), (WIDTH, median), STROKE * fatness, buttL, buttR )
Horizontal bar.
625941b65fc7496912cc3799
def collide(self, obstacles): <NEW_LINE> <INDENT> for thing in obstacles: <NEW_LINE> <INDENT> if isinstance(thing, Circle): <NEW_LINE> <INDENT> delta = Vector2D(self._w / 2, self._h / 2) <NEW_LINE> dist = thing._pos - (self._pos + delta) <NEW_LINE> if dist.magnitude() < thing._radius + delta.magnitude(): <NEW_LINE> <INDENT> ul = thing._pos - self._pos <NEW_LINE> ur = thing._pos - (self._pos + Vector2D(self._w, 0)) <NEW_LINE> bl = thing._pos - (self._pos + Vector2D(0, self._h)) <NEW_LINE> br = thing._pos - (self._pos + Vector2D(self._w, self._h)) <NEW_LINE> corners = (ul, ur, bl, br) <NEW_LINE> for pos in corners: <NEW_LINE> <INDENT> if pos.magnitude() < thing._radius: <NEW_LINE> <INDENT> self._pos = self._pos - self._velocity <NEW_LINE> self._velocity *= -1 <NEW_LINE> break
Changes the velocity of the supercar when hitting an obstacle. obstacles: The list of obstacles that the car can collide with. The method only handles circular obstacles at the moment. It is advised to avoid obstacles close to each other or to the borders of _room, since hitting multiple objects between two updates is not taken into consideration by the method. Additionally, the change in velocity is not very scientific (assumes a head-on elastic collision).
625941b62c8b7c6e89b355d7
def testUserNoRoleToNoRoleWhileMentorRoleOffered(self): <NEW_LINE> <INDENT> profile = profile_utils.seedNDBProfile(self.program.key()) <NEW_LINE> connection = connection_utils.seed_new_connection( profile.key, self.org.key, org_role=connection_model.MENTOR_ROLE) <NEW_LINE> old_seen_by_org = connection.seen_by_org <NEW_LINE> old_seen_by_user = connection.seen_by_user <NEW_LINE> kwargs = { 'sponsor': self.sponsor.link_id, 'program': self.program.program_id, 'user': profile.profile_id, 'id': str(connection.key.id()) } <NEW_LINE> request = http.HttpRequest() <NEW_LINE> request.POST = {'role': connection_model.NO_ROLE} <NEW_LINE> data = request_data.RequestData(request, None, kwargs) <NEW_LINE> handler = connection_view.UserActionsFormHandler(self.view, url='unsed') <NEW_LINE> handler.handle(data, None, None) <NEW_LINE> connection = connection.key.get() <NEW_LINE> profile = profile.key.get() <NEW_LINE> self.assertEqual(connection.user_role, connection_model.NO_ROLE) <NEW_LINE> self.assertEqual(connection.org_role, connection_model.MENTOR_ROLE) <NEW_LINE> self.assertNotIn(self.org.key, profile.admin_for) <NEW_LINE> self.assertNotIn(self.org.key, profile.mentor_for) <NEW_LINE> self.assertEqual(connection.seen_by_user, old_seen_by_user) <NEW_LINE> self.assertEqual(connection.seen_by_org, old_seen_by_org) <NEW_LINE> query = connection_model.ConnectionMessage.query(ancestor=connection.key) <NEW_LINE> message = query.get() <NEW_LINE> self.assertIsNone(message)
Tests NO ROLE if user has no role and mentor role is offered.
625941b6d10714528d5ffaf2
def __init__(self, adj, expected_neighbors=None): <NEW_LINE> <INDENT> self.adj = adj <NEW_LINE> self.n_vertices = len(adj)-1 <NEW_LINE> self.expected_neighbors = expected_neighbors <NEW_LINE> self.vertices = range(1, len(adj))
adj: adjacency information about the graph. adj[n] is a list of the vertices that are adjacent to vertex n. adj[0] is **unused** expected_neighbors: If the graph is uniform, this is the degree of every vertex. (default: None)
625941b60c0af96317bb7ffd