Columns: code (string, 4 to 4.48k chars) · docstring (string, 1 to 6.45k chars) · _id (string, 24 chars)
def pay_order_interfaces(order: Order):
    return set_order_paid(order)
Set the order to the paid state and generate some related data; commits the transaction.
625941baac7a0e7691ed3f7e
def __init__(self, available_replicas=None, collision_count=None, conditions=None, observed_generation=None, ready_replicas=None, replicas=None, unavailable_replicas=None, updated_replicas=None):
    self._available_replicas = None
    self._collision_count = None
    self._conditions = None
    self._observed_generation = None
    self._ready_replicas = None
    self._replicas = None
    self._unavailable_replicas = None
    self._updated_replicas = None
    self.discriminator = None
    if available_replicas is not None:
        self.available_replicas = available_replicas
    if collision_count is not None:
        self.collision_count = collision_count
    if conditions is not None:
        self.conditions = conditions
    if observed_generation is not None:
        self.observed_generation = observed_generation
    if ready_replicas is not None:
        self.ready_replicas = ready_replicas
    if replicas is not None:
        self.replicas = replicas
    if unavailable_replicas is not None:
        self.unavailable_replicas = unavailable_replicas
    if updated_replicas is not None:
        self.updated_replicas = updated_replicas
ExtensionsV1beta1DeploymentStatus - a model defined in Swagger
625941ba26238365f5f0ed0f
def put(self, key, data):
    hashValue = self.hashFunction(key, self.size)
    if self.keys[hashValue] is None:
        self.keys[hashValue] = key
        self.data[hashValue] = data
    else:
        if self.keys[hashValue] == key:
            self.data[hashValue] = data
        else:
            newhashvalue = self.rehashFunction(hashValue, self.size)
            while self.keys[newhashvalue] is not None and self.keys[newhashvalue] != key:
                newhashvalue = self.rehashFunction(newhashvalue, self.size)
            if self.keys[newhashvalue] is None:
                self.keys[newhashvalue] = key
                self.data[newhashvalue] = data
            else:
                self.data[newhashvalue] = data
Given key/value pair, insert in hash table, or replace data.
625941ba2c8b7c6e89b35668
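A minimal usage sketch for `put` above. The `HashTable` host class is hypothetical (the record only shows `put`, which relies on `hashFunction`, `rehashFunction`, `keys`, `data`, and `size`), and it assumes the `put` function above is defined at module scope so it can be attached to the class:

# Hypothetical host class; a sketch, not the original source.
class HashTable:
    def __init__(self, size=11):
        self.size = size
        self.keys = [None] * size
        self.data = [None] * size

    def hashFunction(self, key, size):
        return key % size

    def rehashFunction(self, oldhash, size):
        return (oldhash + 1) % size  # linear probing

    put = put  # attach the `put` function shown above

table = HashTable()
table.put(20, 'cat')   # 20 % 11 == 9: slot 9 is free
table.put(31, 'dog')   # 31 % 11 == 9 collides; rehashing lands it in slot 10
table.put(31, 'bird')  # same key found at slot 10: data is replaced
print(table.keys[9], table.data[9])    # 20 cat
print(table.keys[10], table.data[10])  # 31 bird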
def test_delete_message(self):
    pass
Test case for delete_message Delete message # noqa: E501
625941ba091ae35668666e0a
def myPow(self, x, n):
    sign = n < 0
    n = abs(n)
    temp = x
    re = 1
    while n:
        if n % 2:
            re *= temp
        temp *= temp
        n //= 2  # integer halving; exponentiation by squaring
    return 1 / re if sign else re
:type x: float
:type n: int
:rtype: float
625941ba23849d37ff7b2f37
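A short usage sketch for `myPow` above (the `Solution` class holding the method is hypothetical, as is typical for this kind of snippet):

s = Solution()           # hypothetical class holding myPow above
print(s.myPow(2.0, 10))  # 1024.0: exponentiation by squaring, ~4 loop iterations
print(s.myPow(2.0, -2))  # 0.25: the sign flag inverts the result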
def form_key(self, *args, **kwargs):
    normalized = self.call_sign.normalize(*args, **kwargs)
    cache_key = CacheKey(*normalized.args, **normalized.kwargs)
    return cache_key.tuple
Form normalized cache key from args and kwargs
625941baab23a570cc250025
def test_update_reservation_conflict_override(self):
    student = User.query.filter_by(name='student').first()
    team_type = TeamType.query.filter_by(name='other_team').first()
    initial_team = Team(name="other_team_1")
    initial_team.team_type = team_type
    initial_team.members.append(student)
    database.get_db().add(initial_team)
    team_type = TeamType.query.filter_by(name='senior_project').first()
    override_team = Team(name="senior_project_1")
    override_team.team_type = team_type
    override_team.members.append(student)
    database.get_db().add(override_team)
    room = Room.query.first()
    start_time = datetime.datetime.now()
    end_time = start_time + datetime.timedelta(hours=1)
    token = student.generate_auth_token()
    reservation_low = Reservation(
        start=start_time, end=end_time, team=initial_team, room=room,
        created_by=student
    )
    database.get_db().add(reservation_low)
    reservation_high = Reservation(
        start=end_time, end=end_time + datetime.timedelta(hours=1),
        team=override_team, room=room, created_by=student
    )
    database.get_db().add(reservation_high)
    database.get_db().commit()
    reservation_high_id = reservation_high.id
    initial_team_id = initial_team.id
    override_team_id = override_team.id
    room_id = room.id
    rv = self.app.put(
        '/v1/reservation/' + str(reservation_high_id),
        data=json.dumps({
            "room_id": room_id,
            "start": start_time.isoformat(),
            "end": (end_time + datetime.timedelta(hours=1)).isoformat()
        }),
        content_type='application/json',
        headers={"Authorization": "Bearer " + token}
    )
    self.assertEqual(rv.status_code, 409)
    response_json = json.loads(rv.data)
    self.assertTrue("overridable" in response_json)
    self.assertTrue(response_json["overridable"])
    self.assertEqual(len(Reservation.query.filter_by(team_id=override_team_id).all()), 1)
    self.assertEqual(len(Reservation.query.filter_by(team_id=initial_team_id).all()), 1)
    rv = self.app.put(
        '/v1/reservation/' + str(reservation_high_id),
        data=json.dumps({
            "room_id": room_id,
            "start": start_time.isoformat(),
            "end": (end_time + datetime.timedelta(hours=1)).isoformat(),
            "override": True
        }),
        content_type='application/json',
        headers={"Authorization": "Bearer " + token}
    )
    self.assertEqual(rv.status_code, 204)
    self.assertEqual(len(Reservation.query.filter_by(team_id=override_team_id).all()), 1)
    self.assertEqual(len(Reservation.query.filter_by(team_id=initial_team_id).all()), 0)
Update a reservation, and then override it.
625941ba379a373c97cfa9f0
def set_image(self):
    if self.is_killing:
        image_index = int(floor(pygame.time.get_ticks() * Character.TILES_CHANGE_SPEED) % self.kill_length)
        self.image = self.kill_textures[image_index]
    elif self.is_running:
        image_index = int(floor(pygame.time.get_ticks() * Character.TILES_CHANGE_SPEED) % self.run_length)
        self.image = self.run_textures[image_index]
    else:
        image_index = int(floor(pygame.time.get_ticks() * Character.TILES_CHANGE_SPEED) % self.idle_length)
        self.image = self.idle_textures[image_index]
    if self.direction in [Directions.LEFT, Directions.UP]:
        self.image = pygame.transform.flip(self.image, True, False)
Method that sets 'image' field using character's textures
625941ba167d2b6e31218a3c
def get_provides_port(self):
    return copy(self._provides_port_ports)
Get all objects related with this object with relation `providesPort`.

:rtype: :py:class:`OrderedDict`
:return: A copy of the collection of objects related with this object.
625941ba2ae34c7f2600cfd7
def rang(self):
    return self.matriu.rank()
Returns the rank of the matrix
625941ba50485f2cf553cc3e
@app.route(API_V1 + "authentication", methods=['GET'])
def authentication():
    return jsonify(enabled=authentication_enabled())
check if basic authentication is enabled
625941ba1d351010ab8559c2
def ModFactoryImage(factory_bin, test_src, test_list_src):
    subprocess.check_call([_MOUNT_PARTITION_SCRIPT, _MOUNT_RW, factory_bin,
                           _MOUNT_PARTITION_INDEX, _MOUNT_POINT])
    try:
        test_sink = os.path.join(_MOUNT_POINT, _IMAGE_TEST_DIR)
        test_list_sink = os.path.join(_MOUNT_POINT, _IMAGE_TEST_LIST)
        distutils.dir_util.copy_tree(test_src, test_sink)
        distutils.file_util.copy_file(test_list_src, test_list_sink)
    finally:
        subprocess.check_call(['umount', _MOUNT_POINT])
Adds new tests and a test_list to the given factory image.

Args:
    factory_bin: path to factory image file.
    test_src: path to directory containing tests.
    test_list_src: path to test list.

Raises:
    CalledProcessError: if a script or command returns non-zero.
    DistutilsFileError: on file copy failure.
625941ba3cc13d1c6d3c7229
def score_obs(self, params):
    Xb = np.dot(self.exog, params)
    prob = self.cdf(Xb)
    return (self.endog - prob)[:, None] * self.exog
Logit model Jacobian of the log-likelihood for each observation

Parameters
----------
params : array-like
    The parameters of the model

Returns
-------
jac : array-like
    The derivative of the loglikelihood for each observation evaluated at `params`.

Notes
-----
.. math:: \frac{\partial\ln L_{i}}{\partial\beta}=\left(y_{i}-\Lambda_{i}\right)x_{i}

for observations :math:`i=1, ..., n`
625941bad486a94d0b98dff2
def addon(self, name):
    cmd = ["heroku", "addons:create", name, "--app", self.name]
    self._run(cmd)
Set up an addon
625941ba97e22403b379ce3e
def subtract_backgrounds(iss_data=[], ranges=[], btype='linear', avg=3):
    AVG = avg + 0.5
    for iss in iss_data:
        iss._background = dict()
        for key in iss.energy.keys():
            iss._background[key] = subtract_single_background(iss.get_xy(key), ranges, avg)
        iss.background_settings['type'] = btype
        iss.background_settings['ranges'] = ranges
        iss.background_settings['on'] = True
Subtract a linear background from defined 'ranges'. Return data above backgrounds.
625941baa17c0f6771cbdef9
def fetch_campaigns(self):
    if not self.id:
        log.error('It is necessary to save account before saving campaigns')
    response = api_call('act_%s/adcampaigns' % self.account_id)
    instances = []
    for resource in response.data:
        instance = AdCampaign.remote.get_or_create_from_resource(resource)
        instances += [instance]
    return instances
Retrieve and save all campaigns for account
625941baa8ecb033257d2f7b
def _refresh_os_mixins(self, extras):
    template_schema = 'http://schemas.openstack.org/template/os#'
    images = vm.retrieve_images(extras['nova_ctx'])
    os_lst = [occify_terms(item['name']) for item in images]
    occi_lst = [item.term for item in self.registry.get_categories(extras)
                if item.scheme == template_schema]
    for item in list(set(occi_lst) - set(os_lst)):
        self.registry.delete_mixin(os_mixins.OsTemplate(template_schema, item), extras)
    for img in images:
        if (img['container_format'] or img['disk_format']) in ('ari', 'aki'):
            msg = 'Not registering kernel/RAM image.'
            LOG.debug(msg)
            continue
        ctg_term = occify_terms(img['id'])
        os_template = os_mixins.OsTemplate(
            term=ctg_term,
            scheme=template_schema,
            os_id=img['id'],
            related=[infrastructure.OS_TEMPLATE],
            attributes=None,
            title='Image: %s' % get_image_name(img),
            location='/' + ctg_term + '/'
        )
        try:
            self.registry.get_backend(os_template, extras)
        except AttributeError:
            msg = 'Registering an OS image type as: %s' % str(os_template)
            LOG.debug(msg)
            self.register_backend(os_template, MIXIN_BACKEND)
Register images as OsTemplate mixins from information retrieved from glance (shared and user-specific).
625941ba9b70327d1c4e0c79
def __init__(self, parent):
    self._isshown = True
    self._parent = parent
    Control.__init__(self, parent)
Default class constructor. :param `parent`: the separator parent object, an instance of :class:`ButtonPanel`.
625941bae5267d203edcdb46
def _frac_y_to_pixel_y(self, frac_y: float) -> int:
    return self._frac_to_pixel(frac_y, self._surface.get_height())
Converts a fractional y value to its corresponding y pixel value

:param frac_y: The fractional location on the y axis. (0.0 - 1.0)
:return: An int that is the pixel value on the y axis
625941ba507cdc57c6306b7a
def split(self, output, motion_output=None):
    with self.outputs_lock:
        outputs = {}
        if output is not None:
            outputs[PiVideoFrameType.frame] = output
        if motion_output is not None:
            outputs[PiVideoFrameType.motion_data] = motion_output
        self._next_output.append(outputs)
    timeout = float(self._intra_period / self.parent.framerate) * 2.0
    if not self.event.wait(timeout):
        raise PiCameraRuntimeError(
            'Timed out waiting for an SPS header (ensure inline_headers '
            'is True and bitrate is not 0)')
    self.event.clear()
Called to switch the encoder's output. This method is called by :meth:`~PiCamera.split_recording` and :meth:`~PiCamera.record_sequence` to switch the encoder's :attr:`output` object to the *output* parameter (which can be a filename or a file-like object, as with :meth:`start`).
625941babaa26c4b54cb0fc8
def forward(self, x, depth, alpha):
    x = self.fc(x)
    assert depth < self.depth, "Requested output depth cannot be produced"
    y = self.initial_block(x)
    if depth > 0:
        for block in self.layers[:depth - 1]:
            y = block(y)
        residual = self.rgb_converters[depth - 1](self.temporaryUpsampler(y))
        straight = self.rgb_converters[depth](self.layers[depth - 1](y))
        out = (alpha * straight) + ((1 - alpha) * residual)
    else:
        out = self.rgb_converters[0](y)
    return out
forward pass of the Generator

:param x: input noise
:param depth: current depth from where output is required
:param alpha: value of alpha for fade-in effect
:return: y => output
625941ba4d74a7450ccd4069
def testTCPShortWrite(self):
    name = 'short-write.tcp-short.tests.powerdns.com.'
    query = dns.message.make_query(name, 'AXFR', 'IN')
    responses = []
    soa = dns.rrset.from_text(name, 60, dns.rdataclass.IN, dns.rdatatype.SOA,
                              'ns.' + name + ' hostmaster.' + name + ' 1 3600 3600 3600 60')
    soaResponse = dns.message.make_response(query)
    soaResponse.use_edns(edns=False)
    soaResponse.answer.append(soa)
    responses.append(soaResponse)
    response = dns.message.make_response(query)
    response.use_edns(edns=False)
    content = ""
    for i in range(200):
        if len(content) > 0:
            content = content + ', '
        content = content + (str(i) * 50)
    rrset = dns.rrset.from_text(name, 3600, dns.rdataclass.IN, dns.rdatatype.TXT, content)
    response.answer.append(rrset)
    for _ in range(200):
        responses.append(response)
    responses.append(soaResponse)
    conn = self.openTCPConnection()
    for response in responses:
        self._toResponderQueue.put(response, True, 2.0)
    self.sendTCPQueryOverConnection(conn, query)
    time.sleep(1)
    receivedResponses = []
    while True:
        datalen = conn.recv(2)
        if not datalen:
            break
        (datalen,) = struct.unpack("!H", datalen)
        data = b''
        remaining = datalen
        got = conn.recv(remaining)
        while got:
            data = data + got
            if len(data) == datalen:
                break
            remaining = remaining - len(got)
            if remaining <= 0:
                break
            got = conn.recv(remaining)
        if data and len(data) == datalen:
            receivedResponse = dns.message.from_wire(data)
            receivedResponses.append(receivedResponse)
    receivedQuery = None
    if not self._fromResponderQueue.empty():
        receivedQuery = self._fromResponderQueue.get(True, 2.0)
    conn.close()
    self.assertTrue(receivedQuery)
    receivedQuery.id = query.id
    self.assertEqual(query, receivedQuery)
    self.assertEqual(receivedResponses, responses)
TCP: Short write to client
625941ba8e71fb1e9831d653
def find_nearby_stops(x, y):
    radius = 500
    params = {
        "Circle": "{:.6f},{:.6f},{:d}".format(y, x, radius),
        "ReturnList": ",".join(RETURN_LIST),
    }
    url = format_url("/instant_V2", **params)
    request = pan.http.get(url, encoding="utf_8")
    return parsejson_find_nearby_stops(request)
Return a list of stops near given coordinates.
625941ba5166f23b2e1a4fff
def get_pf(pfname):
    if hasattr(stock, 'pfread'):
        return stock.pfread(pfname).pf2dict()
    elif hasattr(stock, 'pfget'):
        return stock.pfget(pfname)
    else:
        raise AttributeError("No pf function available")
Return a dict from a pf file
625941ba7c178a314d6ef2ff
def setLogThreshold(self, threshold):
    Log.getDefaultLog().setThreshold(threshold)
set the importance threshold for the default log. This and setDebugVerbosity are different APIs that affect the same underlying limit that controls how many messages get logged. Normally one uses one of the predefined values--Log.DEBUG, Log.INFO, Log.WARN, and Log.FATAL--as input.

@param threshold the minimum importance of the message that will get printed.
625941ba4a966d76dd550eb2
def test_one_of_each(self):
    results = {
        MockPlugin(): (0, ['C'], ''),
        MockPlugin(): (0, {u'a.txt': u'F'}, ''),
        MockPlugin(): (0, {u'a.txt': [[1, None, u'L']]}, '')}
    rc = ResultsCollater(results)
    self.assertEqual({u'info': 3, u'warn': 0, u'stop': 0}, rc.counts)
    cm, fm, lm = rc.messages
    self.assertEqual(1, len(cm))
    self.assertEqual(1, len(fm))
    self.assertEqual(1, len(lm))
    self.assertEqual(Message(None, type="info", body="C"), cm[0])
    self.assertEqual(Message(None, type="info", body="F", file=u'a.txt'), fm[0])
    self.assertEqual(Message(None, type="info", body="L", file=u'a.txt', line=1), lm[0])
One of each type of message is captured.
625941ba0a366e3fb873e6bd
def __init__(self, connector, identity, redfish_version=None, registries=None, root=None):
    super(SessionCollection, self).__init__(
        connector, identity, redfish_version=redfish_version,
        registries=registries, root=root)
A class representing a SessionCollection

:param connector: A Connector instance
:param identity: The identity of the Session resource
:param redfish_version: The version of RedFish. Used to construct the object according to schema of given version.
:param registries: Dict of Redfish Message Registry objects to be used in any resource that needs registries to parse messages
:param root: Sushy root object. Empty for Sushy root itself.
625941ba67a9b606de4a7d62
def ffmpeg_resize(video, output, size):
    cmd = [get_setting("FFMPEG_BINARY"), "-i", video,
           "-vf", "scale=%d:%d" % (size[0], size[1]),
           output]
    subprocess_call(cmd)
resizes ``video`` to new size ``size`` and write the result in file ``output``.
625941ba6fece00bbac2d5e1
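Usage of `ffmpeg_resize` is a single call; `size` is a (width, height) pair spliced into ffmpeg's scale filter (file names here are illustrative):

ffmpeg_resize("input.mp4", "output_640x360.mp4", (640, 360))
# runs: ffmpeg -i input.mp4 -vf scale=640:360 output_640x360.mp4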
def test_related_field(self):
    countries = self.env['res.country'].search([('code', '!=', False)], limit=100)
    self.assertEqual(len(countries), 100, "Not enough records in comodel 'res.country'")
    partners = self.env['res.partner'].create([
        {'name': country.code, 'country_id': country.id}
        for country in countries
    ])
    query_count = self.cr.sql_log_count
    self.env['ir.model.fields'].create({
        'model_id': self.env['ir.model']._get_id('res.partner'),
        'name': 'x_oh_box',
        'field_description': 'x_oh_box',
        'ttype': 'char',
    })
    query_count = self.cr.sql_log_count - query_count
    with self.assertQueryCount(query_count + 3):
        self.env['ir.model.fields'].create({
            'model_id': self.env['ir.model']._get_id('res.partner'),
            'name': 'x_oh_boy',
            'field_description': 'x_oh_boy',
            'ttype': 'char',
            'related': 'country_id.code',
            'store': True,
        })
    for partner in partners:
        self.assertEqual(partner.x_oh_boy, partner.country_id.code)
create a custom related field, and check filled values
625941bade87d2750b85fc34
def test_selection(self):
    path = self.tmp_repos()
    os.mkdir(path)
    python_file = '001_initial_.py'
    sqlite_upgrade_file = '001_sqlite_upgrade.sql'
    default_upgrade_file = '001_default_upgrade.sql'
    for file_ in [sqlite_upgrade_file, default_upgrade_file, python_file]:
        filepath = '%s/%s' % (path, file_)
        open(filepath, 'w').close()
    ver = Version(1, path, [sqlite_upgrade_file])
    self.assertEqual(os.path.basename(ver.script('sqlite', 'upgrade').path), sqlite_upgrade_file)
    ver = Version(1, path, [default_upgrade_file])
    self.assertEqual(os.path.basename(ver.script('default', 'upgrade').path), default_upgrade_file)
    ver = Version(1, path, [sqlite_upgrade_file, default_upgrade_file])
    self.assertEqual(os.path.basename(ver.script('sqlite', 'upgrade').path), sqlite_upgrade_file)
    ver = Version(1, path, [sqlite_upgrade_file, default_upgrade_file, python_file])
    self.assertEqual(os.path.basename(ver.script('postgres', 'upgrade').path), default_upgrade_file)
    ver = Version(1, path, [sqlite_upgrade_file, python_file])
    self.assertEqual(os.path.basename(ver.script('postgres', 'upgrade').path), python_file)
Verify right sql script is selected
625941bad8ef3951e32433e3
def bottom(self) -> None:
    current_index = ALL_WINDOWS.index(self)
    ALL_WINDOWS.pop(current_index)
    ALL_WINDOWS.insert(0, self)
Sets this window to the bottom of the drawing buffer (In other words, it will be drawn under everything else).
625941baa8370b7717052746
def get_namespace(self, bundle):
    return "{level}.project_data.%s" % (bundle.obj.get_space_replaced_name)
Hook to return the dotted path to this field based on the level and the name of the field The level name is formatted in the dehydrate method of the DataFormConfigResource
625941ba99cbb53fe6792a8d
def get_recipe_ids():
    return request_list('https://api.wynncraft.com/v2/recipe/list')
Gets a :class:`list` of :class:`str` objects containing all recipe IDs from the Wynncraft API.

:returns: A list of all recipeIDs as :class:`str`
:rtype: :class:`list`
625941ba656771135c3eb718
def vslice(self, value):
    if isinstance(value, datetime.datetime):
        value = date2num(value)
    ind = bisect.bisect_right(self['spectrogram']['xedges'], value)
    ans = dm.SpaceData()
    ans[self['spectrogram'].attrs['variables'][1]] = tb.bin_edges_to_center(self['spectrogram']['yedges'])
    ans['yedges'] = self['spectrogram']['yedges'].copy()
    ans['xedges'] = self['spectrogram']['xedges'][ind:ind+2].copy()
    ans[self['spectrogram'].attrs['variables'][2]] = self['spectrogram']['spectrogram'][:, ind:ind+1]
    return ans
slice a spectrogram at a given position along the x axis, maintains variable names from spectrogram

Parameters
==========
value : float or datetime.datetime
    the value to slice the spectrogram at

Returns
=======
out : datamodel.SpaceData
    spacedata containing the slice
625941baa8370b7717052747
def to_nx_graph(item, to_undirected=False):
    if isinstance(item, MineList):
        g = get_nx_graph(item)
        if to_undirected:
            g = g.to_undirected(reciprocal=False)
        return g
    elif isinstance(item, list):
        return [to_nx_graph(ele, to_undirected=to_undirected) for ele in item]
    elif isinstance(item, dict):
        return {key: to_nx_graph(value, to_undirected=to_undirected) for key, value in item.items()}
    elif isinstance(item, tuple):
        return tuple(to_nx_graph(ele, to_undirected=to_undirected) for ele in item)
    else:
        raise TypeError('unsupported item type: %r' % type(item))  # unsupported container type
Recursively transform an item or iterable into nx Graph.
625941bad4950a0f3b08c1f8
def get_root(self, **kwargs):
    kwargs['async_req'] = kwargs.get('async_req', False)
    kwargs['_return_http_data_only'] = kwargs.get('_return_http_data_only', True)
    kwargs['_preload_content'] = kwargs.get('_preload_content', True)
    kwargs['_request_timeout'] = kwargs.get('_request_timeout', None)
    kwargs['_check_input_type'] = kwargs.get('_check_input_type', True)
    kwargs['_check_return_type'] = kwargs.get('_check_return_type', True)
    kwargs['_host_index'] = kwargs.get('_host_index')
    return self.get_root_endpoint.call_with_http_info(**kwargs)
Root resource  # noqa: E501

Issue a `GET` request to the root resource to find all of the resource categories supported by the API  # noqa: E501

This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True

>>> thread = api.get_root(async_req=True)
>>> result = thread.get()

Keyword Args:
    _return_http_data_only (bool): response data without head status code and headers. Default is True.
    _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True.
    _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None.
    _check_input_type (bool): specifies if type checking should be done on the data sent to the server. Default is True.
    _check_return_type (bool): specifies if type checking should be done on the data received from the server. Default is True.
    _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration.
    async_req (bool): execute request asynchronously

Returns:
    RootResponse
    If the method is called asynchronously, returns the request thread.
625941ba45492302aab5e166
def __eq__(self, other):
    if not isinstance(other, V1EphemeralVolumeSource):
        return False
    return self.to_dict() == other.to_dict()
Returns true if both objects are equal
625941ba4e4d5625662d4282
def DescribePublicConfigs(self, request):
    try:
        params = request._serialize()
        body = self.call("DescribePublicConfigs", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribePublicConfigsResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
Query the list of public configuration items.

:param request: Request instance for DescribePublicConfigs.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribePublicConfigsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribePublicConfigsResponse`
625941ba94891a1f4081b94e
def __init__(self, user=None, password=None, confirm_password=None):
    self._user = None
    self._password = None
    self._confirm_password = None
    if user is not None:
        self.user = user
    if password is not None:
        self.password = password
    if confirm_password is not None:
        self.confirm_password = confirm_password
Body1 - a model defined in Swagger
625941ba30bbd722463cbc69
def set_thumbnail(self, pixmap):
    pixmap = pixmap.scaledToHeight(self.thumb_size, QtCore.Qt.SmoothTransformation)
    if pixmap.width() > self.thumb_size:
        extra = pixmap.width() - self.thumb_size
        pixmap = pixmap.copy(extra // 2, 0, self.thumb_size, self.thumb_size)  # integer pixel offset
    self.ui.thumbnail.setPixmap(pixmap)
Set a thumbnail given the current pixmap.
625941ba711fe17d82542218
def read(self, source_path):
    from cStringIO import StringIO
    with pelican_open(source_path) as source:
        text = StringIO(source.encode('utf8'))
    content = StringIO()
    ad = AsciiDocAPI()
    options = self.settings.get('ASCIIDOC_OPTIONS', [])
    options = self.default_options + options
    for o in options:
        ad.options(*o.split())
    backend = self.settings.get('ASCIIDOC_BACKEND', self.default_backend)
    ad.execute(text, content, backend=backend)
    content = content.getvalue().decode('utf8')
    metadata = {}
    for name, value in ad.asciidoc.document.attributes.items():
        name = name.lower()
        metadata[name] = self.process_metadata(name, value)
    if 'doctitle' in metadata:
        metadata['title'] = metadata['doctitle']
    return content, metadata
Parse content and metadata of asciidoc files
625941ba63f4b57ef0000fc7
def _iter_built_with_prepended(installed, infos):
    yield installed
    versions_found = {installed.version}
    for version, func in infos:
        if version in versions_found:
            continue
        candidate = func()
        if candidate is None:
            continue
        yield candidate
        versions_found.add(version)
Iterator for ``FoundCandidates``. This iterator is used when the resolver prefers the already-installed candidate and NOT to upgrade. The installed candidate is therefore always yielded first, and candidates from index come later in their normal ordering, except skipped when the version is already installed.
625941ba2ae34c7f2600cfd8
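A self-contained sketch of the ordering guarantee described above. The candidate stand-ins are hypothetical; the function only needs a `.version` attribute on the installed candidate:

from types import SimpleNamespace

installed = SimpleNamespace(version="1.0")
infos = [
    ("1.0", lambda: SimpleNamespace(version="1.0")),  # skipped: version already installed
    ("2.0", lambda: SimpleNamespace(version="2.0")),
    ("2.0", lambda: SimpleNamespace(version="2.0")),  # skipped: version already yielded
]
ordered = list(_iter_built_with_prepended(installed, iter(infos)))
print([c.version for c in ordered])  # ['1.0', '2.0'] - the installed candidate always comes first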
def set(self, start_time, end_time, weekdays=None, instance_ids=None):
    if start_time >= end_time:
        raise ValueError(
            'Start time can\'t be greater than or equal to end time'
        )
    start_time = start_time.isoformat()
    end_time = end_time.isoformat()
    updated = '{0}-{1}'.format(start_time, end_time)
    if weekdays:
        weekdays = [self.weekdays.index(weekday) for weekday in weekdays]
    else:
        weekdays = range(len(self.weekdays))
    instances = self.conn.get_only_instances(instance_ids=instance_ids)
    for instance in instances:
        if instance.state == 'terminated':
            continue
        times = instance.tags.get('times')
        if times is None:
            times = ';'.join([str(None)] * 7)
            tags = {
                'times': times,
                'tz': self.tz,
            }
            try:
                instance.add_tags(tags)
            except self.conn.ResponseError as e:
                raise e
        times = times.split(';')
        if self.iso:
            times.insert(0, None)
        for weekday in weekdays:
            try:
                times[weekday] = updated
            except IndexError:
                actual = len(times)
                desired = weekday
                while actual < desired:
                    times.append(None)
                    actual += 1
                times.append(updated)
            finally:
                if len(times) < 7:
                    diff = 7 - len(times)
                    times.extend([None] * diff)
        if self.iso:
            times.pop(0)
        times = ';'.join([str(time) for time in times])
        tags = {
            'times': times,
            'tz': self.tz,
        }
        try:
            instance.add_tags(tags)
        except self.conn.ResponseError as e:
            raise e
Create or update weekday run times for all or specific EC2 instances.

:type instance_ids: list
:param instance_ids: A list of strings of instance IDs
:type weekdays: list
:param weekdays: A list of strings of weekdays (e.g. `Monday`)
:type start_time: datetime.time
:param start_time: The instance starting time
:type end_time: datetime.time
:param end_time: The instance ending time
625941bad7e4931a7ee9ddc2
@contract(cadena='str', returns='int')
def contar_vocales(cadena: str) -> int:
    return sum(1 for _ in filter(lambda x: x in ("a", "e", "i", "o", "u"), cadena))
9: Return the number of vowels in a word.

>>> contar_vocales("murcielago")
5
625941ba4c3428357757c1d1
def GetReverseOrdering(self):
    return _itkLabelStatisticsKeepNObjectsImageFilterPython.itkLabelStatisticsKeepNObjectsImageFilterIUC2IUS2_GetReverseOrdering(self)
GetReverseOrdering(self) -> bool
625941bab5575c28eb68dea4
def Lderivs(self, E, n, prec, d):
    if prec > 64:
        raise ValueError("prec (=%s) must be at most 64" % prec)
    if prec < 1:
        raise ValueError("prec (=%s) must be at least 1" % prec)
    v = self('-sp %sp%sd%s %s' % (n, prec, d, self._curve_str(E)))
    return self._fix_err(v)
Return `0^{th}` to `d^{th}` derivatives of `L(\mathrm{Sym}^{(n)}(E),s)` to prec digits of precision, where `s` is the right edge if `n` is even and the center if `n` is odd.

INPUT:

- ``E`` - elliptic curve
- ``n`` - integer (even or odd)
- ``prec`` - integer
- ``d`` - integer

OUTPUT: a string, exactly as output by sympow

.. note::

   To use this function you may have to run a few commands like ``sympow('-new_data 1d2')``, each of which takes a few minutes. If this function fails it will indicate what commands have to be run.

EXAMPLES::

    sage: print(sympow.Lderivs(EllipticCurve('11a'), 1, 16, 2))  # not tested
    ...
    1n0: 2.538418608559107E-01
    1w0: 2.538418608559108E-01
    1n1: 1.032321840884568E-01
    1w1: 1.059251499158892E-01
    1n2: 3.238743180659171E-02
    1w2: 3.414818600982502E-02
625941ba0fa83653e4656e63
def __init__(self, count, minimum, mean, median, maximum, variance, stdev, stdev2, stdev3):
    self.__count = count
    self.__minimum = minimum
    self.__mean = mean
    self.__median = median
    self.__maximum = maximum
    self.__variance = variance
    self.__stdev = stdev
    self.__stdev2 = stdev2
    self.__stdev3 = stdev3
Constructor
625941ba38b623060ff0ac95
def default_speedup(self):
    return 100
Tracker seems to be race-free
625941bad8ef3951e32433e4
def create_master(self, name):
    repository_master_path = os.path.join(self.base_path, name)
    os.system("svnadmin create " + repository_master_path)
    self.created_path.append(repository_master_path)
    return repository_master_path
Create an svn master repository. Return the created master repository's directory path.
625941bafb3f5b602dac3536
def deposit(self, amount):
    assert isinstance(amount, money.Money)
    self._balance += amount
Adds amount of money to the balance.

Args:
    amount (money.Money): amount to be added to balance

Raises:
    AssertionError: If amount is not of the type "money"
625941ba4e4d5625662d4283
def texwithoutdagger(self):
    show = "".join([self.type[0], "_{", repr(self.index), "}"])
    return show
Returns a human-friendly string of the content
625941ba0a366e3fb873e6be
def t_OBJECT_CAST(t):
    t.value = "".join(t.value.split())
    return t
\( [\ \t]* ([Oo][Bb][Jj][Ee][Cc][Tt]) [\ \t]* \)
625941ba30bbd722463cbc6a
def initialize_connection(self):
    try:
        self.connection = psycopg2.connect(user=Connection.USER,
                                           port=Connection.PORT,
                                           database=Connection.DATABASE)
        self.cursor = self.connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
    except (Exception, psycopg2.Error) as error:
        print("error connecting to PostgreSQL", error)
Initialize the connection to the database.
625941ba66656f66f7cbc051
def OnFilterLeftClick(self, event=None):
    itm = self.list.HitTest(event.GetPosition())[0]
    if event.ShiftDown() and itm > -1:
        self.OnEnableFilter(itm)
    else:
        event.Skip()
Actions triggered by left mouse button click on filter list.

Parameters:
    event - wx.Event
625941bacad5886f8bd26e89
def QueryOpMode(self):
    try:
        reply_text = self.__SendCommand('OM?')
        print('MFC is in ' + reply_text)
        return True
    except Warning:
        print('Unsuccessful communication with MFC ' + str(self.address))
        return False
Checks to see if the MFC is in run mode or in ______ mode.

Parameters: None

Returns: boolean value which indicates whether communication with MFC was successful
625941ba5f7d997b87174942
def get_long_name(self):
    return self.jsettings["settings"]["long_name"]
Returns module long name.
625941ba3539df3088e2e1f2
def time_trace_plots(metadata, data, agent_ids):
    dim = 2
    time_vec = np.arange(start=0, stop=metadata['max_time'] + 2*metadata['dt'], step=metadata['dt'])
    for id_ in agent_ids:
        a = data['agents'][id_]
        est_data = a.local_filter.state_history
        cov_data = a.local_filter.cov_history
        truth_data = a.true_state
        b = data['baseline']
        bl_est_data = b.state_history
        bl_cov_data = b.cov_history
        _, idx = a.get_location(a.agent_id)
        bl_idx = np.arange(a.agent_id*a.num_states, a.agent_id*a.num_states + a.num_states)
        est_data_vec = np.concatenate([np.array(x[idx]) for x in est_data], axis=1)
        truth_data_vec = np.concatenate([np.expand_dims(x, axis=1) for x in truth_data], axis=1)
        var_data_vec = np.concatenate([np.expand_dims(np.diag(x[np.ix_(idx, idx)]), axis=1) for x in cov_data], axis=1)
        bl_est_data_vec = np.concatenate([np.expand_dims(np.array(x[bl_idx]), axis=1) for x in bl_est_data], axis=1)
        bl_var_data_vec = np.concatenate([np.expand_dims(np.diag(x[np.ix_(bl_idx, bl_idx)]), axis=1) for x in bl_cov_data], axis=1)
        plt.figure(id_)
        plt.rc('text', usetex=True)
        plt.rc('font', family='serif')
        plt.grid()
        plt.plot(time_vec, (est_data_vec[dim, :] - truth_data_vec[dim, :]), 'r')
        plt.plot(time_vec, 2*np.sqrt(var_data_vec[dim, :]), 'r--')
        plt.plot(time_vec, -2*np.sqrt(var_data_vec[dim, :]), 'r--')
        plt.plot(time_vec, (bl_est_data_vec[dim, :] - truth_data_vec[dim, :]), 'g')
        plt.xlabel('Time [s]')
        plt.ylabel('Est error [m]')
        plt.title(r'Agent {} ownship $\xi$ est. err: $\delta={}$, $\tau_g={}$, msg drop={}'.format(id_+1, metadata['delta'], metadata['tau_goal'], metadata['msg_drop_prob']))
        plt.legend(['Est error', r'$\pm 2\sigma$', '', 'BL est error', r'$\pm 2\sigma$', ''])
    plt.show()
Creates time trace plots for provided agent ids

Inputs:
    metadata  -- sim run metadata
    data      -- sim results data structure
    agent_ids -- list of agent ids to plot

Outputs:
    plots -- matplotlib plot objects
625941bacdde0d52a9e52ed6
def do_gridsearch(self, useDropOut=False, multi=False):
    seed = 7
    numpy.random.seed(seed)
    model = KerasClassifier(build_fn=self.create_model, verbose=0)
    learn_rate = [0.0001, 0.001, 0.1]
    dropout_rate = [0.1, 0.2, 0.3]
    X = self.trainDF[var_list].values
    Y = self.trainDF['isSignal'].values
    w = self.class_weights
    param_grid = dict(learn_rate=learn_rate, dropout_rate=dropout_rate)
    grid = GridSearchCV(estimator=model, param_grid=param_grid, n_jobs=-1)
    self.grid_result = grid.fit(X, Y)
    if not os.path.exists(self.outdir):
        os.makedirs(self.outdir)
    outFile = open(os.path.join(self.outdir, 'gridSearchResults.txt'), 'w')
    outFile.write("Best: %f using %s \n\n" % (self.grid_result.best_score_, self.grid_result.best_params_))
    means = self.grid_result.cv_results_['mean_test_score']
    stds = self.grid_result.cv_results_['std_test_score']
    params = self.grid_result.cv_results_['params']
    for mean, stdev, param in zip(means, stds, params):
        outFile.write("%f (%f) with: %r\n" % (mean, stdev, param))
    outFile.close()
Implementation of the sklearn grid search for hyper parameter tuning, making use of kfolds cross validation. Pass a dictionary of lists of parameters to test on. Choose number of cores to run on with n_jobs, -1 is all of them
625941ba91f36d47f21ac396
@asyncio.coroutine
def async_trigger(hass, config, action):
    entity_id = config.get(CONF_ENTITY_ID)
    from_state = config.get(CONF_FROM, MATCH_ALL)
    to_state = get_deprecated(config, CONF_TO, CONF_STATE, MATCH_ALL)
    time_delta = config.get(CONF_FOR)
    async_remove_state_for_cancel = None
    async_remove_state_for_listener = None
    match_all = (from_state == MATCH_ALL and to_state == MATCH_ALL)

    @callback
    def clear_listener():
        nonlocal async_remove_state_for_cancel, async_remove_state_for_listener
        if async_remove_state_for_listener is not None:
            async_remove_state_for_listener()
            async_remove_state_for_listener = None
        if async_remove_state_for_cancel is not None:
            async_remove_state_for_cancel()
            async_remove_state_for_cancel = None

    @callback
    def state_automation_listener(entity, from_s, to_s):
        nonlocal async_remove_state_for_cancel, async_remove_state_for_listener

        def call_action():
            hass.async_run_job(action, {
                'trigger': {
                    'platform': 'state',
                    'entity_id': entity,
                    'from_state': from_s,
                    'to_state': to_s,
                    'for': time_delta,
                }
            })

        if (not match_all and from_s is not None and to_s is not None and
                from_s.last_changed == to_s.last_changed):
            return

        if time_delta is None:
            call_action()
            return

        @callback
        def state_for_listener(now):
            nonlocal async_remove_state_for_listener
            async_remove_state_for_listener = None
            clear_listener()
            call_action()

        @callback
        def state_for_cancel_listener(entity, inner_from_s, inner_to_s):
            if inner_to_s.state == to_s.state:
                return
            clear_listener()

        clear_listener()
        async_remove_state_for_listener = async_track_point_in_utc_time(
            hass, state_for_listener, dt_util.utcnow() + time_delta)
        async_remove_state_for_cancel = async_track_state_change(
            hass, entity, state_for_cancel_listener)

    unsub = async_track_state_change(
        hass, entity_id, state_automation_listener, from_state, to_state)

    @callback
    def async_remove():
        unsub()
        clear_listener()

    return async_remove
Listen for state changes based on configuration.
625941ba1f037a2d8b9460a6
def set_control_register(ate_inst, value):
    wb_addr = 0x00001000 + 0x403
    assert ate_inst.write(wb_addr, value & 0x1)
:param ate_inst:
:param value:
:return:
625941ba99fddb7c1c9de23a
def getLocalAreaDensity(self):
    return _algorithms.Inhibition2_getLocalAreaDensity(self)
getLocalAreaDensity(self) -> nta::algorithms::Inhibition2::value_type
625941ba7b25080760e39301
def serialize(self, buff):
    try:
        _x = self.laser_scan_topic
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        _x = self.multi_echo_laser_scan_topic
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        _x = self.point_cloud2_topic
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        _x = self.imu_topic
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        _x = self.odometry_topic
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
    except struct.error as se:
        self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te:
        self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
serialize message into buffer

:param buff: buffer, ``StringIO``
625941ba711fe17d82542219
def read_dataset(dataset, path, limit=0, concat=False, all_years_label=False):
    _, trigram_vecs_data = make_dataset.read_trigram_vecs('./data/raw/{}_cluster/'.format(dataset))
    df = pd.read_csv(path)
    if limit != 0:
        df = df.head(limit)
    labels = df['y'].values
    trigram_idx_strings = df.loc[:, df.columns != 'y'].values
    parsed_trigram_idxs = [list(map(lambda x: ast.literal_eval(x), example)) for example in trigram_idx_strings]
    trigram_vecs = np.array(build_features.map_idxs_to_vecs(parsed_trigram_idxs, trigram_vecs_data))
    if all_years_label:
        trigram_idx = np.array(parsed_trigram_idxs)[:, :, 1]
        pre_labels = (trigram_idx[:, 1:] != trigram_idx[:, :-1]).astype(int)
        labels = np.concatenate((pre_labels, np.expand_dims(labels, axis=1)), axis=1)
        print(labels.shape)
    if concat:
        trigram_vecs = np.reshape(trigram_vecs, [len(df.columns) - 1, len(df.index), -1])
    else:
        trigram_vecs = np.sum(trigram_vecs, axis=2)
        trigram_vecs = np.moveaxis(trigram_vecs, 1, 0)
    return trigram_vecs, labels
Reads the data set from given path, expecting it to contain a 'y' column with the label and each year in its own column containing a number of trigram indexes. Limit sets the maximum number of examples to read, zero meaning no limit. If concat is true each of the trigrams in a year is concatenated, if false they are instead summed elementwise.
625941bafff4ab517eb2f2e1
def test_export_pdf_table_with_images_and_comments(self, fail=False):
    self.user_login('test')
    self.export_pdf_with_images_and_comments(fail=False, pdf_type="table")
    self.export_pdf_token(pdf_type="table")
Tests exporting a workout as a pdf as the owner user with images and comments
625941ba29b78933be1e5560
def p_statement(p):
    p[0] = p[1]
statement : affectation
625941ba851cf427c661a3b9
def network_status(self):
    return self._transmit("network status")
dump network status
625941ba16aa5153ce36231f
def get_table_instance(self, p_table_name, p_schema='city4age_sr'):
    return super(SRPostORM, self).get_table_instance(p_table_name, p_schema)
By giving a name of a table, this method returns the base instance

:param p_table_name: The name of the table
:param p_schema: The name of the given schema
:return: A Base instance of the table to be computed
625941babe7bc26dc91cd4ac
def ddx(self):
    N = self.owner.grid.num_points
    g = 1 / (2.0 * self.dr)
    col_below = np.zeros(N) - g
    col_above = np.zeros(N) + g
    D = sparse.dia_matrix(([col_below, col_above], [-1, 1]), shape=(N, N))
    return D
Finite difference matrix for df/dx (centered)

Returns
-------
:class:`scipy.sparse.dia_matrix`
    Matrix which implements the centered finite difference approximation to df/dx
625941ba6fb2d068a760ef41
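A standalone sketch of the same construction (scipy's `dia_matrix` places each data row on the diagonal given by the matching offset), checked against f(x) = x², for which the centered difference is exact at interior points:

import numpy as np
from scipy import sparse

N, dr = 6, 0.5
g = 1 / (2.0 * dr)
D = sparse.dia_matrix(([np.zeros(N) - g, np.zeros(N) + g], [-1, 1]), shape=(N, N))

x = np.arange(N) * dr
print(D @ (x ** 2))  # interior entries equal 2*x; the two end rows are one-sided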
def _find_conflict_paths(self, end_clauses, relevant_clauses):
    lit_to_clauses = defaultdict(set)
    for c in relevant_clauses:
        for lit in c:
            lit_to_clauses[abs(lit)].add(c)
    lit_to_clauses = dict(lit_to_clauses)

    def get_neighbors(clause):
        clause_sets = (lit_to_clauses[abs(lit)] for lit in clause)
        return sorted(set.union(*clause_sets), key=lambda c: c.lits)

    if len(end_clauses) < 2:
        return (end_clauses,)
    ends = OrderedDict.fromkeys(end_clauses)
    ends = tuple(ends.keys())

    def jobs_in_path(path):
        return len([clause for clause in path if clause.rule.reason.is_job])

    def tails(seq):
        for i in range(1, len(seq)):
            yield seq[i:]

    raw_paths = list(itertools.chain.from_iterable(
        breadth_first_search(start, get_neighbors, rest)
        for start, rest in zip(ends, tails(ends))))
    empty = (0, ())
    path_groups = itertools.groupby(
        sorted(raw_paths, key=jobs_in_path, reverse=True),
        key=jobs_in_path)
    equal_job_paths = tuple(next(path_groups, empty)[1])
    seen_clauses = set()
    paths = []
    for path in sorted(equal_job_paths, key=len):
        p = set(path)
        if p - seen_clauses:
            seen_clauses.update(p)
            paths.append(path)
    return tuple(paths)
Return a tuple of paths representing conflicts between a set of clauses. See https://github.com/enthought/sat-solver/wiki/Unsatisfiability-Error-Messages for discussion about how best to implement this.
625941ba30dc7b7665901811
def random_distort(img, angle):
    new_img = img.astype(float)
    value = np.random.randint(-28, 28)
    if value > 0:
        mask = (new_img[:, :, 0] + value) > 255
    if value <= 0:
        mask = (new_img[:, :, 0] + value) < 0
    new_img[:, :, 0] += np.where(mask, 0, value)
    h, w = new_img.shape[0:2]
    mid = np.random.randint(0, w)
    factor = np.random.uniform(0.6, 0.8)
    if np.random.rand() > .5:
        new_img[:, 0:mid, 0] *= factor
    else:
        new_img[:, mid:w, 0] *= factor
    return (new_img.astype(np.uint8), angle)
method for adding random distortion to dataset images, including random brightness adjust and shadow
625941babe8e80087fb20aef
def add_datetime(orig_datetime, units='seconds=1'):
    k = dict()
    for u in units.split(","):
        x = u.split('=')
        p = x[0].lower()
        k[p if p[-1] == 's' else p + 's'] = int(x[1])
    return orig_datetime + relativedelta.relativedelta(**k)
Add delta to datetime

:param orig_datetime: the base datetime/date
:param units: the units to be added, e.g. month=1,day=-2,hour=3,minute=-7
:return: added result
625941ba76d4e153a657e9d7
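A usage sketch for `add_datetime` above; the unit names are pluralised automatically, so both 'month=1' and 'months=1' feed `relativedelta`:

import datetime
from dateutil import relativedelta

base = datetime.datetime(2020, 1, 31)
print(add_datetime(base, 'month=1,days=-2'))
# 2020-02-27 00:00:00 (Jan 31 + 1 month clamps to Feb 29, then -2 days)
print(add_datetime(base))  # the default adds one second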
def testTransactionTemplateItem(self):
    pass
Test TransactionTemplateItem
625941ba5fc7496912cc382d
def _check_weights(self):
    for one_layer in self.model.layers:
        print('layer = ', one_layer)
        print('weights =', one_layer.get_weights())
        print('weights shape = ', np.shape(one_layer.get_weights()))
Print the weights of layers. For debugging purposes; not used anywhere.
625941ba23e79379d52ee40e
def test_template_cli_5(self):
    with self.assertRaises(subprocess.CalledProcessError) as error:
        self.run_test_on_template_fixture(number=5)
    self.assertIn(
        "NoInputOptionNotHandledByTemplateError",
        error.exception.stderr.decode()
    )
Run the test on the test template 5. - The initialize.py doesn't cater for no-input, raise exception.
625941ba7b25080760e39302
def agdispQaqc(model, csv_path):
    pd_obj_inputs = pd.read_csv(csv_path, index_col=0, header=None, skiprows=1,
                                skipfooter=46, engine='python')
    pd_obj_inputs = pd_obj_inputs.drop(labels=pd_obj_inputs.columns[range(4)], axis=1)
    pd_obj_inputs.index.name = None
    pd_obj_inputs.columns = pd_obj_inputs.columns - 5
    pd_obj_exp_out = pd.read_csv(csv_path, index_col=0, header=None, skiprows=50,
                                 engine='python')
    pd_obj_exp_out = pd_obj_exp_out.drop(labels=pd_obj_exp_out.columns[range(4)], axis=1)
    pd_obj_exp_out.index.name = None
    pd_obj_exp_out.columns = pd_obj_exp_out.columns - 5
    return pd_obj_inputs, pd_obj_exp_out
Read in QAQC CSV as Pandas DataFrame, removing any unneeded columns, setting the index_col name to None, and renumbering the data columns.
625941baaad79263cf3908e3
def rhobca_rate(params, state, dx, Sca):
    N = params['N']
    Np = params['Np']
    rhob_ca = state['rhob_ca']
    v = state['v']
    drhobca_dt = np.zeros(N)
    drhobca_dt[0] = -1 / dx[0] * (-rhob_ca[1] * v[1] + rhob_ca[0] * v[0]) + Sca[0]
    drhobca_dt[1:Np] = -1 / dx[1:Np] * (-rhob_ca[2:Np + 1] * v[2:Np + 1] + rhob_ca[1:Np] * v[1:Np]) + Sca[1:Np]
    return drhobca_dt
Calculate char accumulation rate.
625941bae1aae11d1e749b5c
def log_model_neptune(
    checkpoint_path: pathlib.Path,
    save_directory: pathlib.Path,
    name: str,
    neptune_logger,
):
    checkpoint = torch.load(checkpoint_path)
    model = checkpoint["hyper_parameters"]["model"]
    torch.save(model.state_dict(), save_directory / name)
    neptune_logger.experiment.set_property("checkpoint_name", checkpoint_path.name)
    neptune_logger.experiment.log_artifact(str(save_directory / name))
    if os.path.isfile(save_directory / name):
        os.remove(save_directory / name)
Saves the model to disk, uploads it to neptune and removes it again.
625941badc8b845886cb53dc
def test_stdout(test, capfd):
    logging.disable(logging.CRITICAL)
    try:
        process = sh.sh("-c", "echo test1; echo test2 >&2; echo тест3; echo тест4 >&2;",
                        _stdout=STDOUT)
        process.execute()
        assert process.stdout() == ""
        assert process.stderr() == "test2\nтест4\n"
        stdout, stderr = capfd.readouterr()
        assert stdout == "test1\nтест3\n"
        assert stderr == ""
    finally:
        logging.disable(logging.NOTSET)
Tests output to stdout.
625941ba50485f2cf553cc40
def test_convert_bad_remote_pnglist(self): <NEW_LINE> <INDENT> self.assertAllTargetFormatsRaise(Exception, [ 'http://localhost:62010/www/icon16x16.png', 'http://localhost:62010/www/foo.png', 'http://localhost:62010/www/icon32x32.png' ])
Test conversion from bad remote source.
625941ba1d351010ab8559c4
def alter_list_data_to_serialize(self, request, data): <NEW_LINE> <INDENT> shape_type = request.GET.get('shape_type', 'simple') <NEW_LINE> for obj in data['objects']: <NEW_LINE> <INDENT> if shape_type != 'simple': <NEW_LINE> <INDENT> del obj.data['simple_shape'] <NEW_LINE> <DEDENT> if shape_type != 'full': <NEW_LINE> <INDENT> del obj.data['shape'] <NEW_LINE> <DEDENT> <DEDENT> return data
Allow the selection of simple, full or no shapes using a query parameter.
625941ba0c0af96317bb8090
def _check_for_peers(self) -> None: <NEW_LINE> <INDENT> for peer in list(self.peers.values()): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> their_peers = self._recoverable_get( peer, '--node', 'internal', 'peers', '--edit', default={}) <NEW_LINE> <DEDENT> except exceptions.FailedQuery: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if self.info['identity'] in their_peers: <NEW_LINE> <INDENT> del their_peers[self.info['identity']] <NEW_LINE> <DEDENT> for their_peer in their_peers: <NEW_LINE> <INDENT> if their_peer not in self.peers: <NEW_LINE> <INDENT> host = their_peers[their_peer]['host'] <NEW_LINE> port = int(their_peers[their_peer]['port']) <NEW_LINE> self.peers_to_join.add((host, port,))
none -> none. Check our peers' peer lists to see if they know anyone we don't; also works as a heartbeat to our connected peers.
625941ba3c8af77a43ae3645
def parse_xml(self, xml_doc): <NEW_LINE> <INDENT> self.__reset_dom__() <NEW_LINE> try: <NEW_LINE> <INDENT> self.dom = xml.dom.minidom.parseString(xml_doc) <NEW_LINE> self.dom.normalize() <NEW_LINE> <DEDENT> except xml.parsers.expat.ExpatError as msg: <NEW_LINE> <INDENT> self.errors.append((2001, None, _T('Invalid XML document. ExpatError: %s' % msg))) <NEW_LINE> <DEDENT> except LookupError as msg: <NEW_LINE> <INDENT> self.errors.append((2001, None, _T('Document has wrong encoding. LookupError: %s' % msg)))
Parse XML into DOM object.
625941ba4c3428357757c1d2
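A minimal sketch of the minidom round trip this method wraps (the errors list and the _T translation helper are specific to the surrounding class):

import xml.dom.minidom

dom = xml.dom.minidom.parseString("<epp><command/></epp>")
dom.normalize()
print(dom.documentElement.tagName)  # epp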
def test_got_buckets_return(self): <NEW_LINE> <INDENT> self.kz_partitioner.acquired = True <NEW_LINE> self.kz_partitioner.__iter__.return_value = [2, 3] <NEW_LINE> self.buckets_got = None <NEW_LINE> d = Deferred() <NEW_LINE> def got_buckets(_buckets): <NEW_LINE> <INDENT> self.buckets_got = _buckets <NEW_LINE> return d <NEW_LINE> <DEDENT> partitioner = Partitioner( self.kz_client, 10, self.path, self.buckets, self.time_boundary, self.log, got_buckets, clock=self.clock) <NEW_LINE> partitioner.startService() <NEW_LINE> self.log.msg.assert_called_once_with( 'Got buckets {buckets}', buckets=[2, 3], old_buckets=[], path=self.path, otter_msg_type='partition-acquired') <NEW_LINE> self.assertEqual(self.buckets_got, [2, 3]) <NEW_LINE> self.clock.advance(10) <NEW_LINE> sd = partitioner.stopService() <NEW_LINE> self.assertNoResult(sd) <NEW_LINE> d.callback(None) <NEW_LINE> self.successResultOf(sd)
`got_buckets` return value is propagated to the timer service, which ensures that the service stops only after the returned deferred is fired
625941ba97e22403b379ce40
def remoteCompletion(self, text): <NEW_LINE> <INDENT> self.__sendCommand("%s%s\n" % (RequestCompletion, text))
Public slot to get a list of possible commandline completions from the remote client. @param text the text to be completed (string or QString)
625941ba07f4c71912b1132f
def rmtree(directory: str, ignore_errors=False): <NEW_LINE> <INDENT> LOG.debug("Removing directory tree %s", directory) <NEW_LINE> shutil.rmtree( directory, ignore_errors=ignore_errors, onerror=handle_remove_readonly )
Remove directory and contents.
625941ba507cdc57c6306b7c
def get_skyblock_news(self): <NEW_LINE> <INDENT> response = general.do_request(BASE_URL, 'skyblock/news', {'key': self.api_key}).json() <NEW_LINE> return response['items']
Returns a list of skyblock news items, each with a title, description and thread
625941bae5267d203edcdb48
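A hypothetical sketch of the underlying request — BASE_URL, the endpoint shape, and the key parameter are taken from the method body, while the concrete host and key value are assumptions:

import requests

BASE_URL = 'https://api.hypixel.net/'  # assumed value of the module constant
response = requests.get(BASE_URL + 'skyblock/news',
                        params={'key': 'YOUR_API_KEY'}).json()
items = response['items']  # each item carries title, description and thread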
def update_file(self, name, message, content): <NEW_LINE> <INDENT> self.sha = self.get_contents(name)['sha'] <NEW_LINE> self.payload = json.dumps({'message': message, 'content': base64.b64encode(content).decode(), 'sha': self.sha}) <NEW_LINE> r = self.session.put(self.url + name, data=self.payload) <NEW_LINE> if r.status_code == 200: <NEW_LINE> <INDENT> return r.json() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> r.raise_for_status()
Updates a file in Github repo Args: name: Name of the file to be updated message: Commit message content: Content of commit
625941ba3539df3088e2e1f3
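For reference, the PUT payload shape the GitHub contents API expects — a sketch where the commit message, file body and sha are illustrative. In Python 3 the base64 output must be decoded to str before json.dumps, which is why the method above decodes it:

import base64, json

payload = json.dumps({
    'message': 'Update README',
    'content': base64.b64encode(b'new file body').decode(),  # API wants base64 text
    'sha': 'abc123',  # blob sha of the existing file, from get_contents()
})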
def main(): <NEW_LINE> <INDENT> os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'todoRest.settings') <NEW_LINE> try: <NEW_LINE> <INDENT> from django.core.management import execute_from_command_line <NEW_LINE> <DEDENT> except ImportError as exc: <NEW_LINE> <INDENT> raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc <NEW_LINE> <DEDENT> execute_from_command_line(sys.argv)
Run administrative tasks.
625941baa05bb46b383ec6cc
def get_disks_in_pci_address(pci_address): <NEW_LINE> <INDENT> disks_path = "/dev/disk/by-path/" <NEW_LINE> disk_list = [] <NEW_LINE> for dev in os.listdir(disks_path): <NEW_LINE> <INDENT> if pci_address in dev: <NEW_LINE> <INDENT> link = os.readlink(os.path.join(disks_path, dev)) <NEW_LINE> disk_list.append(os.path.abspath(os.path.join(disks_path, link))) <NEW_LINE> <DEDENT> <DEDENT> return disk_list
Gets disks in a PCI address. :param pci_address: Any segment of a PCI address (1f, 0000:00:1f, ...) :return: list of disks in a PCI address.
625941ba8e71fb1e9831d655
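A usage sketch (the address and the resolved device names are illustrative):

disks = get_disks_in_pci_address('0000:00:1f')
print(disks)  # e.g. ['/dev/sda', '/dev/sdb'] for a SATA controller at 00:1f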
def skip_forward(x, n_out_channels): <NEW_LINE> <INDENT> N, n_in_channels, H, W = x.shape <NEW_LINE> assert (n_in_channels == n_out_channels) or ( n_out_channels == n_in_channels*2), 'Invalid n_out_channels' <NEW_LINE> skip = np.array(x, copy=True) <NEW_LINE> pool_cache, downsampled, skip_p = None, False, 0 <NEW_LINE> if n_out_channels > n_in_channels: <NEW_LINE> <INDENT> pool_param = {'pool_width': 2, 'pool_height': 2, 'stride': 2} <NEW_LINE> skip, pool_cache = avg_pool_forward(skip, pool_param) <NEW_LINE> p = skip_p = n_in_channels // 2 <NEW_LINE> skip = np.pad(skip, ((0, 0), (p, p), (0, 0), (0, 0)), mode='constant') <NEW_LINE> downsampled = True <NEW_LINE> <DEDENT> return skip, (pool_cache, downsampled, skip_p)
Computes the forward pass for a skip connection. The input x has shape (N, d_1, d_2, d_3) where x[i] is the ith input. If n_out_channels is equal to 2*d_1, downsampling and padding are applied; otherwise the input is replicated in the output. Inputs: x - Input data, of shape (N, d_1, d_2, d_3) n_out_channels - Number of channels in output Returns a tuple of: - skip: output, of shape (N, n_out_channels, d_2/2, d_3/2) - cache: (pool_cache, downsampled, skip_p)
625941bae64d504609d746e8
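A worked shape check for the downsampling branch, assuming skip_forward and the avg_pool_forward helper it calls are in scope:

import numpy as np

x = np.random.randn(2, 4, 8, 8)  # N=2, 4 input channels, 8x8 spatial
skip, cache = skip_forward(x, n_out_channels=8)
print(skip.shape)  # (2, 8, 4, 4): 2x2 average-pooled, channels padded 4 -> 8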
def test_makeAMZDate(self): <NEW_LINE> <INDENT> instant = datetime.datetime(2016, 11, 11, 2, 45, 50) <NEW_LINE> self.assertEqual(makeAMZDate(instant), "20161111T024550Z")
A L{datetime.datetime} instance is formatted according to the convention for AMZ dates.
625941ba5e10d32532c5edd6
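The convention the test pins down is plain strftime territory — a sketch of what makeAMZDate presumably reduces to (the real helper lives in the codebase under test):

import datetime

print(datetime.datetime(2016, 11, 11, 2, 45, 50).strftime('%Y%m%dT%H%M%SZ'))
# 20161111T024550Z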
def _find_lineage_for_domains(config, domains): <NEW_LINE> <INDENT> if config.duplicate: <NEW_LINE> <INDENT> return "newcert", None <NEW_LINE> <DEDENT> ident_names_cert, subset_names_cert = cert_manager.find_duplicative_certs(config, domains) <NEW_LINE> if ident_names_cert is None and subset_names_cert is None: <NEW_LINE> <INDENT> return "newcert", None <NEW_LINE> <DEDENT> if ident_names_cert is not None: <NEW_LINE> <INDENT> return _handle_identical_cert_request(config, ident_names_cert) <NEW_LINE> <DEDENT> elif subset_names_cert is not None: <NEW_LINE> <INDENT> return _handle_subset_cert_request(config, domains, subset_names_cert)
Determine whether there are duplicated names and how to handle them (renew, reinstall, newcert, or raising an error to stop the client run if the user chooses to cancel the operation when prompted). :returns: Two-element tuple containing desired new-certificate behavior as a string token ("reinstall", "renew", or "newcert"), plus either a RenewableCert instance or None if renewal shouldn't occur. :raises .Error: If the user would like to rerun the client again.
625941ba4a966d76dd550eb5
def subSplats( self, res=None, req=None ): <NEW_LINE> <INDENT> if req is not None: <NEW_LINE> <INDENT> source = self.request_dict <NEW_LINE> dikt = req <NEW_LINE> <DEDENT> elif res is not None: <NEW_LINE> <INDENT> source = self.response_dict <NEW_LINE> dikt = res <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( "RequestTranslator.subSplats(): must get one of res or req!" ) <NEW_LINE> <DEDENT> for key, val in source.items(): <NEW_LINE> <INDENT> if key not in dikt.keys(): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for innerK, innerV in val.items(): <NEW_LINE> <INDENT> if innerK == '*': <NEW_LINE> <INDENT> for k, v in innerV.items(): <NEW_LINE> <INDENT> if v is None: <NEW_LINE> <INDENT> if k in dikt.keys(): <NEW_LINE> <INDENT> del dikt[ k ] <NEW_LINE> <DEDENT> <DEDENT> elif v == '*': <NEW_LINE> <INDENT> dikt[ k ] = dikt[ key ] <NEW_LINE> <DEDENT> elif type( v ) == types.FunctionType: <NEW_LINE> <INDENT> if k in dikt.keys(): <NEW_LINE> <INDENT> dikt[ k ] = v( dikt[ k ] ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> dikt[ k ] = v() <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> res[ k ] = None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> dikt[ k ] = v <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return dikt
Asterisk (the Splat) matches all values. A translation dictionary entry of the form: 'field': { '*': { 'other': 'value' || None }} or even 'field': { '*': { 'other': '*' }} is handled in this function. The second mechanism provides a way to switch original vals to a new key. E.g. the subdict: 'field1': { '*': { 'otherfield': '*' } } results in the following switch: request[ otherfield ] = request[ field1 ]
625941baa8370b7717052749
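A worked example of the splat rules, assuming a translator instance with subSplats in scope; the field names are illustrative:

translator.response_dict = {'field1': {'*': {'otherfield': '*', 'dropme': None}}}
res = {'field1': 'hello', 'dropme': 'x'}
print(translator.subSplats(res=res))
# {'field1': 'hello', 'otherfield': 'hello'} -- value copied, 'dropme' deleted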
def print_board(player_board, opponent_board, hitmiss, score_dic, fleet_list): <NEW_LINE> <INDENT> alphabet = ["A","B","C","D","E","F","G","H","I","J"] <NEW_LINE> if sys.platform.startswith('linux') or sys.platform.startswith('darwin'): <NEW_LINE> <INDENT> os.system('clear') <NEW_LINE> <DEDENT> elif sys.platform.startswith('win'): <NEW_LINE> <INDENT> os.system('cls') <NEW_LINE> <DEDENT> print("Tracking Board") <NEW_LINE> for i in range(11): <NEW_LINE> <INDENT> line="" <NEW_LINE> for j in range(11): <NEW_LINE> <INDENT> if (i==0) and (j!=0): <NEW_LINE> <INDENT> line+=" "+alphabet[j-1]+" " <NEW_LINE> <DEDENT> elif (j==0) and (i!=0): <NEW_LINE> <INDENT> line+="{:>2} |".format(i) <NEW_LINE> <DEDENT> elif (i==0) and (j==0): <NEW_LINE> <INDENT> line+=" " <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if opponent_board[i-1][j-1] not in ["*", "#"]: <NEW_LINE> <INDENT> line+=" |" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> line+=" "+opponent_board[i-1][j-1]+" |" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> print(line) <NEW_LINE> print(" -----------------------------------------") <NEW_LINE> <DEDENT> print("\nYour Board:") <NEW_LINE> for i in range(11): <NEW_LINE> <INDENT> line="" <NEW_LINE> for j in range(11): <NEW_LINE> <INDENT> if (i==0) and (j!=0): <NEW_LINE> <INDENT> line+=" "+alphabet[j-1]+" " <NEW_LINE> <DEDENT> elif (j==0) and (i!=0): <NEW_LINE> <INDENT> line+="{:>2} |".format(i) <NEW_LINE> <DEDENT> elif (i==0) and (j==0): <NEW_LINE> <INDENT> line+=" " <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> line+=" "+player_board[i-1][j-1]+" |" <NEW_LINE> <DEDENT> <DEDENT> print(line) <NEW_LINE> print(" -----------------------------------------") <NEW_LINE> <DEDENT> if hitmiss == "MISS": <NEW_LINE> <INDENT> print(hitmiss) <NEW_LINE> <DEDENT> elif hitmiss in ["A","B","S","D","M"]: <NEW_LINE> <INDENT> print("HIT") <NEW_LINE> fleet_list = sunk_ships(score_dic,fleet_list) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("")
Prints the boards to the screen, including scores and sunk ships.
625941babe7bc26dc91cd4ad
def _get_index_name_by_column(table, column_name): <NEW_LINE> <INDENT> for _, index_metadata in six.iteritems(table.indexes): <NEW_LINE> <INDENT> options = dict(index_metadata.index_options) <NEW_LINE> if 'target' in options and options['target'] == column_name: <NEW_LINE> <INDENT> return index_metadata.name <NEW_LINE> <DEDENT> <DEDENT> return None
Find the index name for a given table and column.
625941bac4546d3d9de728d9
def execute(self, sql, args): <NEW_LINE> <INDENT> self.__cur.execute(sql, args) <NEW_LINE> return self.__cur
Execute a SQL statement with the given arguments and return the database cursor.
625941ba498bea3a759b9959
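A usage sketch with parameterized SQL; the %s placeholder style is the MySQLdb/psycopg2 convention and depends on the DB-API driver behind the private cursor:

cur = db.execute("SELECT name FROM users WHERE id = %s", (42,))  # db is an instance of the class above
row = cur.fetchone()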
def upload(self, filename): <NEW_LINE> <INDENT> response = None <NEW_LINE> try: <NEW_LINE> <INDENT> with open(filename, 'rb') as file: <NEW_LINE> <INDENT> body = file.read() <NEW_LINE> <DEDENT> url = urlparse(self._url) <NEW_LINE> response = post_multipart(url, fields=None, files=[('myfile', path.basename(filename), body)], certificate=self._certificate, timeout=self._timeout) <NEW_LINE> <DEDENT> except HTTPException as e: <NEW_LINE> <INDENT> os.remove(file.name) <NEW_LINE> logger.error('Unable to upload the measurements file. Error: %s' % e) <NEW_LINE> <DEDENT> except SSLError as e: <NEW_LINE> <INDENT> os.remove(file.name) <NEW_LINE> logger.error('SSL error while uploading the measurements file: %s' % e) <NEW_LINE> <DEDENT> return response
Uploads the file. Returns the response received from the repository, or None if there was a problem.
625941ba187af65679ca4fc6
def _enqueue(self, queue_name, next_queue, plugin, command, server, channel, nick, message): <NEW_LINE> <INDENT> user = nick.split('!')[0] <NEW_LINE> if not user in self._command_queues[queue_name]: <NEW_LINE> <INDENT> self._command_queues[queue_name][user] = [] <NEW_LINE> <DEDENT> self._command_queues[queue_name][user].append( (next_queue, plugin, command, server, channel, nick, ' '.join(message)))
Adds a command to the appropriate queue for later processing.
625941ba96565a6dacc8f57d
def _allMetrics(self): <NEW_LINE> <INDENT> return [metric for an in self.ps.analyses for metric in an.metrics]
Returns all metrics except for ones in transient analysis
625941ba566aa707497f4421
def save_training_result(self, im_name, im, dir=False, epoch=0): <NEW_LINE> <INDENT> if dir: <NEW_LINE> <INDENT> save_path = os.path.join(self.sample, str(epoch)) <NEW_LINE> if not os.path.exists(save_path): <NEW_LINE> <INDENT> os.mkdir(save_path) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> save_path = self.sample <NEW_LINE> <DEDENT> if isinstance(im, Variable): <NEW_LINE> <INDENT> im = im.cpu() if im.is_cuda else im <NEW_LINE> im = VAR2PIL(torch.clamp(im, min=0.0, max=1.0)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> im = to_pil_image(torch.clamp(im, min=0.0, max=1.0)) <NEW_LINE> <DEDENT> im.save(os.path.join(save_path, im_name))
Save a test result image during the training procedure; accepts [tensor, Variable]. :param im_name: file name for the saved image :param im: image data (torch tensor or Variable, clamped to [0, 1]) :param dir: if True, save under /sample/epoch/i.png :param epoch: epoch number used for the subdirectory :return:
625941bad53ae8145f87a11e
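A hypothetical usage sketch — the trainer object and the image tensor are illustrative assumptions:

import torch

im = torch.rand(3, 64, 64)  # CHW float tensor in [0, 1]
trainer.save_training_result('0.png', im, dir=True, epoch=5)
# lands at <trainer.sample>/5/0.png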