code — string, lengths 4 to 4.48k
docstring — string, lengths 1 to 6.45k
_id — string, length 24
def load_module(self, chain):
    module_names = chain.split('.')
    if module_names[0] in MODULES:
        base_module = MODULES[module_names[0]]
        for module in module_names:
            if (isinstance(base_module, ModuleType)
                    and module in base_module.submodules):
                base_module = base_module.submodules[module]
            else:
                self.report_issue("Submodule not found", {"name": chain})
        return base_module
    else:
        self.report_issue("Module not found", {"name": chain})
        return ModuleType()
Finds the module in the set of available modules.

Args:
    chain (str): A chain of module imports (e.g., "matplotlib.pyplot")

Returns:
    ModuleType: The specific module with its members, or an empty module type.
625941b9c432627299f04abb
def make_move_on_list(numbers):
    assert len(numbers) == 4
    # Shift non-zero entries to the front, preserving their order.
    numbers.sort(key=bool, reverse=True)
    newLst = []
    i = 0
    while i < 4:
        newLst.append(numbers[i])
        if not numbers[i] or i == 3:
            break
        # Merge equal neighbours into a single doubled tile.
        if numbers[i] == numbers[i + 1]:
            newLst[-1] += numbers[i]
            i += 1
        i += 1
    # Pad with zeros back to length 4.
    newLst += [0] * (4 - len(newLst))
    return newLst
Make a move given a list of 4 numbers using the rules of the 2048 game. Argument: numbers -- a list of 4 numbers Return value: the list after moving the numbers to the left.
625941b930dc7b76659017e1
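A quick usage sketch for the record above (hypothetical inputs; zeros are empty cells): merges happen once per pair, scanning left to right.

# Hypothetical calls, assuming make_move_on_list is in scope.
print(make_move_on_list([2, 2, 4, 0]))  # [4, 4, 0, 0] -- the two 2s merge into a 4
print(make_move_on_list([2, 2, 2, 2]))  # [4, 4, 0, 0] -- each pair merges exactly once
print(make_move_on_list([4, 0, 0, 4]))  # [8, 0, 0, 0] -- zeros are compacted away first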
def update_site_forward(apps, schema_editor):
    Site = apps.get_model('sites', 'Site')
    Site.objects.update_or_create(
        id=settings.SITE_ID,
        defaults={'domain': '127.0.0.1:8000', 'name': 'sozluk'}
    )
Set site domain and name.
625941b95510c4643540f26e
def OnTouchLeave(self, *args):
    pass
OnTouchLeave(self: ContentElement, e: TouchEventArgs)

Provides class handling for the System.Windows.ContentElement.TouchLeave routed event
that occurs when a touch moves from inside to outside the bounds of this element.

e: A System.Windows.Input.TouchEventArgs that contains the event data.
625941b929b78933be1e5530
def s_bigword(x):
    return s_long(x, endian=BIG_ENDIAN)
Big word.
625941b9e1aae11d1e749b2b
def roles_from_payload(
    self, payload: discord.RawReactionActionEvent
) -> List[discord.Role]:
    cursor = self.bot.database.execute(
        "SELECT data FROM rolekiosk_entries WHERE oid=:oid",
        {
            "oid": self.bot.objects.by_data(
                m=[payload.channel_id, payload.message_id]
            )
        },
    )
    result = cursor.fetchone()
    if not result:
        return None
    data = json.loads(result["data"])
    return [
        self.bot.get_guild(payload.guild_id).get_role(number)
        for (emoji, number) in data
        if emoji in (payload.emoji.name, payload.emoji.id)
    ]
Turn a reaction payload into a list of roles to apply or take away.
625941b9d99f1b3c44c6740e
def read_pressure(self):
    # Compensation algorithm from the BMP085 datasheet; all steps are
    # integer arithmetic, so use // (the original relied on Python 2's /,
    # which breaks the later bit shifts under Python 3).
    UT = self.read_raw_temp()
    UP = self.read_raw_pressure()
    X1 = ((UT - self.cal_AC6) * self.cal_AC5) >> 15
    X2 = (self.cal_MC << 11) // (X1 + self.cal_MD)
    B5 = X1 + X2
    B6 = B5 - 4000
    X1 = (self.cal_B2 * (B6 * B6) >> 12) >> 11
    X2 = (self.cal_AC2 * B6) >> 11
    X3 = X1 + X2
    B3 = (((self.cal_AC1 * 4 + X3) << self._mode) + 2) // 4
    X1 = (self.cal_AC3 * B6) >> 13
    X2 = (self.cal_B1 * ((B6 * B6) >> 12)) >> 16
    X3 = ((X1 + X2) + 2) >> 2
    B4 = (self.cal_AC4 * (X3 + 32768)) >> 15
    B7 = (UP - B3) * (50000 >> self._mode)
    if B7 < 0x80000000:
        p = (B7 * 2) // B4
    else:
        p = (B7 // B4) * 2
    X1 = (p >> 8) * (p >> 8)
    X1 = (X1 * 3038) >> 16
    X2 = (-7357 * p) >> 16
    p = p + ((X1 + X2 + 3791) >> 4)
    return p
Gets the compensated pressure in Pascals.
625941b9baa26c4b54cb0f9a
def ldap_connect():
    try:
        conn = ldap.initialize(settings.LDAP_SERVER)
        conn.simple_bind_s(settings.LDAP_AUTH_DN, settings.LDAP_PASS)
        print("\nConnected to LDAP server {server}\n".format(server=settings.LDAP_SERVER))
        return conn
    except ldap.SERVER_DOWN:
        print("Connection to LDAP server failed")
        return None
Returns an LDAP connection object, to be used by various search functions.
625941b90c0af96317bb8060
def testGetVotersToDial(self):
    voters1 = self.campaign1.getVotersToDial()
    voters2 = self.campaign2.getVotersToDial()
    self.assertEqual(voters2.count(), 1)
    self.assertFalse(voters2.filter(pk__in=[3, 4, 5, 6]).exists())
    self.assertTrue(voters1.filter(pk=7).exists())
    self.assertFalse(voters1.filter(pk=8).exists())
    self.assertEqual(voters1.count(), 3)
Test the Campaign model's getVotersToDial method, which should filter, out of the
query set returned by getVotersToContact, voters without valid phone contact
information. Phone contact information is invalid if both phone_numberX fields are
blank, or if no available phone number has fewer than two reports of being invalid.
625941b92c8b7c6e89b3563a
def __init__(self, dbconn):
    self.dbconn = dbconn
    self.db = None
Initialize the database.

:param dbconn: a DbConnection object
625941b910dbd63aa1bd2a26
def fitness(self, outputs):
    # Sort criteria names for a deterministic ordering (dict.keys() is a
    # view in Python 3, so build a sorted list instead of calling .sort()).
    names = sorted(outputs)
    values = [(outputs[k], self.targets[k][0]) for k in names]
    weights = np.array([self.targets[k][1] for k in names])
    norm_error = lambda v: ((max(v) - min(v)) / float(max(v))) ** 2
    errors = np.array([norm_error(v) for v in values])
    E = np.dot(weights, errors) / (weights.sum() + errors.shape[0])
    return 1 / E
Gets scalar objective fitness value for a set of output criteria
625941b9a17c0f6771cbdecb
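Written out as math, the weighted error the fitness function above computes is (a sketch; symbols mirror the code, with $e_k$ the squared normalized error of criterion $k$, $w_k$ its weight, and $n$ the number of criteria):

\[
e_k = \left(\frac{\max(v_k) - \min(v_k)}{\max(v_k)}\right)^{2}, \qquad
E = \frac{\sum_k w_k e_k}{\sum_k w_k + n}, \qquad
\text{fitness} = \frac{1}{E}
\]

where $v_k$ is the (output, target) pair for criterion $k$.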
def _predict(self, X):
    self._find_neighbours(X)
    return [max_occurence(row) for row in self._neighbours]
Prediction using a majority classifier.

:return: the majority class among the neighbours of each row
625941b9435de62698dfdacb
def is_same_entry(entry_1, entry_2):
    if entry_1 == entry_2:
        return True
    if os.path.realpath(entry_1) == os.path.realpath(entry_2):
        return True
    if (os.path.basename(entry_1) == os.path.basename(entry_2)
            and (os.path.basename(os.path.dirname(entry_1))
                 == os.path.basename(os.path.dirname(entry_2)))
            and os.path.basename(os.path.dirname(entry_1)).startswith('tmp')):
        return True
    return False
Return True iff both paths can be considered to point to the same module.

This is the case if and only if at least one of these conditions holds:

- They are equal.
- Their real paths are equal.
- They share the same temporary work directory and module file name.
625941b921bff66bcd6847cc
def tcl_set_generic(self, name, value):
    return 'set_parameter -name ' + str(name) + ' ' + str(value) + '\n'
Return the Quartus API TCL command string: set_parameter -name *<name>* *<value>*
625941b9b57a9660fec336f8
def extract_absolute_tempo_envelope(self, start_beat: float = 0, step_size: float = 0.1,
                                    tolerance: float = 0.005) -> TempoEnvelope:
    if self.is_master():
        return self.tempo_history.as_tempo_envelope()
    clocks = self.inheritance()
    tempo_histories = [deepcopy(clock.tempo_history) for clock in clocks]
    tempo_histories[0].go_to_beat(start_beat)
    initial_rate = tempo_histories[0].rate
    for i in range(1, len(tempo_histories)):
        tempo_histories[i].go_to_beat(clocks[i - 1].parent_offset + tempo_histories[i - 1].time())
        initial_rate *= tempo_histories[i].rate

    def step_and_get_beat_length(step):
        beat_change = step
        for tempo_history in tempo_histories:
            _, beat_change = tempo_history.advance(beat_change)
        return beat_change / step

    output_curve = TempoEnvelope(initial_rate, units="rate")
    while any(tempo_envelope.beat() < tempo_envelope.length()
              for tempo_envelope in tempo_histories):
        start_level = output_curve.end_level()
        halfway_level = step_and_get_beat_length(step_size / 2)
        end_level = step_and_get_beat_length(step_size / 2)
        if min(start_level, end_level) < halfway_level < max(start_level, end_level):
            output_curve.append_segment(end_level, step_size, tolerance=tolerance,
                                        halfway_level=halfway_level)
        else:
            output_curve.append_segment(halfway_level, step_size / 2, tolerance=tolerance)
            output_curve.append_segment(end_level, step_size / 2, tolerance=tolerance)
    return output_curve
Extracts this clock's absolute TempoHistory (as opposed to the TempoHistory relative
to the parent clock). Used when creating a score from this clock's point of view.

:param start_beat: where on the TempoHistory to start
:param step_size: granularity
:param tolerance: error tolerance with which we allow a step to simply extend the
    previous segment rather than create a new one.
:return: A TempoHistory representing the true variation of tempo on this clock, as
    filtered through the changing rates of its parents.
625941b95e10d32532c5eda6
def post(self):
    if request.data["username"].strip() and request.data["password"].strip():
        user = User.query.filter_by(username=request.data["username"]).first()
        if user:
            res = {"message": "User already exists. Please login"}
            return make_response(jsonify(res)), 409
        else:
            try:
                data = request.data
                username = data["username"]
                password = data["password"]
                email = data["email"]
                if email and EMAIL_REGEX.match(email):
                    user = User(username=username, password=password, email=email)
                    user.save()
                    response = {"message": 'User registration successful.'}
                    return make_response(jsonify(response)), 201
                else:
                    response = {"message": 'Cannot register with invalid email.'}
                    return make_response(jsonify(response)), 400
            except Exception as e:
                response = {"message": str(e)}
                return make_response(jsonify(response)), 401
    else:
        response = {"message": "Error. The username or password cannot be empty"}
        return make_response(jsonify(response)), 400
Handle POST requests to /auth/register/.
625941b9099cdd3c635f0ad4
def list(self, filter=None, top=None, skiptoken=None, custom_headers=None, raw=False, **operation_config):
    def internal_paging(next_link=None, raw=False):
        if not next_link:
            url = self.list.metadata['url']
            path_format_arguments = {
                'subscriptionId': self._serialize.url(
                    "self.config.subscription_id", self.config.subscription_id, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)
            query_parameters = {}
            if filter is not None:
                query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
            if top is not None:
                query_parameters['$top'] = self._serialize.query("top", top, 'int', maximum=1000, minimum=1)
            if skiptoken is not None:
                query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        else:
            url = next_link
            query_parameters = {}
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header(
                "self.config.accept_language", self.config.accept_language, 'str')
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)
        if response.status_code not in [200]:
            raise models.ErrorResponseException(self._deserialize, response)
        return response

    deserialized = models.MarketplacePaged(internal_paging, self._deserialize.dependencies)
    if raw:
        header_dict = {}
        client_raw_response = models.MarketplacePaged(
            internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response
    return deserialized
Lists the marketplaces for a scope by subscriptionId. Marketplaces are available
via this API only for May 1, 2014 or later.

:param filter: May be used to filter marketplaces by properties/usageEnd (Utc time),
    properties/usageStart (Utc time), properties/resourceGroup,
    properties/instanceName or properties/instanceId. The filter supports 'eq',
    'lt', 'gt', 'le', 'ge', and 'and'. It does not currently support 'ne', 'or',
    or 'not'.
:type filter: str
:param top: May be used to limit the number of results to the most recent N
    marketplaces.
:type top: int
:param skiptoken: Skiptoken is only used if a previous operation returned a partial
    result. If a previous response contains a nextLink element, the value of the
    nextLink element will include a skiptoken parameter that specifies a starting
    point to use for subsequent calls.
:type skiptoken: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the deserialized response
:param operation_config: :ref:`Operation configuration
    overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of Marketplace
:rtype: ~azure.mgmt.consumption.models.MarketplacePaged[~azure.mgmt.consumption.models.Marketplace]
:raises: :class:`ErrorResponseException<azure.mgmt.consumption.models.ErrorResponseException>`
625941b9796e427e537b043a
def encoder_cfg(self, max_rpm: int, encdr_res: int, res_power: int, stall_ms: int):
    darr = []
    darr.extend(split_high_low(max_rpm))
    darr.extend(split_high_low(encdr_res))
    darr.append(res_power)
    darr.append(stall_ms)
    self.b.write_i2c_block_data(self.addr, 2, darr)
Write encoder configuration data to the device.
625941b973bcbd0ca4b2bef5
def on_open_database(self, event):
    dialog = wx.FileDialog(
        parent=self, message="Open", defaultFile="",
        wildcard="Skype database (*.db)|*.db|All files|*.*",
        style=wx.FD_FILE_MUST_EXIST | wx.FD_OPEN | wx.RESIZE_BORDER
    )
    if wx.ID_OK == dialog.ShowModal():
        filename = dialog.GetPath()
        if filename:
            self.update_database_list(filename)
            self.load_database_page(filename)
Handler for open database menu or button, displays a file dialog and loads the chosen database.
625941b97b25080760e392d2
def productExceptSelf(self, nums):
    # result[i] accumulates the product of everything to the left of i
    # (via l) and everything to the right of i (via r) in a single pass.
    result = [1] * len(nums)
    l, r = 1, 1
    for i in range(len(nums)):
        result[i] *= l
        l *= nums[i]
        result[len(nums) - i - 1] *= r
        r *= nums[len(nums) - i - 1]
    return result
:type nums: List[int]
:rtype: List[int]
625941b95166f23b2e1a4fd1
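A quick sanity check for the method above (assuming it lives on a hypothetical Solution class, as in LeetCode submissions): every slot ends up holding the product of all other entries, with no division.

# Hypothetical usage; Solution is assumed to wrap the method above.
s = Solution()
print(s.productExceptSelf([1, 2, 3, 4]))  # [24, 12, 8, 6]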
def pr_request_grandfather_name(default):
    root_org = current.auth.root_org_name()
    if root_org in (ARCS, BRCS, IRCS):
        return True
    return False
Whether to request GrandFather's Name in AddPersonWidget2
625941b9be383301e01b5304
def load(self):
    return dict()
Load from file and return a dict. Subclasses of BaseParser should overwrite this method.

:return: a list of loaded values. Each value is a dict with all keys in config and its value.
625941b930c21e258bdfa315
def get(self, sid):
    return FunctionVersionContext(
        self._version,
        service_sid=self._solution['service_sid'],
        function_sid=self._solution['function_sid'],
        sid=sid,
    )
Constructs a FunctionVersionContext.

:param sid: The SID that identifies the Function Version resource to fetch

:returns: twilio.rest.serverless.v1.service.function.function_version.FunctionVersionContext
:rtype: twilio.rest.serverless.v1.service.function.function_version.FunctionVersionContext
625941b963f4b57ef0000f99
def AdamOptimizer(loss, learningRate, clipGrads=False):
    optimizer = tf.train.AdamOptimizer(learningRate)
    if clipGrads:
        gradients, variables = zip(*optimizer.compute_gradients(loss))
        gradients, _ = tf.clip_by_global_norm(gradients, 5.0)
        trainOperation = optimizer.apply_gradients(zip(gradients, variables))
    else:
        gradients, variables = zip(*optimizer.compute_gradients(loss))
        trainOperation = optimizer.apply_gradients(zip(gradients, variables))
    return trainOperation, gradients
Given the network loss, constructs the training op needed to train the network
using Adam optimization.

Returns: the operation that begins the backpropagation through the network
(i.e., the operation that minimizes the loss function).
625941b94d74a7450ccd403a
def extract_model_info(self, ell_model, labels_file):
    if not ell_model:
        self.model_name = "d_I160x160x3CMCMCMCMCMCMC1AS"
        self.ell_model = self.model_name + ".ell"
    else:
        self.ell_model = ell_model
        name, ext = os.path.splitext(ell_model)
        if ext.lower() == ".zip":
            with zipfile.ZipFile(ell_model) as myzip:
                extract = True
                for name in myzip.namelist():
                    if os.path.splitext(name)[1] == ".ell":
                        filename = os.path.join(self.test_dir, name)
                        if os.path.isfile(filename) and os.path.getmtime(self.ell_model) < os.path.getmtime(filename):
                            extract = False
                if extract:
                    filename = myzip.extract(myzip.filelist[0], self.test_dir)
                    self.logger.info("extracted: {}".format(filename))
            if filename != "":
                self.ell_model = filename
            else:
                self.ell_model = ell_model
        self.model_name, ext = os.path.splitext(basename(self.ell_model))
        if ext.lower() == ".zip":
            self.model_name, ext = os.path.splitext(self.model_name)
    if not labels_file:
        self.labels_file = None
    else:
        self.labels_file = os.path.abspath(labels_file)
Extracts information about a model
625941b9c4546d3d9de728a8
def tearDown(self):
    self.router.connect()
    self.router.purge()
    self.message_worker.connect()
    self.message_worker.purge()
Manually purge the router. We need a persistent message router to test features,
since we send the message and THEN start the router, but we don't want messages
to remain in the queue between two tests.
625941b955399d3f0558852b
def passive_power_spectrum_model_hydro(
    f, fc, diffusion_constant, gamma0, bead_radius, rho_sample, rho_bead,
    distance_to_surface,
):
    re_drag, im_drag = calculate_complex_drag(
        f, gamma0, rho_sample, bead_radius, distance_to_surface
    )
    frequency_m = calculate_dissipation_frequency(gamma0, bead_radius, rho_bead)
    denominator = (fc + f * im_drag - f**2 / frequency_m) ** 2 + (f * re_drag) ** 2
    power_spectrum = diffusion_constant / (np.pi**2) * re_drag / denominator
    return power_spectrum
Theoretical model for the hydrodynamically correct power spectrum.

Parameters
----------
f : numpy.ndarray
    Frequency values, in Hz.
fc : float
    Corner frequency, in Hz.
diffusion_constant : float
    Diffusion constant, in (a.u.)^2/s
gamma0 : float
    Drag coefficient, in mPas.
bead_radius : float
    Bead radius, in m.
rho_sample : float
    Sample mass density, in kg/m^3
rho_bead : float
    Bead mass density, in kg/m^3
distance_to_surface : float
    Distance to nearest surface, in m
625941b997e22403b379ce11
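Written out, the model evaluated by the code above is (symbols mirror the code: $\gamma'$ = re_drag, $\gamma''$ = im_drag, $f_m$ = frequency_m, $D$ = diffusion_constant):

\[
P(f) = \frac{D}{\pi^{2}}
\frac{\gamma'(f)}{\left(f_c + f\,\gamma''(f) - f^{2}/f_m\right)^{2} + \left(f\,\gamma'(f)\right)^{2}}
\]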
def isEqualWithoutConsideringStr(self, *args):
    return _MEDCalculator.MEDCouplingTimeDiscretization_isEqualWithoutConsideringStr(self, *args)
isEqualWithoutConsideringStr(self, MEDCouplingTimeDiscretization other, double prec) -> bool
625941b966656f66f7cbc022
def train_network(model, epochs=5):
    modelname = "model"
    print(model.summary())
    data = DataFeeder()
    callbacks = [ModelCheckpoint('model{epoch:02d}.h5')]
    model.compile(optimizer="adam", loss="mse")
    history = model.fit_generator(data.fetch_train(),
                                  nb_epoch=epochs,
                                  steps_per_epoch=data.steps_per_epoch,
                                  validation_data=data.fetch_valid(),
                                  validation_steps=data.validation_steps,
                                  verbose=1,
                                  callbacks=callbacks)
    model.save(modelname + ".h5")
    print("Model saved to {}.h5".format(modelname))
    # Plot and save the training/validation loss curves.
    fig = plt.figure()
    plt.plot(history.history['loss'])
    plt.plot(history.history['val_loss'])
    plt.title('Model Loss')
    plt.ylabel('Loss')
    plt.xlabel('Epoch')
    plt.legend(['train', 'validation'], loc='upper right')
    plt.savefig(modelname + '_training_history.png')
    plt.close(fig)
    return "Finish"
Main script for training the Behavioral Cloning Network model
625941b9a79ad161976cbfbd
def example_game(self):
    animations = []
    animations.append(self.flip_cell(2, 2))
    animations.append(self.flip_cell(2, 3))
    self.play(*animations)
    animations = []
    animations.append(self.flip_cell(5, 6))
    animations.append(self.flip_cell(6, 7))
    self.play(*animations)
An example of how the animation can be done
625941b98a43f66fc4b53ee1
def play(self):
    print(self.state.instructions)
    print(self.state + '\n')
    while self.state.possible_next_moves():
        if self.state.next_player == 'p1':
            m = self.state.get_move()
            while m not in self.state.possible_next_moves():
                print('Illegal move: {}\nPlease try again.\n'.format(m))
                print(self.state.instructions)
                print(self.state)
                m = self.state.get_move()
            print('You choose: {}'.format(m))
        else:
            m = self.strategy.suggest_move(self.state)
            print('The second computer chooses: {}'.format(m))
        self.state = self.state.apply_move(m)
        print('New game state: ', str(self.state))
        print()
    if self.state.winner('p2'):
        print('Beat ya!')
    elif self.state.winner('p1'):
        print('Congrats -- you won!!')
    else:
        print('We tied...')
(GameView) -> NoneType

Play a game.
625941b9046cf37aa974cbc2
def check_aliens_bottom(ai_settings, stats, screen, ship, aliens, bullets):
    screen_rect = screen.get_rect()
    for alien in aliens.sprites():
        if alien.rect.bottom >= screen_rect.bottom:
            # Treat a landed alien the same as the ship getting hit.
            ship_hit(ai_settings, stats, screen, ship, aliens, bullets)
            break
Checks if any aliens have reached the bottom of the screen.
625941b96aa9bd52df036c1a
def calDesviacionEstandar(self):
    return math.sqrt(self.sumatoriaMedia() / (self.numero - 1))
This function computes the standard deviation from a running sum and returns the result.
625941b999fddb7c1c9de20b
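Assuming sumatoriaMedia() returns the sum of squared deviations from the mean and numero is the sample count $n$ (both are assumptions; neither is shown in the record), the quantity computed above is the sample standard deviation:

\[
s = \sqrt{\frac{\sum_{i=1}^{n}\left(x_i - \bar{x}\right)^{2}}{n - 1}}
\]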
def raise_deprecation_error(
    *,
    deprecated_qual_name_usage: str,
    new_qual_name_usage: str,
    to_be_removed_in_version: str,
) -> NoReturn:
    check_overdue(deprecated_qual_name_usage, to_be_removed_in_version)
    message = (
        f"Usage of {deprecated_qual_name_usage!r} was deprecated, "
        f"use {new_qual_name_usage!r} instead.\n"
        "It wasn't possible to restore the original behavior of this usage "
        "(mostlikely due to an object hierarchy change)."
        "This usage change message won't be show as of version: "
        f"{to_be_removed_in_version!r}."
    )
    raise GlotaranDeprectedApiError(message)
Raise :class:`GlotaranDeprectedApiError` error, with formatted message.

This should only be used if there is no reasonable way to keep the deprecated
usage functional!

Parameters
----------
deprecated_qual_name_usage : str
    Old usage with fully qualified name e.g.:
    ``'glotaran.read_model_from_yaml(model_yml_str)'``
new_qual_name_usage : str
    New usage as fully qualified name e.g.:
    ``'glotaran.io.load_model(model_yml_str, format_name="yml_str")'``
to_be_removed_in_version : str
    Version the support for this usage will be removed.

Raises
------
OverDueDeprecation
    If the current version is greater or equal to ``to_be_removed_in_version``.
GlotaranDeprectedApiError
    If :class:`OverDueDeprecation` wasn't raised before.

.. # noqa: DAR402 OverDueDeprecation
.. # noqa: DAR401 GlotaranDeprectedApiError
625941b930dc7b76659017e2
def check_directions(next_door, current_node, goal_node, chip, crossroad, travelled_path, colide):
    if next_door[2] < 0 or next_door[2] > 7:
        return crossroad
    if next_door[0] < 0 or next_door[0] > chip.width - 1 or next_door[1] < 0 or next_door[1] > chip.height - 1:
        return crossroad
    (x, y, z) = current_node.position
    if chip.coordinates[z][y][x].connections[next_door].used:
        return crossroad
    next_node = chip.coordinates[next_door[2]][next_door[1]][next_door[0]]
    neighbour = nd.Node(next_door, current_node, next_node.cost,
                        next_node.cost + next_node.distance_to_goal)
    if neighbour != goal_node and chip.coordinates[next_door[2]][next_door[1]][next_door[0]].gate is not None:
        return crossroad
    if neighbour in travelled_path:
        return crossroad
    if add_to_crossroad(neighbour, crossroad, colide):
        crossroad.append(neighbour)
    return crossroad
Check a neighbouring coordinate on validity.

Args:
    next_door (int): Coordinates of a crossing next to the current coordinate
    current_node (object): The heuristics from the current coordinates
    goal_node (object): The heuristics from the goal coordinates
    target_coords (int): The gate coordinates which the algorithm will try to reach

Returns:
    crossroad (list of objects): All the possible paths the algorithm can choose
625941b9046cf37aa974cbc3
def crawler(url, keyword):
    code = requests.get(url)
    plain = code.text
    s = BeautifulSoup(plain, "html.parser")
    answer = ''
    for text in s.findAll('a', string=re.compile('.* {0} .*'.format(keyword)), recursive=True):
        href = text.get('href')
        answer += '\n' + str(text)
        if href not in visited:
            visited[href] = href
            crawler(href, keyword)
    return answer
Crawler uses BeautifulSoup for crawling a webpage.

:url - the url to crawl from
:keyword - the keyword to search for
:return - string with the anchor tags whose text matched the keyword
625941b9e8904600ed9f1da1
def test_creation_OnUpdateSchedule_success_with_exec_on_load_True(self):
    sc = schedules.OnUpdateSchedule(exec_on_load=True)
    self.assertEqual(sc.exec_on_load, True)
    self.assertEqual(sc.activation_metrics, [])
    self.assertEqual(sc.meets(), False)
Creating an OnUpdateSchedule object should succeed with exec_on_load=True.
625941b915fb5d323cde0982
def symbol(self):
    cur = self.currentToken
    self.advance()
    return cur
Returns the character which is the current token, and advances the input. Should be called only when tokenType() is SYMBOL.
625941b97d43ff24873a2b1c
def insertplainheader(lines, header, value):
    newprio = PLAINHEADERS[header.lower()]
    bestpos = len(lines)
    for i, line in enumerate(lines):
        if ':' in line:
            lheader = line.split(':', 1)[0].strip().lower()
            lprio = PLAINHEADERS.get(lheader, newprio + 1)
            if lprio == newprio:
                lines[i] = '%s: %s' % (header, value)
                return lines
            if lprio > newprio and i < bestpos:
                bestpos = i
        else:
            if line:
                lines.insert(i, '')
            if i < bestpos:
                bestpos = i
            break
    lines.insert(bestpos, '%s: %s' % (header, value))
    return lines
For lines containing a plain patch header, add a header line with value.

>>> insertplainheader([], 'Date', 'z')
['Date: z']
>>> insertplainheader([''], 'Date', 'z')
['Date: z', '']
>>> insertplainheader(['x'], 'Date', 'z')
['Date: z', '', 'x']
>>> insertplainheader(['From: y', 'x'], 'Date', 'z')
['From: y', 'Date: z', '', 'x']
>>> insertplainheader([' date : x', ' from : y', ''], 'From', 'z')
[' date : x', 'From: z', '']
>>> insertplainheader(['', 'Date: y'], 'Date', 'z')
['Date: z', '', 'Date: y']
>>> insertplainheader(['foo: bar', 'DATE: z', 'x'], 'From', 'y')
['From: y', 'foo: bar', 'DATE: z', '', 'x']
625941b9fbf16365ca6f6035
def decode_pasv_reply(raw_reply):
    reply = raw_reply.decode()
    # The reply contains "(h1,h2,h3,h4,p1,p2)"; the port is p1*256 + p2.
    h1, h2, h3, h4, p1, p2 = reply[reply.find('(') + 1:reply.find(')')].split(',')
    host = '.'.join([h1, h2, h3, h4])
    port = int(p1) * 256 + int(p2)
    return host, port
:type raw_reply: bytes
625941b9c432627299f04abc
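A usage sketch with a typical FTP 227 reply (hypothetical values):

host, port = decode_pasv_reply(b'227 Entering Passive Mode (192,168,1,2,19,137).')
print(host, port)  # 192.168.1.2 5001, since 19 * 256 + 137 = 5001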
def create_fd(self):
    if self.filename is None:
        return i18n.get_encoded_writer(encoding=self.output_encoding,
                                       errors=self.codec_errors)
    return codecs.open(self.filename, "wb", self.output_encoding, self.codec_errors)
Create open file descriptor.
625941b98e71fb1e9831d625
def delete(self, imageUrl):
    print("Starting image deletion")
    deleteUrl = imageUrl.delete
    r = requests.get(deleteUrl)
    if r.status_code == 200:
        return [0, "Delete succeeded"]
    else:
        return [-1, "Unknown error"]
Delete an image.

:param imageUrl: the ImageUrl object of the image
:return: [int, string] status code, status message
625941b98e7ae83300e4ae44
def add_to_sample_data(self, forecast, outfit, athlete_name='default', **kwargs):
    if self._sample_data is None:
        self._sample_data = self.get_dataframe_format()
    forecast = vars(forecast) if type(forecast) is not dict else forecast
    fields = {x: outfit[x] for x in self.labels if x in outfit}
    fields.update({x: forecast[x] for x in self.features if x in forecast})
    fields.update({x: kwargs[x] for x in self.features + self.labels if x in kwargs})
    fields.update({'Athlete': athlete_name, 'activity': self.activity_name})
    record_number = len(self._sample_data)
    for k, v in fields.items():
        self._sample_data.loc[record_number, k] = v
    self._have_sample_data = True
    return self._sample_data
Add a row of sample data to a model file.

:param forecast:
:param outfit:
:param athlete_name:
:return:
625941b955399d3f0558852c
def class2size(pred_cls, residual):
    mean_size = g_type_mean_size[g_class2type[pred_cls]]
    return mean_size + residual
Inverse function to size2class.
625941b938b623060ff0ac67
def getFileDataLines(self, fileName, lineNo):
    try:
        # linecache.getlines() takes only a file name (the original passed
        # lineNo as the module_globals argument); slice afterwards to return
        # everything from lineNo onward.
        lines = linecache.getlines(fileName)
        return lines[lineNo - 1:]
    except Exception as err:
        self.log.error("{}".format(err))
Desc: get data from file
625941b930bbd722463cbc3b
def map_extra_regions(pt):
    mappings = []
    for entry in args.map:
        elements = entry.split(',')
        if len(elements) < 2:
            error("Not enough arguments for --map %s" % entry)
        one_map = {}
        one_map['cmdline'] = entry
        one_map['phys'] = int(elements[0], 16)
        one_map['size'] = int(elements[1], 16)
        one_map['large_page'] = False
        flags = FLAG_P | ENTRY_XD
        if len(elements) > 2:
            map_flags = elements[2]
            if not bool(re.match('^[LUWXD]*$', map_flags)):
                error("Unrecognized flags: %s" % map_flags)
            flags = FLAG_P | ENTRY_XD
            if 'W' in map_flags:
                flags |= ENTRY_RW
            if 'X' in map_flags:
                flags &= ~ENTRY_XD
            if 'U' in map_flags:
                flags |= ENTRY_US
            if 'L' in map_flags:
                flags |= FLAG_SZ
                one_map['large_page'] = True
            if 'D' in map_flags:
                flags |= FLAG_CD
        one_map['flags'] = flags
        if len(elements) > 3:
            one_map['virt'] = int(elements[3], 16)
        else:
            one_map['virt'] = one_map['phys']
        mappings.append(one_map)
    for one_map in mappings:
        phys = one_map['phys']
        size = one_map['size']
        flags = one_map['flags']
        virt = one_map['virt']
        level = PD_LEVEL if one_map['large_page'] else PT_LEVEL
        if pt.is_region_mapped(virt, size, level):
            error(("Region 0x%x (%d) already been mapped "
                   "for --map %s" % (virt, size, one_map['cmdline'])))
        pt.reserve_unaligned(virt, size, level)
        pt.map(phys, virt, size, flags, level)
Map extra regions specified in command line
625941b91f037a2d8b946077
def __inner_eval(self, data_name, data_idx, feval=None):
    if data_idx >= self.__num_dataset:
        raise ValueError("data_idx should be smaller than number of dataset")
    self.__get_eval_info()
    ret = []
    if self.__num_inner_eval > 0:
        result = np.array([0.0 for _ in range(self.__num_inner_eval)], dtype=np.float32)
        tmp_out_len = ctypes.c_int64(0)
        _safe_call(_LIB.LGBM_BoosterGetEval(
            self.handle,
            data_idx,
            ctypes.byref(tmp_out_len),
            result.ctypes.data_as(ctypes.POINTER(ctypes.c_float))))
        if tmp_out_len.value != self.__num_inner_eval:
            raise ValueError("incorrect number of eval results")
        for i in range(self.__num_inner_eval):
            ret.append((data_name, self.__name_inner_eval[i], result[i],
                        self.__higher_better_inner_eval[i]))
    if feval is not None:
        if data_idx == 0:
            cur_data = self.train_set
        else:
            cur_data = self.valid_sets[data_idx - 1]
        feval_ret = feval(self.__inner_predict(data_idx), cur_data)
        if isinstance(feval_ret, list):
            for eval_name, val, is_higher_better in feval_ret:
                ret.append((data_name, eval_name, val, is_higher_better))
        else:
            eval_name, val, is_higher_better = feval_ret
            ret.append((data_name, eval_name, val, is_higher_better))
    return ret
Evaluate training or validation data.
625941b9adb09d7d5db6c60b
def create(self, match_report: MatchReport):
    self._db_session.merge(match_report)
    self._db_session.commit()
Creates a new row of match report in the database.

:param match_report: Match report object containing all the information
625941b950485f2cf553cc11
def get_sections(df):
    dfd = df.dropna(how='all')
    secheads = [re.match('//(.*)', s) for s in dfd.iloc[:, 0]]
    indheads = np.where(secheads)[0]
    sections = {}
    for n in range(len(indheads)):
        secname = secheads[indheads[n]].group(1)
        try:
            # A section runs from the row after its header up to the next
            # header (the original indexed indheads[n + 2], which skipped a
            # header and merged sections).
            r = range(indheads[n] + 1, indheads[n + 1])
        except IndexError:
            r = range(indheads[n] + 1, dfd.shape[0])
        assert secname not in sections, 'Multiple "%s" found' % secname
        sections[secname] = dfd.iloc[r, :]
    return sections
Split into sections indicated by //.*, remove empty rows
625941b938b623060ff0ac68
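A minimal usage sketch for get_sections (hypothetical data; assumes the numpy/pandas/re imports the function already relies on):

import re
import numpy as np
import pandas as pd

df = pd.DataFrame({0: ['//params', 'a', 'b', '//data', 'x', 'y']})
sections = get_sections(df)
print(list(sections))                  # ['params', 'data']
print(sections['params'][0].tolist())  # ['a', 'b']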
def xmlOneDotOne(data):
    desiredOutput = {
        "display_name": "displayName",
        "display_short_name": "displayShortName",
        "incoming_hostname": "hostname",
        "incoming_port": "port",
        "incoming_socket_type": "socketType",
        "incoming_username_form": "username",
        "incoming_authentication": "authentication",
        "outgoing_hostname": "hostname",
        "outgoing_port": "port",
        "outgoing_socket_type": "socketType",
        "outgoing_username_form": "username",
        "outgoing_authentication": "authentication",
    }
    incoming = None
    outgoing = None
    config = ET.Element("clientConfig")
    config.attrib["version"] = "1.1"
    emailProvider = ET.SubElement(config, "emailProvider")
    qs = Domain.objects.filter(config=data) or (
        DomainRequest.objects.filter(config=data))
    for domain in qs:
        if not data.email_provider_id:
            data.email_provider_id = domain.name
        ET.SubElement(emailProvider, "domain").text = domain.name
    emailProvider.attrib["id"] = data.email_provider_id
    for field in data._meta.fields:
        if field.name not in desiredOutput:
            continue
        if field.name.startswith("incoming"):
            if incoming is None:
                incoming = ET.SubElement(emailProvider, "incomingServer")
                incoming.attrib["type"] = data.incoming_type
            name = field.name
            currParent = incoming
        elif field.name.startswith("outgoing"):
            if outgoing is None:
                outgoing = ET.SubElement(emailProvider, "outgoingServer")
                outgoing.attrib["type"] = "smtp"
            name = field.name
            currParent = outgoing
        else:
            name = field.name
            currParent = emailProvider
        if (name == "incoming_username_form") and (
                data.incoming_username_form == ""):
            data.incoming_username_form = data.outgoing_username_form
        name = desiredOutput[name]
        e = ET.SubElement(currParent, name)
        text = getattr(data, field.name)
        if type(text) is bool:
            text = unicode(text).lower()
        else:
            text = unicode(text)
        e.text = text
    for enableurl in data.enableurl_set.all():
        enable = ET.SubElement(emailProvider, "enable")
        enable.attrib["visiturl"] = enableurl.url
        for inst in enableurl.instructions.all():
            d = ET.SubElement(enable, "instruction")
            d.attrib["lang"] = inst.language
            d.text = unicode(inst.description)
    for docurl in data.docurl_set.all():
        doc = ET.SubElement(emailProvider, "documentation")
        doc.attrib["url"] = docurl.url
        for desc in docurl.descriptions.all():
            d = ET.SubElement(doc, "descr")
            d.attrib["lang"] = desc.language
            d.text = unicode(desc.description)
    retval = StringIO()  # start empty; the original StringIO("w") argument was spurious
    xml = ET.ElementTree(config)
    xml.write(retval, encoding="UTF-8", xml_declaration=True)
    return retval.getvalue()
Return the configuration using the XML document that Thunderbird is expecting.
625941b94c3428357757c1a3
def __init__(self, SRQ=0, RFC=0, Site='NLCWIT', Client='None', Requestor='None', Description='None'):
    self.SRQ = SRQ
    self.RFC = RFC
    self.Site = Site
    self.Client = Client
    self.Requestor = Requestor
    self.Description = Description.replace(':', '').replace('>', '2').replace('!', '')
    self.__folder__ = None
Initialise default values for SRQProject object
625941b90a366e3fb873e690
def test_timeout_replace_only_when_necessary(self):
    self.assertEqual(
        converge(
            DesiredGroupState(server_config={}, capacity=2),
            set([server('slowpoke', ServerState.BUILD, created=0),
                 server('old-ok', ServerState.ACTIVE, created=0),
                 server('new-ok', ServerState.ACTIVE, created=3600)]),
            set(),
            3600),
        pbag([DeleteServer(server_id='slowpoke')]))
If a server is timing out *and* we're over capacity, it will be deleted without replacement.
625941b9099cdd3c635f0ad5
def test_transform_record_default_schema():
    serializer = SimpleMarshmallowSerializer()
    data = serializer.transform_record(
        PersistentIdentifier(pid_type='recid', pid_value='1'),
        Record({'title': 'test'})
    )
    assert data == {
        'id': '1',
        'created': None,
        'links': {},
        'metadata': {'title': 'test'},
        'updated': None
    }
Test marshmallow serializer without providing a schema.
625941b9ec188e330fd5a61f
def undo_cat(col, lim):
    def cat_decode(x, limits):
        # Return the category whose upper limit is the first one above x.
        for k, v in limits.items():
            if x < k:
                return v
    return col.apply(lambda x: cat_decode(x, lim))
Convert the categorical column to normalized values.

Arguments:
    col {Column} -- The dataframe's column
    lim {Dict} -- The dictionary containing the limits per categorical column

Returns:
    Column -- The new column
625941b93d592f4c4ed1cef8
def gyy(xp, yp, zp, prisms):
    if xp.shape != yp.shape != zp.shape:
        raise ValueError("Input arrays xp, yp, and zp must have same shape!")
    res = 0
    for prism in prisms:
        if prism is None or 'density' not in prism.props:
            continue
        density = prism.props['density']
        res += kernelyy(xp, yp, zp, prism) * density
    res *= G * SI2EOTVOS
    return res
yy component of the gravity gradient tensor of a polygonal prism.

.. note:: The coordinate system of the input parameters is to be
    x -> North, y -> East and z -> Down.

.. note:: All input values in SI units and output in Eotvos!

Parameters:

* xp, yp, zp : arrays
    The x, y, and z coordinates of the computation points.
* prisms : list of :class:`geoist.inversion.mesher.PolygonalPrism`
    The model used to calculate the field. Prisms must have the physical
    property ``'density'``; prisms that are None or missing ``'density'``
    will be ignored.

Returns:

* res : array
    The effect calculated on the computation points.

References:

Plouff, D., 1976, Gravity and magnetic fields of polygonal prisms and
applications to magnetic terrain corrections, Geophysics, 41(4), 727-741,
doi:10.1190/1.1440645.
625941b9e8904600ed9f1da2
def find_vstructures(self, startTraitIdx=0, nTraits=np.inf):
    T = self.phenoreader.get_nrows()
    self.vstructures = vstructures.VstructureList()
    for t in range(startTraitIdx, min(startTraitIdx + nTraits, T)):
        for isnps, igenes in self.find_vstructures_given_focal_gene(t):
            if (isnps is not None) and (igenes is not None):
                self.vstructures.add(t, isnps, igenes)
returns an iterator over all (snp,orth_gene) pairs where snp -> anchor gene -> gene t <- orth gene
625941b9379a373c97cfa9c3
def view(self):
    return self.__d.treeView
@return QTreeView
625941b9be7bc26dc91cd47e
def run():
    run_container("expel-server-run", run_mounts(), ["--publish", "7777:7777/udp"], [])
Run an EXILED server to test your plugin
625941b9a05bb46b383ec6a6
def _ucb_halluc_acq(x):
    mu, sigma = gp.eval_with_hallucinated_observations(x, halluc_pts, uncert_form='std')
    return mu + beta_th * sigma
Computes GP-UCB acquisition with hallucinated observations.
625941b9925a0f43d2549ced
def __init__(self, win, cam, forcex=0, forcey=0):
    if FilterManager.notify is None:
        FilterManager.notify = directNotify.newCategory("FilterManager")
    # Find the DisplayRegion in this window that renders from the given camera.
    region = None
    for dr in win.getDisplayRegions():
        drcam = dr.getCamera()
        if drcam == cam:
            region = dr
    if region is None:
        self.notify.error('Could not find appropriate DisplayRegion to filter')
        return False
    self.win = win
    self.forcex = forcex
    self.forcey = forcey
    self.engine = win.getGsg().getEngine()
    self.region = region
    self.wclears = self.getClears(self.win)
    self.rclears = self.getClears(self.region)
    self.camera = cam
    self.caminit = cam.node().getInitialState()
    self.camstate = self.caminit
    self.buffers = []
    self.sizes = []
    self.nextsort = self.win.getSort() - 1000
    self.basex = 0
    self.basey = 0
    self.accept("window-event", self.windowEvent)
The FilterManager constructor requires you to provide a window which is rendering a scene, and the camera which is used by that window to render the scene. These are henceforth called the 'original window' and the 'original camera.'
625941b9d99f1b3c44c67410
def testNormalizer(self):
    from spatialdata.units.Nondimensional import Nondimensional
    normalizer = Nondimensional()
    normalizer._configure()
    self.friction.normalizer(normalizer)
    return
Test normalizer().
625941b90c0af96317bb8062
def test_POST_delete_background(self):
    self.site_location.background = File(file(self._data_file('logo.png')))
    self.site_location.save()
    c = Client()
    c.login(username='admin', password='admin')
    self.POST_data['delete_background'] = 'yes'
    POST_response = c.post(self.url, self.POST_data)
    self.assertStatusCodeEquals(POST_response, 302)
    self.assertEquals(POST_response['Location'],
                      'http://%s%s' % ('testserver', self.url))
    site_location = models.SiteLocation.objects.get(pk=self.site_location.pk)
    self.assertEquals(site_location.background, '')
A POST request to the edit_content view with POST['delete_background'] should remove the background image and redirect back to the edit design view.
625941b90c0af96317bb8063
def receive_packet_out(logical_device_id, egress_port_no, msg):
    pass
Pass a packet_out message content to adapter so that it can forward it out
to the device. This is only called on root devices.

:param logical_device_id:
:param egress_port_no: egress logical port number
:param msg: actual message
:return: None
625941b95166f23b2e1a4fd2
def Conv(X, nbFilters, kernel, stride):
    channels = X.shape.as_list()[-1]
    F = tf.get_variable('kernel', [kernel, kernel, channels, nbFilters], tf.float32,
                        tf.contrib.layers.xavier_initializer())
    b = tf.get_variable('Cbias', [nbFilters], tf.float32, tf.zeros_initializer())
    return tf.nn.bias_add(tf.nn.conv2d(X, F, [1, stride, stride, 1], 'SAME'), b)
:param X: input tensor
:param nbFilters: number of filters
:param kernel: kernel shape for convolution operation
:param stride: stride for convolution operation
:return: Convolution operation with bias
625941b967a9b606de4a7d36
def next_url(request):
    next_page = request.REQUEST.get("next", "")
    host = request.get_host()
    return next_page if next_page and is_safe_url(next_page, host=host) else None
Returns URL to redirect to from the ``next`` param in the request.
625941b921bff66bcd6847cf
def die(*args):
    print_err(*args)
    sys.exit(2)
Prints an error message on stderr and aborts execution with return code set to 2.
625941b93617ad0b5ed67d79
def findRightInterval(self, intervals):
    # Record every start and end as (coordinate, is_start, original_index),
    # then sweep the sorted events, pairing each end with the next start.
    data = []
    for i, interval in enumerate(intervals):
        data.append((interval.start, True, i))
        data.append((interval.end, False, i))
    data.sort()
    ri = {}
    n = len(data)
    sp = ep = 0
    while ep < n:
        if data[ep][1]:
            ep += 1
        if sp < ep:
            sp = ep + 1
        while sp < n and not data[sp][1]:
            sp += 1
        ri[data[ep][2]] = data[sp][2] if sp < n else -1
        ep += 1
    return [ri[i] for i in range(len(intervals))]
:type intervals: List[Interval]
:rtype: List[int]
625941b985dfad0860c3acd2
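A quick sanity check (hypothetical Interval objects with .start/.end attributes, which is what the code assumes; Solution is an assumed wrapper class):

from collections import namedtuple

Interval = namedtuple('Interval', ['start', 'end'])
s = Solution()
print(s.findRightInterval([Interval(3, 4), Interval(2, 3), Interval(1, 2)]))
# [-1, 0, 1]: the right interval of [2,3] is [3,4], and of [1,2] is [2,3]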
@socketio.on('joined', namespace='/chat')
def joined(message):
    room = session.get('room')
    join_room(room)
    emit('status', {'msg': session.get('name') + ' joined'}, room=room)
Sent by clients when they enter a room. A status message is broadcast to all people in the room.
625941b98e7ae83300e4ae45
def get_story_summary_from_model(story_summary_model):
    return story_domain.StorySummary(
        story_summary_model.id,
        story_summary_model.title,
        story_summary_model.language_code,
        story_summary_model.version,
        story_summary_model.node_count,
        story_summary_model.story_model_created_on,
        story_summary_model.story_model_last_updated
    )
Returns a domain object for an Oppia story summary given a story summary model.

Args:
    story_summary_model: StorySummaryModel.

Returns:
    StorySummary.
625941b9293b9510aa2c3112
def book_isbn_to_id(self, isbn):
    raise NotImplementedError
    # Unreachable until implemented:
    response = self.get('book/isbn_to_id', {'isbn': isbn})
Get the Goodreads book ID given an ISBN: http://www.goodreads.com/api#book.isbn_to_id
625941b9cc0a2c11143dcd12
def get_form_kwargs(self, step=None):
    if step in ('token', 'backup'):
        return {
            'user': self.get_user(),
            'initial_device': self.get_device(step),
        }
    return {}
AuthenticationTokenForm requires the user kwarg.
625941b9090684286d50eb5a
def angle_cycle(self):
    return self._cmd('ang')
Cycles through available camera angles
625941b930c21e258bdfa317
def allocate(self, value=0, **kwargs):
    if value in ['random', 'random_int']:
        out = self.get_uniform_copy()
        shape = out.as_array().shape
        seed = kwargs.get('seed', None)
        if seed is not None:
            numpy.random.seed(seed)
        if value == 'random':
            out.fill(numpy.random.random_sample(shape))
        elif value == 'random_int':
            max_value = kwargs.get('max_value', 100)
            out.fill(numpy.random.randint(max_value, size=shape))
    elif value is None:
        if self.is_empty():
            out = self.get_uniform_copy(0)
        else:
            out = self.copy()
    else:
        out = self.get_uniform_copy(value)
    return out
Alias to get_uniform_copy for CIL/SIRF compatibility.
625941b963f4b57ef0000f9b
def AddLinks(sf_name, src_dir, dest_dir):
    sf_path_in = os.path.join(src_dir, sf_name)
    sf_path_basename = os.path.join(sf_path_in, sf_name)
    sf_path_out = os.path.join(dest_dir, sf_name)
    gdf = gpd.read_file(sf_path_in)
    with fiona.open(sf_path_basename + '.shp') as f:
        input_schema = f.schema
    output_schema = input_schema
    outprops = output_schema['properties']
    if 'API' in gdf.columns:
        gdf['API10'] = gdf.API.str.slice(stop=10)
        outprops.update({'API10': 'str:10'})
        gdf['Production'] = gdf.API10.apply(
            lambda s: 'https://oilgas.ogm.utah.gov/oilgasweb/live-data-search/lds-prod/prod-grid.xhtml?wellno={}'.format(s))
        outprops.update({'Production': 'str:80'})
        gdf['WellHistory'] = gdf.API10.apply(
            lambda s: 'https://oilgas.ogm.utah.gov/oilgasweb/live-data-search/lds-well/well-history-lu.xhtml?todo=srchIndvRow&api={}'.format(s))
        outprops.update({'WellHistory': 'str:80'})
        gdf['WellLogs'] = gdf.API10.apply(
            lambda s: 'https://oilgas.ogm.utah.gov/oilgasweb/live-data-search/lds-logs/logs-lu.xhtml?todo=srchIndvRow&api={}'.format(s))
        outprops.update({'WellLogs': 'str:80'})
    prj_file = sf_path_basename + '.prj'
    prj = [l.strip() for l in open(prj_file, 'r')][0]
    output_schema['properties'] = outprops
    gdf.to_file(sf_path_out, crs_wkt=prj, schema=output_schema)
    copy2(sf_path_basename + '.sbn', sf_path_out)
    copy2(sf_path_basename + '.sbx', sf_path_out)
    copy2(sf_path_basename + '.shp.xml', sf_path_out)
Adds 3 URLs to each feature in the shapefile, if the shapefile contains a column
named 'API'. Writes the shapefile to the destination folder and copies ancillary
files from the source to the destination.

Arguments:
    sf_name: string, name of the shapefile, folder in src_dir
    src_dir: directory (folder) that contains sf_name
    dest_dir: directory (folder) where the modified shapefile is to be written
625941b997e22403b379ce13
def dump_odict_yaml(data, stream=None, Dumper=yaml.Dumper, **kwds):
    class OrderedDumper(Dumper):
        pass

    def _dict_representer(dumper, data):
        return dumper.represent_mapping(
            yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items()
        )

    OrderedDumper.add_representer(OrderedDict, _dict_representer)
    return yaml.dump(data, stream, OrderedDumper, **kwds)
* Function: Dump an OrderedDict onto a yaml file
* -----------{returns}------------
* Serialised OrderedDict object into yaml . . .
* ------------{usage}-------------
>>> dump_odict_yaml(data, Dumper=yaml.SafeDumper)
625941b950812a4eaa59c19f
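A short usage sketch (assuming the function above and its imports are in scope): keys are emitted in insertion order rather than sorted.

from collections import OrderedDict
import yaml

data = OrderedDict([('name', 'demo'), ('version', 1), ('debug', False)])
print(dump_odict_yaml(data, Dumper=yaml.SafeDumper))
# name: demo
# version: 1
# debug: false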
def is_finished(self, quest_key):
    return quest_key in self.finished_quests
Whether the character finished this quest or not.

Args:
    quest_key: (string) quest's key

Returns:
    bool: True if the quest is finished.
625941b90a366e3fb873e691
def is_alive(self):
    return self.last_hello + timedelta(seconds=10) >= datetime.now().replace(microsecond=0)
:return: client is alive
625941b9e5267d203edcdb1b
def ping(self, t: int) -> int:
    in_range_count = 0
    self.requests.append(t)
    # Walk backwards from the newest request; timestamps are strictly
    # increasing, so we can stop as soon as one falls out of the 3000 ms
    # window (the original compared req > t, which never fires, and kept
    # an unused index counter).
    for req in reversed(self.requests):
        if t - 3000 <= req <= t:
            in_range_count += 1
        elif req < t - 3000:
            break
    return in_range_count
It is guaranteed that every call to ping uses a strictly larger value of t than the previous call.
625941b985dfad0860c3acd3
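For context, a self-contained sketch of the same sliding-window counter built on collections.deque; the class name and the example wiring are assumptions for illustration:

from collections import deque

class RecentCounter:
    def __init__(self):
        self.requests = deque()

    def ping(self, t: int) -> int:
        # t is strictly increasing, so everything older than t - 3000
        # can be dropped from the front permanently.
        self.requests.append(t)
        while self.requests[0] < t - 3000:
            self.requests.popleft()
        return len(self.requests)

counter = RecentCounter()
print(counter.ping(1))     # 1
print(counter.ping(100))   # 2
print(counter.ping(3001))  # 3
print(counter.ping(3002))  # 3 -- the ping at t=1 has expired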
def cmd_exec_stdout(self, command, errormsg='', log=True): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> log and Log.debug(self, "Running command: {0}".format(command)) <NEW_LINE> with subprocess.Popen([command], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) as proc: <NEW_LINE> <INDENT> (cmd_stdout_bytes, cmd_stderr_bytes) = proc.communicate() <NEW_LINE> (cmd_stdout, cmd_stderr) = (cmd_stdout_bytes.decode('utf-8', "replace"), cmd_stderr_bytes.decode('utf-8', "replace")) <NEW_LINE> <DEDENT> Log.debug(self, "Command Output: {0}, \nCommand Error: {1}".format(cmd_stdout, cmd_stderr)) <NEW_LINE> return cmd_stdout <NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> Log.debug(self, str(e)) <NEW_LINE> raise CommandExecutionError <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> Log.debug(self, str(e)) <NEW_LINE> raise CommandExecutionError
Run a shell command from Python and return its standard output; raises CommandExecutionError when execution fails.
625941b9507cdc57c6306b4e
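For reference, a standalone sketch of the capture pattern the method wraps; the function name is hypothetical and it omits the class's logging and error types:

import subprocess

def run_stdout(command: str) -> str:
    # Capture stdout/stderr, decoding with replacement for invalid UTF-8,
    # mirroring the decode('utf-8', 'replace') calls above.
    with subprocess.Popen([command], stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE, shell=True) as proc:
        out, _err = proc.communicate()
    return out.decode('utf-8', 'replace')

print(run_stdout('echo hello'))  # hello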
def set_filename(self, kwdict): <NEW_LINE> <INDENT> self.kwdict = kwdict <NEW_LINE> self.temppath = self.prefix = "" <NEW_LINE> ext = kwdict["extension"] <NEW_LINE> kwdict["extension"] = self.extension = self.extension_map(ext, ext) <NEW_LINE> if self.extension: <NEW_LINE> <INDENT> self.build_path() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.filename = ""
Set general filename data from the keyword dictionary; builds the output path when the extension maps to a known type, otherwise clears the filename.
625941b90fa83653e4656e37
def _clearOldMessages(self): <NEW_LINE> <INDENT> cherrypy.response.headerMap["cache-control"] = "no-cache" <NEW_LINE> global _currentMesages <NEW_LINE> now = time.time() <NEW_LINE> idx = len(_currentMesages) <NEW_LINE> while idx > 0: <NEW_LINE> <INDENT> idx -= 1 <NEW_LINE> msg = _currentMesages[idx] <NEW_LINE> if msg._expires and ( msg._expires < now ): <NEW_LINE> <INDENT> self._logger.debug( "_clearOldMessages(delete) %i %s %s" % (msg._pri, msg._txt, msg._expires ) ) <NEW_LINE> del _currentMesages[idx]
Remove obsolete (expired) messages from the global _currentMesages list.
625941b930dc7b76659017e4
@slash_blueprint.route('/upcoming', methods=["POST"]) <NEW_LINE> def slack_upcoming_leaves(): <NEW_LINE> <INDENT> upcoming_leaves = EmployeeLeaveModel.query.filter( and_( EmployeeLeaveModel.slackTeamId == g.team_id, EmployeeLeaveModel.endDate >= date.today() ) ).order_by(EmployeeLeaveModel.startDate).all() <NEW_LINE> if upcoming_leaves is not None and len(upcoming_leaves) > 0: <NEW_LINE> <INDENT> slack_msg_builder = { "response_type": "in_channel", "text": "Coming up:", } <NEW_LINE> slack_msg_attachment_list = [] <NEW_LINE> for emp_leave in upcoming_leaves: <NEW_LINE> <INDENT> if emp_leave.leaveType == "ooo": <NEW_LINE> <INDENT> msg_color = "#42a5f5" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg_color = "#bbdefb" <NEW_LINE> <DEDENT> if emp_leave.startDate == emp_leave.endDate: <NEW_LINE> <INDENT> leave_period = "On " + emp_leave.startDate.strftime("%d/%b/%y") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> leave_period = emp_leave.startDate.strftime("%d/%b/%y") + " to " + emp_leave.endDate.strftime("%d/%b/%y") <NEW_LINE> <DEDENT> slack_msg_attachment = { "title": emp_leave.slackFullName + " - (" + emp_leave.slackUsername + ")", "color": msg_color, "text": emp_leave.rawComment, "fields": [ { "title": "Period", "value": leave_period, "short": True }, { "title": "Status", "value": emp_leave.leaveType.upper(), "short": True } ] } <NEW_LINE> slack_msg_attachment_list.append(slack_msg_attachment) <NEW_LINE> <DEDENT> slack_msg_builder["attachments"] = slack_msg_attachment_list <NEW_LINE> return jsonify(slack_msg_builder), 200 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return jsonify( { "response_type": "in_channel", "text": "Wow! Everybody is in today.", } ), 200
Show all upcoming OOO or WFH leaves (ending today or later) for all users on the team. :return: a Slack-formatted JSON response.
625941b9d10714528d5ffb59
def get_email2(self, principal): <NEW_LINE> <INDENT> if isinstance(principal, types.StringTypes) and self.is_inbox(principal): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> elif ICatalogBrain.providedBy(principal) and brain_is_contact(principal): <NEW_LINE> <INDENT> return principal.email2 <NEW_LINE> <DEDENT> elif IUser.providedBy(principal) or isinstance(principal, UserDict): <NEW_LINE> <INDENT> return principal.email2 <NEW_LINE> <DEDENT> elif self.is_contact(principal): <NEW_LINE> <INDENT> return self.get_contact(principal).email2 <NEW_LINE> <DEDENT> elif self.is_user(principal): <NEW_LINE> <INDENT> return self.get_user(principal).email2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Unknown principal type: %s' % str(principal))
Returns the second email address of a `principal`.
625941b992d797404e304003
def display(values): <NEW_LINE> <INDENT> width = 1 + max(len(values[s]) for s in values) <NEW_LINE> line = '+'.join(['-' * (width * 3)] * 3) <NEW_LINE> for r in rows: <NEW_LINE> <INDENT> print(''.join(values[r + c].center(width) + ('|' if c in '36' else '') for c in cols)) <NEW_LINE> if r in 'CF': print(line) <NEW_LINE> <DEDENT> return
Display the values as a 2-D grid. Args: values(dict): The sudoku in dictionary form
625941b9a17c0f6771cbdece
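A hedged setup sketch for display(); the surrounding module is assumed to define rows and cols as below, and the grid shown is an unsolved puzzle where every box still allows all digits:

rows = 'ABCDEFGHI'  # assumed module-level globals used by display()
cols = '123456789'

values = {r + c: '123456789' for r in rows for c in cols}
display(values)  # prints a 9x9 grid with separator lines after rows C and F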
def assign_texture(self, key, tex_id): <NEW_LINE> <INDENT> self._texture_assignments[key] = tex_id
Assign a texture id to a key; the key will usually be a game object id.
625941b9fbf16365ca6f6037
def ctraj(t0, t1, r): <NEW_LINE> <INDENT> if isinstance(r, (int, int32, float, float64)): <NEW_LINE> <INDENT> i = mat(range(1, r + 1)) <NEW_LINE> r = (i - 1.) / (r - 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> r = arg2array(r) <NEW_LINE> <DEDENT> if any(r > 1) or any(r < 0): <NEW_LINE> <INDENT> raise ValueError('path position values (R) must satisfy 0<=R<=1') <NEW_LINE> <DEDENT> traj = [] <NEW_LINE> for s in r.T: <NEW_LINE> <INDENT> traj.append(T.trinterp(t0, t1, float(s))) <NEW_LINE> <DEDENT> return traj
Compute a Cartesian trajectory between poses C{t0} and C{t1}. The number of points is the length of the path distance vector C{r}. Each element of C{r} gives the distance along the path, and each must be in the range [0, 1]. If C{r} is a scalar it is taken as the number of points, and the points are equally spaced between C{t0} and C{t1}. The trajectory is a list of transform matrices. @type t0: homogeneous transform @param t0: initial pose @type t1: homogeneous transform @param t1: final pose @type r: vector or scalar @param r: path distances, or number of points @rtype: list of M{4x4} matrices @return: Cartesian trajectory @see: L{trinterp}, L{jtraj}
625941b94a966d76dd550e87
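A small check of the scalar branch's spacing, independent of the toolbox's trinterp (pure numpy; the variable names are illustrative):

import numpy as np

n = 5                      # ctraj(t0, t1, 5) takes this branch
i = np.arange(1, n + 1)
r = (i - 1.0) / (n - 1)    # same formula as in ctraj
print(r)                   # [0.   0.25 0.5  0.75 1.  ]
# Each value is then passed to trinterp(t0, t1, s) to build the trajectory.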
def setSpeeds(self, speeds: list): <NEW_LINE> <INDENT> for module, speed in zip(self.modules, speeds): <NEW_LINE> <INDENT> module.setWheelSpeed(speed)
Sets the speeds of the wheels in inches per second from a list, one entry per module. Use setUniformModuleSpeed to set the same speed across all the modules.
625941b94527f215b584c2d5
def __init__(self): <NEW_LINE> <INDENT> self.__homedir = os.getenv('USERPROFILE') or os.getenv('HOME') <NEW_LINE> self.__projdir = os.path.join(self.__homedir, User.PROJECT_DIR) <NEW_LINE> self.__servers = None <NEW_LINE> self.load()
Constructor
625941b92ae34c7f2600cfac
def _ProcessLogContainer(self, logs_container: containers.GCPLogs) -> None: <NEW_LINE> <INDENT> if not logs_container.path: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> output_file = tempfile.NamedTemporaryFile( mode='w', encoding='utf-8', delete=False, suffix='.jsonl') <NEW_LINE> output_path = output_file.name <NEW_LINE> with open(logs_container.path, 'r') as input_file: <NEW_LINE> <INDENT> for line in input_file: <NEW_LINE> <INDENT> transformed_line = self._ProcessLogLine( line, logs_container.filter_expression, logs_container.project_name) <NEW_LINE> if transformed_line: <NEW_LINE> <INDENT> output_file.write(transformed_line) <NEW_LINE> output_file.write('\n') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> output_file.close() <NEW_LINE> timeline_name = 'GCP logs {0:s} "{1:s}"'.format( logs_container.project_name, logs_container.filter_expression) <NEW_LINE> container = containers.File(name=timeline_name, path=output_path) <NEW_LINE> self.state.StoreContainer(container)
Processes a GCP logs container. Args: logs_container (GCPLogs): logs container.
625941b9be383301e01b5307
def iPhone_connection(status): <NEW_LINE> <INDENT> for a in range(0, 3): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> sleep(0.1); requests.get('http://192.168.1.176/control/rf433/i_am_home/' + ('on' if status else 'off')) <NEW_LINE> sleep(0.1); requests.get('http://192.168.1.176/control/color/' + ('green' if status else 'red')) <NEW_LINE> Speak('iPhone ' + ('connected' if status else 'connection lost')) <NEW_LINE> logger.info('iPhone ' + ('connected' if status else 'connection lost')) <NEW_LINE> return <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass
Args: status (bool): True when the iPhone connects, False when the connection is lost. Updates the RF433 and color endpoints and announces the change, retrying up to 3 times.
625941b9d10714528d5ffb5a
def renameVar(self, key, newkey, **loginfo): <NEW_LINE> <INDENT> val = self.getVar(key, 0, parsing=True) <NEW_LINE> if val is not None: <NEW_LINE> <INDENT> loginfo['variable'] = newkey <NEW_LINE> loginfo['op'] = 'rename from %s' % key <NEW_LINE> loginfo['detail'] = val <NEW_LINE> self.varhistory.record(**loginfo) <NEW_LINE> self.setVar(newkey, val, ignore=True, parsing=True) <NEW_LINE> <DEDENT> for i in (__setvar_keyword__): <NEW_LINE> <INDENT> src = self.getVarFlag(key, i, False) <NEW_LINE> if src is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> dest = self.getVarFlag(newkey, i, False) or [] <NEW_LINE> dest.extend(src) <NEW_LINE> self.setVarFlag(newkey, i, dest, ignore=True) <NEW_LINE> <DEDENT> if key in self.overridedata: <NEW_LINE> <INDENT> self.overridedata[newkey] = [] <NEW_LINE> for (v, o) in self.overridedata[key]: <NEW_LINE> <INDENT> self.overridedata[newkey].append([v.replace(key, newkey), o]) <NEW_LINE> self.renameVar(v, v.replace(key, newkey)) <NEW_LINE> <DEDENT> <DEDENT> if '_' in newkey and val is None: <NEW_LINE> <INDENT> self._setvar_update_overrides(newkey, **loginfo) <NEW_LINE> <DEDENT> loginfo['variable'] = key <NEW_LINE> loginfo['op'] = 'rename (to)' <NEW_LINE> loginfo['detail'] = newkey <NEW_LINE> self.varhistory.record(**loginfo) <NEW_LINE> self.delVar(key, ignore=True)
Rename the variable key to newkey
625941b957b8e32f5248331a
def _constraints_satisfied(self, state, step): <NEW_LINE> <INDENT> if not super()._constraints_satisfied(state, step=step): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> i, sz = step <NEW_LINE> px = state[i].price <NEW_LINE> newpos_mv = state[i].shares * px + sz <NEW_LINE> trdtoday = newpos_mv - self._bod_positions[i] * px <NEW_LINE> mdv_mv = self._mdvs[i] * px <NEW_LINE> if hasattr(self, '_per_optimization_mdv_trading_limit'): <NEW_LINE> <INDENT> trdopt_shs = newpos_mv / px - self.istate[i].shares <NEW_LINE> if (np.fabs(trdopt_shs * px) > self._per_optimization_mdv_trading_limit * mdv_mv and np.fabs(trdopt_shs) > self._round_lots[i]): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return (newpos_mv <= self._smaxs[i] and newpos_mv >= self._smins[i] and np.fabs(newpos_mv) <= self._mdv_position_limit * mdv_mv and np.fabs(trdtoday) <= self._mdv_trading_limit * mdv_mv)
Test whether step satisfies the various hard/soft/restriction/etc. constraints. For this implementation of this method, we assume the constraints are satisfied for the given state, so we only test the constraints having to do with the state elements corresponding to the step. @param state: State vector. @param step: (index, desired trade in currency units) tuple. @return: Boolean depending on whether step satisfies constraints.
625941b94e4d5625662d4257
@blueprint.route("/register", methods=['GET', 'POST']) <NEW_LINE> def register(): <NEW_LINE> <INDENT> register_form = RegisterForm() <NEW_LINE> if not register_form.validate(): <NEW_LINE> <INDENT> return jsonify({ "errors": register_form.errors.items(), "success": False, "user": None, "sent_json": request.json }) <NEW_LINE> <DEDENT> user = User.create(username=request.json['username'], password=request.json['password']) <NEW_LINE> g.user = user <NEW_LINE> return jsonify({ "errors": [], "success": True, "user": g.user.username, "sent_json": request.json })
Handles the registration logic and returns a JSON result. :return: JSON with errors, a success flag, the username, and the submitted payload.
625941b94f88993c3716beee
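A hypothetical client call against a local dev server (URL and credentials are illustrative):

import requests

resp = requests.post('http://127.0.0.1:5000/register',
                     json={'username': 'alice', 'password': 'hunter2'})
print(resp.json()['success'])  # True when the form validates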
def poll(self, write_only=False): <NEW_LINE> <INDENT> events = self._poll.poll(int(SelectPoller.TIMEOUT * 1000)) <NEW_LINE> if events: <NEW_LINE> <INDENT> LOGGER.debug("Calling %s with %d events", self._handler, len(events)) <NEW_LINE> for fileno, event in events: <NEW_LINE> <INDENT> self._handler(fileno, event, write_only=write_only)
Poll until TIMEOUT waiting for an event :param bool write_only: Only process write events
625941b9711fe17d825421ed
def serialize_numpy(self, buff, numpy): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> _x = self <NEW_LINE> buff.write(_get_struct_2h().pack(_x.left_hz, _x.right_hz)) <NEW_LINE> <DEDENT> except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self))))) <NEW_LINE> except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
serialize message with numpy array types into buffer :param buff: buffer, ``StringIO`` :param numpy: numpy python module
625941b97b180e01f3dc4680
def add(self, message): <NEW_LINE> <INDENT> keys = self.keys() <NEW_LINE> if len(keys) == 0: <NEW_LINE> <INDENT> new_key = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_key = max(keys) + 1 <NEW_LINE> <DEDENT> new_path = os.path.join(self._path, str(new_key)) <NEW_LINE> f = _create_carefully(new_path) <NEW_LINE> closed = False <NEW_LINE> try: <NEW_LINE> <INDENT> if self._locked: <NEW_LINE> <INDENT> _lock_file(f) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._dump_message(message, f) <NEW_LINE> <DEDENT> except BaseException: <NEW_LINE> <INDENT> if self._locked: <NEW_LINE> <INDENT> _unlock_file(f) <NEW_LINE> <DEDENT> _sync_close(f) <NEW_LINE> closed = True <NEW_LINE> os.remove(new_path) <NEW_LINE> raise <NEW_LINE> <DEDENT> if isinstance(message, MHMessage): <NEW_LINE> <INDENT> self._dump_sequences(message, new_key) <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> if self._locked: <NEW_LINE> <INDENT> _unlock_file(f) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> if not closed: <NEW_LINE> <INDENT> _sync_close(f) <NEW_LINE> <DEDENT> <DEDENT> return new_key
Add message and return assigned key.
625941b9d8ef3951e32433b8
def defineFileGroups(self, mergeableFiles): <NEW_LINE> <INDENT> fileGroups = {} <NEW_LINE> foundFiles = [] <NEW_LINE> for mergeableFile in mergeableFiles: <NEW_LINE> <INDENT> if mergeableFile["file_lfn"] not in foundFiles: <NEW_LINE> <INDENT> foundFiles.append(mergeableFile["file_lfn"]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if mergeableFile["pnn"] not in fileGroups: <NEW_LINE> <INDENT> if self.mergeAcrossRuns: <NEW_LINE> <INDENT> fileGroups[mergeableFile["pnn"]] = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fileGroups[mergeableFile["pnn"]] = {} <NEW_LINE> <DEDENT> <DEDENT> if self.mergeAcrossRuns: <NEW_LINE> <INDENT> fileGroups[mergeableFile["pnn"]].append(mergeableFile) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if mergeableFile["file_run"] not in fileGroups[mergeableFile["pnn"]]: <NEW_LINE> <INDENT> fileGroups[mergeableFile["pnn"]][mergeableFile["file_run"]] = [] <NEW_LINE> <DEDENT> fileGroups[mergeableFile["pnn"]][mergeableFile["file_run"]].append(mergeableFile) <NEW_LINE> <DEDENT> <DEDENT> return fileGroups
_defineFileGroups_ Group mergeable files by their SE name and run number so that we don't try to merge together files on different SEs. Merging across run boundaries is configurable.
625941b97d847024c06be13b
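A hedged illustration of the grouping shape with hypothetical records, assuming mergeAcrossRuns is False:

# Hypothetical input records; keys mirror those the method reads.
mergeable = [
    {"file_lfn": "/store/a.root", "pnn": "T2_US_X", "file_run": 1},
    {"file_lfn": "/store/b.root", "pnn": "T2_US_X", "file_run": 1},
    {"file_lfn": "/store/c.root", "pnn": "T2_US_X", "file_run": 2},
    {"file_lfn": "/store/d.root", "pnn": "T1_DE_Y", "file_run": 1},
]
# Expected result shape: {pnn: {run: [files...]}}
#   {"T2_US_X": {1: [a, b], 2: [c]}, "T1_DE_Y": {1: [d]}}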
def on_experiment_end(self, runner: "IRunner") -> None: <NEW_LINE> <INDENT> if runner.engine.process_index == 0: <NEW_LINE> <INDENT> log_message = "Top models:\n" <NEW_LINE> log_message += "\n".join( [ f"{checkpoint.logpath}\t{checkpoint.metric:3.4f}" for checkpoint in self._storage ] ) <NEW_LINE> print(log_message) <NEW_LINE> <DEDENT> if self.load_best_on_end: <NEW_LINE> <INDENT> self._load(runner=runner, resume_logpath=self._storage[0].logpath)
Event handler: on experiment end, print the top checkpoints and optionally load the best one.
625941b96fece00bbac2d5b6
def test_pickle_dictionary_type_string_true(self): <NEW_LINE> <INDENT> the_type = str <NEW_LINE> data = {0: {"1ID": "A23", "Gender": "Male", "Age": 22, "Sales": 245, "BMI": "normal", "salary": 20, "Birthday": "24/06/1995"}, 1: {"IhD": "A2f3", "Gender": "Male", "Age": 22, "Sales": 245, "BMI": "normal", "salary": 20, "Birthday": "24/06/1995"}, 2: {"IjD": "Aa23", "Genkder": "Male", "Age": 22, "Sales": 245, "BMI": "normal", "salary": 20, "Birthday": "24/06/1995"}, 3: {"IgD": "A23", "Gender": "Male", "Age": 22, "Sales": 245, "BMI": "normal", "salary": 20, "Birthday": "24/06/1995"}} <NEW_LINE> data = self.pickler.pickle_dictionary_values(data) <NEW_LINE> data[2] = "This is a string" <NEW_LINE> result = (type(value) == the_type for value in data.values()) <NEW_LINE> self.assertTrue(any(result))
Asserts that at least one value in the dictionary is of type 'string'.
625941b9ab23a570cc24fffa
def signal_handler(self, _signum, _frame): <NEW_LINE> <INDENT> print("CTRL+C...") <NEW_LINE> self.stop_event.set()
Handles CTRL+C by setting the stop event.
625941b98a43f66fc4b53ee4