code (string, lengths 4 to 4.48k) | docstring (string, lengths 1 to 6.45k) | _id (string, length 24) |
---|---|---|
@contextlib.contextmanager <NEW_LINE> def tempdir(): <NEW_LINE> <INDENT> d = tempfile.mkdtemp() <NEW_LINE> try: <NEW_LINE> <INDENT> yield d <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> shutil.rmtree(d) | Create a temporary directory.
Use as a context manager so the directory is automatically cleaned up.
>>> with tempdir() as tmpdir:
... print(tmpdir) # prints a folder like /tmp/randomname | 625941b68e05c05ec3eea18b |
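De-tokenized for readability, the row above reads as the sketch below; the try/finally is what makes the docstring's cleanup promise hold even when the with-block raises. A minimal runnable version:

```python
import contextlib
import shutil
import tempfile

@contextlib.contextmanager
def tempdir():
    """Create a temporary directory and always remove it on exit."""
    d = tempfile.mkdtemp()
    try:
        yield d  # hand the path to the with-block
    finally:
        shutil.rmtree(d)  # runs even if the block raised

with tempdir() as tmp:
    print(tmp)  # e.g. /tmp/tmpa1b2c3
```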
def detectCycle(head): <NEW_LINE> <INDENT> if head is None or head.next is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> slow = head <NEW_LINE> fast = head <NEW_LINE> while fast is not None and fast.next is not None: <NEW_LINE> <INDENT> slow = slow.next <NEW_LINE> fast = fast.next.next <NEW_LINE> if slow is fast: <NEW_LINE> <INDENT> slow = head <NEW_LINE> while slow is not fast: <NEW_LINE> <INDENT> slow = slow.next <NEW_LINE> fast = fast.next <NEW_LINE> <DEDENT> return slow <NEW_LINE> <DEDENT> <DEDENT> return None | :type head: ListNode
:rtype: ListNode | 625941b65fdd1c0f98dc004c |
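The docstring promises a ListNode return, so this is Floyd's cycle-entry detection. A readable sketch of the same algorithm, with a minimal ListNode stub added so it runs standalone (the stub is an assumption, not part of the row):

```python
class ListNode:
    def __init__(self, val):
        self.val = val
        self.next = None

def detect_cycle(head):
    # Phase 1: slow/fast pointers meet inside the cycle (if one exists).
    # Phase 2: restart one pointer from head; stepping both by one,
    # they meet exactly at the node where the cycle begins.
    slow = fast = head
    while fast is not None and fast.next is not None:
        slow = slow.next
        fast = fast.next.next
        if slow is fast:
            slow = head
            while slow is not fast:
                slow = slow.next
                fast = fast.next
            return slow
    return None

# 1 -> 2 -> 3 -> back to 2: the cycle starts at node b
a, b, c = ListNode(1), ListNode(2), ListNode(3)
a.next, b.next, c.next = b, c, b
assert detect_cycle(a) is b
```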
def build_strings(self): <NEW_LINE> <INDENT> self.window_title = "Voice Emotion Recognizer" <NEW_LINE> self.main_label = "How are you Feeling today?" <NEW_LINE> self.quit_label = "Quit" <NEW_LINE> self.record_label = StringVar() <NEW_LINE> self.analyze_label = StringVar() <NEW_LINE> self.record_label.set("Record") <NEW_LINE> self.analyze_label.set("Analyze") | Declares the String labels for many of the prompts and buttons used in this GUI.
:return: None | 625941b6be383301e01b52a7 |
def test_patch_request_remapped(self): <NEW_LINE> <INDENT> client = self._create_client('basic.json') <NEW_LINE> res = client.patch('/user/1') <NEW_LINE> expected = self._read_json('basic/user/userid_patch.json') <NEW_LINE> self.assertEqual(to_unicode(res.data), expected) | PATCH /user/<userid> should serve userid_patch.json. | 625941b6dd821e528d63afc5 |
def ugettext(message, **variables): <NEW_LINE> <INDENT> return _translate('ugettext', message, **variables) | Translates `message`. | 625941b69c8ee82313fbb58f |
def add_external_recurrence(self, incoming): <NEW_LINE> <INDENT> self.external_rec, _ = get_input(incoming) | Use this Layer as recurrent connections for the convLSTM instead of the convLSTM hidden activations
Don't forget to make sure that the bias and weight shapes fit the new shape!
Parameters
-------
incoming : layer class, tensorflow tensor or placeholder
Incoming external recurrence for convLSTM layer as layer class or tensor of shape
(samples, 1, array_x, array_y, features)
Example
-------
>>> # Example for convLSTM that uses its own hidden state and the output of a higher convolutional layer as
>>> # recurrent connections
>>> conv_lstm = ConvLSTMLayer(...)
>>> conv_1 = ConvLayer(incoming=conv_lstm, ...)
>>> modified_recurrence = ConcatLayer(conv_lstm, conv_1)
>>> conv_lstm.add_external_recurrence(modified_recurrence)
>>> conv_lstm.get_output() | 625941b6b57a9660fec3369a |
def __contains__(self, value): <NEW_LINE> <INDENT> return value in self._values | Checks whether value is contained within stored time points
Parameters
----------
value : the value to check for membership
Returns
-------
True if the value is contained, else False | 625941b630bbd722463cbbdd |
def read(self, node): <NEW_LINE> <INDENT> if node == nullid: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> raw = self._read(hex(node)) <NEW_LINE> index, size = self._parsesize(raw) <NEW_LINE> return raw[(index + 1):(index + 1 + size)] | returns the file contents at this node | 625941b692d797404e303fa5 |
def SVGMatrixFromNode(node, context): <NEW_LINE> <INDENT> tagName = node.tagName.lower() <NEW_LINE> tags = ['svg:use', 'svg:symbol'] <NEW_LINE> if tagName not in tags and 'svg:' + tagName not in tags: <NEW_LINE> <INDENT> return Matrix() <NEW_LINE> <DEDENT> rect = context['rect'] <NEW_LINE> m = Matrix() <NEW_LINE> x = SVGParseCoord(node.getAttribute('x') or '0', rect[0]) <NEW_LINE> y = SVGParseCoord(node.getAttribute('y') or '0', rect[1]) <NEW_LINE> w = SVGParseCoord(node.getAttribute('width') or str(rect[0]), rect[0]) <NEW_LINE> h = SVGParseCoord(node.getAttribute('height') or str(rect[1]), rect[1]) <NEW_LINE> m = Matrix.Translation(Vector((x, y, 0.0))) <NEW_LINE> if len(context['rects']) > 1: <NEW_LINE> <INDENT> m = m * Matrix.Scale(w / rect[0], 4, Vector((1.0, 0.0, 0.0))) <NEW_LINE> m = m * Matrix.Scale(h / rect[1], 4, Vector((0.0, 1.0, 0.0))) <NEW_LINE> <DEDENT> if node.getAttribute('viewBox'): <NEW_LINE> <INDENT> viewBox = node.getAttribute('viewBox').replace(',', ' ').split() <NEW_LINE> vx = SVGParseCoord(viewBox[0], w) <NEW_LINE> vy = SVGParseCoord(viewBox[1], h) <NEW_LINE> vw = SVGParseCoord(viewBox[2], w) <NEW_LINE> vh = SVGParseCoord(viewBox[3], h) <NEW_LINE> sx = w / vw <NEW_LINE> sy = h / vh <NEW_LINE> scale = min(sx, sy) <NEW_LINE> tx = (w - vw * scale) / 2 <NEW_LINE> ty = (h - vh * scale) / 2 <NEW_LINE> m = m * Matrix.Translation(Vector((tx, ty, 0.0))) <NEW_LINE> m = m * Matrix.Translation(Vector((-vx, -vy, 0.0))) <NEW_LINE> m = m * Matrix.Scale(scale, 4, Vector((1.0, 0.0, 0.0))) <NEW_LINE> m = m * Matrix.Scale(scale, 4, Vector((0.0, 1.0, 0.0))) <NEW_LINE> <DEDENT> return m | Get transformation matrix from given node | 625941b61f5feb6acb0c496f |
def test_init(): <NEW_LINE> <INDENT> app = Flask('testapp') <NEW_LINE> ext = InvenioCommunities(app) <NEW_LINE> assert 'invenio-communities' in app.extensions <NEW_LINE> app = Flask('testapp') <NEW_LINE> ext = InvenioCommunities() <NEW_LINE> assert 'invenio-communities' not in app.extensions <NEW_LINE> ext.init_app(app) <NEW_LINE> assert 'invenio-communities' in app.extensions | Test extension initialization. | 625941b691af0d3eaac9b82e |
def _to_dict(self): <NEW_LINE> <INDENT> _dict = {} <NEW_LINE> if hasattr(self, 'voices') and self.voices is not None: <NEW_LINE> <INDENT> _dict['voices'] = [x._to_dict() for x in self.voices] <NEW_LINE> <DEDENT> return _dict | Return a json dictionary representing this model. | 625941b601c39578d7e74c5e |
def serialize( self, required=None, label=None, initial=None, help_text=None, **kwargs ): <NEW_LINE> <INDENT> serialize_function = getattr(self, f"serialize_{self.data_type}") <NEW_LINE> serialized = serialize_function(initial, **kwargs) <NEW_LINE> serialized["data_type"] = self.data_type <NEW_LINE> serialized["name"] = self.name <NEW_LINE> serialized["required"] = required if required else self.required <NEW_LINE> serialized["label"] = label if label else self.label <NEW_LINE> serialized["help_text"] = help_text if help_text else self.help_text <NEW_LINE> return serialized | Execute the serialize function appropriate for the current data type | 625941b6d99f1b3c44c673b2 |
def p_Ident_DOT(p): <NEW_LINE> <INDENT> p[0] = p[1] | Ident_DOT : Identifier_DOT
| 625941b6be8e80087fb20a69 |
def test_rus_fit_transform_half(): <NEW_LINE> <INDENT> ratio = 0.5 <NEW_LINE> rus = RandomUnderSampler(ratio=ratio, random_state=RND_SEED) <NEW_LINE> X_resampled, y_resampled = rus.fit_transform(X, Y) <NEW_LINE> currdir = os.path.dirname(os.path.abspath(__file__)) <NEW_LINE> X_gt = np.load(os.path.join(currdir, 'data', 'rus_x_05.npy')) <NEW_LINE> y_gt = np.load(os.path.join(currdir, 'data', 'rus_y_05.npy')) <NEW_LINE> assert_array_equal(X_resampled, X_gt) <NEW_LINE> assert_array_equal(y_resampled, y_gt) | Test the fit transform routine with a 0.5 ratio | 625941b65f7d997b871748b5 |
def save_params(self): <NEW_LINE> <INDENT> if 'PARAMETERS_FILE' not in self.dynamic_env: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> file_path = preprocess(self.dynamic_env['PARAMETERS_FILE'], self.dynamic_env) <NEW_LINE> with open(file_path, "w") as f_param: <NEW_LINE> <INDENT> for k in self.dynamic_env.keys(): <NEW_LINE> <INDENT> f_param.write('%s: %s\n' % (k, preprocess(self.dynamic_env[k], environ=self.dynamic_env))) <NEW_LINE> <DEDENT> f_param.write('#----------------------------------------\n') <NEW_LINE> for k in os.environ.keys(): <NEW_LINE> <INDENT> f_param.write('%s: %s\n' % (k, os.environ[k])) | Converts the input to a string and dumps it to the provided file.
The file is only generated if local environment contains
a key called `PARAMETERS_FILE`. Otherwise we assume that the
user does not want a separate file.
TODO: generate a runnable yaml file. | 625941b6d4950a0f3b08c175 |
def evaluatePressure(self, t, c): <NEW_LINE> <INDENT> u_shape = c[('u', 0)].shape <NEW_LINE> grad_shape = c[('grad(u)', 0)].shape <NEW_LINE> if self.pressureIncrementModelIndex is None: <NEW_LINE> <INDENT> phi = np.zeros(c[('r', 0)][:].shape, 'd') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if u_shape == self.pressureIncrementModel.q[('u', 0)].shape: <NEW_LINE> <INDENT> phi = self.pressureIncrementModel.q[('u', 0)] <NEW_LINE> rho = self.fluidModel.coefficients.q_rho <NEW_LINE> nu = self.fluidModel.coefficients.q_nu <NEW_LINE> velocity = self.fluidModel.q[('uncorrectedVelocity', 0)] <NEW_LINE> <DEDENT> elif u_shape == self.pressureIncrementModel.ebqe[('u', 0)].shape: <NEW_LINE> <INDENT> phi = self.pressureIncrementModel.ebqe[('u', 0)] <NEW_LINE> rho = self.fluidModel.coefficients.ebqe_rho <NEW_LINE> nu = self.fluidModel.coefficients.ebqe_nu <NEW_LINE> velocity = self.fluidModel.ebqe[('uncorrectedVelocity', 0)] <NEW_LINE> <DEDENT> <DEDENT> p = c[('u', 0)] <NEW_LINE> p_last = c[('u_last', 0)] <NEW_LINE> if self.useRotationalForm: <NEW_LINE> <INDENT> for i in range(c[('f', 0)].shape[-1]): <NEW_LINE> <INDENT> c[('f', 0)][..., i] = np.min(rho * nu) * velocity[..., i] <NEW_LINE> <DEDENT> <DEDENT> c[('r', 0)][:] = p - p_last - phi <NEW_LINE> c[('dr', 0, 0)][:] = 1.0 | Evaluate the coefficients after getting the specified velocity and density | 625941b66e29344779a62430 |
def _update_sprite_heading(self): <NEW_LINE> <INDENT> i = (int(self._heading + 5) % 360) / (360 / SHAPES) <NEW_LINE> if not self._hidden and self.spr is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.spr.set_shape(self._shapes[i]) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> self.spr.set_shape(self._shapes[0]) | Update the sprite to reflect the current heading | 625941b6ab23a570cc24ff9a |
def __init__(self,house=None, api_type=None, api_name=None, schema=None, entry_columns=None): <NEW_LINE> <INDENT> self.house = house <NEW_LINE> self.api_type = api_type <NEW_LINE> self.api_name = api_name <NEW_LINE> self.schema = schema <NEW_LINE> self.table = ('_').join([self.house, self.api_type, self.api_name]) <NEW_LINE> self.conn = utils.connect_sqlalchemy() <NEW_LINE> self.entry_structure = dict.fromkeys(entry_columns) | Parent class for data capture.
:param house: str
:param api_type: str
:param api_name: str
:param schema: str
:param entry_columns: list | 625941b68a43f66fc4b53e84 |
def warning(self, message): <NEW_LINE> <INDENT> c_warning_color = '\033[93m' <NEW_LINE> c_endc = '\033[0m' <NEW_LINE> print(c_warning_color + message + c_endc) <NEW_LINE> if self.out_widget: <NEW_LINE> <INDENT> w_warning_color = "<font color=\"Yellow\">" <NEW_LINE> w_endc = "</font>" <NEW_LINE> self.out_widget.append(w_warning_color + message + w_endc) <NEW_LINE> <DEDENT> entry = ['Warning', message] <NEW_LINE> self.log.append(entry) <NEW_LINE> return entry | @brief Warning messaging | 625941b69b70327d1c4e0bee |
def error(self): <NEW_LINE> <INDENT> error_array = [] <NEW_LINE> if self._response_error: <NEW_LINE> <INDENT> error_array.append(self._response_error) <NEW_LINE> <DEDENT> error_array.extend(self._get_resp_body_errors()) <NEW_LINE> if len(error_array) > 0: <NEW_LINE> <INDENT> error_string = "%s: %s" % (self.verb, ", ".join(error_array)) <NEW_LINE> return error_string <NEW_LINE> <DEDENT> return "" | Builds and returns the api error message. | 625941b624f1403a92600984 |
def get_distance_from_point(self, p_end): <NEW_LINE> <INDENT> a = numpy.array((self.base_position.x, self.base_position.y, self.base_position.z)) <NEW_LINE> b = numpy.array((p_end.x, p_end.y, p_end.z)) <NEW_LINE> distance = numpy.linalg.norm(a - b) <NEW_LINE> return distance | Given a Vector3 Object, get distance from current position
:param p_end:
:return: | 625941b616aa5153ce362293 |
def _observedFromPupilCoords(xPupil, yPupil, obs_metadata=None, includeRefraction=True, epoch=2000.0): <NEW_LINE> <INDENT> are_arrays = _validate_inputs([xPupil, yPupil], ['xPupil', 'yPupil'], "observedFromPupilCoords") <NEW_LINE> if obs_metadata is None: <NEW_LINE> <INDENT> raise RuntimeError("Cannot call observedFromPupilCoords without obs_metadata") <NEW_LINE> <DEDENT> if epoch is None: <NEW_LINE> <INDENT> raise RuntimeError("Cannot call observedFromPupilCoords; epoch is None") <NEW_LINE> <DEDENT> if obs_metadata.rotSkyPos is None: <NEW_LINE> <INDENT> raise RuntimeError("Cannot call observedFromPupilCoords without rotSkyPos " + "in obs_metadata") <NEW_LINE> <DEDENT> if obs_metadata.pointingRA is None or obs_metadata.pointingDec is None: <NEW_LINE> <INDENT> raise RuntimeError("Cannot call observedFromPupilCoords " + "without pointingRA, pointingDec in obs_metadata") <NEW_LINE> <DEDENT> if obs_metadata.mjd is None: <NEW_LINE> <INDENT> raise RuntimeError("Cannot calculate RA, Dec without mjd " + "in obs_metadata") <NEW_LINE> <DEDENT> ra_pointing, dec_pointing = _observedFromICRS(obs_metadata._pointingRA, obs_metadata._pointingDec, obs_metadata=obs_metadata, epoch=epoch, includeRefraction=includeRefraction) <NEW_LINE> theta = obs_metadata._rotSkyPos <NEW_LINE> x_g = xPupil*np.cos(theta) - yPupil*np.sin(theta) <NEW_LINE> y_g = xPupil*np.sin(theta) + yPupil*np.cos(theta) <NEW_LINE> if are_arrays: <NEW_LINE> <INDENT> raObs, decObs = palpy.dtp2sVector(x_g, y_g, ra_pointing, dec_pointing) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raObs, decObs = palpy.dtp2s(x_g, y_g, ra_pointing, dec_pointing) <NEW_LINE> <DEDENT> return raObs, decObs | Convert pupil coordinates into observed (RA, Dec)
@param [in] xPupil -- pupil coordinates in radians.
Can be a numpy array or a number.
@param [in] yPupil -- pupil coordinates in radians.
Can be a numpy array or a number.
@param [in] obs_metadata -- an instantiation of ObservationMetaData characterizing
the state of the telescope
@param [in] epoch -- julian epoch of the mean equinox used for the coordinate
transformations (in years; defaults to 2000)
@param [in] includeRefraction -- a boolean which controls the effects of refraction
(refraction is used when finding the observed coordinates of the boresite specified
by obs_metadata)
@param [out] a 2-D numpy array in which the first row is observed RA and the second
row is observed Dec (both in radians). Note: these are not ICRS coordinates.
These are RA and Dec-like coordinates resulting from applying precession, nutation,
diurnal aberration and annual aberration on top of ICRS coordinates.
WARNING: This method does not account for apparent motion due to parallax.
This method is only useful for mapping positions on a theoretical focal plane
to positions on the celestial sphere. | 625941b6e8904600ed9f1d43 |
def to_jd(year, dayofyear): <NEW_LINE> <INDENT> return gregorian.to_jd(year, 1, 1) + dayofyear - 1 | Return Julian day count of given ordinal date. | 625941b699cbb53fe6792a02 |
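The conversion is a plain offset from January 1 of the given year. A hypothetical sanity check, assuming the convertdate-style `gregorian` module the row appears to rely on (an assumption; only `gregorian.to_jd(year, month, day)` is used):

```python
from convertdate import gregorian  # assumed source of the row's helper

def to_jd(year, dayofyear):
    # Julian day of Jan 1 plus the zero-based day-of-year offset.
    return gregorian.to_jd(year, 1, 1) + dayofyear - 1

# day-of-year 32 of 2020 is February 1, 2020
assert to_jd(2020, 32) == gregorian.to_jd(2020, 2, 1)
```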
def save_object(self): <NEW_LINE> <INDENT> signals.crud_pre_save.send(self, current=self.current, object=self.object) <NEW_LINE> obj_is_new = not self.object.exist <NEW_LINE> self.object.blocking_save() <NEW_LINE> signals.crud_post_save.send(self, current=self.current, object=self.object) | Saves object into DB.
Triggers pre_save and post_save signals.
Sets task_data['``added_obj``'] if object is new. | 625941b6293b9510aa2c30b4 |
def quartiles(self) -> t.Tuple[float, float, float]: <NEW_LINE> <INDENT> return self.percentile(25), self.percentile(50), self.percentile(75) | Calculates the 3 quartiles (1, 2 and 3) | 625941b624f1403a92600985 |
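A standalone equivalent of the method above, with numpy's percentile standing in for the class's own `percentile()` helper (the helper itself is not shown in the row):

```python
import numpy as np

def quartiles(values):
    # Q1, median, and Q3 in a single percentile call.
    q1, q2, q3 = np.percentile(values, [25, 50, 75])
    return q1, q2, q3

print(quartiles([1, 2, 3, 4, 5, 6, 7, 8, 9]))  # (3.0, 5.0, 7.0)
```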
def plot_slice(self, data, idx): <NEW_LINE> <INDENT> plt.figure() <NEW_LINE> plt.imshow(data[idx, :, :], cmap='gray') <NEW_LINE> plt.axis('off') <NEW_LINE> plt.show() <NEW_LINE> return | PLOT_SLICE
Show one slice according to the given index. | 625941b6099cdd3c635f0a77 |
def throttling_all(*validators): <NEW_LINE> <INDENT> def decorator(klass): <NEW_LINE> <INDENT> dispatch = getattr(klass, 'dispatch') <NEW_LINE> setattr(klass, 'dispatch', throttling(*validators)(dispatch)) <NEW_LINE> return klass <NEW_LINE> <DEDENT> return decorator | Adds throttling validators to a class. | 625941b6442bda511e8be241 |
def replace(interval=Unchanged, count=Unchanged, until=Unchanged, exceptions=Unchanged, monthly=Unchanged): <NEW_LINE> <INDENT> pass | Return a copy of this recurrence rule with new specified fields. | 625941b663b5f9789fde6f00 |
def GetAccountInfo(self, sessionID, liveTradeID, key): <NEW_LINE> <INDENT> pass | Parameters:
- sessionID
- liveTradeID
- key | 625941b66aa9bd52df036bbd |
def __init__(self, s): <NEW_LINE> <INDENT> self.s = s <NEW_LINE> self.pw = pw = [1] * (len(s) + 1) <NEW_LINE> l = len(s) <NEW_LINE> self.h = h = [0] * (l + 1) <NEW_LINE> v = 0 <NEW_LINE> for i in range(l): <NEW_LINE> <INDENT> h[i + 1] = v = (v * self.base + ord(s[i])) % self.mod <NEW_LINE> <DEDENT> v = 1 <NEW_LINE> for i in range(l): <NEW_LINE> <INDENT> pw[i + 1] = v = v * self.base % self.mod | Builds the rolling hash for s in O(|s|)
cf. construction takes at most ~200 ms for |s| <= 5000 | 625941b615fb5d323cde0924 |
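The constructor references `self.base` and `self.mod` without defining them, so they are presumably class attributes. A self-contained sketch with assumed constants, plus the usual O(1) substring-hash query these prefix tables exist to support (the `get` method is an addition, not in the row):

```python
class RollingHash:
    base = 1007            # assumed; the row leaves base/mod undefined
    mod = (1 << 61) - 1    # assumed large prime modulus

    def __init__(self, s):
        self.s = s
        l = len(s)
        self.h = h = [0] * (l + 1)    # h[i] = hash of prefix s[:i]
        self.pw = pw = [1] * (l + 1)  # pw[i] = base**i % mod
        v = 0
        for i in range(l):
            h[i + 1] = v = (v * self.base + ord(s[i])) % self.mod
        v = 1
        for i in range(l):
            pw[i + 1] = v = v * self.base % self.mod

    def get(self, l, r):
        """Hash of s[l:r] in O(1)."""
        return (self.h[r] - self.h[l] * self.pw[r - l]) % self.mod

rh = RollingHash("abcabc")
assert rh.get(0, 3) == rh.get(3, 6)  # both hash "abc"
```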
def get_courses(self, branch='published', qualifiers=None): <NEW_LINE> <INDENT> if qualifiers is None: <NEW_LINE> <INDENT> qualifiers = {} <NEW_LINE> <DEDENT> qualifiers.update({"versions.{}".format(branch): {"$exists": True}}) <NEW_LINE> matching = self.db_connection.find_matching_course_indexes(qualifiers) <NEW_LINE> version_guids = [] <NEW_LINE> id_version_map = {} <NEW_LINE> for structure in matching: <NEW_LINE> <INDENT> version_guid = structure['versions'][branch] <NEW_LINE> version_guids.append(version_guid) <NEW_LINE> id_version_map[version_guid] = structure['_id'] <NEW_LINE> <DEDENT> course_entries = self.db_connection.find_matching_structures({'_id': {'$in': version_guids}}) <NEW_LINE> result = [] <NEW_LINE> for entry in course_entries: <NEW_LINE> <INDENT> envelope = { 'course_id': id_version_map[entry['_id']], 'branch': branch, 'structure': entry, } <NEW_LINE> root = entry['root'] <NEW_LINE> result.extend(self._load_items(envelope, [root], 0, lazy=True)) <NEW_LINE> <DEDENT> return result | Returns a list of course descriptors matching any given qualifiers.
qualifiers should be a dict of keywords matching the db fields or any
legal query for mongo to use against the active_versions collection.
Note, this is to find the current head of the named branch type
(e.g., 'draft'). To get specific versions via guid use get_course.
:param branch: the branch for which to return courses. Default value is 'published'.
:param qualifiers: an optional dict restricting which elements should match | 625941b6a4f1c619b28afe5d |
def bfs_traverse(lista): <NEW_LINE> <INDENT> nodes = [lista[1], lista[2]] <NEW_LINE> yield lista[0] <NEW_LINE> for q in nodes: <NEW_LINE> <INDENT> if q is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> yield q[0] <NEW_LINE> nodes.append(q[1]) <NEW_LINE> nodes.append(q[2]) | BFS traversal | 625941b699fddb7c1c9de1ae |
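The same traversal de-tokenized; a deque makes the queue explicit, and guarding on the node (rather than its value) is what the original conditional expressions were trying to do:

```python
from collections import deque

def bfs_traverse(tree):
    # Breadth-first walk of a [value, left, right] nested-list tree,
    # where left/right are None or further [value, left, right] lists.
    queue = deque([tree])
    while queue:
        node = queue.popleft()
        if node is None:
            continue
        yield node[0]
        queue.append(node[1])
        queue.append(node[2])

t = [1, [2, None, None], [3, [4, None, None], None]]
assert list(bfs_traverse(t)) == [1, 2, 3, 4]
```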
def CheckForHeaderGuard(filename, lines, error): <NEW_LINE> <INDENT> cppvar = GetHeaderGuardCPPVariable(filename) <NEW_LINE> ifndef = None <NEW_LINE> ifndef_linenum = 0 <NEW_LINE> define = None <NEW_LINE> endif = None <NEW_LINE> endif_linenum = 0 <NEW_LINE> for linenum, line in enumerate(lines): <NEW_LINE> <INDENT> linesplit = line.split() <NEW_LINE> if len(linesplit) >= 2: <NEW_LINE> <INDENT> if not ifndef and linesplit[0] == '#ifndef': <NEW_LINE> <INDENT> ifndef = linesplit[1] <NEW_LINE> ifndef_linenum = linenum <NEW_LINE> <DEDENT> if not define and linesplit[0] == '#define': <NEW_LINE> <INDENT> define = linesplit[1] <NEW_LINE> <DEDENT> <DEDENT> if line.startswith('#endif'): <NEW_LINE> <INDENT> endif = line <NEW_LINE> endif_linenum = linenum <NEW_LINE> <DEDENT> <DEDENT> if not ifndef: <NEW_LINE> <INDENT> error(filename, 0, 'build/header_guard', 5, 'No #ifndef header guard found, suggested CPP variable is: %s' % cppvar) <NEW_LINE> return <NEW_LINE> <DEDENT> if not define: <NEW_LINE> <INDENT> error(filename, 0, 'build/header_guard', 5, 'No #define header guard found, suggested CPP variable is: %s' % cppvar) <NEW_LINE> return <NEW_LINE> <DEDENT> if ifndef != cppvar: <NEW_LINE> <INDENT> error_level = 0 <NEW_LINE> if ifndef != cppvar + '_': <NEW_LINE> <INDENT> error_level = 5 <NEW_LINE> <DEDENT> ParseNolintSuppressions(filename, lines[ifndef_linenum], ifndef_linenum, error) <NEW_LINE> error(filename, ifndef_linenum, 'build/header_guard', error_level, '#ifndef header guard has wrong style, please use: %s' % cppvar) <NEW_LINE> <DEDENT> if define != ifndef: <NEW_LINE> <INDENT> error(filename, ifndef_linenum + 1, 'build/header_guard', 5, '#ifndef and #define don\'t match, suggested CPP variable is: %s' % cppvar) <NEW_LINE> return <NEW_LINE> <DEDENT> if endif != ('#endif // %s' % cppvar): <NEW_LINE> <INDENT> error_level = 0 <NEW_LINE> if endif != ('#endif // %s' % (cppvar + '_')): <NEW_LINE> <INDENT> error_level = 5 <NEW_LINE> <DEDENT> ParseNolintSuppressions(filename, lines[endif_linenum], endif_linenum, error) <NEW_LINE> error(filename, endif_linenum, 'build/header_guard', error_level, '#endif line should be "#endif // %s"' % cppvar) | Checks that the file contains a header guard.
Logs an error if no #ifndef header guard is present. For other
headers, checks that the full pathname is used.
Args:
filename: The name of the C++ header file.
lines: An array of strings, each representing a line of the file.
error: The function to call with any errors found. | 625941b65fcc89381b1e14df |
def on_intent(intent_request, session): <NEW_LINE> <INDENT> print("on_intent requestId=" + intent_request['requestId'] + ", sessionId=" + session['sessionId']) <NEW_LINE> intent = intent_request['intent'] <NEW_LINE> intent_name = intent_request['intent']['name'] <NEW_LINE> if intent_name == "FeedIntent": <NEW_LINE> <INDENT> return get_feeding_response() <NEW_LINE> <DEDENT> elif intent_name == "StatsIntent": <NEW_LINE> <INDENT> return get_stats_response() <NEW_LINE> <DEDENT> elif intent_name == "AMAZON.HelpIntent": <NEW_LINE> <INDENT> return get_welcome_response() <NEW_LINE> <DEDENT> elif intent_name == "AMAZON.CancelIntent" or intent_name == "AMAZON.StopIntent": <NEW_LINE> <INDENT> return handle_session_end_request() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Invalid intent") | Called when the user specifies an intent for this skill | 625941b6e5267d203edcdabc |
def poll(self, message, timeout=None): <NEW_LINE> <INDENT> if not timeout: <NEW_LINE> <INDENT> timeout = self.poll_timeout.duration <NEW_LINE> <DEDENT> msg_len = len(message) <NEW_LINE> if msg_len > 0xFFFE: <NEW_LINE> <INDENT> raise JobError("Message was too long to send!") <NEW_LINE> <DEDENT> c_iter = 0 <NEW_LINE> response = None <NEW_LINE> delay = self.settings['poll_delay'] <NEW_LINE> self.logger.debug("Connecting to LAVA Coordinator on %s:%s timeout=%d seconds." % ( self.settings['coordinator_hostname'], self.settings['port'], timeout)) <NEW_LINE> while True: <NEW_LINE> <INDENT> c_iter += self.settings['poll_delay'] <NEW_LINE> if self._connect(delay): <NEW_LINE> <INDENT> delay = self.settings['poll_delay'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> delay += 2 <NEW_LINE> continue <NEW_LINE> <DEDENT> if not c_iter % int(10 * self.settings['poll_delay']): <NEW_LINE> <INDENT> self.logger.debug("sending message: %s waited %s of %s seconds" % ( json.loads(message)['request'], c_iter, int(timeout))) <NEW_LINE> <DEDENT> if not self._send_message(message): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.sock.shutdown(socket.SHUT_WR) <NEW_LINE> response = self._recv_message() <NEW_LINE> self.sock.close() <NEW_LINE> try: <NEW_LINE> <INDENT> json_data = json.loads(response) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self.logger.debug("response starting '%s' was not JSON" % response[:42]) <NEW_LINE> self.finalise_protocol() <NEW_LINE> break <NEW_LINE> <DEDENT> if json_data['response'] != 'wait': <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> time.sleep(delay) <NEW_LINE> <DEDENT> if c_iter > timeout: <NEW_LINE> <INDENT> self.finalise_protocol() <NEW_LINE> raise JobError("protocol %s timed out" % self.name) <NEW_LINE> <DEDENT> <DEDENT> return response | Blocking, synchronous polling of the Coordinator on the configured port.
Single send operations larger than 0xFFFE bytes are rejected to prevent truncation.
:param message: The message to send to the Coordinator, as a JSON string.
:return: a JSON string of the response to the poll | 625941b6be7bc26dc91cd421 |
def _splitSeries(self, aoSeries): <NEW_LINE> <INDENT> if len(aoSeries) <= 1: <NEW_LINE> <INDENT> if len(aoSeries) < 1: <NEW_LINE> <INDENT> return []; <NEW_LINE> <DEDENT> return [aoSeries,]; <NEW_LINE> <DEDENT> dUnitSeries = dict(); <NEW_LINE> for oSeries in aoSeries: <NEW_LINE> <INDENT> if oSeries.iUnit not in dUnitSeries: <NEW_LINE> <INDENT> dUnitSeries[oSeries.iUnit] = []; <NEW_LINE> <DEDENT> dUnitSeries[oSeries.iUnit].append(oSeries); <NEW_LINE> <DEDENT> for iUnit in dUnitSeries: <NEW_LINE> <INDENT> def mycmp(oSelf, oOther): <NEW_LINE> <INDENT> iCmp = utils.stricmp(oSelf.oBuildCategory.sProduct, oOther.oBuildCategory.sProduct); <NEW_LINE> if iCmp != 0: <NEW_LINE> <INDENT> return iCmp; <NEW_LINE> <DEDENT> iCmp = utils.stricmp(oSelf.oBuildCategory.sBranch, oOther.oBuildCategory.sBranch); <NEW_LINE> if iCmp != 0: <NEW_LINE> <INDENT> return iCmp; <NEW_LINE> <DEDENT> iCmp = utils.stricmp(oSelf.oBuildCategory.sType, oOther.oBuildCategory.sType); <NEW_LINE> if iCmp != 0: <NEW_LINE> <INDENT> return iCmp; <NEW_LINE> <DEDENT> iCmp = utils.stricmp(oSelf.oTestBox.sName, oOther.oTestBox.sName); <NEW_LINE> if iCmp != 0: <NEW_LINE> <INDENT> return iCmp; <NEW_LINE> <DEDENT> return 0; <NEW_LINE> <DEDENT> dUnitSeries[iUnit] = sorted(dUnitSeries[iUnit], key = functools.cmp_to_key(mycmp)); <NEW_LINE> <DEDENT> cMaxPerGraph = self._dParams[WuiMain.ksParamGraphWizMaxPerGraph]; <NEW_LINE> aaoRet = []; <NEW_LINE> for iUnit in dUnitSeries: <NEW_LINE> <INDENT> aoUnitSeries = dUnitSeries[iUnit]; <NEW_LINE> while len(aoUnitSeries) > cMaxPerGraph: <NEW_LINE> <INDENT> aaoRet.append(aoUnitSeries[:cMaxPerGraph]); <NEW_LINE> aoUnitSeries = aoUnitSeries[cMaxPerGraph:]; <NEW_LINE> <DEDENT> if len(aoUnitSeries) > 0: <NEW_LINE> <INDENT> aaoRet.append(aoUnitSeries); <NEW_LINE> <DEDENT> <DEDENT> return aaoRet; | Splits the data series (ReportGraphModel.DataSeries) into one or more graphs.
Returns an array of data series arrays. | 625941b6a17c0f6771cbde6f |
def clean_up(): <NEW_LINE> <INDENT> catalog_dir = os.path.join(prefs.pref('ManagedInstallDir'), 'catalogs') <NEW_LINE> for item in os.listdir(catalog_dir): <NEW_LINE> <INDENT> if item not in _CATALOG: <NEW_LINE> <INDENT> os.unlink(os.path.join(catalog_dir, item)) | Removes any catalog files that are no longer in use by this client | 625941b6507cdc57c6306aed |
def setXr(self, Xr): <NEW_LINE> <INDENT> return _core.CKroneckerCF_setXr(self, Xr) | setXr(CKroneckerCF self, limix::CovarInput const & Xr)
Parameters
----------
Xr: limix::CovarInput const & | 625941b631939e2706e4cc8c |
def __init__(self, include_related_objects=None): <NEW_LINE> <INDENT> self.swagger_types = { 'include_related_objects': 'bool' } <NEW_LINE> self.attribute_map = { 'include_related_objects': 'include_related_objects' } <NEW_LINE> self._include_related_objects = include_related_objects | RetrieveCatalogObjectRequest - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition. | 625941b6a05bb46b383ec648 |
def energy_step(inputs, states): <NEW_LINE> <INDENT> assert_msg = "States must be a list. However states {} is of type {}".format(states, type(states)) <NEW_LINE> assert isinstance(states, list) or isinstance(states, tuple), assert_msg <NEW_LINE> """ Some parameters required for shaping tensors""" <NEW_LINE> batch_size = encoder_out_seq.shape[0] <NEW_LINE> en_seq_len, en_hidden = encoder_out_seq.shape[1], encoder_out_seq.shape[2] <NEW_LINE> de_hidden = inputs.shape[-1] <NEW_LINE> """ Computing S.Wa where S=[s0, s1, ..., si]""" <NEW_LINE> reshaped_enc_outputs = K.reshape(encoder_out_seq, (batch_size * en_seq_len, en_hidden)) <NEW_LINE> W_a_dot_s = K.reshape(K.dot(reshaped_enc_outputs, self.W_a), (batch_size, en_seq_len, en_hidden)) <NEW_LINE> if verbose: <NEW_LINE> <INDENT> print('wa.s>',W_a_dot_s.shape) <NEW_LINE> <DEDENT> """ Computing hj.Ua """ <NEW_LINE> U_a_dot_h = K.expand_dims(K.dot(inputs, self.U_a), 1) <NEW_LINE> if verbose: <NEW_LINE> <INDENT> print('Ua.h>',U_a_dot_h.shape) <NEW_LINE> <DEDENT> """ tanh(S.Wa + hj.Ua) """ <NEW_LINE> reshaped_Ws_plus_Uh = K.tanh(K.reshape(W_a_dot_s + U_a_dot_h, (batch_size * en_seq_len, en_hidden))) <NEW_LINE> if verbose: <NEW_LINE> <INDENT> print('Ws+Uh>', reshaped_Ws_plus_Uh.shape) <NEW_LINE> <DEDENT> """ softmax(va.tanh(S.Wa + hj.Ua)) """ <NEW_LINE> e_i = K.reshape(K.dot(reshaped_Ws_plus_Uh, self.V_a), (batch_size, en_seq_len)) <NEW_LINE> e_i = K.softmax(e_i) <NEW_LINE> if verbose: <NEW_LINE> <INDENT> print('ei>', e_i.shape) <NEW_LINE> <DEDENT> return e_i, [e_i] | Step function for computing energy for a single decoder state | 625941b660cbc95b062c6364 |
def get_nearest_room(rooms, list_of_rooms): <NEW_LINE> <INDENT> starting_room = None <NEW_LINE> nearest_room = None <NEW_LINE> center_point = midpoint(rooms) <NEW_LINE> for room in list_of_rooms: <NEW_LINE> <INDENT> new_center = midpoint(room) <NEW_LINE> dist = ((new_center[0] - center_point[0]) ** 2 + (new_center[1] - center_point[1]) ** 2) ** 0.5 <NEW_LINE> if not starting_room or (starting_room and dist < nearest_room): <NEW_LINE> <INDENT> starting_room = room <NEW_LINE> nearest_room = dist <NEW_LINE> <DEDENT> <DEDENT> return starting_room | Helper function for finding the nearest room when building bridges
Args:
rooms(int)
list_of_rooms(list): | 625941b6fff4ab517eb2f254 |
def test_corr_actual_video(self): <NEW_LINE> <INDENT> eval = VideoEvaluator(vid_metric='ME profile correlation') <NEW_LINE> val = eval(recon_vid_list,true_vid_list) <NEW_LINE> print(np.mean(val)) <NEW_LINE> self.assertEqual(val.flatten(), 721050) | Test profile correlation evaluation for true and reconstructed video
| 625941b6099cdd3c635f0a78 |
def _pmtkAck(self, sentence): <NEW_LINE> <INDENT> keywords = ['PMTK', 'command', 'flag'] <NEW_LINE> return self._mixhash(keywords, sentence) | convert the ack message | 625941b6c4546d3d9de7284c |
def handlers_for_address(self, path): <NEW_LINE> <INDENT> def callback(path, *args): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> log('Calling {} for {}'.format(actions[path].__name__, path)) <NEW_LINE> actions[path](*args) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> log('No callback for {}'.format(path)) <NEW_LINE> <DEDENT> <DEDENT> yield pythonosc.dispatcher.Handler(callback, []) | yields Handler namedtuples matching the given OSC pattern. | 625941b6ac7a0e7691ed3ef5 |
def can_bid(self, member): <NEW_LINE> <INDENT> if self.get_last_bidder() == member: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return member.auction_bids_left(self) | Returns True if the member has committed bids left to use on the
auction, and is not the last bidder. | 625941b68e71fb1e9831d5c9 |
def OnEraseBackground(self, event): <NEW_LINE> <INDENT> if not self._backgroundImage: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self._imageStretchStyle == _StyleTile: <NEW_LINE> <INDENT> dc = event.GetDC() <NEW_LINE> if not dc: <NEW_LINE> <INDENT> dc = wx.ClientDC(self) <NEW_LINE> rect = self.GetUpdateRegion().GetBox() <NEW_LINE> dc.SetClippingRegion(rect) <NEW_LINE> <DEDENT> self.TileBackground(dc) | Handles the wx.EVT_ERASE_BACKGROUND event. | 625941b6283ffb24f3c55728 |
def _paje_clca(self, agem_holder, af_nbenf, paje_base, inactif, partiel1, partiel2, P = law.fam): <NEW_LINE> <INDENT> agem = self.split_by_roles(agem_holder, roles = ENFS) <NEW_LINE> paje = paje_base >= 0 <NEW_LINE> age_m_benjamin = age_en_mois_benjamin(agem) <NEW_LINE> condition1 = (af_nbenf == 1) * (age_m_benjamin >= 0) * (age_m_benjamin < P.paje.clca.duree1) <NEW_LINE> age_benjamin = floor(age_m_benjamin / 12) <NEW_LINE> condition2 = (age_benjamin <= (P.paje.base.age - 1)) <NEW_LINE> condition = (af_nbenf >= 2) * condition2 + condition1 <NEW_LINE> paje_clca = (condition * P.af.bmaf) * ( (not_(paje)) * (inactif * P.paje.clca.sansab_tx_inactif + partiel1 * P.paje.clca.sansab_tx_partiel1 + partiel2 * P.paje.clca.sansab_tx_partiel2) + (paje) * (inactif * P.paje.clca.avecab_tx_inactif + partiel1 * P.paje.clca.avecab_tx_partiel1 + partiel2 * P.paje.clca.avecab_tx_partiel2)) <NEW_LINE> return 12 * paje_clca | Young child benefit (PAJE) - supplement for free choice of activity (CLCA)
'fam'
Parameters:
-----------
age : age in months
af_nbenf : number of children in the sense of the family allowances
paje_base : PAJE base allowance
inactif : inactivity indicator
partiel1 : Employee: working time not exceeding 50% of the working time set in the company for employees.
VRP or self-employed person working part-time: working time not exceeding 76 hours per month
and a monthly professional income less than or equal to (smic_8.27*169*85%)
partiel2 : Employee: working time between 50 and 80% of the working time set in the company.
VRP or self-employed person working part-time: working time between 77 and 122 hours per month and a monthly professional income not exceeding
(smic_8.27*169*136%)
http://www.caf.fr/wps/portal/particuliers/catalogue/metropole/paje | 625941b6796e427e537b03dd |
def testTruncateTCEDNS(self): <NEW_LINE> <INDENT> name = 'atruncatetc.tests.powerdns.com.' <NEW_LINE> query = dns.message.make_query(name, 'A', 'IN', use_edns=True, payload=4096, want_dnssec=True) <NEW_LINE> response = dns.message.make_response(query, our_payload=4242) <NEW_LINE> rrset = dns.rrset.from_text(name, 3600, dns.rdataclass.IN, dns.rdatatype.A, '127.0.0.1') <NEW_LINE> response.answer.append(rrset) <NEW_LINE> response.flags |= dns.flags.TC <NEW_LINE> expectedResponse = dns.message.make_response(query, our_payload=4242) <NEW_LINE> expectedResponse.flags |= dns.flags.TC <NEW_LINE> (receivedQuery, receivedResponse) = self.sendUDPQuery(query, response) <NEW_LINE> receivedQuery.id = query.id <NEW_LINE> self.assertEqual(query, receivedQuery) <NEW_LINE> self.assertEqual(response.flags, receivedResponse.flags) <NEW_LINE> self.assertEqual(response.question, receivedResponse.question) <NEW_LINE> self.assertFalse(response.answer == receivedResponse.answer) <NEW_LINE> self.assertEqual(len(receivedResponse.answer), 0) <NEW_LINE> self.assertEqual(len(receivedResponse.authority), 0) <NEW_LINE> self.assertEqual(len(receivedResponse.additional), 0) <NEW_LINE> print(expectedResponse) <NEW_LINE> print(receivedResponse) <NEW_LINE> self.checkMessageEDNSWithoutOptions(expectedResponse, receivedResponse) <NEW_LINE> self.assertFalse(receivedResponse.ednsflags & dns.flags.DO) <NEW_LINE> self.assertEqual(receivedResponse.payload, 4242) | Basics: Truncate TC with EDNS
dnsdist is configured to truncate TC (default),
we make the backend send responses
with TC set and additional content,
and check that the received response has been fixed.
Note that the query and initial response had EDNS,
so the final response should have it too. | 625941b6d10714528d5ffafa |
def __init__(self, int=stats.DUMB): <NEW_LINE> <INDENT> self.int = int <NEW_LINE> self.wantToMove = False <NEW_LINE> logic.ALLTHINGS.add(self) <NEW_LINE> logic.AIs.append(self) | judges when to fire and when to move | 625941b67c178a314d6ef273 |
def start_citationref(self, attrs): <NEW_LINE> <INDENT> handle = self.inaugurate(attrs['hlink'], "citation", Citation) <NEW_LINE> self.__add_citation(handle) | Add a citation reference to the object currently processed. | 625941b6e64d504609d7465c |
def winning_minimal_coalitions(players, quota): <NEW_LINE> <INDENT> min_to_max_weight = [i[0] for i in sorted(players.items(), key=itemgetter(1))] <NEW_LINE> max_to_min_weight = [i[0] for i in sorted(players.items(), key=itemgetter(1), reverse=True)] <NEW_LINE> min_players = number_players_search(max_to_min_weight, players, quota) <NEW_LINE> max_players = number_players_search(min_to_max_weight, players, quota) <NEW_LINE> for i in range(min_players, max_players + 1): <NEW_LINE> <INDENT> potencial_coalitions = combinations(players, i) <NEW_LINE> for coalition in potencial_coalitions: <NEW_LINE> <INDENT> total_power = 0 <NEW_LINE> for player in coalition: <NEW_LINE> <INDENT> total_power += players[player] <NEW_LINE> <DEDENT> if total_power == quota: <NEW_LINE> <INDENT> yield coalition <NEW_LINE> <DEDENT> elif total_power > quota: <NEW_LINE> <INDENT> flag = 1 <NEW_LINE> for player in coalition: <NEW_LINE> <INDENT> if not(total_power-players[player] < quota): <NEW_LINE> <INDENT> flag = 0 <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if flag: <NEW_LINE> <INDENT> yield coalition | List the minimal coalitions that win the game.
Parameters
players : dictionary
Names of the players and their weights.
quota : int
Necessary weight to win the game.
Yields
-------
list
A minimal coalition. | 625941b6090684286d50eafb |
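The row depends on a `number_players_search` helper that is not shown, so it does not run standalone. A simplified sketch of the same idea without that pruning: a coalition wins when its weight reaches the quota, and is minimal when every member is critical (removing any one drops the total below the quota):

```python
from itertools import combinations

def minimal_winning_coalitions(players, quota):
    # players: {name: weight}
    for size in range(1, len(players) + 1):
        for coalition in combinations(players, size):
            total = sum(players[p] for p in coalition)
            if total >= quota and all(total - players[p] < quota
                                      for p in coalition):
                yield coalition

players = {"A": 4, "B": 3, "C": 2, "D": 1}
print(list(minimal_winning_coalitions(players, quota=6)))
# [('A', 'B'), ('A', 'C'), ('B', 'C', 'D')]
```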
def encode_image(image_file): <NEW_LINE> <INDENT> with open(image_file, 'rb') as f: <NEW_LINE> <INDENT> encoded = base64.b64encode(f.read()) <NEW_LINE> <DEDENT> return 'data:image/png;base64,{}'.format(encoded.decode()) | Encodes a PNG image file as a base64 data URI | 625941b60fa83653e4656dd9 |
def __init__(self, bot, db_runner, logger): <NEW_LINE> <INDENT> self.bot = bot <NEW_LINE> self.db_runner = db_runner <NEW_LINE> self.logger = logger | Initializes the view | 625941b6097d151d1a222c78 |
def ground_fd(fd, objective, tbox, abox): <NEW_LINE> <INDENT> fg = tbox.FunctionGrounding("fg_"+fd.name.replace('fd_', ''), namespace=abox, typeFD=fd, solvesO=objective) <NEW_LINE> return fg | Given a FunctionDesign fd and an Objective objective,
creates an individual FunctionGrounding with typeFD fd and solvesO objective;
returns the fg | 625941b6091ae35668666d81 |
@permission_required('shadowsocks') <NEW_LINE> def donateData(request): <NEW_LINE> <INDENT> data = [Donate.totalDonateNums(), int(Donate.totalDonateMoney())] <NEW_LINE> result = json.dumps(data, ensure_ascii=False) <NEW_LINE> return HttpResponse(result, content_type='application/json') | Returns donation information:
number of donations
total donation amount | 625941b6d10714528d5ffafb |
def emphasize(s): <NEW_LINE> <INDENT> print('\n\033[1m{}\033[0m'.format(s)) | Like print(), but emphasizes the line using ANSI escape sequences. | 625941b6d6c5a10208143e63 |
def load_key(self): <NEW_LINE> <INDENT> file = open(self.key) <NEW_LINE> for line in file: <NEW_LINE> <INDENT> x = line.strip().split(";") <NEW_LINE> key =x[0] <NEW_LINE> value = list(eval(x[1])) <NEW_LINE> self.dic_list[key] = value <NEW_LINE> <DEDENT> file.close() | loads a text file containing the names and numeric values and saves it to a dict | 625941b62eb69b55b151c6c6 |
def set_ResponseFormat(self, value): <NEW_LINE> <INDENT> super(SearchInputSet, self)._set_input('ResponseFormat', value) | Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Can be set to xml or json. Defaults to json.) | 625941b621a7993f00bc7b05 |
def _destructively_unify(feature1, feature2, bindings1, bindings2, memo, fail, depth=0): <NEW_LINE> <INDENT> if depth > 50: <NEW_LINE> <INDENT> print("Infinite recursion in this unification:") <NEW_LINE> print(show(dict(feature1=feature1, feature2=feature2, bindings1=bindings1, bindings2=bindings2, memo=memo))) <NEW_LINE> raise ValueError("Infinite recursion in unification") <NEW_LINE> <DEDENT> if (id(feature1), id(feature2)) in memo: <NEW_LINE> <INDENT> result = memo[id(feature1), id(feature2)] <NEW_LINE> if result is UnificationFailure: raise result() <NEW_LINE> <DEDENT> unified = _do_unify(feature1, feature2, bindings1, bindings2, memo, fail, depth) <NEW_LINE> memo[id(feature1), id(feature2)] = unified <NEW_LINE> return unified | Attempt to unify C{self} and C{other} by modifying them
in-place. If the unification succeeds, then C{self} will
contain the unified value, and the value of C{other} is
undefined. If the unification fails, then a
UnificationFailure is raised, and the values of C{self}
and C{other} are undefined. | 625941b697e22403b379cdb4 |
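The row is NLTK-internal (bindings, memoization, destructive updates), but the core idea of feature-structure unification is compact. A minimal non-destructive sketch, purely illustrative and not the library's API:

```python
def unify(a, b):
    # Dicts unify key-by-key; atoms must be equal; a clash fails.
    if isinstance(a, dict) and isinstance(b, dict):
        out = dict(a)
        for key, value in b.items():
            out[key] = unify(out[key], value) if key in out else value
        return out
    if a == b:
        return a
    raise ValueError("unification failed: %r vs %r" % (a, b))

print(unify({"num": "sg"}, {"per": 3}))  # {'num': 'sg', 'per': 3}
print(unify({"agr": {"num": "sg"}}, {"agr": {"per": 3}}))
```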
def to_json(result_dict, filename): <NEW_LINE> <INDENT> with open(filename, 'w') as f: <NEW_LINE> <INDENT> content = json.dumps(result_dict) <NEW_LINE> f.write(content) | Save test results to status.json | 625941b623849d37ff7b2eae |
def start_ipykernel(self, client, wdir=None, give_focus=True): <NEW_LINE> <INDENT> if not self.get_option('monitor/enabled'): <NEW_LINE> <INDENT> QMessageBox.warning(self, _('Open an IPython console'), _("The console monitor was disabled: the IPython kernel will " "be started as expected, but an IPython console will have " "to be connected manually to the kernel."), QMessageBox.Ok) <NEW_LINE> <DEDENT> if not wdir: <NEW_LINE> <INDENT> wdir = getcwd() <NEW_LINE> <DEDENT> self.main.ipyconsole.visibility_changed(True) <NEW_LINE> self.start(fname=None, wdir=to_text_string(wdir), args='', interact=True, debug=False, python=True, ipykernel=True, ipyclient=client, give_ipyclient_focus=give_focus) | Start new IPython kernel | 625941b6b57a9660fec3369b |
def onDisconnected(self, reason): <NEW_LINE> <INDENT> self.connectionStatus = False <NEW_LINE> self.loginStatus = False <NEW_LINE> self.gateway.mdConnected = False <NEW_LINE> content = (u'行情服务器连接断开,原因:%s' %reason) <NEW_LINE> self.writeLog(content) | Connection dropped | 625941b6a17c0f6771cbde70 |
def getAllAlignments(self): <NEW_LINE> <INDENT> lAlignments = [] <NEW_LINE> with open(self.inputBlastXMLFile, 'r') as input : <NEW_LINE> <INDENT> blast_records = NCBIXML.parse(input) <NEW_LINE> index = 0 <NEW_LINE> for blast_record in blast_records: <NEW_LINE> <INDENT> logging.debug('QUERY: {}'.format(blast_record.query)) <NEW_LINE> for alignment in blast_record.alignments: <NEW_LINE> <INDENT> logging.debug('SUBJECT: {}'.format(alignment.hit_id)) <NEW_LINE> nb_hsp = 0 <NEW_LINE> for hsp in alignment.hsps: <NEW_LINE> <INDENT> nb_hsp += 1 <NEW_LINE> index += 1 <NEW_LINE> if hsp.frame[1] == 1: <NEW_LINE> <INDENT> lAlignments.append(Alignment(blast_record.query, alignment.hit_id, hsp.query_start, hsp.query_end, hsp.sbjct_start, hsp.sbjct_end, hsp.align_length, hsp.identities, hsp.frame[0], hsp.frame[1], id=index)) <NEW_LINE> <DEDENT> elif hsp.frame[1] == -1: <NEW_LINE> <INDENT> lAlignments.append(Alignment(blast_record.query, alignment.hit_id, hsp.query_start, hsp.query_end, hsp.sbjct_end, hsp.sbjct_start, hsp.align_length, hsp.identities, hsp.frame[0], hsp.frame[1], id=index)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.error('Blast Parsing: Unknown strand') <NEW_LINE> raise Exception("Unknown strand") <NEW_LINE> <DEDENT> <DEDENT> logging.debug('{} HSP parsed'.format(nb_hsp)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> input.closed <NEW_LINE> return lAlignments | Return list of all Alignments | 625941b6187af65679ca4f39 |
def roi_pooling(input, rois, size=(7, 7), spatial_scale=1.0): <NEW_LINE> <INDENT> assert (rois.dim() == 2) <NEW_LINE> assert (rois.size(1) == 4) <NEW_LINE> output = [] <NEW_LINE> rois = rois.data.float() <NEW_LINE> num_rois = rois.size(0) <NEW_LINE> rois.mul_(spatial_scale) <NEW_LINE> rois = rois.long() <NEW_LINE> for i in range(num_rois): <NEW_LINE> <INDENT> roi = rois[i] <NEW_LINE> im = input[..., roi[1]:(roi[3] + 1), roi[0]:(roi[2] + 1)] <NEW_LINE> output.append(adaptive_avg_pool2d(im, size)) <NEW_LINE> <DEDENT> return torch.cat(output, 0) | :param input: input feature or images (batch_size,channel,height,width)
:param rois: bounding boxes to crop, shape (num_rois, 4) with bbox=[x1,y1,x2,y2]
:param size: output size (height,width)
:param spatial_scale: factor by which the bboxes are scaled down
:return: (num_rois, channel, cropped_height, cropped_width) | 625941b6796e427e537b03de |
def get_resource_parts(self): <NEW_LINE> <INDENT> if not self.is_api_request(): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> parts_list = list(filter(lambda x: x.replace(' ', '') != '', self.path.split(API_PATH))) <NEW_LINE> if len(parts_list) <= 0: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> return list(filter(lambda x: x.replace(' ', '') != '', parts_list[0].split('/'))) | Returns a list of resource parts: if URL is 'API_PATH/foo/bar' it returns ['foo', 'bar']
If it is not a valid API request, returns an empty list | 625941b650812a4eaa59c142 |
def read(self, params, file_name): <NEW_LINE> <INDENT> self.geometry = read_polydata(file_name) | Read centerlines from a .vtp file. | 625941b63617ad0b5ed67d1b |
def prefix_notation(node): <NEW_LINE> <INDENT> pass | Fill this in! | 625941b6be7bc26dc91cd422 |
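The row is an exercise stub ("Fill this in!"). One plausible completion, assuming node objects with value/left/right attributes (the attribute names and the Node class are assumptions):

```python
class Node:
    def __init__(self, value, left=None, right=None):
        self.value, self.left, self.right = value, left, right

def prefix_notation(node):
    # Pre-order walk: root first, then left subtree, then right.
    if node is None:
        return []
    return ([node.value]
            + prefix_notation(node.left)
            + prefix_notation(node.right))

# (1 + 2) * 3 as an expression tree
tree = Node("*", Node("+", Node(1), Node(2)), Node(3))
assert prefix_notation(tree) == ["*", "+", 1, 2, 3]
```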
def Score(self, Ranges, pose, sigma=1.0): <NEW_LINE> <INDENT> dis = 0.0 <NEW_LINE> score = 1.0 <NEW_LINE> for i in range(self.BeaconSet.shape[0]): <NEW_LINE> <INDENT> dis = np.linalg.norm(self.BeaconSet[i, :] - pose) <NEW_LINE> if Ranges[i] > 0.0: <NEW_LINE> <INDENT> score *= (self.NormPdf(Ranges[i], dis, sigma) + 1e-50) <NEW_LINE> <DEDENT> <DEDENT> return score | Method 2 | 625941b6e5267d203edcdabd |
def math_from_doc(fitfunc, maxlen=np.inf): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> res = fitfunc.__doc__ <NEW_LINE> res = res.replace(':math:', '') <NEW_LINE> res = res.replace('`', '$') <NEW_LINE> if len(res) > maxlen: <NEW_LINE> <INDENT> term = res.find(" + ", 0, len(res)) <NEW_LINE> res = res[:term+2]+' ...$' <NEW_LINE> <DEDENT> if len(res) > maxlen: <NEW_LINE> <INDENT> if fitfunc.__name__ == 'f_complex': <NEW_LINE> <INDENT> res = 'Complex' <NEW_LINE> <DEDENT> elif fitfunc.__name__ == 'f_exponential_offset': <NEW_LINE> <INDENT> res = 'Exp+Offset' <NEW_LINE> <DEDENT> elif fitfunc.__name__ == 'f_exponential': <NEW_LINE> <INDENT> res = 'Exponential' <NEW_LINE> <DEDENT> elif fitfunc.__name__ == 'f_linear': <NEW_LINE> <INDENT> res = 'Linear' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> res = fitfunc.__name__ <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> log.debug('Exception passed when casting function description', exc_info=True) <NEW_LINE> res = fitfunc.__name__ <NEW_LINE> <DEDENT> return res | convert sphinx compatible math to matplotlib/tex | 625941b6596a8972360898e6 |
def testAllOfUserGroupPermissionSet(self): <NEW_LINE> <INDENT> pass | Test AllOfUserGroupPermissionSet | 625941b68e05c05ec3eea18d |
def _assign_to_grid(self): <NEW_LINE> <INDENT> grid_locals = sys._getframe(2).f_locals <NEW_LINE> grid_cls_cols = grid_locals.setdefault('__cls_cols__', []) <NEW_LINE> grid_cls_cols.append(self) | Columns being set up in declarative fashion need to be attached to the class
somewhere. In WebGrid, we have a class attribute `__cls_cols__` that columns
append themselves to. Subclasses, use of mixins, etc. will combine these column
lists elsewhere. | 625941b663b5f9789fde6f01 |
def hdfs_delete(server, username, path, **args): <NEW_LINE> <INDENT> response = _namenode_request(server, username, 'DELETE', path, 'DELETE', args) <NEW_LINE> content = response.read() <NEW_LINE> _check_code(response.status, content) <NEW_LINE> boolean_json = json.loads(content) <NEW_LINE> return boolean_json['boolean'] | Delete a file or directory. | 625941b6cad5886f8bd26dfe |
def run(): <NEW_LINE> <INDENT> DataDirBase = "./DataByVoltage/" <NEW_LINE> Voltages = GetImageJData(DataDirBase,ext=".xls") <NEW_LINE> OverhangObjsByVoltage = ConvertToOverhangObjects(Voltages) <NEW_LINE> Distributions = OrderedDict() <NEW_LINE> for Volt,AllLanes in OverhangObjsByVoltage.items(): <NEW_LINE> <INDENT> AllLin = [l.LinearRelative for l in AllLanes] <NEW_LINE> AllCirc = [l.CircularRelative for l in AllLanes] <NEW_LINE> AllConcat = [l.ConcatemerRelative for l in AllLanes] <NEW_LINE> Distributions[Volt] = DistributionObj(AllLin,AllCirc,AllConcat) <NEW_LINE> <DEDENT> Strings = Distributions.keys() <NEW_LINE> VoltageFloats = [float(v) for v in Strings] <NEW_LINE> SortIdx = np.argsort(VoltageFloats) <NEW_LINE> VoltageFloats = [VoltageFloats[i] for i in SortIdx] <NEW_LINE> Voltages = [Strings[i] for i in SortIdx] <NEW_LINE> GetFlat = lambda f: [f(Distributions[k]) for k in Voltages] <NEW_LINE> FlatLin = GetFlat(lambda x: x.Linear) <NEW_LINE> FlatCirc = GetFlat(lambda x: x.Circular) <NEW_LINE> FlatConcat = GetFlat(lambda x: x.Concat) <NEW_LINE> CommonStyle = dict(markersize=10) <NEW_LINE> LinProps = dict(marker='x', color='g',linewidth=2,label="Linear",**CommonStyle) <NEW_LINE> CirProps = dict(marker='o', color='r',linestyle='--',label="Circular",**CommonStyle) <NEW_LINE> ConcatProps = dict(color='k',marker='*', linestyle='--',label="Dimers+",**CommonStyle) <NEW_LINE> fig = pPlotUtil.figure() <NEW_LINE> Mean = lambda dist: [np.mean(v) for v in dist] <NEW_LINE> Stdev = lambda dist: [np.std(v) for v in dist] <NEW_LINE> PlotDist = lambda dist,props: plt.errorbar(x=VoltageFloats, y=Mean(dist), yerr=Stdev(dist), **props) <NEW_LINE> PlotDist(FlatLin,LinProps) <NEW_LINE> PlotDist(FlatCirc,CirProps) <NEW_LINE> PlotDist(FlatConcat,ConcatProps) <NEW_LINE> pPlotUtil.lazyLabel("Voltage (V)","Intensity Fraction", "Circular DNA fraction saturates at low voltage", frameon=True) <NEW_LINE> plt.xlim([0,np.max(VoltageFloats)*1.1]) <NEW_LINE> pPlotUtil.savefig(fig,"BoxPlots.png") | Shows how the concatemeters etc change with voltage | 625941b67b25080760e39277 |
def __init__(self, visualizer, gc): <NEW_LINE> <INDENT> self.agent_dict = {} <NEW_LINE> self.agent_list = [] <NEW_LINE> self.new_agents = [] <NEW_LINE> self.visualizer = visualizer <NEW_LINE> self.gc = gc | Initializes an abm with the given number of agents and returns it
:return: An initialized ABM. | 625941b6167d2b6e312189ba |
def test_diff_output_bad_path(builddir): <NEW_LINE> <INDENT> runner = CliRunner() <NEW_LINE> result = runner.invoke( main.cli, ["--debug", "--path", builddir, "diff", "src/baz.py"], catch_exceptions=False, ) <NEW_LINE> assert result.exit_code == 0, result.stdout <NEW_LINE> assert "test.py" not in result.stdout | Test the diff feature with a bad path | 625941b650485f2cf553cbb5 |
def predictive_entropy(x): <NEW_LINE> <INDENT> return entropy(np.mean(x, axis=1)) | Take a tensor of MC predictions [#images x #MC x #classes] and return the
entropy of the mean predictive distribution across the MC samples. | 625941b64527f215b584c277 |
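One caveat with the row: scipy.stats.entropy reduces along axis 0 by default, so for a (n_images, n_classes) mean it is safer to make the per-image reduction explicit. A numpy-only sketch (the 1e-12 guard is an addition):

```python
import numpy as np

def predictive_entropy(probs):
    # probs: MC predictions, shape (n_images, n_mc, n_classes), with the
    # last axis a probability distribution for each sample.
    mean_pred = probs.mean(axis=1)  # average over the MC axis
    return -np.sum(mean_pred * np.log(mean_pred + 1e-12), axis=1)

rng = np.random.default_rng(0)
logits = rng.normal(size=(5, 20, 3))
probs = np.exp(logits) / np.exp(logits).sum(axis=-1, keepdims=True)
print(predictive_entropy(probs))  # one entropy value per image
```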
def pna_csv_2_ntwks3(filename): <NEW_LINE> <INDENT> header, comments, d = read_pna_csv(filename) <NEW_LINE> col_headers = pna_csv_header_split(filename) <NEW_LINE> z0 = npy.ones((npy.shape(d)[0]))*50 <NEW_LINE> f = d[:,0]/1e9 <NEW_LINE> name = os.path.splitext(os.path.basename(filename))[0] <NEW_LINE> if 'db' in header.lower() and 'deg' in header.lower(): <NEW_LINE> <INDENT> s = npy.zeros((len(f),2,2), dtype=complex) <NEW_LINE> for k, h in enumerate(col_headers[1:]): <NEW_LINE> <INDENT> if 's11' in h.lower() and 'db' in h.lower(): <NEW_LINE> <INDENT> s[:,0,0] = mf.dbdeg_2_reim(d[:,k+1], d[:,k+2]) <NEW_LINE> <DEDENT> elif 's21' in h.lower() and 'db' in h.lower(): <NEW_LINE> <INDENT> s[:,1,0] = mf.dbdeg_2_reim(d[:,k+1], d[:,k+2]) <NEW_LINE> <DEDENT> elif 's12' in h.lower() and 'db' in h.lower(): <NEW_LINE> <INDENT> s[:,0,1] = mf.dbdeg_2_reim(d[:,k+1], d[:,k+2]) <NEW_LINE> <DEDENT> elif 's22' in h.lower() and 'db' in h.lower(): <NEW_LINE> <INDENT> s[:,1,1] = mf.dbdeg_2_reim(d[:,k+1], d[:,k+2]) <NEW_LINE> <DEDENT> <DEDENT> n = Network(f=f,s=s,z0=z0, name = name) <NEW_LINE> return n <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> warn("File does not seem to be formatted properly (only dB/deg supported for now)") | Read a CSV file exported from an Agilent PNA in dB/deg format
Parameters
--------------
filename : str
full path or filename
Returns
---------
out : n
2-Port Network
Examples
---------- | 625941b692d797404e303fa7 |
def add_item(self, mol_name=None, mol_cont=None): <NEW_LINE> <INDENT> if len(self) and self.is_empty(): <NEW_LINE> <INDENT> self[0].mol_name = mol_name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for i in range(len(self)): <NEW_LINE> <INDENT> if self[i].mol_name == mol_name: <NEW_LINE> <INDENT> raise RelaxError("The molecule '%s' already exists." % mol_name) <NEW_LINE> <DEDENT> <DEDENT> self.append(mol_cont) <NEW_LINE> self[-1].mol_name = mol_name <NEW_LINE> <DEDENT> return self[-1] | Append the given MolContainer instance to the MolList.
@keyword mol_name: The molecule name.
@type mol_name: str
@keyword mol_cont: The data structure for the molecule.
@type mol_cont: MolContainer instance
@return: The new molecule container.
@rtype: MolContainer instance | 625941b64e4d5625662d41fa |
def z(self, zw): <NEW_LINE> <INDENT> return zw * (np.cos(self.beta-self.omega))/(np.cos(self.beta) * np.cos(self.omega)) | Depth below the sloping ground surface | 625941b691af0d3eaac9b830 |
def list_keys( self, resource_group_name, namespace_name, topic_name, authorization_rule_name, custom_headers=None, raw=False, **operation_config): <NEW_LINE> <INDENT> url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/topics/{topicName}/authorizationRules/{authorizationRuleName}/ListKeys' <NEW_LINE> path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), 'topicName': self._serialize.url("topic_name", topic_name, 'str', max_length=50, min_length=1), 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', max_length=50, min_length=1), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') <NEW_LINE> header_parameters = {} <NEW_LINE> header_parameters['Content-Type'] = 'application/json; charset=utf-8' <NEW_LINE> if self.config.generate_client_request_id: <NEW_LINE> <INDENT> header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) <NEW_LINE> <DEDENT> if custom_headers: <NEW_LINE> <INDENT> header_parameters.update(custom_headers) <NEW_LINE> <DEDENT> if self.config.accept_language is not None: <NEW_LINE> <INDENT> header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') <NEW_LINE> <DEDENT> request = self._client.post(url, query_parameters) <NEW_LINE> response = self._client.send(request, header_parameters, **operation_config) <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> raise models.ErrorResponseException(self._deserialize, response) <NEW_LINE> <DEDENT> deserialized = None <NEW_LINE> if response.status_code == 200: <NEW_LINE> <INDENT> deserialized = self._deserialize('AccessKeys', response) <NEW_LINE> <DEDENT> if raw: <NEW_LINE> <INDENT> client_raw_response = ClientRawResponse(deserialized, response) <NEW_LINE> return client_raw_response <NEW_LINE> <DEDENT> return deserialized | Gets the primary and secondary connection strings for the topic.
:param resource_group_name: Name of the Resource group within the
Azure subscription.
:type resource_group_name: str
:param namespace_name: The namespace name
:type namespace_name: str
:param topic_name: The topic name.
:type topic_name: str
:param authorization_rule_name: The authorizationrule name.
:type authorization_rule_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`AccessKeys <azure.mgmt.servicebus.models.AccessKeys>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorResponseException<azure.mgmt.servicebus.models.ErrorResponseException>` | 625941b655399d3f055884d0 |
def ZeroEvecs(n): <NEW_LINE> <INDENT> matrix = allJ(n) <NEW_LINE> evals, evecs = linalg.eig(matrix) <NEW_LINE> evecst = np.transpose(evecs) <NEW_LINE> sevecs = [np.zeros(2**n)] <NEW_LINE> for i in range(len(evals)): <NEW_LINE> <INDENT> if np.round(evals[i], 5)==0: <NEW_LINE> <INDENT> sevecs = np.append(sevecs, [evecst[i]], 0) <NEW_LINE> <DEDENT> <DEDENT> return sevecs | Gives the eigenvectors of total S_y operator with zero eigenvalue | 625941b6d58c6744b4257a7d |
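The helper `allJ` is not shown; a generic version of the same null-eigenvector extraction works for any square matrix:

```python
import numpy as np

def zero_eigenvectors(matrix, tol=1e-5):
    # Rows of the result are eigenvectors whose eigenvalue is ~0.
    evals, evecs = np.linalg.eig(matrix)
    return evecs.T[np.abs(evals) < tol]

m = np.array([[1.0, 1.0], [1.0, 1.0]])  # eigenvalues 2 and 0
print(zero_eigenvectors(m))  # one row, proportional to [1, -1]
```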
def strip_actions(self, model_path, folder): <NEW_LINE> <INDENT> path, model_file = os.path.split(model_path) <NEW_LINE> with open(model_path, 'r') as mf: <NEW_LINE> <INDENT> mlines = mf.readlines() <NEW_LINE> stripped_lines = filter(lambda x: self._not_action(x), mlines) <NEW_LINE> <DEDENT> stripped_model = os.path.join(folder, model_file) <NEW_LINE> with open(stripped_model, 'w') as sf: <NEW_LINE> <INDENT> sf.writelines(stripped_lines) <NEW_LINE> <DEDENT> return stripped_model | Strips actions from a BNGL model file and makes a copy
into the given folder | 625941b6925a0f43d2549c90 |
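A hedged call sketch — strip_actions is a method, so this assumes an object (here called model) that defines _not_action(line) to reject BNGL action lines; the paths are placeholders:

    stripped = model.strip_actions("models/toy.bngl", "stripped/")
    print(stripped)  # -> stripped/toy.bngl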
def nav(items=[],delimiter="|",extra=[]): <NEW_LINE> <INDENT> nav = """<nav {0}>\n""".format(" ".join(extra)) <NEW_LINE> for i in range(len(items)): <NEW_LINE> <INDENT> if i != len(items)-1: <NEW_LINE> <INDENT> nav += """{0} {1}\n""".format(items[i],delimiter) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> nav += """{0}\n""".format(items[i]) <NEW_LINE> <DEDENT> <DEDENT> nav += """</nav>\n""" <NEW_LINE> return nav | writes the items inside the nav, separated by delimiter
-----
>>link1 = link("home","/home")
>>link2 = link("contact","/contact")
>>link3 = link("about us","/about us")
>>print(nav([link1,link2,link3]))
<nav>
<a href="/home" target="_blank" title="">home</a> |
<a href="/contact" target="_blank" title="">contact</a> |
<a href="/about us" target="_blank" title="">about us</a>
</nav> | 625941b61b99ca400220a8cd |
def scheduled_plans_for_space(self, space_id, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> if kwargs.get('callback'): <NEW_LINE> <INDENT> return self.scheduled_plans_for_space_with_http_info(space_id, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (data) = self.scheduled_plans_for_space_with_http_info(space_id, **kwargs) <NEW_LINE> return data | Scheduled Plans for Space
### Get scheduled plans by using a space id for the requesting user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.scheduled_plans_for_space(space_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int space_id: Space Id (required)
:param str fields: Requested fields.
:return: list[ScheduledPlan]
If the method is called asynchronously,
returns the request thread. | 625941b6cb5e8a47e48b78cc |
def notify_restart(self, name, **kw): <NEW_LINE> <INDENT> print(u"SSD1306 plugin: received restart signal; turning off LCD...") <NEW_LINE> try: <NEW_LINE> <INDENT> self.stop() <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> print(u"SSD1306 plugin: Exception caught while trying to stop") <NEW_LINE> traceback.print_exc() <NEW_LINE> print(ex) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(u"SSD1306 plugin: LCD has been shut off") | Restart handler | 625941b6236d856c2ad445fb |
def get_query_executions(ids): <NEW_LINE> <INDENT> response = athena_client.batch_get_query_execution( QueryExecutionIds=ids ) <NEW_LINE> return response['QueryExecutions'] | Retrieve details on the provided query execution IDs | 625941b60383005118ecf401
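A usage sketch, assuming the module-level athena_client the helper expects (the query execution IDs below are placeholders):

    import boto3

    athena_client = boto3.client("athena", region_name="us-east-1")

    executions = get_query_executions(["query-id-1", "query-id-2"])
    for ex in executions:
        print(ex["QueryExecutionId"], ex["Status"]["State"])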
def can_run_in_direction(vDict): <NEW_LINE> <INDENT> if not vDict['runInDirection']: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> level = vDict['gameLevel'] <NEW_LINE> player = level.player <NEW_LINE> player_coords = player.co <NEW_LINE> new_coords = player_coords + player.ai.move_direction <NEW_LINE> if not level.grid[new_coords.y][new_coords.x].moveThru: <NEW_LINE> <INDENT> vDict['runInDirection'] = False <NEW_LINE> return False <NEW_LINE> <DEDENT> def generateNearbyTiles(co): <NEW_LINE> <INDENT> for y in range(co.y - 1, co.y + 2): <NEW_LINE> <INDENT> for x in range(co.x - 1, co.x + 2): <NEW_LINE> <INDENT> yield level.grid[y][x] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> nearby_tiles = tuple(generateNearbyTiles(player_coords)) <NEW_LINE> next_tiles = tuple(generateNearbyTiles(new_coords)) <NEW_LINE> if nearby_tiles != next_tiles: <NEW_LINE> <INDENT> vDict['runInDirection'] = False <NEW_LINE> return False <NEW_LINE> <DEDENT> entities = list((e for e in level.getEntitesInRange(player, player.getSightRadius, allEntieiesExceptThis=player))) <NEW_LINE> if len(entities) > 0: <NEW_LINE> <INDENT> for e in entities: <NEW_LINE> <INDENT> if level.checkForObstructionBetweenPoints(player.co, e.co, maxRange=player.getSightRadius): <NEW_LINE> <INDENT> vDict['runInDirection'] = False <NEW_LINE> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> old_coords = player_coords - player.ai.move_direction <NEW_LINE> adjacent_items_current = set((i for i in level.itemsOnFloor if player_coords.is_adjacent(i.co))) <NEW_LINE> adjacent_items_previous = set((i for i in level.itemsOnFloor if old_coords.is_adjacent(i.co))) <NEW_LINE> difference = adjacent_items_current - adjacent_items_previous <NEW_LINE> if len(difference) > 0: <NEW_LINE> <INDENT> vDict['runInDirection'] = False <NEW_LINE> return False <NEW_LINE> <DEDENT> return True | Runs a series of checks to see if the player can "run" in a direction. First it checks whether the grid tile
in front of the player can be moved through. Then it checks whether the
3 x 3 grid of tiles adjacent to the player differs from the 3 x 3 grid adjacent to the player's next position, whether any
other entities are in the player's sight, and whether the set of adjacent floor items has changed. If any of these checks fails, then vDict['runInDirection'] is set
to False. | 625941b6435de62698dfda71
def _init_observation_spec(self): <NEW_LINE> <INDENT> if self._to_float: <NEW_LINE> <INDENT> pixels_dtype = np.float64 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pixels_dtype = np.uint8 <NEW_LINE> <DEDENT> if self._grayscaling: <NEW_LINE> <INDENT> pixels_spec_shape = (self._height, self._width) <NEW_LINE> pixels_spec_name = "grayscale" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pixels_spec_shape = (self._height, self._width, NUM_COLOR_CHANNELS) <NEW_LINE> pixels_spec_name = "RGB" <NEW_LINE> <DEDENT> pixel_spec = specs.Array( shape=pixels_spec_shape, dtype=pixels_dtype, name=pixels_spec_name) <NEW_LINE> pixel_spec = self._frame_stacker.update_spec(pixel_spec) <NEW_LINE> if self._expose_lives_observation: <NEW_LINE> <INDENT> return (pixel_spec,) + self._environment.observation_spec()[1:] <NEW_LINE> <DEDENT> return pixel_spec | Computes the observation spec for the pixel observations.
Returns:
An `Array` specification for the pixel observations. | 625941b61f037a2d8b94601b |
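A minimal sketch of the spec composition above, assuming dm_env is installed and a frame stacker that appends a stack axis of depth 4 (the stack size is an assumption):

    import numpy as np
    from dm_env import specs

    pixel_spec = specs.Array(shape=(84, 84), dtype=np.uint8, name="grayscale")
    # A frame stacker's update_spec typically appends the stack depth:
    stacked = specs.Array(shape=pixel_spec.shape + (4,),
                          dtype=pixel_spec.dtype, name=pixel_spec.name)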
def __init__(self, filepath, report, date_list): <NEW_LINE> <INDENT> self.report = report <NEW_LINE> self.date_list = date_list <NEW_LINE> self.workbook = xlsxwriter.Workbook(filepath) <NEW_LINE> self.worksheet = self.workbook.add_worksheet() <NEW_LINE> self.worksheet.set_column('A:B', 20) <NEW_LINE> self.finalized = False <NEW_LINE> self.row = 0 <NEW_LINE> self.col = 0 <NEW_LINE> self.title_format = self.workbook.add_format( {'bold': True, 'font_size': 36} ) <NEW_LINE> self.report_name_format = self.workbook.add_format( {'bold': True, 'font_size': 28} ) <NEW_LINE> self.ds_format = self.workbook.add_format( {'font_size': 20} ) <NEW_LINE> self.dept_title_format = self.workbook.add_format( {'bold': True, 'font_size': 20} ) <NEW_LINE> self.field_format = self.workbook.add_format({'bold': True}) <NEW_LINE> self.currency_format = self.workbook.add_format( {'num_format': '$#,##0.00;[Red]($#,##0.00)'} ) <NEW_LINE> self.double_format = self.workbook.add_format( {'num_format': '0.000'} ) <NEW_LINE> self.integer_format = self.workbook.add_format( {'num_format': '0'} ) <NEW_LINE> self.string_format = self.workbook.add_format( ) <NEW_LINE> self.time_format = self.workbook.add_format( {'num_format': 'h:mm'} ) <NEW_LINE> self.initialize_worksheet() | Initialize an ExcelSheetHelper by creating an XLSX Workbook | 625941b63539df3088e2e168 |
def get_compatiblility(spacecraft_ch): <NEW_LINE> <INDENT> gs_chs = compatibility_models.ChannelCompatibility.objects.filter( spacecraft_channel=spacecraft_ch ) <NEW_LINE> for g in gs_chs: <NEW_LINE> <INDENT> print(g) <NEW_LINE> <DEDENT> compatible_tuples = [ ( c.groundstation_channel.groundstation, c.groundstation_channel ) for c in gs_chs ] <NEW_LINE> for c in compatible_tuples: <NEW_LINE> <INDENT> print(c) <NEW_LINE> <DEDENT> return compatibility_serializers.serialize_gs_ch_compatibility_tuples( compatible_tuples ) | Common method
Returns the (GS, GS_CH) tuples for the Ground Station
channels that are compatible with the given spacecraft channel.
:param spacecraft_ch: The spacecraft channel to find compatible tuples for
:return: List with the (GS, GS_CH) tuples | 625941b64a966d76dd550e28 |
def reprojShapefile(sourcepath, outpath=None, newprojdictionary={'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84'}): <NEW_LINE> <INDENT> if outpath is None: <NEW_LINE> <INDENT> outpath = sourcepath <NEW_LINE> <DEDENT> shpfile = gpd.GeoDataFrame.from_file(sourcepath) <NEW_LINE> shpfile = shpfile.to_crs(newprojdictionary) <NEW_LINE> shpfile.to_file(outpath) | Convert a shapefile into a new projection
sourcepath: (dir) the path to the .shp file
newprojdictionary: (dict) the new projection definitions (default is longlat projection with WGS84 datum)
outpath: (dir) the output path for the new shapefile | 625941b6b545ff76a8913c3c |
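A hedged usage sketch — the paths are placeholders; note that older geopandas versions accept a proj dict for to_crs, while newer ones prefer a string such as "EPSG:4326":

    import geopandas as gpd  # required by reprojShapefile

    # Write a WGS84 lon/lat copy instead of overwriting the source:
    reprojShapefile("data/watersheds.shp", outpath="data/watersheds_wgs84.shp")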
def __init__(self, *args): <NEW_LINE> <INDENT> this = _ArNetworkingPy.new_ArServerSimpleServerCommands(*args) <NEW_LINE> try: self.this.append(this) <NEW_LINE> except: self.this = this | __init__(self, ArServerHandlerCommands commands, ArServerBase server,
bool addLogConnections = True) -> ArServerSimpleServerCommands
__init__(self, ArServerHandlerCommands commands, ArServerBase server) -> ArServerSimpleServerCommands | 625941b61f037a2d8b94601c |
def first_nonrepeating(s): <NEW_LINE> <INDENT> if isinstance(s,str): <NEW_LINE> <INDENT> if (len(s)==0 or (s[0]=='\t') or (s[0]==' ')): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> order = [] <NEW_LINE> counts = {} <NEW_LINE> for x in s: <NEW_LINE> <INDENT> if x in counts: <NEW_LINE> <INDENT> counts[x] += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> counts[x] = 1 <NEW_LINE> order.append(x) <NEW_LINE> <DEDENT> <DEDENT> for x in order: <NEW_LINE> <INDENT> if counts[x] == 1: <NEW_LINE> <INDENT> return x <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None | :returns: the first non-repeating character in s, or None if every character repeats or the input is invalid | 625941b630dc7b7665901787
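A few worked calls that follow directly from the code above:

    assert first_nonrepeating("aabbcdc") == "d"   # 'd' is the only letter seen once
    assert first_nonrepeating("xxyy") is None     # every letter repeats
    assert first_nonrepeating(" hi") is None      # leading whitespace is rejected
    assert first_nonrepeating(42) is None         # non-strings are rejected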
def _initialize_chain(self, test_dir, num_nodes, cachedir): <NEW_LINE> <INDENT> assert num_nodes <= MAX_NODES <NEW_LINE> create_cache = False <NEW_LINE> for i in range(MAX_NODES): <NEW_LINE> <INDENT> if not os.path.isdir(os.path.join(cachedir, 'node' + str(i))): <NEW_LINE> <INDENT> create_cache = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if create_cache: <NEW_LINE> <INDENT> self.log.debug("Creating data directories from cached datadir") <NEW_LINE> for i in range(MAX_NODES): <NEW_LINE> <INDENT> if os.path.isdir(os.path.join(cachedir, "node" + str(i))): <NEW_LINE> <INDENT> shutil.rmtree(os.path.join(cachedir, "node" + str(i))) <NEW_LINE> <DEDENT> <DEDENT> for i in range(MAX_NODES): <NEW_LINE> <INDENT> datadir = initialize_datadir(cachedir, i) <NEW_LINE> args = [os.getenv("riffcashD", "riffcashd"), "-server", "-keypool=1", "-datadir=" + datadir, "-discover=0"] <NEW_LINE> if i > 0: <NEW_LINE> <INDENT> args.append("-connect=127.0.0.1:" + str(p2p_port(0))) <NEW_LINE> <DEDENT> self.bitcoind_processes[i] = subprocess.Popen(args) <NEW_LINE> self.log.debug("initialize_chain: riffcashd started, waiting for RPC to come up") <NEW_LINE> self._wait_for_bitcoind_start(self.bitcoind_processes[i], datadir, i) <NEW_LINE> self.log.debug("initialize_chain: RPC successfully started") <NEW_LINE> <DEDENT> self.nodes = [] <NEW_LINE> for i in range(MAX_NODES): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.nodes.append(get_rpc_proxy(rpc_url(get_datadir_path(cachedir, i), i), i)) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.log.exception("Error connecting to node %d" % i) <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> <DEDENT> self.enable_mocktime() <NEW_LINE> block_time = self.mocktime - (201 * 10 * 60) <NEW_LINE> for i in range(2): <NEW_LINE> <INDENT> for peer in range(4): <NEW_LINE> <INDENT> for j in range(25): <NEW_LINE> <INDENT> set_node_times(self.nodes, block_time) <NEW_LINE> self.nodes[peer].generate(1) <NEW_LINE> block_time += 10 * 60 <NEW_LINE> <DEDENT> sync_blocks(self.nodes) <NEW_LINE> <DEDENT> <DEDENT> self.stop_nodes() <NEW_LINE> self.nodes = [] <NEW_LINE> self.disable_mocktime() <NEW_LINE> for i in range(MAX_NODES): <NEW_LINE> <INDENT> os.remove(log_filename(cachedir, i, "debug.log")) <NEW_LINE> os.remove(log_filename(cachedir, i, "db.log")) <NEW_LINE> os.remove(log_filename(cachedir, i, "peers.dat")) <NEW_LINE> os.remove(log_filename(cachedir, i, "fee_estimates.dat")) <NEW_LINE> <DEDENT> <DEDENT> for i in range(num_nodes): <NEW_LINE> <INDENT> from_dir = os.path.join(cachedir, "node" + str(i)) <NEW_LINE> to_dir = os.path.join(test_dir, "node" + str(i)) <NEW_LINE> shutil.copytree(from_dir, to_dir) <NEW_LINE> initialize_datadir(test_dir, i) | Initialize a pre-mined blockchain for use by the test.
Create a cache of a 200-block-long chain (with wallet) for MAX_NODES
Afterward, create num_nodes copies from the cache. | 625941b6462c4b4f79d1d4ed |
def save_file(self, filename, encoding='utf8', headers=None, convertors=None, display=True, **kwargs): <NEW_LINE> <INDENT> global save_file <NEW_LINE> convertors = convertors or {} <NEW_LINE> headers = headers or [] <NEW_LINE> fields = self.get_fields() <NEW_LINE> _header = [] <NEW_LINE> for i, column in enumerate(fields): <NEW_LINE> <INDENT> if column.name not in convertors: <NEW_LINE> <INDENT> if display: <NEW_LINE> <INDENT> def f(value, data): <NEW_LINE> <INDENT> return column.get_display_value(value) <NEW_LINE> <DEDENT> convertors[column.name] = f <NEW_LINE> <DEDENT> <DEDENT> flag = False <NEW_LINE> for j in headers: <NEW_LINE> <INDENT> if not isinstance(j, dict): <NEW_LINE> <INDENT> raise ValueError("Header should be a list of dict, but {} type found".format(type(j))) <NEW_LINE> <DEDENT> if j['name'] == column.name: <NEW_LINE> <INDENT> _header.append(j) <NEW_LINE> flag = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if not flag: <NEW_LINE> <INDENT> d = {'name':column.name} <NEW_LINE> if display: <NEW_LINE> <INDENT> d['title'] = column.verbose_name or column.name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> d['title'] = column.name <NEW_LINE> <DEDENT> _header.append(d) <NEW_LINE> <DEDENT> <DEDENT> return save_file(self.run(), filename, encoding=encoding, headers=_header, convertors=convertors, **kwargs) | save result to a csv file.
display = True converts each value to its display form (via the field's get_display_value) | 625941b663b5f9789fde6f02
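A hedged usage sketch — the model, field names, and file name are placeholder assumptions; note that each convertor takes (value, data), matching the default wrapper built inside save_file:

    # Hypothetical Uliweb-style query:
    query = User.all()
    query.save_file(
        "users.csv",
        headers=[{"name": "username", "title": "User Name"}],
        convertors={"age": lambda value, data: "%d yrs" % value},
    )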
def solvr_nonlinear_osc(Y, t, bound = 0.01): <NEW_LINE> <INDENT> Q = 100.0 <NEW_LINE> gain = 10.0 <NEW_LINE> beta = 1 <NEW_LINE> return [ Y[1], -(Y[0] + Y[1]/Q + beta*Y[0]**3) + np.clip(gain*Y[1], -1.0*bound, 1.0*bound), ] | Now let me move on to the self-sustaining oscillator, nonlinear case:
x'' + x'/Q + x + beta * x**3 == clip(gain*x', -bound, bound)
omega is set to resonance, i.e. 1.
The clipping function np.clip(input, lower_bound, upper_bound)
prevents divergence. | 625941b66aa9bd52df036bbf
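A minimal integration sketch using scipy's odeint; the initial condition, time grid, and bound are illustrative choices:

    import numpy as np
    from scipy.integrate import odeint

    t = np.linspace(0.0, 200.0, 5000)
    y0 = [0.01, 0.0]                    # small displacement, zero velocity
    sol = odeint(solvr_nonlinear_osc, y0, t, args=(0.01,))  # bound = 0.01
    x, xdot = sol[:, 0], sol[:, 1]      # trajectory settles onto a limit cycle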
def retrieve(self, request, pk=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> breath_sounds = BreathSounds.objects.get(pk=pk) <NEW_LINE> serializer = BreathSoundsDropdownSerializer(breath_sounds, context={'request': request}) <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> return HttpResponseServerError(ex) | Handle GET requests for BreathSounds
Returns:
Response -- JSON serialized BreathSounds instance | 625941b656ac1b37e6263ffd
def make_bams(paired,fasta,in_file, out_file, length = 35, std = 1, index_file = "temp.index", sam_file = "temp.sam"): <NEW_LINE> <INDENT> cmd = make_kallisto_index_command(fasta, index_file) <NEW_LINE> output = run_commandline(cmd) <NEW_LINE> write_log("KALLISTO INDEX",output) <NEW_LINE> if paired == "": <NEW_LINE> <INDENT> cmd = make_single_kallisto_command(index_file, in_file, sam_file, length, std) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cmd = make_double_kallisto_command(index_file, in_file, paired, sam_file) <NEW_LINE> <DEDENT> output = run_commandline(cmd) <NEW_LINE> write_log("KALLISTO RUN",output) <NEW_LINE> cmd = make_samtools_sort_command(out_file, sam_file) <NEW_LINE> output = run_commandline(cmd) <NEW_LINE> write_log("SAMTOOLS SORT",output) <NEW_LINE> cmd = make_samtools_index_command(out_file) <NEW_LINE> output = run_commandline(cmd) <NEW_LINE> write_log("SAMTOOLS INDEX",output) <NEW_LINE> cmd = "rm temp*" <NEW_LINE> run_commandline(cmd) | maps the reads in a fastq file to the genome in a fasta file, makes a log of the steps taken
keyword arguments:
fasta: str, name of the fasta file containing the genome (or transcriptome)
in_file: str, the name of the (first) fastq file with the reads to be mapped
paired: str, the name of the second fastq file if the reads are paired-end; pass an empty string ("") if the reads are single-end
out_file: str, the desired name of the output bam file
length: int, the average length of the sequences, default = 35, for kallisto if the reads are single-end
std: int, the standard deviation of the sequence lengths, default = 1, for kallisto if the reads are single-end
index_file: str, the name for the intermediate index file, default = "temp.index"
sam_file: str, the name for the intermediate sam file, default = "temp.sam"
WARNING: will remove all files in the working directory that start with "temp" | 625941b65e10d32532c5ed4c |
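A hedged invocation sketch — it requires kallisto and samtools on PATH plus the make_*_command, run_commandline, and write_log helpers from the same source; file names are placeholders:

    # Single-end reads (paired="" selects the single-end kallisto branch):
    make_bams(paired="", fasta="transcripts.fa",
              in_file="reads.fastq", out_file="aligned.bam",
              length=35, std=1)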
def test_schedule_cron_style_policy_with_invalid_cron_month(self): <NEW_LINE> <INDENT> schedule_value_list = ['* * * -30 *', '* * * 13 *', '* * * 2- *', '* * * 6-0 *', '* * * -9 *', '* * * $ *'] <NEW_LINE> for each_schedule_value in schedule_value_list: <NEW_LINE> <INDENT> schedule_policy_cron_style = self.autoscale_behaviors.create_schedule_policy_given( group_id=self.group.id, sp_change=self.sp_change, schedule_cron=each_schedule_value) <NEW_LINE> self.assertEquals(schedule_policy_cron_style['status_code'], 400, msg='Create schedule cron style policy with {0} results in {1}' ' for group {2}'.format(each_schedule_value, schedule_policy_cron_style['status_code'], self.group.id)) | Creating a scaling policy of type schedule via cron style with invalid month value in
cron results in a 400. | 625941b638b623060ff0ac0c |
def set_VideoDuration(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'VideoDuration', value) | Set the value of the VideoDuration input for this Choreo. ((optional, string) Filters search results based on the video duration. Valid values are: any, long, medium, and short.) | 625941b6a17c0f6771cbde71 |