Dataset columns: code (string, 4 to 4.48k chars), docstring (string, 1 to 6.45k chars), _id (string, 24 chars)
def test_create_consistencygroup(self): <NEW_LINE> <INDENT> driver = mock.MagicMock() <NEW_LINE> driver.VERSION = "VERSION" <NEW_LINE> p = self.proxy( self.default_storage_info, mock.MagicMock(), test_mock.cinder.exception, driver) <NEW_LINE> p.ibm_storage_cli = mock.MagicMock() <NEW_LINE> group_obj = self._create_test_group() <NEW_LINE> model_update = p.create_group({}, group_obj) <NEW_LINE> p.ibm_storage_cli.cmd.cg_create.assert_called_once_with( cg=p._cg_name_from_id(group_obj.id), pool='WTF32') <NEW_LINE> self.assertEqual('available', model_update['status'])
Test a successful consistency group (cg) create
625941b87b25080760e392be
def parseLinks(self, linkParams): <NEW_LINE> <INDENT> links = {} <NEW_LINE> for addr1, addr2, p1, p2, c12, c21 in linkParams: <NEW_LINE> <INDENT> link = Link(addr1, addr2, c12, c21, self.latencyMultiplier) <NEW_LINE> links[(addr1,addr2)] = (p1, p2, c12, c21, link) <NEW_LINE> <DEDENT> return links
Parse links from linkParams; returns a dict mapping (addr1, addr2) to (p1, p2, c12, c21, link)
625941b8baa26c4b54cb0f87
def update_zone_index(self, direction): <NEW_LINE> <INDENT> if direction == "north": <NEW_LINE> <INDENT> for key, value in self.zone_index.items(): <NEW_LINE> <INDENT> value[0] = value[0] + 1 <NEW_LINE> <DEDENT> <DEDENT> if direction == "west": <NEW_LINE> <INDENT> for key, value in self.zone_index.items(): <NEW_LINE> <INDENT> value[1] = value[1] + 1
Updates the `zone_index` when new rows or columns are added to the `zone_array`. Indices are only increased when zones are added to the North (i.e. a new row is added to the 0th index of the array) or to the West (i.e. a new column is added in the 0th index of each row). :param direction: Direction in which to update the `zone_array`. :return: None (mutates the `zone_index` values in place)
625941b830c21e258bdfa301
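A minimal standalone sketch of the shift described above (hypothetical data; the real method mutates self.zone_index in place):

# Hypothetical demo: zone_index maps zone names to mutable [row, col] pairs.
zone_index = {"zone_a": [0, 0], "zone_b": [1, 2]}

def shift_indices(zone_index, direction):
    # A row added to the North pushes every row index down by one;
    # a column added to the West pushes every column index right by one.
    for value in zone_index.values():
        if direction == "north":
            value[0] += 1
        if direction == "west":
            value[1] += 1

shift_indices(zone_index, "north")
print(zone_index)  # {'zone_a': [1, 0], 'zone_b': [2, 2]}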
def select_random_move(self): <NEW_LINE> <INDENT> return self._move_function.select_random_move( self.current_neighbourhood)
A method used to generate a random move from the current neighbourhood. Note that this function will only be useable if the neighbourhood given to the constructor is a MultiNeighbourhood. Returns ------- tuple of int A random valid move from the current neighbourhood. Raises ------ WrongMoveTypeError If the move_function isn't a MultiNeighbourhood.
625941b863f4b57ef0000f86
def test_stage2_bootstrap_signals(self): <NEW_LINE> <INDENT> soledad.events.signal.reset_mock() <NEW_LINE> sol = self._soledad_instance( secrets_path='alternative.json', local_db_path='alternative.u1db') <NEW_LINE> soledad.events.signal.mock_calls.reverse() <NEW_LINE> soledad.events.signal.call_args = soledad.events.signal.call_args_list[0] <NEW_LINE> soledad.events.signal.call_args_list.reverse() <NEW_LINE> soledad.events.signal.assert_called_with( proto.SOLEDAD_DOWNLOADING_KEYS, ADDRESS, ) <NEW_LINE> self._pop_mock_call(soledad.events.signal) <NEW_LINE> soledad.events.signal.assert_called_with( proto.SOLEDAD_DONE_DOWNLOADING_KEYS, ADDRESS, ) <NEW_LINE> self._pop_mock_call(soledad.events.signal) <NEW_LINE> soledad.events.signal.assert_called_with( proto.SOLEDAD_CREATING_KEYS, ADDRESS, ) <NEW_LINE> self._pop_mock_call(soledad.events.signal) <NEW_LINE> soledad.events.signal.assert_called_with( proto.SOLEDAD_DONE_CREATING_KEYS, ADDRESS, ) <NEW_LINE> self._pop_mock_call(soledad.events.signal) <NEW_LINE> soledad.events.signal.assert_called_with( proto.SOLEDAD_DOWNLOADING_KEYS, ADDRESS, ) <NEW_LINE> self._pop_mock_call(soledad.events.signal) <NEW_LINE> soledad.events.signal.assert_called_with( proto.SOLEDAD_DONE_DOWNLOADING_KEYS, ADDRESS, ) <NEW_LINE> self._pop_mock_call(soledad.events.signal) <NEW_LINE> soledad.events.signal.assert_called_with( proto.SOLEDAD_UPLOADING_KEYS, ADDRESS, ) <NEW_LINE> self._pop_mock_call(soledad.events.signal) <NEW_LINE> soledad.events.signal.assert_called_with( proto.SOLEDAD_DONE_UPLOADING_KEYS, ADDRESS, )
Test that a fresh soledad emits all bootstrap signals.
625941b8e5267d203edcdb05
def _create_signals(self): <NEW_LINE> <INDENT> self.exit_item.triggered.connect(self.close)
Binds logic to the GUI elements via "signal-slot" pairs. @return: -
625941b891af0d3eaac9b878
def __init__(self, *args, **kwds): <NEW_LINE> <INDENT> pass
Do nothing here, since __init__ will be called every time cls is instantiated
625941b8009cb60464c63220
def get_mailinfo(self, box, index): <NEW_LINE> <INDENT> req = self.set_req(MAIL + '/%s/%s' % (box, index)) <NEW_LINE> return self.send_req(req)
:param box: inbox|outbox|deleted :param index: :return:
625941b8090684286d50eb44
def linenum(self): <NEW_LINE> <INDENT> return self.tos().linenum
Return linenum of current tos
625941b8b7558d58953c4d7f
def is_boolean(self): <NEW_LINE> <INDENT> return self.tree._root.get("profileType") == "booleanTerms"
Does the VDEX profile type denote a boolean type vocabulary?
625941b850812a4eaa59c189
def apply_default_labels(self, other): <NEW_LINE> <INDENT> other_updated = other.copy() <NEW_LINE> other_updated.units_label = self.units_label <NEW_LINE> other_updated.name_label = self.name_label <NEW_LINE> other_updated.notes_label = self.notes_label <NEW_LINE> other_updated.desc_label = self.desc_label <NEW_LINE> other_updated.plot_label = self.plot_label <NEW_LINE> other_updated.axis_label = self.axis_label <NEW_LINE> other_updated.scale_label = self.scale_label <NEW_LINE> other_updated.min_label = self.min_label <NEW_LINE> other_updated.max_label = self.max_label <NEW_LINE> other_updated.fill_label = self.fill_label <NEW_LINE> return other_updated
Applies labels for default meta labels from self onto other. Parameters ---------- other : Meta Meta object to have default labels applied Returns ------- Meta
625941b8d18da76e23532336
def _construct_monthly_climate(self, cl, now, end): <NEW_LINE> <INDENT> if self.simulation: <NEW_LINE> <INDENT> if self.md.duration_unit=='month': <NEW_LINE> <INDENT> months = range(self.timestep_length) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> months = range(12 * self.timestep_length) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> months = range(12) <NEW_LINE> <DEDENT> rain = 0.0 <NEW_LINE> temp = 0.0 <NEW_LINE> maxtemp = 0.0 <NEW_LINE> mintemp = 0.0 <NEW_LINE> maxind = len(self.md.monthly_climate) - 1 <NEW_LINE> for m in months: <NEW_LINE> <INDENT> if self.curr_month_ind > maxind: <NEW_LINE> <INDENT> self.curr_month_ind = 0 <NEW_LINE> <DEDENT> mtemp = self.md.monthly_climate[self.curr_month_ind].temperature <NEW_LINE> temp += mtemp <NEW_LINE> if mtemp < mintemp: <NEW_LINE> <INDENT> mintemp = mtemp <NEW_LINE> <DEDENT> if mtemp > maxtemp: <NEW_LINE> <INDENT> maxtemp = mtemp <NEW_LINE> <DEDENT> rain += 12 * self.md.monthly_climate[self.curr_month_ind].rainfall <NEW_LINE> self.curr_month_ind += 1 <NEW_LINE> <DEDENT> cl['rain'] = rain / len(months) <NEW_LINE> cl['temp'] = temp / len(months) <NEW_LINE> cl['amplitude'] = (maxtemp - mintemp) / 2.0 <NEW_LINE> return cl
Summarizes the monthly climate data into rain, temp and amplitude given the start and end dates cl -- climate dictionary now -- start date end -- end date
625941b8d58c6744b4257ac4
def get_bader_charges(atoms, calc, charge_source="all-electron", gridrefinement=4): <NEW_LINE> <INDENT> if spawn.find_executable("bader") is None: <NEW_LINE> <INDENT> error(( "Cannot find the \"bader\" executable in PATH. The bader " "executable is provided in the pysic/tools folder, or it can be " "downloaded from http://theory.cm.utexas.edu/henkelman/code/bader/. " "Ensure that the executable is named \"bader\", place it in any " "directory you want and then add that directory to your system " "PATH.")) <NEW_LINE> <DEDENT> atoms_copy = atoms.copy() <NEW_LINE> calc.set_atoms(atoms_copy) <NEW_LINE> if charge_source == "pseudo": <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> density = np.array(calc.get_pseudo_density()) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> error("The calculator doesn't provide pseudo density.") <NEW_LINE> <DEDENT> <DEDENT> if charge_source == "all-electron": <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> density = np.array(calc.get_all_electron_density(gridrefinement=gridrefinement)) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> error("The calculator doesn't provide all electron density.") <NEW_LINE> <DEDENT> <DEDENT> wrk_dir = os.getcwd()+"/.BADERTEMP" <NEW_LINE> dir_created = False <NEW_LINE> if rank == 0: <NEW_LINE> <INDENT> if not os.path.exists(wrk_dir): <NEW_LINE> <INDENT> os.makedirs(wrk_dir) <NEW_LINE> dir_created = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> error("Tried to create a temporary folder in " + wrk_dir + ", but the folder already existed. Please remove it manually first.") <NEW_LINE> <DEDENT> rho = density * Bohr**3 <NEW_LINE> write(wrk_dir + '/electron_density.cube', atoms, data=rho) <NEW_LINE> command = "cd " + wrk_dir + "; bader electron_density.cube" <NEW_LINE> subprocess.check_output(command, shell=True) <NEW_LINE> <DEDENT> barrier() <NEW_LINE> bader.attach_charges(atoms_copy, wrk_dir + "/ACF.dat") <NEW_LINE> try: <NEW_LINE> <INDENT> bader_charges = np.array(atoms_copy.get_initial_charges()) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> bader_charges = np.array(atoms_copy.get_charges()) <NEW_LINE> <DEDENT> if rank == 0: <NEW_LINE> <INDENT> if dir_created: <NEW_LINE> <INDENT> shutil.rmtree(wrk_dir) <NEW_LINE> <DEDENT> <DEDENT> return bader_charges
This function uses an external Bader charge calculator from http://theory.cm.utexas.edu/henkelman/code/bader/. This tool is provided also in pysic/tools. Before using this function the bader executable directory has to be added to PATH. Parameters: atoms: ASE Atoms The structure from which we want to calculate the charges from. calc: ASE calculator charge_source: string Indicates the electron density that is used in charge calculation. Can be "pseudo" or "all-electron". gridrefinement: int The factor by which the calculation grid is densified in charge calculation. Returns: numpy array of the atomic charges
625941b8cc40096d615957b7
def IP_Source7(self): <NEW_LINE> <INDENT> url = ['http://www.xdaili.cn/ipagent/freeip/getFreeIps?page=1&rows=10'] <NEW_LINE> try: <NEW_LINE> <INDENT> res = requests.get(url[0], headers=self.headers) <NEW_LINE> proxies = res.json()['RESULT']['rows'] <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print('source7', e.args) <NEW_LINE> return <NEW_LINE> <DEDENT> for __ in proxies: <NEW_LINE> <INDENT> proxy = __['ip'] + ":" + __['port'], __['anony'] <NEW_LINE> yield proxy
IP proxy source: Xdaili, http://www.xdaili.cn
625941b8a79ad161976cbfa9
def _add_data_to_model(self, qinfos): <NEW_LINE> <INDENT> if self.gps is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> qinfos = [qinfo for qinfo in qinfos if qinfo.val != EVAL_ERROR_CODE] <NEW_LINE> if len(qinfos) == 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> new_points = [qinfo.point for qinfo in qinfos] <NEW_LINE> new_vals = [qinfo.val for qinfo in qinfos] <NEW_LINE> if self.is_an_mf_method(): <NEW_LINE> <INDENT> raise NotImplementedError(_NO_MF_FOR_MOGPB_ERR_MSG) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._add_data_to_gps((new_points, new_vals))
Add data to self.gps
625941b8187af65679ca4f81
def pltlf_and(self, args): <NEW_LINE> <INDENT> if len(args) == 1: <NEW_LINE> <INDENT> return args[0] <NEW_LINE> <DEDENT> elif (len(args) - 1) % 2 == 0: <NEW_LINE> <INDENT> subformulas = args[::2] <NEW_LINE> return PLTLfAnd(subformulas) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ParsingError
Parse PLTLf And.
625941b8283ffb24f3c55770
def dochdir(thedir): <NEW_LINE> <INDENT> if flag_echo or flag_dryrun: <NEW_LINE> <INDENT> sys.stderr.write("cd " + thedir + "\n") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> os.chdir(thedir) <NEW_LINE> <DEDENT> except OSError as err: <NEW_LINE> <INDENT> u.error("chdir failed: %s" % err)
Switch to dir.
625941b8e1aae11d1e749b18
def setPublisherRef(self, publisherRef): <NEW_LINE> <INDENT> self.logPublisher = publisherRef
Sets the publisher handle so this class can publish log messages :param publisherRef: publisher handle (passed by reference) :type: ZeroMQPublisher()
625941b8adb09d7d5db6c5f7
def deploy_local_data(): <NEW_LINE> <INDENT> require('settings', provided_by=[production, staging]) <NEW_LINE> write_www_files() <NEW_LINE> _gzip_www() <NEW_LINE> local(('s3cmd -P --add-header=Cache-control:max-age=5 --add-header=Content-encoding:gzip --guess-mime-type put gzip/*.json s3://%(s3_bucket)s/') % env) <NEW_LINE> if env.alt_s3_bucket: <NEW_LINE> <INDENT> local(('s3cmd -P --add-header=Cache-control:max-age=5 --add-header=Content-encoding:gzip --guess-mime-type put gzip/*.json s3://%(alt_s3_bucket)s/') % env)
Deploy the local data files to S3.
625941b8dd821e528d63b00f
def _parse_outer(self): <NEW_LINE> <INDENT> begin = self._consumer.get_pos() <NEW_LINE> end = begin <NEW_LINE> begin_line = self._consumer.get_line() <NEW_LINE> begin_row = self._consumer.get_row() <NEW_LINE> ch = self._consumer.peek() <NEW_LINE> while ch != '\0': <NEW_LINE> <INDENT> if ch == '{': <NEW_LINE> <INDENT> ahead = self._consumer.peek(1) <NEW_LINE> if ahead == '%': <NEW_LINE> <INDENT> if begin != end: <NEW_LINE> <INDENT> return TemplateParser.OUTER_TOKEN_LITERAL, self._consumer.substr(begin, end), begin_line, begin_row <NEW_LINE> <DEDENT> self._consumer.read() <NEW_LINE> self._consumer.read() <NEW_LINE> begin_line = self._consumer.get_line() <NEW_LINE> begin_row = self._consumer.get_row() <NEW_LINE> chars = [] <NEW_LINE> while True: <NEW_LINE> <INDENT> ch = self._consumer.read() <NEW_LINE> if ch == '\0': <NEW_LINE> <INDENT> raise ParseError("Unexpected eof", self._consumer.get_line(), self._consumer.get_row()) <NEW_LINE> <DEDENT> elif ch == '%': <NEW_LINE> <INDENT> if self._consumer.peek() == '}': <NEW_LINE> <INDENT> self._consumer.read() <NEW_LINE> return TemplateParser.OUTER_TOKEN_EXPRESS, "".join(chars), begin_line, begin_row <NEW_LINE> <DEDENT> elif self._consumer.peek() == '%': <NEW_LINE> <INDENT> self._consumer.read() <NEW_LINE> <DEDENT> <DEDENT> chars.append(ch) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self._consumer.read() <NEW_LINE> ch = self._consumer.peek() <NEW_LINE> end = self._consumer.get_pos() <NEW_LINE> <DEDENT> return TemplateParser.OUTER_TOKEN_LITERAL, self._consumer.substr(begin, end), begin_line, begin_row
Outer-level parsing function. Splits the input into two kinds of parts: literals and expressions. Parsing of an expression starts when '{%' is encountered; inside an expression '%%' escapes to '%', so '%%>' becomes '%>' without ending the expression. :return: type, content, start line, start column
625941b86aa9bd52df036c06
def load_point_rdd(csv_rdd): <NEW_LINE> <INDENT> def load_record(record): <NEW_LINE> <INDENT> result = StringIO.StringIO(record) <NEW_LINE> fieldnames = ['longitude', 'latitude', 'month', 'maximum', 'mean'] <NEW_LINE> reader = csv.DictReader(result, fieldnames) <NEW_LINE> return reader.next() <NEW_LINE> <DEDENT> return csv_rdd.map(load_record).map(lambda rec: Point(**rec))
Return an RDD of Point objects. The rdd argument must be an RDD of CSV records representative of Point objects.
625941b8287bf620b61d38d3
def bfgs(self, f, x0, d0, g0, Q0, epslon, i, alpha): <NEW_LINE> <INDENT> g = Gradient(f, x0) <NEW_LINE> if sum(abs(d0)) < epslon or i != 0: <NEW_LINE> <INDENT> Q = [self.params['hessian']['initial'] if self.params['hessian']['initial'] else np.identity(len(x0))][0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> q = (g - g0)[np.newaxis].T <NEW_LINE> p = (alpha * d0)[np.newaxis].T <NEW_LINE> Q = Q0 + (1.0 + q.T.dot(Q0).dot(q) / (q.T.dot(p))) * (p.dot(p.T)) / (p.T.dot(q)) - ( p.dot(q.T).dot(Q0) + Q0.dot(q).dot(p.T)) / (q.T.dot(p)) <NEW_LINE> <DEDENT> d = -Q.dot(g) <NEW_LINE> return d, g, Q
Broyden-Fletcher-Goldfarb-Shanno ..fun as callable object; must be a function of x0 and return a single number ..x0 as a numeric array; point from which to start
625941b8167d2b6e31218a02
def _crop_pool_layer(bottom, rois, max_pool=True): <NEW_LINE> <INDENT> rois = rois.detach() <NEW_LINE> batch_size = bottom.size(0) <NEW_LINE> D = bottom.size(1) <NEW_LINE> H = bottom.size(2) <NEW_LINE> W = bottom.size(3) <NEW_LINE> roi_per_batch = rois.size(0) / batch_size <NEW_LINE> x1 = rois[:, 1::4] / 16.0 <NEW_LINE> y1 = rois[:, 2::4] / 16.0 <NEW_LINE> x2 = rois[:, 3::4] / 16.0 <NEW_LINE> y2 = rois[:, 4::4] / 16.0 <NEW_LINE> height = bottom.size(2) <NEW_LINE> width = bottom.size(3) <NEW_LINE> zero = Variable(rois.data.new(rois.size(0), 1).zero_()) <NEW_LINE> theta = torch.cat([ (x2 - x1) / (width - 1), zero, (x1 + x2 - width + 1) / (width - 1), zero, (y2 - y1) / (height - 1), (y1 + y2 - height + 1) / (height - 1)], 1).view(-1, 2, 3) <NEW_LINE> if max_pool: <NEW_LINE> <INDENT> pre_pool_size = cfg.POOLING_SIZE * 2 <NEW_LINE> grid = F.affine_grid(theta, torch.Size((rois.size(0), 1, pre_pool_size, pre_pool_size))) <NEW_LINE> bottom = bottom.view(1, batch_size, D, H, W).contiguous().expand(roi_per_batch, batch_size, D, H, W) .contiguous().view(-1, D, H, W) <NEW_LINE> crops = F.grid_sample(bottom, grid) <NEW_LINE> crops = F.max_pool2d(crops, 2, 2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> grid = F.affine_grid(theta, torch.Size((rois.size(0), 1, cfg.POOLING_SIZE, cfg.POOLING_SIZE))) <NEW_LINE> bottom = bottom.view(1, batch_size, D, H, W).contiguous().expand(roi_per_batch, batch_size, D, H, W) .contiguous().view(-1, D, H, W) <NEW_LINE> crops = F.grid_sample(bottom, grid) <NEW_LINE> <DEDENT> return crops, grid
The affine transform built for ROI cropping: theta = [[ (x2-x1)/(W-1), 0, (x1+x2-W+1)/(W-1) ], [ 0, (y2-y1)/(H-1), (y1+y2-H+1)/(H-1) ]]
625941b815fb5d323cde096e
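A quick numeric check of the affine matrix above: for an ROI covering the whole feature map (x1=0, x2=W-1, y1=0, y2=H-1), the normalized transform should reduce to the identity with zero translation, which is what F.affine_grid expects. A sketch with plain floats (W and H chosen arbitrarily):

W, H = 32, 24
x1, x2, y1, y2 = 0.0, W - 1.0, 0.0, H - 1.0
row0 = [(x2 - x1) / (W - 1), 0.0, (x1 + x2 - W + 1) / (W - 1)]
row1 = [0.0, (y2 - y1) / (H - 1), (y1 + y2 - H + 1) / (H - 1)]
print(row0, row1)  # [1.0, 0.0, 0.0] [0.0, 1.0, 0.0] -> identity, no translation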
def test_deprecated(caplog): <NEW_LINE> <INDENT> schema = vol.Schema({ 'venus': cv.boolean, 'mars': cv.boolean }) <NEW_LINE> deprecated_schema = vol.All( cv.deprecated('mars'), schema ) <NEW_LINE> deprecated_schema({'venus': True}) <NEW_LINE> assert len(caplog.records) == 0 <NEW_LINE> deprecated_schema({'mars': True}) <NEW_LINE> assert len(caplog.records) == 1 <NEW_LINE> assert caplog.records[0].name == __name__ <NEW_LINE> assert ("The 'mars' option (with value 'True') is deprecated, " "please remove it from your configuration.") in caplog.text
Test deprecation log.
625941b85fc7496912cc37ea
def cnst_c(self): <NEW_LINE> <INDENT> raise NotImplementedError()
Compute constant component :math:`\mathbf{c}` of ADMM problem constraint. This method should not be used or overridden: all calculations should make use of components :meth:`cnst_c0` and :meth:`cnst_c1` so that these methods can return scalar zeros instead of zero arrays if appropriate.
625941b885dfad0860c3acbd
def move(self): <NEW_LINE> <INDENT> location = self.check_grid(int) <NEW_LINE> if location and not self.moved: <NEW_LINE> <INDENT> assert abs(self.x-location[0])<=1 and abs(self.y-location[1])<=1 and abs(self.z-location[2])<=1, "Error in move" <NEW_LINE> self.island.remove(self) <NEW_LINE> self.x = location[0] <NEW_LINE> self.y = location[1] <NEW_LINE> self.z = location[2] <NEW_LINE> self.island.register(self) <NEW_LINE> self.moved=True
Move to an open, neighbouring position
625941b8d8ef3951e32433a1
def gen_W(users, items, ratings): <NEW_LINE> <INDENT> user = users.tolist() <NEW_LINE> item = items.tolist() <NEW_LINE> rating = ratings.tolist() <NEW_LINE> B = nx.Graph() <NEW_LINE> B.add_nodes_from(user, bipartite=0) <NEW_LINE> B.add_nodes_from(item, bipartite=1) <NEW_LINE> for i in range(len(user)): <NEW_LINE> <INDENT> B.add_edges_from([(user[i], item[i])]) <NEW_LINE> B[user[i]][item[i]]['weight'] = rating[i] <NEW_LINE> <DEDENT> users_unique = users.unique() <NEW_LINE> items_unique = items.unique() <NEW_LINE> W = biadjacency_matrix(B, users_unique,items_unique).toarray() <NEW_LINE> W = spa.csr_matrix(W) <NEW_LINE> print('Shape of W: '+str(W.shape)) <NEW_LINE> return W, users_unique, items_unique
This function generates a sparse matrix representation of the bipartite graph for players and lineups Input: users - pandas series items - pandas series ratings - pandas series Output: sparse biadjacency matrix W
625941b823e79379d52ee3cc
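A usage sketch for gen_W, assuming the function's module-level imports are in scope (networkx as nx, biadjacency_matrix from networkx.algorithms.bipartite, scipy.sparse as spa) and that user and item labels do not collide:

import pandas as pd

users = pd.Series(["u1", "u1", "u2"])
items = pd.Series(["i1", "i2", "i1"])
ratings = pd.Series([5, 3, 4])
W, users_unique, items_unique = gen_W(users, items, ratings)
# W is a 2x2 scipy CSR matrix: rows follow users_unique, columns items_unique,
# and each entry holds the rating on the corresponding user-item edge.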
def copy_visibility(vis: Union[Visibility, BlockVisibility], zero=False) -> Union[Visibility, BlockVisibility]: <NEW_LINE> <INDENT> newvis = copy.copy(vis) <NEW_LINE> newvis.data = copy.deepcopy(vis.data) <NEW_LINE> if zero: <NEW_LINE> <INDENT> newvis.data['vis'][...] = 0.0 <NEW_LINE> <DEDENT> return newvis
Copy a visibility Performs a deepcopy of the data array
625941b8507cdc57c6306b38
def submit_button(*args, **kwargs): <NEW_LINE> <INDENT> submit_button = wtforms.SubmitField(*args, **kwargs) <NEW_LINE> submit_button.input_type = 'submit_button' <NEW_LINE> return submit_button
Create a submit button
625941b8cb5e8a47e48b7913
def set_axis(self, axis): <NEW_LINE> <INDENT> assert is_integer(axis), LOGGER.error("rotation symmetry axis must be an integer") <NEW_LINE> axis = INT_TYPE(axis) <NEW_LINE> assert axis>=0, LOGGER.error("rotation symmetry axis must be positive.") <NEW_LINE> assert axis<=2,LOGGER.error("rotation symmetry axis must be smaller or equal to 2") <NEW_LINE> self.__axis = axis
Set the symmetry axis index to rotate about. :Parameters: #. axis (integer): Must be 0,1 or 2 for respectively the main, secondary or tertiary symmetry axis
625941b855399d3f05588518
def mesh_pattern_coordinate(mesh_pattern): <NEW_LINE> <INDENT> coordinate_list = [] <NEW_LINE> for rownum, row in enumerate(mesh_pattern): <NEW_LINE> <INDENT> for colnum, value in enumerate(row): <NEW_LINE> <INDENT> if value == 1: <NEW_LINE> <INDENT> coordinate_list.append((rownum, colnum)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return coordinate_list
List all coordinates of prohibited areas in the mesh pattern @param mesh_pattern: 4x4 list showing prohibited areas in pattern @return: coordinates of prohibited areas labelled as 1 >>> mesh_pattern_coordinate([[0, 0, 1, 0], [0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 1, 0]]) [(0, 2), (2, 2), (3, 2)]
625941b86e29344779a6247a
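The doctest above, run as a plain call:

pattern = [[0, 0, 1, 0],
           [0, 0, 0, 0],
           [0, 0, 1, 0],
           [0, 0, 1, 0]]
print(mesh_pattern_coordinate(pattern))  # [(0, 2), (2, 2), (3, 2)]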
def Clone(self): <NEW_LINE> <INDENT> pass
Clone(self: VectorKeyFrameCollection) -> VectorKeyFrameCollection Creates a modifiable clone of this System.Windows.Media.Animation.VectorKeyFrameCollection, making deep copies of this object's values. When copying dependency properties,this method copies resource references and data bindings (but they might no longer resolve) but not animations or their current values. Returns: A modifiable clone of the current object. The cloned object's System.Windows.Freezable.IsFrozen property will be false even if the source's System.Windows.Freezable.IsFrozen property was true.
625941b830bbd722463cbc27
@csrf_protect <NEW_LINE> @login_required(login_url='/userprofile/login/') <NEW_LINE> def like(request, comment_id, next=None): <NEW_LINE> <INDENT> comment = get_object_or_404(get_comment_model(), pk=comment_id, site__pk=get_current_site_id(request)) <NEW_LINE> if not has_app_model_option(comment)['allow_feedback']: <NEW_LINE> <INDENT> ctype = ContentType.objects.get_for_model(comment.content_object) <NEW_LINE> raise Http404("Comments posted to instances of '%s.%s' are not " "explicitly allowed to receive 'liked it' flags. " "Check the COMMENTS_XTD_APP_MODEL_OPTIONS " "setting." % (ctype.app_label, ctype.model)) <NEW_LINE> <DEDENT> if request.method == 'POST': <NEW_LINE> <INDENT> perform_like(request, comment) <NEW_LINE> return next_redirect(request, fallback=next or 'comments-xtd-like-done', c=comment.pk) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> liked_it = request.user in comment.users_flagging(LIKEDIT_FLAG) <NEW_LINE> return render(request, 'django_comments_xtd/like.html', {'comment': comment, 'already_liked_it': liked_it, 'next': next})
Like a comment. Confirmation on GET, action on POST. Templates: :template:`django_comments_xtd/like.html`, Context: comment the flagged `comments.comment` object
625941b88e71fb1e9831d612
def transform(self, y): <NEW_LINE> <INDENT> check_is_fitted(self) <NEW_LINE> y = column_or_1d(y, warn=True) <NEW_LINE> if _num_samples(y) == 0: <NEW_LINE> <INDENT> return np.array([]) <NEW_LINE> <DEDENT> return _encode(y, uniques=self.classes_)
Transform labels to normalized encoding. Parameters ---------- y : array-like of shape (n_samples,) Target values. Returns ------- y : array-like of shape (n_samples,) Labels as normalized encodings.
625941b8fbf16365ca6f6021
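This matches the transform step of scikit-learn's LabelEncoder; a typical round trip through the public API looks like:

from sklearn.preprocessing import LabelEncoder

le = LabelEncoder()
le.fit(["paris", "tokyo", "paris"])
print(le.classes_)                       # ['paris' 'tokyo']
print(le.transform(["tokyo", "paris"]))  # [1 0]
print(le.inverse_transform([0, 1]))      # ['paris' 'tokyo']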
def __init__(self, name, book_value, profits, mu=0.058, brownian_delta=0.00396825396, brownian_sigma=0.125, dividend_rate=1): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.book_value = book_value <NEW_LINE> self.profit = profits[-1] <NEW_LINE> self.dividend_rate = dividend_rate <NEW_LINE> self.profit_history = profits <NEW_LINE> self.drift_mu = mu <NEW_LINE> self.brownian_delta = brownian_delta <NEW_LINE> self.brownian_sigma = brownian_sigma <NEW_LINE> self.accounting_rate_of_return = profits[-1] / float(book_value)
Creates a new trader
625941b8d10714528d5ffb44
def test_12_update_application(self): <NEW_LINE> <INDENT> self.register() <NEW_LINE> self.new_application() <NEW_LINE> res = self.update_application(method="GET") <NEW_LINE> assert self.html_title("Update the application: Sample App") in res.data, res <NEW_LINE> assert 'input id="id" name="id" type="hidden" value="1"' in res.data, res <NEW_LINE> assert "Save the changes" in res.data, res <NEW_LINE> res = self.update_application(new_name="New Sample App", new_short_name="newshortname", new_description="New description", new_long_description=u'New long desc', new_hidden=True) <NEW_LINE> assert "Application updated!" in res.data, res
Test that updating a web application works
625941b83539df3088e2e1b0
def test_prev_next(self) -> None: <NEW_LINE> <INDENT> sample_to_sample_datas = defaultdict(lambda: []) <NEW_LINE> for sample_data in self.nuim.sample_data: <NEW_LINE> <INDENT> sample_to_sample_datas[sample_data['sample_token']].append(sample_data['token']) <NEW_LINE> <DEDENT> print('Checking prev-next pointers for completeness and correct ordering...') <NEW_LINE> for sample in self.nuim.sample: <NEW_LINE> <INDENT> sd_tokens_pointers = self.nuim.get_sample_content(sample['token']) <NEW_LINE> sd_tokens_all = sample_to_sample_datas[sample['token']] <NEW_LINE> self.assertTrue(set(sd_tokens_pointers) == set(sd_tokens_all), 'Error: Inconsistency in prev/next pointers!') <NEW_LINE> timestamps = [] <NEW_LINE> for sd_token in sd_tokens_pointers: <NEW_LINE> <INDENT> sample_data = self.nuim.get('sample_data', sd_token) <NEW_LINE> timestamps.append(sample_data['timestamp']) <NEW_LINE> <DEDENT> self.assertTrue(sorted(timestamps) == timestamps, 'Error: Timestamps not properly sorted!')
Test that the prev and next points in sample_data cover all entries and have the correct ordering.
625941b8d164cc6175782bb2
def load_pretrained_weights(model, file_path='', pretrained_dict=None, extra_prefix=''): <NEW_LINE> <INDENT> def _remove_prefix(key, prefix): <NEW_LINE> <INDENT> prefix = prefix + '.' <NEW_LINE> if key.startswith(prefix): <NEW_LINE> <INDENT> key = key[len(prefix):] <NEW_LINE> <DEDENT> return key <NEW_LINE> <DEDENT> if file_path: <NEW_LINE> <INDENT> check_isfile(file_path) <NEW_LINE> <DEDENT> checkpoint = (load_checkpoint(file_path) if not pretrained_dict else pretrained_dict) <NEW_LINE> if 'state_dict' in checkpoint: <NEW_LINE> <INDENT> state_dict = checkpoint['state_dict'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> state_dict = checkpoint <NEW_LINE> <DEDENT> model_dict = model.state_dict() <NEW_LINE> new_state_dict = OrderedDict() <NEW_LINE> matched_layers, discarded_layers = [], [] <NEW_LINE> for k, v in state_dict.items(): <NEW_LINE> <INDENT> k = extra_prefix + _remove_prefix(k, 'module') <NEW_LINE> if k in model_dict and model_dict[k].size() == v.size(): <NEW_LINE> <INDENT> new_state_dict[k] = v <NEW_LINE> matched_layers.append(k) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> discarded_layers.append(k) <NEW_LINE> <DEDENT> <DEDENT> model_dict.update(new_state_dict) <NEW_LINE> model.load_state_dict(model_dict) <NEW_LINE> message = file_path if file_path else "pretrained dict" <NEW_LINE> unmatched_layers = sorted(set(model_dict.keys()) - set(new_state_dict)) <NEW_LINE> if len(matched_layers) == 0: <NEW_LINE> <INDENT> print( 'The pretrained weights "{}" cannot be loaded, ' 'please check the key names manually'.format(message) ) <NEW_LINE> _print_loading_weights_inconsistencies(discarded_layers, unmatched_layers) <NEW_LINE> raise RuntimeError(f'The pretrained weights {message} cannot be loaded') <NEW_LINE> <DEDENT> print( 'Successfully loaded pretrained weights from "{}"'. format(message) ) <NEW_LINE> _print_loading_weights_inconsistencies(discarded_layers, unmatched_layers)
Loads pretrained weights to model. Imported from openvinotoolkit/deep-object-reid. Features:: - Incompatible layers (unmatched in name or size) will be ignored. - Can automatically deal with keys containing "module.". Args: model (nn.Module): network model. file_path (str): path to pretrained weights.
625941b8a4f1c619b28afea6
def is_stale(self): <NEW_LINE> <INDENT> return True
With attributes set on self, return a boolean. Calculate the lat/lng bounds of this tile (including half a dot-width of padding), then check: SELECT count(uid) FROM points WHERE modtime < modtime_tile
625941b845492302aab5e125
def get_city_data(city_id) : <NEW_LINE> <INDENT> def clean_data(data) : <NEW_LINE> <INDENT> keys = {'name', 'display_name', 'statistics'} <NEW_LINE> missing_keys = keys - set(data.keys()) <NEW_LINE> if missing_keys : <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> city_stats = data['statistics']['all'] <NEW_LINE> city_data = { 'name': data['display_name'] if data['display_name'] else data['name'], 'popularity': city_stats['followers'] if city_stats['followers'] else 0, 'investor_followers': city_stats['investor_followers'] if city_stats['investor_followers'] else 0, 'num_companies': city_stats['startups'] if city_stats['startups'] else 0, 'num_people': city_stats['users'] if city_stats['users'] else 0 } <NEW_LINE> return city_data <NEW_LINE> <DEDENT> response = None <NEW_LINE> retries = 3 <NEW_LINE> while retries and not response : <NEW_LINE> <INDENT> try : <NEW_LINE> <INDENT> response = al.get_tags(city_id) <NEW_LINE> logging.info("Received response for city data of: " + str(city_id)) <NEW_LINE> <DEDENT> except : <NEW_LINE> <INDENT> logging.debug("City data response error from city: " + str(city_id)) <NEW_LINE> retries -= 1 <NEW_LINE> time.sleep(2) <NEW_LINE> <DEDENT> <DEDENT> if not response or response['tag_type'] != 'LocationTag' : <NEW_LINE> <INDENT> logging.debug("tag_id is not for a location: " + city_id) <NEW_LINE> num_missed_city_data += 1 <NEW_LINE> return <NEW_LINE> <DEDENT> return clean_data(response)
Returns all city data
625941b85510c4643540f25c
def connect_JSON(config): <NEW_LINE> <INDENT> testnet = config.get('testnet', '0') <NEW_LINE> testnet = (int(testnet) > 0) <NEW_LINE> if 'rpcport' not in config: <NEW_LINE> <INDENT> config['rpcport'] = 16990 if testnet else 6990 <NEW_LINE> <DEDENT> connect = "http://%s:%s@127.0.0.1:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport']) <NEW_LINE> try: <NEW_LINE> <INDENT> result = ServiceProxy(connect) <NEW_LINE> if result.getmininginfo()['testnet'] != testnet: <NEW_LINE> <INDENT> sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n") <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> sys.stderr.write("Error connecting to RPC server at "+connect+"\n") <NEW_LINE> sys.exit(1)
Connect to a hotchain JSON-RPC server
625941b8c432627299f04aa9
def getAgentsOfActiveMap(self): <NEW_LINE> <INDENT> return self.getAgentsOfMap(self.active_map.map.getId())
Returns the agents that are on the active map @return: A dictionary with the agents of the map
625941b8796e427e537b0427
def write_table(self, anffd, timeq, flags, swid, phases, delays, rates): <NEW_LINE> <INDENT> timeq2 = timeq + " AND (ANTENNA1 = {} OR ANTENNA2 = {})".format(self.ref_antenna, self.ref_antenna) <NEW_LINE> obsid, field, scan = [ffd.distinct_thing(self.msname, timeq2, col) for col in ['OBSERVATION_ID', 'FIELD_ID', 'SCAN_NUMBER']] <NEW_LINE> darr = -delays*1e9 <NEW_LINE> pharr = -phases <NEW_LINE> rarr = -rates <NEW_LINE> for i, s in enumerate(self.antennas2): <NEW_LINE> <INDENT> antenna = s <NEW_LINE> casalog.post("Writing row {} for antenna {}".format(i, antenna)) <NEW_LINE> assert anffd.get_antenna_index(s) == i <NEW_LINE> time = anffd.get_min_time() <NEW_LINE> interval = anffd.get_interval() <NEW_LINE> ref_freq = anffd.ref_freq <NEW_LINE> ph = pharr + 0.5*interval*2*np.pi*rarr*ref_freq <NEW_LINE> if (flags[0, i] or flags[1, i]): <NEW_LINE> <INDENT> casalog.post("Skipping station {} swid {}" "".format(s, swid)) <NEW_LINE> continue <NEW_LINE> <DEDENT> casalog.post("Writing station {} swid {}" "".format(s, swid)) <NEW_LINE> v = [ph[0, i], darr[0, i], rarr[0, i], ph[1, i], darr[1, i], rarr[1, i]], <NEW_LINE> param = np.array(v, dtype='float32', ndmin=2).transpose() <NEW_LINE> make_table.add_row(self.fj_name, self.rowcount, time + interval/2, interval, antenna, field, scan, obsid, swid, param) <NEW_LINE> self.rowcount += 1 <NEW_LINE> <DEDENT> return None
Write out the results in the approved FringeJones table format. We use 'make_table' to handle the details of the table.
625941b8462c4b4f79d1d535
def history(self, hist_type=None, hist_subtype=0, force_time_axis=False, reference_time=None, sync_to_index=None): <NEW_LINE> <INDENT> import pandas as pd <NEW_LINE> hist_str = None <NEW_LINE> hist_defined = [c for c in dir(Enum) if c.startswith("HIST_")] <NEW_LINE> if hist_type is None: <NEW_LINE> <INDENT> all_hist = {} <NEW_LINE> for hist in hist_defined: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = self.getHistory(hist) <NEW_LINE> all_hist[hist] = result <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return all_hist <NEW_LINE> <DEDENT> if type(hist_type) == str: <NEW_LINE> <INDENT> if "HIST_" not in hist_type: <NEW_LINE> <INDENT> hist_str = hist_type <NEW_LINE> hist_type = "HIST_" + hist_type <NEW_LINE> <DEDENT> if hist_type not in hist_defined: <NEW_LINE> <INDENT> msg = "Unknown History Type {}.".format(hist_type) <NEW_LINE> msg += "available constants in ifm.Enum: " <NEW_LINE> msg += ",".join(hist_defined) <NEW_LINE> raise ValueError(msg) <NEW_LINE> <DEDENT> hist_type = Enum.__dict__[hist_type] <NEW_LINE> <DEDENT> chart = self.doc.getHistoryValues(hist_type, hist_subtype) <NEW_LINE> times = chart[0] <NEW_LINE> values = chart[1] <NEW_LINE> itemnames = chart[2] <NEW_LINE> df = pd.DataFrame(values, columns=times, index=itemnames, ).T <NEW_LINE> df.index.name = "Simulation Time" <NEW_LINE> if hist_type in ['ANA', 'MULTW_FLUX', 'BHE', 'VARIO']: <NEW_LINE> <INDENT> force_time_axis = True <NEW_LINE> <DEDENT> if self.doc.getReferenceTime() is None and reference_time is None: <NEW_LINE> <INDENT> force_time_axis = True <NEW_LINE> <DEDENT> if not force_time_axis: <NEW_LINE> <INDENT> if reference_time is None: <NEW_LINE> <INDENT> reference_time = self.doc.getReferenceTime() <NEW_LINE> <DEDENT> df["Time"] = pd.to_datetime(df.index, unit="D", origin=reference_time) <NEW_LINE> df.set_index("Time", inplace=True) <NEW_LINE> <DEDENT> if sync_to_index is not None: <NEW_LINE> <INDENT> if type(sync_to_index) == pd.DataFrame or type(sync_to_index) == pd.Series: <NEW_LINE> <INDENT> sync_to_index = sync_to_index.index <NEW_LINE> <DEDENT> union_index = sync_to_index.union(df.index) <NEW_LINE> df = df.reindex(union_index).interpolate().loc[sync_to_index] <NEW_LINE> <DEDENT> return df
Returns the values of any history charting window as a dataframe. Calling the function without arguments returns a dictionary of all available histories :param hist_type: History Type. :type hist_type: str, int, ifm.Enum or None. :param hist_subtype: History Sub-Type (int) :type hist_subtype: int :param force_time_axis: If True, the index of the dataframe will be the simulation time in days. If False (default), the index type will be of type datetime if a reference time is set in the model, and simulation time in days otherwise. :type force_time_axis: bool :param reference_time: Specify (or override) a reference time. Note that this only accounts for this export and is not a permanent change of the model settings. :type reference_time: datetime.datetime
625941b89f2886367277a6f6
def _validate(self, rdn, properties, basedn): <NEW_LINE> <INDENT> if properties is None: <NEW_LINE> <INDENT> raise ldap.UNWILLING_TO_PERFORM('Invalid request to create. Properties cannot be None') <NEW_LINE> <DEDENT> if type(properties) != dict: <NEW_LINE> <INDENT> raise ldap.UNWILLING_TO_PERFORM("properties must be a dictionary") <NEW_LINE> <DEDENT> for attr in self._must_attributes: <NEW_LINE> <INDENT> if properties.get(attr, None) is None: <NEW_LINE> <INDENT> raise ldap.UNWILLING_TO_PERFORM('Attribute %s must not be None' % attr) <NEW_LINE> <DEDENT> <DEDENT> if properties.get(self._rdn_attribute, None) is None and rdn is None: <NEW_LINE> <INDENT> raise ldap.UNWILLING_TO_PERFORM('Attribute %s must not be None or rdn provided' % self._rdn_attribute) <NEW_LINE> <DEDENT> for k, v in properties.items(): <NEW_LINE> <INDENT> if isinstance(v, list): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> properties[k] = [v,] <NEW_LINE> <DEDENT> <DEDENT> tdn = self._dn <NEW_LINE> if tdn is None: <NEW_LINE> <INDENT> if basedn is None: <NEW_LINE> <INDENT> raise ldap.UNWILLING_TO_PERFORM('Invalid request to create. basedn cannot be None') <NEW_LINE> <DEDENT> if rdn is not None: <NEW_LINE> <INDENT> tdn = ensure_str('%s,%s' % (rdn, basedn)) <NEW_LINE> <DEDENT> elif properties.get(self._rdn_attribute, None) is not None: <NEW_LINE> <INDENT> v = properties.get(self._rdn_attribute) <NEW_LINE> rdn = ensure_str(v[0]) <NEW_LINE> erdn = ensure_str(ldap.dn.escape_dn_chars(rdn)) <NEW_LINE> self._log.debug("Using first property %s: %s as rdn" % (self._rdn_attribute, erdn)) <NEW_LINE> if erdn != rdn: <NEW_LINE> <INDENT> properties[self._rdn_attribute].append(erdn) <NEW_LINE> <DEDENT> tdn = ensure_str('%s=%s,%s' % (self._rdn_attribute, erdn, basedn)) <NEW_LINE> <DEDENT> <DEDENT> str_props = {} <NEW_LINE> for k, v in properties.items(): <NEW_LINE> <INDENT> str_props[k] = ensure_list_bytes(v) <NEW_LINE> <DEDENT> return (tdn, str_props)
Used to validate a create request. This way, it can be over-ridden without affecting the create types. It also checks that all the values in _must_attribute exist in some form in the dictionary. It has the useful trick of returning the dn, so subtypes can use extra properties to create the dn's here for this.
625941b8d164cc6175782bb3
def test_horovod_allgather(self): <NEW_LINE> <INDENT> hvd.init() <NEW_LINE> rank = hvd.rank() <NEW_LINE> size = hvd.size() <NEW_LINE> dtypes = ['int32', 'int64', 'float32', 'float64'] <NEW_LINE> dims = [1, 2, 3] <NEW_LINE> ctx = self._current_context() <NEW_LINE> for dtype, dim in itertools.product(dtypes, dims): <NEW_LINE> <INDENT> tensor = mx.ndarray.ones(shape=[17] * dim, dtype=dtype, ctx=ctx) * rank <NEW_LINE> gathered = hvd.allgather(tensor) <NEW_LINE> assert list(gathered.shape) == [17 * size] + [17] * (dim - 1) <NEW_LINE> for i in range(size): <NEW_LINE> <INDENT> rank_tensor = gathered[i * 17:(i + 1) * 17] <NEW_LINE> assert list(rank_tensor.shape) == [17] * dim, 'hvd.allgather produces incorrect gathered shape' <NEW_LINE> assert rank_tensor.min() == i, 'hvd.allgather produces incorrect gathered tensor' <NEW_LINE> assert rank_tensor.max() == i, 'hvd.allgather produces incorrect gathered tensor'
Test that the allgather correctly gathers 1D, 2D, 3D tensors.
625941b894891a1f4081b90d
def parse(description): <NEW_LINE> <INDENT> measurements = {'teaspoons', 'tablespoons', 'cup', 'cups', 'pints', 'pint', 'quarts', 'quart', 'ounce', 'ounces', 'dash', 'pinch', 'cube', 'cubes'} <NEW_LINE> printable = set(string.printable) <NEW_LINE> description = filter(lambda x: x in printable, description) <NEW_LINE> description = description.split(' ') <NEW_LINE> quantity = description[0].encode('utf8') <NEW_LINE> measurement, increments = None, None <NEW_LINE> if description[1] in measurements: <NEW_LINE> <INDENT> increments = description[1] <NEW_LINE> description = ' '.join(description[2:]) <NEW_LINE> measurement = measurement_lookup.by_name(increments) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> description = ' '.join(description[1:]) <NEW_LINE> <DEDENT> ingredient = ingredient_lookup.by_name(description) <NEW_LINE> if ingredient is None: <NEW_LINE> <INDENT> ingredient_modify.add(Ingredient(name=description)) <NEW_LINE> ingredient = ingredient_lookup.by_name(description) <NEW_LINE> <DEDENT> if measurement is None and increments != None: <NEW_LINE> <INDENT> measurement_modify.add(Measurement(name=increments)) <NEW_LINE> measurement = measurement_lookup.by_name(increments) <NEW_LINE> <DEDENT> if not measurement: <NEW_LINE> <INDENT> measurement = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> measurement = measurement.id <NEW_LINE> <DEDENT> return ingredient.id, measurement, quantity
Parse an ingredient text and insert into measurement/ingredients tables
625941b8435de62698dfdab9
def save_result(df): <NEW_LINE> <INDENT> df.to_csv(("../submit/submit_" + datetime.now().strftime('%Y%m%d_%H%M%S') + ".csv"), header=None, index=False)
Export the result data :param data: :return:
625941b89c8ee82313fbb5da
def gauss_seidel_iteration(A, b, tol=1e-9, Max_iter=5000): <NEW_LINE> <INDENT> D = np.diag(np.diag(A)) <NEW_LINE> L = -1 * np.tril(A - D) <NEW_LINE> U = -1 * np.triu(A - D) <NEW_LINE> B = np.linalg.inv(D - L).dot(U) <NEW_LINE> f = np.linalg.inv(D - L).dot(b) <NEW_LINE> x = np.ones_like(b) <NEW_LINE> k = 1 <NEW_LINE> y = B.dot(x) + f <NEW_LINE> while np.max(np.abs(y - x)) >= tol and k < Max_iter: <NEW_LINE> <INDENT> k += 1 <NEW_LINE> x = y <NEW_LINE> y = B.dot(x) + f <NEW_LINE> <DEDENT> return (y, k)
Solve linear equations by Gauss Seidel iteration method. Args: A: ndarray, coefficients matrix. b: ndarray, constant vector. tol: double, iteration accuracy, default=1e-9. Max_iter: int, maximum iteration number, default=5000. Returns: y: ndarray, solution vector. k: int, iteration number.
625941b8e8904600ed9f1d8e
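A usage sketch on a small diagonally dominant system, where Gauss-Seidel is guaranteed to converge:

import numpy as np

A = np.array([[4.0, 1.0],
              [1.0, 3.0]])
b = np.array([1.0, 2.0])
y, k = gauss_seidel_iteration(A, b)
print(np.allclose(A.dot(y), b))  # True; exact solution is [1/11, 7/11]
print(k)                         # number of iterations used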
def number_of_subscribers(subreddit): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> response = requests.get( url="{}/r/{}/about.json".format(base_url, subreddit), headers={'user-agent': 'APP-NAME by REDDIT-USERNAME'}, ) <NEW_LINE> data = response.json()['data'] <NEW_LINE> return data['subscribers'] <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return False
Return the number of subscribers of a given subreddit
625941b8be7bc26dc91cd46a
def find_shortest_path(graph, start, end, path=[]): <NEW_LINE> <INDENT> path = path + [start] <NEW_LINE> if start == end: <NEW_LINE> <INDENT> return path <NEW_LINE> <DEDENT> if start not in graph: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> shortest = None <NEW_LINE> for node in graph[start]: <NEW_LINE> <INDENT> if node not in path: <NEW_LINE> <INDENT> newpath = find_shortest_path(graph, node, end, path) <NEW_LINE> if newpath: <NEW_LINE> <INDENT> if not shortest or len(newpath) < len(shortest): <NEW_LINE> <INDENT> shortest = newpath <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return shortest
Find the shortest path between two nodes of a graph. Works on graphs like this: graph ={'A': ['B', 'C'], 'B': ['C', 'D'], 'C': ['D'], 'D': ['C'], 'E': ['F'], 'F': ['C']}
625941b8d486a94d0b98dfb2
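Running the function on the docstring's example graph:

graph = {'A': ['B', 'C'], 'B': ['C', 'D'], 'C': ['D'],
         'D': ['C'], 'E': ['F'], 'F': ['C']}
print(find_shortest_path(graph, 'A', 'D'))  # ['A', 'B', 'D']
print(find_shortest_path(graph, 'E', 'D'))  # ['E', 'F', 'C', 'D']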
def expm1(*args, **kwargs): <NEW_LINE> <INDENT> pass
Return exp(x)-1. This function avoids the loss of precision involved in the direct evaluation of exp(x)-1 for small x.
625941b80fa83653e4656e22
def post_ui_autopilot_waypoint(self, add_to_beginning, clear_other_waypoints, destination_id, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> if kwargs.get('callback'): <NEW_LINE> <INDENT> return self.post_ui_autopilot_waypoint_with_http_info(add_to_beginning, clear_other_waypoints, destination_id, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (data) = self.post_ui_autopilot_waypoint_with_http_info(add_to_beginning, clear_other_waypoints, destination_id, **kwargs) <NEW_LINE> return data
Set Autopilot Waypoint Set a solar system as autopilot waypoint --- This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.post_ui_autopilot_waypoint(add_to_beginning, clear_other_waypoints, destination_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param bool add_to_beginning: Whether this solar system should be added to the beginning of all waypoints (required) :param bool clear_other_waypoints: Whether clean other waypoints beforing adding this one (required) :param int destination_id: The destination to travel to, can be solar system, station or structure's id (required) :param str datasource: The server name you would like data from :param str token: Access token to use if unable to set a header :param str user_agent: Client identifier, takes precedence over headers :param str x_user_agent: Client identifier, takes precedence over User-Agent :return: None If the method is called asynchronously, returns the request thread.
625941b850485f2cf553cbfe
def assert_equal(first, second): <NEW_LINE> <INDENT> if _test is None: <NEW_LINE> <INDENT> assert first == second <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _test.assertEqual(first, second)
Assert two objects are equal.
625941b824f1403a926009cf
def _find_K(self, I): <NEW_LINE> <INDENT> K = 0 <NEW_LINE> for tuples in it.product(I, repeat=3): <NEW_LINE> <INDENT> K += self.triple[tuples] <NEW_LINE> <DEDENT> return K
We compute: K^I = \sum_{i,j,k \in I} triple[i,j,k] Parameters ---------- I : list list of Kähler indices sent to infinity triple : np.array[h11, h11, h11] triple intersection numbers Returns ------- int K^I
625941b8b545ff76a8913c84
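The sum in the docstring, checked directly against a dense stand-in for the triple intersection array (hypothetical values; the real triple lives on the instance):

import numpy as np

h11 = 3
triple = np.arange(h11 ** 3).reshape(h11, h11, h11)  # stand-in intersection numbers
I = [0, 2]
K = triple[np.ix_(I, I, I)].sum()
# identical to sum(triple[i, j, k] for i in I for j in I for k in I)
print(K)  # 104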
def __consumerAction(self, idx:int, dist:np.array) -> tuple: <NEW_LINE> <INDENT> _consumer = self.getConsommateur(idx) <NEW_LINE> _1 = _consumer.getDecision() <NEW_LINE> rayon = _consumer.getDecision() <NEW_LINE> _vrai = dist<=rayon <NEW_LINE> _who = dist[_vrai] <NEW_LINE> _2 = np.array(_consumer.preference) <NEW_LINE> _prices = np.array([(np.inf if self.__choix[_] is None else self.__choix[_][1]) for _ in range(self.firmes) ]) <NEW_LINE> if _vrai.sum()>1: <NEW_LINE> <INDENT> _3 = (1 - _2/_2.sum()) <NEW_LINE> _4 = _3*_prices <NEW_LINE> _vrai = _4==_4.min() <NEW_LINE> <DEDENT> if _vrai.sum()>1: <NEW_LINE> <INDENT> _vrai = _2==_2.max() <NEW_LINE> <DEDENT> if _vrai.sum()>1: <NEW_LINE> <INDENT> _vrai = _prices==_prices.min() <NEW_LINE> <DEDENT> if _vrai.sum()>1: <NEW_LINE> <INDENT> _vrai = dist==dist.min() <NEW_LINE> <DEDENT> if _vrai.sum()>1: <NEW_LINE> <INDENT> k = np.random.choice(np.arange(self.firmes)[_vrai]) <NEW_LINE> _vrai ^= _vrai <NEW_LINE> _vrai[k] = True <NEW_LINE> <DEDENT> assert _vrai.sum() <= 1, "oddities" <NEW_LINE> _penalty = np.float(- _consumer.cout(rayon) -1e-3) <NEW_LINE> _rew = _vrai*(_2 / _2.mean())* (np.float(_consumer.utilite) - _prices) + _penalty <NEW_LINE> _rew = np.where( np.isnan(_rew), _penalty, _rew) <NEW_LINE> if not isinstance(_consumer, PrefConso): <NEW_LINE> <INDENT> _rew = np.nanmax(_rew) <NEW_LINE> <DEDENT> _consumer.updateModel(_rew) <NEW_LINE> return _vrai.astype(bool), _1, _rew
Determine the reward and run updateModel @return a boolean vector + the choice + the reward
625941b8d18da76e23532337
def get_string(self): <NEW_LINE> <INDENT> return self.string
Return passport string
625941b8097d151d1a222cc1
def distance(self, *arg): <NEW_LINE> <INDENT> (start, end) = normalize_argv(arg, 2) <NEW_LINE> if to_int(start) is None or (to_int(end) is None and not self._is_running()): <NEW_LINE> <INDENT> self._missing_argument() <NEW_LINE> <DEDENT> sp = None <NEW_LINE> if end is None: <NEW_LINE> <INDENT> sp = peda.getreg("sp") <NEW_LINE> end = start <NEW_LINE> start = sp <NEW_LINE> <DEDENT> dist = end - start <NEW_LINE> text = "From 0x%x%s to 0x%x: " % (start, " (SP)" if start == sp else "", end) <NEW_LINE> text += "%d bytes, %d dwords%s" % (dist, dist/4, " (+%d bytes)" % (dist%4) if (dist%4 != 0) else "") <NEW_LINE> msg(text) <NEW_LINE> return
Calculate distance between two addresses Usage: MYNAME address (calculate from current $SP to address) MYNAME address1 address2
625941b863f4b57ef0000f87
def test_search_exit_status_code_when_finds_no_package(): <NEW_LINE> <INDENT> env = reset_env(use_distribute=True) <NEW_LINE> result = run_pip('search', 'non-existant-package', expect_error=True) <NEW_LINE> assert result.returncode == NO_MATCHES_FOUND
Test search exit status code for no matches
625941b8711fe17d825421d8
def test_blockquote(self): <NEW_LINE> <INDENT> text = '> This line should be wrapped in blockquotes\n> ##This is an H2 in a blockquote\nThis is not in blockquotes' <NEW_LINE> self.assertEqual( run_markdown(text), '<blockquote>\n<p>This line should be wrapped in blockquotes</p>\n<h2>This is an H2 in a blockquote</h2>\n</blockquote>\n<p>This is not in blockquotes</p>')
Lines preceded by > should be wrapped in 'blockquote' tags until the first line with no >
625941b8f548e778e58cd3e1
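Assuming run_markdown above is a thin wrapper over the Python-Markdown package, the behavior under test can be reproduced standalone:

import markdown  # pip install markdown

text = ("> This line should be wrapped in blockquotes\n"
        "> ##This is an H2 in a blockquote\n"
        "This is not in blockquotes")
print(markdown.markdown(text))  # blockquote wraps the first two lines only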
def get_uni_version(self): <NEW_LINE> <INDENT> version, major_version = None, None <NEW_LINE> target_uri = "/%s/system/version" % self.U4V_VERSION <NEW_LINE> response = self.get_request(target_uri, 'version') <NEW_LINE> if response and response.get('version'): <NEW_LINE> <INDENT> version = response['version'] <NEW_LINE> version_list = version.split('.') <NEW_LINE> major_version = version_list[0][1] + version_list[1] <NEW_LINE> <DEDENT> return version, major_version
Get the unisphere version from the server. :return: version and major_version(e.g. ("V8.4.0.16", "84"))
625941b80a50d4780f666cf5
def isDraw(self, board): <NEW_LINE> <INDENT> for i in range(9): <NEW_LINE> <INDENT> if board[i] == '-': <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True
Was the game a draw? Return: True|False
625941b8a219f33f346287da
def get_currently_display_image(self): <NEW_LINE> <INDENT> return self.images[self.current_frame]
Get the image that is currently displaying in this animation.
625941b899fddb7c1c9de1f9
def number_of_lines(filename=""): <NEW_LINE> <INDENT> with open(filename, encoding='utf-8') as f: <NEW_LINE> <INDENT> linenum = 0 <NEW_LINE> for lines in f: <NEW_LINE> <INDENT> linenum += 1 <NEW_LINE> <DEDENT> return linenum
Args: filename (file): The first parameter. Returns: int: number of lines in text file
625941b8e5267d203edcdb07
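A quick self-contained check of number_of_lines:

with open("sample.txt", "w", encoding="utf-8") as f:
    f.write("one\ntwo\nthree\n")
print(number_of_lines("sample.txt"))  # 3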
def copy_designer_template(self): <NEW_LINE> <INDENT> template_files = glob.glob('{}*.sdt'.format( FilePaths().defaultConfigPath() )) <NEW_LINE> templates_path = composer_template_path() <NEW_LINE> for temp_file in template_files: <NEW_LINE> <INDENT> destination_file = os.path.join( templates_path, os.path.basename(temp_file)) <NEW_LINE> if not os.path.isfile(destination_file): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> shutil.copyfile(temp_file, destination_file) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> os.makedirs(templates_path) <NEW_LINE> shutil.copyfile(temp_file, destination_file)
Copies designer templates from the templates folder in the plugin. :return: :rtype:
625941b8046cf37aa974cbb0
def main(): <NEW_LINE> <INDENT> os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'iais.settings') <NEW_LINE> try: <NEW_LINE> <INDENT> from django.core.management import execute_from_command_line <NEW_LINE> <DEDENT> except ImportError as exc: <NEW_LINE> <INDENT> raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc <NEW_LINE> <DEDENT> execute_from_command_line(sys.argv)
Run administrative tasks.
625941b87047854f462a1273
def flatlandSpaceStations(n, c): <NEW_LINE> <INDENT> c.sort() <NEW_LINE> c = [-c[0]] + c + [2*(n-1)-c[-1]] <NEW_LINE> return int(max(c[i]-c[i-1] for i in range(1, len(c)))/2)
The cheap trick: create virtual points at both ends, as if there were stations mirroring the outermost ones, so the exercise reduces to a simple max over the array
625941b80a366e3fb873e67d
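Two worked examples of the mirror trick (this appears to be the HackerRank problem of the same name): the padded array turns the edge cities into interior ones, so half the largest gap between consecutive stations is the answer:

print(flatlandSpaceStations(5, [0, 4]))  # 2: city 2 is two units from either station
print(flatlandSpaceStations(6, [0]))     # 5: city 5 is five units from the only station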
def freq_peak(pop, bandwidth='silverman', max_precision=0.05): <NEW_LINE> <INDENT> if isinstance(bandwidth, (int, float)): <NEW_LINE> <INDENT> bw = bandwidth / np.std(pop, ddof=1) <NEW_LINE> kde = gaussian_kde(pop, bw_method=bw) <NEW_LINE> <DEDENT> elif isinstance(bandwidth, str): <NEW_LINE> <INDENT> kde = gaussian_kde(pop, bw_method=bandwidth) <NEW_LINE> bw = round(kde.covariance_factor() * pop.std(ddof=1), 2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("bandwidth must be integer, float, or plug-in methods 'silverman' or 'scott'") <NEW_LINE> <DEDENT> xgrid = gen_xgrid(pop.min(), pop.max(), max_precision) <NEW_LINE> densities = kde(xgrid) <NEW_LINE> y_max, peak_grain_size = np.max(densities), xgrid[np.argmax(densities)] <NEW_LINE> return (xgrid, densities), peak_grain_size, y_max, bw
Returns the peak of the frequency ("mode") of a continuous distribution based on the Gaussian kernel density estimator. It uses Scipy's gaussian kde method. Parameters ---------- pop : array_like the diameters of the grains bandwidth : string, positive scalar or callable the method to estimate the bandwidth or a scalar directly defining the bandwidth. Methods can be 'silverman' or 'scott'. max_precision : positive scalar, default is 0.05 the maximum precision expected for the "peak" estimator. Call functions -------------- - gen_xgrid - gaussian_kde from scipy Returns ------- the x and y values to construct the kde (tuple), the mode or peak grain size, the density value of the peak, the bandwidth
625941b8009cb60464c63222
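A usage sketch, assuming freq_peak and its module helper gen_xgrid are importable together:

import numpy as np

rng = np.random.default_rng(0)
pop = rng.lognormal(mean=3.0, sigma=0.4, size=500)  # synthetic grain diameters
(xgrid, densities), peak, y_max, bw = freq_peak(pop, bandwidth="silverman")
print(peak, y_max, bw)  # KDE mode, its density, and the bandwidth used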
def interval(self, irc, msg, args): <NEW_LINE> <INDENT> data = self._interval() <NEW_LINE> if data is None or data == '': <NEW_LINE> <INDENT> irc.error("Failed to retrieve data. Try again later.") <NEW_LINE> return <NEW_LINE> <DEDENT> irc.reply(data)
takes no arguments Shows average interval, in seconds, between last 1000 blocks.
625941b897e22403b379cdff
def test_add_offer_invalid_trade_type(new_order_book: Callable[[], OrderBook]) -> NoReturn: <NEW_LINE> <INDENT> book = new_order_book <NEW_LINE> trade_type = 'foo' <NEW_LINE> price = 1 <NEW_LINE> quantity = 1 <NEW_LINE> with pytest.raises(ParamValueException): <NEW_LINE> <INDENT> book.add_offer(trade_type, price, quantity)
Test adding a new offer with an invalid trade type
625941b8656771135c3eb6d8
def findCelebrity(self, n): <NEW_LINE> <INDENT> if n == 0: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> if n == 1: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> self.crowd = set([i for i in range(n)]) <NEW_LINE> while len(self.crowd) > 1: <NEW_LINE> <INDENT> crowd_list = list(self.crowd) <NEW_LINE> if len(crowd_list) % 2 == 1: <NEW_LINE> <INDENT> self.filter(crowd_list.pop(), crowd_list[0]) <NEW_LINE> <DEDENT> for i in range(len(crowd_list) // 2): <NEW_LINE> <INDENT> self.filter(crowd_list[i], crowd_list[len(crowd_list) - 1 - i]) <NEW_LINE> <DEDENT> <DEDENT> if len(self.crowd) == 0: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> e = self.crowd.pop() <NEW_LINE> for i in range(n): <NEW_LINE> <INDENT> if i != e and not (knows(i, e) and not knows(e, i)): <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> <DEDENT> return e
:type n: int :rtype: int
625941b8236d856c2ad44644
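findCelebrity assumes a module-level knows(a, b) predicate in the classic Celebrity problem setup. A hypothetical harness showing the pairwise elimination that the filter step above relies on (each comparison rules out exactly one candidate):

# Hypothetical relation: person 1 is known by everyone and knows no one.
RELATION = [[True, True, False],
            [False, True, False],
            [False, True, True]]

def knows(a, b):
    return RELATION[a][b]

def filter_pair(a, b):
    # If a knows b, a cannot be the celebrity; otherwise b cannot be.
    return b if knows(a, b) else a

candidate = 0
for person in range(1, 3):
    candidate = filter_pair(candidate, person)
print(candidate)  # 1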
def _exec_sysprep_tasks(self): <NEW_LINE> <INDENT> tasks = self.list_syspreps() <NEW_LINE> enabled = [t for t in tasks if self.sysprep_enabled(t)] <NEW_LINE> size = len(enabled) <NEW_LINE> enabled = [t for t in enabled if self.sysprep_info(t).name != 'shrink'] <NEW_LINE> if len(enabled) != size: <NEW_LINE> <INDENT> enabled.append(self._shrink) <NEW_LINE> <DEDENT> enabled = [t for t in enabled if self.sysprep_info(t).name != 'microsoft-sysprep'] <NEW_LINE> if len(enabled) != size: <NEW_LINE> <INDENT> enabled.append(self._microsoft_sysprep) <NEW_LINE> <DEDENT> cnt = 0 <NEW_LINE> for task in enabled: <NEW_LINE> <INDENT> cnt += 1 <NEW_LINE> self.out.info(('(%d/%d)' % (cnt, size)).ljust(7), False) <NEW_LINE> task() <NEW_LINE> del self._sysprep_tasks[task.__name__] <NEW_LINE> <DEDENT> self.out.info("Sending shut down command ...", False) <NEW_LINE> if not self.sysprepped: <NEW_LINE> <INDENT> self._shutdown() <NEW_LINE> <DEDENT> self.out.success("done")
This function hosts the actual code for executing the enabled sysprep tasks. At the end of this method the VM is shut down if needed.
625941b8e1aae11d1e749b1a
def sample(self, points_to_sample, mode="constant", cval=False, **kwargs): <NEW_LINE> <INDENT> return Image.sample(self, points_to_sample, order=0, mode=mode, cval=cval)
Sample this image at the given sub-pixel accurate points. The input PointCloud should have the same number of dimensions as the image e.g. a 2D PointCloud for a 2D multi-channel image. A numpy array will be returned that has the values for every given point across each channel of the image. Parameters ---------- points_to_sample : :map:`PointCloud` Array of points to sample from the image. Should be `(n_points, n_dims)` mode : ``{constant, nearest, reflect, wrap}``, optional Points outside the boundaries of the input are filled according to the given mode. cval : `float`, optional Used in conjunction with mode ``constant``, the value outside the image boundaries. Returns ------- sampled_pixels : (`n_points`, `n_channels`) `bool ndarray` The interpolated values taken across every channel of the image.
625941b88e05c05ec3eea1d7
def test_get_next_id(): <NEW_LINE> <INDENT> uid = db.get_next_id() <NEW_LINE> assert type(uid) == int <NEW_LINE> item = client.get('til_stats', 'data') <NEW_LINE> item['idCounter'] -= 1 <NEW_LINE> client.put(item.collection, item.key, item.json, item.ref).raise_for_status()
Testing get_next_id
625941b8d10714528d5ffb45
def get_queryset(self): <NEW_LINE> <INDENT> return Question.objects.order_by('-pub_date')[:5]
Return the last five published questions.
625941b88a43f66fc4b53ecf
def point(points, colors, opacity=1, point_radius=0.1, theta=8, phi=8): <NEW_LINE> <INDENT> if np.array(colors).ndim == 1: <NEW_LINE> <INDENT> colors = np.tile(colors, (len(points), 1)) <NEW_LINE> <DEDENT> scalars = vtk.vtkUnsignedCharArray() <NEW_LINE> scalars.SetNumberOfComponents(3) <NEW_LINE> pts = vtk.vtkPoints() <NEW_LINE> cnt_colors = 0 <NEW_LINE> for p in points: <NEW_LINE> <INDENT> pts.InsertNextPoint(p[0], p[1], p[2]) <NEW_LINE> scalars.InsertNextTuple3( round(255 * colors[cnt_colors][0]), round(255 * colors[cnt_colors][1]), round(255 * colors[cnt_colors][2])) <NEW_LINE> cnt_colors += 1 <NEW_LINE> <DEDENT> src = vtk.vtkSphereSource() <NEW_LINE> src.SetRadius(point_radius) <NEW_LINE> src.SetThetaResolution(theta) <NEW_LINE> src.SetPhiResolution(phi) <NEW_LINE> polyData = vtk.vtkPolyData() <NEW_LINE> polyData.SetPoints(pts) <NEW_LINE> polyData.GetPointData().SetScalars(scalars) <NEW_LINE> glyph = vtk.vtkGlyph3D() <NEW_LINE> glyph.SetSourceConnection(src.GetOutputPort()) <NEW_LINE> if major_version <= 5: <NEW_LINE> <INDENT> glyph.SetInput(polyData) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> glyph.SetInputData(polyData) <NEW_LINE> <DEDENT> glyph.SetColorModeToColorByScalar() <NEW_LINE> glyph.SetScaleModeToDataScalingOff() <NEW_LINE> mapper = vtk.vtkPolyDataMapper() <NEW_LINE> if major_version <= 5: <NEW_LINE> <INDENT> mapper.SetInput(glyph.GetOutput()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mapper.SetInputData(glyph.GetOutput()) <NEW_LINE> <DEDENT> actor = vtk.vtkActor() <NEW_LINE> actor.SetMapper(mapper) <NEW_LINE> actor.GetProperty().SetOpacity(opacity) <NEW_LINE> return actor
Visualize points as sphere glyphs Parameters ---------- points : ndarray, shape (N, 3) colors : ndarray (N,3) or tuple (3,) point_radius : float theta : int phi : int Returns ------- vtkActor Examples -------- >>> from dipy.viz import fvtk >>> ren = fvtk.ren() >>> pts = np.random.rand(5, 3) >>> point_actor = fvtk.point(pts, fvtk.colors.coral) >>> fvtk.add(ren, point_actor) >>> #fvtk.show(ren)
625941b8a17c0f6771cbdeba
@builder.rule('ElementList', w=1) <NEW_LINE> def _(m): <NEW_LINE> <INDENT> return m.make('AssignmentExpression')
ElementList: AssignmentExpression
625941b86e29344779a6247b
def __init__(self, servant=None, stewards=None, name='', bufsize=8096, wl=None, ha=None, host=u'', port=None, eha=None, scheme=u'', dictable=False, timeout=None, **kwa): <NEW_LINE> <INDENT> self.stewards = stewards if stewards is not None else dict() <NEW_LINE> self.dictable = True if dictable else False <NEW_LINE> if timeout is None: <NEW_LINE> <INDENT> timeout = self.Timeout <NEW_LINE> <DEDENT> ha = ha or (host, port) <NEW_LINE> if servant: <NEW_LINE> <INDENT> if isinstance(servant, tcp.ServerTls): <NEW_LINE> <INDENT> if scheme and scheme != u'https': <NEW_LINE> <INDENT> raise ValueError("Provided scheme '{0}' incompatible with servant".format(scheme)) <NEW_LINE> <DEDENT> secured = True <NEW_LINE> scheme = u'https' <NEW_LINE> defaultPort = 443 <NEW_LINE> <DEDENT> elif isinstance(servant, tcp.Server): <NEW_LINE> <INDENT> if scheme and scheme != u'http': <NEW_LINE> <INDENT> raise ValueError("Provided scheme '{0}' incompatible with servant".format(scheme)) <NEW_LINE> <DEDENT> secured = False <NEW_LINE> scheme = 'http' <NEW_LINE> defaultPort = 80 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Invalid servant type {0}".format(type(servant))) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> scheme = u'https' if scheme == u'https' else u'http' <NEW_LINE> if scheme == u'https': <NEW_LINE> <INDENT> secured = True <NEW_LINE> defaultPort = 443 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> secured = False <NEW_LINE> defaultPort = 80 <NEW_LINE> <DEDENT> <DEDENT> host, port = ha <NEW_LINE> port = port or defaultPort <NEW_LINE> ha = (host, port) <NEW_LINE> if servant: <NEW_LINE> <INDENT> if servant.ha != ha: <NEW_LINE> <INDENT> raise ValueError("Provided ha '{0}:{1}' incompatible with servant".format(ha[0], ha[1])) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if secured: <NEW_LINE> <INDENT> servant = tcp.ServerTls(name=name, ha=ha, eha=eha, bufsize=bufsize, wl=wl, tymeout=timeout, **kwa) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> servant = tcp.Server(name=name, ha=ha, eha=eha, bufsize=bufsize, wl=wl, tymeout=timeout, **kwa) <NEW_LINE> <DEDENT> <DEDENT> self.secured = secured <NEW_LINE> self.servant = servant
Initialization method for instance. servant = instance of Server or ServerTls or None stewards = dict of Steward instances kwa needed to pass additional parameters to servant if servant instances are not provided (None), some or all of these parameters will be used for initialization name = user friendly name for servant bufsize = buffer size wl = WireLog instance if any ha = host address duple (host, port) for local servant listen socket host = host address for local servant listen socket, '' means any interface on host port = socket port for local servant listen socket eha = external destination address for incoming connections used in TLS scheme = http scheme u'http' or u'https' or empty dictable = Boolean flag If True attempt to convert body from json for requestants
625941b8711fe17d825421d9
def nextLine(length, arr): <NEW_LINE> <INDENT> line = "" <NEW_LINE> prev = 0 <NEW_LINE> while not length == 0: <NEW_LINE> <INDENT> l = randint(0, length) <NEW_LINE> if l == prev: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> prev = l <NEW_LINE> line = line+nextWord(l, arr)+' ' <NEW_LINE> length -= l <NEW_LINE> <DEDENT> return line
Returns a string with the given number of syllables. length - the "length" of the word in syllables arr - the array of possible words
625941b824f1403a926009d0
def _get_components(self, *component_types: _Type) -> _Iterable[_Tuple[int, ...]]: <NEW_LINE> <INDENT> entity_db = self._entities <NEW_LINE> comp_db = self._components <NEW_LINE> try: <NEW_LINE> <INDENT> for entity in set.intersection(*[comp_db[ct] for ct in component_types]): <NEW_LINE> <INDENT> yield entity, [entity_db[entity][ct] for ct in component_types] <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass
Get an iterator for Entity and multiple Component sets. :param component_types: Two or more Component types. :return: An iterator for Entity, (Component1, Component2, etc) tuples.
625941b8b5575c28eb68de64
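A short usage sketch for the _get_components query above; the `world` instance name and the `Position`/`Velocity` component classes are illustrative assumptions, not part of the entry:

# Illustrative component classes; any plain classes can serve as component types.
class Position:
    def __init__(self, x=0.0, y=0.0):
        self.x, self.y = x, y

class Velocity:
    def __init__(self, dx=0.0, dy=0.0):
        self.dx, self.dy = dx, dy

# `world` stands for an instance of the class that owns _get_components.
for entity, (pos, vel) in world._get_components(Position, Velocity):
    # Each yielded tuple pairs an entity id with its requested components.
    pos.x += vel.dx
    pos.y += vel.dy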
def setUp(self): <NEW_LINE> <INDENT> self.user_name = os.environ['TRAKT_TEST_USER'] <NEW_LINE> self.calendar = UserCalendar(self.user_name)
Create a UserCalendar and hold onto it
625941b8167d2b6e31218a04
def input_signature(self): <NEW_LINE> <INDENT> return _digital_swig.digital_diff_decoder_bb_sptr_input_signature(self)
input_signature(self) -> gr_io_signature_sptr
625941b8d53ae8145f87a0dc
def add_binding(self, *keys, **kwargs): <NEW_LINE> <INDENT> filter = kwargs.pop('filter', None) or NoFilter() <NEW_LINE> assert not kwargs <NEW_LINE> assert keys <NEW_LINE> assert isinstance(filter, Filter), 'Expected Filter instance, got %r' % filter <NEW_LINE> def decorator(func): <NEW_LINE> <INDENT> self.key_bindings.append(_Binding(keys, func, filter=filter)) <NEW_LINE> return func <NEW_LINE> <DEDENT> return decorator
Decorator for annotating key bindings.
625941b816aa5153ce3622de
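A hedged usage sketch of the add_binding decorator above; `registry` stands for an instance of the containing class, and the key strings and handler body are illustrative:

# `registry` is assumed to be an instance of the class defining add_binding.
@registry.add_binding('c-x', 'c-c')
def _(event):
    # Handler invoked when the two-key sequence is pressed.
    print('C-x C-c pressed')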
def send(self, message): <NEW_LINE> <INDENT> if message.control and message.topic == 'mail:new': <NEW_LINE> <INDENT> return self.new_mail(message)
:type message: simargl.message.Message :param message: the message to handle :return: the result of ``new_mail`` when *message* is a 'mail:new' control message, otherwise None
625941b8851cf427c661a380
def get_pin_status(self): <NEW_LINE> <INDENT> for p in self.pins: <NEW_LINE> <INDENT> if self.hasGPIO: <NEW_LINE> <INDENT> p.update_status(GPIO.input(p.get_num())) <NEW_LINE> <DEDENT> <DEDENT> return self.pins
:return: an array of Pin objects with updated status (updated by reading the GPIO channels)
625941b8d58c6744b4257ac7
def sort_by_posts(d): <NEW_LINE> <INDENT> rslt = [ {'first_name':u['name'].split()[0], 'last_name':u['name'].split()[-1], 'posts':u['posts']} for u in d if u['posts'] > 0 ] <NEW_LINE> rslt.sort(key=operator.itemgetter('posts')) <NEW_LINE> rslt.reverse() <NEW_LINE> return rslt
Task 2.2: Write a function that returns a list of objects with the following structure: first_name, last_name, posts, ordered by number of posts, removing any users that have not made any posts.
625941b85fdd1c0f98dc0098
def read_from_file(path="", raw=False): <NEW_LINE> <INDENT> if not path: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if not xbmcvfs.exists(path): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> with open(path) as f: <NEW_LINE> <INDENT> log("opened textfile %s." % (path)) <NEW_LINE> if not raw: <NEW_LINE> <INDENT> result = json.load(f) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = f.read() <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> log("failed to load textfile: " + path) <NEW_LINE> return False
Return data from the file at *path*, parsed as JSON unless ``raw`` is True.
625941b8dd821e528d63b012
def set_exp_priority(self, exp_priority): <NEW_LINE> <INDENT> self.set_param('exp_priority', exp_priority)
Exposure priority (0.8 - exposure 80%, gain 20%). XI_PRM_EXP_PRIORITY
625941b855399d3f0558851a
def hash_pandas_object( obj, index: bool = True, encoding: str = "utf8", hash_key: Optional[str] = _default_hash_key, categorize: bool = True, ): <NEW_LINE> <INDENT> from my_happy_pandas import Series <NEW_LINE> if hash_key is None: <NEW_LINE> <INDENT> hash_key = _default_hash_key <NEW_LINE> <DEDENT> if isinstance(obj, ABCMultiIndex): <NEW_LINE> <INDENT> return Series(hash_tuples(obj, encoding, hash_key), dtype="uint64", copy=False) <NEW_LINE> <DEDENT> elif isinstance(obj, ABCIndexClass): <NEW_LINE> <INDENT> h = hash_array(obj._values, encoding, hash_key, categorize).astype( "uint64", copy=False ) <NEW_LINE> h = Series(h, index=obj, dtype="uint64", copy=False) <NEW_LINE> <DEDENT> elif isinstance(obj, ABCSeries): <NEW_LINE> <INDENT> h = hash_array(obj._values, encoding, hash_key, categorize).astype( "uint64", copy=False ) <NEW_LINE> if index: <NEW_LINE> <INDENT> index_iter = ( hash_pandas_object( obj.index, index=False, encoding=encoding, hash_key=hash_key, categorize=categorize, )._values for _ in [None] ) <NEW_LINE> arrays = itertools.chain([h], index_iter) <NEW_LINE> h = _combine_hash_arrays(arrays, 2) <NEW_LINE> <DEDENT> h = Series(h, index=obj.index, dtype="uint64", copy=False) <NEW_LINE> <DEDENT> elif isinstance(obj, ABCDataFrame): <NEW_LINE> <INDENT> hashes = (hash_array(series._values) for _, series in obj.items()) <NEW_LINE> num_items = len(obj.columns) <NEW_LINE> if index: <NEW_LINE> <INDENT> index_hash_generator = ( hash_pandas_object( obj.index, index=False, encoding=encoding, hash_key=hash_key, categorize=categorize, )._values for _ in [None] ) <NEW_LINE> num_items += 1 <NEW_LINE> _hashes = itertools.chain(hashes, index_hash_generator) <NEW_LINE> hashes = (x for x in _hashes) <NEW_LINE> <DEDENT> h = _combine_hash_arrays(hashes, num_items) <NEW_LINE> h = Series(h, index=obj.index, dtype="uint64", copy=False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError(f"Unexpected type for hashing {type(obj)}") <NEW_LINE> <DEDENT> return h
Return a data hash of the Index/Series/DataFrame. Parameters ---------- index : bool, default True Include the index in the hash (if Series/DataFrame). encoding : str, default 'utf8' Encoding for data & key when strings. hash_key : str, default _default_hash_key Hash_key for string key to encode. categorize : bool, default True Whether to first categorize object arrays before hashing. This is more efficient when the array contains duplicate values. Returns ------- Series of uint64, same length as the object
625941b8cc40096d615957ba
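For context, the same helper is exposed publicly in upstream pandas as `pandas.util.hash_pandas_object`; a short usage sketch (the frame contents here are illustrative):

import pandas as pd
from pandas.util import hash_pandas_object

df = pd.DataFrame({'a': [1, 2, 3], 'b': ['x', 'y', 'z']})
row_hashes = hash_pandas_object(df)                # uint64 Series, one hash per row
body_hashes = hash_pandas_object(df, index=False)  # same, but ignoring the index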
def set_intercept(self, X_mean, y_mean, X_std): <NEW_LINE> <INDENT> self.coef_ = self.coef_ / X_std <NEW_LINE> self.intercept_ = y_mean - np.einsum('ij,ij->i',X_mean,self.coef_)
Calculate the intercept_
625941b86e29344779a6247c
def __init__(self, temboo_session): <NEW_LINE> <INDENT> Choreography.__init__(self, temboo_session, '/Library/Google/Drive/Permissions/List')
Create a new instance of the List Choreo. A TembooSession object, containing a valid set of Temboo credentials, must be supplied.
625941b8c432627299f04aaa
def get_app_secret() -> str: <NEW_LINE> <INDENT> app_secret = _reg.get('tumblr.app_secret') <NEW_LINE> if not app_secret: <NEW_LINE> <INDENT> raise _error.AppSecretNotSet("Configuration parameter 'tumblr.app_secret' is not set") <NEW_LINE> <DEDENT> return app_secret
Get application's secret key.
625941b8e64d504609d746a7
def _sample_par_sim_pairs(self, n_samples, n_samples_per_param): <NEW_LINE> <INDENT> self.n_samples_per_param = n_samples_per_param <NEW_LINE> self.accepted_parameters_manager.broadcast(self.backend, 1) <NEW_LINE> rng_pds = self._generate_rng_pds(n_samples) <NEW_LINE> parameters_simulations_pds = self.backend.map(self._sample_parameter_simulation, rng_pds) <NEW_LINE> parameters_simulations = self.backend.collect(parameters_simulations_pds) <NEW_LINE> parameters, simulations = [list(t) for t in zip(*parameters_simulations)] <NEW_LINE> parameters = np.array(parameters) <NEW_LINE> simulations = np.array(simulations) <NEW_LINE> parameters = parameters.reshape((parameters.shape[0], parameters.shape[1])) <NEW_LINE> simulations = simulations.reshape((simulations.shape[0], simulations.shape[2], simulations.shape[3],)) <NEW_LINE> return parameters, simulations
Not for end use; please use `sample_par_sim_pairs`. Samples (parameter, simulation) pairs from the prior distribution and the model distribution. Specifically, parameter values are sampled from the prior and used to generate the specified number of simulations per parameter value. This returns arrays. This is a helper function called by the main `sample_par_sim_pairs` one in order to split drawing from the prior in chunks to avoid parallelization issues with MPI. Parameters ---------- n_samples: integer Number of samples to generate n_samples_per_param: integer Number of data points in each simulated data set. Returns ------- tuple A tuple of numpy.ndarray's containing parameter and simulation values. The first element of the tuple is an array with shape (n_samples, d_theta), where d_theta is the dimension of the parameters. The second element of the tuple is an array with shape (n_samples, n_samples_per_param, d_x), where d_x is the dimension of each simulation.
625941b850485f2cf553cbff
def test_loopBlocks(self): <NEW_LINE> <INDENT> expfile = path.join(self.exp.prefsPaths['tests'], 'data', 'testLoopsBlocks.psyexp') <NEW_LINE> self.exp.loadFromXML(expfile) <NEW_LINE> datafileBase = os.path.join(self.tmp_dir, 'testLoopsBlocks') <NEW_LINE> datafileBaseRel = os.path.relpath(datafileBase,expfile) <NEW_LINE> self.exp.settings.params['Data filename'].val = repr(datafileBaseRel) <NEW_LINE> script = self.exp.writeScript(expPath=expfile) <NEW_LINE> py_file = os.path.join(self.tmp_dir, 'testLoopBlocks.py') <NEW_LINE> with codecs.open(py_file, 'w', 'utf-8-sig') as f: <NEW_LINE> <INDENT> f.write(script) <NEW_LINE> <DEDENT> stdout, stderr = core.shellCall([sys.executable, py_file], stderr=True) <NEW_LINE> if stderr: <NEW_LINE> <INDENT> with codecs.open(expfile + "_local.py", "w", 'utf-8-sig') as f: <NEW_LINE> <INDENT> f.write(script) <NEW_LINE> <DEDENT> raise AssertionError(f"File {py_file} raised error:\n {stderr}") <NEW_LINE> <DEDENT> print("searching..." +datafileBase) <NEW_LINE> print(glob.glob(datafileBase+'*')) <NEW_LINE> f = open(datafileBase+".csv", 'rb') <NEW_LINE> dat = numpy.recfromcsv(f, case_sensitive=True) <NEW_LINE> f.close() <NEW_LINE> assert len(dat)==8
An experiment file with loops and blocks: compile it to a script, run it, and check that the resulting data file contains the expected 8 rows.
625941b8099cdd3c635f0ac3
def output_stats(self): <NEW_LINE> <INDENT> aux_shape = (1, len(self.ev_mat)) <NEW_LINE> plt.figure(self.bias_fig.number) <NEW_LINE> per = 20000 <NEW_LINE> ev = self.ev_mat.copy() <NEW_LINE> ev = np.reshape(ev, aux_shape)[np.max([0, len(ev)-per]):] <NEW_LINE> perf = self.perf_mat.copy() <NEW_LINE> perf = np.reshape(perf, aux_shape)[np.max([0, len(perf)-per]):] <NEW_LINE> action = self.action.copy() <NEW_LINE> action = np.reshape(action, aux_shape)[np.max([0, len(action)-per]):] <NEW_LINE> an.plot_psychometric_curves(ev, perf, action, blk_dur=self.block_dur) <NEW_LINE> self.bias_fig.canvas.draw() <NEW_LINE> plt.figure(self.perf_fig.number) <NEW_LINE> ev = self.ev_mat.copy() <NEW_LINE> ev = np.reshape(ev, aux_shape)[np.max([0, len(ev)-per]):] <NEW_LINE> perf = self.perf_mat.copy() <NEW_LINE> perf = np.reshape(perf, aux_shape)[np.max([0, len(perf)-per]):] <NEW_LINE> action = self.action.copy() <NEW_LINE> action = np.reshape(action, aux_shape)[np.max([0, len(action)-per]):] <NEW_LINE> stim_pos = self.stm_pos.copy() <NEW_LINE> stim_pos = np.reshape(stim_pos, aux_shape)[np.max([0, len(stim_pos)-per]):] <NEW_LINE> an.plot_learning(perf, ev, stim_pos, action) <NEW_LINE> self.perf_fig.canvas.draw() <NEW_LINE> print('--------------------')
plot temporary learning and bias curves
625941b863d6d428bbe44356
def get_corners(self, file_path): <NEW_LINE> <INDENT> file_path = file_path.decode('utf-8') <NEW_LINE> filename = str(file_path).split('/')[-1] <NEW_LINE> pts = np.zeros((4, 2)) <NEW_LINE> rows = self.labels[self.labels['filename']==filename] <NEW_LINE> for idx, row in rows.iterrows(): <NEW_LINE> <INDENT> row_c = row['class'] <NEW_LINE> if row_c=='topleft': <NEW_LINE> <INDENT> pts_loc = 0 <NEW_LINE> <DEDENT> elif row_c=='topright': <NEW_LINE> <INDENT> pts_loc = 1 <NEW_LINE> <DEDENT> elif row_c=='bottomright': <NEW_LINE> <INDENT> pts_loc = 2 <NEW_LINE> <DEDENT> elif row_c=='bottomleft': <NEW_LINE> <INDENT> pts_loc = 3 <NEW_LINE> <DEDENT> pts[pts_loc] = np.array([row['x'], row['y']]) <NEW_LINE> <DEDENT> return pts
Get the corner points (topleft, topright, bottomright, bottomleft) for the given file path.
625941b8fff4ab517eb2f2a1
def unbroadcast(out, in_shape): <NEW_LINE> <INDENT> if in_shape == (1,): <NEW_LINE> <INDENT> sum_axis = None <NEW_LINE> return out.sum(axis=sum_axis).reshape(in_shape) <NEW_LINE> <DEDENT> original_in_shape = in_shape <NEW_LINE> if len(in_shape) == 1: <NEW_LINE> <INDENT> n = in_shape[0] <NEW_LINE> index = out.shape[::-1].index(n) <NEW_LINE> temp_axis = [n if i == index else 1 for i in range(len(out.shape))] <NEW_LINE> in_shape = temp_axis[::-1] <NEW_LINE> <DEDENT> sum_axis = tuple([dim for dim in range(len(in_shape)) if in_shape[dim]==1 and out.shape[dim]>1]) <NEW_LINE> return out.sum(axis=sum_axis).reshape(original_in_shape)
Sum the gradients of the output in the case that broadcasting was performed during the calculation of a result. This effectively avoids explicitly splitting a broadcasting operation into several clone modules beforehand.
625941b8b7558d58953c4d82
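A worked usage sketch of `unbroadcast`, assuming a NumPy-backed autograd context: a (3,) bias was broadcast over a (4, 3) activation in the forward pass, so the upstream gradient must be summed back down to (3,):

import numpy as np

# Upstream gradient has the broadcasted shape (4, 3).
grad_out = np.ones((4, 3))

# Reduce it to the bias's original shape (3,): sums over the broadcast axis.
grad_bias = unbroadcast(grad_out, (3,))
assert grad_bias.shape == (3,)
assert np.all(grad_bias == 4.0)  # four rows contributed to each bias entry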
def requires_auth(f): <NEW_LINE> <INDENT> @wraps(f) <NEW_LINE> def decorated(*args, **kwargs): <NEW_LINE> <INDENT> token = get_token_auth_header() <NEW_LINE> jsonurl = urlopen(f'https://{AUTH0_DOMAIN}/.well-known/jwks.json') <NEW_LINE> jwks = json.loads(jsonurl.read()) <NEW_LINE> unverified_header = jwt.get_unverified_header(token) <NEW_LINE> rsa_key = {} <NEW_LINE> for key in jwks['keys']: <NEW_LINE> <INDENT> if key['kid'] == unverified_header['kid']: <NEW_LINE> <INDENT> rsa_key = { 'kty': key['kty'], 'kid': key['kid'], 'use': key['use'], 'n': key['n'], 'e': key['e'] } <NEW_LINE> <DEDENT> <DEDENT> if rsa_key: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> payload = jwt.decode( token, rsa_key, algorithms=ALGORITHMS, audience=API_AUDIENCE, issuer='https://' + AUTH0_DOMAIN + '/' ) <NEW_LINE> <DEDENT> except jwt.ExpiredSignatureError: <NEW_LINE> <INDENT> raise AuthError({ 'code': 'token_expired', 'description': 'Token expired.' }, 401) <NEW_LINE> <DEDENT> except jwt.JWTClaimsError: <NEW_LINE> <INDENT> raise AuthError({ 'code': 'invalid_claims', 'description': 'Incorrect claims. Please, check the audience and issuer.' }, 401) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> raise AuthError({ 'code': 'invalid_header', 'description': 'Unable to parse authentication token.' }, 400) <NEW_LINE> <DEDENT> _request_ctx_stack.top.current_user = payload <NEW_LINE> return f(*args, **kwargs) <NEW_LINE> <DEDENT> raise AuthError({ 'code': 'invalid_header', 'description': 'Unable to find the appropriate key.' }, 400) <NEW_LINE> <DEDENT> return decorated
Determines if the Access Token is valid
625941b8627d3e7fe0d68cb5
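A hedged usage sketch of the requires_auth decorator with Flask; the app, route, and response body are illustrative, not from the entry:

from flask import Flask, jsonify

app = Flask(__name__)

@app.route('/private')
@requires_auth  # rejects requests lacking a valid Auth0 bearer token
def private_endpoint():
    return jsonify({'message': 'authenticated'})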
def test_failUnlessFalse(self): <NEW_LINE> <INDENT> self._assertTrueFalse(self.failUnless)
L{SynchronousTestCase.failUnless} raises L{SynchronousTestCase.failureException} if its argument is not considered true.
625941b88da39b475bd64dde
def quickselect(l, k, less_fn, pivot_fn): <NEW_LINE> <INDENT> if len(l) == 1: <NEW_LINE> <INDENT> assert k == 0 <NEW_LINE> return l[0] <NEW_LINE> <DEDENT> pivot = pivot_fn(l) <NEW_LINE> lows = [ el for el in l if less_fn(el, pivot) ] <NEW_LINE> highs = [ el for el in l if less_fn(pivot, el) ] <NEW_LINE> pivots = [ el for el in l if (not less_fn(el, pivot)) and (not less_fn(pivot, el)) ] <NEW_LINE> if k < len(lows): <NEW_LINE> <INDENT> return quickselect(lows, k, less_fn, pivot_fn) <NEW_LINE> <DEDENT> elif k < len(lows) + len(pivots): <NEW_LINE> <INDENT> return pivots[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return quickselect(highs, k - (len(lows) + len(pivots)), less_fn, pivot_fn)
Selects the kth minimum in list (0-based) :param l: List of numerics :param k: Index of minimum :param less_fn: Function of x1 and x2 that returns True if x1 < x2 :param pivot_fn: Function to choose a pivot :return: The kth minimum of l
625941b8435de62698dfdabb
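A usage sketch calling `quickselect` as defined above; with k = len(l) // 2 it returns the median of an odd-length list, and `random.choice` serves as the pivot function:

import random

nums = [7, 1, 5, 3, 9]
median = quickselect(nums, len(nums) // 2,
                     less_fn=lambda a, b: a < b,
                     pivot_fn=random.choice)
assert median == 5  # sorted order is [1, 3, 5, 7, 9]; index 2 holds 5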