code | docstring | _id
---|---|---|
def __init__(self, genes: Genes=None): <NEW_LINE> <INDENT> logger.debug("Constructing new population.") <NEW_LINE> if not isinstance(genes, Genes): <NEW_LINE> <INDENT> raise ValueError("Invalid genes:", genes) <NEW_LINE> <DEDENT> if not genes.size: <NEW_LINE> <INDENT> raise RuntimeError("Empty genetic code.") <NEW_LINE> <DEDENT> self.genes = genes <NEW_LINE> genes = self.genes.get_genes() <NEW_LINE> self.survivors = pd.DataFrame() <NEW_LINE> condition = genes[self.genes.A].str[0] == genes[self.genes.B].str[0] <NEW_LINE> self.chromosomes = genes[condition][[self.genes.A, self.genes.B]] <NEW_LINE> self.fit() | Population constructor.
@param genes: A Genes instance is required.
Mutations will take genes from this
instance and generate variations.
NOTE: The initial set of chromosomes will only contain
pairs of genes whose first characters match in order
to reduce the algorithm complexity. | 625941b3d18da76e23532292 |
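A minimal pandas sketch of the pairing filter this constructor applies; the `Genes` class and the two column names are hypothetical stand-ins for illustration only:

```python
import pandas as pd

# Hypothetical stand-in for Genes.get_genes(): two gene columns A and B.
genes = pd.DataFrame({
    "A": ["actg", "ttga", "gcat", "acgg"],
    "B": ["aggt", "catg", "gtta", "ctaa"],
})

# Keep only rows whose genes share the same first character,
# mirroring the constructor's complexity-reducing filter.
condition = genes["A"].str[0] == genes["B"].str[0]
chromosomes = genes[condition][["A", "B"]]
print(chromosomes)  # rows 0 ("a" == "a") and 2 ("g" == "g") survive
```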
def testNonZ(self): <NEW_LINE> <INDENT> self.doTest(datetime(2013, 1, 31, 3, 45, 0, 123000), '2013-01-31T04:45:00.123+01:00') | jsonutils.json2datetime() should convert a string with a numeric timezone offset | 625941b3adb09d7d5db6c556 |
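One plausible reading of the round trip this test exercises, sketched with the standard library alone (Python 3.7+); whether `jsonutils.json2datetime()` really normalizes to naive UTC is an assumption:

```python
from datetime import datetime, timezone

# Parse an ISO-8601 string with a numeric offset.
dt = datetime.fromisoformat("2013-01-31T04:45:00.123+01:00")

# Converting to UTC recovers the naive value the test starts from.
utc = dt.astimezone(timezone.utc).replace(tzinfo=None)
assert utc == datetime(2013, 1, 31, 3, 45, 0, 123000)
```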
def logical_and(self, other, context=None): <NEW_LINE> <INDENT> if context is None: <NEW_LINE> <INDENT> context = getcontext() <NEW_LINE> <DEDENT> other = _convert_other(other, raiseit=True) <NEW_LINE> if not self._islogical() or not other._islogical(): <NEW_LINE> <INDENT> return context._raise_error(InvalidOperation) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> opa, opb = self._fill_logical(context, self._int, other._int) <NEW_LINE> result = ''.join([ str(int(a) & int(b)) for a, b in zip(opa, opb) ]) <NEW_LINE> return _dec_from_triple(0, result.lstrip('0') or '0', 0) | Applies an 'and' operation between self and other's digits. | 625941b355399d3f05588475 |
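For comparison, the standard library's `decimal` module ships the same operation; operands must be "logical" values, i.e. non-negative integers whose digits are all 0 or 1:

```python
from decimal import Decimal

a = Decimal("1010")
b = Decimal("1100")
print(a.logical_and(b))  # Decimal('1000'), the digit-wise AND
```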
def fix_minmax(image): <NEW_LINE> <INDENT> hdulist = fits.open(image, mode='update') <NEW_LINE> datamin, datamax = numpy.nanmin(hdulist[0].data), numpy.nanmax(hdulist[0].data) <NEW_LINE> hdulist[0].header['DATAMIN'] = datamin <NEW_LINE> hdulist[0].header['DATAMAX'] = datamax <NEW_LINE> hdulist.flush() <NEW_LINE> hdulist.close() | Recalculate the datamin/datamax. | 625941b363b5f9789fde6ea8 |
def setTool(self, pose, tool_id=None, tool_name=None): <NEW_LINE> <INDENT> xyzwpr = Pose_2_Fanuc(pose) <NEW_LINE> if tool_id is None or tool_id < 0: <NEW_LINE> <INDENT> for i in range(6): <NEW_LINE> <INDENT> self.addline('PR[%i,%i]=%.3f ;' % (self.SPARE_PR, i+1, xyzwpr[i])) <NEW_LINE> <DEDENT> for i in range(6,self.nAxes): <NEW_LINE> <INDENT> self.addline('PR[%i,%i]=%.3f ;' % (self.SPARE_PR, i+1, 0)) <NEW_LINE> <DEDENT> self.addline('UTOOL[%i]=PR[%i] ;' % (self.ACTIVE_UT, self.SPARE_PR)) <NEW_LINE> self.addline('UTOOL_NUM=%i ;' % (self.ACTIVE_UT)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ACTIVE_UT = tool_id <NEW_LINE> self.addline('UTOOL_NUM=%i ;' % (self.ACTIVE_UT)) <NEW_LINE> self.RunMessage('UT%i:%.1f,%.1f,%.1f,%.1f,%.1f,%.1f' % (tool_id, xyzwpr[0], xyzwpr[1], xyzwpr[2], xyzwpr[3], xyzwpr[4], xyzwpr[5]), True) | Change the robot TCP | 625941b3d268445f265b4c37 |
def endWrite(self): <NEW_LINE> <INDENT> self.writing = False <NEW_LINE> if len(self.okToRead._waiters) > 0: <NEW_LINE> <INDENT> self.okToRead.notify() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.okToWrite.notify() <NEW_LINE> <DEDENT> self.okToRead.release() <NEW_LINE> self.okToWrite.release() | Notify the next waiting writer if the readers
condition queue is empty. Otherwise, notify the
next waiting reader. | 625941b3e5267d203edcda64 |
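A self-contained sketch of the same readers-writers idea using `threading.Condition`; rather than peeking at the private `_waiters` list as the snippet above does, it simply wakes both queues on `end_write`:

```python
import threading

class RWLock:
    """Minimal readers-writers sketch around one shared lock."""
    def __init__(self):
        self._lock = threading.Lock()
        self._readers_ok = threading.Condition(self._lock)
        self._writers_ok = threading.Condition(self._lock)
        self._readers = 0
        self._writing = False

    def begin_read(self):
        with self._lock:
            while self._writing:
                self._readers_ok.wait()
            self._readers += 1

    def end_read(self):
        with self._lock:
            self._readers -= 1
            if self._readers == 0:
                self._writers_ok.notify()

    def begin_write(self):
        with self._lock:
            while self._writing or self._readers:
                self._writers_ok.wait()
            self._writing = True

    def end_write(self):
        with self._lock:
            self._writing = False
            # Wake all queued readers first, then one writer.
            self._readers_ok.notify_all()
            self._writers_ok.notify()
```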
def get_clanLeagueWars(self, war_tag): <NEW_LINE> <INDENT> return self.get_request('/clanwarleagues/wars/%23{}'.format(war_tag.lstrip("#").upper())) | This one requires the clan_war tag that you get from get_clanLeagueGroup
It's currently not working. Doesn't return roster numbers or people's accurate positions
ARGS:
war_tag (str): Unique tag of the war | 625941b3ec188e330fd5a56a |
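The `%23` in the request path above is the percent-encoded `#`. A sketch of the tag encoding with `urllib.parse` (the sample tag value is made up):

```python
from urllib.parse import quote

def encode_war_tag(war_tag: str) -> str:
    # '#' must be percent-encoded as %23, or it is read as a URL fragment.
    tag = war_tag.lstrip("#").upper()
    return "/clanwarleagues/wars/" + quote("#" + tag, safe="")

print(encode_war_tag("#9cvq28uv"))  # /clanwarleagues/wars/%239CVQ28UV
```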
def __init__(self, loc=None, covariance_matrix=None, validate_args=False, allow_nan_stats=True, name="MultivariateNormalFullCovariance"): <NEW_LINE> <INDENT> parameters = locals() <NEW_LINE> with ops.name_scope(name) as name: <NEW_LINE> <INDENT> with ops.name_scope("init", values=[loc, covariance_matrix]): <NEW_LINE> <INDENT> if covariance_matrix is None: <NEW_LINE> <INDENT> scale_tril = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> covariance_matrix = ops.convert_to_tensor( covariance_matrix, name="covariance_matrix") <NEW_LINE> if validate_args: <NEW_LINE> <INDENT> covariance_matrix = control_flow_ops.with_dependencies([ check_ops.assert_near( covariance_matrix, array_ops.matrix_transpose(covariance_matrix), message="Matrix was not symmetric")], covariance_matrix) <NEW_LINE> <DEDENT> scale_tril = linalg_ops.cholesky(covariance_matrix) <NEW_LINE> <DEDENT> super(MultivariateNormalFullCovariance, self).__init__( loc=loc, scale_tril=scale_tril, validate_args=validate_args, allow_nan_stats=allow_nan_stats, name=name) <NEW_LINE> <DEDENT> <DEDENT> self._parameters = parameters | Construct Multivariate Normal distribution on `R^k`.
The `batch_shape` is the broadcast shape between `loc` and
`covariance_matrix` arguments.
The `event_shape` is given by last dimension of the matrix implied by
`covariance_matrix`. The last dimension of `loc` (if provided) must
broadcast with this.
A non-batch `covariance_matrix` matrix is a `k x k` symmetric positive
definite matrix. In other words it is (real) symmetric with all eigenvalues
strictly positive.
Additional leading dimensions (if any) will index batches.
Args:
loc: Floating-point `Tensor`. If this is set to `None`, `loc` is
implicitly `0`. When specified, may have shape `[B1, ..., Bb, k]` where
`b >= 0` and `k` is the event size.
covariance_matrix: Floating-point, symmetric positive definite `Tensor` of
same `dtype` as `loc`. The strict upper triangle of `covariance_matrix`
is ignored, so if `covariance_matrix` is not symmetric no error will be
raised (unless `validate_args is True`). `covariance_matrix` has shape
`[B1, ..., Bb, k, k]` where `b >= 0` and `k` is the event size.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`,
statistics (e.g., mean, mode, variance) use the value "`NaN`" to
indicate the result is undefined. When `False`, an exception is raised
if one or more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
Raises:
ValueError: if neither `loc` nor `covariance_matrix` are specified. | 625941b382261d6c526ab265 |
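A NumPy sketch of the construction this class performs: check symmetry, factor the covariance once into a lower-triangular `scale_tril`, then sample via `loc + L @ z`:

```python
import numpy as np

rng = np.random.default_rng(0)
loc = np.array([1.0, -2.0])
cov = np.array([[2.0, 0.6],
                [0.6, 1.0]])

# The validate_args symmetry check, then the one-time Cholesky factorization.
assert np.allclose(cov, cov.T)
L = np.linalg.cholesky(cov)  # lower-triangular scale

# Sampling: x = loc + L @ z with z ~ N(0, I) has covariance L @ L.T = cov.
z = rng.standard_normal((100_000, 2))
x = loc + z @ L.T
print(np.cov(x, rowvar=False))  # close to cov
```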
def tree(self, **kwargs): <NEW_LINE> <INDENT> if kwargs.get('full', False) or kwargs.get('show_union', False): <NEW_LINE> <INDENT> return '\n'.join(self._raw_tree(**kwargs)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._step.tree(**kwargs) | Internal structure of chain | 625941b3a17c0f6771cbde17 |
def get_environment_config(self): <NEW_LINE> <INDENT> env_vars = dict( VCENTER_HOST=self._get_cloud_config('vcenter_host'), ) <NEW_LINE> ansible_vars = dict( vcsim=self._get_cloud_config('vcenter_host'), ) <NEW_LINE> return CloudEnvironmentConfig( env_vars=env_vars, ansible_vars=ansible_vars, ) | :rtype: CloudEnvironmentConfig | 625941b376e4537e8c351439 |
def checksum(self, data): <NEW_LINE> <INDENT> return hashlib.md5(data).hexdigest() | Return a checksum for some data. | 625941b391f36d47f21ac2b7 |
def i2c_master_write(self, address, *data): <NEW_LINE> <INDENT> address = int_any_base(address) <NEW_LINE> if len(data) == 1: <NEW_LINE> <INDENT> data = list_any_input(data[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = [int_any_base(c) for c in data] <NEW_LINE> <DEDENT> logger.info('Writing %d bytes to %02xh: %s' % (len(data), address, ' '.join('%02x' % d for d in data))) <NEW_LINE> data = ''.join('%c' % chr(c) for c in data) <NEW_LINE> self._device.i2c_master_write(address, data) | Perform an I2C master write access.
Writes the given `data` to a slave device with address given in the
`address` argument. The `data` argument can either be list of bytes, a
whitespace separated list of bytes or a single byte. See the examples
below.
Both the `address` and `data` can be given either as strings or
integers. Strings are parsed according to their prefix. E.g. `0x`
denotes a hexadecimal number.
Examples:
| I2C Master Write | 0xa4 | 0x10 |
| I2C Master Write | 0xa4 | 0x10 0x12 0x13 |
| I2C Master Write | 0xa4 | 0x10 | 0x12 | 0x13 | | 625941b3d164cc6175782b10 |
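Plausible implementations of the `int_any_base`/`list_any_input` helpers this method relies on; their real bodies are not shown in the row, so treat these as assumptions:

```python
def int_any_base(value):
    # int(x, 0) honors the prefix: 0x hex, 0o octal, 0b binary, else decimal.
    return value if isinstance(value, int) else int(value, 0)

def list_any_input(value):
    # A single whitespace-separated string becomes a list of byte values.
    return [int_any_base(v) for v in value.split()]

print(int_any_base("0xa4"))           # 164
print(list_any_input("0x10 18 0b1"))  # [16, 18, 1]
```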
def _auth_api_key(self, api_key): <NEW_LINE> <INDENT> req = requests.post( self._auth_url(), data={ 'type': 'api_key', 'api_key': api_key}, timeout=5) <NEW_LINE> self._evaluate_response(req) | Authenticate with api_key. | 625941b315baa723493c3d34 |
def _test_create_examples(): <NEW_LINE> <INDENT> vbbvg.run_examples(u'Bundesplatz') | Test creating examples. | 625941b323849d37ff7b2e55 |
def convert_amount(amount, target_currency, date): <NEW_LINE> <INDENT> if amount == 0: <NEW_LINE> <INDENT> return amount <NEW_LINE> <DEDENT> currency = amount.currency <NEW_LINE> if currency == target_currency: <NEW_LINE> <INDENT> return amount <NEW_LINE> <DEDENT> exchange_rate = currency.value_in(target_currency, date) <NEW_LINE> return Amount(amount.value * exchange_rate, target_currency) | Returns ``amount`` converted to ``target_currency`` using ``date`` exchange rates.
.. seealso:: :meth:`.Currency.value_in`
:param amount: :class:`Amount`
:param target_currency: :class:`.Currency`
:param date: ``datetime.date`` | 625941b373bcbd0ca4b2be3f |
def modify_model(prev_model, new_class_num): <NEW_LINE> <INDENT> new_output = Dense(units=new_class_num, activation="softmax", name="output")(prev_model.layers[-2].output) <NEW_LINE> new_model = Model(inputs=prev_model.input, outputs=new_output) <NEW_LINE> return new_model | Arguments
- prev_model: tf.keras.Model
- the existing pretrained model
- new_class_num: int
- the number of classes to classify
Returns
- tf.keras.Model with the output_layer replaced | 625941b399cbb53fe67929aa |
def protocolNegotiationMechanisms(): <NEW_LINE> <INDENT> support = ProtocolNegotiationSupport.NOSUPPORT <NEW_LINE> ctx = SSL.Context(SSL.SSLv23_METHOD) <NEW_LINE> try: <NEW_LINE> <INDENT> ctx.set_npn_advertise_callback(lambda c: None) <NEW_LINE> <DEDENT> except (AttributeError, NotImplementedError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> support |= ProtocolNegotiationSupport.NPN <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> ctx.set_alpn_select_callback(lambda c: None) <NEW_LINE> <DEDENT> except (AttributeError, NotImplementedError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> support |= ProtocolNegotiationSupport.ALPN <NEW_LINE> <DEDENT> return support | Checks whether your versions of PyOpenSSL and OpenSSL are recent enough to
support protocol negotiation, and if they are, what kind of protocol
negotiation is supported.
@return: A combination of flags from L{ProtocolNegotiationSupport} that
indicate which mechanisms for protocol negotiation are supported.
@rtype: L{FlagConstant} | 625941b332920d7e50b27f8f |
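The probe-and-flag pattern restated generically with `enum.Flag`; `detect` accepts any context-like object, so this is a simplification rather than the Twisted API:

```python
import enum

class Support(enum.Flag):
    NOSUPPORT = 0
    NPN = enum.auto()
    ALPN = enum.auto()

def detect(ctx):
    support = Support.NOSUPPORT
    # Probe each optional API; absence raises AttributeError or
    # NotImplementedError, presence sets the corresponding flag.
    for name, flag in [("set_npn_advertise_callback", Support.NPN),
                       ("set_alpn_select_callback", Support.ALPN)]:
        try:
            getattr(ctx, name)(lambda c: None)
        except (AttributeError, NotImplementedError):
            pass
        else:
            support |= flag
    return support
```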
def _Resolve(self): <NEW_LINE> <INDENT> with self._unresolved_subjects_lock: <NEW_LINE> <INDENT> if self in self._unresolved_subjects: <NEW_LINE> <INDENT> self._unresolved_subjects.remove(self) | Marks the current subject as having been adequately asserted. | 625941b392d797404e303f54 |
def create_geometry(src2obj, det2obj, det_pixel, theta_range): <NEW_LINE> <INDENT> geometry = {'det_pixel':det_pixel, 'det_hrz':0., 'det_vrt':0., 'det_mag':0., 'src_hrz':0., 'src_vrt':0., 'src_mag':0., 'axs_hrz':0., 'det_rot':0., 'anisotropy':[1,1,1], 'vol_rot':[0. ,0. ,0.], 'vol_hrz':0., 'vol_tra':[0., 0., 0.], 'vol_mag':0., 'sample':[1,1,1], 'src2obj': src2obj, 'det2obj':det2obj, 'unit':'millimetre', 'type':'flex', 'binning': 1} <NEW_LINE> geometry['src2det'] = geometry.get('src2obj') + geometry.get('det2obj') <NEW_LINE> if src2obj != 0: <NEW_LINE> <INDENT> m = (src2obj + det2obj) / src2obj <NEW_LINE> geometry['img_pixel'] = det_pixel / m <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> geometry['img_pixel'] = 0 <NEW_LINE> <DEDENT> geometry['theta_max'] = theta_range[1] <NEW_LINE> geometry['theta_min'] = theta_range[0] <NEW_LINE> return geometry | Initialize an empty geometry record. | 625941b3099cdd3c635f0a1f |
def kthSmallest(self, root, k): <NEW_LINE> <INDENT> self.ans = [] <NEW_LINE> self.all(root=root) <NEW_LINE> return self.ans[k - 1] | :type root: TreeNode
:type k: int
:rtype: int | 625941b397e22403b379cd5b |
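A lazier variant for contrast: an in-order generator stops after `k` nodes instead of materializing the whole sorted list the way `self.ans` does above:

```python
class TreeNode:
    def __init__(self, val, left=None, right=None):
        self.val, self.left, self.right = val, left, right

def kth_smallest(root, k):
    # In-order traversal of a BST yields values in sorted order.
    def inorder(node):
        if node:
            yield from inorder(node.left)
            yield node.val
            yield from inorder(node.right)
    for i, val in enumerate(inorder(root), start=1):
        if i == k:
            return val

root = TreeNode(3, TreeNode(1, None, TreeNode(2)), TreeNode(4))
print(kth_smallest(root, 2))  # 2
```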
def set(isamAppliance, name, connection, type, jndiId, description='', locked=False, connectionManager=None, new_name=None, check_mode=False, force=False): <NEW_LINE> <INDENT> if _check_exists(isamAppliance, name=name) is False: <NEW_LINE> <INDENT> return add(isamAppliance, name=name, connection=connection, type=type, jndiId=jndiId, description=description, locked=locked, connectionManager=connectionManager, check_mode=check_mode, force=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return update(isamAppliance=isamAppliance, name=name, connection=connection, type=type, jndiId=jndiId, description=description, locked=locked, connectionManager=connectionManager, new_name=new_name, check_mode=check_mode, force=force) | Creating or Modifying a JDBC server connection | 625941b421a7993f00bc7aab |
def write_polygon(self, p, polygons): <NEW_LINE> <INDENT> writer = shapefile.Writer(shapefile.POLYGON) <NEW_LINE> for pp in polygons: <NEW_LINE> <INDENT> writer.poly(parts=[pp.points]) <NEW_LINE> <DEDENT> writer.save(p) | polygons: list of Polygon objects
Polygon.points=[[x0,y0],...,[xN,yN]] | 625941b3d10714528d5ffaa1 |
def _media_item_image_url(self, source, url): <NEW_LINE> <INDENT> if source == "local": <NEW_LINE> <INDENT> url = f"http://{self.hostname}:{self.port}{url}" <NEW_LINE> <DEDENT> url = f"{url}?t=x" <NEW_LINE> return url | Return the correct url to the item's thumbnail. | 625941b35fc7496912cc3748 |
def create_graph(): <NEW_LINE> <INDENT> return | Create and return graph components | 625941b32ae34c7f2600cef5 |
def test_max_users_per_device_on_update_canocicals(setup_database): <NEW_LINE> <INDENT> config.max_users_per_device = 1 <NEW_LINE> database.process_user_login(login_id=20, language_id=7, platform_id=1, device_token='d20', application_version=1007) <NEW_LINE> database.process_user_login(login_id=21, language_id=7, platform_id=1, device_token='d20new', application_version=1007) <NEW_LINE> assert sorted(list(database.get_device_tokens(login_id=20))) == [(1, 'd20')] <NEW_LINE> assert sorted(list(database.get_device_tokens(login_id=21))) == [(1, 'd20new')] <NEW_LINE> database.update_canonicals([{'login_id': 20, 'old_token': 'd20', 'new_token': 'd20new'}]) <NEW_LINE> assert sorted(list(database.get_device_tokens(login_id=20))) == [] <NEW_LINE> assert sorted(list(database.get_device_tokens(login_id=21))) == [(1, 'd20new')] | Test that max_users_per_device works on update_canonicals | 625941b4cb5e8a47e48b7874 |
def custom_method(self): <NEW_LINE> <INDENT> self.log.info("Running custom_method") | Do some custom behaviour for this test
Define it in a method here because you're going to use it repeatedly.
If you think it's useful in general, consider moving it to the base
BitsendTestFramework class so other tests can use it. | 625941b3283ffb24f3c556d1 |
def test_iters(self): <NEW_LINE> <INDENT> for hashtype in HashTypes: <NEW_LINE> <INDENT> self.do_test_iters(hashtype) | Test iteration using various hash types. | 625941b391af0d3eaac9b7d6 |
def get_user_groups_checked(user, db_session): <NEW_LINE> <INDENT> ax.verify_param(user, not_none=True, http_error=HTTPNotFound, msg_on_fail=s.Groups_CheckInfo_NotFoundResponseSchema.description) <NEW_LINE> group_names = ax.evaluate_call(lambda: [group.group_name for group in user.groups], fallback=lambda: db_session.rollback(), http_error=HTTPForbidden, msg_on_fail=s.Groups_CheckInfo_ForbiddenResponseSchema.description) <NEW_LINE> return sorted(group_names) | Obtains the validated list of group names from a pre-validated user. | 625941b30a366e3fb873e5d9 |
def lambda_handler(event, context): <NEW_LINE> <INDENT> stock_price = event["stock_price"] <NEW_LINE> transaction_result = { "id": str(uuid4()), "price": str(stock_price), "type": "buy", "qty": str(randint(1, 10)), "timestamp": datetime.now().isoformat(), } <NEW_LINE> return transaction_result | Sample Lambda function which mocks the operation of buying a random number
of shares for a stock.
For demonstration purposes, this Lambda function does not actually perform any
actual transactions. It simply returns a mocked result.
Parameters
----------
event: dict, required
Input event to the Lambda function
context: object, required
Lambda Context runtime methods and attributes
Returns
------
dict: Object containing details of the stock buying transaction | 625941b356ac1b37e6263fa5 |
def compare_tests_avg(request, test_id): <NEW_LINE> <INDENT> if request.method == 'POST': <NEW_LINE> <INDENT> source = request.POST.get('source', '0') <NEW_LINE> num_of_tests = request.POST.get('num_of_tests_to_compare', '0') <NEW_LINE> data = get_compare_tests_aggregate_data( test_id, num_of_tests, source=source) <NEW_LINE> current_rank = 1 <NEW_LINE> counter = 0 <NEW_LINE> arr = [] <NEW_LINE> for d in data: <NEW_LINE> <INDENT> if counter < 1: <NEW_LINE> <INDENT> d['rank'] = current_rank <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if int(d['start_time']) == int( data[counter - 1]['start_time']): <NEW_LINE> <INDENT> d['rank'] = current_rank <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> current_rank += 1 <NEW_LINE> d['rank'] = current_rank <NEW_LINE> <DEDENT> <DEDENT> if int(d['rank']) <= int(num_of_tests) + 1: <NEW_LINE> <INDENT> arr.append(d) <NEW_LINE> <DEDENT> counter += 1 <NEW_LINE> <DEDENT> response = list(arr) <NEW_LINE> <DEDENT> return JsonResponse(response, safe=False) | Compare average response times for current and N previous tests | 625941b35166f23b2e1a4f1c |
def cleanDataParam(r,varNames): <NEW_LINE> <INDENT> data = {} <NEW_LINE> for i, val in enumerate(varNames): <NEW_LINE> <INDENT> y = r.values(val)[-1][-1] <NEW_LINE> data[val] = y <NEW_LINE> <DEDENT> return data | Clean parameter data by removing all extra values and time information
Returns dictionary of varNames with single values | 625941b324f1403a92600935 |
def iec_custom_exception_handler(exc, context): <NEW_LINE> <INDENT> response = exception_handler(exc, context) <NEW_LINE> if response is not None: <NEW_LINE> <INDENT> response.data['status_code'] = response.status_code <NEW_LINE> response.data['error'] = response.data['detail'] <NEW_LINE> del response.data['detail'] <NEW_LINE> <DEDENT> return response | Custom exception handler for Django Rest Framework that adds
the `status_code` to the response and renames the `detail` key to `error`. | 625941b3fb3f5b602dac345a |
def make_tgt_mask(tgt, tgt_pad): <NEW_LINE> <INDENT> tgt_mask = (tgt != tgt_pad).unsqueeze(-2) <NEW_LINE> tgt_mask = tgt_mask & Variable( mask_invalid_positions(tgt.size(-1)).type_as(tgt_mask.data)) <NEW_LINE> return tgt_mask | Make the mask for the target to hide padding and future words
Arguments:
tgt: target sequence Tensor of shape [batch_size, seq_len]
tgt_pad: id of the padding token
Returns:
A mask of size [batch_size, seq_len, seq_len] | 625941b37047854f462a11d1 |
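A runnable PyTorch sketch of the same two-part mask (padding mask AND no-future mask); `mask_invalid_positions` is assumed to build the usual upper-triangular matrix:

```python
import torch

def subsequent_mask(size):
    # Ones strictly above the diagonal mark "future" positions.
    future = torch.triu(torch.ones(1, size, size, dtype=torch.bool), diagonal=1)
    return ~future  # True where attention is allowed

tgt = torch.tensor([[5, 7, 9, 0, 0]])             # 0 is the padding id
pad_mask = (tgt != 0).unsqueeze(-2)               # [1, 1, 5]
mask = pad_mask & subsequent_mask(tgt.size(-1))   # broadcasts to [1, 5, 5]
print(mask)
```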
def render(self): <NEW_LINE> <INDENT> return self._parent + ' ' + self._render_link_type() + ' ' + self._child | Render class relationship to string
This method generically appends the parent name, a rendering of the
link type (obtained from the :func:`_render_link_type` method) and the
child object name.
Returns
-------
str
The string representation of the class relationship following the
PlantUML syntax | 625941b38c0ade5d55d3e782 |
def aper13(ut11, ut12, astrom): <NEW_LINE> <INDENT> era = era00(ut11, ut12) <NEW_LINE> return aper(era, astrom) | aper13(ut11, ut12, astrom) -> astrom
In the star-independent astrometry parameters, update only the
Earth rotation angle. The caller provides UT1, (n.b. not UTC). | 625941b45f7d997b8717485f |
def extract_from_tag(tag, line): <NEW_LINE> <INDENT> open_tag = "<" + tag + ">" <NEW_LINE> close_tag = "</" + tag + ">" <NEW_LINE> try: <NEW_LINE> <INDENT> i = line.index(open_tag) <NEW_LINE> start = i + len(open_tag) <NEW_LINE> stop = line.index(close_tag, start) <NEW_LINE> return line[start : stop] <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> print("Tag doesn't exist!") <NEW_LINE> return None | Given a line containing HTML-like tags and a tag name, return the text wrapped within that tag. | 625941b40383005118ecf3a8 |
@office_route.route('offices/<int:office_id>',methods=['GET']) <NEW_LINE> def get_specific_office(office_id): <NEW_LINE> <INDENT> query = """SELECT * FROM offices WHERE office_id = '{}'""".format(office_id) <NEW_LINE> office = office_model.Office() <NEW_LINE> office = database.select_from_db(query) <NEW_LINE> if not office: <NEW_LINE> <INDENT> return make_response(jsonify({ "status":404, "message": "Office with id {} is not available".format(office_id), }), 404) <NEW_LINE> <DEDENT> return make_response(jsonify({ "status":200, "office": office }), 200) | get an office by id | 625941b4e5267d203edcda65 |
def describe_direct_connect_gateways(directConnectGatewayId=None, maxResults=None, nextToken=None): <NEW_LINE> <INDENT> pass | Lists all your Direct Connect gateways or only the specified Direct Connect gateway. Deleted Direct Connect gateways are not returned.
See also: AWS API Documentation
Exceptions
:example: response = client.describe_direct_connect_gateways(
directConnectGatewayId='string',
maxResults=123,
nextToken='string'
)
:type directConnectGatewayId: string
:param directConnectGatewayId: The ID of the Direct Connect gateway.
:type maxResults: integer
:param maxResults: The maximum number of results to return with a single call. To retrieve the remaining results, make another call with the returned nextToken value.
If MaxResults is given a value larger than 100, only 100 results are returned.
:type nextToken: string
:param nextToken: The token provided in the previous call to retrieve the next page.
:rtype: dict
ReturnsResponse Syntax
{
'directConnectGateways': [
{
'directConnectGatewayId': 'string',
'directConnectGatewayName': 'string',
'amazonSideAsn': 123,
'ownerAccount': 'string',
'directConnectGatewayState': 'pending'|'available'|'deleting'|'deleted',
'stateChangeError': 'string'
},
],
'nextToken': 'string'
}
Response Structure
(dict) --
directConnectGateways (list) --
The Direct Connect gateways.
(dict) --
Information about a Direct Connect gateway, which enables you to connect virtual interfaces and virtual private gateway or transit gateways.
directConnectGatewayId (string) --
The ID of the Direct Connect gateway.
directConnectGatewayName (string) --
The name of the Direct Connect gateway.
amazonSideAsn (integer) --
The autonomous system number (ASN) for the Amazon side of the connection.
ownerAccount (string) --
The ID of the AWS account that owns the Direct Connect gateway.
directConnectGatewayState (string) --
The state of the Direct Connect gateway. The following are the possible values:
pending : The initial state after calling CreateDirectConnectGateway .
available : The Direct Connect gateway is ready for use.
deleting : The initial state after calling DeleteDirectConnectGateway .
deleted : The Direct Connect gateway is deleted and cannot pass traffic.
stateChangeError (string) --
The error message if the state of an object failed to advance.
nextToken (string) --
The token to retrieve the next page.
Exceptions
DirectConnect.Client.exceptions.DirectConnectServerException
DirectConnect.Client.exceptions.DirectConnectClientException
:return: {
'directConnectGateways': [
{
'directConnectGatewayId': 'string',
'directConnectGatewayName': 'string',
'amazonSideAsn': 123,
'ownerAccount': 'string',
'directConnectGatewayState': 'pending'|'available'|'deleting'|'deleted',
'stateChangeError': 'string'
},
],
'nextToken': 'string'
}
:returns:
pending : The initial state after calling CreateDirectConnectGateway .
available : The Direct Connect gateway is ready for use.
deleting : The initial state after calling DeleteDirectConnectGateway .
deleted : The Direct Connect gateway is deleted and cannot pass traffic.
| 625941b4377c676e91271f71 |
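A sketch of consuming this API's `nextToken` pagination with boto3, using only the parameters and response keys documented above:

```python
import boto3

client = boto3.client("directconnect")

def all_direct_connect_gateways():
    # Follow nextToken until the service stops returning one.
    kwargs = {"maxResults": 100}
    while True:
        page = client.describe_direct_connect_gateways(**kwargs)
        yield from page.get("directConnectGateways", [])
        token = page.get("nextToken")
        if not token:
            break
        kwargs["nextToken"] = token

for gateway in all_direct_connect_gateways():
    print(gateway["directConnectGatewayId"], gateway["directConnectGatewayState"])
```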
def test_conv_to_self_type(self): <NEW_LINE> <INDENT> input = [1,2,3] <NEW_LINE> exp = array([1,2,3]) <NEW_LINE> obs = self.dt1._conv_to_self_type(input) <NEW_LINE> self.assertEqual(obs, exp) <NEW_LINE> exp = array([[1],[2],[3]]) <NEW_LINE> obs = self.dt1._conv_to_self_type([input], transpose=True) <NEW_LINE> self.assertEqual(obs, exp) | Should convert from other to numpy type | 625941b48e05c05ec3eea134 |
def reset(self): <NEW_LINE> <INDENT> super().reset() <NEW_LINE> self.initialised = False | Resets the scheme. | 625941b4293b9510aa2c305d |
def __reset_syncer(self): <NEW_LINE> <INDENT> if self._syncer is not None: <NEW_LINE> <INDENT> self._syncer.reset() | Reset state watcher
:return: void | 625941b430bbd722463cbb87 |
def device(): <NEW_LINE> <INDENT> pass | Return the device associated with this component. | 625941b431939e2706e4cc35 |
def upload_single_file_from_buffer(self, request): <NEW_LINE> <INDENT> assert isinstance(request, UploadFileFromBufferRequest) <NEW_LINE> check_params_ret = self._check_params(request) <NEW_LINE> if check_params_ret is not None: <NEW_LINE> <INDENT> return check_params_ret <NEW_LINE> <DEDENT> data = request.get_data() <NEW_LINE> file_size = len(data) <NEW_LINE> if file_size > self.max_single_file: <NEW_LINE> <INDENT> return CosErr.get_err_msg(CosErr.NETWORK_ERROR, 'file is too big, please use upload_file interface') <NEW_LINE> <DEDENT> auth = cos_auth.Auth(self._cred) <NEW_LINE> bucket = request.get_bucket_name() <NEW_LINE> cos_path = request.get_cos_path() <NEW_LINE> expired = int(time.time()) + self._expired_period <NEW_LINE> sign = auth.sign_more(bucket, cos_path, expired) <NEW_LINE> http_header = dict() <NEW_LINE> http_header['Authorization'] = sign <NEW_LINE> http_header['User-Agent'] = self._config.get_user_agent() <NEW_LINE> file_content = data <NEW_LINE> http_body = dict() <NEW_LINE> http_body['op'] = 'upload' <NEW_LINE> http_body['filecontent'] = file_content <NEW_LINE> http_body['sha'] = FileOp._sha1_content(file_content) <NEW_LINE> http_body['biz_attr'] = request.get_biz_attr() <NEW_LINE> http_body['insertOnly'] = str(request.get_insert_only()) <NEW_LINE> timeout = self._config.get_timeout() <NEW_LINE> ret = self.send_request('POST', bucket, cos_path, headers=http_header, files=http_body, timeout=timeout) <NEW_LINE> return ret | Upload a single file
:param request:
:return: | 625941b43d592f4c4ed1ce45 |
def region_across(self, curtime, result): <NEW_LINE> <INDENT> date = curtime.format_date() <NEW_LINE> time = curtime.format_time() <NEW_LINE> for attribute in range(len(self.array())): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> depart = self.array()[attribute].station_number() <NEW_LINE> arrive = self.array()[attribute + 1].station_number() <NEW_LINE> response = make_response(depart, arrive, date, time) <NEW_LINE> route_info, new_date = make_route(response) <NEW_LINE> result.add(route_info) <NEW_LINE> date = new_date.format_date() <NEW_LINE> time = new_date.format_time() <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> depart = self.array()[-1].station_number() <NEW_LINE> arrive = self.array()[0].station_number() <NEW_LINE> response = make_response(depart, arrive, date, time) <NEW_LINE> route_info, new_date = make_route(response) <NEW_LINE> result.add(route_info) <NEW_LINE> return new_date, result | Creates the route across the region centers
:param curtime: CurTime() instance
:param result: Result() instance
:return: new CurTime() instance,
updated Result() instance | 625941b4656771135c3eb636 |
def peek(self): <NEW_LINE> <INDENT> if (self.size == 0): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.items[1] | Returns the item at the root of the binary heap without removing it.
Returns <None> if the list is empty. | 625941b445492302aab5e083 |
def test_admin_setattr(self): <NEW_LINE> <INDENT> self.cmdhandler.admin.blacklist = False <NEW_LINE> cmd = self.cmd_str('setattr') <NEW_LINE> args = 'admin.blacklist True' <NEW_LINE> userinput = '{} {}'.format(cmd, args) <NEW_LINE> setattrfunc = self.get_cmdfunc( self.cmdhandler, userinput, asadmin=True) <NEW_LINE> if isinstance(setattrfunc, NoCommand): <NEW_LINE> <INDENT> self.fail_nocmd(setattrfunc) <NEW_LINE> <DEDENT> setattrfunc(args, nick='testadmin') <NEW_LINE> self.assertEqual( self.cmdhandler.admin.blacklist, True, msg='Failed to set attribute') | admin command setattr works | 625941b4dd821e528d63af70 |
def advance_round(ballots, active_candidates): <NEW_LINE> <INDENT> to_update = ~ballots.apply(get_active_choice, axis=1).isin(active_candidates) <NEW_LINE> ballots.loc[to_update] = ballots[to_update].apply(find_high_valid_choice, axis=1, args=(active_candidates,)) <NEW_LINE> return ballots | Updates active choice on ballots after a candidate is eliminated
Inputs:
ballots (Pandas dataframe): all the ballots in the election
active_candidates (set): uneliminated candidates | 625941b421a7993f00bc7aac |
def __init__( self, *, kubernetes_version: str, os_type: Union[str, "OSType"] = "Linux", name: Optional[str] = None, upgrades: Optional[List["ManagedClusterPoolUpgradeProfileUpgradesItem"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ManagedClusterPoolUpgradeProfile, self).__init__(**kwargs) <NEW_LINE> self.kubernetes_version = kubernetes_version <NEW_LINE> self.name = name <NEW_LINE> self.os_type = os_type <NEW_LINE> self.upgrades = upgrades | :keyword kubernetes_version: Required. Kubernetes version (major, minor, patch).
:paramtype kubernetes_version: str
:keyword name: Pool name.
:paramtype name: str
:keyword os_type: Required. OsType to be used to specify os type. Choose from Linux and
Windows. Default to Linux. Possible values include: "Linux", "Windows". Default value: "Linux".
:paramtype os_type: str or ~azure.mgmt.containerservice.v2020_12_01.models.OSType
:keyword upgrades: List of orchestrator types and versions available for upgrade.
:paramtype upgrades:
list[~azure.mgmt.containerservice.v2020_12_01.models.ManagedClusterPoolUpgradeProfileUpgradesItem] | 625941b47d847024c06be084 |
def make_class_dictable( cls, exclude=constants.default_exclude, exclude_underscore=constants.default_exclude_underscore, fromdict_allow_pk=constants.default_fromdict_allow_pk, include=None, asdict_include=None, fromdict_include=None): <NEW_LINE> <INDENT> setattr(cls, 'dictalchemy_exclude', exclude) <NEW_LINE> setattr(cls, 'dictalchemy_exclude_underscore', exclude_underscore) <NEW_LINE> setattr(cls, 'dictalchemy_fromdict_allow_pk', fromdict_allow_pk) <NEW_LINE> setattr(cls, 'asdict', asdict) <NEW_LINE> setattr(cls, 'fromdict', fromdict) <NEW_LINE> setattr(cls, '__iter__', iter) <NEW_LINE> setattr(cls, 'dictalchemy_include', include) <NEW_LINE> setattr(cls, 'dictalchemy_asdict_include', asdict_include) <NEW_LINE> setattr(cls, 'dictalchemy_fromdict_include', fromdict_include) <NEW_LINE> return cls | Make a class dictable
Useful for when the Base class is already defined, for example when using
Flask-SQLAlchemy.
Warning: This method will overwrite existing attributes if they exists.
:param exclude: Will be set as dictalchemy_exclude on the class
:param exclude_underscore: Will be set as dictalchemy_exclude_underscore on the class
:param fromdict_allow_pk: Will be set as dictalchemy_fromdict_allow_pk on the class
:param include: Will be set as dictalchemy_include on the class.
:param asdict_include: Will be set as `dictalchemy_asdict_include` on the class. If not None it will override `dictalchemy_include`.
:param fromdict_include: Will be set as `dictalchemy_fromdict_include` on the class. If not None it will override `dictalchemy_include`.
:returns: The class | 625941b40c0af96317bb7fad |
def computeOverlap(self, return_dict=True): <NEW_LINE> <INDENT> W = np.matrix(self.getWeights(), np.float64) <NEW_LINE> O = np.multiply(self.N_k, W.T * W) <NEW_LINE> (eigenvals, eigevec) = linalg.eig(O) <NEW_LINE> eigenvals = np.sort(eigenvals)[::-1] <NEW_LINE> overlap_scalar = 1 - eigenvals[1] <NEW_LINE> results_vals = dict() <NEW_LINE> results_vals['scalar'] = overlap_scalar <NEW_LINE> results_vals['eigenvalues'] = eigenvals <NEW_LINE> results_vals['matrix'] = O <NEW_LINE> if return_dict: <NEW_LINE> <INDENT> return results_vals <NEW_LINE> <DEDENT> return overlap_scalar, eigenvals, O | Compute estimate of overlap matrix between the states.
Parameters
----------
return_dict : bool, Default False
If true, results are a dict, else a tuple
Returns
-------
'scalar' : np.ndarray, float, shape=(K, K)
One minus the largest nontrivial eigenvalue (largest is 1 or -1)
If return_dict, key is 'scalar'
'eigenvalues' : np.ndarray, float, shape=(K)
The sorted (descending) eigenvalues of the overlap matrix.
If return_dict, key is 'eigenvalues'
'matrix' : np.ndarray, float, shape=(K, K)
Estimated state overlap matrix: O[i,j] is an estimate
of the probability of observing a sample from state i in state j
If return_dict, key is 'matrix'
Notes
-----
.. code-block:: none
W.T * W \approx \int (p_i p_j /\sum_k N_k p_k)^2 \sum_k N_k p_k dq^N
= \int (p_i p_j /\sum_k N_k p_k) dq^N
Multiplying elementwise by N_i, the elements of row i give the probability
for a sample from state i being observed in state j.
Examples
--------
>>> from lib.pymbar import testsystems
>>> (x_kn, u_kn, N_k, s_n) = testsystems.HarmonicOscillatorsTestCase().sample(mode='u_kn')
>>> mbar = MBAR(u_kn, N_k)
>>> results = mbar.computeOverlap(return_dict=True) | 625941b42eb69b55b151c66e |
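The overlap computation itself reduces to a few NumPy lines; the weight matrix below is a random toy standing in for real MBAR output:

```python
import numpy as np

# Toy weight matrix W (n samples x K states, columns sum to 1) and counts N_k.
rng = np.random.default_rng(1)
W = rng.random((1000, 3))
W /= W.sum(axis=0)
N_k = np.array([400, 300, 300])

O = np.multiply(N_k, W.T @ W)        # O[i, j] as defined in the docstring
eigenvals = np.sort(np.linalg.eigvals(O).real)[::-1]
overlap_scalar = 1 - eigenvals[1]    # one minus largest nontrivial eigenvalue
print(overlap_scalar)
```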
def run(self): <NEW_LINE> <INDENT> self.run_simulation(nsims=self.nsims) <NEW_LINE> if self.path is None: <NEW_LINE> <INDENT> os.chdir(self.cur_dir) | Main way this class is intended to function | 625941b47b180e01f3dc45cc |
def getCoordinates(self): <NEW_LINE> <INDENT> return list(self.gridVars.keys()) | Coordinates variables contained within the host object
Returns
-------
list of coordinate names | 625941b4460517430c393f54 |
def MakeTestIonoclass(testv=False,testtemp=False,N_0=1e11,z_0=250.0,H_0=50.0,coords=None,times =np.array([[0,1e6]])): <NEW_LINE> <INDENT> if coords is None: <NEW_LINE> <INDENT> xvec = np.arange(-250.0,250.0,20.0) <NEW_LINE> yvec = np.arange(-250.0,250.0,20.0) <NEW_LINE> zvec = np.arange(50.0,900.0,2.0) <NEW_LINE> xx,zz,yy = np.meshgrid(xvec,zvec,yvec) <NEW_LINE> coords = np.zeros((xx.size,3)) <NEW_LINE> coords[:,0] = xx.flatten() <NEW_LINE> coords[:,1] = yy.flatten() <NEW_LINE> coords[:,2] = zz.flatten() <NEW_LINE> zzf=zz.flatten() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> zzf = coords[:,2] <NEW_LINE> <DEDENT> Ne_profile = Chapmanfunc(zzf,H_0,z_0,N_0) <NEW_LINE> if testtemp: <NEW_LINE> <INDENT> (Te,Ti)= TempProfile(zzf) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Te = np.ones_like(zzf)*2000.0 <NEW_LINE> Ti = np.ones_like(zzf)*1500.0 <NEW_LINE> <DEDENT> (Nlocs,ndims) = coords.shape <NEW_LINE> Ntime= len(times) <NEW_LINE> vel = np.zeros((Nlocs,Ntime,ndims)) <NEW_LINE> if testv: <NEW_LINE> <INDENT> vel[:,:,2] = np.repeat(zzf[:,np.newaxis],Ntime,axis=1)/5.0 <NEW_LINE> <DEDENT> species=['O+','e-'] <NEW_LINE> params = np.zeros((Ne_profile.size,len(times),2,2)) <NEW_LINE> params[:,:,0,1] = np.repeat(Ti[:,np.newaxis],Ntime,axis=1) <NEW_LINE> params[:,:,1,1] = np.repeat(Te[:,np.newaxis],Ntime,axis=1) <NEW_LINE> params[:,:,0,0] = np.repeat(Ne_profile[:,np.newaxis],Ntime,axis=1) <NEW_LINE> params[:,:,1,0] = np.repeat(Ne_profile[:,np.newaxis],Ntime,axis=1) <NEW_LINE> Icont1 = IonoContainer(coordlist=coords,paramlist=params,times = times,sensor_loc = np.zeros(3),ver =0,coordvecs = ['x','y','z'],paramnames=None,species=species,velocity=vel) <NEW_LINE> return Icont1 | This function will create a test ionoclass with an electron density that
follows a chapman function.
Inputs
testv - A bool to add velocities. If not then all of the velocity values will be zero.
testtemp - If true then a temperature profile will be used. If not then there will be a set temperature
of 2000 K for Te and Ti.
N_0 - The peak value of the Chapman function.
z_0 - The peak altitude of the Chapman function.
H_0 - The scale height.
coords - A list of coordinates that the data will be created over.
times - A list of times the data will be created over.
Outputs
Icont - A test ionocontainer. | 625941b4dc8b845886cb52f8 |
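The `Chapmanfunc` import is not shown in the row; a standard Chapman-layer profile matching the parameter names would look like this (an assumption about its exact form):

```python
import numpy as np

def chapman(z, H_0, z_0, N_0):
    # Standard Chapman layer: peaks at z_0 with scale height H_0.
    zn = (z - z_0) / H_0
    return N_0 * np.exp(0.5 * (1.0 - zn - np.exp(-zn)))

z = np.arange(50.0, 900.0, 2.0)
Ne = chapman(z, H_0=50.0, z_0=250.0, N_0=1e11)
print(z[np.argmax(Ne)])  # peaks at z_0 = 250 km
```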
def __init__(self, product_id): <NEW_LINE> <INDENT> self.product_id = product_id | :param product_id: str, length:31, the product code | 625941b4d4950a0f3b08c120 |
def mean_average_precision(cand_set, queries, c_gdtruth, q_gdtruth): <NEW_LINE> <INDENT> scorer = APScorer(cand_set.shape[0]) <NEW_LINE> simmat = np.matmul(queries, cand_set.T) <NEW_LINE> ap_sum = 0 <NEW_LINE> for q in range(simmat.shape[0]): <NEW_LINE> <INDENT> sim = simmat[q] <NEW_LINE> index = np.argsort(-sim) <NEW_LINE> sorted_labels = [] <NEW_LINE> for i in range(index.shape[0]): <NEW_LINE> <INDENT> if c_gdtruth[index[i]] == q_gdtruth[q]: <NEW_LINE> <INDENT> sorted_labels.append(1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sorted_labels.append(0) <NEW_LINE> <DEDENT> <DEDENT> ap = scorer.score(sorted_labels) <NEW_LINE> ap_sum += ap <NEW_LINE> <DEDENT> mAP = ap_sum / simmat.shape[0] <NEW_LINE> return mAP | calculate mAP of a conditional set. Samples in candidate and query set are of the same condition.
cand_set:
type: nparray
shape: c x feature dimension
queries:
type: nparray
shape: q x feature dimension
c_gdtruth:
type: nparray
shape: c
q_gdtruth:
type: nparray
shape: q | 625941b445492302aab5e084 |
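The `APScorer` class is external to this row; a plausible `score()` over a sorted relevance list, shown for intuition only:

```python
def average_precision(sorted_labels):
    # Mean of precision@i taken at each relevant hit.
    hits, precisions = 0, []
    for i, rel in enumerate(sorted_labels, start=1):
        if rel:
            hits += 1
            precisions.append(hits / i)
    return sum(precisions) / len(precisions) if precisions else 0.0

print(average_precision([1, 0, 1, 0]))  # (1/1 + 2/3) / 2 = 0.8333...
```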
def splrnk(n, n0, p, x, y): <NEW_LINE> <INDENT> isplit = 1 <NEW_LINE> n1 = n0[0, 0] <NEW_LINE> p1 = p[0] <NEW_LINE> for i in range(1, n): <NEW_LINE> <INDENT> if n0[i, 0] < n1 or (n0[i, 0] == n1 and p[i] < p1): <NEW_LINE> <INDENT> isplit = i + 1 <NEW_LINE> n1 = n0[i] <NEW_LINE> p1 = p[i] <NEW_LINE> <DEDENT> <DEDENT> if n1 > 0: <NEW_LINE> <INDENT> splval = split2(x[isplit-1, 0], y[isplit-1, 0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> splval = math.inf <NEW_LINE> <DEDENT> return isplit, splval | determines the splitting index and splitting value for splitting a
box by rank | 625941b463b5f9789fde6ea9 |
def test_getChildWithDefaultUnauthorizedPassword(self): <NEW_LINE> <INDENT> return self._invalidAuthorizationTest( 'Basic ' + b64encode(self.username + ':bar')) | If L{HTTPAuthSessionWrapper.getChildWithDefault} is called with a
request with an I{Authorization} header with a user which exists and
the wrong password, an L{IResource} which renders a 401 response code
is returned. | 625941b4b7558d58953c4ce0 |
def gotStringDigitsGetIntegers( d ): <NEW_LINE> <INDENT> for k, v in getItemIter( d ): <NEW_LINE> <INDENT> d[ k ] = int( v ) | modifies the dictionary in place -- returns None | 625941b467a9b606de4a7c81 |
def _updateEventQueue(self, playlist): <NEW_LINE> <INDENT> self.messenger.queueRemoveEvents('playtrack', 'player') <NEW_LINE> for track in playlist['playlist']['trackList']['track']: <NEW_LINE> <INDENT> if track.has_key('time') and track.has_key('start'): <NEW_LINE> <INDENT> starts = str(track['start'] + 'T' + track['time']) <NEW_LINE> event = {'job':'play', 'location': track['location'],'length': track['length'], 'date': track['start'], 'time': track['time']} <NEW_LINE> self.messenger.queueAddEvent('playtrack', starts, event, 'player') | Update the playlist event queue
@type playlist: dict
@param playlist: Playlist | 625941b4d486a94d0b98df13 |
def get_predefined_styles(): <NEW_LINE> <INDENT> lReturn = [] <NEW_LINE> sStylePath = os.path.join(os.path.dirname(__file__), 'styles') <NEW_LINE> lStyles = os.listdir(sStylePath) <NEW_LINE> for sStyle in lStyles: <NEW_LINE> <INDENT> if sStyle.endswith('.yaml'): <NEW_LINE> <INDENT> with open(os.path.join(sStylePath, sStyle)) as yaml_file: <NEW_LINE> <INDENT> tempConfiguration = yaml.safe_load(yaml_file) <NEW_LINE> <DEDENT> lReturn.append(tempConfiguration['name']) <NEW_LINE> <DEDENT> <DEDENT> return lReturn | Reads all predefined styles and returns a list of names.
Parameters : None
Returns : (list of strings) | 625941b4cc40096d61595719 |
def scroll(amount, horizontal=False): <NEW_LINE> <INDENT> wheel = MOUSEEVENTF_WHEEL if not horizontal else MOUSEEVENTF_HWHEEL <NEW_LINE> scroll_event = Input( type=INPUT_MOUSE, mi=MouseInput(dwFlags=wheel, mouseData=amount * 120)) <NEW_LINE> _err(ctypes.windll.user32.SendInput( 1, ctypes.byref(scroll_event), ctypes.sizeof(scroll_event))) | Vertical and horizontal scrolling. | 625941b4e5267d203edcda66 |
def exp1(): <NEW_LINE> <INDENT> phases = range(2,31) <NEW_LINE> fil3 = open("exp1_bonmin.csv",'w') <NEW_LINE> fil3.write("numPhases,ratio,ratio2,etBONMIN,etPkMin\n") <NEW_LINE> for numPhases in phases: <NEW_LINE> <INDENT> colNames = ["alloc-"+str(u) for u in range(1,numPhases+1)] <NEW_LINE> colNames.append("pkp") <NEW_LINE> colNames.append("status") <NEW_LINE> colNames.append("OH") <NEW_LINE> fld1 = open("workloads-exp1/wkld_"+str(numPhases)+"_matlab.out.csv","r") <NEW_LINE> df1 = pd.read_csv(fld1,header=None,names=colNames) <NEW_LINE> df11 = df1[df1.status == "passed"] <NEW_LINE> arr1 = df11['pkp'].values <NEW_LINE> colNames.append("wrst") <NEW_LINE> fld2 = open("workloads-exp1/wkld_"+str(numPhases)+"_cpp.out.csv","r") <NEW_LINE> df2 = pd.read_csv(fld2,header=None,names=colNames) <NEW_LINE> df21 = df2[df2.status == "passed"] <NEW_LINE> arr2 = df21['pkp'].values <NEW_LINE> arr3 = df21['wrst'].values <NEW_LINE> ratio = np.divide(arr2,arr1) <NEW_LINE> ratio2 = np.divide(arr3,arr2) <NEW_LINE> etbonmin = df11['OH'].values <NEW_LINE> etPkMin = df21['OH'].values <NEW_LINE> fil3.write(f"{numPhases},{np.mean(ratio)},{np.mean(ratio2)},{np.mean(etbonmin)},{np.mean(etPkMin)}\n") <NEW_LINE> <DEDENT> fil3.close() | Average peak power for applications under PkMin with
increasing number of phases normalized against their peak
power under Oracle. | 625941b45e10d32532c5ecf4 |
def _setChangeLog(self, log): <NEW_LINE> <INDENT> if self._ui != None: <NEW_LINE> <INDENT> self._ui.setAppChangeLog(log) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._changelogHolder = log | Caches or displays the given change log.
log: list of strings | 625941b476e4537e8c35143b |
def numSquares_TLE(self, n): <NEW_LINE> <INDENT> F = [i for i in xrange(n+1)] <NEW_LINE> for i in xrange(1, n+1): <NEW_LINE> <INDENT> for j in xrange(1, int(math.sqrt(i))+1): <NEW_LINE> <INDENT> if i-j*j >= 0: <NEW_LINE> <INDENT> F[i] = min(F[i], F[i-j*j]+1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return F[n] | DP
:type n: int
:rtype: int | 625941b430c21e258bdfa261 |
def get_exp_ind(filePath, fileName, overwriteExpName=None): <NEW_LINE> <INDENT> if overwriteExpName is not None: <NEW_LINE> <INDENT> return exp_name_to_ind(overwriteExpName), overwriteExpName; <NEW_LINE> <DEDENT> if 'V1_orig' in filePath: <NEW_LINE> <INDENT> return 1, 'sfMix'; <NEW_LINE> <DEDENT> if fileName.startswith('mr'): <NEW_LINE> <INDENT> name_root = fileName.split('_')[1]; <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> name_root = fileName.split('_')[0]; <NEW_LINE> <DEDENT> orig_files = os.listdir(filePath + '../recordings/'); <NEW_LINE> for f in orig_files: <NEW_LINE> <INDENT> if f.startswith(name_root) and '.xml' in f and 'sfMix' in f: <NEW_LINE> <INDENT> expName = f.split('[')[1].split(']')[0]; <NEW_LINE> return exp_name_to_ind(expName), expName; <NEW_LINE> <DEDENT> <DEDENT> return None, None; | returns the following:
index of experiment (see get_exp_params)
name of experiment (e.g. sfMix, sfMixHalfInt)
this function relies on the fact that all .npy files (in /structures) have an associated matlab file
in /recordings with the full experiment name
EXCEPT: V1_orig files... | 625941b43346ee7daa2b2b2c |
def gpscat(gpfile,rxn,data): <NEW_LINE> <INDENT> import numpy as np <NEW_LINE> from datfcn import order <NEW_LINE> om = [] <NEW_LINE> with open(gpfile,'w+') as gnu: <NEW_LINE> <INDENT> gnu.write("# sza / rad ") <NEW_LINE> for h in range(len(rxn)): <NEW_LINE> <INDENT> gnu.write("\t%s" % (rxn[h][1])) <NEW_LINE> <DEDENT> gnu.write("\n") <NEW_LINE> for y in range(len(rxn)): <NEW_LINE> <INDENT> o1, o2 = order(data[0][y+1]) <NEW_LINE> om = np.append(om,o2) <NEW_LINE> <DEDENT> om = np.insert(om,0,1.) <NEW_LINE> for x in range(len(data)): <NEW_LINE> <INDENT> for y in range(len(rxn)+1): <NEW_LINE> <INDENT> if (y == 0): <NEW_LINE> <INDENT> data[x][0] = np.deg2rad(data[x][0]) <NEW_LINE> <DEDENT> gnu.write("\t%.3f" % (data[x][y]/om[y])) <NEW_LINE> <DEDENT> gnu.write("\n") <NEW_LINE> <DEDENT> <DEDENT> return rxn, data, om | Function gnuinp
===============
Purpose:
Write gnuplot input file with data for scatter plots of TUV data.
Variables:
I/O:
gpfile: gnuplot file with data for scatter plots
rxn: matrix with indices and labels of available photoreactions
data: matrix with sza-dependent j values from TUV model
om: matrix with all orders of magnitudes (for sza column 0 dummy inserted)
internal:
h,x,y: counters/indices
o1,o2: order of magnitude of maximum value in each j data column
Dependencies:
uses: numpy, datfcn.order
called from: photMCM (main) | 625941b41b99ca400220a874 |
def test_gapMaps(self): <NEW_LINE> <INDENT> empty = '' <NEW_LINE> no_gaps = 'aaa' <NEW_LINE> all_gaps = '---' <NEW_LINE> start_gaps = '--abc' <NEW_LINE> end_gaps = 'ab---' <NEW_LINE> mid_gaps = '--a--b-cd---' <NEW_LINE> gm = lambda x: self.RNA(x).gapMaps() <NEW_LINE> self.assertEqual(gm(empty), ({},{})) <NEW_LINE> self.assertEqual(gm(no_gaps), ({0:0,1:1,2:2}, {0:0,1:1,2:2})) <NEW_LINE> self.assertEqual(gm(all_gaps), ({},{})) <NEW_LINE> self.assertEqual(gm(start_gaps), ({0:2,1:3,2:4},{2:0,3:1,4:2})) <NEW_LINE> self.assertEqual(gm(end_gaps), ({0:0,1:1},{0:0,1:1})) <NEW_LINE> self.assertEqual(gm(mid_gaps), ({0:2,1:5,2:7,3:8},{2:0,5:1,7:2,8:3})) | Sequence gapMaps should return dicts mapping gapped/ungapped pos | 625941b4293b9510aa2c305e |
def intToRoman(self, num): <NEW_LINE> <INDENT> div = 1 <NEW_LINE> final_str = "" <NEW_LINE> count = 0 <NEW_LINE> while num/div >= 10: <NEW_LINE> <INDENT> count += 1 <NEW_LINE> div = div*10 <NEW_LINE> <DEDENT> while num > 0: <NEW_LINE> <INDENT> left_digit = num/div <NEW_LINE> left_value = left_digit*div <NEW_LINE> num = num - left_value <NEW_LINE> div = div/10 <NEW_LINE> if count == 0: <NEW_LINE> <INDENT> if left_digit == 4: <NEW_LINE> <INDENT> final_str += "IV" <NEW_LINE> <DEDENT> elif left_digit == 9: <NEW_LINE> <INDENT> final_str += "IX" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> while left_digit > 0: <NEW_LINE> <INDENT> if left_digit >= 5: <NEW_LINE> <INDENT> final_str += "V" <NEW_LINE> left_digit -= 5 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> final_str += "I" <NEW_LINE> left_digit -= 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> elif count == 1: <NEW_LINE> <INDENT> if left_digit == 4: <NEW_LINE> <INDENT> final_str += "XL" <NEW_LINE> <DEDENT> elif left_digit == 9: <NEW_LINE> <INDENT> final_str += "XC" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> while left_digit > 0: <NEW_LINE> <INDENT> if left_digit >= 5: <NEW_LINE> <INDENT> final_str += "L" <NEW_LINE> left_digit -= 5 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> final_str += "X" <NEW_LINE> left_digit -= 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> elif count == 2: <NEW_LINE> <INDENT> if left_digit == 4: <NEW_LINE> <INDENT> final_str += "CD" <NEW_LINE> <DEDENT> elif left_digit == 9: <NEW_LINE> <INDENT> final_str += "CM" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> while left_digit > 0: <NEW_LINE> <INDENT> if left_digit >= 5: <NEW_LINE> <INDENT> final_str += "D" <NEW_LINE> left_digit -= 5 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> final_str += "C" <NEW_LINE> left_digit -= 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> while left_digit > 0: <NEW_LINE> <INDENT> final_str += "M" <NEW_LINE> left_digit -= 1 <NEW_LINE> <DEDENT> <DEDENT> count -= 1 <NEW_LINE> <DEDENT> return final_str | :type num: int
:rtype: str
Find the division of the number in form of 10's
Then count the number of digits
Get the left digit and slice it from the number
For the left digit, identify the roman numeral | 625941b45fdd1c0f98dbfff6 |
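A table-driven greedy rewrite of the same digit logic, for contrast; it handles the 4/9 subtractive cases with explicit value-symbol pairs:

```python
def int_to_roman(num):
    table = [(1000, "M"), (900, "CM"), (500, "D"), (400, "CD"),
             (100, "C"), (90, "XC"), (50, "L"), (40, "XL"),
             (10, "X"), (9, "IX"), (5, "V"), (4, "IV"), (1, "I")]
    out = []
    for value, symbol in table:
        # Take each symbol as many times as it fits, largest first.
        while num >= value:
            out.append(symbol)
            num -= value
    return "".join(out)

print(int_to_roman(1994))  # MCMXCIV
```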
def _handle_pcl(self, ros_cloud): <NEW_LINE> <INDENT> pcl_original = ros_to_pcl(ros_cloud) <NEW_LINE> voxel_filter = PCLSegmenter.VoxelFilter( pcl_original, 0.01 ) <NEW_LINE> pcl_clustered = voxel_filter.filter() <NEW_LINE> pass_through_filter = PCLSegmenter.PassThroughFilter( pcl_clustered, 'z', [0.6, 1.1] ) <NEW_LINE> pcl_roi = pass_through_filter.filter() <NEW_LINE> plane_segmenter = PCLSegmenter.PlaneSegmenter( pcl_roi, 0.01 ) <NEW_LINE> (idx_table, normal_table) = plane_segmenter.segment() <NEW_LINE> pcl_table = pcl_roi.extract(idx_table, negative=False) <NEW_LINE> pcl_objects = pcl_roi.extract(idx_table, negative=True) <NEW_LINE> ros_cloud_table = pcl_to_ros(pcl_table) <NEW_LINE> ros_cloud_objects = pcl_to_ros(pcl_objects) <NEW_LINE> self._pub_pcl_table.publish(ros_cloud_table) <NEW_LINE> self._pub_pcl_objects.publish(ros_cloud_objects) <NEW_LINE> object_segmenter = PCLSegmenter.EuclideanSegmenter( pcl_objects, eps = 0.025, min_samples = 16, max_samples = 2048 ) <NEW_LINE> pcl_separate_objects = object_segmenter.segment() <NEW_LINE> ros_cloud_separate_objects = pcl_to_ros(pcl_separate_objects) <NEW_LINE> self._pub_pcl_separate_objects.publish(ros_cloud_separate_objects) | Handle ROS pc2 message
| 625941b44d74a7450ccd3f88 |
@mock.patch('foremast.utils.lookups.gitlab') <NEW_LINE> def test_init(gitlab): <NEW_LINE> <INDENT> my_git = FileLookup() <NEW_LINE> assert my_git.git_short == '' <NEW_LINE> assert my_git.server == gitlab.Gitlab.return_value <NEW_LINE> my_git.server.projects.get.assert_called_with(my_git.git_short) | Check init. | 625941b4046cf37aa974cb0f |
def pretty_to_link(inst, from_link, to_link): <NEW_LINE> <INDENT> values = '' <NEW_LINE> prefix = '' <NEW_LINE> for name, ty in inst.__a__: <NEW_LINE> <INDENT> if name in from_link.ids: <NEW_LINE> <INDENT> value = getattr(inst, name) <NEW_LINE> value = xtuml.serialize_value(value, ty) <NEW_LINE> idx = from_link.ids.index(name) <NEW_LINE> name = to_link.ids[idx] <NEW_LINE> values += '%s%s=%s' % (prefix, name, value) <NEW_LINE> prefix=', ' <NEW_LINE> <DEDENT> <DEDENT> return '%s(%s)' % (to_link.kind, values) | Create a human-readable representation of a link on the 'TO'-side | 625941b4ec188e330fd5a56d |
def update(self): <NEW_LINE> <INDENT> while not self.stopped: <NEW_LINE> <INDENT> self.grabbed, self.frame = self.stream.read() <NEW_LINE> if self.grabbed: <NEW_LINE> <INDENT> self.has_frame.set() | Continuously update the stream with the most recent frame
until stopped. | 625941b432920d7e50b27f90 |
def __init__(self, size=1024, *, loop=None): <NEW_LINE> <INDENT> self.loop = loop or _get_loop() <NEW_LINE> self.size = size <NEW_LINE> self._executed = 0 <NEW_LINE> self._joined = set() <NEW_LINE> self._waiting = {} <NEW_LINE> self._active = {} <NEW_LINE> self.semaphore = aio.Semaphore(value=self.size) | Pool of asyncio coroutines with familiar interface.
Pool makes sure _no more_ and _no less_ (if possible) than `size`
spawned coroutines are active at the same time. _spawned_ means created
and scheduled with one of the pool interface methods, _active_ means
coroutine function started executing it's code, as opposed to
_waiting_ -- which waits for pool space without entering coroutine
function.
Supports the asynchronous context management protocol (`aenter`, `aexit`).
The main idea behind the spawning methods is -- they return newly created
futures, not "native" ones, returned by `pool.create_task` or used for
`await`. Read more about this in readme and docstrings below. | 625941b4ff9c53063f47bfc3 |
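The core admission idea can be sketched in a few lines with a bare `asyncio.Semaphore`; this is a simplification of the Pool above, not its interface:

```python
import asyncio

async def spawn_bounded(coros, size=3):
    # The semaphore admits at most `size` coroutines into their bodies
    # at once; the rest wait without executing.
    sem = asyncio.Semaphore(size)

    async def run(coro):
        async with sem:
            return await coro

    return await asyncio.gather(*(run(c) for c in coros))

async def work(i):
    await asyncio.sleep(0.1)
    return i

print(asyncio.run(spawn_bounded([work(i) for i in range(10)])))
```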
def get_folders(self): <NEW_LINE> <INDENT> return self.mail.list() | Return a list of folders | 625941b421bff66bcd68471a |
def _on_canvas_clicked(self, event: tkinter.Event) -> None: <NEW_LINE> <INDENT> count = 0 <NEW_LINE> canvas_height = self._canvas.winfo_height() <NEW_LINE> canvas_width = self._canvas.winfo_width() <NEW_LINE> height = canvas_height / self._rows <NEW_LINE> wid = canvas_width / self._cols <NEW_LINE> desired_row = self.get_square(event.x, event.y)[0] <NEW_LINE> desired_col = self.get_square(event.x,event.y)[1] <NEW_LINE> if self._x.everything_filled() == True or self._x.one_only(): <NEW_LINE> <INDENT> self.winner_label.config(text = "Winner: "+ self._x._winner) <NEW_LINE> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._x.check_board_spaces() <NEW_LINE> self._x.reassign(desired_row,desired_col) <NEW_LINE> if self._x._turn == 'B': <NEW_LINE> <INDENT> if (desired_row,desired_col) in self._x._valid_black_moves: <NEW_LINE> <INDENT> self._x._is_valid_black = True <NEW_LINE> self._x._board[desired_row][desired_col] = 'B' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._x._is_valid_black = False <NEW_LINE> <DEDENT> <DEDENT> elif self._x._turn == 'W': <NEW_LINE> <INDENT> if (desired_row,desired_col) in self._x._valid_white_moves: <NEW_LINE> <INDENT> self._x._is_valid_white = True <NEW_LINE> self._x._board[desired_row][desired_col] = 'W' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._x._is_valid_white = False <NEW_LINE> <DEDENT> <DEDENT> if count>0 : <NEW_LINE> <INDENT> if self._x.one_filled(): <NEW_LINE> <INDENT> self._x.no_pos() <NEW_LINE> <DEDENT> <DEDENT> count +=1 <NEW_LINE> self._redraw_all() <NEW_LINE> if self._x._turn == 'B' and self._x._is_valid_black == False: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self._x._turn == 'W': <NEW_LINE> <INDENT> if self._x._is_valid_white == False: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> self._x._opposite_turn() <NEW_LINE> self.print_if_move_valid(event.x,event.y) <NEW_LINE> if count>0 : <NEW_LINE> <INDENT> if self._x.one_filled(): <NEW_LINE> <INDENT> self._x.no_pos() <NEW_LINE> <DEDENT> <DEDENT> if self._x.everything_filled() == True or self._x.no_pos()or self._x.both_filled() or self._x.one_only(): <NEW_LINE> <INDENT> self.winner_label.config(text = "Winner: "+ self._x._winner) <NEW_LINE> self._turn_label.config(text = 'GAME OVER') <NEW_LINE> return <NEW_LINE> <DEDENT> self._change_turn_labels() <NEW_LINE> self._change_point_labels() <NEW_LINE> print('tkinter black ',self._x._valid_black_moves, '\n' , 'tkinter white ',self._x._valid_white_moves) | runs when canvas is clicked | 625941b421bff66bcd68471b |
def bind_tensors(self, tensors): <NEW_LINE> <INDENT> def _binder(placeholder, tensor): <NEW_LINE> <INDENT> placeholder.tensor = tensor <NEW_LINE> <DEDENT> U.recursive_binary_combine( self.struct, tensors, combinator=_binder, is_base=_is_placeholder, ) | Bind tensor values to placeholders for actual layer computation | 625941b499fddb7c1c9de158 |
def notify_condition(self, condition): <NEW_LINE> <INDENT> detected_foreign_database = self.__get_foreign_database(condition['id']) <NEW_LINE> versions_range_from_condition = VersionsRange(dump=condition['versions_range']) <NEW_LINE> if self.get_hash(versions_range_from_condition) == condition['hash']: <NEW_LINE> <INDENT> detected_foreign_database.set_versions_range_with_detected_hash_equivalence(versions_range_from_condition) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> detected_foreign_database.set_versions_range_with_detected_hash_difference(versions_range_from_condition) | Notify the database about a foreign database and its state in the specified range. | 625941b41f037a2d8b945fc4 |
def update_product(self, product_id, data): <NEW_LINE> <INDENT> product = self.get_one(product_id) <NEW_LINE> data['modified_date'] = str(datetime.now().strftime('%b-%d-%Y : %H:%M:%S')) <NEW_LINE> product[0].update(data) <NEW_LINE> return product | Method for updating a product | 625941b4627d3e7fe0d68c12 |
def computeComponentsNumber(imIn, grid=mamba.DEFAULT_GRID): <NEW_LINE> <INDENT> imWrk = mamba.imageMb(imIn, 32) <NEW_LINE> return mamba.label(imIn, imWrk, grid=grid) | Computes the number of connected components in image 'imIn'. The result is
an integer value. | 625941b401c39578d7e74c09 |
def test_gen_modules_executors(self): <NEW_LINE> <INDENT> mock_opts = self.get_config('minion', from_scratch=True) <NEW_LINE> io_loop = salt.ext.tornado.ioloop.IOLoop() <NEW_LINE> io_loop.make_current() <NEW_LINE> minion = salt.minion.Minion(mock_opts, io_loop=io_loop) <NEW_LINE> class MockPillarCompiler(object): <NEW_LINE> <INDENT> def compile_pillar(self): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> with patch('salt.pillar.get_pillar', return_value=MockPillarCompiler()): <NEW_LINE> <INDENT> with patch('salt.loader.executors') as execmock: <NEW_LINE> <INDENT> minion.gen_modules() <NEW_LINE> <DEDENT> <DEDENT> assert execmock.called_with(minion.opts, minion.functions) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> minion.destroy() | Ensure gen_modules is called with the correct arguments #54429 | 625941b421a7993f00bc7aad |
def test_basicInsertion(self): <NEW_LINE> <INDENT> x = MimeDict() <NEW_LINE> x["hello"] = "hello" <NEW_LINE> self.assertEqual("hello", x["hello"] ) | Insertion into a dictionary succeeds | 625941b45fc7496912cc374a |
def print_square(size): <NEW_LINE> <INDENT> if isinstance(size, float) and size < 0: <NEW_LINE> <INDENT> raise TypeError("size must be an integer") <NEW_LINE> <DEDENT> if not isinstance(size, int): <NEW_LINE> <INDENT> raise TypeError("size must be an integer") <NEW_LINE> <DEDENT> if size < 0: <NEW_LINE> <INDENT> raise ValueError("size must be >= 0") <NEW_LINE> <DEDENT> if size == 0: <NEW_LINE> <INDENT> print() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for i in range(0, size): <NEW_LINE> <INDENT> for j in range(0, size): <NEW_LINE> <INDENT> print("#", end='') <NEW_LINE> <DEDENT> print() | prints a size x size square of '#' characters; raises TypeError/ValueError on invalid input | 625941b4627d3e7fe0d68c13
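A quick usage example for print_square above; the comments show what each call does under the code as written:

print_square(3)
# ###
# ###
# ###
print_square(0)    # prints a single empty line
print_square(-4)   # raises ValueError: size must be >= 0
print_square(1.5)  # raises TypeError: size must be an integer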
def Get_LocatorRecord_Object(rloc, weights="1/1/255/0", flags=0o01, loc_id="ISP1"): <NEW_LINE> <INDENT> sys.path.insert(0, workspace) <NEW_LINE> from LISPFlowMappingYANGBindings.odl_mappingservice_rpc.add_mapping.input import ( input, ) <NEW_LINE> rpc_input = input() <NEW_LINE> lrecord_obj = rpc_input.mapping_record.LocatorRecord <NEW_LINE> lrecord_obj.add(loc_id) <NEW_LINE> lrecord_ele = weights.split("/") <NEW_LINE> lrecord_obj[loc_id].priority = lrecord_ele[0] <NEW_LINE> lrecord_obj[loc_id].weight = lrecord_ele[1] <NEW_LINE> lrecord_obj[loc_id].multicastPriority = lrecord_ele[2] <NEW_LINE> lrecord_obj[loc_id].multicastWeight = lrecord_ele[3] <NEW_LINE> laddr_obj = lrecord_obj[loc_id].rloc <NEW_LINE> laddr_obj = Get_LispAddress_Object(rloc, laddr_obj=laddr_obj) <NEW_LINE> lrecord_obj[loc_id].localLocator = flags % 10 <NEW_LINE> lrecord_obj[loc_id].rlocProbed = (flags // 10) % 10 <NEW_LINE> lrecord_obj[loc_id].routed = (flags // 100) % 10 <NEW_LINE> return lrecord_obj | Description: Returns locator record object from pyangbind generated classes
Returns: locator record object
Params:
rloc: eid_string for lisp address object
weights: priority/weight/multicastPriority/multicastWeight
flags: Three-digit parameter in the sequence routed->rlocProbed->localLocator (hundreds->tens->ones digit)
loc_id: id of locator record object | 625941b456b00c62f0f14421 |
def __init__(self, width=None, height=None, name=None, bounds=None, bg=(1, 1, 1), format=None): <NEW_LINE> <INDENT> if bounds is None: <NEW_LINE> <INDENT> assert width is not None <NEW_LINE> assert height is not None <NEW_LINE> bounds = Bounds(0, 0, width, height) <NEW_LINE> <DEDENT> self.bounds = bounds <NEW_LINE> self.width = int(self.bounds.width) <NEW_LINE> self.height = int(self.bounds.height) <NEW_LINE> self.name, self.format = name_and_format(name, format) <NEW_LINE> if self.format == 'png': <NEW_LINE> <INDENT> self.surface = cairo.ImageSurface(cairo.Format.ARGB32, self.width, self.height) <NEW_LINE> <DEDENT> elif self.format == 'svg': <NEW_LINE> <INDENT> self.surface = cairo.SVGSurface(self.name, self.width, self.height) <NEW_LINE> <DEDENT> self.ctx = cairo.Context(self.surface) <NEW_LINE> self.ctx.set_antialias(cairo.Antialias.BEST) <NEW_LINE> self.ctx.set_line_cap(cairo.LineCap.ROUND) <NEW_LINE> self.ctx.set_line_join(cairo.LineJoin.MITER) <NEW_LINE> self.translate(-self.bounds.llx, -self.bounds.lly) <NEW_LINE> if bg: <NEW_LINE> <INDENT> with self.style(rgb=bg): <NEW_LINE> <INDENT> self.rectangle(self.bounds.llx, self.bounds.lly, self.width, self.height) <NEW_LINE> self.fill() | Create a new Cairo drawing.
If `bounds` is provided, it's a Bounds describing the extent of the
drawing. Otherwise, provide `width` and `height` to specify a size
explicitly.
`bg` is the background color to paint initially. | 625941b4a4f1c619b28afe09 |
def process_list_system_clients(self): <NEW_LINE> <INDENT> cmd = shlex.split("/usr/bin/curl http://localhost:8080/list/system/clients/asdf") <NEW_LINE> p = subprocess.Popen(cmd, stdout=subprocess.PIPE) <NEW_LINE> out, err = p.communicate() <NEW_LINE> error = [] <NEW_LINE> if err: error.append(err) <NEW_LINE> return error, out | Tests /list/system/clients/(.*)
:return: returns any errors | 625941b48a43f66fc4b53e2f |
def test_fetch_harvestable_not_nested(self): <NEW_LINE> <INDENT> with open(self.test_simple_object) as f: <NEW_LINE> <INDENT> non_nested_obj = json.load(f) <NEW_LINE> <DEDENT> harvestable = self.dh.fetch_harvestable(non_nested_obj) <NEW_LINE> self.assertEqual(harvestable, [non_nested_obj]) | test that a non-nested object is identified as harvestable | 625941b4fbf16365ca6f5f88
def construct_relationships(filename): <NEW_LINE> <INDENT> dict1 = read_files(filename) <NEW_LINE> people = ListNode(Person(None)) <NEW_LINE> temp = people <NEW_LINE> for key in dict1: <NEW_LINE> <INDENT> if temp.val.get_name() is None: <NEW_LINE> <INDENT> temp.val.set_name(key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> temp.next = ListNode(Person(key)) <NEW_LINE> temp = temp.next <NEW_LINE> <DEDENT> for string in dict1[key]: <NEW_LINE> <INDENT> if temp.val.get_friends() is None: <NEW_LINE> <INDENT> temp.val.set_friends(string) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> temp.val.add_friend(string) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return people | Builds a linked list in which each node holds one person's name and that person's friends. The head node starts with name None; the first key from read_files fills it in, and every later key is appended as a new node. For each key, the first friend is set with set_friends and subsequent friends are added with add_friend. Returns the head of the populated list. | 625941b4e1aae11d1e749a79
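A hedged usage sketch, assuming read_files returns a dict such as {'alice': ['bob', 'carol'], 'bob': ['alice']} and that Person exposes the getters used above:

people = construct_relationships('friends.txt')  # hypothetical input file
node = people
while node is not None:
    print(node.val.get_name(), '->', node.val.get_friends())
    node = node.next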
def get_id_context_inter_etabs(self): <NEW_LINE> <INDENT> s = "SELECT id" " FROM {entete}context" " WHERE contextlevel = %(context_level)s" " AND instanceid = %(instanceid)s" .format(entete=self.entete) <NEW_LINE> self.mark.execute(s, params={'context_level': self.constantes.niveau_ctx_categorie, 'instanceid': self.constantes.id_instance_moodle}) <NEW_LINE> ligne = self.safe_fetchone() <NEW_LINE> if ligne is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return ligne[0] | Retrieves the id of the context of the inter-institution (inter-etablissements) category.
:return: the context id, or None if no matching row is found | 625941b40a366e3fb873e5db
def hash256_bit_length(data, length): <NEW_LINE> <INDENT> return hash_function(data, length, 256) | :param data: the data to hash :param length: desired output length in bits :return: the result of hash_function with a 256-bit base width | 625941b4c4546d3d9de727fd
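hash_function is not defined in this row; the sketch below is one plausible reading (an assumption, not the original helper): take a 256-bit SHA-2 digest and keep the top `length` bits:

import hashlib

def hash_function(data, length, bits):
    # assumed semantics: hash `data` with a `bits`-wide digest,
    # then truncate to the top `length` bits
    assert bits == 256 and 0 < length <= bits
    h = int.from_bytes(hashlib.sha256(data).digest(), 'big')
    return h >> (bits - length)

hash256_bit_length(b'hello', 16)  # a 16-bit integer under this reading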
def setContentData(content): <NEW_LINE> <INDENT> pass | Sets the content that will be used as the form processing context.
| 625941b415fb5d323cde08cd |
def describe_log_inspection_rule_on_computer(self, computer_id, log_inspection_rule_id, api_version, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> if kwargs.get('async_req'): <NEW_LINE> <INDENT> return self.describe_log_inspection_rule_on_computer_with_http_info(computer_id, log_inspection_rule_id, api_version, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = self.describe_log_inspection_rule_on_computer_with_http_info(computer_id, log_inspection_rule_id, api_version, **kwargs) <NEW_LINE> return data | Describe a log inspection rule # noqa: E501
Describe a log inspection rule including computer-level overrides. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.describe_log_inspection_rule_on_computer(computer_id, log_inspection_rule_id, api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int computer_id: The ID number of the computer. (required)
:param int log_inspection_rule_id: The ID number of the log inspection rule. (required)
:param str api_version: The version of the api being called. (required)
:param bool overrides: Show only overrides defined for the current computer.
:return: LogInspectionRule
If the method is called asynchronously,
returns the request thread. | 625941b43539df3088e2e110 |
def get_activity_feed_deleted(self, **kwargs): <NEW_LINE> <INDENT> kwargs.update({'api_key': self.params['api_key'], 'activity_type': 'delete-destination', }) <NEW_LINE> if self.check_required_params(kwargs, ['api_key', 'route_id']): <NEW_LINE> <INDENT> self.response = self.api._request_get(ACTIVITY_FEED, kwargs) <NEW_LINE> return self.response.json() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ParamValueException('params', 'Params are not complete') | Get Activity Feed Deleted using GET request
:return: API response
:raise: ParamValueException if required params are not present. | 625941b45166f23b2e1a4f1e |
def update_package_data() -> None: <NEW_LINE> <INDENT> download_all_data(_const.DATA_PATH) | Update all data files currently installed by the module.
| 625941b4236d856c2ad445a4 |
def lemonadeChange(self, bills): <NEW_LINE> <INDENT> five = ten = 0 <NEW_LINE> for bill in bills: <NEW_LINE> <INDENT> if bill == 20: <NEW_LINE> <INDENT> if ten and five: <NEW_LINE> <INDENT> ten -=1 <NEW_LINE> five -=1 <NEW_LINE> <DEDENT> elif five >= 3: <NEW_LINE> <INDENT> five -=3 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> elif bill == 10: <NEW_LINE> <INDENT> if five: <NEW_LINE> <INDENT> five -= 1 <NEW_LINE> ten += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> five += 1 <NEW_LINE> <DEDENT> <DEDENT> return True | :type bills: List[int]
:rtype: bool | 625941b471ff763f4b549453 |
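Two worked calls for lemonadeChange above, traced against the code:

lemonadeChange([5, 5, 5, 10, 20])  # True: the 20 is changed with one 10 and one 5
lemonadeChange([5, 10, 20])        # False: only one 10 and no 5 left when the 20 arrives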
def _init_writer(self, config): <NEW_LINE> <INDENT> tb_base = os.environ['TENSORBOARD_DIR'] if 'TENSORBOARD_DIR' in os.environ else None <NEW_LINE> tb_dir = config.get("tensorboard_dir", "default") <NEW_LINE> if tb_base is None or tb_dir is None: <NEW_LINE> <INDENT> self.writer = DevNullSummaryWriter() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if tb_dir == "default": <NEW_LINE> <INDENT> tb_dir = strftime("%m-%d/%H-%M-%S", gmtime()) <NEW_LINE> <DEDENT> self.writer = SummaryWriter(os.path.join(tb_base, tb_dir)) | set the writer | 625941b47cff6e4e8111774c |
def __init__(self, variety): <NEW_LINE> <INDENT> self._variety = variety <NEW_LINE> if not variety.is_orbifold(): <NEW_LINE> <INDENT> raise NotImplementedError('Requires an orbifold toric variety.') <NEW_LINE> <DEDENT> R = PolynomialRing(QQ, variety.variable_names()) <NEW_LINE> self._polynomial_ring = R <NEW_LINE> I = variety._fan.linear_equivalence_ideal(R) + variety._fan.Stanley_Reisner_ideal(R) <NEW_LINE> super(CohomologyRing, self).__init__(R, I, names=variety.variable_names()) | See :class:`CohomologyRing` for documentation.
TESTS::
sage: P2 = toric_varieties.P2()
sage: P2.cohomology_ring()
Rational cohomology ring of a 2-d CPR-Fano toric variety covered by 3 affine patches
::
sage: cone1 = Cone([(1,0)]); cone2 = Cone([(1,0)])
sage: cone1 is cone2
False
sage: fan1 = Fan([cone1]); fan2 = Fan([cone2])
sage: fan1 is fan2
False
sage: X1 = ToricVariety(fan1); X2 = ToricVariety(fan2)
sage: X1 is X2
False
sage: X1.cohomology_ring() is X2.cohomology_ring() # see http://trac.sagemath.org/sage_trac/ticket/10325
True
sage: TDiv = X1.toric_divisor_group()
sage: X1.toric_divisor_group() is TDiv
True
sage: X2.toric_divisor_group() is TDiv
True
sage: TDiv.scheme() is X1 # as you expect
True
sage: TDiv.scheme() is X2 # perhaps less obvious, but toric_divisor_group is unique!
False
sage: TDiv.scheme() == X2 # isomorphic, but not necessarily identical
True
sage: TDiv.scheme().cohomology_ring() is X2.cohomology_ring() # this is where it gets tricky
True
sage: TDiv.gen(0).Chern_character() * X2.cohomology_ring().one()
[1] | 625941b426068e7796caea9d |
def __addViewPanelMenu(self, panel, title): <NEW_LINE> <INDENT> if not self.__haveMenu: <NEW_LINE> <INDENT> return None, [] <NEW_LINE> <DEDENT> actionItems = [] <NEW_LINE> actionNames = [name for (name, obj) in panel.getActions()] <NEW_LINE> actionTitles = {} <NEW_LINE> pluginCtrls = plugins.listControls(type(panel)) <NEW_LINE> if len(pluginCtrls) > 0: <NEW_LINE> <INDENT> actionNames.append(None) <NEW_LINE> ctrlOrder = panel.controlOrder() <NEW_LINE> if ctrlOrder is not None: <NEW_LINE> <INDENT> names, clss = zip(*pluginCtrls.items()) <NEW_LINE> ctrlOrder = [plugins.lookupControl(c) for c in ctrlOrder] <NEW_LINE> indices = [ctrlOrder.index(c) if c in ctrlOrder else len(pluginCtrls) for c in clss] <NEW_LINE> pluginCtrls = sorted(zip(indices, names, clss)) <NEW_LINE> pluginCtrls = {t[1] : t[2] for t in pluginCtrls} <NEW_LINE> <DEDENT> for ctrlName, ctrlType in pluginCtrls.items(): <NEW_LINE> <INDENT> name = ctrlType.__name__ <NEW_LINE> actionNames.append(name) <NEW_LINE> actionTitles[name] = ctrlName <NEW_LINE> <DEDENT> actionNames.append('removeAllPanels') <NEW_LINE> <DEDENT> if len(actionNames) == 0: <NEW_LINE> <INDENT> return None, [] <NEW_LINE> <DEDENT> menu = wx.Menu() <NEW_LINE> submenu = self.__settingsMenu.AppendSubMenu(menu, title) <NEW_LINE> actionNames.append(None) <NEW_LINE> actionNames.append('removeFromFrame') <NEW_LINE> actionItems.extend(self.populateMenu( menu, panel, actionNames=actionNames, actionTitles=actionTitles)) <NEW_LINE> self.refreshToolsMenu() <NEW_LINE> return submenu, actionItems | Called by :meth:`addViewPanel`. Adds a menu item for the newly
created :class:`.ViewPanel` instance, and adds any tools
for the view panel to the tools menu.
:arg panel: The newly created ``ViewPanel`` instance.
:arg title: The name given to the ``panel``. | 625941b499fddb7c1c9de159 |
def testStorageSource(self): <NEW_LINE> <INDENT> model = swagger_client.models.storage_source.StorageSource() | Test StorageSource | 625941b49f2886367277a65f |
def get_id(self, f): <NEW_LINE> <INDENT> res = None <NEW_LINE> for identifiant in self._files: <NEW_LINE> <INDENT> if self._files[identifiant] == f: <NEW_LINE> <INDENT> res = identifiant <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> return res | Returns the identifier whose stored file name equals f, or None if no match is found. | 625941b4a05bb46b383ec5f3
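A minimal harness for get_id above (the class name and the sample mapping are hypothetical; only the _files attribute is assumed):

class FileStore:
    def __init__(self, files):
        self._files = files  # identifier -> file name

    def get_id(self, f):  # same body as the row above
        res = None
        for identifiant in self._files:
            if self._files[identifiant] == f:
                res = identifiant
                break
        return res

store = FileStore({1: 'a.txt', 2: 'b.txt'})
print(store.get_id('b.txt'))  # 2
print(store.get_id('c.txt'))  # None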
def _check_sparse_inputs(options, A_ub, A_eq): <NEW_LINE> <INDENT> _sparse_presolve = options.pop('_sparse_presolve', False) <NEW_LINE> if _sparse_presolve and A_eq is not None: <NEW_LINE> <INDENT> A_eq = sps.coo_matrix(A_eq) <NEW_LINE> <DEDENT> if _sparse_presolve and A_ub is not None: <NEW_LINE> <INDENT> A_ub = sps.coo_matrix(A_ub) <NEW_LINE> <DEDENT> sparse = options.get('sparse', False) <NEW_LINE> if not sparse and (sps.issparse(A_eq) or sps.issparse(A_ub)): <NEW_LINE> <INDENT> options['sparse'] = True <NEW_LINE> warn("Sparse constraint matrix detected; setting 'sparse':True.", OptimizeWarning) <NEW_LINE> <DEDENT> return options, A_ub, A_eq | Check the provided ``A_ub`` and ``A_eq`` matrices conform to the specified
optional sparsity variables.
Parameters
----------
A_ub : 2D array, optional
2D array which, when matrix-multiplied by ``x``, gives the values of
the upper-bound inequality constraints at ``x``.
A_eq : array_like, optional
2D array which, when matrix-multiplied by ``x``, gives the values of
the equality constraints at ``x``.
options : dict
A dictionary of solver options. All methods accept the following
generic options:
maxiter : int
Maximum number of iterations to perform.
disp : bool
Set to True to print convergence messages.
For method-specific options, see :func:`show_options('linprog')`.
Returns
-------
A_ub : 2D array
2D array which, when matrix-multiplied by ``x``, gives the values of
the upper-bound inequality constraints at ``x``.
A_eq : array_like
2D array which, when matrix-multiplied by ``x``, gives the values of
the equality constraints at ``x``.
options : dict
A dictionary of solver options. All methods accept the following
generic options:
maxiter : int
Maximum number of iterations to perform.
disp : bool
Set to True to print convergence messages.
For method-specific options, see :func:`show_options('linprog')`. | 625941b426238365f5f0ec2e |
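A direct-call sketch for the helper above; it is an internal piece of scipy.optimize's linprog pipeline, so invoking it standalone is for illustration only:

import numpy as np
import scipy.sparse as sps

A_ub = sps.coo_matrix(np.eye(2))
opts, A_ub, A_eq = _check_sparse_inputs({}, A_ub, None)
print(opts['sparse'])  # True, after an OptimizeWarning that a sparse matrix was detected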