code | docstring | _id |
---|---|---|
def numJewelsInStones(self, J, S): <NEW_LINE> <INDENT> return sum(s in set(J) for s in S) | :type J: str
:type S: str
:rtype: int | 625941b75166f23b2e1a4f82 |
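As fixed above, the method counts how many stones in `S` are jewels listed in `J`; putting `J` in a set keeps each membership test O(1). A quick check, assuming the usual LeetCode-style `Solution` wrapper:

```python
class Solution:
    def numJewelsInStones(self, J, S):
        jewels = set(J)  # O(1) membership tests
        return sum(s in jewels for s in S)

print(Solution().numJewelsInStones("aA", "aAAbbbb"))  # 3
```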
def get_command_body(update, with_id=False): <NEW_LINE> <INDENT> command_list = update.message.text.split(' ') <NEW_LINE> if with_id and len(command_list) > 2: <NEW_LINE> <INDENT> return { 'id': command_list[1], 'body': ' '.join(command_list[2:]) } <NEW_LINE> <DEDENT> if not with_id and len(command_list) > 1: <NEW_LINE> <INDENT> return { 'body': ' '.join(command_list[1:]) } <NEW_LINE> <DEDENT> return None | Returns command body
:param Update update:
:param bool with_id:
:return: dict with 'body' (and 'id' when with_id is True), or None | 625941b7b7558d58953c4d45 |
def write_a_bunch_of_values(self, how_many): <NEW_LINE> <INDENT> self.container = DaosContainer(self.context) <NEW_LINE> self.container.create(self.pool.pool.handle) <NEW_LINE> self.container.open() <NEW_LINE> ioreq = IORequest(self.context, self.container, None) <NEW_LINE> print("Started Writing the Dataset-----------\n") <NEW_LINE> inc = 50000 <NEW_LINE> last_key = inc <NEW_LINE> for key in range(how_many): <NEW_LINE> <INDENT> c_dkey = create_string_buffer("dkey {0}".format(key)) <NEW_LINE> c_akey = create_string_buffer("akey {0}".format(key)) <NEW_LINE> c_value = create_string_buffer( "some data that gets stored with the key {0}".format(key)) <NEW_LINE> c_size = ctypes.c_size_t(ctypes.sizeof(c_value)) <NEW_LINE> ioreq.single_insert(c_dkey, c_akey, c_value, c_size) <NEW_LINE> if key > last_key: <NEW_LINE> <INDENT> print("written: {}".format(key)) <NEW_LINE> sys.stdout.flush() <NEW_LINE> last_key = key + inc <NEW_LINE> <DEDENT> <DEDENT> print("Started Verification of the Dataset-----------\n") <NEW_LINE> last_key = inc <NEW_LINE> for key in range(how_many): <NEW_LINE> <INDENT> c_dkey = create_string_buffer("dkey {0}".format(key)) <NEW_LINE> c_akey = create_string_buffer("akey {0}".format(key)) <NEW_LINE> the_data = "some data that gets stored with the key {0}".format(key) <NEW_LINE> val = ioreq.single_fetch(c_dkey, c_akey, len(the_data)+1) <NEW_LINE> exp_value = val.value.decode("utf-8") <NEW_LINE> if the_data != exp_value: <NEW_LINE> <INDENT> self.fail("ERROR: Data mismatch for dkey = {0}, akey={1}, " "Expected Value={2} and Received Value={3}\n" .format("dkey {0}".format(key), "akey {0}".format(key), the_data, exp_value)) <NEW_LINE> <DEDENT> if key > last_key: <NEW_LINE> <INDENT> print("verified: {}".format(key)) <NEW_LINE> sys.stdout.flush() <NEW_LINE> last_key = key + inc <NEW_LINE> <DEDENT> <DEDENT> print("starting destroy") <NEW_LINE> self.container.close() <NEW_LINE> self.container.destroy() <NEW_LINE> print("destroy complete") | Write data to an object, each with a dkey and akey. The how_many
parameter determines how many key:value pairs are written. | 625941b78e71fb1e9831d5d7 |
def test_update_user_publicity_to_true_accepted(client): <NEW_LINE> <INDENT> headers = { 'Content-Type': 'application/json', 'Authorization': f'Bearer {TOKEN}' } <NEW_LINE> data = { 'publicity': True } <NEW_LINE> res = client.put( USER_PUBLICITY_ROUTE, data=json.dumps(data), headers=headers ) <NEW_LINE> assert res.status_code == 200 | Change the user's publicity to true, to pass the next test
if there is no other public user in the DB | 625941b7ad47b63b2c509db3
def test_messageSentLocalAndRemoteUserOk(self): <NEW_LINE> <INDENT> sc_user = 'test-user-sc' <NEW_LINE> gs_user = 'test-user-gs' <NEW_LINE> username_test = 'test-user-sc' <NEW_LINE> mockSC = Mock() <NEW_LINE> mockSC.callRemote = MagicMock(return_value=True) <NEW_LINE> mockGS = Mock() <NEW_LINE> self.sp.factory.active_protocols[str(sc_user)] = mockSC <NEW_LINE> self.sp.factory.active_protocols[str(gs_user)] = mockGS <NEW_LINE> self.sp.factory.active_connections[str(sc_user)] = str(gs_user) <NEW_LINE> self.sp.factory.active_connections[str(gs_user)] = str(sc_user) <NEW_LINE> self.sp.username = username_test <NEW_LINE> self.sp.rpc = Mock() <NEW_LINE> self.sp.rpc.testing = MagicMock(return_value=True) <NEW_LINE> time = misc.get_utc_timestamp() <NEW_LINE> res = self.sp.vSendMsg(self.correctFrame, time) <NEW_LINE> return self.assertTrue(res['bResult']) | Remove the local protocol and the active connections which involve it.
callRemote methods are mocked for testing purposes.
:return: assertTrue statement | 625941b7d164cc6175782b77 |
def __init__(self): <NEW_LINE> <INDENT> self.stack = Stack() <NEW_LINE> self.maxvals = Stack() | Initialize an empty stack | 625941b73d592f4c4ed1ceaa |
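The `maxvals` stack above is half of the standard max-tracking stack pattern: pushing the running maximum alongside every element makes the current maximum readable in O(1). A minimal sketch, assuming plain lists stand in for the row's `Stack` class (method names are illustrative):

```python
class MaxStack:
    def __init__(self):
        self.stack = []    # every pushed value
        self.maxvals = []  # running maximum after each push

    def push(self, x):
        self.stack.append(x)
        self.maxvals.append(x if not self.maxvals else max(x, self.maxvals[-1]))

    def pop(self):
        self.maxvals.pop()
        return self.stack.pop()

    def peekMax(self):
        return self.maxvals[-1]
```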
@transaction.atomic <NEW_LINE> def refresh_state(modeladmin, request, queryset): <NEW_LINE> <INDENT> opts = queryset.model._meta <NEW_LINE> module = '%s.%s' % (opts.app_label, opts.object_name) <NEW_LINE> ids = queryset.values_list('pk', flat=True) <NEW_LINE> try: <NEW_LINE> <INDENT> result = get_state.delay(module, ids=list(ids), lock=False) <NEW_LINE> result.get() <NEW_LINE> <DEDENT> except OperationLocked: <NEW_LINE> <INDENT> msg = 'This operation is currently being executed by another process.' <NEW_LINE> messages.error(request, msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg = 'The state of %d %ss has been updated.' % (queryset.count(), opts.object_name) <NEW_LINE> modeladmin.message_user(request, msg) | get_state from Node/Sliver queryset synchronously | 625941b794891a1f4081b8d1
def __init__( self, *, type: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(ResourceIdentity, self).__init__(**kwargs) <NEW_LINE> self.principal_id = None <NEW_LINE> self.tenant_id = None <NEW_LINE> self.type = type | :keyword type: Type of Identity. Supported values are: 'None', 'SystemAssigned'.
:paramtype type: str | 625941b730c21e258bdfa2c6 |
def test_copy_to_with_more_failures_than_max_attempts(self): <NEW_LINE> <INDENT> num_records = 100000 <NEW_LINE> start, end = self.prepare_copy_to_with_failures() <NEW_LINE> logger.debug('Running stress') <NEW_LINE> stress_table = 'keyspace1.standard1' <NEW_LINE> self.node1.stress(['write', 'n={}'.format(num_records), 'no-warmup', '-rate', 'threads=50']) <NEW_LINE> tempfile = self.get_temp_file() <NEW_LINE> failures = {'failing_range': {'start': start, 'end': end, 'num_failures': 5}} <NEW_LINE> os.environ['CQLSH_COPY_TEST_FAILURES'] = json.dumps(failures) <NEW_LINE> logger.debug('Exporting to csv file: {} with {} and 3 max attempts' .format(tempfile.name, os.environ['CQLSH_COPY_TEST_FAILURES'])) <NEW_LINE> out, err, _ = self.run_cqlsh(cmds="COPY {} TO '{}' WITH MAXATTEMPTS='3'" .format(stress_table, tempfile.name)) <NEW_LINE> logger.debug(out) <NEW_LINE> logger.debug(err) <NEW_LINE> assert 'some records might be missing' in err <NEW_LINE> assert len(open(tempfile.name).readlines()) < num_records | Test exporting rows with failure injection by setting the environment variable CQLSH_COPY_TEST_FAILURES,
which is used by ExportProcess in pylib/copyutil.py to deviate its behavior from performing normal queries.
Here we set a token range that will fail more times than the maximum number of attempts, therefore
we expect this COPY TO job to fail.
@jira_ticket CASSANDRA-9304 | 625941b7d53ae8145f87a0a0 |
def convert(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(os.path.join(self.gitbook_path, self.SUMMARY), 'r') as f: <NEW_LINE> <INDENT> data = f.read() <NEW_LINE> <DEDENT> <DEDENT> except FileNotFoundError as e: <NEW_LINE> <INDENT> print('[ERROR] Not found file {}'.format(self.SUMMARY)) <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> pattern = re.compile(r'( *)\* *\[(.*?)\]\((.*)\)') <NEW_LINE> fileTree = re.findall(pattern, data) <NEW_LINE> group = [''] * self.MAX_LEVEL <NEW_LINE> for space, title, file in fileTree: <NEW_LINE> <INDENT> level = int(len(space)/self.TAB_INDENT) <NEW_LINE> group[level] = title <NEW_LINE> if not file: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> group_name = group[0] if not level else ', '.join(group[:level]) <NEW_LINE> group_name = '[{}]'.format(group_name) <NEW_LINE> filepath = os.path.join(self.gitbook_path, file.lstrip('/\\')) <NEW_LINE> filetime = self._get_file_createtime(filepath) <NEW_LINE> newfilepath = r'{}/{}-{}.md'.format(self.post_path, filetime[:10], '-'.join(title.split()).lower()) <NEW_LINE> copy2(filepath, newfilepath) <NEW_LINE> print(group_name, filepath, '->', newfilepath) <NEW_LINE> infos = { 'layout': 'post', 'title': title, 'date': filetime, 'categories': group_name } <NEW_LINE> self._update_post(newfilepath, infos) <NEW_LINE> <DEDENT> except FileNotFoundError as e: <NEW_LINE> <INDENT> print('<WARNING> Not found file {}, skipped.'.format(filepath)) <NEW_LINE> pass <NEW_LINE> <DEDENT> <DEDENT> self._duplicate_resources() | Convert gitbook document to hexo blog. | 625941b7460517430c393fb9 |
def emit(self, record: logging.LogRecord) -> None: <NEW_LINE> <INDENT> message = self.format(record) <NEW_LINE> labels: Optional[Dict[str, str]] <NEW_LINE> if self.labels and self.task_instance_labels: <NEW_LINE> <INDENT> labels = {} <NEW_LINE> labels.update(self.labels) <NEW_LINE> labels.update(self.task_instance_labels) <NEW_LINE> <DEDENT> elif self.labels: <NEW_LINE> <INDENT> labels = self.labels <NEW_LINE> <DEDENT> elif self.task_instance_labels: <NEW_LINE> <INDENT> labels = self.task_instance_labels <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> labels = None <NEW_LINE> <DEDENT> self._transport.send(record, message, resource=self.resource, labels=labels) | Actually log the specified logging record.
:param record: The record to be logged.
:type record: logging.LogRecord | 625941b776d4e153a657e95a |
def ik_solver_status_cb(self, msg): <NEW_LINE> <INDENT> self.ik_solver_status = msg.data | Obtains the status of the ik solver (as an event). | 625941b7097d151d1a222c86 |
def type_input_verify(self, verify_num): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.type_input(self.verify_loc, verify_num) <NEW_LINE> <DEDENT> except Exception as msg: <NEW_LINE> <INDENT> return "Exception reason: %s" % msg | :param verify_num: the verification code to type
:return: the exception reason on failure | 625941b721a7993f00bc7b13
def draw_Board(self): <NEW_LINE> <INDENT> score = self.board_font.render('SCORE ' + str(self.evaluator.score), 10, (255, 255, 255)) <NEW_LINE> highscore = self.board_font.render('HIGH SCORE ' + str(self.evaluator.highscore), 10, (255, 255, 255)) <NEW_LINE> self.newgame = self.board_font.render('NEW GAME', 10, (250, 250, 250)) <NEW_LINE> self.screen.fill((100, 100, 200)) <NEW_LINE> self.screen.blit(highscore,(20,20)) <NEW_LINE> self.screen.blit(score,(20,45)) <NEW_LINE> self.screen.blit(self.newgame,(480 - self.newgame.get_rect().width,20)) <NEW_LINE> pygame.draw.rect(self.screen, (90,90,190), pygame.Rect(0, 150, 500, 500)) <NEW_LINE> for col in range(0, 480, 120): <NEW_LINE> <INDENT> for row in range(150, 630, 120): <NEW_LINE> <INDENT> pygame.draw.rect(self.screen,(100, 100, 220), pygame.Rect(col + self.tiles[0].spacing, row + self.tiles[0].spacing, 100, 100)) | Draw the game board which includes score, highscore,
newgame button and backgrounds | 625941b75510c4643540f222 |
def splitCityState(citySt): <NEW_LINE> <INDENT> citySt = citySt.split(',') <NEW_LINE> if len(citySt) >= 2: <NEW_LINE> <INDENT> return ','.join( citySt[:-1] ), citySt[-1] <NEW_LINE> <DEDENT> return citySt[0], '' | Split City, ST string on comma
Arguments:
citySt (str) : Contains City, ST information
Keyword arguments:
None.
Returns:
tuple : City, state | 625941b72c8b7c6e89b355ed |
def snapshot(self, context, instance, image_id, update_task_state): <NEW_LINE> <INDENT> vm_ref = self._get_vm_opaque_ref(instance) <NEW_LINE> label = "%s-snapshot" % instance['name'] <NEW_LINE> with vm_utils.snapshot_attached_here( self._session, instance, vm_ref, label, post_snapshot_callback=update_task_state) as vdi_uuids: <NEW_LINE> <INDENT> update_task_state(task_state=task_states.IMAGE_UPLOADING, expected_state=task_states.IMAGE_PENDING_UPLOAD) <NEW_LINE> self.image_upload_handler.upload_image(context, self._session, instance, vdi_uuids, image_id) <NEW_LINE> <DEDENT> LOG.debug("Finished snapshot and upload for VM", instance=instance) | Create snapshot from a running VM instance.
:param context: request context
:param instance: instance to be snapshotted
:param image_id: id of image to upload to
Steps involved in a XenServer snapshot:
1. XAPI-Snapshot: Snapshotting the instance using XenAPI. This
creates: Snapshot (Template) VM, Snapshot VBD, Snapshot VDI,
Snapshot VHD
2. Wait-for-coalesce: The Snapshot VDI and Instance VDI both point to
a 'base-copy' VDI. The base_copy is immutable and may be chained
with other base_copies. If chained, the base_copies
coalesce together, so, we must wait for this coalescing to occur to
get a stable representation of the data on disk.
3. Push-to-data-store: Once coalesced, we call
'image_upload_handler' to upload the images. | 625941b730c21e258bdfa2c7 |
def format_result(self, response, field_counts) -> Dict[str, List[dict]]: <NEW_LINE> <INDENT> response = self._format_initial_response(response) <NEW_LINE> final_result = {'indices': []} <NEW_LINE> for index_name, search_dict in response.items(): <NEW_LINE> <INDENT> reformated_agg_dict = dict() <NEW_LINE> reformated_agg_dict["index_name"] = index_name <NEW_LINE> total_documents = search_dict['hits']['total'] <NEW_LINE> reformated_agg_dict["total_documents"] = total_documents <NEW_LINE> aggregations_dict = search_dict['aggregations'] <NEW_LINE> reformated_agg_dict["aggregations"] = self._format_aggregation_dict(aggregations_dict) <NEW_LINE> grouped_aggregations = reformated_agg_dict['aggregations'] <NEW_LINE> self._add_value_count_percentages(grouped_aggregations, total_documents, field_counts) <NEW_LINE> final_result['indices'].append(reformated_agg_dict) <NEW_LINE> <DEDENT> return final_result | Main function to format the response of aggregations.
Takes input in the form of {<index_name>: ES_agg_query_result}
:param field_counts: field counts used when computing value percentages
:param response: the raw aggregation response keyed by index name
:return: dict with an 'indices' list of per-index aggregation summaries | 625941b7925a0f43d2549c9d
def push(self, x): <NEW_LINE> <INDENT> self.stack1.append(x) | Push element x onto stack.
:type x: int
:rtype: void | 625941b7b57a9660fec336a9 |
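`stack1` above is one half of the classic queue-from-two-stacks trick: pushes land on one stack and pops drain through a second, which reverses the order back to FIFO. A minimal sketch of the rest of the structure (the second stack, `stack2`, is an assumption about the omitted methods):

```python
class QueueViaStacks:
    def __init__(self):
        self.stack1 = []  # receives pushes
        self.stack2 = []  # serves pops in FIFO order

    def push(self, x):
        self.stack1.append(x)

    def pop(self):
        if not self.stack2:
            # reversing stack1 into stack2 restores arrival order
            while self.stack1:
                self.stack2.append(self.stack1.pop())
        return self.stack2.pop()
```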
def testCreateDocTypeFailure(self): <NEW_LINE> <INDENT> pass | Test CreateDocTypeFailure | 625941b70a50d4780f666cb9 |
def detect_block_size(encryption_function): <NEW_LINE> <INDENT> string = "A" <NEW_LINE> length = len(encryption_function(string)) <NEW_LINE> previous_length = length <NEW_LINE> while (length == previous_length): <NEW_LINE> <INDENT> string += "A" <NEW_LINE> previous_length = length <NEW_LINE> length = len(encryption_function(string)) <NEW_LINE> <DEDENT> return length - previous_length | Given a block mode encryption function, determine the block size. | 625941b7eab8aa0e5d26d988 |
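The detection works because block ciphers pad the plaintext to a whole number of blocks, so the ciphertext length stays flat and then jumps by exactly one block as the input grows. A quick check against a hypothetical padded "cipher" (the 5-byte prefix and 16-byte block are made-up values):

```python
def fake_encrypt(plaintext):
    # hypothetical oracle: 5 hidden bytes prepended, padded to 16-byte blocks
    total = len(plaintext) + 5
    return "X" * (16 * (total // 16 + 1))

assert detect_block_size(fake_encrypt) == 16
```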
def create_lock(self, user, explicit=False): <NEW_LINE> <INDENT> is_new = self.lock_user is None <NEW_LINE> self.lock_user = user <NEW_LINE> if user is None: <NEW_LINE> <INDENT> self.lock_time = timezone.now() <NEW_LINE> self.save() <NEW_LINE> return <NEW_LINE> <DEDENT> self.update_lock_time(explicit, is_new) | Creates lock on translation. | 625941b782261d6c526ab2cd |
def line_color(self, *args, **kwargs): <NEW_LINE> <INDENT> return _qtgui_swig.time_sink_f_sptr_line_color(self, *args, **kwargs) | line_color(time_sink_f_sptr self, int which) -> std::string | 625941b7baa26c4b54cb0f4d |
def ids(val) -> str: <NEW_LINE> <INDENT> return f'api="{val[0]}" code={val[1]}' | A method that formats a string into a readable form for display in the console while the tests run.
Since the tests are run with many different parameters, it is necessary to understand which
combination of input parameters makes a test fail (or, conversely, pass)
:param val: the list of parameters that will be passed to the test
:return str: the formatted string | 625941b78a349b6b435e7f9e
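Because `pytest.mark.parametrize` accepts a callable for its `ids` argument, the formatter above can be wired in directly. A usage sketch with made-up (api, code) pairs:

```python
import pytest

@pytest.mark.parametrize("val", [("users", 200), ("orders", 404)], ids=ids)
def test_endpoint(val):
    api, code = val
    assert isinstance(api, str) and isinstance(code, int)
```

The generated test IDs then read like `test_endpoint[api="users" code=200]` instead of opaque indices.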
def get_time_block(self): <NEW_LINE> <INDENT> start_hours = random.choice(self.possible_start_hours) <NEW_LINE> start_minutes = random.choice(self.possible_start_minutes) <NEW_LINE> duration = random.choice(self.possible_durations) <NEW_LINE> duration_in_minutes_and_hours = self.minutes_to_hours(duration) <NEW_LINE> duration_hours = duration_in_minutes_and_hours[0] <NEW_LINE> duration_minutes = duration_in_minutes_and_hours[1] <NEW_LINE> end_hours = start_hours + duration_hours <NEW_LINE> end_minutes = start_minutes + duration_minutes <NEW_LINE> if start_minutes + duration_minutes > 60: <NEW_LINE> <INDENT> end_minutes -= 60 <NEW_LINE> end_hours += 1 <NEW_LINE> <DEDENT> start_time = datetime.time(hour=start_hours, minute=start_minutes) <NEW_LINE> end_time = datetime.time(hour=end_hours, minute=end_minutes) <NEW_LINE> return TimeBlock(start_time, end_time) | Generates a 'realistic' time block for a class. | 625941b721bff66bcd68477f |
def _checkAndAddToLogingFailsIfNeeded(self,host,t,req,reply,line): <NEW_LINE> <INDENT> while len(self.blackList)>0 and t-self.blackList[0].lastSeenTryTime> LocalData.fiveMinDelay: <NEW_LINE> <INDENT> del self.blackList[0] <NEW_LINE> self.faliureHistoryOfRecentHosts.pop(host,None) <NEW_LINE> <DEDENT> if host in self.blackList: <NEW_LINE> <INDENT> self.report(host,t,line) <NEW_LINE> return <NEW_LINE> <DEDENT> loginFail=self.isLoginFail(req,reply) <NEW_LINE> if loginFail==True: <NEW_LINE> <INDENT> self._updateRecentlyLoginAttempts(host,t) <NEW_LINE> failedAttemptTimes=self.faliureHistoryOfRecentHosts.get(host,[]) <NEW_LINE> for fat in failedAttemptTimes: <NEW_LINE> <INDENT> if t-fat>LocalData.twentySecDelay: <NEW_LINE> <INDENT> failedAttemptTimes.remove(fat) <NEW_LINE> <DEDENT> <DEDENT> failedAttemptTimes.append(t) <NEW_LINE> self.faliureHistoryOfRecentHosts[host]=failedAttemptTimes <NEW_LINE> if len(failedAttemptTimes)>2: <NEW_LINE> <INDENT> self.blackList.append(LocalData.Client(host,t)) <NEW_LINE> <DEDENT> <DEDENT> elif loginFail==False: <NEW_LINE> <INDENT> self.faliureHistoryOfRecentHosts.pop(host,None) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass | First, check whether the host is in black list or not. If it is,
report the line and return
Second, react based on the access type: whether it is a failed login,
a successful login, or neither of them.
Third, update data storage.
Parameters
----------
line: str, (=None) -> input line. If the rest of the parameters
are None, the data will be extracted from
this line.
host: str, (=None)-> hostname or ip
req: str, (=None) -> request
reply: str, (=None) -> reply code
t: datetime, (=None)-> a datetime object of timeStr
| 625941b797e22403b379cdc2 |
def __iter__(self): <NEW_LINE> <INDENT> return iter(self._offsets) | Iterate over the keys. | 625941b75fdd1c0f98dc005b |
def test_publish_invalid_is_a_noop(self): <NEW_LINE> <INDENT> entry = self.entry <NEW_LINE> latest = entry.latest <NEW_LINE> old_count = PublicationChange.objects.filter( changerecord=latest).count() <NEW_LINE> self.assertFalse(latest.publish(self.foo)) <NEW_LINE> self.assertEqual( PublicationChange.objects.filter(changerecord=latest).count(), old_count) | Publishing a change record that encodes an invalid state of an
article is a noop. | 625941b7fb3f5b602dac34b8 |
def MSE_node(act): <NEW_LINE> <INDENT> return [np.mean([MSE(data, node[1]) for data in node[2]]) for node in act] | Return list of mean MSE by node | 625941b763b5f9789fde6f0f |
def playHand(hand, wordList, n): <NEW_LINE> <INDENT> totalScore = 0 <NEW_LINE> while calculateHandlen(hand) > 0: <NEW_LINE> <INDENT> displayHand(hand) <NEW_LINE> d = input('Enter word, or a "." to indicate that you are finished: ') <NEW_LINE> if d == "." : <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> elif isValidWord(d, hand, wordList) is False: <NEW_LINE> <INDENT> print("Invalid word, please try again.\n") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> totalScore += getWordScore(d, n) <NEW_LINE> print(str(d) + " earned " +str(getWordScore(d, n)) +" points. " + "Total : " + str(totalScore) + " points") <NEW_LINE> hand = updateHand(hand, d) <NEW_LINE> calculateHandlen(hand) <NEW_LINE> <DEDENT> <DEDENT> if calculateHandlen(hand) == 0: <NEW_LINE> <INDENT> print("Run out of letters. Total score : " + str(totalScore) + " points ") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Goodbye!. Total Score : " + str(totalScore)) | Allows the user to play the given hand, as follows:
* The hand is displayed.
* The user may input a word or a single period (the string ".")
to indicate they're done playing
* Invalid words are rejected, and a message is displayed asking
the user to choose another word until they enter a valid word or "."
* When a valid word is entered, it uses up letters from the hand.
* After every valid word: the score for that word is displayed,
the remaining letters in the hand are displayed, and the user
is asked to input another word.
* The sum of the word scores is displayed when the hand finishes.
* The hand finishes when there are no more unused letters or the user
inputs a "."
hand: dictionary (string -> int)
wordList: list of lowercase strings
n: integer (HAND_SIZE; i.e., hand size required for additional points)
| 625941b7091ae35668666d90 |
def test_static_asset_path_use(self): <NEW_LINE> <INDENT> module = render.get_module( self.user, self.request, self.location, self.field_data_cache, self.course.id, static_asset_path="toy_course_dir", ) <NEW_LINE> result_fragment = module.runtime.render(module, None, 'student_view') <NEW_LINE> self.assertIn('href="/static/toy_course_dir', result_fragment.content) | when a course is loaded with do_import_static=False (see xml_importer.py), then
static_asset_path is set as an lms kv in course. That should make static paths
not be mangled (ie not changed to c4x://). | 625941b74d74a7450ccd3fed |
def changes(self, **kwargs): <NEW_LINE> <INDENT> path = self._get_id_path('changes') <NEW_LINE> response = self._GET(path, kwargs) <NEW_LINE> self._set_attrs_to_values(response) <NEW_LINE> return response | Get the changes for a person. By default only the last 24 hours are returned.
You can query up to 14 days in a single query by using the start_date
and end_date query parameters.
Args:
start_date: (optional) Filter the results with a start date.
Expected format is 'YYYY-MM-DD'.
end_date: (optional) Filter the results with a end date.
Expected format is 'YYYY-MM-DD'.
page: (optional) Minimum 1, maximum 1000, default 1.
Returns:
A dict respresentation of the JSON returned from the API. | 625941b7e5267d203edcdacb |
def __init__(self, name, *urls): <NEW_LINE> <INDENT> super(Dnf, self).__init__() <NEW_LINE> self.dnf_root = None <NEW_LINE> self.load_config_to_self("dnf_root", ["dnf.root"], "/") <NEW_LINE> self.name = name <NEW_LINE> self.urls = urls <NEW_LINE> self.dnf_base = dnf.Base() <NEW_LINE> self.dnf_base.conf.debuglevel = 0 <NEW_LINE> self.dnf_base.conf.installroot = self.dnf_root <NEW_LINE> self.dnf_base.conf.cachedir = get_temp_dir("dnf") <NEW_LINE> self.dnf_base.read_all_repos() <NEW_LINE> self.dnf_base.repos.all().disable() <NEW_LINE> for i, url in enumerate(urls): <NEW_LINE> <INDENT> if isinstance(url, six.string_types): <NEW_LINE> <INDENT> if url.startswith("/"): <NEW_LINE> <INDENT> url = "file://{0}".format(url) <NEW_LINE> <DEDENT> self.dnf_base.repos.add_new_repo("faf_{0}-{1}".format(self.name, i), self.dnf_base.conf, baseurl=[str(url)], skip_if_unavailable=False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for url_single in url: <NEW_LINE> <INDENT> if url_single.startswith("/"): <NEW_LINE> <INDENT> url_single = "file://{0}".format(url_single) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> request.urlopen(os.path.join(url_single, "repodata/repomd.xml")) <NEW_LINE> self.dnf_base.repos.add_new_repo("faf_{0}-{1}".format(self.name, i), self.dnf_base.conf, baseurl=[url_single], skip_if_unavailable=False) <NEW_LINE> break <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.log_error("No mirrors available") <NEW_LINE> raise NameError('NoMirrorsAvailable') <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> self.dnf_base.fill_sack() <NEW_LINE> <DEDENT> except dnf.exceptions.RepoError: <NEW_LINE> <INDENT> self.log_error("Repo error") | Following `url` schemes are supported:
http://, ftp://, file:// (used if full
path is passed). | 625941b715baa723493c3d9c |
def test_delete_event_with_tasks_and_awards(self): <NEW_LINE> <INDENT> event = Event.objects.get(slug="starts-today-ongoing") <NEW_LINE> role = Role.objects.create(name="NonInstructor") <NEW_LINE> badge = Badge.objects.create( name="noninstructor", title="Non-instructor", criteria="" ) <NEW_LINE> task = Task.objects.create(event=event, person=self.spiderman, role=role) <NEW_LINE> award = Award.objects.create( person=self.spiderman, badge=badge, awarded=date.today(), event=event ) <NEW_LINE> rv = self.client.post(reverse("event_delete", args=[event.slug])) <NEW_LINE> self.assertEqual(rv.status_code, 200) <NEW_LINE> content = rv.content.decode("utf-8") <NEW_LINE> self.assertIn("Failed to delete", content) <NEW_LINE> self.assertIn("tasks", content) <NEW_LINE> Event.objects.get(pk=event.pk) <NEW_LINE> Badge.objects.get(pk=badge.pk) <NEW_LINE> Task.objects.get(pk=task.pk) <NEW_LINE> Award.objects.get(pk=award.pk) | Ensure we cannot delete an event with related tasks and awards.
Deletion is prevented via Award.event's on_delete=PROTECT
and Task.event's on_delete=PROTECT. | 625941b799cbb53fe6792a11 |
def push(self, item): <NEW_LINE> <INDENT> self._queue.append(item) | Push an element to the end of the queue.
Parameters
----------
item :
The element to append. | 625941b7fb3f5b602dac34b9 |
def get_tag_member_count(self, id, member_type, level='0', member_association_type=None, headers=None, **request_parameters): <NEW_LINE> <INDENT> check_type(headers, dict) <NEW_LINE> check_type(member_type, basestring, may_be_none=False) <NEW_LINE> check_type(member_association_type, basestring) <NEW_LINE> check_type(level, basestring) <NEW_LINE> check_type(id, basestring, may_be_none=False) <NEW_LINE> if headers is not None: <NEW_LINE> <INDENT> if 'X-Auth-Token' in headers: <NEW_LINE> <INDENT> check_type(headers.get('X-Auth-Token'), basestring, may_be_none=False) <NEW_LINE> <DEDENT> <DEDENT> _params = { 'memberType': member_type, 'memberAssociationType': member_association_type, 'level': level, } <NEW_LINE> _params.update(request_parameters) <NEW_LINE> _params = dict_from_items_with_values(_params) <NEW_LINE> path_params = { 'id': id, } <NEW_LINE> with_custom_headers = False <NEW_LINE> _headers = self._session.headers or {} <NEW_LINE> if headers: <NEW_LINE> <INDENT> _headers.update(dict_of_str(headers)) <NEW_LINE> with_custom_headers = True <NEW_LINE> <DEDENT> e_url = ('/dna/intent/api/v1/tag/${id}/member/count') <NEW_LINE> endpoint_full_url = apply_path_params(e_url, path_params) <NEW_LINE> if with_custom_headers: <NEW_LINE> <INDENT> json_data = self._session.get(endpoint_full_url, params=_params, headers=_headers) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> json_data = self._session.get(endpoint_full_url, params=_params) <NEW_LINE> <DEDENT> return self._object_factory('bpm_2e9db85840fbb1cf_v2_1_1', json_data) | Returns the number of members in a given tag.
Args:
id(basestring): Tag ID.
member_type(basestring): memberType query parameter.
member_association_type(basestring): memberAssociationType query parameter.
level(basestring): level query parameter.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error. | 625941b7956e5f7376d70ca5 |
def findItemUrl(self, item, oldUrl, folderID=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if item.url == None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if item.url.find(oldUrl) > -1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> except ValueError as e: <NEW_LINE> <INDENT> print (e) <NEW_LINE> return False | Use this to find the URL for items such as Map Services.
This can also match just part of a URL, for example only the host name. | 625941b7925a0f43d2549c9e
def test_is_path(self): <NEW_LINE> <INDENT> self.assertTrue(isdir(PATH_DATA)) | assert that data folder exits | 625941b730bbd722463cbbed |
def __init__(self, sess, save_file_name, max_to_keep): <NEW_LINE> <INDENT> self.__sess = sess <NEW_LINE> self.__save_file_name = save_file_name <NEW_LINE> self.__max_to_keep = max_to_keep | Constructor of the checkpoint saver class.
:param sess: the current session
:param save_file_name: temporary path for saving the model
:param max_to_keep: number of model checkpoints to keep (older ones are pruned automatically based on max_to_keep) | 625941b745492302aab5e0ea
def get_closing_data(self, days=100, normalize=False, logreturn=False): <NEW_LINE> <INDENT> closing_data = pd.DataFrame() <NEW_LINE> for index in INDEX_LIST: <NEW_LINE> <INDENT> df = pd.read_csv(self.basedir + index + ".csv").set_index('Date') <NEW_LINE> closing_data[index] = df["Close"][:days] if days else df["Close"] <NEW_LINE> <DEDENT> closing_data = closing_data.fillna(method="ffill") <NEW_LINE> for index in INDEX_LIST: <NEW_LINE> <INDENT> if normalize: <NEW_LINE> <INDENT> closing_data[index] = closing_data[index] / max(closing_data[index]) <NEW_LINE> <DEDENT> if logreturn: <NEW_LINE> <INDENT> closing_data[index] = np.log(closing_data[index] / closing_data[index].shift()) <NEW_LINE> <DEDENT> <DEDENT> return closing_data | Reads the CSV files, extracts the closing prices from each file, and normalizes them. | 625941b73539df3088e2e175
def add(u,v): <NEW_LINE> <INDENT> assert u.D == v.D <NEW_LINE> return Vec(u.D, {x:getitem(u,x) + getitem(v,x) for x in u.D } ) | Returns the sum of the two vectors.
Make sure to add together values for all keys from u.f and v.f even if some keys in u.f do not
exist in v.f (or vice versa)
>>> a = Vec({'a','e','i','o','u'}, {'a':0,'e':1,'i':2})
>>> b = Vec({'a','e','i','o','u'}, {'o':4,'u':7})
>>> c = Vec({'a','e','i','o','u'}, {'a':0,'e':1,'i':2,'o':4,'u':7})
>>> a + b == c
True
>>> a == Vec({'a','e','i','o','u'}, {'a':0,'e':1,'i':2})
True
>>> b == Vec({'a','e','i','o','u'}, {'o':4,'u':7})
True
>>> d = Vec({'x','y','z'}, {'x':2,'y':1})
>>> e = Vec({'x','y','z'}, {'z':4,'y':-1})
>>> f = Vec({'x','y','z'}, {'x':2,'y':0,'z':4})
>>> d + e == f
True
>>> b + Vec({'a','e','i','o','u'}, {}) == b
True | 625941b792d797404e303fb4 |
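The doctests above presuppose a `Vec` with a domain `D` and a sparse mapping `f`, plus a `getitem` helper that treats absent keys as zero and an `__add__` bound to the row's `add`. A minimal sketch consistent with those doctests:

```python
class Vec:
    def __init__(self, D, f):
        self.D = D  # domain: the set of valid keys
        self.f = f  # sparse mapping; absent keys are implicitly 0

    def __eq__(self, other):
        return self.D == other.D and all(
            getitem(self, k) == getitem(other, k) for k in self.D)

def getitem(v, k):
    return v.f.get(k, 0)

Vec.__add__ = add  # assumed wiring so the doctests' `a + b` works
```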
def get_openerp_id(self, model, prestashop_id): <NEW_LINE> <INDENT> binder = self.binder_for(model) <NEW_LINE> erp_ps_id = binder.to_openerp(prestashop_id) <NEW_LINE> if erp_ps_id is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> model = self.session.pool.get(model) <NEW_LINE> erp_ps_object = model.read( self.session.cr, self.session.uid, erp_ps_id, ['openerp_id'], context=self.session.context ) <NEW_LINE> return erp_ps_object['openerp_id'][0] | Returns an openerp id from a model name and a prestashop_id.
This makes it possible to find the openerp id through the external application
model in Erp. | 625941b7fbf16365ca6f5fe6 |
def build_maximin(factor_level_ranges, num_samples=None): <NEW_LINE> <INDENT> for key in factor_level_ranges: <NEW_LINE> <INDENT> if len(factor_level_ranges[key])!=2: <NEW_LINE> <INDENT> factor_level_ranges[key][1]=factor_level_ranges[key][-1] <NEW_LINE> factor_level_ranges[key]=factor_level_ranges[key][:2] <NEW_LINE> print(f"{key} had more than two levels. Assigning the end point to the high level.") <NEW_LINE> <DEDENT> <DEDENT> factor_count=len(factor_level_ranges) <NEW_LINE> factor_lists=[] <NEW_LINE> if num_samples==None: <NEW_LINE> <INDENT> num_samples=factor_count <NEW_LINE> <DEDENT> for key in factor_level_ranges: <NEW_LINE> <INDENT> factor_lists.append(factor_level_ranges[key]) <NEW_LINE> <DEDENT> x = maximin_reconstruction(num_points=num_samples,dimension=factor_count) <NEW_LINE> factor_lists=np.array(factor_lists) <NEW_LINE> df = construct_df_from_random_matrix(x,factor_lists) <NEW_LINE> df.columns=factor_level_ranges.keys() <NEW_LINE> return df | Builds a maximin reconstructed design dataframe from a dictionary of factor/level ranges.
Only min and max values of the range are required.
Example of the dictionary:
{'Pressure':[50,70],'Temperature':[290, 350],'Flow rate':[0.9,1.0]}
num_samples: Number of samples to be generated
This algorithm carries out a user-specified number of iterations to maximize the minimal distance of a point in the set to
* other points in the set,
* existing (fixed) points,
* the boundary of the hypercube. | 625941b7090684286d50eb0a |
def test_set_mf_diff_spheroid_default_Dz(self): <NEW_LINE> <INDENT> pipes.switch('mf') <NEW_LINE> diffusion_tensor.init((1e-9, 2e6, 0, 0)) <NEW_LINE> self.assertRaises(RelaxError, self.value_fns.set, param='Dz') | Set the spheroidal diffusion tensor Dz parameter to the default value.
The functions tested are both pipe_control.value.set() and prompt.value.set(). | 625941b73346ee7daa2b2b93 |
def f_dice(a, b, timespan, pairwise=False): <NEW_LINE> <INDENT> if pairwise: <NEW_LINE> <INDENT> y_list = [None] * timespan <NEW_LINE> a = tf.expand_dims(a, 2) <NEW_LINE> a_list = tf.split(1, timespan, a) <NEW_LINE> b = tf.expand_dims(b, 1) <NEW_LINE> card_b = tf.reduce_sum(b + 1e-5, [3, 4]) <NEW_LINE> for ii in xrange(timespan): <NEW_LINE> <INDENT> y_list[ii] = 2 * f_inter(a_list[ii], b) / (tf.reduce_sum(a_list[ii] + 1e-5, [3, 4]) + card_b) <NEW_LINE> <DEDENT> return tf.concat(1, y_list) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> card_a = tf.reduce_sum(a + 1e-5, _get_reduction_indices(a)) <NEW_LINE> card_b = tf.reduce_sum(b + 1e-5, _get_reduction_indices(b)) <NEW_LINE> return 2 * f_inter(a, b) / (card_a + card_b) | Computes DICE score.
Args:
a: [B, N, H, W], or [N, H, W], or [H, W]
b: [B, N, H, W], or [N, H, W], or [H, W]
in pairwise mode, the second dimension can be different,
e.g. [B, M, H, W], or [M, H, W], or [H, W]
pairwise: whether the inputs are already aligned, outputs [B, N] or
the inputs are orderless, outputs [B, N, M]. | 625941b715fb5d323cde0934 |
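Stripped of the TensorFlow plumbing, the quantity computed is DICE(a, b) = 2|a∩b| / (|a| + |b|). A NumPy sketch on two binary masks:

```python
import numpy as np

def dice(a, b, eps=1e-5):
    # twice the intersection over the sum of the two mask areas
    return 2 * np.sum(a * b) / (np.sum(a) + np.sum(b) + eps)

a = np.array([[1, 1], [0, 0]])
b = np.array([[1, 0], [0, 0]])
print(dice(a, b))  # ~0.667: intersection 1, areas 2 and 1
```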
def compute_node_get(context, compute_id): <NEW_LINE> <INDENT> return IMPL.compute_node_get(context, compute_id) | Get a compute node by its id.
:param context: The security context
:param compute_id: ID of the compute node
:returns: Dictionary-like object containing properties of the compute node,
including its corresponding service and statistics
Raises ComputeHostNotFound if compute node with the given ID doesn't exist. | 625941b7ab23a570cc24ffab |
def test_get_mean_defaults(self): <NEW_LINE> <INDENT> data = [[0, 1, 3], [0.01, 10, 3.5], [0.02, 21, 4.0]] <NEW_LINE> p = self.SAXSProfile() <NEW_LINE> p.add_data(data) <NEW_LINE> gp = MockGP(1, 2) <NEW_LINE> m = IMP.kernel.Model() <NEW_LINE> s = IMP.isd.Scale.setup_particle(IMP.kernel.Particle(m), 3.0) <NEW_LINE> functions = {} <NEW_LINE> functions['mean'] = MockFunction() <NEW_LINE> functions['covariance'] = MockFunction() <NEW_LINE> p.set_interpolant( gp, {'sigma': s}, functions, 'test', IMP.kernel.Model(), None) <NEW_LINE> test = p.get_mean() <NEW_LINE> expected_q = numpy.linspace(0, 0.02, num=200) <NEW_LINE> expected_I = map(gp.get_posterior_mean, [[i] for i in expected_q]) <NEW_LINE> expected_err = map(lambda a: numpy.sqrt(gp.get_posterior_covariance([a], [1.0])), expected_q) <NEW_LINE> expected_mean = numpy.array(list(map(lambda a: functions['mean'](a), expected_q))) <NEW_LINE> for q, I, err, mean, v in zip(expected_q, expected_I, expected_err, expected_mean, test): <NEW_LINE> <INDENT> self.assertEqual(len(v), 4) <NEW_LINE> self.assertAlmostEqual([q], v[0]) <NEW_LINE> self.assertAlmostEqual(I, v[1]) <NEW_LINE> self.assertAlmostEqual(err, v[2]) <NEW_LINE> self.assertAlmostEqual(mean, v[3]) | test get_mean defaults | 625941b721bff66bcd684780 |
def _parse_add_response(request, response, parent): <NEW_LINE> <INDENT> parent_url = model_url(parent, request) <NEW_LINE> if not response.location: <NEW_LINE> <INDENT> errors = _parse_form_errors(response.body) <NEW_LINE> raise AssertionError( "Add operation failed. %s" % errors) <NEW_LINE> <DEDENT> if not response.location.startswith(parent_url): <NEW_LINE> <INDENT> raise AssertionError("URL mismatch: %r is not a parent of %r" % (parent_url, response.location)) <NEW_LINE> <DEDENT> s = response.location[len(parent_url):] <NEW_LINE> if s.startswith('/'): <NEW_LINE> <INDENT> s = s[1:] <NEW_LINE> <DEDENT> obj_id = s.split('/', 1)[0].split('?', 1)[0] <NEW_LINE> return parent[obj_id] | If the add form succeeded, return the object added.
Raises an exception if the add form did not redirect as expected. | 625941b72ae34c7f2600cf5c |
def test(self, input_image, output_image): <NEW_LINE> <INDENT> img = extract_n_normalize_image(input_image) <NEW_LINE> x_image = np.reshape(np.array([img]), (1, self.image_size, self.image_size, 1)) <NEW_LINE> sess, _ = self.init_session() <NEW_LINE> y_image = sess.run(self.Y, feed_dict={self.X: x_image}) <NEW_LINE> encoded_image = y_image.reshape((self.image_size, self.image_size)) <NEW_LINE> imsave(output_image, encoded_image) | Test image | 625941b74f6381625f114871 |
def set_adm_location(self): <NEW_LINE> <INDENT> adm_q = None <NEW_LINE> municip_dic = self.data_files["municipalities"] <NEW_LINE> reg_dic = self.data_files["regions"] <NEW_LINE> municip_q = utils.q_from_first_wikilink("es", self.municipio) <NEW_LINE> if utils.get_item_from_dict_by_key(dict_name=municip_dic, search_term=municip_q, search_in="item"): <NEW_LINE> <INDENT> adm_q = municip_q <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.add_to_report("municipio", self.municipio, "located_adm") <NEW_LINE> <DEDENT> if adm_q is None: <NEW_LINE> <INDENT> iso = self.iso <NEW_LINE> iso_match = utils.get_item_from_dict_by_key( dict_name=reg_dic, search_term=iso, search_in="iso") <NEW_LINE> if len(iso_match) == 1: <NEW_LINE> <INDENT> adm_q = iso_match[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.add_to_report("iso", self.iso, "located_adm") <NEW_LINE> <DEDENT> <DEDENT> if adm_q: <NEW_LINE> <INDENT> self.add_statement("located_adm", adm_q) | Set the Admin Location.
Use the linked Municipality first, checking
against external list.
If that fails, use the Region ISO code, which is a
bigger unit. | 625941b76aa9bd52df036bcd |
def disable_ui(self): <NEW_LINE> <INDENT> self.centralwidget.setEnabled(False) <NEW_LINE> self.menuProjekt.setEnabled(False) <NEW_LINE> self.menuImport.setEnabled(False) <NEW_LINE> self.menuExport.setEnabled(False) <NEW_LINE> self.actionSave.setEnabled(False) <NEW_LINE> self.actionSaveAs.setEnabled(False) | Disable the UI. | 625941b7293b9510aa2c30c4 |
def show_watch(self, ctxt, watch_name): <NEW_LINE> <INDENT> return self.call(ctxt, self.make_msg('show_watch', watch_name=watch_name), topic=_engine_topic(self.topic, ctxt, None)) | The show_watch method returns the attributes of one watch
or all watches if no watch_name is passed
:param ctxt: RPC context.
:param watch_name: Name of the watch/alarm you want to see,
or None to see all | 625941b7be383301e01b52b8 |
def equal(self, extent): <NEW_LINE> <INDENT> if self.D["end_time"] is None and extent.D["end_time"] is None: <NEW_LINE> <INDENT> if self.D["start_time"] == extent.D["start_time"]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> if self.D["end_time"] is None or extent.D["end_time"] is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self.D["start_time"] == extent.D["start_time"] and self.D["end_time"] == extent.D["end_time"]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | Return True if this temporal extent (A) is equal to the provided
temporal extent (B)
::
A |---------|
B |---------|
:param extent: The temporal extent object that is equal
during this extent
Usage:
.. code-block:: python
>>> A = TemporalExtent(start_time=5, end_time=6 )
>>> B = TemporalExtent(start_time=5, end_time=6 )
>>> A.equal(B)
True
>>> B.equal(A)
True | 625941b767a9b606de4a7ce8 |
def get_response(self,t,x,dx): <NEW_LINE> <INDENT> equa = EquationClass.Equation.get_instance() <NEW_LINE> if self.switch == True: <NEW_LINE> <INDENT> dy = -2*np.sign(equa.dy)*200 <NEW_LINE> if abs(equa.dy - self.ddy) > abs(2*equa.b0): <NEW_LINE> <INDENT> self.switch = False <NEW_LINE> dy=0 <NEW_LINE> <DEDENT> <DEDENT> elif abs(self.line - x) < equa.step: <NEW_LINE> <INDENT> dy = -2*np.sign(equa.dy)*100 <NEW_LINE> self.switch = True <NEW_LINE> self.ddy = equa.dy <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dy = 0 <NEW_LINE> <DEDENT> y = functions.sign(t, x) <NEW_LINE> return y, dy | get the output | 625941b730dc7b7665901795
def preorder(self, root: 'Node') -> List[int]: <NEW_LINE> <INDENT> return [root.val] + [v for c in root.children for v in self.preorder(c)] if root else [] | Return the preorder traversal of the values in an N-ary tree. | 625941b7d18da76e235322fc
def dt_to_ut(jd): <NEW_LINE> <INDENT> return jd - deltaT_seconds(jd) / seconds_per_day | Convert Julian Day from dynamical to universal time.
Arguments:
- `jd` : (int) Julian Day number (dynamical time)
Returns:
- Julian Day number : (int) (universal time) | 625941b726238365f5f0ec94 |
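The body is just a unit conversion: ΔT is in seconds, Julian Days are in days, so jd_ut = jd_td − ΔT/86400. A worked sketch with a stub `deltaT_seconds` (69 s is a rough stand-in for dates near 2020):

```python
seconds_per_day = 86400.0

def deltaT_seconds(jd):
    return 69.0  # assumed constant; the real table varies with the date

def dt_to_ut(jd):
    return jd - deltaT_seconds(jd) / seconds_per_day

jd_td = 2458849.5       # 2020-01-01 00:00 in dynamical time
print(dt_to_ut(jd_td))  # about 69 s (0.0008 days) earlier
```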
def write(self,fname,vectors=None): <NEW_LINE> <INDENT> c = Interface.XYZ.XYZ() <NEW_LINE> webpath = c.write(fname,geoms=[self],vectors=vectors) <NEW_LINE> return webpath | The actual function that writes coords is in the XYZ parser,
and it takes an object of class ListGeoms as an argument.
Here, we have only one geometry, so we would be expected to create
an instance of ListGeoms. However, we can take a shortcut
by providing a simple list instead of ListGeoms to
write coords | 625941b7aad79263cf390866 |
def accuracy(predictions, labels): <NEW_LINE> <INDENT> return 100.0 * np.sum(np.argmax(predictions, 1) == np.argmax(labels, 1)) / predictions.shape[0] | Return accuracy for a batch | 625941b75fcc89381b1e14ef |
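A quick check of the formula on a two-sample batch of one-hot labels:

```python
import numpy as np

predictions = np.array([[0.8, 0.1, 0.1],   # argmax 0 -> correct
                        [0.2, 0.5, 0.3]])  # argmax 1 -> wrong (label is 2)
labels = np.array([[1, 0, 0],
                   [0, 0, 1]])
print(accuracy(predictions, labels))  # 50.0
```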
def set(self, other): <NEW_LINE> <INDENT> if isinstance(other, LagrangeInterpolation): <NEW_LINE> <INDENT> self.pos = other.pos <NEW_LINE> self.lx = other.lx <NEW_LINE> self.dlx = other.dlx <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError("Expects LagrangeInterpolation") | Sets this class according to another python instance of itself | 625941b766673b3332b91ec2 |
def test_icon_png(self): <NEW_LINE> <INDENT> path = ':/plugins/RoadNetworkCleaner/icon.png' <NEW_LINE> icon = QIcon(path) <NEW_LINE> self.assertFalse(icon.isNull()) | Test that the plugin icon can be loaded. | 625941b7e1aae11d1e749ade
def shutdown(self): <NEW_LINE> <INDENT> self.pubsub.disconnect() | Terminate connections and server. | 625941b78e05c05ec3eea19c |
def __init__(self, max_distance): <NEW_LINE> <INDENT> self.max_distance = max_distance | Initializes the noise module.
Args:
max_distance: The maximum permutation distance. | 625941b71d351010ab855949 |
def objective_function(x): <NEW_LINE> <INDENT> for i in range(len(embedding_lst)): <NEW_LINE> <INDENT> if (x == embedding_lst[i]).all(): <NEW_LINE> <INDENT> return eval_lst[i] | Map a sampled domain to evaluation results returned from the model
:param x: domain sampled from bayesian optimization
:return: the corresponding evaluation result | 625941b7a4f1c619b28afe6d |
def restoreIpAddresses2(self, s): <NEW_LINE> <INDENT> if len(s) < 4 or len(s) > 12: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> res = [] <NEW_LINE> cur = [] <NEW_LINE> self.dfs(s, 0, 0, cur, res) <NEW_LINE> result = self.norm(s, res) <NEW_LINE> return result | :type s: str
:rtype: List[str] | 625941b756b00c62f0f14488 |
def _calculate_column_widths(self, rows: _Rows) -> None: <NEW_LINE> <INDENT> for column in range(0, self._number_of_columns): <NEW_LINE> <INDENT> lengths = [0] <NEW_LINE> for row in rows: <NEW_LINE> <INDENT> if isinstance(row, TableSeparator): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> row_ = row.copy() <NEW_LINE> for i, cell in enumerate(row_): <NEW_LINE> <INDENT> if isinstance(cell, TableCell): <NEW_LINE> <INDENT> text_content = self._io.remove_format(cell) <NEW_LINE> text_length = len(text_content) <NEW_LINE> if text_length: <NEW_LINE> <INDENT> length = math.ceil(text_length / cell.colspan) <NEW_LINE> content_columns = [ text_content[i : i + length] for i in range(0, text_length, length) ] <NEW_LINE> for position, content in enumerate(content_columns): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> row_[i + position] = content <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> row_.append(content) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> lengths.append(self._get_cell_width(row_, column)) <NEW_LINE> <DEDENT> self._effective_column_widths[column] = ( max(lengths) + len(self._style.cell_row_content_format) - 2 ) | Calculates column widths. | 625941b763f4b57ef0000f4e |
def DataArrayInt_BuildIntersection(*args): <NEW_LINE> <INDENT> return _MEDCouplingRemapper.DataArrayInt_BuildIntersection(*args) | BuildIntersection(std::vector<(p.q(const).ParaMEDMEM::DataArrayInt,std::allocator<(p.q(const).ParaMEDMEM::DataArrayInt)>)> arr) -> DataArrayInt
DataArrayInt_BuildIntersection(PyObject li) -> DataArrayInt
1 | 625941b7377c676e91271fd5 |
def modelo_para_funcoes_normais(): <NEW_LINE> <INDENT> pass | def make_dict(chave, valor):
return {chave: valor}
def test_make_dict():
assert make_dict('chave', 'valor') == {'chave': 'valor'}
def turn_list_into_str(list_items: list):
return "".join(list_items)
def test_turn_list_into_str():
assert turn_list_into_str(['123']) == '123' | 625941b7c432627299f04a6f |
def rot_sigmay(self, theta,phi): <NEW_LINE> <INDENT> theta = np.radians(theta) <NEW_LINE> phi= np.radians(phi) <NEW_LINE> c1 = cos(theta) <NEW_LINE> s1 = sin(theta) <NEW_LINE> c2 = cos(phi) <NEW_LINE> s2 = sin(phi) <NEW_LINE> c3 = 1. <NEW_LINE> s3 = 0. <NEW_LINE> rotmat = np.array([ [c2 , s2*s3, c3*s2], [s1*s2, c1*c3 - c2*s1*s3, -c1*s3 - c2*c3*s1], [-c1*s2, c3*s1 + c1*c2*s3, c1*c2*c3 - s1*s3] ]) <NEW_LINE> return rotmat | sigmaz rotation matrix (for tilts about z).
X1 Y2 X3 | 625941b7d7e4931a7ee9dd47 |
def batch_iter(inputs,outputs,batch_size,num_epochs): <NEW_LINE> <INDENT> inputs=np.array(inputs) <NEW_LINE> outputs=np.array(outputs) <NEW_LINE> num_batches_per_epoch=(len(inputs)-1)//batch_size+1 <NEW_LINE> for epoch in range(num_epochs): <NEW_LINE> <INDENT> for batch_num in range(num_batches_per_epoch): <NEW_LINE> <INDENT> start_index=batch_num*batch_size <NEW_LINE> end_index=min((batch_num+1)*batch_size,len(inputs)) <NEW_LINE> yield inputs[start_index:end_index],outputs[start_index:end_index] | Generate batches of data
:param inputs: input samples
:param outputs: target samples
:param batch_size: number of samples per batch
:param num_epochs: number of passes over the data
:return: yields (inputs, outputs) batches | 625941b732920d7e50b27ff7
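A usage sketch: with 5 samples, a batch size of 2, and one epoch, the generator yields batches of sizes 2, 2, and 1 (the last batch is clipped by the `min`):

```python
import numpy as np

X = np.arange(5)
y = np.arange(5) * 10
for xb, yb in batch_iter(X, y, batch_size=2, num_epochs=1):
    print(xb, yb)
# [0 1] [ 0 10]
# [2 3] [20 30]
# [4] [40]
```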
def test_authenticate_view(self): <NEW_LINE> <INDENT> response = self.client.get('/authenticate/') <NEW_LINE> self.assertEqual(response.status_code, 302) <NEW_LINE> response = self.client.get('/authenticate/', follow=True) <NEW_LINE> self.assertEqual(response.status_code, 404) <NEW_LINE> response = self.client.post('/authenticate/') <NEW_LINE> self.assertEqual(response.status_code, 302) <NEW_LINE> settings.SHOPIFY_APP_DEV_MODE = True <NEW_LINE> response = self.client.get('/authenticate/?shop=test.myshopify.com') <NEW_LINE> self.assertEqual(response.status_code, 302) <NEW_LINE> self.assertGreater(int(self.client.session['_auth_user_id']), 0) <NEW_LINE> self.assertEqual(self.client.session['_auth_user_backend'], 'shopify_auth.backends.ShopUserBackend') <NEW_LINE> self.assertIsNot(self.client.session['_auth_user_hash'], None) | Test the authenticate view renders correctly with a shop param. | 625941b7377c676e91271fd6 |
def __init__(self, config, loop, dock, image=None): <NEW_LINE> <INDENT> self.config = config <NEW_LINE> self.loop = loop <NEW_LINE> self.dock = dock <NEW_LINE> self.image = image <NEW_LINE> self.version = None <NEW_LINE> self.arch = None <NEW_LINE> self._lock = asyncio.Lock(loop=loop) | Initialize docker base wrapper. | 625941b7d10714528d5ffb0a |
def reference_num(self, content): <NEW_LINE> <INDENT> if self._cur_reference is not None: <NEW_LINE> <INDENT> self.data.references.append(self._cur_reference) <NEW_LINE> <DEDENT> from . import Record <NEW_LINE> self._cur_reference = Record.Reference() <NEW_LINE> self._cur_reference.number = content | Grab the reference number and signal the start of a new reference. | 625941b7379a373c97cfa976 |
def initialize_interfaces(self): <NEW_LINE> <INDENT> self.settings = Settings() <NEW_LINE> device_paths = self.netman.get_devices() <NEW_LINE> if len(device_paths) == 0: <NEW_LINE> <INDENT> dialogs.ErrorDialog(MSG_NO_DEVICES, TITLE_ERROR).showup() <NEW_LINE> return <NEW_LINE> <DEDENT> for device_path in device_paths: <NEW_LINE> <INDENT> device = Device(device_path) <NEW_LINE> ip4config_path, interface, driver, device_type, managed = device.get_properties() <NEW_LINE> if device_type == 1 and managed: <NEW_LINE> <INDENT> devicewired = DeviceWired(device_path) <NEW_LINE> mac, speed, carrier = devicewired.get_properties() <NEW_LINE> interface = Interface(ip4config_path, device_path, interface, driver, device_type, mac, speed, carrier) <NEW_LINE> self.interfaces.append(interface) <NEW_LINE> <DEDENT> <DEDENT> if len(self.interfaces) == 0: <NEW_LINE> <INDENT> dialogs.ErrorDialog(MSG_NO_WIRED_DEVICES, TITLE_ERROR).showup() <NEW_LINE> return <NEW_LINE> <DEDENT> self.interfaces.sort(key=lambda interface: interface.interface) <NEW_LINE> for interface in self.interfaces: <NEW_LINE> <INDENT> if interface.dhcp_request_info.subnet and interface.existing_info.subnet and interface.dhcp_request_info.subnet != interface.existing_info.subnet: <NEW_LINE> <INDENT> self.interfaces_diff_subnet.append(interface) <NEW_LINE> <DEDENT> self.populate_pages(interface) <NEW_LINE> <DEDENT> self.main_dlg.set_deletable(True) <NEW_LINE> self.main_dlg.show_all() <NEW_LINE> self.loading_box.destroy() <NEW_LINE> self.set_default() <NEW_LINE> if len(self.interfaces_diff_subnet) > 0: <NEW_LINE> <INDENT> if len(self.interfaces_diff_subnet) > 1: <NEW_LINE> <INDENT> msg = MSG_SUBNET_PLURAL.format(', '.join([str(interface.interface) for interface in self.interfaces_diff_subnet])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg = MSG_SUBNET.format(', '.join([str(interface.interface) for interface in self.interfaces_diff_subnet])) <NEW_LINE> <DEDENT> info_dialog = dialogs.InfoDialog(MSG_TITLE_SUBNET, TITLE_INFO) <NEW_LINE> info_dialog.format_secondary_markup(msg) <NEW_LINE> info_dialog.set_transient_for(self.main_dlg) <NEW_LINE> info_dialog.showup() | Initialize interfaces.
Find devices, populate the GUI, show all widgets and destroy the loading widget,
set the appropriate method for each device, and
show an alert message if any devices are on a different subnet. | 625941b7fbf16365ca6f5fe7
def test02_range(self): <NEW_LINE> <INDENT> a = self.charListME <NEW_LINE> arr = self.h5file.create_array( self.h5file.root, 'somearray', a, "Some array") <NEW_LINE> if self.close: <NEW_LINE> <INDENT> self._reopen('a') <NEW_LINE> arr = self.h5file.root.somearray <NEW_LINE> <DEDENT> a[1:3] = b"xXx" <NEW_LINE> arr[1:3] = b"xXx" <NEW_LINE> if common.verbose: <NEW_LINE> <INDENT> print("Original elements:", a[1:4]) <NEW_LINE> print("Read elements:", arr[1:4]) <NEW_LINE> <DEDENT> self.assertTrue(allequal(a[1:4], arr[1:4])) | Range element update (character types) | 625941b73d592f4c4ed1ceac |
def read_session() -> str | None: <NEW_LINE> <INDENT> home = os.path.expanduser("~") <NEW_LINE> try: <NEW_LINE> <INDENT> with open(os.path.join(home, ".skit", "token"), "r") as handle: <NEW_LINE> <INDENT> return handle.read().strip() <NEW_LINE> <DEDENT> <DEDENT> except FileNotFoundError: <NEW_LINE> <INDENT> return None | Read the session from the environment. | 625941b7e8904600ed9f1d54 |
@pytest.fixture() <NEW_LINE> def service(running_app): <NEW_LINE> <INDENT> return current_rdm_records.records_service | The record service. | 625941b7adb09d7d5db6c5bf |
def rm_master_layout(self): <NEW_LINE> <INDENT> self.master_screen.project_overview.remove_presentations() <NEW_LINE> self.master_screen=None <NEW_LINE> self.change_screen_to("switch_layout") | Removes the master layout from screenmanager's screens | 625941b70a366e3fb873e642 |
def CreateRibbonTab(self,tabName): <NEW_LINE> <INDENT> pass | CreateRibbonTab(self: UIControlledApplication,tabName: str)
Creates a new tab on the Revit user interface.
tabName: The name of the tab to be created. | 625941b776d4e153a657e95b |
def diagnostics(): <NEW_LINE> <INDENT> response = '- Running diagnostics... ' <NEW_LINE> if hasattr(psutil, "sensors_temperatures"): <NEW_LINE> <INDENT> temps = psutil.sensors_temperatures() <NEW_LINE> if not temps: <NEW_LINE> <INDENT> response += "Unable to read temperature.\n" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for name, entries in temps.items(): <NEW_LINE> <INDENT> for entry in entries: <NEW_LINE> <INDENT> response += 'Processor temperature is currently %.0f degrees Centigrade...\n' %entry.current <NEW_LINE> break <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> response += 'System memory has %.0f Gigabytes free...\n' % oa.util.legacy.bytes2gb(oa.util.legacy.sys.free_memory()) <NEW_LINE> response += 'Internal hard drive has %.0f Gigabytes free...\n' % oa.util.legacy.bytes2gb(psutil.disk_usage('/').free) <NEW_LINE> response += oa.util.legacy.switch(is_online(), True, 'Internet access is currently available.', 'We are offline.') <NEW_LINE> say(response) | Run system diagnostics. | 625941b7627d3e7fe0d68c7a
def enumerate_code(self, skip_missing=False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> content = self.read() <NEW_LINE> <DEDENT> except FileNotFoundError as e: <NEW_LINE> <INDENT> if skip_missing: <NEW_LINE> <INDENT> fLOG("w,unable to find file", self.file) <NEW_LINE> content = " " <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise e <NEW_LINE> <DEDENT> <DEDENT> lines = content.split("\n") <NEW_LINE> linebeginning = [] <NEW_LINE> s = 0 <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> linebeginning.append(s) <NEW_LINE> s += len(line) + 1 <NEW_LINE> <DEDENT> p = re.compile("(\\\\begin[{]verbatim(x|no|nocut)?[}]( *[%]{3}(.*?)[%]{3})?((.|\\n)*?)\\\\end[{]verbatim(x|no|nocut)??[}])|" + "(\\\\inputcodes[{]([./a-zA-Z0-9_]+?)[}][{](.*?)[}][{](.*?)[}])|" + "(\\\\input[{]([./a-zA-Z0-9_]+?)[}])|" + "(\\\\inputcode[{]([./a-zA-Z0-9_]+?)[}][{](.*?)[}])") <NEW_LINE> recom = re.compile("([%]{3}(.*?)[%]{3})") <NEW_LINE> for m in p.finditer(content): <NEW_LINE> <INDENT> a = m.span()[0] <NEW_LINE> li = LatexFile.dichotomy_find(linebeginning, a) <NEW_LINE> gs = tuple(m.groups()) <NEW_LINE> if gs[0] is not None: <NEW_LINE> <INDENT> comment = gs[3].strip() if gs[3] is not None else gs[3] <NEW_LINE> if comment is None or len(comment) == 0: <NEW_LINE> <INDENT> ci = li - 1 <NEW_LINE> if ci > 0: <NEW_LINE> <INDENT> com = recom.search(lines[ci]) <NEW_LINE> if com: <NEW_LINE> <INDENT> comment = com.groups()[1] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> c = LatexCode(self, self.line + (li,), gs[4], comment) <NEW_LINE> yield c <NEW_LINE> <DEDENT> elif gs[7] is not None: <NEW_LINE> <INDENT> if li not in self.filelines: <NEW_LINE> <INDENT> fil = os.path.join(self.root, gs[8]) <NEW_LINE> self.filelines[li] = LatexIncludedFile( self, self.line + (li,), fil, gs[10]) <NEW_LINE> <DEDENT> for co in self.filelines[li].enumerate_code(): <NEW_LINE> <INDENT> yield co <NEW_LINE> <DEDENT> <DEDENT> elif gs[11] is not None: <NEW_LINE> <INDENT> if li not in self.filelines: <NEW_LINE> <INDENT> fil = os.path.join(self.root, gs[12]) <NEW_LINE> self.filelines[li] = LatexIncludedFile( self, self.line + (li,), fil, None) <NEW_LINE> <DEDENT> for co in self.filelines[li].enumerate_code(skip_missing=skip_missing): <NEW_LINE> <INDENT> yield co <NEW_LINE> <DEDENT> <DEDENT> elif gs[13] is not None: <NEW_LINE> <INDENT> if li not in self.filelines: <NEW_LINE> <INDENT> fil = os.path.join(self.root, gs[14]) <NEW_LINE> self.filelines[li] = LatexIncludedFile( self, self.line + (li,), fil, gs[15]) <NEW_LINE> <DEDENT> for co in self.filelines[li].enumerate_code(): <NEW_LINE> <INDENT> yield co | enumerate all pieces of code (in ``verbatim``, ``verbatimx`` or ``\inputcode`` sections
@param skip_missing if True, avoids stopping whenever a file is not found
@return LatexCode | 625941b7d53ae8145f87a0a2 |
def start(self): <NEW_LINE> <INDENT> raise NotImplementedError() | Start service.
If needed new thread should be started and main loop entered.
All EventHandlers should be registered while started. | 625941b7d6c5a10208143e73 |
def all_buckets(self): <NEW_LINE> <INDENT> return self.s3.buckets.all() | Get an iterator for all buckets. | 625941b75510c4643540f224 |
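Assuming `self.s3` holds a boto3 S3 service resource, the returned iterable lazily pages through every bucket in the account:

```python
import boto3

s3 = boto3.resource("s3")
for bucket in s3.buckets.all():  # the same call the wrapper delegates to
    print(bucket.name)
```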
@utils.arg('id', metavar='<ID>', help='ID of notifycontact') <NEW_LINE> @utils.arg('--email-enable', metavar='<EMAIL_ENABLE>', choices=['True', 'False'], help='Enable or disable the email channel') <NEW_LINE> @utils.arg('--sms-enable', metavar='<SMS_ENABLE>', choices=['True', 'False'], help='Enable or disable the sms channel') <NEW_LINE> @utils.arg('--wechat-enable', metavar='<WECHAT_ENABLE>', choices=['True', 'False'], help='Enable or disable the wechat channel') <NEW_LINE> @utils.arg('--notes', metavar='<NOTES>', help='Notify contact notes') <NEW_LINE> def do_notifycontact_update(client, args): <NEW_LINE> <INDENT> params = {} <NEW_LINE> if args.email_enable: <NEW_LINE> <INDENT> params['email_enable'] = True if args.email_enable == 'True' else False <NEW_LINE> <DEDENT> if args.sms_enable: <NEW_LINE> <INDENT> params['sms_enable'] = True if args.sms_enable == 'True' else False <NEW_LINE> <DEDENT> if args.wechat_enable: <NEW_LINE> <INDENT> params['wechat_enable'] = True if args.wechat_enable == 'True' else False <NEW_LINE> <DEDENT> if args.notes: <NEW_LINE> <INDENT> params['notes'] = args.notes <NEW_LINE> <DEDENT> nc = client.notifycontacts.update(args.id, **params) <NEW_LINE> utils.print_dict(nc) | Only allow to enable/disable channel or edit notes | 625941b7eab8aa0e5d26d98a |
def get_sunset(self): <NEW_LINE> <INDENT> return (self.sunset_time, datetime.fromtimestamp(int(self.sunset_time)).strftime('%Y-%m-%d %H:%M:%S')) | returns sunset time as tuple (UTC UNIX time, formatted UTC time) | 625941b7656771135c3eb69e
def tags(request, mlist_fqdn, threadid): <NEW_LINE> <INDENT> if not request.user.is_authenticated(): <NEW_LINE> <INDENT> return HttpResponse('You must be logged in to add a tag', content_type="text/plain", status=403) <NEW_LINE> <DEDENT> if request.method != 'POST': <NEW_LINE> <INDENT> raise SuspiciousOperation <NEW_LINE> <DEDENT> action = request.POST.get("action") <NEW_LINE> if action == "add": <NEW_LINE> <INDENT> form = AddTagForm(request.POST) <NEW_LINE> if not form.is_valid(): <NEW_LINE> <INDENT> return HttpResponse("Error adding tag: invalid data", content_type="text/plain", status=500) <NEW_LINE> <DEDENT> tagname = form.data['tag'] <NEW_LINE> <DEDENT> elif action == "rm": <NEW_LINE> <INDENT> tagname = request.POST.get('tag') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise SuspiciousOperation <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> tag = Tag.objects.get(threadid=threadid, list_address=mlist_fqdn, tag=tagname) <NEW_LINE> if action == "rm": <NEW_LINE> <INDENT> tag.delete() <NEW_LINE> <DEDENT> <DEDENT> except Tag.DoesNotExist: <NEW_LINE> <INDENT> if action == "add": <NEW_LINE> <INDENT> tag = Tag(list_address=mlist_fqdn, threadid=threadid, tag=tagname, user=request.user) <NEW_LINE> tag.save() <NEW_LINE> <DEDENT> elif action == "rm": <NEW_LINE> <INDENT> raise Http404("No such tag: %s" % tagname) <NEW_LINE> <DEDENT> <DEDENT> tags = Tag.objects.filter(threadid=threadid, list_address=mlist_fqdn) <NEW_LINE> FakeMList = namedtuple("MailingList", ["name"]) <NEW_LINE> tpl = loader.get_template('threads/tags.html') <NEW_LINE> html = tpl.render(RequestContext(request, { "tags": tags, "mlist": FakeMList(name=mlist_fqdn), "threadid": threadid, })) <NEW_LINE> response = {"tags": [ t.tag for t in tags ], "html": html} <NEW_LINE> return HttpResponse(json.dumps(response), mimetype='application/javascript') | Add or remove a tag on a given thread. | 625941b763b5f9789fde6f11 |
def parse_args(): <NEW_LINE> <INDENT> global run_flag, verbose, norun <NEW_LINE> run_flag = True <NEW_LINE> verbose = False <NEW_LINE> norun = False <NEW_LINE> cfg_file = "./cfg/default.cfg" <NEW_LINE> if len(sys.argv) <= 1: <NEW_LINE> <INDENT> print("*** Pick one of the following config files\n") <NEW_LINE> show_configs() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> opts, args = getopt.getopt(sys.argv[1:], "hulsvn", ["help", "usage", "list", "setup", "verbose", "norun", ]) <NEW_LINE> <DEDENT> except getopt.GetoptError as error: <NEW_LINE> <INDENT> print(str(error)) <NEW_LINE> usage() <NEW_LINE> <DEDENT> for opt, _ in opts: <NEW_LINE> <INDENT> if opt == "--help" or opt == "-h": <NEW_LINE> <INDENT> usage() <NEW_LINE> <DEDENT> if opt == "--usage" or opt == "-u": <NEW_LINE> <INDENT> usage() <NEW_LINE> <DEDENT> if opt == "--list" or opt == "-l": <NEW_LINE> <INDENT> show_configs() <NEW_LINE> <DEDENT> if opt == "--setup" or opt == "-s": <NEW_LINE> <INDENT> run_flag = False <NEW_LINE> <DEDENT> if opt == "--verbose" or opt == "-v": <NEW_LINE> <INDENT> verbose = True <NEW_LINE> <DEDENT> if opt == "--norun" or opt == "-n": <NEW_LINE> <INDENT> norun = True <NEW_LINE> <DEDENT> <DEDENT> if not args or len(args) > 1: <NEW_LINE> <INDENT> usage() <NEW_LINE> <DEDENT> fn = find_file(args[0], '.cfg') <NEW_LINE> if not fn: <NEW_LINE> <INDENT> f = args[0] <NEW_LINE> if os.path.splitext(args[0])[1] != '.cfg': <NEW_LINE> <INDENT> f = args[0] + '.cfg' <NEW_LINE> <DEDENT> print("*** Config file '%s' not found" % f) <NEW_LINE> print(" Make sure you are running this command in pktgen top directory") <NEW_LINE> print(" e.g. cd Pktgen-DPDK; ./tools/run.py default") <NEW_LINE> show_configs() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cfg_file = fn <NEW_LINE> <DEDENT> return cfg_file | Parse the command arguments | 625941b7e5267d203edcdacd |
def Corners(self, *args) -> "void": <NEW_LINE> <INDENT> return _gskernel.GsAsteriskPointSymbol_Corners(self, *args) | *Overload 1:*
Get the number of star points
|

*Overload 2:*
Set the number of star points | 625941b7091ae35668666d92
def get_current_entry_index(): <NEW_LINE> <INDENT> s_index_re = "Current entry index = (\d+)" <NEW_LINE> s_output = _execute_fred_command("status") <NEW_LINE> m = re.search(s_index_re, s_output) <NEW_LINE> if m != None: <NEW_LINE> <INDENT> fredutil.fred_debug("Current entry index is: %d" % int(m.group(1))) <NEW_LINE> return int(m.group(1)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None | Return the index of the current entry. | 625941b715baa723493c3d9e |
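The pattern above — regex-search a command's text output and convert the captured group — can be exercised in isolation; a sketch with a made-up status string (the `fredutil` and `_execute_fred_command` helpers are not reproduced here):

import re

s_output = 'FReD status:\nCurrent entry index = 42\n'
m = re.search(r'Current entry index = (\d+)', s_output)
index = int(m.group(1)) if m is not None else None
assert index == 42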
def sendMessage(self, recipient_id, message): <NEW_LINE> <INDENT> payload = { 'message': { 'text': emoji.emojize(message, use_aliases=True), } } <NEW_LINE> self.call_send_api(recipient_id, payload) | Send plain text messages.
Args:
recipient_id (int): Recipient ID.
message (str): Must be UTF-8 and has a 640 character limit. | 625941b75166f23b2e1a4f85 |
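A hedged usage sketch: with the `emoji` package (releases before 2.0 accepted the `use_aliases=True` flag used above; newer releases replace it with a `language` argument), the payload built by `sendMessage` looks like this:

import emoji  # emoji < 2.0 for the use_aliases flag

payload = {
    'message': {
        'text': emoji.emojize('Deploy finished :thumbsup:', use_aliases=True),
    }
}
# payload['message']['text'] -> 'Deploy finished 👍'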
def setPath(self, path): <NEW_LINE> <INDENT> self.__path = path | Sets the path for the item. | 625941b79f2886367277a6bd |
def open(self): <NEW_LINE> <INDENT> if self._is_open: <NEW_LINE> <INDENT> raise exceptions.ClientConnectionFailure( "client connection already open") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.proxy.open() <NEW_LINE> self._is_open = True <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.logger.exception("could not open client connection", e) <NEW_LINE> raise e | Open the client connection.
Raises:
ClientConnectionFailure: if the client connection is already open
Exception: if an error occurs while trying to open the connection | 625941b7498bea3a759b98de |
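The guard makes a double `open` an explicit error rather than a silent no-op. A usage sketch (the concrete client class and its `proxy` are stand-ins for whatever the surrounding project defines):

client = SomeClient()          # hypothetical subclass wiring up proxy and logger
client.open()                  # succeeds; _is_open becomes True
try:
    client.open()              # second call is rejected
except ClientConnectionFailure as exc:
    print(exc)                 # 'client connection already open'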
def get_email(self): <NEW_LINE> <INDENT> return self._email | @brief Gets the email of the Member.
@return The email of the Member | 625941b74c3428357757c157 |
def test_compilationCommands_references(): <NEW_LINE> <INDENT> cdb = CompilationDatabase.fromDirectory(kInputsDir) <NEW_LINE> cmds = cdb.getCompileCommands('/home/john.doe/MyProject/project.cpp') <NEW_LINE> del cdb <NEW_LINE> cmd0 = cmds[0] <NEW_LINE> del cmds <NEW_LINE> gc.collect() <NEW_LINE> cmd0.directory | Ensure CompilationCommand keeps a reference to CompilationCommands | 625941b7293b9510aa2c30c5
@roles('web') <NEW_LINE> @task <NEW_LINE> def deploy_backend(update_pkg=False): <NEW_LINE> <INDENT> execute(pydiploy.django.deploy_backend) | Deploy code on server | 625941b73c8af77a43ae35ca |
def optimize(self, w, b, X, Y, num_iterations, learning_rate): <NEW_LINE> <INDENT> costs = [] <NEW_LINE> for i in range(num_iterations): <NEW_LINE> <INDENT> grads, cost = self.propagate(w, b, X, Y) <NEW_LINE> dw = grads["dw"] <NEW_LINE> db = grads["db"] <NEW_LINE> w = w - learning_rate * dw <NEW_LINE> b = b - learning_rate * db <NEW_LINE> if i % 100 == 0: <NEW_LINE> <INDENT> costs.append(cost) <NEW_LINE> <DEDENT> <DEDENT> params = {"w": w, "b": b} <NEW_LINE> grads = {"dw": dw, "db": db} <NEW_LINE> return params, grads, costs | This function optimizes w and b by running a gradient descent algorithm. | 625941b791af0d3eaac9b840 |
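The update rule is plain batch gradient descent, w := w - learning_rate * dw and b := b - learning_rate * db, with the cost sampled every 100 iterations. A self-contained sketch of the same loop for logistic regression (NumPy only; this `propagate` is a stand-in for the method the row calls):

import numpy as np

def propagate(w, b, X, Y):
    m = X.shape[1]
    A = 1.0 / (1.0 + np.exp(-(w.T @ X + b)))   # sigmoid activations, shape (1, m)
    cost = -np.mean(Y * np.log(A) + (1 - Y) * np.log(1 - A))
    dw = (X @ (A - Y).T) / m                   # gradient w.r.t. weights
    db = float(np.mean(A - Y))                 # gradient w.r.t. bias
    return {'dw': dw, 'db': db}, cost

w, b = np.zeros((2, 1)), 0.0
X = np.array([[1.0, 2.0, -1.0], [0.5, -0.5, 1.5]])   # 2 features x 3 samples
Y = np.array([[1.0, 1.0, 0.0]])
for i in range(1000):
    grads, cost = propagate(w, b, X, Y)
    w -= 0.1 * grads['dw']
    b -= 0.1 * grads['db']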
def file_list2(working_directory, data_directory, reset=False): <NEW_LINE> <INDENT> all_tags = {} <NEW_LINE> if os.path.isfile(os.path.join(working_directory, 'all_modalities.txt')): <NEW_LINE> <INDENT> if not reset: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> os.remove(os.path.join(working_directory, 'all_modalities.txt')) <NEW_LINE> <DEDENT> <DEDENT> os.chdir(working_directory) <NEW_LINE> with open('all_modalities.txt', 'w') as outstream: <NEW_LINE> <INDENT> for dirpath, dirname, file_names in os.walk(data_directory): <NEW_LINE> <INDENT> dirpath = dirpath.replace('\\', '/') <NEW_LINE> for a_file in file_names: <NEW_LINE> <INDENT> if a_file.endswith('.dcm'): <NEW_LINE> <INDENT> dcm_tags = dicom.read_file(os.path.join(dirpath, a_file)) <NEW_LINE> if dcm_tags.PatientID not in all_tags: <NEW_LINE> <INDENT> all_tags[dcm_tags.PatientID] = [] <NEW_LINE> outstream.write('\n' + dcm_tags.PatientID + '\n') <NEW_LINE> <DEDENT> if hasattr(dcm_tags, 'SeriesDescription'): <NEW_LINE> <INDENT> sequence = dcm_tags.SeriesDescription <NEW_LINE> <DEDENT> elif hasattr(dcm_tags, 'SequenceName'): <NEW_LINE> <INDENT> sequence = dcm_tags.SequenceName <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(dirpath + ' has no sequence description') <NEW_LINE> sequence = 'NA' <NEW_LINE> <DEDENT> if hasattr(dcm_tags, 'PixelSpacing'): <NEW_LINE> <INDENT> resolution = dcm_tags.PixelSpacing[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> resolution = 'NA' <NEW_LINE> print(dirpath + ' has no pixel spacing information') <NEW_LINE> <DEDENT> modality_entry = [sequence, resolution, dirpath] <NEW_LINE> outstream.write(sequence + '&\t' + str(resolution) + '&\t' + dirpath + '\n') <NEW_LINE> all_tags[dcm_tags.PatientID].append(modality_entry) <NEW_LINE> break | Create and save the file *all_modalities.txt* in the working directory, which serves as the master map of a raw dataset.
The file lists each patient ID on its own line, followed by lines giving the sequence description, pixel spacing
and full file path (DICOM directory); it is human-readable. The next patient ID then follows, and so on.
:param string working_directory:
:param string data_directory: raw dataset root directory
:param bool reset: If an *all_modalities.txt* file already exists, delete it and make a new map. Default is False. | 625941b74f88993c3716bea0 |
def add_fields(table_name, field_specs): <NEW_LINE> <INDENT> table = Table(table_name) <NEW_LINE> table.open() <NEW_LINE> try: <NEW_LINE> <INDENT> table.add_fields(field_specs) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> table.close() | adds fields to an existing table | 625941b7925a0f43d2549ca0 |
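The open/try/finally/close shape above guarantees cleanup even when `add_fields` raises; `contextlib.closing` expresses the same guarantee more compactly — a sketch, with `Table` standing in for whatever class the row uses:

from contextlib import closing

def add_fields(table_name, field_specs):
    with closing(Table(table_name)) as table:   # table.close() runs on exit
        table.open()
        table.add_fields(field_specs)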
def start_node(self, i, *args, **kwargs): <NEW_LINE> <INDENT> node = self.nodes[i] <NEW_LINE> node.start(*args, **kwargs) <NEW_LINE> node.wait_for_rpc_connection() <NEW_LINE> time.sleep(10) <NEW_LINE> if self.options.coveragedir is not None: <NEW_LINE> <INDENT> coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc) | Start a radarcoind | 625941b74527f215b584c287 |
def __setitem__(self, key, value): <NEW_LINE> <INDENT> if key not in self.data: <NEW_LINE> <INDENT> self.data[key] = [value] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.data[key].append(value) | Adds value to the list of values for the specified key.
Arguments:
key (object): Key
value (object): Value | 625941b7dd821e528d63afd8 |
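The class behaves as a multidict: assigning to an existing key appends rather than overwrites. A behavior sketch plus the standard-library equivalent (`MultiDict` is an assumed name for the class this method belongs to):

d = MultiDict()
d['color'] = 'red'
d['color'] = 'blue'
assert d.data['color'] == ['red', 'blue']

# The same behavior using only the standard library:
from collections import defaultdict
dd = defaultdict(list)
dd['color'].append('red')
dd['color'].append('blue')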
def clear(self): <NEW_LINE> <INDENT> self.login_entry.delete(0, tk.END) <NEW_LINE> self.password_entry.delete(0, tk.END) <NEW_LINE> self.site_entry.delete(0, tk.END) <NEW_LINE> self.comment_text.delete("1.0", tk.END) | Clear all input fields of the main window
:return: None | 625941b7de87d2750b85fbba |
def initAlgorithm(self, config): <NEW_LINE> <INDENT> managerParameter = ParameterFMEManager( self.FME_MANAGER, description=self.tr("FME Manager Parameters") ) <NEW_LINE> managerParameter.setMetadata({ "widget_wrapper": "DsgTools.gui.ProcessingUI.fmeManagerWrapper.FMEManagerWrapper" }) <NEW_LINE> self.addParameter(managerParameter) | Parameter setting. | 625941b7a4f1c619b28afe6e |