Dataset schema: four string fields per row (lengths are the observed min/max):

    column             type      min    max
    code               string    75     104k
    code_sememe        string    47     309k
    token_type         string    215    214k
    code_dependency    string    75     155k

Each example below lists these four fields in order: the original Python
source (code), an AST-style sememe rendering (code_sememe), a token-type
abstraction of the source (token_type), and the source annotated with
control-dependency comments (code_dependency).
def get_lr(self):
    """Calculates the learning rate at batch index: ``self.last_batch_idx``.
    """
    cycle = np.floor(1 + self.last_batch_idx / self.total_size)
    x = 1 + self.last_batch_idx / self.total_size - cycle
    if x <= self.step_ratio:
        scale_factor = x / self.step_ratio
    else:
        scale_factor = (x-1)/(self.step_ratio-1)
    lrs = []
    for base_lr, max_lr in zip(self.base_lrs, self.max_lrs):
        base_height = (max_lr - base_lr) * scale_factor
        if self.scale_mode == 'cycle':
            lr = base_lr + base_height * self.scale_fn(cycle)
        else:
            lr = base_lr + base_height * self.scale_fn(self.last_batch_idx)
        lrs.append(lr)
    return lrs
def function[get_lr, parameter[self]]: constant[Calculates the learning rate at batch index: ``self.last_batch_idx``. ] variable[cycle] assign[=] call[name[np].floor, parameter[binary_operation[constant[1] + binary_operation[name[self].last_batch_idx / name[self].total_size]]]] variable[x] assign[=] binary_operation[binary_operation[constant[1] + binary_operation[name[self].last_batch_idx / name[self].total_size]] - name[cycle]] if compare[name[x] less_or_equal[<=] name[self].step_ratio] begin[:] variable[scale_factor] assign[=] binary_operation[name[x] / name[self].step_ratio] variable[lrs] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b0ab8d30>, <ast.Name object at 0x7da1b0abaf80>]]] in starred[call[name[zip], parameter[name[self].base_lrs, name[self].max_lrs]]] begin[:] variable[base_height] assign[=] binary_operation[binary_operation[name[max_lr] - name[base_lr]] * name[scale_factor]] if compare[name[self].scale_mode equal[==] constant[cycle]] begin[:] variable[lr] assign[=] binary_operation[name[base_lr] + binary_operation[name[base_height] * call[name[self].scale_fn, parameter[name[cycle]]]]] call[name[lrs].append, parameter[name[lr]]] return[name[lrs]]
keyword[def] identifier[get_lr] ( identifier[self] ): literal[string] identifier[cycle] = identifier[np] . identifier[floor] ( literal[int] + identifier[self] . identifier[last_batch_idx] / identifier[self] . identifier[total_size] ) identifier[x] = literal[int] + identifier[self] . identifier[last_batch_idx] / identifier[self] . identifier[total_size] - identifier[cycle] keyword[if] identifier[x] <= identifier[self] . identifier[step_ratio] : identifier[scale_factor] = identifier[x] / identifier[self] . identifier[step_ratio] keyword[else] : identifier[scale_factor] =( identifier[x] - literal[int] )/( identifier[self] . identifier[step_ratio] - literal[int] ) identifier[lrs] =[] keyword[for] identifier[base_lr] , identifier[max_lr] keyword[in] identifier[zip] ( identifier[self] . identifier[base_lrs] , identifier[self] . identifier[max_lrs] ): identifier[base_height] =( identifier[max_lr] - identifier[base_lr] )* identifier[scale_factor] keyword[if] identifier[self] . identifier[scale_mode] == literal[string] : identifier[lr] = identifier[base_lr] + identifier[base_height] * identifier[self] . identifier[scale_fn] ( identifier[cycle] ) keyword[else] : identifier[lr] = identifier[base_lr] + identifier[base_height] * identifier[self] . identifier[scale_fn] ( identifier[self] . identifier[last_batch_idx] ) identifier[lrs] . identifier[append] ( identifier[lr] ) keyword[return] identifier[lrs]
def get_lr(self):
    """Calculates the learning rate at batch index: ``self.last_batch_idx``.
    """
    cycle = np.floor(1 + self.last_batch_idx / self.total_size)
    x = 1 + self.last_batch_idx / self.total_size - cycle
    if x <= self.step_ratio:
        scale_factor = x / self.step_ratio # depends on [control=['if'], data=['x']]
    else:
        scale_factor = (x - 1) / (self.step_ratio - 1)
    lrs = []
    for (base_lr, max_lr) in zip(self.base_lrs, self.max_lrs):
        base_height = (max_lr - base_lr) * scale_factor
        if self.scale_mode == 'cycle':
            lr = base_lr + base_height * self.scale_fn(cycle) # depends on [control=['if'], data=[]]
        else:
            lr = base_lr + base_height * self.scale_fn(self.last_batch_idx)
        lrs.append(lr) # depends on [control=['for'], data=[]]
    return lrs
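The row above is a cyclic (triangular) learning-rate schedule: x sweeps through each cycle, and scale_factor ramps from 0 to 1 over the first step_ratio of the cycle, then back down. A minimal standalone sketch of that wave; total_size and step_ratio here are illustrative values, not taken from the dataset:

import numpy as np

total_size, step_ratio = 10, 0.5  # illustrative values only
for batch_idx in range(12):
    cycle = np.floor(1 + batch_idx / total_size)
    x = 1 + batch_idx / total_size - cycle
    scale = x / step_ratio if x <= step_ratio else (x - 1) / (step_ratio - 1)
    print(batch_idx, round(scale, 2))  # ramps 0 -> 1 -> 0 within each cycle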
def _UpdateRequestsAndScheduleFPRs(self, responses, cursor=None):
    """Updates requests and writes FlowProcessingRequests if needed."""
    request_keys = set(
        (r.client_id, r.flow_id, r.request_id) for r in responses)
    flow_keys = set((r.client_id, r.flow_id) for r in responses)
    response_counts = self._ReadFlowResponseCounts(request_keys, cursor)
    next_requests = self._ReadAndLockNextRequestsToProcess(flow_keys, cursor)
    completed_requests = self._ReadLockAndUpdateCompletedRequests(
        request_keys, response_counts, cursor)
    if not completed_requests:
        return completed_requests
    fprs_to_write = []
    for request_key, r in iteritems(completed_requests):
        client_id, flow_id, request_id = request_key
        if next_requests[(client_id, flow_id)] == request_id:
            fprs_to_write.append(
                rdf_flows.FlowProcessingRequest(
                    client_id=r.client_id,
                    flow_id=r.flow_id,
                    delivery_time=r.start_time))
    if fprs_to_write:
        self._WriteFlowProcessingRequests(fprs_to_write, cursor)
    return completed_requests
def function[_UpdateRequestsAndScheduleFPRs, parameter[self, responses, cursor]]: constant[Updates requests and writes FlowProcessingRequests if needed.] variable[request_keys] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da1b1b46f20>]] variable[flow_keys] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da1b1b47700>]] variable[response_counts] assign[=] call[name[self]._ReadFlowResponseCounts, parameter[name[request_keys], name[cursor]]] variable[next_requests] assign[=] call[name[self]._ReadAndLockNextRequestsToProcess, parameter[name[flow_keys], name[cursor]]] variable[completed_requests] assign[=] call[name[self]._ReadLockAndUpdateCompletedRequests, parameter[name[request_keys], name[response_counts], name[cursor]]] if <ast.UnaryOp object at 0x7da1b1c18340> begin[:] return[name[completed_requests]] variable[fprs_to_write] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b1c1a230>, <ast.Name object at 0x7da1b1c1beb0>]]] in starred[call[name[iteritems], parameter[name[completed_requests]]]] begin[:] <ast.Tuple object at 0x7da1b1c18730> assign[=] name[request_key] if compare[call[name[next_requests]][tuple[[<ast.Name object at 0x7da1b1c19f60>, <ast.Name object at 0x7da1b1c1bfa0>]]] equal[==] name[request_id]] begin[:] call[name[fprs_to_write].append, parameter[call[name[rdf_flows].FlowProcessingRequest, parameter[]]]] if name[fprs_to_write] begin[:] call[name[self]._WriteFlowProcessingRequests, parameter[name[fprs_to_write], name[cursor]]] return[name[completed_requests]]
keyword[def] identifier[_UpdateRequestsAndScheduleFPRs] ( identifier[self] , identifier[responses] , identifier[cursor] = keyword[None] ): literal[string] identifier[request_keys] = identifier[set] ( ( identifier[r] . identifier[client_id] , identifier[r] . identifier[flow_id] , identifier[r] . identifier[request_id] ) keyword[for] identifier[r] keyword[in] identifier[responses] ) identifier[flow_keys] = identifier[set] (( identifier[r] . identifier[client_id] , identifier[r] . identifier[flow_id] ) keyword[for] identifier[r] keyword[in] identifier[responses] ) identifier[response_counts] = identifier[self] . identifier[_ReadFlowResponseCounts] ( identifier[request_keys] , identifier[cursor] ) identifier[next_requests] = identifier[self] . identifier[_ReadAndLockNextRequestsToProcess] ( identifier[flow_keys] , identifier[cursor] ) identifier[completed_requests] = identifier[self] . identifier[_ReadLockAndUpdateCompletedRequests] ( identifier[request_keys] , identifier[response_counts] , identifier[cursor] ) keyword[if] keyword[not] identifier[completed_requests] : keyword[return] identifier[completed_requests] identifier[fprs_to_write] =[] keyword[for] identifier[request_key] , identifier[r] keyword[in] identifier[iteritems] ( identifier[completed_requests] ): identifier[client_id] , identifier[flow_id] , identifier[request_id] = identifier[request_key] keyword[if] identifier[next_requests] [( identifier[client_id] , identifier[flow_id] )]== identifier[request_id] : identifier[fprs_to_write] . identifier[append] ( identifier[rdf_flows] . identifier[FlowProcessingRequest] ( identifier[client_id] = identifier[r] . identifier[client_id] , identifier[flow_id] = identifier[r] . identifier[flow_id] , identifier[delivery_time] = identifier[r] . identifier[start_time] )) keyword[if] identifier[fprs_to_write] : identifier[self] . identifier[_WriteFlowProcessingRequests] ( identifier[fprs_to_write] , identifier[cursor] ) keyword[return] identifier[completed_requests]
def _UpdateRequestsAndScheduleFPRs(self, responses, cursor=None):
    """Updates requests and writes FlowProcessingRequests if needed."""
    request_keys = set(((r.client_id, r.flow_id, r.request_id) for r in responses))
    flow_keys = set(((r.client_id, r.flow_id) for r in responses))
    response_counts = self._ReadFlowResponseCounts(request_keys, cursor)
    next_requests = self._ReadAndLockNextRequestsToProcess(flow_keys, cursor)
    completed_requests = self._ReadLockAndUpdateCompletedRequests(request_keys, response_counts, cursor)
    if not completed_requests:
        return completed_requests # depends on [control=['if'], data=[]]
    fprs_to_write = []
    for (request_key, r) in iteritems(completed_requests):
        (client_id, flow_id, request_id) = request_key
        if next_requests[client_id, flow_id] == request_id:
            fprs_to_write.append(rdf_flows.FlowProcessingRequest(client_id=r.client_id, flow_id=r.flow_id, delivery_time=r.start_time)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
    if fprs_to_write:
        self._WriteFlowProcessingRequests(fprs_to_write, cursor) # depends on [control=['if'], data=[]]
    return completed_requests
def make_tokens(parse_buffer):
    """
    Takes a list of pairs (token_type, read_result) and applies
    token_type(read_result).

    Footnotes are already parsed before this point,
    and span-level parsing is started here.
    """
    tokens = []
    for token_type, result in parse_buffer:
        token = token_type(result)
        if token is not None:
            tokens.append(token)
    return tokens
def function[make_tokens, parameter[parse_buffer]]: constant[ Takes a list of pairs (token_type, read_result) and applies token_type(read_result). Footnotes are already parsed before this point, and span-level parsing is started here. ] variable[tokens] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da2043451b0>, <ast.Name object at 0x7da204347d30>]]] in starred[name[parse_buffer]] begin[:] variable[token] assign[=] call[name[token_type], parameter[name[result]]] if compare[name[token] is_not constant[None]] begin[:] call[name[tokens].append, parameter[name[token]]] return[name[tokens]]
keyword[def] identifier[make_tokens] ( identifier[parse_buffer] ): literal[string] identifier[tokens] =[] keyword[for] identifier[token_type] , identifier[result] keyword[in] identifier[parse_buffer] : identifier[token] = identifier[token_type] ( identifier[result] ) keyword[if] identifier[token] keyword[is] keyword[not] keyword[None] : identifier[tokens] . identifier[append] ( identifier[token] ) keyword[return] identifier[tokens]
def make_tokens(parse_buffer):
    """
    Takes a list of pairs (token_type, read_result) and applies
    token_type(read_result).

    Footnotes are already parsed before this point,
    and span-level parsing is started here.
    """
    tokens = []
    for (token_type, result) in parse_buffer:
        token = token_type(result)
        if token is not None:
            tokens.append(token) # depends on [control=['if'], data=['token']] # depends on [control=['for'], data=[]]
    return tokens
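make_tokens treats each token type as a callable that can veto a match by returning None. A hypothetical usage sketch, assuming the make_tokens definition above; the two token "types" are stand-ins invented for the demo:

def heading(result):
    return ('heading', result)

def discard(result):
    return None  # vetoed; make_tokens drops it

print(make_tokens([(heading, 'Intro'), (discard, '---')]))
# [('heading', 'Intro')]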
def main(self):
    """
    Run the necessary methods in the correct order
    """
    if not os.path.isfile(self.gdcs_report):
        logging.info('Starting {} analysis pipeline'.format(self.analysistype))
        # Run the analyses
        ShortKSippingMethods(self, self.cutoff)
        # Create the reports
        self.reporter()
    else:
        self.report_parse()
def function[main, parameter[self]]: constant[ Run the necessary methods in the correct order ] if <ast.UnaryOp object at 0x7da1b1eeccd0> begin[:] call[name[logging].info, parameter[call[constant[Starting {} analysis pipeline].format, parameter[name[self].analysistype]]]] call[name[ShortKSippingMethods], parameter[name[self], name[self].cutoff]] call[name[self].reporter, parameter[]]
keyword[def] identifier[main] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[self] . identifier[gdcs_report] ): identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[self] . identifier[analysistype] )) identifier[ShortKSippingMethods] ( identifier[self] , identifier[self] . identifier[cutoff] ) identifier[self] . identifier[reporter] () keyword[else] : identifier[self] . identifier[report_parse] ()
def main(self):
    """
    Run the necessary methods in the correct order
    """
    if not os.path.isfile(self.gdcs_report):
        logging.info('Starting {} analysis pipeline'.format(self.analysistype))
        # Run the analyses
        ShortKSippingMethods(self, self.cutoff)
        # Create the reports
        self.reporter() # depends on [control=['if'], data=[]]
    else:
        self.report_parse()
def cmd_reboot(self, args):
    '''reboot autopilot'''
    if len(args) > 0 and args[0] == 'bootloader':
        self.master.reboot_autopilot(True)
    else:
        self.master.reboot_autopilot()
def function[cmd_reboot, parameter[self, args]]: constant[reboot autopilot] if <ast.BoolOp object at 0x7da1b2344940> begin[:] call[name[self].master.reboot_autopilot, parameter[constant[True]]]
keyword[def] identifier[cmd_reboot] ( identifier[self] , identifier[args] ): literal[string] keyword[if] identifier[len] ( identifier[args] )> literal[int] keyword[and] identifier[args] [ literal[int] ]== literal[string] : identifier[self] . identifier[master] . identifier[reboot_autopilot] ( keyword[True] ) keyword[else] : identifier[self] . identifier[master] . identifier[reboot_autopilot] ()
def cmd_reboot(self, args):
    """reboot autopilot"""
    if len(args) > 0 and args[0] == 'bootloader':
        self.master.reboot_autopilot(True) # depends on [control=['if'], data=[]]
    else:
        self.master.reboot_autopilot()
def title(self):
    """
    The title of the course. If no entry in the namemap of the configuration
    is found a new entry is created with name=$STUD.IP_NAME + $SEMESTER_NAME
    """
    name = c.namemap_lookup(self.id)
    if name is None:
        name = self._title + " " + client.get_semester_title(self)
        c.namemap_set(self.id, name)
    return secure_filename(name)
def function[title, parameter[self]]: constant[ The title of the course. If no entry in the namemap of the configuration is found a new entry is created with name=$STUD.IP_NAME + $SEMESTER_NAME ] variable[name] assign[=] call[name[c].namemap_lookup, parameter[name[self].id]] if compare[name[name] is constant[None]] begin[:] variable[name] assign[=] binary_operation[binary_operation[name[self]._title + constant[ ]] + call[name[client].get_semester_title, parameter[name[self]]]] call[name[c].namemap_set, parameter[name[self].id, name[name]]] return[call[name[secure_filename], parameter[name[name]]]]
keyword[def] identifier[title] ( identifier[self] ): literal[string] identifier[name] = identifier[c] . identifier[namemap_lookup] ( identifier[self] . identifier[id] ) keyword[if] identifier[name] keyword[is] keyword[None] : identifier[name] = identifier[self] . identifier[_title] + literal[string] + identifier[client] . identifier[get_semester_title] ( identifier[self] ) identifier[c] . identifier[namemap_set] ( identifier[self] . identifier[id] , identifier[name] ) keyword[return] identifier[secure_filename] ( identifier[name] )
def title(self):
    """
    The title of the course. If no entry in the namemap of the configuration
    is found a new entry is created with name=$STUD.IP_NAME + $SEMESTER_NAME
    """
    name = c.namemap_lookup(self.id)
    if name is None:
        name = self._title + ' ' + client.get_semester_title(self)
        c.namemap_set(self.id, name) # depends on [control=['if'], data=['name']]
    return secure_filename(name)
def fetch_build_egg(self, req):
    """Fetch an egg needed for building"""
    from setuptools.command.easy_install import easy_install
    dist = self.__class__({'script_args': ['easy_install']})
    opts = dist.get_option_dict('easy_install')
    opts.clear()
    opts.update(
        (k, v) for k, v in self.get_option_dict('easy_install').items()
        if k in (
            # don't use any other settings
            'find_links', 'site_dirs', 'index_url',
            'optimize', 'site_dirs', 'allow_hosts',
        ))
    if self.dependency_links:
        links = self.dependency_links[:]
        if 'find_links' in opts:
            links = opts['find_links'][1] + links
        opts['find_links'] = ('setup', links)
    install_dir = self.get_egg_cache_dir()
    cmd = easy_install(
        dist, args=["x"], install_dir=install_dir,
        exclude_scripts=True,
        always_copy=False, build_directory=None, editable=False,
        upgrade=False, multi_version=True, no_report=True, user=False
    )
    cmd.ensure_finalized()
    return cmd.easy_install(req)
def function[fetch_build_egg, parameter[self, req]]: constant[Fetch an egg needed for building] from relative_module[setuptools.command.easy_install] import module[easy_install] variable[dist] assign[=] call[name[self].__class__, parameter[dictionary[[<ast.Constant object at 0x7da1b1b17340>], [<ast.List object at 0x7da1b1b16500>]]]] variable[opts] assign[=] call[name[dist].get_option_dict, parameter[constant[easy_install]]] call[name[opts].clear, parameter[]] call[name[opts].update, parameter[<ast.GeneratorExp object at 0x7da1b1b15780>]] if name[self].dependency_links begin[:] variable[links] assign[=] call[name[self].dependency_links][<ast.Slice object at 0x7da1b1b16d40>] if compare[constant[find_links] in name[opts]] begin[:] variable[links] assign[=] binary_operation[call[call[name[opts]][constant[find_links]]][constant[1]] + name[links]] call[name[opts]][constant[find_links]] assign[=] tuple[[<ast.Constant object at 0x7da1b1b15ab0>, <ast.Name object at 0x7da1b1b16a40>]] variable[install_dir] assign[=] call[name[self].get_egg_cache_dir, parameter[]] variable[cmd] assign[=] call[name[easy_install], parameter[name[dist]]] call[name[cmd].ensure_finalized, parameter[]] return[call[name[cmd].easy_install, parameter[name[req]]]]
keyword[def] identifier[fetch_build_egg] ( identifier[self] , identifier[req] ): literal[string] keyword[from] identifier[setuptools] . identifier[command] . identifier[easy_install] keyword[import] identifier[easy_install] identifier[dist] = identifier[self] . identifier[__class__] ({ literal[string] :[ literal[string] ]}) identifier[opts] = identifier[dist] . identifier[get_option_dict] ( literal[string] ) identifier[opts] . identifier[clear] () identifier[opts] . identifier[update] ( ( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[get_option_dict] ( literal[string] ). identifier[items] () keyword[if] identifier[k] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , )) keyword[if] identifier[self] . identifier[dependency_links] : identifier[links] = identifier[self] . identifier[dependency_links] [:] keyword[if] literal[string] keyword[in] identifier[opts] : identifier[links] = identifier[opts] [ literal[string] ][ literal[int] ]+ identifier[links] identifier[opts] [ literal[string] ]=( literal[string] , identifier[links] ) identifier[install_dir] = identifier[self] . identifier[get_egg_cache_dir] () identifier[cmd] = identifier[easy_install] ( identifier[dist] , identifier[args] =[ literal[string] ], identifier[install_dir] = identifier[install_dir] , identifier[exclude_scripts] = keyword[True] , identifier[always_copy] = keyword[False] , identifier[build_directory] = keyword[None] , identifier[editable] = keyword[False] , identifier[upgrade] = keyword[False] , identifier[multi_version] = keyword[True] , identifier[no_report] = keyword[True] , identifier[user] = keyword[False] ) identifier[cmd] . identifier[ensure_finalized] () keyword[return] identifier[cmd] . identifier[easy_install] ( identifier[req] )
def fetch_build_egg(self, req):
    """Fetch an egg needed for building"""
    from setuptools.command.easy_install import easy_install
    dist = self.__class__({'script_args': ['easy_install']})
    opts = dist.get_option_dict('easy_install')
    opts.clear()
    # don't use any other settings
    opts.update(((k, v) for (k, v) in self.get_option_dict('easy_install').items() if k in ('find_links', 'site_dirs', 'index_url', 'optimize', 'site_dirs', 'allow_hosts')))
    if self.dependency_links:
        links = self.dependency_links[:]
        if 'find_links' in opts:
            links = opts['find_links'][1] + links # depends on [control=['if'], data=['opts']]
        opts['find_links'] = ('setup', links) # depends on [control=['if'], data=[]]
    install_dir = self.get_egg_cache_dir()
    cmd = easy_install(dist, args=['x'], install_dir=install_dir, exclude_scripts=True, always_copy=False, build_directory=None, editable=False, upgrade=False, multi_version=True, no_report=True, user=False)
    cmd.ensure_finalized()
    return cmd.easy_install(req)
def edit(self, resource):
    """Edit a job.

    :param resource: :class:`jobs.Job <jobs.Job>` object
    :return: :class:`jobs.Job <jobs.Job>` object
    :rtype: jobs.Job
    """
    schema = JobSchema(exclude=('id', 'status', 'options', 'package_name',
                                'config_name', 'device_name', 'result_id',
                                'user_id', 'created', 'updated', 'automatic',
                                'run_at'))
    json = self.service.encode(schema, resource)
    schema = JobSchema()
    resp = self.service.edit(self.base, resource.name, json)
    return self.service.decode(schema, resp)
def function[edit, parameter[self, resource]]: constant[Edit a job. :param resource: :class:`jobs.Job <jobs.Job>` object :return: :class:`jobs.Job <jobs.Job>` object :rtype: jobs.Job ] variable[schema] assign[=] call[name[JobSchema], parameter[]] variable[json] assign[=] call[name[self].service.encode, parameter[name[schema], name[resource]]] variable[schema] assign[=] call[name[JobSchema], parameter[]] variable[resp] assign[=] call[name[self].service.edit, parameter[name[self].base, name[resource].name, name[json]]] return[call[name[self].service.decode, parameter[name[schema], name[resp]]]]
keyword[def] identifier[edit] ( identifier[self] , identifier[resource] ): literal[string] identifier[schema] = identifier[JobSchema] ( identifier[exclude] =( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] )) identifier[json] = identifier[self] . identifier[service] . identifier[encode] ( identifier[schema] , identifier[resource] ) identifier[schema] = identifier[JobSchema] () identifier[resp] = identifier[self] . identifier[service] . identifier[edit] ( identifier[self] . identifier[base] , identifier[resource] . identifier[name] , identifier[json] ) keyword[return] identifier[self] . identifier[service] . identifier[decode] ( identifier[schema] , identifier[resp] )
def edit(self, resource):
    """Edit a job.

    :param resource: :class:`jobs.Job <jobs.Job>` object
    :return: :class:`jobs.Job <jobs.Job>` object
    :rtype: jobs.Job
    """
    schema = JobSchema(exclude=('id', 'status', 'options', 'package_name', 'config_name', 'device_name', 'result_id', 'user_id', 'created', 'updated', 'automatic', 'run_at'))
    json = self.service.encode(schema, resource)
    schema = JobSchema()
    resp = self.service.edit(self.base, resource.name, json)
    return self.service.decode(schema, resp)
def close(self):
    """Flush the buffer and finalize the file.

    When this returns the new file is available for reading.
    """
    if not self.closed:
        self.closed = True
        self._flush(finish=True)
        self._buffer = None
def function[close, parameter[self]]: constant[Flush the buffer and finalize the file. When this returns the new file is available for reading. ] if <ast.UnaryOp object at 0x7da1b050b310> begin[:] name[self].closed assign[=] constant[True] call[name[self]._flush, parameter[]] name[self]._buffer assign[=] constant[None]
keyword[def] identifier[close] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[closed] : identifier[self] . identifier[closed] = keyword[True] identifier[self] . identifier[_flush] ( identifier[finish] = keyword[True] ) identifier[self] . identifier[_buffer] = keyword[None]
def close(self):
    """Flush the buffer and finalize the file.

    When this returns the new file is available for reading.
    """
    if not self.closed:
        self.closed = True
        self._flush(finish=True)
        self._buffer = None # depends on [control=['if'], data=[]]
def apply_compact(graph_path):
    """Run the pruned and frozen inference graph.
    """
    with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as sess:
        # Note, we just load the graph and do *not* need to initialize anything.
        with tf.gfile.GFile(graph_path, "rb") as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            tf.import_graph_def(graph_def)
        input_img = sess.graph.get_tensor_by_name('import/input_img:0')
        prediction_img = sess.graph.get_tensor_by_name('import/prediction_img:0')
        prediction = sess.run(prediction_img, {input_img: cv2.imread('lena.png')[None, ...]})
        cv2.imwrite('applied_compact.png', prediction[0])
def function[apply_compact, parameter[graph_path]]: constant[Run the pruned and frozen inference graph. ] with call[name[tf].Session, parameter[]] begin[:] with call[name[tf].gfile.GFile, parameter[name[graph_path], constant[rb]]] begin[:] variable[graph_def] assign[=] call[name[tf].GraphDef, parameter[]] call[name[graph_def].ParseFromString, parameter[call[name[f].read, parameter[]]]] call[name[tf].import_graph_def, parameter[name[graph_def]]] variable[input_img] assign[=] call[name[sess].graph.get_tensor_by_name, parameter[constant[import/input_img:0]]] variable[prediction_img] assign[=] call[name[sess].graph.get_tensor_by_name, parameter[constant[import/prediction_img:0]]] variable[prediction] assign[=] call[name[sess].run, parameter[name[prediction_img], dictionary[[<ast.Name object at 0x7da18f09e9e0>], [<ast.Subscript object at 0x7da18f09e0e0>]]]] call[name[cv2].imwrite, parameter[constant[applied_compact.png], call[name[prediction]][constant[0]]]]
keyword[def] identifier[apply_compact] ( identifier[graph_path] ): literal[string] keyword[with] identifier[tf] . identifier[Session] ( identifier[config] = identifier[tf] . identifier[ConfigProto] ( identifier[allow_soft_placement] = keyword[True] )) keyword[as] identifier[sess] : keyword[with] identifier[tf] . identifier[gfile] . identifier[GFile] ( identifier[graph_path] , literal[string] ) keyword[as] identifier[f] : identifier[graph_def] = identifier[tf] . identifier[GraphDef] () identifier[graph_def] . identifier[ParseFromString] ( identifier[f] . identifier[read] ()) identifier[tf] . identifier[import_graph_def] ( identifier[graph_def] ) identifier[input_img] = identifier[sess] . identifier[graph] . identifier[get_tensor_by_name] ( literal[string] ) identifier[prediction_img] = identifier[sess] . identifier[graph] . identifier[get_tensor_by_name] ( literal[string] ) identifier[prediction] = identifier[sess] . identifier[run] ( identifier[prediction_img] ,{ identifier[input_img] : identifier[cv2] . identifier[imread] ( literal[string] )[ keyword[None] ,...]}) identifier[cv2] . identifier[imwrite] ( literal[string] , identifier[prediction] [ literal[int] ])
def apply_compact(graph_path):
    """Run the pruned and frozen inference graph.
    """
    with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as sess:
        # Note, we just load the graph and do *not* need to initialize anything.
        with tf.gfile.GFile(graph_path, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            tf.import_graph_def(graph_def) # depends on [control=['with'], data=['f']]
        input_img = sess.graph.get_tensor_by_name('import/input_img:0')
        prediction_img = sess.graph.get_tensor_by_name('import/prediction_img:0')
        prediction = sess.run(prediction_img, {input_img: cv2.imread('lena.png')[None, ...]})
        cv2.imwrite('applied_compact.png', prediction[0]) # depends on [control=['with'], data=['sess']]
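The hard-coded 'import/...' tensor names come from tf.import_graph_def, which prefixes imported ops with 'import/' by default. When the names inside a frozen graph are unknown, one TF1-style way to discover them is to list the nodes of the parsed graph_def (a sketch, assuming graph_def was parsed as above):

for node in graph_def.node:
    print(node.name, node.op)  # e.g. 'input_img Placeholder' -> fetch it as 'import/input_img:0'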
def attrs_sqlalchemy(maybe_cls=None):
    """
    A class decorator that adds ``__repr__``, ``__eq__``, ``__cmp__``, and
    ``__hash__`` methods according to the fields defined on the SQLAlchemy
    model class.
    """
    def wrap(cls):
        warnings.warn(UserWarning('attrs_sqlalchemy is deprecated'))
        these = {
            name: attr.ib()
            # `__mapper__.columns` is a dictionary mapping field names on the
            # model class to table columns. SQLAlchemy provides many ways to
            # access the fields/columns, but this works at the time the class
            # decorator is called:
            #
            # - We can't use `cls.__table__.columns` because that directly maps
            #   the column names rather than the field names on the model. For
            #   example, given `_my_field = Column('field', ...)`,
            #   `__table__.columns` will contain 'field' rather than '_my_field'.
            #
            # - We can't use `cls.__dict__`, where values are
            #   `InstrumentedAttribute`s, because that includes relationships,
            #   synonyms, and other features.
            #
            # - We can't use `cls.__mapper__.column_attrs`
            #   (or `sqlalchemy.inspect`) because they will attempt to
            #   initialize mappers for all of the classes in the registry,
            #   which won't be ready yet.
            for name in inspect(cls).columns.keys()
        }
        return attr.s(cls, these=these, init=False)
    # `maybe_cls` depends on the usage of the decorator. It's a class if it's
    # used as `@attrs_sqlalchemy` but `None` if it's used as
    # `@attrs_sqlalchemy()`
    # ref: https://github.com/hynek/attrs/blob/15.2.0/src/attr/_make.py#L195
    if maybe_cls is None:
        return wrap
    else:
        return wrap(maybe_cls)
def function[attrs_sqlalchemy, parameter[maybe_cls]]: constant[ A class decorator that adds ``__repr__``, ``__eq__``, ``__cmp__``, and ``__hash__`` methods according to the fields defined on the SQLAlchemy model class. ] def function[wrap, parameter[cls]]: call[name[warnings].warn, parameter[call[name[UserWarning], parameter[constant[attrs_sqlalchemy is deprecated]]]]] variable[these] assign[=] <ast.DictComp object at 0x7da18f7228c0> return[call[name[attr].s, parameter[name[cls]]]] if compare[name[maybe_cls] is constant[None]] begin[:] return[name[wrap]]
keyword[def] identifier[attrs_sqlalchemy] ( identifier[maybe_cls] = keyword[None] ): literal[string] keyword[def] identifier[wrap] ( identifier[cls] ): identifier[warnings] . identifier[warn] ( identifier[UserWarning] ( literal[string] )) identifier[these] ={ identifier[name] : identifier[attr] . identifier[ib] () keyword[for] identifier[name] keyword[in] identifier[inspect] ( identifier[cls] ). identifier[columns] . identifier[keys] () } keyword[return] identifier[attr] . identifier[s] ( identifier[cls] , identifier[these] = identifier[these] , identifier[init] = keyword[False] ) keyword[if] identifier[maybe_cls] keyword[is] keyword[None] : keyword[return] identifier[wrap] keyword[else] : keyword[return] identifier[wrap] ( identifier[maybe_cls] )
def attrs_sqlalchemy(maybe_cls=None):
    """
    A class decorator that adds ``__repr__``, ``__eq__``, ``__cmp__``, and
    ``__hash__`` methods according to the fields defined on the SQLAlchemy
    model class.
    """
    def wrap(cls):
        warnings.warn(UserWarning('attrs_sqlalchemy is deprecated'))
        # `__mapper__.columns` is a dictionary mapping field names on the
        # model class to table columns. SQLAlchemy provides many ways to
        # access the fields/columns, but this works at the time the class
        # decorator is called:
        #
        # - We can't use `cls.__table__.columns` because that directly maps
        #   the column names rather than the field names on the model. For
        #   example, given `_my_field = Column('field', ...)`,
        #   `__table__.columns` will contain 'field' rather than '_my_field'.
        #
        # - We can't use `cls.__dict__`, where values are
        #   `InstrumentedAttribute`s, because that includes relationships,
        #   synonyms, and other features.
        #
        # - We can't use `cls.__mapper__.column_attrs`
        #   (or `sqlalchemy.inspect`) because they will attempt to
        #   initialize mappers for all of the classes in the registry,
        #   which won't be ready yet.
        these = {name: attr.ib() for name in inspect(cls).columns.keys()}
        return attr.s(cls, these=these, init=False)
    # `maybe_cls` depends on the usage of the decorator. It's a class if it's
    # used as `@attrs_sqlalchemy` but `None` if it's used as
    # `@attrs_sqlalchemy()`
    # ref: https://github.com/hynek/attrs/blob/15.2.0/src/attr/_make.py#L195
    if maybe_cls is None:
        return wrap # depends on [control=['if'], data=[]]
    else:
        return wrap(maybe_cls)
def update_isbn(self):
    """Remove dashes from ISBN."""
    isbns = record_get_field_instances(self.record, '020')
    for field in isbns:
        for idx, (key, value) in enumerate(field[0]):
            if key == 'a':
                field[0][idx] = ('a', value.replace("-", "").strip())
def function[update_isbn, parameter[self]]: constant[Remove dashes from ISBN.] variable[isbns] assign[=] call[name[record_get_field_instances], parameter[name[self].record, constant[020]]] for taget[name[field]] in starred[name[isbns]] begin[:] for taget[tuple[[<ast.Name object at 0x7da207f03970>, <ast.Tuple object at 0x7da207f03490>]]] in starred[call[name[enumerate], parameter[call[name[field]][constant[0]]]]] begin[:] if compare[name[key] equal[==] constant[a]] begin[:] call[call[name[field]][constant[0]]][name[idx]] assign[=] tuple[[<ast.Constant object at 0x7da207f02e90>, <ast.Call object at 0x7da207f03730>]]
keyword[def] identifier[update_isbn] ( identifier[self] ): literal[string] identifier[isbns] = identifier[record_get_field_instances] ( identifier[self] . identifier[record] , literal[string] ) keyword[for] identifier[field] keyword[in] identifier[isbns] : keyword[for] identifier[idx] ,( identifier[key] , identifier[value] ) keyword[in] identifier[enumerate] ( identifier[field] [ literal[int] ]): keyword[if] identifier[key] == literal[string] : identifier[field] [ literal[int] ][ identifier[idx] ]=( literal[string] , identifier[value] . identifier[replace] ( literal[string] , literal[string] ). identifier[strip] ())
def update_isbn(self):
    """Remove dashes from ISBN."""
    isbns = record_get_field_instances(self.record, '020')
    for field in isbns:
        for (idx, (key, value)) in enumerate(field[0]):
            if key == 'a':
                field[0][idx] = ('a', value.replace('-', '').strip()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['field']]
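The nested indexing in update_isbn assumes each field carries its subfields as a list of (code, value) pairs in field[0]. The dash-stripping step in isolation, with a made-up ISBN:

subfields = [('a', '978-3-16-148410-0 ')]
for idx, (key, value) in enumerate(subfields):
    if key == 'a':
        subfields[idx] = ('a', value.replace("-", "").strip())
print(subfields)  # [('a', '9783161484100')]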
def add(self, pathogenName, sampleName):
    """
    Add a (pathogen name, sample name) combination and get its FASTA/FASTQ
    file name and unique read count. Write the FASTA/FASTQ file if it does
    not already exist. Save the unique read count into
    C{self._proteinGrouper}.

    @param pathogenName: A C{str} pathogen name.
    @param sampleName: A C{str} sample name.
    @return: A C{str} giving the FASTA/FASTQ file name holding all the
        reads (without duplicates, by id) from the sample that matched
        the proteins in the given pathogen.
    """
    pathogenIndex = self._pathogens.setdefault(pathogenName,
                                               len(self._pathogens))
    sampleIndex = self._samples.setdefault(sampleName, len(self._samples))
    try:
        return self._readsFilenames[(pathogenIndex, sampleIndex)]
    except KeyError:
        reads = Reads()
        for proteinMatch in self._proteinGrouper.pathogenNames[
                pathogenName][sampleName]['proteins'].values():
            for read in self._readsClass(proteinMatch['readsFilename']):
                reads.add(read)
        saveFilename = join(
            proteinMatch['outDir'],
            'pathogen-%d-sample-%d.%s' % (pathogenIndex, sampleIndex,
                                          self._format))
        reads.filter(removeDuplicatesById=True)
        nReads = reads.save(saveFilename, format_=self._format)
        # Save the unique read count into self._proteinGrouper
        self._proteinGrouper.pathogenNames[
            pathogenName][sampleName]['uniqueReadCount'] = nReads
        self._readsFilenames[(pathogenIndex, sampleIndex)] = saveFilename
        return saveFilename
def function[add, parameter[self, pathogenName, sampleName]]: constant[ Add a (pathogen name, sample name) combination and get its FASTA/FASTQ file name and unique read count. Write the FASTA/FASTQ file if it does not already exist. Save the unique read count into C{self._proteinGrouper}. @param pathogenName: A C{str} pathogen name. @param sampleName: A C{str} sample name. @return: A C{str} giving the FASTA/FASTQ file name holding all the reads (without duplicates, by id) from the sample that matched the proteins in the given pathogen. ] variable[pathogenIndex] assign[=] call[name[self]._pathogens.setdefault, parameter[name[pathogenName], call[name[len], parameter[name[self]._pathogens]]]] variable[sampleIndex] assign[=] call[name[self]._samples.setdefault, parameter[name[sampleName], call[name[len], parameter[name[self]._samples]]]] <ast.Try object at 0x7da207f00160>
keyword[def] identifier[add] ( identifier[self] , identifier[pathogenName] , identifier[sampleName] ): literal[string] identifier[pathogenIndex] = identifier[self] . identifier[_pathogens] . identifier[setdefault] ( identifier[pathogenName] , identifier[len] ( identifier[self] . identifier[_pathogens] )) identifier[sampleIndex] = identifier[self] . identifier[_samples] . identifier[setdefault] ( identifier[sampleName] , identifier[len] ( identifier[self] . identifier[_samples] )) keyword[try] : keyword[return] identifier[self] . identifier[_readsFilenames] [( identifier[pathogenIndex] , identifier[sampleIndex] )] keyword[except] identifier[KeyError] : identifier[reads] = identifier[Reads] () keyword[for] identifier[proteinMatch] keyword[in] identifier[self] . identifier[_proteinGrouper] . identifier[pathogenNames] [ identifier[pathogenName] ][ identifier[sampleName] ][ literal[string] ]. identifier[values] (): keyword[for] identifier[read] keyword[in] identifier[self] . identifier[_readsClass] ( identifier[proteinMatch] [ literal[string] ]): identifier[reads] . identifier[add] ( identifier[read] ) identifier[saveFilename] = identifier[join] ( identifier[proteinMatch] [ literal[string] ], literal[string] %( identifier[pathogenIndex] , identifier[sampleIndex] , identifier[self] . identifier[_format] )) identifier[reads] . identifier[filter] ( identifier[removeDuplicatesById] = keyword[True] ) identifier[nReads] = identifier[reads] . identifier[save] ( identifier[saveFilename] , identifier[format_] = identifier[self] . identifier[_format] ) identifier[self] . identifier[_proteinGrouper] . identifier[pathogenNames] [ identifier[pathogenName] ][ identifier[sampleName] ][ literal[string] ]= identifier[nReads] identifier[self] . identifier[_readsFilenames] [( identifier[pathogenIndex] , identifier[sampleIndex] )]= identifier[saveFilename] keyword[return] identifier[saveFilename]
def add(self, pathogenName, sampleName):
    """
    Add a (pathogen name, sample name) combination and get its FASTA/FASTQ
    file name and unique read count. Write the FASTA/FASTQ file if it does
    not already exist. Save the unique read count into
    C{self._proteinGrouper}.

    @param pathogenName: A C{str} pathogen name.
    @param sampleName: A C{str} sample name.
    @return: A C{str} giving the FASTA/FASTQ file name holding all the
        reads (without duplicates, by id) from the sample that matched
        the proteins in the given pathogen.
    """
    pathogenIndex = self._pathogens.setdefault(pathogenName, len(self._pathogens))
    sampleIndex = self._samples.setdefault(sampleName, len(self._samples))
    try:
        return self._readsFilenames[pathogenIndex, sampleIndex] # depends on [control=['try'], data=[]]
    except KeyError:
        reads = Reads()
        for proteinMatch in self._proteinGrouper.pathogenNames[pathogenName][sampleName]['proteins'].values():
            for read in self._readsClass(proteinMatch['readsFilename']):
                reads.add(read) # depends on [control=['for'], data=['read']] # depends on [control=['for'], data=['proteinMatch']]
        saveFilename = join(proteinMatch['outDir'], 'pathogen-%d-sample-%d.%s' % (pathogenIndex, sampleIndex, self._format))
        reads.filter(removeDuplicatesById=True)
        nReads = reads.save(saveFilename, format_=self._format)
        # Save the unique read count into self._proteinGrouper
        self._proteinGrouper.pathogenNames[pathogenName][sampleName]['uniqueReadCount'] = nReads
        self._readsFilenames[pathogenIndex, sampleIndex] = saveFilename
        return saveFilename # depends on [control=['except'], data=[]]
def load_word_file(filename):
    """Loads a words file as a list of lines"""
    words_file = resource_filename(__name__, "words/%s" % filename)
    handle = open(words_file, 'r')
    words = handle.readlines()
    handle.close()
    return words
def function[load_word_file, parameter[filename]]: constant[Loads a words file as a list of lines] variable[words_file] assign[=] call[name[resource_filename], parameter[name[__name__], binary_operation[constant[words/%s] <ast.Mod object at 0x7da2590d6920> name[filename]]]] variable[handle] assign[=] call[name[open], parameter[name[words_file], constant[r]]] variable[words] assign[=] call[name[handle].readlines, parameter[]] call[name[handle].close, parameter[]] return[name[words]]
keyword[def] identifier[load_word_file] ( identifier[filename] ): literal[string] identifier[words_file] = identifier[resource_filename] ( identifier[__name__] , literal[string] % identifier[filename] ) identifier[handle] = identifier[open] ( identifier[words_file] , literal[string] ) identifier[words] = identifier[handle] . identifier[readlines] () identifier[handle] . identifier[close] () keyword[return] identifier[words]
def load_word_file(filename):
    """Loads a words file as a list of lines"""
    words_file = resource_filename(__name__, 'words/%s' % filename)
    handle = open(words_file, 'r')
    words = handle.readlines()
    handle.close()
    return words
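load_word_file leaves the handle open if readlines() raises. A sketch of the same lookup with a context manager, so the file is closed on any exit path; resource_filename is assumed imported from pkg_resources as in the row above:

def load_word_file_safe(filename):
    """Loads a words file as a list of lines, closing the handle via with."""
    words_file = resource_filename(__name__, "words/%s" % filename)
    with open(words_file, 'r') as handle:
        return handle.readlines()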
def unbind(self):
    """
    Unbinds this connection from queue and topic managers (freeing up resources)
    and resets state.
    """
    self.connected = False
    self.queue_manager.disconnect(self.connection)
    self.topic_manager.disconnect(self.connection)
def function[unbind, parameter[self]]: constant[ Unbinds this connection from queue and topic managers (freeing up resources) and resets state. ] name[self].connected assign[=] constant[False] call[name[self].queue_manager.disconnect, parameter[name[self].connection]] call[name[self].topic_manager.disconnect, parameter[name[self].connection]]
keyword[def] identifier[unbind] ( identifier[self] ): literal[string] identifier[self] . identifier[connected] = keyword[False] identifier[self] . identifier[queue_manager] . identifier[disconnect] ( identifier[self] . identifier[connection] ) identifier[self] . identifier[topic_manager] . identifier[disconnect] ( identifier[self] . identifier[connection] )
def unbind(self):
    """
    Unbinds this connection from queue and topic managers (freeing up resources)
    and resets state.
    """
    self.connected = False
    self.queue_manager.disconnect(self.connection)
    self.topic_manager.disconnect(self.connection)
def get_file_relative_path_by_id(self, id):
    """
    Given an id, get the corresponding file info relative path joined with
    file name.

    Parameters:
        #. id (string): The file unique id string.

    :Returns:
        #. relativePath (string): The file relative path joined with file
           name. If None, it means file was not found.
    """
    for path, info in self.walk_files_info():
        if info['id']==id:
            return path
    # none was found
    return None
def function[get_file_relative_path_by_id, parameter[self, id]]: constant[ Given an id, get the corresponding file info relative path joined with file name. Parameters: #. id (string): The file unique id string. :Returns: #. relativePath (string): The file relative path joined with file name. If None, it means file was not found. ] for taget[tuple[[<ast.Name object at 0x7da204567c40>, <ast.Name object at 0x7da2045678b0>]]] in starred[call[name[self].walk_files_info, parameter[]]] begin[:] if compare[call[name[info]][constant[id]] equal[==] name[id]] begin[:] return[name[path]] return[constant[None]]
keyword[def] identifier[get_file_relative_path_by_id] ( identifier[self] , identifier[id] ): literal[string] keyword[for] identifier[path] , identifier[info] keyword[in] identifier[self] . identifier[walk_files_info] (): keyword[if] identifier[info] [ literal[string] ]== identifier[id] : keyword[return] identifier[path] keyword[return] keyword[None]
def get_file_relative_path_by_id(self, id):
    """
    Given an id, get the corresponding file info relative path joined with
    file name.

    Parameters:
        #. id (string): The file unique id string.

    :Returns:
        #. relativePath (string): The file relative path joined with file
           name. If None, it means file was not found.
    """
    for (path, info) in self.walk_files_info():
        if info['id'] == id:
            return path # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
    # none was found
    return None
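The lookup above is a linear scan; the same thing can be written as a single next() over a generator expression. A sketch, written as a hypothetical alternative method on the same class:

def get_file_relative_path_by_id_alt(self, id):
    """Equivalent one-pass lookup; returns None when no file matches."""
    return next((path for path, info in self.walk_files_info() if info['id'] == id), None)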
def find_subdirectories(package):
    """
    Get the subdirectories within a package
    This will include resources (non-submodules) and submodules
    """
    try:
        subdirectories = os.walk(package_to_path(package)).next()[1]
    except StopIteration:
        subdirectories = []
    return subdirectories
def function[find_subdirectories, parameter[package]]: constant[ Get the subdirectories within a package This will include resources (non-submodules) and submodules ] <ast.Try object at 0x7da2054a5cf0> return[name[subdirectories]]
keyword[def] identifier[find_subdirectories] ( identifier[package] ): literal[string] keyword[try] : identifier[subdirectories] = identifier[os] . identifier[walk] ( identifier[package_to_path] ( identifier[package] )). identifier[next] ()[ literal[int] ] keyword[except] identifier[StopIteration] : identifier[subdirectories] =[] keyword[return] identifier[subdirectories]
def find_subdirectories(package):
    """
    Get the subdirectories within a package
    This will include resources (non-submodules) and submodules
    """
    try:
        subdirectories = os.walk(package_to_path(package)).next()[1] # depends on [control=['try'], data=[]]
    except StopIteration:
        subdirectories = [] # depends on [control=['except'], data=[]]
    return subdirectories
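The .next() call above is Python 2-only; Python 3 removed that method in favor of the next() builtin. A sketch of a Python 3 form, assuming the same package_to_path helper from the surrounding module:

def find_subdirectories_py3(package):
    """Python 3 variant: next() builtin instead of the removed .next() method."""
    try:
        return next(os.walk(package_to_path(package)))[1]
    except StopIteration:
        return []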
def _make_binary_stream(thing, encoding=None, armor=True):
    """Encode **thing**, then make it stream/file-like.

    :param thing: The thing to turn into a encoded stream.
    :rtype: ``io.BytesIO`` or ``io.StringIO``.
    :returns: The encoded **thing**, wrapped in an ``io.BytesIO`` (if
        available), otherwise wrapped in a ``io.StringIO``.
    """
    if _py3k:
        if isinstance(thing, str):
            thing = thing.encode(encoding)
    else:
        if type(thing) is not str:
            thing = thing.encode(encoding)
    try:
        rv = BytesIO(thing)
    except NameError:
        rv = StringIO(thing)
    return rv
def function[_make_binary_stream, parameter[thing, encoding, armor]]: constant[Encode **thing**, then make it stream/file-like. :param thing: The thing to turn into a encoded stream. :rtype: ``io.BytesIO`` or ``io.StringIO``. :returns: The encoded **thing**, wrapped in an ``io.BytesIO`` (if available), otherwise wrapped in a ``io.StringIO``. ] if name[_py3k] begin[:] if call[name[isinstance], parameter[name[thing], name[str]]] begin[:] variable[thing] assign[=] call[name[thing].encode, parameter[name[encoding]]] <ast.Try object at 0x7da1b078a410> return[name[rv]]
keyword[def] identifier[_make_binary_stream] ( identifier[thing] , identifier[encoding] = keyword[None] , identifier[armor] = keyword[True] ): literal[string] keyword[if] identifier[_py3k] : keyword[if] identifier[isinstance] ( identifier[thing] , identifier[str] ): identifier[thing] = identifier[thing] . identifier[encode] ( identifier[encoding] ) keyword[else] : keyword[if] identifier[type] ( identifier[thing] ) keyword[is] keyword[not] identifier[str] : identifier[thing] = identifier[thing] . identifier[encode] ( identifier[encoding] ) keyword[try] : identifier[rv] = identifier[BytesIO] ( identifier[thing] ) keyword[except] identifier[NameError] : identifier[rv] = identifier[StringIO] ( identifier[thing] ) keyword[return] identifier[rv]
def _make_binary_stream(thing, encoding=None, armor=True):
    """Encode **thing**, then make it stream/file-like.

    :param thing: The thing to turn into a encoded stream.
    :rtype: ``io.BytesIO`` or ``io.StringIO``.
    :returns: The encoded **thing**, wrapped in an ``io.BytesIO`` (if
        available), otherwise wrapped in a ``io.StringIO``.
    """
    if _py3k:
        if isinstance(thing, str):
            thing = thing.encode(encoding) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    elif type(thing) is not str:
        thing = thing.encode(encoding) # depends on [control=['if'], data=[]]
    try:
        rv = BytesIO(thing) # depends on [control=['try'], data=[]]
    except NameError:
        rv = StringIO(thing) # depends on [control=['except'], data=[]]
    return rv
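A quick usage sketch for the row above, assuming Python 3 (so _py3k is true) and `from io import BytesIO, StringIO` in the enclosing module:

stream = _make_binary_stream('héllo', encoding='utf-8')
print(stream.read())  # b'h\xc3\xa9llo' -- the text arrives encoded and file-like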
def node_set_to_surface(self, tag):
    """
    Converts a node set to surface.
    """
    # Create a dummy node with label 0
    nodes = self.nodes.copy()
    dummy = nodes.iloc[0].copy()
    dummy["coords"] *= np.nan
    dummy["sets"] = True
    nodes.loc[0] = dummy
    # Getting element surfaces
    element_surfaces = self.split("surfaces").unstack()
    # killer hack !
    surf = pd.DataFrame(
        nodes.sets[tag].loc[element_surfaces.values.flatten()]
        .values.reshape(element_surfaces.shape)
        .prod(axis = 1)
        .astype(np.bool),
        index = element_surfaces.index).unstack().fillna(False)
    for k in surf.keys():
        self.elements["surfaces", tag, "f{0}".format(k[1]+1)] = surf.loc[:, k]
def function[node_set_to_surface, parameter[self, tag]]: constant[ Converts a node set to surface. ] variable[nodes] assign[=] call[name[self].nodes.copy, parameter[]] variable[dummy] assign[=] call[call[name[nodes].iloc][constant[0]].copy, parameter[]] <ast.AugAssign object at 0x7da1b00032e0> call[name[dummy]][constant[sets]] assign[=] constant[True] call[name[nodes].loc][constant[0]] assign[=] name[dummy] variable[element_surfaces] assign[=] call[call[name[self].split, parameter[constant[surfaces]]].unstack, parameter[]] variable[surf] assign[=] call[call[call[name[pd].DataFrame, parameter[call[call[call[call[call[name[nodes].sets][name[tag]].loc][call[name[element_surfaces].values.flatten, parameter[]]].values.reshape, parameter[name[element_surfaces].shape]].prod, parameter[]].astype, parameter[name[np].bool]]]].unstack, parameter[]].fillna, parameter[constant[False]]] for taget[name[k]] in starred[call[name[surf].keys, parameter[]]] begin[:] call[name[self].elements][tuple[[<ast.Constant object at 0x7da1b002f940>, <ast.Name object at 0x7da1b002ea10>, <ast.Call object at 0x7da1b002ea40>]]] assign[=] call[name[surf].loc][tuple[[<ast.Slice object at 0x7da1b002f700>, <ast.Name object at 0x7da1b002f760>]]]
keyword[def] identifier[node_set_to_surface] ( identifier[self] , identifier[tag] ): literal[string] identifier[nodes] = identifier[self] . identifier[nodes] . identifier[copy] () identifier[dummy] = identifier[nodes] . identifier[iloc] [ literal[int] ]. identifier[copy] () identifier[dummy] [ literal[string] ]*= identifier[np] . identifier[nan] identifier[dummy] [ literal[string] ]= keyword[True] identifier[nodes] . identifier[loc] [ literal[int] ]= identifier[dummy] identifier[element_surfaces] = identifier[self] . identifier[split] ( literal[string] ). identifier[unstack] () identifier[surf] = identifier[pd] . identifier[DataFrame] ( identifier[nodes] . identifier[sets] [ identifier[tag] ]. identifier[loc] [ identifier[element_surfaces] . identifier[values] . identifier[flatten] ()] . identifier[values] . identifier[reshape] ( identifier[element_surfaces] . identifier[shape] ) . identifier[prod] ( identifier[axis] = literal[int] ) . identifier[astype] ( identifier[np] . identifier[bool] ), identifier[index] = identifier[element_surfaces] . identifier[index] ). identifier[unstack] (). identifier[fillna] ( keyword[False] ) keyword[for] identifier[k] keyword[in] identifier[surf] . identifier[keys] (): identifier[self] . identifier[elements] [ literal[string] , identifier[tag] , literal[string] . identifier[format] ( identifier[k] [ literal[int] ]+ literal[int] )]= identifier[surf] . identifier[loc] [:, identifier[k] ]
def node_set_to_surface(self, tag):
    """
    Converts a node set to surface.
    """
    # Create a dummy node with label 0
    nodes = self.nodes.copy()
    dummy = nodes.iloc[0].copy()
    dummy['coords'] *= np.nan
    dummy['sets'] = True
    nodes.loc[0] = dummy
    # Getting element surfaces
    element_surfaces = self.split('surfaces').unstack()
    # killer hack !
    surf = pd.DataFrame(nodes.sets[tag].loc[element_surfaces.values.flatten()].values.reshape(element_surfaces.shape).prod(axis=1).astype(np.bool), index=element_surfaces.index).unstack().fillna(False)
    for k in surf.keys():
        self.elements['surfaces', tag, 'f{0}'.format(k[1] + 1)] = surf.loc[:, k] # depends on [control=['for'], data=['k']]
def wait(self):
    """
    Block until a matched message appears.
    """
    if not self._patterns:
        raise RuntimeError('Listener has nothing to capture')
    while 1:
        msg = self._queue.get(block=True)
        if any(map(lambda p: filtering.match_all(msg, p), self._patterns)):
            return msg
def function[wait, parameter[self]]: constant[ Block until a matched message appears. ] if <ast.UnaryOp object at 0x7da1b1b0fdf0> begin[:] <ast.Raise object at 0x7da1b1b0d0f0> while constant[1] begin[:] variable[msg] assign[=] call[name[self]._queue.get, parameter[]] if call[name[any], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b1b0fcd0>, name[self]._patterns]]]] begin[:] return[name[msg]]
keyword[def] identifier[wait] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_patterns] : keyword[raise] identifier[RuntimeError] ( literal[string] ) keyword[while] literal[int] : identifier[msg] = identifier[self] . identifier[_queue] . identifier[get] ( identifier[block] = keyword[True] ) keyword[if] identifier[any] ( identifier[map] ( keyword[lambda] identifier[p] : identifier[filtering] . identifier[match_all] ( identifier[msg] , identifier[p] ), identifier[self] . identifier[_patterns] )): keyword[return] identifier[msg]
def wait(self):
    """
    Block until a matched message appears.
    """
    if not self._patterns:
        raise RuntimeError('Listener has nothing to capture') # depends on [control=['if'], data=[]]
    while 1:
        msg = self._queue.get(block=True)
        if any(map(lambda p: filtering.match_all(msg, p), self._patterns)):
            return msg # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
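filtering.match_all is project-specific and not shown in this row. Purely as an illustration of the calling convention, a stand-in where a pattern is a dict of required key/value pairs:

def match_all(msg, pattern):
    """Illustrative stand-in, not the real filtering.match_all."""
    return all(msg.get(k) == v for k, v in pattern.items())

print(match_all({'op': 'set', 'id': 3}, {'op': 'set'}))  # True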
def cee_map_priority_table_map_cos2_pgid(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    cee_map = ET.SubElement(config, "cee-map", xmlns="urn:brocade.com:mgmt:brocade-cee-map")
    name_key = ET.SubElement(cee_map, "name")
    name_key.text = kwargs.pop('name')
    priority_table = ET.SubElement(cee_map, "priority-table")
    map_cos2_pgid = ET.SubElement(priority_table, "map-cos2-pgid")
    map_cos2_pgid.text = kwargs.pop('map_cos2_pgid')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def function[cee_map_priority_table_map_cos2_pgid, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[cee_map] assign[=] call[name[ET].SubElement, parameter[name[config], constant[cee-map]]] variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[cee_map], constant[name]]] name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]] variable[priority_table] assign[=] call[name[ET].SubElement, parameter[name[cee_map], constant[priority-table]]] variable[map_cos2_pgid] assign[=] call[name[ET].SubElement, parameter[name[priority_table], constant[map-cos2-pgid]]] name[map_cos2_pgid].text assign[=] call[name[kwargs].pop, parameter[constant[map_cos2_pgid]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[cee_map_priority_table_map_cos2_pgid] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[cee_map] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[cee_map] , literal[string] ) identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[priority_table] = identifier[ET] . identifier[SubElement] ( identifier[cee_map] , literal[string] ) identifier[map_cos2_pgid] = identifier[ET] . identifier[SubElement] ( identifier[priority_table] , literal[string] ) identifier[map_cos2_pgid] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def cee_map_priority_table_map_cos2_pgid(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element('config')
    cee_map = ET.SubElement(config, 'cee-map', xmlns='urn:brocade.com:mgmt:brocade-cee-map')
    name_key = ET.SubElement(cee_map, 'name')
    name_key.text = kwargs.pop('name')
    priority_table = ET.SubElement(cee_map, 'priority-table')
    map_cos2_pgid = ET.SubElement(priority_table, 'map-cos2-pgid')
    map_cos2_pgid.text = kwargs.pop('map_cos2_pgid')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
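For reference, a standalone sketch of the XML such a builder emits; the tag values here are illustrative, not taken from the dataset:

import xml.etree.ElementTree as ET

config = ET.Element('config')
cee_map = ET.SubElement(config, 'cee-map', xmlns='urn:brocade.com:mgmt:brocade-cee-map')
ET.SubElement(cee_map, 'name').text = 'default'
priority_table = ET.SubElement(cee_map, 'priority-table')
ET.SubElement(priority_table, 'map-cos2-pgid').text = '1'
print(ET.tostring(config).decode())
# <config><cee-map xmlns="urn:brocade.com:mgmt:brocade-cee-map"><name>default</name>
#   <priority-table><map-cos2-pgid>1</map-cos2-pgid></priority-table></cee-map></config>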
async def _delay(self):
    """Check for a 'retry-after' header to set timeout,
    otherwise use configured timeout.
    """
    if self._response is None:
        await asyncio.sleep(0)
    if self._response.headers.get('retry-after'):
        await asyncio.sleep(int(self._response.headers['retry-after']))
    else:
        await asyncio.sleep(self._timeout)
<ast.AsyncFunctionDef object at 0x7da18dc99ff0>
keyword[async] keyword[def] identifier[_delay] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_response] keyword[is] keyword[None] : keyword[await] identifier[asyncio] . identifier[sleep] ( literal[int] ) keyword[if] identifier[self] . identifier[_response] . identifier[headers] . identifier[get] ( literal[string] ): keyword[await] identifier[asyncio] . identifier[sleep] ( identifier[int] ( identifier[self] . identifier[_response] . identifier[headers] [ literal[string] ])) keyword[else] : keyword[await] identifier[asyncio] . identifier[sleep] ( identifier[self] . identifier[_timeout] )
async def _delay(self):
    """Check for a 'retry-after' header to set timeout,
    otherwise use configured timeout.
    """
    if self._response is None:
        await asyncio.sleep(0)
        return # no response yet, so no retry-after header to inspect # depends on [control=['if'], data=[]]
    if self._response.headers.get('retry-after'):
        await asyncio.sleep(int(self._response.headers['retry-after'])) # depends on [control=['if'], data=[]]
    else:
        await asyncio.sleep(self._timeout)
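A minimal runnable sketch of the retry-after logic, with a stub standing in for self._response; the stub class, the free function form, and the 30-second default are assumptions for illustration:
import asyncio

class _StubResponse:
    headers = {'retry-after': '1'}

async def delay(response, default_timeout=30):
    if response is None:
        await asyncio.sleep(0)
        return  # nothing to inspect yet
    # header value wins over the configured timeout
    await asyncio.sleep(int(response.headers.get('retry-after', default_timeout)))

asyncio.run(delay(_StubResponse()))  # sleeps for 1 second, honouring the header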
def mount(self, volume):
    """Mounts the given volume on a newly created mountpoint. The default
    implementation simply calls mount.

    :param Volume volume: The volume to be mounted
    :raises UnsupportedFilesystemError: when the volume system type cannot be mounted.
    """

    volume._make_mountpoint()
    try:
        self._call_mount(volume, volume.mountpoint, self._mount_type or self.type, self._mount_opts)
    except Exception:
        # undo the creation of the mountpoint
        volume._clear_mountpoint()
        raise
def function[mount, parameter[self, volume]]: constant[Mounts the given volume on the provided mountpoint. The default implementation simply calls mount. :param Volume volume: The volume to be mounted :param mountpoint: The file system path to mount the filesystem on. :raises UnsupportedFilesystemError: when the volume system type can not be mounted. ] call[name[volume]._make_mountpoint, parameter[]] <ast.Try object at 0x7da1b04b7fa0>
keyword[def] identifier[mount] ( identifier[self] , identifier[volume] ): literal[string] identifier[volume] . identifier[_make_mountpoint] () keyword[try] : identifier[self] . identifier[_call_mount] ( identifier[volume] , identifier[volume] . identifier[mountpoint] , identifier[self] . identifier[_mount_type] keyword[or] identifier[self] . identifier[type] , identifier[self] . identifier[_mount_opts] ) keyword[except] identifier[Exception] : identifier[volume] . identifier[_clear_mountpoint] () keyword[raise]
def mount(self, volume):
    """Mounts the given volume on a newly created mountpoint. The default
    implementation simply calls mount.

    :param Volume volume: The volume to be mounted
    :raises UnsupportedFilesystemError: when the volume system type cannot be mounted.
    """
    volume._make_mountpoint()
    try:
        self._call_mount(volume, volume.mountpoint, self._mount_type or self.type, self._mount_opts) # depends on [control=['try'], data=[]]
    except Exception:
        # undo the creation of the mountpoint
        volume._clear_mountpoint()
        raise # depends on [control=['except'], data=[]]
def push_async_callback(self, callback, *args, **kwds): """Registers an arbitrary coroutine function and arguments. Cannot suppress exceptions. """ _exit_wrapper = self._create_async_cb_wrapper(callback, *args, **kwds) # We changed the signature, so using @wraps is not appropriate, but # setting __wrapped__ may still help with introspection. _exit_wrapper.__wrapped__ = callback self._push_exit_callback(_exit_wrapper, False) return callback
def function[push_async_callback, parameter[self, callback]]: constant[Registers an arbitrary coroutine function and arguments. Cannot suppress exceptions. ] variable[_exit_wrapper] assign[=] call[name[self]._create_async_cb_wrapper, parameter[name[callback], <ast.Starred object at 0x7da18ede49d0>]] name[_exit_wrapper].__wrapped__ assign[=] name[callback] call[name[self]._push_exit_callback, parameter[name[_exit_wrapper], constant[False]]] return[name[callback]]
keyword[def] identifier[push_async_callback] ( identifier[self] , identifier[callback] ,* identifier[args] ,** identifier[kwds] ): literal[string] identifier[_exit_wrapper] = identifier[self] . identifier[_create_async_cb_wrapper] ( identifier[callback] ,* identifier[args] ,** identifier[kwds] ) identifier[_exit_wrapper] . identifier[__wrapped__] = identifier[callback] identifier[self] . identifier[_push_exit_callback] ( identifier[_exit_wrapper] , keyword[False] ) keyword[return] identifier[callback]
def push_async_callback(self, callback, *args, **kwds): """Registers an arbitrary coroutine function and arguments. Cannot suppress exceptions. """ _exit_wrapper = self._create_async_cb_wrapper(callback, *args, **kwds) # We changed the signature, so using @wraps is not appropriate, but # setting __wrapped__ may still help with introspection. _exit_wrapper.__wrapped__ = callback self._push_exit_callback(_exit_wrapper, False) return callback
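This mirrors contextlib.AsyncExitStack.push_async_callback from the standard library (Python 3.7+); the registered coroutine functions run in LIFO order when the stack unwinds:
import asyncio
from contextlib import AsyncExitStack

async def cleanup(name):
    print("cleaning up", name)

async def main():
    async with AsyncExitStack() as stack:
        stack.push_async_callback(cleanup, "resource-a")
        stack.push_async_callback(cleanup, "resource-b")
        # on exit the callbacks fire in LIFO order: resource-b, then resource-a

asyncio.run(main())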
def _verify_credentials(self): """ An internal method that verifies the credentials given at instantiation. :raises: :class:`Pymoe.errors.UserLoginFailed` """ r = requests.get(self.apiurl + "account/verify_credentials.xml", auth=HTTPBasicAuth(self._username, self._password), headers=self.header) if r.status_code != 200: raise UserLoginFailed("Username or Password incorrect.")
def function[_verify_credentials, parameter[self]]: constant[ An internal method that verifies the credentials given at instantiation. :raises: :class:`Pymoe.errors.UserLoginFailed` ] variable[r] assign[=] call[name[requests].get, parameter[binary_operation[name[self].apiurl + constant[account/verify_credentials.xml]]]] if compare[name[r].status_code not_equal[!=] constant[200]] begin[:] <ast.Raise object at 0x7da1b05ff370>
keyword[def] identifier[_verify_credentials] ( identifier[self] ): literal[string] identifier[r] = identifier[requests] . identifier[get] ( identifier[self] . identifier[apiurl] + literal[string] , identifier[auth] = identifier[HTTPBasicAuth] ( identifier[self] . identifier[_username] , identifier[self] . identifier[_password] ), identifier[headers] = identifier[self] . identifier[header] ) keyword[if] identifier[r] . identifier[status_code] != literal[int] : keyword[raise] identifier[UserLoginFailed] ( literal[string] )
def _verify_credentials(self): """ An internal method that verifies the credentials given at instantiation. :raises: :class:`Pymoe.errors.UserLoginFailed` """ r = requests.get(self.apiurl + 'account/verify_credentials.xml', auth=HTTPBasicAuth(self._username, self._password), headers=self.header) if r.status_code != 200: raise UserLoginFailed('Username or Password incorrect.') # depends on [control=['if'], data=[]]
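The same check in isolation with requests; the URL and credentials below are placeholders, not the service's real endpoint:
import requests
from requests.auth import HTTPBasicAuth

r = requests.get("https://example.com/api/account/verify_credentials.xml",
                 auth=HTTPBasicAuth("user", "secret"))
print("ok" if r.status_code == 200 else "login failed")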
def add_key_filters(self, key_filters): """ Adds key filters to the inputs. :param key_filters: a list of filters :type key_filters: list :rtype: :class:`RiakMapReduce` """ if self._input_mode == 'query': raise ValueError('Key filters are not supported in a query.') self._key_filters.extend(key_filters) return self
def function[add_key_filters, parameter[self, key_filters]]: constant[ Adds key filters to the inputs. :param key_filters: a list of filters :type key_filters: list :rtype: :class:`RiakMapReduce` ] if compare[name[self]._input_mode equal[==] constant[query]] begin[:] <ast.Raise object at 0x7da18eb54af0> call[name[self]._key_filters.extend, parameter[name[key_filters]]] return[name[self]]
keyword[def] identifier[add_key_filters] ( identifier[self] , identifier[key_filters] ): literal[string] keyword[if] identifier[self] . identifier[_input_mode] == literal[string] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[self] . identifier[_key_filters] . identifier[extend] ( identifier[key_filters] ) keyword[return] identifier[self]
def add_key_filters(self, key_filters): """ Adds key filters to the inputs. :param key_filters: a list of filters :type key_filters: list :rtype: :class:`RiakMapReduce` """ if self._input_mode == 'query': raise ValueError('Key filters are not supported in a query.') # depends on [control=['if'], data=[]] self._key_filters.extend(key_filters) return self
def _build_vertex_data(self): """Rebuild the vertex buffers used for rendering the image when using the subdivide method. """ grid = self._grid w = 1.0 / grid[1] h = 1.0 / grid[0] quad = np.array([[0, 0, 0], [w, 0, 0], [w, h, 0], [0, 0, 0], [w, h, 0], [0, h, 0]], dtype=np.float32) quads = np.empty((grid[1], grid[0], 6, 3), dtype=np.float32) quads[:] = quad mgrid = np.mgrid[0.:grid[1], 0.:grid[0]].transpose(1, 2, 0) mgrid = mgrid[:, :, np.newaxis, :] mgrid[..., 0] *= w mgrid[..., 1] *= h quads[..., :2] += mgrid tex_coords = quads.reshape(grid[1]*grid[0]*6, 3) tex_coords = np.ascontiguousarray(tex_coords[:, :2]) vertices = tex_coords * self.size self._subdiv_position.set_data(vertices.astype('float32')) self._subdiv_texcoord.set_data(tex_coords.astype('float32'))
def function[_build_vertex_data, parameter[self]]: constant[Rebuild the vertex buffers used for rendering the image when using the subdivide method. ] variable[grid] assign[=] name[self]._grid variable[w] assign[=] binary_operation[constant[1.0] / call[name[grid]][constant[1]]] variable[h] assign[=] binary_operation[constant[1.0] / call[name[grid]][constant[0]]] variable[quad] assign[=] call[name[np].array, parameter[list[[<ast.List object at 0x7da1b0e427d0>, <ast.List object at 0x7da1b0e43af0>, <ast.List object at 0x7da1b0e43910>, <ast.List object at 0x7da1b0e427a0>, <ast.List object at 0x7da1b0e40e80>, <ast.List object at 0x7da1b0e439a0>]]]] variable[quads] assign[=] call[name[np].empty, parameter[tuple[[<ast.Subscript object at 0x7da1b0e43b80>, <ast.Subscript object at 0x7da1b0e412a0>, <ast.Constant object at 0x7da1b0e424a0>, <ast.Constant object at 0x7da1b0e42680>]]]] call[name[quads]][<ast.Slice object at 0x7da1b0ff9960>] assign[=] name[quad] variable[mgrid] assign[=] call[call[name[np].mgrid][tuple[[<ast.Slice object at 0x7da1b0ff9750>, <ast.Slice object at 0x7da1b0ff8ee0>]]].transpose, parameter[constant[1], constant[2], constant[0]]] variable[mgrid] assign[=] call[name[mgrid]][tuple[[<ast.Slice object at 0x7da1b0ff8190>, <ast.Slice object at 0x7da1b0ff9420>, <ast.Attribute object at 0x7da1b0ff92a0>, <ast.Slice object at 0x7da1b0ff9a50>]]] <ast.AugAssign object at 0x7da1b0ff8340> <ast.AugAssign object at 0x7da1b0ff8970> <ast.AugAssign object at 0x7da1b0ff9840> variable[tex_coords] assign[=] call[name[quads].reshape, parameter[binary_operation[binary_operation[call[name[grid]][constant[1]] * call[name[grid]][constant[0]]] * constant[6]], constant[3]]] variable[tex_coords] assign[=] call[name[np].ascontiguousarray, parameter[call[name[tex_coords]][tuple[[<ast.Slice object at 0x7da1b10ec3d0>, <ast.Slice object at 0x7da1b10ed0f0>]]]]] variable[vertices] assign[=] binary_operation[name[tex_coords] * name[self].size] call[name[self]._subdiv_position.set_data, parameter[call[name[vertices].astype, parameter[constant[float32]]]]] call[name[self]._subdiv_texcoord.set_data, parameter[call[name[tex_coords].astype, parameter[constant[float32]]]]]
keyword[def] identifier[_build_vertex_data] ( identifier[self] ): literal[string] identifier[grid] = identifier[self] . identifier[_grid] identifier[w] = literal[int] / identifier[grid] [ literal[int] ] identifier[h] = literal[int] / identifier[grid] [ literal[int] ] identifier[quad] = identifier[np] . identifier[array] ([[ literal[int] , literal[int] , literal[int] ],[ identifier[w] , literal[int] , literal[int] ],[ identifier[w] , identifier[h] , literal[int] ], [ literal[int] , literal[int] , literal[int] ],[ identifier[w] , identifier[h] , literal[int] ],[ literal[int] , identifier[h] , literal[int] ]], identifier[dtype] = identifier[np] . identifier[float32] ) identifier[quads] = identifier[np] . identifier[empty] (( identifier[grid] [ literal[int] ], identifier[grid] [ literal[int] ], literal[int] , literal[int] ), identifier[dtype] = identifier[np] . identifier[float32] ) identifier[quads] [:]= identifier[quad] identifier[mgrid] = identifier[np] . identifier[mgrid] [ literal[int] : identifier[grid] [ literal[int] ], literal[int] : identifier[grid] [ literal[int] ]]. identifier[transpose] ( literal[int] , literal[int] , literal[int] ) identifier[mgrid] = identifier[mgrid] [:,:, identifier[np] . identifier[newaxis] ,:] identifier[mgrid] [..., literal[int] ]*= identifier[w] identifier[mgrid] [..., literal[int] ]*= identifier[h] identifier[quads] [...,: literal[int] ]+= identifier[mgrid] identifier[tex_coords] = identifier[quads] . identifier[reshape] ( identifier[grid] [ literal[int] ]* identifier[grid] [ literal[int] ]* literal[int] , literal[int] ) identifier[tex_coords] = identifier[np] . identifier[ascontiguousarray] ( identifier[tex_coords] [:,: literal[int] ]) identifier[vertices] = identifier[tex_coords] * identifier[self] . identifier[size] identifier[self] . identifier[_subdiv_position] . identifier[set_data] ( identifier[vertices] . identifier[astype] ( literal[string] )) identifier[self] . identifier[_subdiv_texcoord] . identifier[set_data] ( identifier[tex_coords] . identifier[astype] ( literal[string] ))
def _build_vertex_data(self): """Rebuild the vertex buffers used for rendering the image when using the subdivide method. """ grid = self._grid w = 1.0 / grid[1] h = 1.0 / grid[0] quad = np.array([[0, 0, 0], [w, 0, 0], [w, h, 0], [0, 0, 0], [w, h, 0], [0, h, 0]], dtype=np.float32) quads = np.empty((grid[1], grid[0], 6, 3), dtype=np.float32) quads[:] = quad mgrid = np.mgrid[0.0:grid[1], 0.0:grid[0]].transpose(1, 2, 0) mgrid = mgrid[:, :, np.newaxis, :] mgrid[..., 0] *= w mgrid[..., 1] *= h quads[..., :2] += mgrid tex_coords = quads.reshape(grid[1] * grid[0] * 6, 3) tex_coords = np.ascontiguousarray(tex_coords[:, :2]) vertices = tex_coords * self.size self._subdiv_position.set_data(vertices.astype('float32')) self._subdiv_texcoord.set_data(tex_coords.astype('float32'))
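The tiling arithmetic above, reduced to a self-contained numpy sketch: each grid cell gets six vertices (two triangles), shifted into its slot by the mgrid of cell indices. A 2-row by 3-column grid is assumed here:
import numpy as np

grid = (2, 3)                        # (rows, cols)
w, h = 1.0 / grid[1], 1.0 / grid[0]  # cell size in texture coordinates
quad = np.array([[0, 0, 0], [w, 0, 0], [w, h, 0],
                 [0, 0, 0], [w, h, 0], [0, h, 0]], dtype=np.float32)
quads = np.empty((grid[1], grid[0], 6, 3), dtype=np.float32)
quads[:] = quad
offsets = np.mgrid[0.:grid[1], 0.:grid[0]].transpose(1, 2, 0)[:, :, np.newaxis, :]
quads[..., :2] += offsets * [w, h]   # shift each quad into its grid cell
tex_coords = quads.reshape(grid[0] * grid[1] * 6, 3)[:, :2]
print(tex_coords.shape)              # (36, 2): 6 vertices for each of the 6 cells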
def _docspec(func, module=None, qualname=None, of_class=None): """ For a callable, get the full spec by merging doc_parse() and argspec() :type func: Callable :rtype: data.FDocstring """ sp = _argspec(func) doc = _doc_parse(getdoc(func), module=module, qualname=qualname) # Merge args doc_map = {a.name: a for a in doc.args} doc.args = [data.Argument(a, doc_map.get(a.name.lstrip('*'), None)) for a in sp] # Args shift: dump `self` if (six.PY2 and inspect.ismethod(func)) or \ (six.PY3 and (inspect.isroutine(func) and of_class is not None) and (of_class is not None and not is_method_static(of_class, func.__name__))): doc.args = doc.args[1:] # Signature doc.update_signature() # Finish return doc
def function[_docspec, parameter[func, module, qualname, of_class]]: constant[ For a callable, get the full spec by merging doc_parse() and argspec() :type func: Callable :rtype: data.FDocstring ] variable[sp] assign[=] call[name[_argspec], parameter[name[func]]] variable[doc] assign[=] call[name[_doc_parse], parameter[call[name[getdoc], parameter[name[func]]]]] variable[doc_map] assign[=] <ast.DictComp object at 0x7da18bc70fa0> name[doc].args assign[=] <ast.ListComp object at 0x7da18bc721a0> if <ast.BoolOp object at 0x7da18bc738e0> begin[:] name[doc].args assign[=] call[name[doc].args][<ast.Slice object at 0x7da1b14d0eb0>] call[name[doc].update_signature, parameter[]] return[name[doc]]
keyword[def] identifier[_docspec] ( identifier[func] , identifier[module] = keyword[None] , identifier[qualname] = keyword[None] , identifier[of_class] = keyword[None] ): literal[string] identifier[sp] = identifier[_argspec] ( identifier[func] ) identifier[doc] = identifier[_doc_parse] ( identifier[getdoc] ( identifier[func] ), identifier[module] = identifier[module] , identifier[qualname] = identifier[qualname] ) identifier[doc_map] ={ identifier[a] . identifier[name] : identifier[a] keyword[for] identifier[a] keyword[in] identifier[doc] . identifier[args] } identifier[doc] . identifier[args] =[ identifier[data] . identifier[Argument] ( identifier[a] , identifier[doc_map] . identifier[get] ( identifier[a] . identifier[name] . identifier[lstrip] ( literal[string] ), keyword[None] )) keyword[for] identifier[a] keyword[in] identifier[sp] ] keyword[if] ( identifier[six] . identifier[PY2] keyword[and] identifier[inspect] . identifier[ismethod] ( identifier[func] )) keyword[or] ( identifier[six] . identifier[PY3] keyword[and] ( identifier[inspect] . identifier[isroutine] ( identifier[func] ) keyword[and] identifier[of_class] keyword[is] keyword[not] keyword[None] ) keyword[and] ( identifier[of_class] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[is_method_static] ( identifier[of_class] , identifier[func] . identifier[__name__] ))): identifier[doc] . identifier[args] = identifier[doc] . identifier[args] [ literal[int] :] identifier[doc] . identifier[update_signature] () keyword[return] identifier[doc]
def _docspec(func, module=None, qualname=None, of_class=None): """ For a callable, get the full spec by merging doc_parse() and argspec() :type func: Callable :rtype: data.FDocstring """ sp = _argspec(func) doc = _doc_parse(getdoc(func), module=module, qualname=qualname) # Merge args doc_map = {a.name: a for a in doc.args} doc.args = [data.Argument(a, doc_map.get(a.name.lstrip('*'), None)) for a in sp] # Args shift: dump `self` if six.PY2 and inspect.ismethod(func) or (six.PY3 and (inspect.isroutine(func) and of_class is not None) and (of_class is not None and (not is_method_static(of_class, func.__name__)))): doc.args = doc.args[1:] # depends on [control=['if'], data=[]] # Signature doc.update_signature() # Finish return doc
def _add_logical_operator(self, operator):
    """Adds a logical operator to the query

    :param operator: logical operator (str)

    :raise:
        - QueryExpressionError: if an expression hasn't been set
    """

    if not self.c_oper:
        raise QueryExpressionError("Logical operators must be preceded by an expression")

    self.current_field = None
    self.c_oper = None

    self.l_oper = inspect.currentframe().f_back.f_code.co_name
    self._query.append(operator)
    return self
def function[_add_logical_operator, parameter[self, operator]]: constant[Adds a logical operator in query :param operator: logical operator (str) :raise: - QueryExpressionError: if a expression hasn't been set ] if <ast.UnaryOp object at 0x7da1b07a3280> begin[:] <ast.Raise object at 0x7da1b07a2dd0> name[self].current_field assign[=] constant[None] name[self].c_oper assign[=] constant[None] name[self].l_oper assign[=] call[name[inspect].currentframe, parameter[]].f_back.f_code.co_name call[name[self]._query.append, parameter[name[operator]]] return[name[self]]
keyword[def] identifier[_add_logical_operator] ( identifier[self] , identifier[operator] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[c_oper] : keyword[raise] identifier[QueryExpressionError] ( literal[string] ) identifier[self] . identifier[current_field] = keyword[None] identifier[self] . identifier[c_oper] = keyword[None] identifier[self] . identifier[l_oper] = identifier[inspect] . identifier[currentframe] (). identifier[f_back] . identifier[f_code] . identifier[co_name] identifier[self] . identifier[_query] . identifier[append] ( identifier[operator] ) keyword[return] identifier[self]
def _add_logical_operator(self, operator):
    """Adds a logical operator to the query

    :param operator: logical operator (str)

    :raise:
        - QueryExpressionError: if an expression hasn't been set
    """
    if not self.c_oper:
        raise QueryExpressionError('Logical operators must be preceded by an expression') # depends on [control=['if'], data=[]]
    self.current_field = None
    self.c_oper = None
    self.l_oper = inspect.currentframe().f_back.f_code.co_name
    self._query.append(operator)
    return self
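The inspect.currentframe().f_back.f_code.co_name expression records the caller's function name, so the query object remembers which logical-operator method (AND, OR, ...) invoked it. The trick in isolation; the '^' token is just an illustrative operator string:
import inspect

def _add(operator):
    caller = inspect.currentframe().f_back.f_code.co_name
    return caller, operator

def AND():
    return _add('^')  # hypothetical AND token

print(AND())  # ('AND', '^')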
def _pool_one_shape(features_2d, area_width, area_height, batch_size, width, height, depth, fn=tf.reduce_max, name=None): """Pools for an area in features_2d. Args: features_2d: a Tensor in a shape of [batch_size, height, width, depth]. area_width: the max width allowed for an area. area_height: the max height allowed for an area. batch_size: the batch size. width: the width of the memory. height: the height of the memory. depth: the depth of the features. fn: the TF function for the pooling. name: the op name. Returns: pool_tensor: A Tensor of shape [batch_size, num_areas, depth] """ with tf.name_scope(name, default_name="pool_one_shape"): images = [] for y_shift in range(area_height): image_height = tf.maximum(height - area_height + 1 + y_shift, 0) for x_shift in range(area_width): image_width = tf.maximum(width - area_width + 1 + x_shift, 0) area = features_2d[:, y_shift:image_height, x_shift:image_width, :] flatten_area = tf.reshape(area, [batch_size, -1, depth, 1]) images.append(flatten_area) image_tensor = tf.concat(images, axis=3) max_tensor = fn(image_tensor, axis=3) return max_tensor
def function[_pool_one_shape, parameter[features_2d, area_width, area_height, batch_size, width, height, depth, fn, name]]: constant[Pools for an area in features_2d. Args: features_2d: a Tensor in a shape of [batch_size, height, width, depth]. area_width: the max width allowed for an area. area_height: the max height allowed for an area. batch_size: the batch size. width: the width of the memory. height: the height of the memory. depth: the depth of the features. fn: the TF function for the pooling. name: the op name. Returns: pool_tensor: A Tensor of shape [batch_size, num_areas, depth] ] with call[name[tf].name_scope, parameter[name[name]]] begin[:] variable[images] assign[=] list[[]] for taget[name[y_shift]] in starred[call[name[range], parameter[name[area_height]]]] begin[:] variable[image_height] assign[=] call[name[tf].maximum, parameter[binary_operation[binary_operation[binary_operation[name[height] - name[area_height]] + constant[1]] + name[y_shift]], constant[0]]] for taget[name[x_shift]] in starred[call[name[range], parameter[name[area_width]]]] begin[:] variable[image_width] assign[=] call[name[tf].maximum, parameter[binary_operation[binary_operation[binary_operation[name[width] - name[area_width]] + constant[1]] + name[x_shift]], constant[0]]] variable[area] assign[=] call[name[features_2d]][tuple[[<ast.Slice object at 0x7da1b1e15990>, <ast.Slice object at 0x7da1b1e14ac0>, <ast.Slice object at 0x7da1b1e14b50>, <ast.Slice object at 0x7da1b1e14610>]]] variable[flatten_area] assign[=] call[name[tf].reshape, parameter[name[area], list[[<ast.Name object at 0x7da1b1e16500>, <ast.UnaryOp object at 0x7da1b1e14460>, <ast.Name object at 0x7da1b1e17b80>, <ast.Constant object at 0x7da1b1e14850>]]]] call[name[images].append, parameter[name[flatten_area]]] variable[image_tensor] assign[=] call[name[tf].concat, parameter[name[images]]] variable[max_tensor] assign[=] call[name[fn], parameter[name[image_tensor]]] return[name[max_tensor]]
keyword[def] identifier[_pool_one_shape] ( identifier[features_2d] , identifier[area_width] , identifier[area_height] , identifier[batch_size] , identifier[width] , identifier[height] , identifier[depth] , identifier[fn] = identifier[tf] . identifier[reduce_max] , identifier[name] = keyword[None] ): literal[string] keyword[with] identifier[tf] . identifier[name_scope] ( identifier[name] , identifier[default_name] = literal[string] ): identifier[images] =[] keyword[for] identifier[y_shift] keyword[in] identifier[range] ( identifier[area_height] ): identifier[image_height] = identifier[tf] . identifier[maximum] ( identifier[height] - identifier[area_height] + literal[int] + identifier[y_shift] , literal[int] ) keyword[for] identifier[x_shift] keyword[in] identifier[range] ( identifier[area_width] ): identifier[image_width] = identifier[tf] . identifier[maximum] ( identifier[width] - identifier[area_width] + literal[int] + identifier[x_shift] , literal[int] ) identifier[area] = identifier[features_2d] [:, identifier[y_shift] : identifier[image_height] , identifier[x_shift] : identifier[image_width] ,:] identifier[flatten_area] = identifier[tf] . identifier[reshape] ( identifier[area] ,[ identifier[batch_size] ,- literal[int] , identifier[depth] , literal[int] ]) identifier[images] . identifier[append] ( identifier[flatten_area] ) identifier[image_tensor] = identifier[tf] . identifier[concat] ( identifier[images] , identifier[axis] = literal[int] ) identifier[max_tensor] = identifier[fn] ( identifier[image_tensor] , identifier[axis] = literal[int] ) keyword[return] identifier[max_tensor]
def _pool_one_shape(features_2d, area_width, area_height, batch_size, width, height, depth, fn=tf.reduce_max, name=None): """Pools for an area in features_2d. Args: features_2d: a Tensor in a shape of [batch_size, height, width, depth]. area_width: the max width allowed for an area. area_height: the max height allowed for an area. batch_size: the batch size. width: the width of the memory. height: the height of the memory. depth: the depth of the features. fn: the TF function for the pooling. name: the op name. Returns: pool_tensor: A Tensor of shape [batch_size, num_areas, depth] """ with tf.name_scope(name, default_name='pool_one_shape'): images = [] for y_shift in range(area_height): image_height = tf.maximum(height - area_height + 1 + y_shift, 0) for x_shift in range(area_width): image_width = tf.maximum(width - area_width + 1 + x_shift, 0) area = features_2d[:, y_shift:image_height, x_shift:image_width, :] flatten_area = tf.reshape(area, [batch_size, -1, depth, 1]) images.append(flatten_area) # depends on [control=['for'], data=['x_shift']] # depends on [control=['for'], data=['y_shift']] image_tensor = tf.concat(images, axis=3) max_tensor = fn(image_tensor, axis=3) # depends on [control=['with'], data=[]] return max_tensor
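The same shift-and-stack trick in plain numpy, pooling every 2x2 area of a 3x3 feature map; shapes follow the [batch, height, width, depth] convention used above:
import numpy as np

feat = np.arange(9, dtype=np.float32).reshape(1, 3, 3, 1)
area_h = area_w = 2
views = []
for dy in range(area_h):
    rows = 3 - area_h + 1 + dy
    for dx in range(area_w):
        cols = 3 - area_w + 1 + dx
        view = feat[:, dy:rows, dx:cols, :]          # one shifted copy per offset
        views.append(view.reshape(1, -1, 1, 1))
pooled = np.concatenate(views, axis=3).max(axis=3)   # elementwise max over shifts
print(pooled.reshape(2, 2))                          # [[4. 5.] [7. 8.]]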
def allZero(buffer):
    """
    Tries to determine if a buffer is empty.

    @type buffer: str
    @param buffer: Buffer to test if it is empty.

    @rtype: bool
    @return: C{True} if the given buffer is empty, i.e. full of zeros,
        C{False} otherwise.
    """
    allZero = True
    for byte in buffer:
        if byte != "\x00":
            allZero = False
            break
    return allZero
def function[allZero, parameter[buffer]]: constant[ Tries to determine if a buffer is empty. @type buffer: str @param buffer: Buffer to test if it is empty. @rtype: bool @return: C{True} if the given buffer is empty, i.e. full of zeros, C{False} if it doesn't. ] variable[allZero] assign[=] constant[True] for taget[name[byte]] in starred[name[buffer]] begin[:] if compare[name[byte] not_equal[!=] constant[]] begin[:] variable[allZero] assign[=] constant[False] break return[name[allZero]]
keyword[def] identifier[allZero] ( identifier[buffer] ): literal[string] identifier[allZero] = keyword[True] keyword[for] identifier[byte] keyword[in] identifier[buffer] : keyword[if] identifier[byte] != literal[string] : identifier[allZero] = keyword[False] keyword[break] keyword[return] identifier[allZero]
def allZero(buffer):
    """
    Tries to determine if a buffer is empty.

    @type buffer: str
    @param buffer: Buffer to test if it is empty.

    @rtype: bool
    @return: C{True} if the given buffer is empty, i.e. full of zeros,
        C{False} otherwise.
    """
    allZero = True
    for byte in buffer:
        if byte != '\x00':
            allZero = False
            break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['byte']]
    return allZero
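For reference, the same test can be written without an explicit loop; which variant applies depends on whether the buffer is bytes or str (both helpers below are sketches):
def all_zero_bytes(buf: bytes) -> bool:
    return not any(buf)            # zero bytes are falsy

def all_zero_str(buf: str) -> bool:
    return not buf.strip("\x00")   # empty once leading/trailing NULs are stripped

print(all_zero_bytes(b"\x00\x00"), all_zero_str("\x00a"))  # True False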
def remove_umis(adj_list, cluster, nodes):
    '''removes the specified nodes from the cluster and returns
    the remaining nodes '''

    # list comprehension: for x in nodes: for node in adj_list[x]: yield node
    nodes_to_remove = set([node
                           for x in nodes
                           for node in adj_list[x]] + nodes)

    return cluster - nodes_to_remove
def function[remove_umis, parameter[adj_list, cluster, nodes]]: constant[removes the specified nodes from the cluster and returns the remaining nodes ] variable[nodes_to_remove] assign[=] call[name[set], parameter[binary_operation[<ast.ListComp object at 0x7da20c6a8ca0> + name[nodes]]]] return[binary_operation[name[cluster] - name[nodes_to_remove]]]
keyword[def] identifier[remove_umis] ( identifier[adj_list] , identifier[cluster] , identifier[nodes] ): literal[string] identifier[nodes_to_remove] = identifier[set] ([ identifier[node] keyword[for] identifier[x] keyword[in] identifier[nodes] keyword[for] identifier[node] keyword[in] identifier[adj_list] [ identifier[x] ]]+ identifier[nodes] ) keyword[return] identifier[cluster] - identifier[nodes_to_remove]
def remove_umis(adj_list, cluster, nodes):
    """removes the specified nodes from the cluster and returns
    the remaining nodes """
    # list comprehension: for x in nodes: for node in adj_list[x]: yield node
    nodes_to_remove = set([node for x in nodes for node in adj_list[x]] + nodes)
    return cluster - nodes_to_remove
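A worked example of the neighbourhood removal; the adjacency list is hypothetical. Removing 'A' also drops everything adjacent to it:
adj_list = {'A': ['B'], 'B': ['A', 'C'], 'C': ['B']}
cluster = {'A', 'B', 'C'}
nodes = ['A']
nodes_to_remove = set([node for x in nodes for node in adj_list[x]] + nodes)
print(cluster - nodes_to_remove)  # {'C'}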
def round_edge_open_mesh(alpha, subtype='diamond pattern wire', angle=0): r'''Returns the loss coefficient for a round edged open net/screen made of one of the following patterns, according to [1]_: 'round bar screen': .. math:: K = 0.95(1-\alpha) + 0.2(1-\alpha)^2 'diamond pattern wire': .. math:: K = 0.67(1-\alpha) + 1.3(1-\alpha)^2 'knotted net': .. math:: K = 0.70(1-\alpha) + 4.9(1-\alpha)^2 'knotless net': .. math:: K = 0.72(1-\alpha) + 2.1(1-\alpha)^2 Parameters ---------- alpha : float Fraction of net/screen open to flow [-] subtype : str One of 'round bar screen', 'diamond pattern wire', 'knotted net' or 'knotless net'. angle : float, optional Angle of inclination, with 0 being straight and 90 being parallel to flow [degrees] Returns ------- K : float Loss coefficient [-] Notes ----- `alpha` should be between 0.85 and 1 for these correlations. Flow should be turbulent, with Re > 500. Examples -------- >>> round_edge_open_mesh(0.96, angle=33.) 0.02031327712601458 References ---------- .. [1] Blevins, Robert D. Applied Fluid Dynamics Handbook. New York, N.Y.: Van Nostrand Reinhold Co., 1984. ''' if subtype == 'round bar screen': K = 0.95*(1-alpha) + 0.2*(1-alpha)**2 elif subtype == 'diamond pattern wire': K = 0.67*(1-alpha) + 1.3*(1-alpha)**2 elif subtype == 'knotted net': K = 0.70*(1-alpha) + 4.9*(1-alpha)**2 elif subtype == 'knotless net': K = 0.72*(1-alpha) + 2.1*(1-alpha)**2 else: raise Exception('Subtype not recognized') if angle: if angle < 45: K *= cos(radians(angle))**2 else: K *= interp(angle, round_thetas, round_gammas) return K
def function[round_edge_open_mesh, parameter[alpha, subtype, angle]]: constant[Returns the loss coefficient for a round edged open net/screen made of one of the following patterns, according to [1]_: 'round bar screen': .. math:: K = 0.95(1-\alpha) + 0.2(1-\alpha)^2 'diamond pattern wire': .. math:: K = 0.67(1-\alpha) + 1.3(1-\alpha)^2 'knotted net': .. math:: K = 0.70(1-\alpha) + 4.9(1-\alpha)^2 'knotless net': .. math:: K = 0.72(1-\alpha) + 2.1(1-\alpha)^2 Parameters ---------- alpha : float Fraction of net/screen open to flow [-] subtype : str One of 'round bar screen', 'diamond pattern wire', 'knotted net' or 'knotless net'. angle : float, optional Angle of inclination, with 0 being straight and 90 being parallel to flow [degrees] Returns ------- K : float Loss coefficient [-] Notes ----- `alpha` should be between 0.85 and 1 for these correlations. Flow should be turbulent, with Re > 500. Examples -------- >>> round_edge_open_mesh(0.96, angle=33.) 0.02031327712601458 References ---------- .. [1] Blevins, Robert D. Applied Fluid Dynamics Handbook. New York, N.Y.: Van Nostrand Reinhold Co., 1984. ] if compare[name[subtype] equal[==] constant[round bar screen]] begin[:] variable[K] assign[=] binary_operation[binary_operation[constant[0.95] * binary_operation[constant[1] - name[alpha]]] + binary_operation[constant[0.2] * binary_operation[binary_operation[constant[1] - name[alpha]] ** constant[2]]]] if name[angle] begin[:] if compare[name[angle] less[<] constant[45]] begin[:] <ast.AugAssign object at 0x7da1b11846d0> return[name[K]]
keyword[def] identifier[round_edge_open_mesh] ( identifier[alpha] , identifier[subtype] = literal[string] , identifier[angle] = literal[int] ): literal[string] keyword[if] identifier[subtype] == literal[string] : identifier[K] = literal[int] *( literal[int] - identifier[alpha] )+ literal[int] *( literal[int] - identifier[alpha] )** literal[int] keyword[elif] identifier[subtype] == literal[string] : identifier[K] = literal[int] *( literal[int] - identifier[alpha] )+ literal[int] *( literal[int] - identifier[alpha] )** literal[int] keyword[elif] identifier[subtype] == literal[string] : identifier[K] = literal[int] *( literal[int] - identifier[alpha] )+ literal[int] *( literal[int] - identifier[alpha] )** literal[int] keyword[elif] identifier[subtype] == literal[string] : identifier[K] = literal[int] *( literal[int] - identifier[alpha] )+ literal[int] *( literal[int] - identifier[alpha] )** literal[int] keyword[else] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[if] identifier[angle] : keyword[if] identifier[angle] < literal[int] : identifier[K] *= identifier[cos] ( identifier[radians] ( identifier[angle] ))** literal[int] keyword[else] : identifier[K] *= identifier[interp] ( identifier[angle] , identifier[round_thetas] , identifier[round_gammas] ) keyword[return] identifier[K]
def round_edge_open_mesh(alpha, subtype='diamond pattern wire', angle=0): """Returns the loss coefficient for a round edged open net/screen made of one of the following patterns, according to [1]_: 'round bar screen': .. math:: K = 0.95(1-\\alpha) + 0.2(1-\\alpha)^2 'diamond pattern wire': .. math:: K = 0.67(1-\\alpha) + 1.3(1-\\alpha)^2 'knotted net': .. math:: K = 0.70(1-\\alpha) + 4.9(1-\\alpha)^2 'knotless net': .. math:: K = 0.72(1-\\alpha) + 2.1(1-\\alpha)^2 Parameters ---------- alpha : float Fraction of net/screen open to flow [-] subtype : str One of 'round bar screen', 'diamond pattern wire', 'knotted net' or 'knotless net'. angle : float, optional Angle of inclination, with 0 being straight and 90 being parallel to flow [degrees] Returns ------- K : float Loss coefficient [-] Notes ----- `alpha` should be between 0.85 and 1 for these correlations. Flow should be turbulent, with Re > 500. Examples -------- >>> round_edge_open_mesh(0.96, angle=33.) 0.02031327712601458 References ---------- .. [1] Blevins, Robert D. Applied Fluid Dynamics Handbook. New York, N.Y.: Van Nostrand Reinhold Co., 1984. """ if subtype == 'round bar screen': K = 0.95 * (1 - alpha) + 0.2 * (1 - alpha) ** 2 # depends on [control=['if'], data=[]] elif subtype == 'diamond pattern wire': K = 0.67 * (1 - alpha) + 1.3 * (1 - alpha) ** 2 # depends on [control=['if'], data=[]] elif subtype == 'knotted net': K = 0.7 * (1 - alpha) + 4.9 * (1 - alpha) ** 2 # depends on [control=['if'], data=[]] elif subtype == 'knotless net': K = 0.72 * (1 - alpha) + 2.1 * (1 - alpha) ** 2 # depends on [control=['if'], data=[]] else: raise Exception('Subtype not recognized') if angle: if angle < 45: K *= cos(radians(angle)) ** 2 # depends on [control=['if'], data=['angle']] else: K *= interp(angle, round_thetas, round_gammas) # depends on [control=['if'], data=[]] return K
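Tracing the docstring example by hand for the default 'diamond pattern wire' subtype:
from math import cos, radians

alpha, angle = 0.96, 33.0
K = 0.67 * (1 - alpha) + 1.3 * (1 - alpha) ** 2  # diamond pattern wire
K *= cos(radians(angle)) ** 2                    # inclination below 45 degrees
print(round(K, 6))                               # 0.020313, matching the docstring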
def union(left, right, distinct=False):
    """
    Union two collections.

    :param left: left collection
    :param right: right collection
    :param distinct: if True, duplicate rows are removed (UNION vs. UNION ALL)
    :return: collection

    :Example:
    >>> df['name', 'id'].union(df2['id', 'name'])
    """

    left, right = _make_different_sources(left, right)

    return UnionCollectionExpr(_lhs=left, _rhs=right, _distinct=distinct)
def function[union, parameter[left, right, distinct]]: constant[ Union two collections. :param left: left collection :param right: right collection :param distinct: :return: collection :Example: >>> df['name', 'id'].union(df2['id', 'name']) ] <ast.Tuple object at 0x7da18eb564d0> assign[=] call[name[_make_different_sources], parameter[name[left], name[right]]] return[call[name[UnionCollectionExpr], parameter[]]]
keyword[def] identifier[union] ( identifier[left] , identifier[right] , identifier[distinct] = keyword[False] ): literal[string] identifier[left] , identifier[right] = identifier[_make_different_sources] ( identifier[left] , identifier[right] ) keyword[return] identifier[UnionCollectionExpr] ( identifier[_lhs] = identifier[left] , identifier[_rhs] = identifier[right] , identifier[_distinct] = identifier[distinct] )
def union(left, right, distinct=False):
    """
    Union two collections.

    :param left: left collection
    :param right: right collection
    :param distinct: if True, duplicate rows are removed (UNION vs. UNION ALL)
    :return: collection

    :Example:
    >>> df['name', 'id'].union(df2['id', 'name'])
    """
    (left, right) = _make_different_sources(left, right)
    return UnionCollectionExpr(_lhs=left, _rhs=right, _distinct=distinct)
def get_ugali_dir(): """Get the path to the ugali data directory from the environment""" dirname = os.getenv('UGALIDIR') # Get the HOME directory if not dirname: dirname=os.path.join(os.getenv('HOME'),'.ugali') if not os.path.exists(dirname): from ugali.utils.logger import logger msg = "Creating UGALIDIR:\n%s"%dirname logger.warning(msg) return mkdir(dirname)
def function[get_ugali_dir, parameter[]]: constant[Get the path to the ugali data directory from the environment] variable[dirname] assign[=] call[name[os].getenv, parameter[constant[UGALIDIR]]] if <ast.UnaryOp object at 0x7da20cabc4f0> begin[:] variable[dirname] assign[=] call[name[os].path.join, parameter[call[name[os].getenv, parameter[constant[HOME]]], constant[.ugali]]] if <ast.UnaryOp object at 0x7da20cabed70> begin[:] from relative_module[ugali.utils.logger] import module[logger] variable[msg] assign[=] binary_operation[constant[Creating UGALIDIR: %s] <ast.Mod object at 0x7da2590d6920> name[dirname]] call[name[logger].warning, parameter[name[msg]]] return[call[name[mkdir], parameter[name[dirname]]]]
keyword[def] identifier[get_ugali_dir] (): literal[string] identifier[dirname] = identifier[os] . identifier[getenv] ( literal[string] ) keyword[if] keyword[not] identifier[dirname] : identifier[dirname] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[getenv] ( literal[string] ), literal[string] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[dirname] ): keyword[from] identifier[ugali] . identifier[utils] . identifier[logger] keyword[import] identifier[logger] identifier[msg] = literal[string] % identifier[dirname] identifier[logger] . identifier[warning] ( identifier[msg] ) keyword[return] identifier[mkdir] ( identifier[dirname] )
def get_ugali_dir(): """Get the path to the ugali data directory from the environment""" dirname = os.getenv('UGALIDIR') # Get the HOME directory if not dirname: dirname = os.path.join(os.getenv('HOME'), '.ugali') # depends on [control=['if'], data=[]] if not os.path.exists(dirname): from ugali.utils.logger import logger msg = 'Creating UGALIDIR:\n%s' % dirname logger.warning(msg) # depends on [control=['if'], data=[]] return mkdir(dirname)
def batch_get(self, offset=0, count=50, status_list=None):
    """
    Batch-query card/coupon information
    """
    card_data = {
        'offset': offset,
        'count': count
    }
    if status_list:
        card_data['status_list'] = status_list
    return self._post(
        'card/batchget',
        data=card_data
    )
def function[batch_get, parameter[self, offset, count, status_list]]: constant[ 批量查询卡券信息 ] variable[card_data] assign[=] dictionary[[<ast.Constant object at 0x7da1b21a2bf0>, <ast.Constant object at 0x7da1b21a1600>], [<ast.Name object at 0x7da1b21a2350>, <ast.Name object at 0x7da1b21a2dd0>]] if name[status_list] begin[:] call[name[card_data]][constant[status_list]] assign[=] name[status_list] return[call[name[self]._post, parameter[constant[card/batchget]]]]
keyword[def] identifier[batch_get] ( identifier[self] , identifier[offset] = literal[int] , identifier[count] = literal[int] , identifier[status_list] = keyword[None] ): literal[string] identifier[card_data] ={ literal[string] : identifier[offset] , literal[string] : identifier[count] } keyword[if] identifier[status_list] : identifier[card_data] [ literal[string] ]= identifier[status_list] keyword[return] identifier[self] . identifier[_post] ( literal[string] , identifier[data] = identifier[card_data] )
def batch_get(self, offset=0, count=50, status_list=None):
    """
    Batch-query card/coupon information
    """
    card_data = {'offset': offset, 'count': count}
    if status_list:
        card_data['status_list'] = status_list # depends on [control=['if'], data=[]]
    return self._post('card/batchget', data=card_data)
def assistant_from_yaml(cls, source, y, superassistant, fully_loaded=True, role=settings.DEFAULT_ASSISTANT_ROLE): """Constructs instance of YamlAssistant loaded from given structure y, loaded from source file source. Args: source: path to assistant source file y: loaded yaml structure superassistant: superassistant of this assistant Returns: YamlAssistant instance constructed from y with source file source Raises: YamlError: if the assistant is malformed """ # In pre-0.9.0, we required assistant to be a mapping of {name: assistant_attributes} # now we allow that, but we also allow omitting the assistant name and putting # the attributes to top_level, too. name = os.path.splitext(os.path.basename(source))[0] yaml_checker.check(source, y) assistant = yaml_assistant.YamlAssistant(name, y, source, superassistant, fully_loaded=fully_loaded, role=role) return assistant
def function[assistant_from_yaml, parameter[cls, source, y, superassistant, fully_loaded, role]]: constant[Constructs instance of YamlAssistant loaded from given structure y, loaded from source file source. Args: source: path to assistant source file y: loaded yaml structure superassistant: superassistant of this assistant Returns: YamlAssistant instance constructed from y with source file source Raises: YamlError: if the assistant is malformed ] variable[name] assign[=] call[call[name[os].path.splitext, parameter[call[name[os].path.basename, parameter[name[source]]]]]][constant[0]] call[name[yaml_checker].check, parameter[name[source], name[y]]] variable[assistant] assign[=] call[name[yaml_assistant].YamlAssistant, parameter[name[name], name[y], name[source], name[superassistant]]] return[name[assistant]]
keyword[def] identifier[assistant_from_yaml] ( identifier[cls] , identifier[source] , identifier[y] , identifier[superassistant] , identifier[fully_loaded] = keyword[True] , identifier[role] = identifier[settings] . identifier[DEFAULT_ASSISTANT_ROLE] ): literal[string] identifier[name] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[source] ))[ literal[int] ] identifier[yaml_checker] . identifier[check] ( identifier[source] , identifier[y] ) identifier[assistant] = identifier[yaml_assistant] . identifier[YamlAssistant] ( identifier[name] , identifier[y] , identifier[source] , identifier[superassistant] , identifier[fully_loaded] = identifier[fully_loaded] , identifier[role] = identifier[role] ) keyword[return] identifier[assistant]
def assistant_from_yaml(cls, source, y, superassistant, fully_loaded=True, role=settings.DEFAULT_ASSISTANT_ROLE): """Constructs instance of YamlAssistant loaded from given structure y, loaded from source file source. Args: source: path to assistant source file y: loaded yaml structure superassistant: superassistant of this assistant Returns: YamlAssistant instance constructed from y with source file source Raises: YamlError: if the assistant is malformed """ # In pre-0.9.0, we required assistant to be a mapping of {name: assistant_attributes} # now we allow that, but we also allow omitting the assistant name and putting # the attributes to top_level, too. name = os.path.splitext(os.path.basename(source))[0] yaml_checker.check(source, y) assistant = yaml_assistant.YamlAssistant(name, y, source, superassistant, fully_loaded=fully_loaded, role=role) return assistant
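The assistant name is simply the source file's base name without its extension; the path below is hypothetical:
import os.path

source = '/path/to/assistants/crt/python.yaml'
print(os.path.splitext(os.path.basename(source))[0])  # python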
def values(self, *keys): """ Return the values of the record, optionally filtering to include only certain values by index or key. :param keys: indexes or keys of the items to include; if none are provided, all values will be included :return: list of values """ if keys: d = [] for key in keys: try: i = self.index(key) except KeyError: d.append(None) else: d.append(self[i]) return d return list(self)
def function[values, parameter[self]]: constant[ Return the values of the record, optionally filtering to include only certain values by index or key. :param keys: indexes or keys of the items to include; if none are provided, all values will be included :return: list of values ] if name[keys] begin[:] variable[d] assign[=] list[[]] for taget[name[key]] in starred[name[keys]] begin[:] <ast.Try object at 0x7da18f813eb0> return[name[d]] return[call[name[list], parameter[name[self]]]]
keyword[def] identifier[values] ( identifier[self] ,* identifier[keys] ): literal[string] keyword[if] identifier[keys] : identifier[d] =[] keyword[for] identifier[key] keyword[in] identifier[keys] : keyword[try] : identifier[i] = identifier[self] . identifier[index] ( identifier[key] ) keyword[except] identifier[KeyError] : identifier[d] . identifier[append] ( keyword[None] ) keyword[else] : identifier[d] . identifier[append] ( identifier[self] [ identifier[i] ]) keyword[return] identifier[d] keyword[return] identifier[list] ( identifier[self] )
def values(self, *keys): """ Return the values of the record, optionally filtering to include only certain values by index or key. :param keys: indexes or keys of the items to include; if none are provided, all values will be included :return: list of values """ if keys: d = [] for key in keys: try: i = self.index(key) # depends on [control=['try'], data=[]] except KeyError: d.append(None) # depends on [control=['except'], data=[]] else: d.append(self[i]) # depends on [control=['for'], data=['key']] return d # depends on [control=['if'], data=[]] return list(self)
def saltmem(human_readable=False): ''' .. versionadded:: 2015.8.0 Returns the amount of memory that salt is using human_readable : False return the value in a nicely formatted number CLI Example: .. code-block:: bash salt '*' status.saltmem salt '*' status.saltmem human_readable=True ''' # psutil.Process defaults to current process (`os.getpid()`) p = psutil.Process() # Use oneshot to get a snapshot with p.oneshot(): mem = p.memory_info().rss if human_readable: return _byte_calc(mem) return mem
def function[saltmem, parameter[human_readable]]: constant[ .. versionadded:: 2015.8.0 Returns the amount of memory that salt is using human_readable : False return the value in a nicely formatted number CLI Example: .. code-block:: bash salt '*' status.saltmem salt '*' status.saltmem human_readable=True ] variable[p] assign[=] call[name[psutil].Process, parameter[]] with call[name[p].oneshot, parameter[]] begin[:] variable[mem] assign[=] call[name[p].memory_info, parameter[]].rss if name[human_readable] begin[:] return[call[name[_byte_calc], parameter[name[mem]]]] return[name[mem]]
keyword[def] identifier[saltmem] ( identifier[human_readable] = keyword[False] ): literal[string] identifier[p] = identifier[psutil] . identifier[Process] () keyword[with] identifier[p] . identifier[oneshot] (): identifier[mem] = identifier[p] . identifier[memory_info] (). identifier[rss] keyword[if] identifier[human_readable] : keyword[return] identifier[_byte_calc] ( identifier[mem] ) keyword[return] identifier[mem]
def saltmem(human_readable=False): """ .. versionadded:: 2015.8.0 Returns the amount of memory that salt is using human_readable : False return the value in a nicely formatted number CLI Example: .. code-block:: bash salt '*' status.saltmem salt '*' status.saltmem human_readable=True """ # psutil.Process defaults to current process (`os.getpid()`) p = psutil.Process() # Use oneshot to get a snapshot with p.oneshot(): mem = p.memory_info().rss # depends on [control=['with'], data=[]] if human_readable: return _byte_calc(mem) # depends on [control=['if'], data=[]] return mem
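The psutil calls above are the library's documented API; oneshot() caches process information so all reads inside the block come from one snapshot:
import psutil

p = psutil.Process()              # defaults to the current PID
with p.oneshot():                 # one snapshot for the reads below
    rss = p.memory_info().rss
print(rss // 1024, "KiB resident")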
def compile(self): """ Recursively compile this widget as well as all of its children to HTML. :returns: HTML string representation of this widget. """ self.content = "".join(map(lambda x: x.compile(), self.children)) return self._generate_html()
def function[compile, parameter[self]]: constant[ Recursively compile this widget as well as all of its children to HTML. :returns: HTML string representation of this widget. ] name[self].content assign[=] call[constant[].join, parameter[call[name[map], parameter[<ast.Lambda object at 0x7da18fe935b0>, name[self].children]]]] return[call[name[self]._generate_html, parameter[]]]
keyword[def] identifier[compile] ( identifier[self] ): literal[string] identifier[self] . identifier[content] = literal[string] . identifier[join] ( identifier[map] ( keyword[lambda] identifier[x] : identifier[x] . identifier[compile] (), identifier[self] . identifier[children] )) keyword[return] identifier[self] . identifier[_generate_html] ()
def compile(self): """ Recursively compile this widget as well as all of its children to HTML. :returns: HTML string representation of this widget. """ self.content = ''.join(map(lambda x: x.compile(), self.children)) return self._generate_html()
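A minimal concrete widget illustrating the depth-first compile contract above; this Widget class is a hypothetical stand-in, not the library's real base class:
class Widget:
    def __init__(self, tag, children=()):
        self.tag, self.children, self.content = tag, list(children), ""

    def _generate_html(self):
        return "<{0}>{1}</{0}>".format(self.tag, self.content)

    def compile(self):
        # children first, so the parent can wrap their rendered HTML
        self.content = "".join(map(lambda x: x.compile(), self.children))
        return self._generate_html()

print(Widget("div", [Widget("span")]).compile())  # <div><span></span></div>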
def resource_to_portal_type(resource):
    """Converts a resource to a portal type

    :param resource: Resource name as it is used in the content route
    :type resource: string
    :returns: Portal type name
    :rtype: string
    """
    if resource is None:
        return None

    resource_mapping = get_resource_mapping()
    portal_type = resource_mapping.get(resource.lower())

    if portal_type is None:
        logger.warn("Could not map the resource '{}' "
                    "to any known portal type".format(resource))

    return portal_type
def function[resource_to_portal_type, parameter[resource]]: constant[Converts a resource to a portal type :param resource: Resource name as it is used in the content route :type name: string :returns: Portal type name :rtype: string ] if compare[name[resource] is constant[None]] begin[:] return[constant[None]] variable[resource_mapping] assign[=] call[name[get_resource_mapping], parameter[]] variable[portal_type] assign[=] call[name[resource_mapping].get, parameter[call[name[resource].lower, parameter[]]]] if compare[name[portal_type] is constant[None]] begin[:] call[name[logger].warn, parameter[call[constant[Could not map the resource '{}' to any known portal type].format, parameter[name[resource]]]]] return[name[portal_type]]
keyword[def] identifier[resource_to_portal_type] ( identifier[resource] ): literal[string] keyword[if] identifier[resource] keyword[is] keyword[None] : keyword[return] keyword[None] identifier[resource_mapping] = identifier[get_resource_mapping] () identifier[portal_type] = identifier[resource_mapping] . identifier[get] ( identifier[resource] . identifier[lower] ()) keyword[if] identifier[portal_type] keyword[is] keyword[None] : identifier[logger] . identifier[warn] ( literal[string] literal[string] . identifier[format] ( identifier[resource] )) keyword[return] identifier[portal_type]
def resource_to_portal_type(resource):
    """Converts a resource to a portal type

    :param resource: Resource name as it is used in the content route
    :type resource: string
    :returns: Portal type name
    :rtype: string
    """
    if resource is None:
        return None # depends on [control=['if'], data=[]]
    resource_mapping = get_resource_mapping()
    portal_type = resource_mapping.get(resource.lower())
    if portal_type is None:
        logger.warn("Could not map the resource '{}' to any known portal type".format(resource)) # depends on [control=['if'], data=[]]
    return portal_type
def _execShowCountCmd(self, showcmd):
    """Execute 'show ... count' command and return the parsed count.

    @param showcmd: Command string.
    @return:        Count as an integer, or None if no count was found.

    """
    result = None
    lines = self._execCmd("show", showcmd + " count")
    for line in lines:
        mobj = re.match(r'\s*(\d+)\s+total', line)
        if mobj:
            return int(mobj.group(1))
    return result
def function[_execShowCountCmd, parameter[self, showcmd]]: constant[Execute 'show' command and return result dictionary. @param cmd: Command string. @return: Result dictionary. ] variable[result] assign[=] constant[None] variable[lines] assign[=] call[name[self]._execCmd, parameter[constant[show], binary_operation[name[showcmd] + constant[ count]]]] for taget[name[line]] in starred[name[lines]] begin[:] variable[mobj] assign[=] call[name[re].match, parameter[constant[\s*(\d+)\s+total], name[line]]] if name[mobj] begin[:] return[call[name[int], parameter[call[name[mobj].group, parameter[constant[1]]]]]] return[name[result]]
keyword[def] identifier[_execShowCountCmd] ( identifier[self] , identifier[showcmd] ): literal[string] identifier[result] = keyword[None] identifier[lines] = identifier[self] . identifier[_execCmd] ( literal[string] , identifier[showcmd] + literal[string] ) keyword[for] identifier[line] keyword[in] identifier[lines] : identifier[mobj] = identifier[re] . identifier[match] ( literal[string] , identifier[line] ) keyword[if] identifier[mobj] : keyword[return] identifier[int] ( identifier[mobj] . identifier[group] ( literal[int] )) keyword[return] identifier[result]
def _execShowCountCmd(self, showcmd):
    """Execute 'show ... count' command and return the parsed count.

    @param showcmd: Command string.
    @return:        Count as an integer, or None if no count was found.

    """
    result = None
    lines = self._execCmd('show', showcmd + ' count')
    for line in lines:
        mobj = re.match('\\s*(\\d+)\\s+total', line)
        if mobj:
            return int(mobj.group(1)) # depends on [control=['if'], data=[]]
        # depends on [control=['for'], data=['line']]
    return result
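The regular expression pulls the leading integer out of summary lines such as "  42 total"; the sample output line is hypothetical:
import re

line = '  42 total entries'
m = re.match(r'\s*(\d+)\s+total', line)
print(int(m.group(1)))  # 42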
def wrap(self, alias=None): """ Wraps the query by selecting all fields from itself :rtype: :class:`Query <querybuilder.query.Query>` :return: The wrapped query """ field_names = self.get_field_names() query = Query(self.connection).from_table(deepcopy(self), alias=alias) self.__dict__.update(query.__dict__) # set explicit field names self.tables[0].set_fields(field_names) field_names = self.get_field_names() return self
def function[wrap, parameter[self, alias]]: constant[ Wraps the query by selecting all fields from itself :rtype: :class:`Query <querybuilder.query.Query>` :return: The wrapped query ] variable[field_names] assign[=] call[name[self].get_field_names, parameter[]] variable[query] assign[=] call[call[name[Query], parameter[name[self].connection]].from_table, parameter[call[name[deepcopy], parameter[name[self]]]]] call[name[self].__dict__.update, parameter[name[query].__dict__]] call[call[name[self].tables][constant[0]].set_fields, parameter[name[field_names]]] variable[field_names] assign[=] call[name[self].get_field_names, parameter[]] return[name[self]]
keyword[def] identifier[wrap] ( identifier[self] , identifier[alias] = keyword[None] ): literal[string] identifier[field_names] = identifier[self] . identifier[get_field_names] () identifier[query] = identifier[Query] ( identifier[self] . identifier[connection] ). identifier[from_table] ( identifier[deepcopy] ( identifier[self] ), identifier[alias] = identifier[alias] ) identifier[self] . identifier[__dict__] . identifier[update] ( identifier[query] . identifier[__dict__] ) identifier[self] . identifier[tables] [ literal[int] ]. identifier[set_fields] ( identifier[field_names] ) identifier[field_names] = identifier[self] . identifier[get_field_names] () keyword[return] identifier[self]
def wrap(self, alias=None): """ Wraps the query by selecting all fields from itself :rtype: :class:`Query <querybuilder.query.Query>` :return: The wrapped query """ field_names = self.get_field_names() query = Query(self.connection).from_table(deepcopy(self), alias=alias) self.__dict__.update(query.__dict__) # set explicit field names self.tables[0].set_fields(field_names) field_names = self.get_field_names() return self
def preview_box(endpoint=None, filename=None):
    """Create a preview box.

    :param endpoint: The endpoint of the view function that serves the avatar image file.
    :param filename: The filename of the image that needs to be cropped.
    """
    preview_size = current_app.config['AVATARS_CROP_PREVIEW_SIZE'] or current_app.config['AVATARS_SIZE_TUPLE'][2]
    if endpoint is None or filename is None:
        url = url_for('avatars.static', filename='default/default_l.jpg')
    else:
        url = url_for(endpoint, filename=filename)
    return Markup('''
    <div id="preview-box">
        <div class="preview-box" style="width: %dpx; height: %dpx; overflow: hidden;">
            <img src="%s" class="jcrop-preview" alt="Preview"/>
        </div>
    </div>''' % (preview_size, preview_size, url))
def function[preview_box, parameter[endpoint, filename]]: constant[Create a preview box. :param endpoint: The endpoint of view function that serve avatar image file. :param filename: The filename of the image that need to be crop. ] variable[preview_size] assign[=] <ast.BoolOp object at 0x7da1b0fedd50> if <ast.BoolOp object at 0x7da1b0feeec0> begin[:] variable[url] assign[=] call[name[url_for], parameter[constant[avatars.static]]] return[call[name[Markup], parameter[binary_operation[constant[ <div id="preview-box"> <div class="preview-box" style="width: %dpx; height: %dpx; overflow: hidden;"> <img src="%s" class="jcrop-preview" alt="Preview"/> </div> </div>] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0feea40>, <ast.Name object at 0x7da18c4cf130>, <ast.Name object at 0x7da18c4ceef0>]]]]]]
keyword[def] identifier[preview_box] ( identifier[endpoint] = keyword[None] , identifier[filename] = keyword[None] ): literal[string] identifier[preview_size] = identifier[current_app] . identifier[config] [ literal[string] ] keyword[or] identifier[current_app] . identifier[config] [ literal[string] ][ literal[int] ] keyword[if] identifier[endpoint] keyword[is] keyword[None] keyword[or] identifier[filename] keyword[is] keyword[None] : identifier[url] = identifier[url_for] ( literal[string] , identifier[filename] = literal[string] ) keyword[else] : identifier[url] = identifier[url_for] ( identifier[endpoint] , identifier[filename] = identifier[filename] ) keyword[return] identifier[Markup] ( literal[string] %( identifier[preview_size] , identifier[preview_size] , identifier[url] ))
def preview_box(endpoint=None, filename=None):
    """Create a preview box.

    :param endpoint: The endpoint of the view function that serves the avatar image file.
    :param filename: The filename of the image that needs to be cropped.
    """
    preview_size = current_app.config['AVATARS_CROP_PREVIEW_SIZE'] or current_app.config['AVATARS_SIZE_TUPLE'][2]
    if endpoint is None or filename is None:
        url = url_for('avatars.static', filename='default/default_l.jpg') # depends on [control=['if'], data=[]]
    else:
        url = url_for(endpoint, filename=filename)
    return Markup('\n    <div id="preview-box">\n        <div class="preview-box" style="width: %dpx; height: %dpx; overflow: hidden;">\n            <img src="%s" class="jcrop-preview" alt="Preview"/>\n        </div>\n    </div>' % (preview_size, preview_size, url))
def change_nick(self,new_nick): """ Send a nick change request to the room. :Parameters: - `new_nick`: the new nickname requested. :Types: - `new_nick`: `unicode` """ new_room_jid=JID(self.room_jid.node,self.room_jid.domain,new_nick) p=Presence(to_jid=new_room_jid) self.manager.stream.send(p)
def function[change_nick, parameter[self, new_nick]]: constant[ Send a nick change request to the room. :Parameters: - `new_nick`: the new nickname requested. :Types: - `new_nick`: `unicode` ] variable[new_room_jid] assign[=] call[name[JID], parameter[name[self].room_jid.node, name[self].room_jid.domain, name[new_nick]]] variable[p] assign[=] call[name[Presence], parameter[]] call[name[self].manager.stream.send, parameter[name[p]]]
keyword[def] identifier[change_nick] ( identifier[self] , identifier[new_nick] ): literal[string] identifier[new_room_jid] = identifier[JID] ( identifier[self] . identifier[room_jid] . identifier[node] , identifier[self] . identifier[room_jid] . identifier[domain] , identifier[new_nick] ) identifier[p] = identifier[Presence] ( identifier[to_jid] = identifier[new_room_jid] ) identifier[self] . identifier[manager] . identifier[stream] . identifier[send] ( identifier[p] )
def change_nick(self, new_nick): """ Send a nick change request to the room. :Parameters: - `new_nick`: the new nickname requested. :Types: - `new_nick`: `unicode` """ new_room_jid = JID(self.room_jid.node, self.room_jid.domain, new_nick) p = Presence(to_jid=new_room_jid) self.manager.stream.send(p)
def save_metadata_json(self, filename: str, structure: JsonExportable) -> None: """Saves metadata JSON file of a structure.""" if self.compress_json: filename += '.json.xz' else: filename += '.json' save_structure_to_file(structure, filename) if isinstance(structure, (Post, StoryItem)): # log 'json ' message when saving Post or StoryItem self.context.log('json', end=' ', flush=True)
def function[save_metadata_json, parameter[self, filename, structure]]: constant[Saves metadata JSON file of a structure.] if name[self].compress_json begin[:] <ast.AugAssign object at 0x7da20c6e62f0> call[name[save_structure_to_file], parameter[name[structure], name[filename]]] if call[name[isinstance], parameter[name[structure], tuple[[<ast.Name object at 0x7da20c6e7460>, <ast.Name object at 0x7da20c6e7fa0>]]]] begin[:] call[name[self].context.log, parameter[constant[json]]]
keyword[def] identifier[save_metadata_json] ( identifier[self] , identifier[filename] : identifier[str] , identifier[structure] : identifier[JsonExportable] )-> keyword[None] : literal[string] keyword[if] identifier[self] . identifier[compress_json] : identifier[filename] += literal[string] keyword[else] : identifier[filename] += literal[string] identifier[save_structure_to_file] ( identifier[structure] , identifier[filename] ) keyword[if] identifier[isinstance] ( identifier[structure] ,( identifier[Post] , identifier[StoryItem] )): identifier[self] . identifier[context] . identifier[log] ( literal[string] , identifier[end] = literal[string] , identifier[flush] = keyword[True] )
def save_metadata_json(self, filename: str, structure: JsonExportable) -> None: """Saves metadata JSON file of a structure.""" if self.compress_json: filename += '.json.xz' # depends on [control=['if'], data=[]] else: filename += '.json' save_structure_to_file(structure, filename) if isinstance(structure, (Post, StoryItem)): # log 'json ' message when saving Post or StoryItem self.context.log('json', end=' ', flush=True) # depends on [control=['if'], data=[]]
def json_2_injector_component(json_obj):
    """
    transform the JSON returned by the Ariane server into a local object
    :param json_obj: the JSON returned by the Ariane server
    :return: a new InjectorCachedComponent
    """
    LOGGER.debug("InjectorCachedComponent.json_2_injector_component")
    return InjectorCachedComponent(
        component_id=json_obj['componentId'],
        component_name=json_obj['componentName'],
        component_type=json_obj['componentType'],
        component_admin_queue=json_obj['componentAdminQueue'],
        refreshing=json_obj['refreshing'],
        next_action=json_obj['nextAction'],
        json_last_refresh=json_obj['jsonLastRefresh'],
        attached_gear_id=json_obj['attachedGearId']
    )
def function[json_2_injector_component, parameter[json_obj]]:
    constant[
    transform the JSON returned by the Ariane server into a local object
    :param json_obj: the JSON returned by the Ariane server
    :return: a new InjectorCachedComponent
    ]
    call[name[LOGGER].debug, parameter[constant[InjectorCachedComponent.json_2_injector_component]]]
    return[call[name[InjectorCachedComponent], parameter[]]]
keyword[def] identifier[json_2_injector_component] ( identifier[json_obj] ): literal[string] identifier[LOGGER] . identifier[debug] ( literal[string] ) keyword[return] identifier[InjectorCachedComponent] ( identifier[component_id] = identifier[json_obj] [ literal[string] ], identifier[component_name] = identifier[json_obj] [ literal[string] ], identifier[component_type] = identifier[json_obj] [ literal[string] ], identifier[component_admin_queue] = identifier[json_obj] [ literal[string] ], identifier[refreshing] = identifier[json_obj] [ literal[string] ], identifier[next_action] = identifier[json_obj] [ literal[string] ], identifier[json_last_refresh] = identifier[json_obj] [ literal[string] ], identifier[attached_gear_id] = identifier[json_obj] [ literal[string] ] )
def json_2_injector_component(json_obj):
    """
    transform the JSON returned by the Ariane server into a local object
    :param json_obj: the JSON returned by the Ariane server
    :return: a new InjectorCachedComponent
    """
    LOGGER.debug('InjectorCachedComponent.json_2_injector_component')
    return InjectorCachedComponent(component_id=json_obj['componentId'], component_name=json_obj['componentName'], component_type=json_obj['componentType'], component_admin_queue=json_obj['componentAdminQueue'], refreshing=json_obj['refreshing'], next_action=json_obj['nextAction'], json_last_refresh=json_obj['jsonLastRefresh'], attached_gear_id=json_obj['attachedGearId'])
def clear_cache(self, items=None, topic=EVENT_TOPIC): """ expects event object to be in the format of a session-stop or session-expire event, whose results attribute is a namedtuple(identifiers, session_key) """ try: for realm in self.realms: identifier = items.identifiers.from_source(realm.name) if identifier: realm.clear_cached_authc_info(identifier) except AttributeError: msg = ('Could not clear authc_info from cache after event. ' 'items: ' + str(items)) logger.warn(msg)
def function[clear_cache, parameter[self, items, topic]]: constant[ expects event object to be in the format of a session-stop or session-expire event, whose results attribute is a namedtuple(identifiers, session_key) ] <ast.Try object at 0x7da20c6c7a00>
keyword[def] identifier[clear_cache] ( identifier[self] , identifier[items] = keyword[None] , identifier[topic] = identifier[EVENT_TOPIC] ): literal[string] keyword[try] : keyword[for] identifier[realm] keyword[in] identifier[self] . identifier[realms] : identifier[identifier] = identifier[items] . identifier[identifiers] . identifier[from_source] ( identifier[realm] . identifier[name] ) keyword[if] identifier[identifier] : identifier[realm] . identifier[clear_cached_authc_info] ( identifier[identifier] ) keyword[except] identifier[AttributeError] : identifier[msg] =( literal[string] literal[string] + identifier[str] ( identifier[items] )) identifier[logger] . identifier[warn] ( identifier[msg] )
def clear_cache(self, items=None, topic=EVENT_TOPIC): """ expects event object to be in the format of a session-stop or session-expire event, whose results attribute is a namedtuple(identifiers, session_key) """ try: for realm in self.realms: identifier = items.identifiers.from_source(realm.name) if identifier: realm.clear_cached_authc_info(identifier) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['realm']] # depends on [control=['try'], data=[]] except AttributeError: msg = 'Could not clear authc_info from cache after event. items: ' + str(items) logger.warn(msg) # depends on [control=['except'], data=[]]
def on_execute__set_surface_alphas(self, request): ''' .. versionchanged:: 0.12 Queue redraw after setting surface alphas. ''' data = decode_content_data(request) logger.debug('[on_execute__set_surface_alphas] %s', data['surface_alphas']) for name, alpha in data['surface_alphas'].iteritems(): self.parent.canvas_slave.set_surface_alpha(name, alpha) self.parent.canvas_slave.render() gobject.idle_add(self.parent.canvas_slave.draw)
def function[on_execute__set_surface_alphas, parameter[self, request]]: constant[ .. versionchanged:: 0.12 Queue redraw after setting surface alphas. ] variable[data] assign[=] call[name[decode_content_data], parameter[name[request]]] call[name[logger].debug, parameter[constant[[on_execute__set_surface_alphas] %s], call[name[data]][constant[surface_alphas]]]] for taget[tuple[[<ast.Name object at 0x7da20e74bd90>, <ast.Name object at 0x7da20e749540>]]] in starred[call[call[name[data]][constant[surface_alphas]].iteritems, parameter[]]] begin[:] call[name[self].parent.canvas_slave.set_surface_alpha, parameter[name[name], name[alpha]]] call[name[self].parent.canvas_slave.render, parameter[]] call[name[gobject].idle_add, parameter[name[self].parent.canvas_slave.draw]]
keyword[def] identifier[on_execute__set_surface_alphas] ( identifier[self] , identifier[request] ): literal[string] identifier[data] = identifier[decode_content_data] ( identifier[request] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[data] [ literal[string] ]) keyword[for] identifier[name] , identifier[alpha] keyword[in] identifier[data] [ literal[string] ]. identifier[iteritems] (): identifier[self] . identifier[parent] . identifier[canvas_slave] . identifier[set_surface_alpha] ( identifier[name] , identifier[alpha] ) identifier[self] . identifier[parent] . identifier[canvas_slave] . identifier[render] () identifier[gobject] . identifier[idle_add] ( identifier[self] . identifier[parent] . identifier[canvas_slave] . identifier[draw] )
def on_execute__set_surface_alphas(self, request): """ .. versionchanged:: 0.12 Queue redraw after setting surface alphas. """ data = decode_content_data(request) logger.debug('[on_execute__set_surface_alphas] %s', data['surface_alphas']) for (name, alpha) in data['surface_alphas'].iteritems(): self.parent.canvas_slave.set_surface_alpha(name, alpha) # depends on [control=['for'], data=[]] self.parent.canvas_slave.render() gobject.idle_add(self.parent.canvas_slave.draw)
def do_use(self, line): "use {tablename}" self.table = boto.dynamodb2.table.Table(line, connection=self.conn) self.pprint(self.table.describe()) self.prompt = "%s> " % self.table.table_name
def function[do_use, parameter[self, line]]: constant[use {tablename}] name[self].table assign[=] call[name[boto].dynamodb2.table.Table, parameter[name[line]]] call[name[self].pprint, parameter[call[name[self].table.describe, parameter[]]]] name[self].prompt assign[=] binary_operation[constant[%s> ] <ast.Mod object at 0x7da2590d6920> name[self].table.table_name]
keyword[def] identifier[do_use] ( identifier[self] , identifier[line] ): literal[string] identifier[self] . identifier[table] = identifier[boto] . identifier[dynamodb2] . identifier[table] . identifier[Table] ( identifier[line] , identifier[connection] = identifier[self] . identifier[conn] ) identifier[self] . identifier[pprint] ( identifier[self] . identifier[table] . identifier[describe] ()) identifier[self] . identifier[prompt] = literal[string] % identifier[self] . identifier[table] . identifier[table_name]
def do_use(self, line): """use {tablename}""" self.table = boto.dynamodb2.table.Table(line, connection=self.conn) self.pprint(self.table.describe()) self.prompt = '%s> ' % self.table.table_name
def replace(self, to_replace, value, inplace=False, filter=None,
                regex=False, convert=True):
        """replace the to_replace value with value, possibly creating new
        blocks here. This is just a call to putmask. regex is not used here.
        It is used in ObjectBlocks. It is here for API compatibility.
        """
        inplace = validate_bool_kwarg(inplace, 'inplace')
        original_to_replace = to_replace

        # try to replace, if we raise an error, convert to ObjectBlock and
        # retry
        try:
            values, to_replace = self._try_coerce_args(self.values,
                                                       to_replace)
            mask = missing.mask_missing(values, to_replace)
            if filter is not None:
                filtered_out = ~self.mgr_locs.isin(filter)
                mask[filtered_out.nonzero()[0]] = False

            blocks = self.putmask(mask, value, inplace=inplace)
            if convert:
                blocks = [b.convert(by_item=True, numeric=False,
                                    copy=not inplace) for b in blocks]
            return blocks
        except (TypeError, ValueError):
            # GH 22083, TypeError or ValueError occurred within error handling
            # causes infinite loop. Cast and retry only if not objectblock.
            if is_object_dtype(self):
                raise

            # try again with a compatible block
            block = self.astype(object)
            return block.replace(to_replace=original_to_replace,
                                 value=value,
                                 inplace=inplace,
                                 filter=filter,
                                 regex=regex,
                                 convert=convert)
def function[replace, parameter[self, to_replace, value, inplace, filter, regex, convert]]:
    constant[replace the to_replace value with value, possibly creating new
        blocks here. This is just a call to putmask. regex is not used here.
        It is used in ObjectBlocks. It is here for API compatibility.
        ]
    variable[inplace] assign[=] call[name[validate_bool_kwarg], parameter[name[inplace], constant[inplace]]]
    variable[original_to_replace] assign[=] name[to_replace]
    <ast.Try object at 0x7da18fe90c70>
keyword[def] identifier[replace] ( identifier[self] , identifier[to_replace] , identifier[value] , identifier[inplace] = keyword[False] , identifier[filter] = keyword[None] , identifier[regex] = keyword[False] , identifier[convert] = keyword[True] ): literal[string] identifier[inplace] = identifier[validate_bool_kwarg] ( identifier[inplace] , literal[string] ) identifier[original_to_replace] = identifier[to_replace] keyword[try] : identifier[values] , identifier[to_replace] = identifier[self] . identifier[_try_coerce_args] ( identifier[self] . identifier[values] , identifier[to_replace] ) identifier[mask] = identifier[missing] . identifier[mask_missing] ( identifier[values] , identifier[to_replace] ) keyword[if] identifier[filter] keyword[is] keyword[not] keyword[None] : identifier[filtered_out] =~ identifier[self] . identifier[mgr_locs] . identifier[isin] ( identifier[filter] ) identifier[mask] [ identifier[filtered_out] . identifier[nonzero] ()[ literal[int] ]]= keyword[False] identifier[blocks] = identifier[self] . identifier[putmask] ( identifier[mask] , identifier[value] , identifier[inplace] = identifier[inplace] ) keyword[if] identifier[convert] : identifier[blocks] =[ identifier[b] . identifier[convert] ( identifier[by_item] = keyword[True] , identifier[numeric] = keyword[False] , identifier[copy] = keyword[not] identifier[inplace] ) keyword[for] identifier[b] keyword[in] identifier[blocks] ] keyword[return] identifier[blocks] keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[if] identifier[is_object_dtype] ( identifier[self] ): keyword[raise] identifier[block] = identifier[self] . identifier[astype] ( identifier[object] ) keyword[return] identifier[block] . identifier[replace] ( identifier[to_replace] = identifier[original_to_replace] , identifier[value] = identifier[value] , identifier[inplace] = identifier[inplace] , identifier[filter] = identifier[filter] , identifier[regex] = identifier[regex] , identifier[convert] = identifier[convert] )
def replace(self, to_replace, value, inplace=False, filter=None, regex=False, convert=True):
    """replace the to_replace value with value, possibly creating new
        blocks here. This is just a call to putmask. regex is not used here.
        It is used in ObjectBlocks. It is here for API compatibility.
        """
    inplace = validate_bool_kwarg(inplace, 'inplace')
    original_to_replace = to_replace
    # try to replace, if we raise an error, convert to ObjectBlock and
    # retry
    try:
        (values, to_replace) = self._try_coerce_args(self.values, to_replace)
        mask = missing.mask_missing(values, to_replace)
        if filter is not None:
            filtered_out = ~self.mgr_locs.isin(filter)
            mask[filtered_out.nonzero()[0]] = False # depends on [control=['if'], data=['filter']]
        blocks = self.putmask(mask, value, inplace=inplace)
        if convert:
            blocks = [b.convert(by_item=True, numeric=False, copy=not inplace) for b in blocks] # depends on [control=['if'], data=[]]
        return blocks # depends on [control=['try'], data=[]]
    except (TypeError, ValueError):
        # GH 22083, TypeError or ValueError occurred within error handling
        # causes infinite loop. Cast and retry only if not objectblock.
        if is_object_dtype(self):
            raise # depends on [control=['if'], data=[]]
        # try again with a compatible block
        block = self.astype(object)
        return block.replace(to_replace=original_to_replace, value=value, inplace=inplace, filter=filter, regex=regex, convert=convert) # depends on [control=['except'], data=[]]
def _process_change(self, server_description): """Process a new ServerDescription on an opened topology. Hold the lock when calling this. """ td_old = self._description if self._publish_server: old_server_description = td_old._server_descriptions[ server_description.address] self._events.put(( self._listeners.publish_server_description_changed, (old_server_description, server_description, server_description.address, self._topology_id))) self._description = updated_topology_description( self._description, server_description) self._update_servers() self._receive_cluster_time_no_lock(server_description.cluster_time) if self._publish_tp: self._events.put(( self._listeners.publish_topology_description_changed, (td_old, self._description, self._topology_id))) # Wake waiters in select_servers(). self._condition.notify_all()
def function[_process_change, parameter[self, server_description]]: constant[Process a new ServerDescription on an opened topology. Hold the lock when calling this. ] variable[td_old] assign[=] name[self]._description if name[self]._publish_server begin[:] variable[old_server_description] assign[=] call[name[td_old]._server_descriptions][name[server_description].address] call[name[self]._events.put, parameter[tuple[[<ast.Attribute object at 0x7da20c9933a0>, <ast.Tuple object at 0x7da20c990be0>]]]] name[self]._description assign[=] call[name[updated_topology_description], parameter[name[self]._description, name[server_description]]] call[name[self]._update_servers, parameter[]] call[name[self]._receive_cluster_time_no_lock, parameter[name[server_description].cluster_time]] if name[self]._publish_tp begin[:] call[name[self]._events.put, parameter[tuple[[<ast.Attribute object at 0x7da20c9916c0>, <ast.Tuple object at 0x7da20c9900a0>]]]] call[name[self]._condition.notify_all, parameter[]]
keyword[def] identifier[_process_change] ( identifier[self] , identifier[server_description] ): literal[string] identifier[td_old] = identifier[self] . identifier[_description] keyword[if] identifier[self] . identifier[_publish_server] : identifier[old_server_description] = identifier[td_old] . identifier[_server_descriptions] [ identifier[server_description] . identifier[address] ] identifier[self] . identifier[_events] . identifier[put] (( identifier[self] . identifier[_listeners] . identifier[publish_server_description_changed] , ( identifier[old_server_description] , identifier[server_description] , identifier[server_description] . identifier[address] , identifier[self] . identifier[_topology_id] ))) identifier[self] . identifier[_description] = identifier[updated_topology_description] ( identifier[self] . identifier[_description] , identifier[server_description] ) identifier[self] . identifier[_update_servers] () identifier[self] . identifier[_receive_cluster_time_no_lock] ( identifier[server_description] . identifier[cluster_time] ) keyword[if] identifier[self] . identifier[_publish_tp] : identifier[self] . identifier[_events] . identifier[put] (( identifier[self] . identifier[_listeners] . identifier[publish_topology_description_changed] , ( identifier[td_old] , identifier[self] . identifier[_description] , identifier[self] . identifier[_topology_id] ))) identifier[self] . identifier[_condition] . identifier[notify_all] ()
def _process_change(self, server_description): """Process a new ServerDescription on an opened topology. Hold the lock when calling this. """ td_old = self._description if self._publish_server: old_server_description = td_old._server_descriptions[server_description.address] self._events.put((self._listeners.publish_server_description_changed, (old_server_description, server_description, server_description.address, self._topology_id))) # depends on [control=['if'], data=[]] self._description = updated_topology_description(self._description, server_description) self._update_servers() self._receive_cluster_time_no_lock(server_description.cluster_time) if self._publish_tp: self._events.put((self._listeners.publish_topology_description_changed, (td_old, self._description, self._topology_id))) # depends on [control=['if'], data=[]] # Wake waiters in select_servers(). self._condition.notify_all()
def _parse_path(self, path): """Return (hosts, path) tuple""" # Support specifying another host via hdfs://host:port/path syntax # We ignore the scheme and piece together the query and fragment # Note that HDFS URIs are not URL encoded, so a '?' or a '#' in the URI is part of the # path parts = urlsplit(path, allow_fragments=False) if not parts.path.startswith('/'): raise ValueError("Path must be absolute, was given {}".format(path)) if parts.scheme not in ('', 'hdfs', 'hftp', 'webhdfs'): warnings.warn("Unexpected scheme {}".format(parts.scheme)) assert not parts.fragment path = parts.path if parts.query: path += '?' + parts.query if parts.netloc: hosts = self._parse_hosts(parts.netloc) else: hosts = self.hosts return hosts, path
def function[_parse_path, parameter[self, path]]: constant[Return (hosts, path) tuple] variable[parts] assign[=] call[name[urlsplit], parameter[name[path]]] if <ast.UnaryOp object at 0x7da18fe91d20> begin[:] <ast.Raise object at 0x7da18fe92980> if compare[name[parts].scheme <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da18fe92290>, <ast.Constant object at 0x7da18fe92920>, <ast.Constant object at 0x7da18fe93d00>, <ast.Constant object at 0x7da18fe91930>]]] begin[:] call[name[warnings].warn, parameter[call[constant[Unexpected scheme {}].format, parameter[name[parts].scheme]]]] assert[<ast.UnaryOp object at 0x7da18fe910f0>] variable[path] assign[=] name[parts].path if name[parts].query begin[:] <ast.AugAssign object at 0x7da18fe92380> if name[parts].netloc begin[:] variable[hosts] assign[=] call[name[self]._parse_hosts, parameter[name[parts].netloc]] return[tuple[[<ast.Name object at 0x7da18fe92890>, <ast.Name object at 0x7da18fe92da0>]]]
keyword[def] identifier[_parse_path] ( identifier[self] , identifier[path] ): literal[string] identifier[parts] = identifier[urlsplit] ( identifier[path] , identifier[allow_fragments] = keyword[False] ) keyword[if] keyword[not] identifier[parts] . identifier[path] . identifier[startswith] ( literal[string] ): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[path] )) keyword[if] identifier[parts] . identifier[scheme] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ): identifier[warnings] . identifier[warn] ( literal[string] . identifier[format] ( identifier[parts] . identifier[scheme] )) keyword[assert] keyword[not] identifier[parts] . identifier[fragment] identifier[path] = identifier[parts] . identifier[path] keyword[if] identifier[parts] . identifier[query] : identifier[path] += literal[string] + identifier[parts] . identifier[query] keyword[if] identifier[parts] . identifier[netloc] : identifier[hosts] = identifier[self] . identifier[_parse_hosts] ( identifier[parts] . identifier[netloc] ) keyword[else] : identifier[hosts] = identifier[self] . identifier[hosts] keyword[return] identifier[hosts] , identifier[path]
def _parse_path(self, path): """Return (hosts, path) tuple""" # Support specifying another host via hdfs://host:port/path syntax # We ignore the scheme and piece together the query and fragment # Note that HDFS URIs are not URL encoded, so a '?' or a '#' in the URI is part of the # path parts = urlsplit(path, allow_fragments=False) if not parts.path.startswith('/'): raise ValueError('Path must be absolute, was given {}'.format(path)) # depends on [control=['if'], data=[]] if parts.scheme not in ('', 'hdfs', 'hftp', 'webhdfs'): warnings.warn('Unexpected scheme {}'.format(parts.scheme)) # depends on [control=['if'], data=[]] assert not parts.fragment path = parts.path if parts.query: path += '?' + parts.query # depends on [control=['if'], data=[]] if parts.netloc: hosts = self._parse_hosts(parts.netloc) # depends on [control=['if'], data=[]] else: hosts = self.hosts return (hosts, path)
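A small self-contained check of the urlsplit behaviour that `_parse_path` above relies on; the URI is made up, and `urllib.parse` is assumed as the Python 3 home of `urlsplit` (the snippet itself imports it elsewhere in its module):

from urllib.parse import urlsplit

# Confirm how a qualified HDFS URI splits into netloc / path / query.
# With allow_fragments=False, a '#' would be kept as part of the path
# or query rather than split off, matching the comment in _parse_path.
parts = urlsplit('hdfs://namenode:8020/data/file?op=OPEN', allow_fragments=False)
assert parts.scheme == 'hdfs'
assert parts.netloc == 'namenode:8020'
assert parts.path == '/data/file'
assert parts.query == 'op=OPEN'
# _parse_path would then return (hosts parsed from 'namenode:8020',
# '/data/file?op=OPEN'), re-attaching the query to the path.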
def batch_rename_file(path, f, t):
    """Batch rename files according to the rules defined in replaces."""
    files = os.listdir(path)
    for file in files:
        if f in file:
            new_fn = file.replace(f, t)
            old = os.path.join(path, file)
            new = os.path.join(path, new_fn)
            os.rename(old, new)
def function[batch_rename_file, parameter[path, f, t]]:
    constant[Batch rename files according to the rules defined in replaces.]
    variable[files] assign[=] call[name[os].listdir, parameter[name[path]]]
    for taget[name[file]] in starred[name[files]] begin[:]
        if compare[name[f] in name[file]] begin[:]
            variable[new_fn] assign[=] call[name[file].replace, parameter[name[f], name[t]]]
            variable[old] assign[=] call[name[os].path.join, parameter[name[path], name[file]]]
            variable[new] assign[=] call[name[os].path.join, parameter[name[path], name[new_fn]]]
            call[name[os].rename, parameter[name[old], name[new]]]
keyword[def] identifier[batch_rename_file] ( identifier[path] , identifier[f] , identifier[t] ): literal[string] identifier[files] = identifier[os] . identifier[listdir] ( identifier[path] ) keyword[for] identifier[file] keyword[in] identifier[files] : keyword[if] identifier[f] keyword[in] identifier[file] : identifier[new_fn] = identifier[file] . identifier[replace] ( identifier[f] , identifier[t] ) identifier[old] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[file] ) identifier[new] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[new_fn] ) identifier[os] . identifier[rename] ( identifier[old] , identifier[new] )
def batch_rename_file(path, f, t):
    """Batch rename files according to the rules defined in replaces."""
    files = os.listdir(path)
    for file in files:
        if f in file:
            new_fn = file.replace(f, t)
            old = os.path.join(path, file)
            new = os.path.join(path, new_fn)
            os.rename(old, new) # depends on [control=['if'], data=['f', 'file']] # depends on [control=['for'], data=['file']]
def not_completed(f):
    """Decorator function to check that the transfer is not already completed.

    :raises: :class:`FMBaseError` if the transfer is already completed
    """

    @wraps(f)
    def check_if_complete(cls, *args, **kwargs):
        if cls.is_complete:
            raise FMBaseError('Transfer already completed.')
        return f(cls, *args, **kwargs)

    return check_if_complete
def function[not_completed, parameter[f]]:
    constant[Decorator function to check that the transfer is not already completed.

    :raises: :class:`FMBaseError` if the transfer is already completed
    ]

    def function[check_if_complete, parameter[cls]]:
        if name[cls].is_complete begin[:]
            <ast.Raise object at 0x7da18f09e1d0>
        return[call[name[f], parameter[name[cls], <ast.Starred object at 0x7da18f09f130>]]]
    return[name[check_if_complete]]
keyword[def] identifier[not_completed] ( identifier[f] ): literal[string] @ identifier[wraps] ( identifier[f] ) keyword[def] identifier[check_if_complete] ( identifier[cls] ,* identifier[args] ,** identifier[kwargs] ): keyword[if] identifier[cls] . identifier[is_complete] : keyword[raise] identifier[FMBaseError] ( literal[string] ) keyword[return] identifier[f] ( identifier[cls] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[check_if_complete]
def not_completed(f):
    """Decorator function to check that the transfer is not already completed.

    :raises: :class:`FMBaseError` if the transfer is already completed
    """

    @wraps(f)
    def check_if_complete(cls, *args, **kwargs):
        if cls.is_complete:
            raise FMBaseError('Transfer already completed.') # depends on [control=['if'], data=[]]
        return f(cls, *args, **kwargs)
    return check_if_complete
def map_exceptions(self, mapping, *args, **kwargs): """ Exception mapping helper decorator. Takes the same arguments as the main decorator, plus `mapping`, which is a list of `(exception_class, status_line)` pairs. """ @self.__call__(*args, **kwargs) def helper(e, status): return dict(exception=e), status def wrap(target): @wraps(target) def wrapper(*args, **kwargs): try: return target(*args, **kwargs) except BaseException as e: for klass, status in mapping: if isinstance(e, klass): return helper(e, status) raise return wrapper return wrap
def function[map_exceptions, parameter[self, mapping]]: constant[ Exception mapping helper decorator. Takes the same arguments as the main decorator, plus `mapping`, which is a list of `(exception_class, status_line)` pairs. ] def function[helper, parameter[e, status]]: return[tuple[[<ast.Call object at 0x7da204346530>, <ast.Name object at 0x7da2043479d0>]]] def function[wrap, parameter[target]]: def function[wrapper, parameter[]]: <ast.Try object at 0x7da204346560> return[name[wrapper]] return[name[wrap]]
keyword[def] identifier[map_exceptions] ( identifier[self] , identifier[mapping] ,* identifier[args] ,** identifier[kwargs] ): literal[string] @ identifier[self] . identifier[__call__] (* identifier[args] ,** identifier[kwargs] ) keyword[def] identifier[helper] ( identifier[e] , identifier[status] ): keyword[return] identifier[dict] ( identifier[exception] = identifier[e] ), identifier[status] keyword[def] identifier[wrap] ( identifier[target] ): @ identifier[wraps] ( identifier[target] ) keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ): keyword[try] : keyword[return] identifier[target] (* identifier[args] ,** identifier[kwargs] ) keyword[except] identifier[BaseException] keyword[as] identifier[e] : keyword[for] identifier[klass] , identifier[status] keyword[in] identifier[mapping] : keyword[if] identifier[isinstance] ( identifier[e] , identifier[klass] ): keyword[return] identifier[helper] ( identifier[e] , identifier[status] ) keyword[raise] keyword[return] identifier[wrapper] keyword[return] identifier[wrap]
def map_exceptions(self, mapping, *args, **kwargs): """ Exception mapping helper decorator. Takes the same arguments as the main decorator, plus `mapping`, which is a list of `(exception_class, status_line)` pairs. """ @self.__call__(*args, **kwargs) def helper(e, status): return (dict(exception=e), status) def wrap(target): @wraps(target) def wrapper(*args, **kwargs): try: return target(*args, **kwargs) # depends on [control=['try'], data=[]] except BaseException as e: for (klass, status) in mapping: if isinstance(e, klass): return helper(e, status) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] raise # depends on [control=['except'], data=['e']] return wrapper return wrap
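A hedged usage sketch for `map_exceptions` above; the `api` decorator instance, the status lines, and the `items` store are all hypothetical, while the `(exception_class, status_line)` mapping shape comes from the snippet itself:

# Hypothetical: translate lookup and validation errors into HTTP-style
# status lines via the (exception_class, status_line) mapping.
@api.map_exceptions([(KeyError, '404 Not Found'),
                     (ValueError, '400 Bad Request')])
def get_item(item_id):
    return items[item_id]  # a KeyError raised here becomes a 404 response

# Any exception not listed in the mapping is re-raised unchanged.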
def keygen(sk_file=None, pk_file=None, **kwargs):
    '''
    Use libnacl to generate a keypair.

    If no `sk_file` is defined, return a keypair.

    If only the `sk_file` is defined, `pk_file` will use the same name with a postfix `.pub`.

    If the `sk_file` already exists but `pk_file` does not, the `pk_file` will be generated using the `sk_file`.

    CLI Examples:

    .. code-block:: bash

        salt-call nacl.keygen
        salt-call nacl.keygen sk_file=/etc/salt/pki/master/nacl
        salt-call nacl.keygen sk_file=/etc/salt/pki/master/nacl pk_file=/etc/salt/pki/master/nacl.pub
        salt-call --local nacl.keygen
    '''
    kwargs['opts'] = __opts__
    return salt.utils.nacl.keygen(sk_file, pk_file, **kwargs)
def function[keygen, parameter[sk_file, pk_file]]:
    constant[
    Use libnacl to generate a keypair.

    If no `sk_file` is defined, return a keypair.

    If only the `sk_file` is defined, `pk_file` will use the same name with a postfix `.pub`.

    If the `sk_file` already exists but `pk_file` does not, the `pk_file` will be generated using the `sk_file`.

    CLI Examples:

    .. code-block:: bash

        salt-call nacl.keygen
        salt-call nacl.keygen sk_file=/etc/salt/pki/master/nacl
        salt-call nacl.keygen sk_file=/etc/salt/pki/master/nacl pk_file=/etc/salt/pki/master/nacl.pub
        salt-call --local nacl.keygen
    ]
    call[name[kwargs]][constant[opts]] assign[=] name[__opts__]
    return[call[name[salt].utils.nacl.keygen, parameter[name[sk_file], name[pk_file]]]]
keyword[def] identifier[keygen] ( identifier[sk_file] = keyword[None] , identifier[pk_file] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= identifier[__opts__] keyword[return] identifier[salt] . identifier[utils] . identifier[nacl] . identifier[keygen] ( identifier[sk_file] , identifier[pk_file] ,** identifier[kwargs] )
def keygen(sk_file=None, pk_file=None, **kwargs):
    """
    Use libnacl to generate a keypair.

    If no `sk_file` is defined, return a keypair.

    If only the `sk_file` is defined, `pk_file` will use the same name with a postfix `.pub`.

    If the `sk_file` already exists but `pk_file` does not, the `pk_file` will be generated using the `sk_file`.

    CLI Examples:

    .. code-block:: bash

        salt-call nacl.keygen
        salt-call nacl.keygen sk_file=/etc/salt/pki/master/nacl
        salt-call nacl.keygen sk_file=/etc/salt/pki/master/nacl pk_file=/etc/salt/pki/master/nacl.pub
        salt-call --local nacl.keygen
    """
    kwargs['opts'] = __opts__
    return salt.utils.nacl.keygen(sk_file, pk_file, **kwargs)
def _mplain_word_separator(self): """ Get the word separator to split words in mplain format. :rtype: string """ word_separator = gf.safe_get(self.parameters, gc.PPN_TASK_IS_TEXT_MPLAIN_WORD_SEPARATOR, u" ") if (word_separator is None) or (word_separator == "space"): return u" " elif word_separator == "equal": return u"=" elif word_separator == "pipe": return u"|" elif word_separator == "tab": return u"\u0009" return word_separator
def function[_mplain_word_separator, parameter[self]]: constant[ Get the word separator to split words in mplain format. :rtype: string ] variable[word_separator] assign[=] call[name[gf].safe_get, parameter[name[self].parameters, name[gc].PPN_TASK_IS_TEXT_MPLAIN_WORD_SEPARATOR, constant[ ]]] if <ast.BoolOp object at 0x7da1b18fb100> begin[:] return[constant[ ]] return[name[word_separator]]
keyword[def] identifier[_mplain_word_separator] ( identifier[self] ): literal[string] identifier[word_separator] = identifier[gf] . identifier[safe_get] ( identifier[self] . identifier[parameters] , identifier[gc] . identifier[PPN_TASK_IS_TEXT_MPLAIN_WORD_SEPARATOR] , literal[string] ) keyword[if] ( identifier[word_separator] keyword[is] keyword[None] ) keyword[or] ( identifier[word_separator] == literal[string] ): keyword[return] literal[string] keyword[elif] identifier[word_separator] == literal[string] : keyword[return] literal[string] keyword[elif] identifier[word_separator] == literal[string] : keyword[return] literal[string] keyword[elif] identifier[word_separator] == literal[string] : keyword[return] literal[string] keyword[return] identifier[word_separator]
def _mplain_word_separator(self): """ Get the word separator to split words in mplain format. :rtype: string """ word_separator = gf.safe_get(self.parameters, gc.PPN_TASK_IS_TEXT_MPLAIN_WORD_SEPARATOR, u' ') if word_separator is None or word_separator == 'space': return u' ' # depends on [control=['if'], data=[]] elif word_separator == 'equal': return u'=' # depends on [control=['if'], data=[]] elif word_separator == 'pipe': return u'|' # depends on [control=['if'], data=[]] elif word_separator == 'tab': return u'\t' # depends on [control=['if'], data=[]] return word_separator
def playlist_songs(self, playlist):
    """Get a listing of songs from a playlist.

    Parameters:
        playlist (dict): A playlist dict.

    Returns:
        list: Playlist song dicts.
    """

    playlist_type = playlist.get('type')

    playlist_song_list = []
    if playlist_type in ('USER_GENERATED', None):
        start_token = None
        playlist_song_list = []
        while True:
            response = self._call(
                mc_calls.PlaylistEntryFeed,
                max_results=49995,
                start_token=start_token
            )

            items = response.body.get('data', {}).get('items', [])
            if items:
                playlist_song_list.extend(items)

            start_token = response.body.get('nextPageToken')
            if start_token is None:
                break
    elif playlist_type == 'SHARED':
        playlist_share_token = playlist['shareToken']

        start_token = None
        playlist_song_list = []
        while True:
            response = self._call(
                mc_calls.PlaylistEntriesShared,
                playlist_share_token,
                max_results=49995,
                start_token=start_token
            )

            entry = response.body['entries'][0]

            items = entry.get('playlistEntry', [])
            if items:
                playlist_song_list.extend(items)

            start_token = entry.get('nextPageToken')
            if start_token is None:
                break

    playlist_song_list.sort(key=itemgetter('absolutePosition'))

    return playlist_song_list
def function[playlist_songs, parameter[self, playlist]]:
    constant[Get a listing of songs from a playlist.

    Parameters:
        playlist (dict): A playlist dict.

    Returns:
        list: Playlist song dicts.
    ]
    variable[playlist_type] assign[=] call[name[playlist].get, parameter[constant[type]]]
    variable[playlist_song_list] assign[=] list[[]]
    if compare[name[playlist_type] in tuple[[<ast.Constant object at 0x7da1b0471660>, <ast.Constant object at 0x7da1b04719c0>]]] begin[:]
        variable[start_token] assign[=] constant[None]
        variable[playlist_song_list] assign[=] list[[]]
        while constant[True] begin[:]
            variable[response] assign[=] call[name[self]._call, parameter[name[mc_calls].PlaylistEntryFeed]]
            variable[items] assign[=] call[call[name[response].body.get, parameter[constant[data], dictionary[[], []]]].get, parameter[constant[items], list[[]]]]
            if name[items] begin[:]
                call[name[playlist_song_list].extend, parameter[name[items]]]
            variable[start_token] assign[=] call[name[response].body.get, parameter[constant[nextPageToken]]]
            if compare[name[start_token] is constant[None]] begin[:]
                break
    call[name[playlist_song_list].sort, parameter[]]
    return[name[playlist_song_list]]
keyword[def] identifier[playlist_songs] ( identifier[self] , identifier[playlist] ): literal[string] identifier[playlist_type] = identifier[playlist] . identifier[get] ( literal[string] ) identifier[playlist_song_list] =[] keyword[if] identifier[playlist_type] keyword[in] ( literal[string] , keyword[None] ): identifier[start_token] = keyword[None] identifier[playlist_song_list] =[] keyword[while] keyword[True] : identifier[response] = identifier[self] . identifier[_call] ( identifier[mc_calls] . identifier[PlaylistEntryFeed] , identifier[max_results] = literal[int] , identifier[start_token] = identifier[start_token] ) identifier[items] = identifier[response] . identifier[body] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ,[]) keyword[if] identifier[items] : identifier[playlist_song_list] . identifier[extend] ( identifier[items] ) identifier[start_token] = identifier[response] . identifier[body] . identifier[get] ( literal[string] ) keyword[if] identifier[start_token] keyword[is] keyword[None] : keyword[break] keyword[elif] identifier[playlist_type] == literal[string] : identifier[playlist_share_token] = identifier[playlist] [ literal[string] ] identifier[start_token] = keyword[None] identifier[playlist_song_list] =[] keyword[while] keyword[True] : identifier[response] = identifier[self] . identifier[_call] ( identifier[mc_calls] . identifier[PlaylistEntriesShared] , identifier[playlist_share_token] , identifier[max_results] = literal[int] , identifier[start_token] = identifier[start_token] ) identifier[entry] = identifier[response] . identifier[body] [ literal[string] ][ literal[int] ] identifier[items] = identifier[entry] . identifier[get] ( literal[string] ,[]) keyword[if] identifier[items] : identifier[playlist_song_list] . identifier[extend] ( identifier[items] ) identifier[start_token] = identifier[entry] . identifier[get] ( literal[string] ) keyword[if] identifier[start_token] keyword[is] keyword[None] : keyword[break] identifier[playlist_song_list] . identifier[sort] ( identifier[key] = identifier[itemgetter] ( literal[string] )) keyword[return] identifier[playlist_song_list]
def playlist_songs(self, playlist):
    """Get a listing of songs from a playlist.

    Parameters:
        playlist (dict): A playlist dict.

    Returns:
        list: Playlist song dicts.
    """
    playlist_type = playlist.get('type')
    playlist_song_list = []
    if playlist_type in ('USER_GENERATED', None):
        start_token = None
        playlist_song_list = []
        while True:
            response = self._call(mc_calls.PlaylistEntryFeed, max_results=49995, start_token=start_token)
            items = response.body.get('data', {}).get('items', [])
            if items:
                playlist_song_list.extend(items) # depends on [control=['if'], data=[]]
            start_token = response.body.get('nextPageToken')
            if start_token is None:
                break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
    elif playlist_type == 'SHARED':
        playlist_share_token = playlist['shareToken']
        start_token = None
        playlist_song_list = []
        while True:
            response = self._call(mc_calls.PlaylistEntriesShared, playlist_share_token, max_results=49995, start_token=start_token)
            entry = response.body['entries'][0]
            items = entry.get('playlistEntry', [])
            if items:
                playlist_song_list.extend(items) # depends on [control=['if'], data=[]]
            start_token = entry.get('nextPageToken')
            if start_token is None:
                break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
    playlist_song_list.sort(key=itemgetter('absolutePosition'))
    return playlist_song_list
def is_int_like(value): """Returns whether the value can be used as a standard integer. >>> is_int_like(4) True >>> is_int_like(4.0) False >>> is_int_like("4") False >>> is_int_like("abc") False """ try: if isinstance(value, int): return True return int(value) == value and str(value).isdigit() except: return False
def function[is_int_like, parameter[value]]: constant[Returns whether the value can be used as a standard integer. >>> is_int_like(4) True >>> is_int_like(4.0) False >>> is_int_like("4") False >>> is_int_like("abc") False ] <ast.Try object at 0x7da18f812aa0>
keyword[def] identifier[is_int_like] ( identifier[value] ): literal[string] keyword[try] : keyword[if] identifier[isinstance] ( identifier[value] , identifier[int] ): keyword[return] keyword[True] keyword[return] identifier[int] ( identifier[value] )== identifier[value] keyword[and] identifier[str] ( identifier[value] ). identifier[isdigit] () keyword[except] : keyword[return] keyword[False]
def is_int_like(value): """Returns whether the value can be used as a standard integer. >>> is_int_like(4) True >>> is_int_like(4.0) False >>> is_int_like("4") False >>> is_int_like("abc") False """ try: if isinstance(value, int): return True # depends on [control=['if'], data=[]] return int(value) == value and str(value).isdigit() # depends on [control=['try'], data=[]] except: return False # depends on [control=['except'], data=[]]
def qx(mt, x):
    """ qx: Returns the probability that a life aged x dies within one year
            With the convention that the true probability is qx/1000
    Args:
        mt: the mortality table
        x: the age as integer number.
    """
    if x < len(mt.qx):
        return mt.qx[x]
    else:
        return 0
def function[qx, parameter[mt, x]]:
    constant[ qx: Returns the probability that a life aged x dies within one year
            With the convention that the true probability is qx/1000
    Args:
        mt: the mortality table
        x: the age as integer number.
    ]
    if compare[name[x] less[<] call[name[len], parameter[name[mt].qx]]] begin[:]
        return[call[name[mt].qx][name[x]]]
keyword[def] identifier[qx] ( identifier[mt] , identifier[x] ): literal[string] keyword[if] identifier[x] < identifier[len] ( identifier[mt] . identifier[qx] ): keyword[return] identifier[mt] . identifier[qx] [ identifier[x] ] keyword[else] : keyword[return] literal[int]
def qx(mt, x):
    """ qx: Returns the probability that a life aged x dies within one year
            With the convention that the true probability is qx/1000
    Args:
        mt: the mortality table
        x: the age as integer number.
    """
    if x < len(mt.qx):
        return mt.qx[x] # depends on [control=['if'], data=['x']]
    else:
        return 0
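A small worked example of the per-mille convention described in the `qx` docstring; the table class and its values are invented:

# Hypothetical mortality table whose qx values are stored per mille
# (deaths per 1000 lives), as the docstring's convention states.
class MortalityTable:
    qx = [0.8, 0.5, 0.4]  # ages 0, 1, 2

mt = MortalityTable()
assert qx(mt, 1) == 0.5              # stored per-mille value
true_probability = qx(mt, 1) / 1000  # actual probability: 0.0005
assert qx(mt, 99) == 0               # ages beyond the table return 0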
def running(name, **kwargs):
    r'''
    Defines and starts a new VM with specified arguments, or restarts a VM (or group of VMs).
    (Runs ``vagrant up``.)

    :param name: the Salt_id node name you wish your VM to have.

    If ``name`` contains a "?" or "*" then it will re-start a group of VMs
    which have been paused or stopped.

    Each machine must be initially started individually using this function
    or the vagrant.init execution module call.

    \[NOTE:\] Keyword arguments are silently ignored when re-starting an existing VM.

    Possible keyword arguments:

    - cwd: The directory (path) containing the Vagrantfile
    - machine: ('') the name of the machine (in the Vagrantfile) if not default
    - vagrant_runas: ('root') the username who owns the vagrantbox file
    - vagrant_provider: the provider to run the VM (usually 'virtualbox')
    - vm: ({}) a dictionary containing these or other keyword arguments

    .. code-block:: yaml

      node_name:
        vagrant.running

    .. code-block:: yaml

      node_name:
        vagrant.running:
          - cwd: /projects/my_project
          - vagrant_runas: my_username
          - machine: machine1

    '''
    if '*' in name or '?' in name:
        return _vagrant_call(name, 'start', 'restarted',
                             "Machine has been restarted", "running")
    else:
        ret = {'name': name,
               'changes': {},
               'result': True,
               'comment': '{0} is already running'.format(name)
               }

        try:
            info = __salt__['vagrant.vm_state'](name)
            if info[0]['state'] != 'running':
                __salt__['vagrant.start'](name)
                ret['changes'][name] = 'Machine started'
                ret['comment'] = 'Node {0} started'.format(name)
        except (SaltInvocationError, CommandExecutionError):
            # there was no viable existing machine to start
            ret, kwargs = _find_init_change(name, ret, **kwargs)
            kwargs['start'] = True
            __salt__['vagrant.init'](name, **kwargs)
            ret['changes'][name] = 'Node defined and started'
            ret['comment'] = 'Node {0} defined and started'.format(name)

    return ret
def function[running, parameter[name]]:
    constant[
    Defines and starts a new VM with specified arguments, or restarts a VM (or group of VMs).
    (Runs ``vagrant up``.)

    :param name: the Salt_id node name you wish your VM to have.

    If ``name`` contains a "?" or "*" then it will re-start a group of VMs
    which have been paused or stopped.

    Each machine must be initially started individually using this function
    or the vagrant.init execution module call.

    \[NOTE:\] Keyword arguments are silently ignored when re-starting an existing VM.

    Possible keyword arguments:

    - cwd: The directory (path) containing the Vagrantfile
    - machine: ('') the name of the machine (in the Vagrantfile) if not default
    - vagrant_runas: ('root') the username who owns the vagrantbox file
    - vagrant_provider: the provider to run the VM (usually 'virtualbox')
    - vm: ({}) a dictionary containing these or other keyword arguments

    .. code-block:: yaml

      node_name:
        vagrant.running

    .. code-block:: yaml

      node_name:
        vagrant.running:
          - cwd: /projects/my_project
          - vagrant_runas: my_username
          - machine: machine1

    ]
    if <ast.BoolOp object at 0x7da18bc71570> begin[:]
        return[call[name[_vagrant_call], parameter[name[name], constant[start], constant[restarted], constant[Machine has been restarted], constant[running]]]]
keyword[def] identifier[running] ( identifier[name] ,** identifier[kwargs] ): literal[string] keyword[if] literal[string] keyword[in] identifier[name] keyword[or] literal[string] keyword[in] identifier[name] : keyword[return] identifier[_vagrant_call] ( identifier[name] , literal[string] , literal[string] , literal[string] , literal[string] ) keyword[else] : identifier[ret] ={ literal[string] : identifier[name] , literal[string] :{}, literal[string] : keyword[True] , literal[string] : literal[string] . identifier[format] ( identifier[name] ) } keyword[try] : identifier[info] = identifier[__salt__] [ literal[string] ]( identifier[name] ) keyword[if] identifier[info] [ literal[int] ][ literal[string] ]!= literal[string] : identifier[__salt__] [ literal[string] ]( identifier[name] ) identifier[ret] [ literal[string] ][ identifier[name] ]= literal[string] identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] ) keyword[except] ( identifier[SaltInvocationError] , identifier[CommandExecutionError] ): identifier[ret] , identifier[kwargs] = identifier[_find_init_change] ( identifier[name] , identifier[ret] ,** identifier[kwargs] ) identifier[kwargs] [ literal[string] ]= keyword[True] identifier[__salt__] [ literal[string] ]( identifier[name] ,** identifier[kwargs] ) identifier[ret] [ literal[string] ][ identifier[name] ]= literal[string] identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] ) keyword[return] identifier[ret]
def running(name, **kwargs):
    """
    Defines and starts a new VM with specified arguments, or restarts a VM (or group of VMs).
    (Runs ``vagrant up``.)

    :param name: the Salt_id node name you wish your VM to have.

    If ``name`` contains a "?" or "*" then it will re-start a group of VMs
    which have been paused or stopped.

    Each machine must be initially started individually using this function
    or the vagrant.init execution module call.

    \\[NOTE:\\] Keyword arguments are silently ignored when re-starting an existing VM.

    Possible keyword arguments:

    - cwd: The directory (path) containing the Vagrantfile
    - machine: ('') the name of the machine (in the Vagrantfile) if not default
    - vagrant_runas: ('root') the username who owns the vagrantbox file
    - vagrant_provider: the provider to run the VM (usually 'virtualbox')
    - vm: ({}) a dictionary containing these or other keyword arguments

    .. code-block:: yaml

      node_name:
        vagrant.running

    .. code-block:: yaml

      node_name:
        vagrant.running:
          - cwd: /projects/my_project
          - vagrant_runas: my_username
          - machine: machine1
    """
    if '*' in name or '?' in name:
        return _vagrant_call(name, 'start', 'restarted', 'Machine has been restarted', 'running') # depends on [control=['if'], data=[]]
    else:
        ret = {'name': name, 'changes': {}, 'result': True, 'comment': '{0} is already running'.format(name)}
        try:
            info = __salt__['vagrant.vm_state'](name)
            if info[0]['state'] != 'running':
                __salt__['vagrant.start'](name)
                ret['changes'][name] = 'Machine started'
                ret['comment'] = 'Node {0} started'.format(name) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
        except (SaltInvocationError, CommandExecutionError):
            # there was no viable existing machine to start
            (ret, kwargs) = _find_init_change(name, ret, **kwargs)
            kwargs['start'] = True
            __salt__['vagrant.init'](name, **kwargs)
            ret['changes'][name] = 'Node defined and started'
            ret['comment'] = 'Node {0} defined and started'.format(name) # depends on [control=['except'], data=[]]
    return ret
def call(self, fn, *args, **kwargs): """ Like :meth:`call_async`, but block until the return value is available. Equivalent to:: call_async(fn, *args, **kwargs).get().unpickle() :returns: The function's return value. :raises mitogen.core.CallError: An exception was raised in the remote context during execution. """ receiver = self.call_async(fn, *args, **kwargs) return receiver.get().unpickle(throw_dead=False)
def function[call, parameter[self, fn]]: constant[ Like :meth:`call_async`, but block until the return value is available. Equivalent to:: call_async(fn, *args, **kwargs).get().unpickle() :returns: The function's return value. :raises mitogen.core.CallError: An exception was raised in the remote context during execution. ] variable[receiver] assign[=] call[name[self].call_async, parameter[name[fn], <ast.Starred object at 0x7da1b1d39f90>]] return[call[call[name[receiver].get, parameter[]].unpickle, parameter[]]]
keyword[def] identifier[call] ( identifier[self] , identifier[fn] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[receiver] = identifier[self] . identifier[call_async] ( identifier[fn] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[receiver] . identifier[get] (). identifier[unpickle] ( identifier[throw_dead] = keyword[False] )
def call(self, fn, *args, **kwargs): """ Like :meth:`call_async`, but block until the return value is available. Equivalent to:: call_async(fn, *args, **kwargs).get().unpickle() :returns: The function's return value. :raises mitogen.core.CallError: An exception was raised in the remote context during execution. """ receiver = self.call_async(fn, *args, **kwargs) return receiver.get().unpickle(throw_dead=False)
def _dy_shapelets(self, shapelets, beta):
    """
    computes the derivative d/dy of the shapelet coeffs
    :param shapelets:
    :param beta:
    :return:
    """
    num_n = len(shapelets)
    dy = np.zeros((num_n+1, num_n+1))
    for n1 in range(num_n):
        for n2 in range(num_n):
            amp = shapelets[n1][n2]
            dy[n1][n2+1] -= np.sqrt((n2+1)/2.) * amp
            if n2 > 0:
                dy[n1][n2-1] += np.sqrt(n2/2.) * amp
    return dy/beta
def function[_dy_shapelets, parameter[self, shapelets, beta]]:
    constant[
    computes the derivative d/dy of the shapelet coeffs
    :param shapelets:
    :param beta:
    :return:
    ]
    variable[num_n] assign[=] call[name[len], parameter[name[shapelets]]]
    variable[dy] assign[=] call[name[np].zeros, parameter[tuple[[<ast.BinOp object at 0x7da1b26ad4b0>, <ast.BinOp object at 0x7da1b26ad360>]]]]
    for taget[name[n1]] in starred[call[name[range], parameter[name[num_n]]]] begin[:]
        for taget[name[n2]] in starred[call[name[range], parameter[name[num_n]]]] begin[:]
            variable[amp] assign[=] call[call[name[shapelets]][name[n1]]][name[n2]]
            <ast.AugAssign object at 0x7da1b04a79a0>
            if compare[name[n2] greater[>] constant[0]] begin[:]
                <ast.AugAssign object at 0x7da2043469e0>
    return[binary_operation[name[dy] / name[beta]]]
keyword[def] identifier[_dy_shapelets] ( identifier[self] , identifier[shapelets] , identifier[beta] ): literal[string] identifier[num_n] = identifier[len] ( identifier[shapelets] ) identifier[dy] = identifier[np] . identifier[zeros] (( identifier[num_n] + literal[int] , identifier[num_n] + literal[int] )) keyword[for] identifier[n1] keyword[in] identifier[range] ( identifier[num_n] ): keyword[for] identifier[n2] keyword[in] identifier[range] ( identifier[num_n] ): identifier[amp] = identifier[shapelets] [ identifier[n1] ][ identifier[n2] ] identifier[dy] [ identifier[n1] ][ identifier[n2] + literal[int] ]-= identifier[np] . identifier[sqrt] (( identifier[n2] + literal[int] )/ literal[int] )* identifier[amp] keyword[if] identifier[n2] > literal[int] : identifier[dy] [ identifier[n1] ][ identifier[n2] - literal[int] ]+= identifier[np] . identifier[sqrt] ( identifier[n2] / literal[int] )* identifier[amp] keyword[return] identifier[dy] / identifier[beta]
def _dy_shapelets(self, shapelets, beta):
    """
    computes the derivative d/dy of the shapelet coeffs
    :param shapelets: 
    :param beta: 
    :return: 
    """
    num_n = len(shapelets)
    dy = np.zeros((num_n + 1, num_n + 1))
    for n1 in range(num_n):
        for n2 in range(num_n):
            amp = shapelets[n1][n2]
            dy[n1][n2 + 1] -= np.sqrt((n2 + 1) / 2.0) * amp
            if n2 > 0:
                dy[n1][n2 - 1] += np.sqrt(n2 / 2.0) * amp # depends on [control=['if'], data=['n2']] # depends on [control=['for'], data=['n2']] # depends on [control=['for'], data=['n1']]
    return dy / beta
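A minimal numeric check of the ladder relation the loop above implements; the coefficient block, the value of beta, and the owning class name are all invented (self is unused by the method):

import numpy as np

# A single unit coefficient at order (n1, n2) = (0, 0).
shapelets = [[1.0, 0.0], [0.0, 0.0]]
beta = 2.0
# The loop sends amp at (n1, n2) to -sqrt((n2+1)/2)*amp at (n1, n2+1)
# and +sqrt(n2/2)*amp at (n1, n2-1), then divides everything by beta,
# so the only non-zero output entry here should be at index (0, 1):
expected = -np.sqrt(1 / 2.) / beta
# e.g. SomeShapeletClass()._dy_shapelets(shapelets, beta)[0][1] == expected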
def attr(self, kw=None, _attributes=None, **attrs): """Add a general or graph/node/edge attribute statement. Args: kw: Attributes target (``None`` or ``'graph'``, ``'node'``, ``'edge'``). attrs: Attributes to be set (must be strings, may be empty). See the :ref:`usage examples in the User Guide <attributes>`. """ if kw is not None and kw.lower() not in ('graph', 'node', 'edge'): raise ValueError('attr statement must target graph, node, or edge: ' '%r' % kw) if attrs or _attributes: if kw is None: a_list = self._a_list(None, attrs, _attributes) line = self._attr_plain % a_list else: attr_list = self._attr_list(None, attrs, _attributes) line = self._attr % (kw, attr_list) self.body.append(line)
def function[attr, parameter[self, kw, _attributes]]: constant[Add a general or graph/node/edge attribute statement. Args: kw: Attributes target (``None`` or ``'graph'``, ``'node'``, ``'edge'``). attrs: Attributes to be set (must be strings, may be empty). See the :ref:`usage examples in the User Guide <attributes>`. ] if <ast.BoolOp object at 0x7da20c6a8be0> begin[:] <ast.Raise object at 0x7da2046216c0> if <ast.BoolOp object at 0x7da204621420> begin[:] if compare[name[kw] is constant[None]] begin[:] variable[a_list] assign[=] call[name[self]._a_list, parameter[constant[None], name[attrs], name[_attributes]]] variable[line] assign[=] binary_operation[name[self]._attr_plain <ast.Mod object at 0x7da2590d6920> name[a_list]] call[name[self].body.append, parameter[name[line]]]
keyword[def] identifier[attr] ( identifier[self] , identifier[kw] = keyword[None] , identifier[_attributes] = keyword[None] ,** identifier[attrs] ): literal[string] keyword[if] identifier[kw] keyword[is] keyword[not] keyword[None] keyword[and] identifier[kw] . identifier[lower] () keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] ): keyword[raise] identifier[ValueError] ( literal[string] literal[string] % identifier[kw] ) keyword[if] identifier[attrs] keyword[or] identifier[_attributes] : keyword[if] identifier[kw] keyword[is] keyword[None] : identifier[a_list] = identifier[self] . identifier[_a_list] ( keyword[None] , identifier[attrs] , identifier[_attributes] ) identifier[line] = identifier[self] . identifier[_attr_plain] % identifier[a_list] keyword[else] : identifier[attr_list] = identifier[self] . identifier[_attr_list] ( keyword[None] , identifier[attrs] , identifier[_attributes] ) identifier[line] = identifier[self] . identifier[_attr] %( identifier[kw] , identifier[attr_list] ) identifier[self] . identifier[body] . identifier[append] ( identifier[line] )
def attr(self, kw=None, _attributes=None, **attrs): """Add a general or graph/node/edge attribute statement. Args: kw: Attributes target (``None`` or ``'graph'``, ``'node'``, ``'edge'``). attrs: Attributes to be set (must be strings, may be empty). See the :ref:`usage examples in the User Guide <attributes>`. """ if kw is not None and kw.lower() not in ('graph', 'node', 'edge'): raise ValueError('attr statement must target graph, node, or edge: %r' % kw) # depends on [control=['if'], data=[]] if attrs or _attributes: if kw is None: a_list = self._a_list(None, attrs, _attributes) line = self._attr_plain % a_list # depends on [control=['if'], data=[]] else: attr_list = self._attr_list(None, attrs, _attributes) line = self._attr % (kw, attr_list) self.body.append(line) # depends on [control=['if'], data=[]]
def start_watching(self): """ Begins watching etcd for changes. """ # Don't create a new watcher thread if we already have one running if self.watcher and self.watcher.is_alive(): return # Create a new watcher thread and start it self.watcher = Watcher() self.watcher.start()
def function[start_watching, parameter[self]]: constant[ Begins watching etcd for changes. ] if <ast.BoolOp object at 0x7da204960d90> begin[:] return[None] name[self].watcher assign[=] call[name[Watcher], parameter[]] call[name[self].watcher.start, parameter[]]
keyword[def] identifier[start_watching] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[watcher] keyword[and] identifier[self] . identifier[watcher] . identifier[is_alive] (): keyword[return] identifier[self] . identifier[watcher] = identifier[Watcher] () identifier[self] . identifier[watcher] . identifier[start] ()
def start_watching(self): """ Begins watching etcd for changes. """ # Don't create a new watcher thread if we already have one running if self.watcher and self.watcher.is_alive(): return # depends on [control=['if'], data=[]] # Create a new watcher thread and start it self.watcher = Watcher() self.watcher.start()
def analyse_text(text):
        """
        Check if code contains REBOL header and so it is probably not R code
        """
        if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE):
            # The code starts with REBOL header
            return 1.0
        elif re.search(r'\s*REBOL\s*\[', text, re.IGNORECASE):
            # The code contains REBOL header but also some text before it
            return 0.5
def function[analyse_text, parameter[text]]: constant[ Check if code contains REBOL header and so it is probably not R code ] if call[name[re].match, parameter[constant[^\s*REBOL\s*\[], name[text], name[re].IGNORECASE]] begin[:] return[constant[1.0]]
keyword[def] identifier[analyse_text] ( identifier[text] ): literal[string] keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[text] , identifier[re] . identifier[IGNORECASE] ): keyword[return] literal[int] keyword[elif] identifier[re] . identifier[search] ( literal[string] , identifier[text] , identifier[re] . identifier[IGNORECASE] ): keyword[return] literal[int]
def analyse_text(text): """ Check if code contains REBOL header and so it is probably not R code """ if re.match('^\\s*REBOL\\s*\\[', text, re.IGNORECASE): # The code starts with REBOL header return 1.0 # depends on [control=['if'], data=[]] elif re.search('\\s*REBOL\\s*\\[', text, re.IGNORECASE): # The code contains REBOL header but also some text before it return 0.5 # depends on [control=['if'], data=[]]
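Spot checks for the heuristic above (reuses the fixed analyse_text with re imported; inputs are made up, and a miss implicitly returns None):

print(analyse_text('REBOL [Title: "demo"]'))             # 1.0 -- header at the start
print(analyse_text('; prelude\nREBOL [Title: "demo"]'))  # 0.5 -- header after other text
print(analyse_text('x <- c(1, 2, 3)'))                   # None -- no REBOL header, likely R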
def get_header(fn, file_format, header_bytes=20000, verbose=False, *args, **kwargs): """Apply rules for detecting the boundary of the header :param str fn: file name :param str file_format: either ``AmiraMesh`` or ``HyperSurface`` :param int header_bytes: number of bytes in which to search for the header [default: 20000] :return str data: the header as per the ``file_format`` """ assert header_bytes > 0 assert file_format in ['AmiraMesh', 'HyperSurface'] with open(fn, 'rb') as f: rough_header = f.read(header_bytes) if file_format == "AmiraMesh": if verbose: print >> sys.stderr, "Using pattern: (?P<data>.*)\\n@1" m = re.search(r'(?P<data>.*)\n@1', rough_header, flags=re.S) elif file_format == "HyperSurface": if verbose: print >> sys.stderr, "Using pattern: (?P<data>.*)\\nVertices [0-9]*\\n" m = re.search(r'(?P<data>.*)\nVertices [0-9]*\n', rough_header, flags=re.S) elif file_format == "Undefined": raise ValueError("Unable to parse undefined file") # select the data data = m.group('data') # print data # print return data
def function[get_header, parameter[fn, file_format, header_bytes, verbose]]: constant[Apply rules for detecting the boundary of the header :param str fn: file name :param str file_format: either ``AmiraMesh`` or ``HyperSurface`` :param int header_bytes: number of bytes in which to search for the header [default: 20000] :return str data: the header as per the ``file_format`` ] assert[compare[name[header_bytes] greater[>] constant[0]]] assert[compare[name[file_format] in list[[<ast.Constant object at 0x7da1b0a4d900>, <ast.Constant object at 0x7da1b0a4d960>]]]] with call[name[open], parameter[name[fn], constant[rb]]] begin[:] variable[rough_header] assign[=] call[name[f].read, parameter[name[header_bytes]]] if compare[name[file_format] equal[==] constant[AmiraMesh]] begin[:] if name[verbose] begin[:] tuple[[<ast.BinOp object at 0x7da1b0a4d3c0>, <ast.Constant object at 0x7da1b0a4d8a0>]] variable[m] assign[=] call[name[re].search, parameter[constant[(?P<data>.*)\n@1], name[rough_header]]] variable[data] assign[=] call[name[m].group, parameter[constant[data]]] return[name[data]]
keyword[def] identifier[get_header] ( identifier[fn] , identifier[file_format] , identifier[header_bytes] = literal[int] , identifier[verbose] = keyword[False] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[assert] identifier[header_bytes] > literal[int] keyword[assert] identifier[file_format] keyword[in] [ literal[string] , literal[string] ] keyword[with] identifier[open] ( identifier[fn] , literal[string] ) keyword[as] identifier[f] : identifier[rough_header] = identifier[f] . identifier[read] ( identifier[header_bytes] ) keyword[if] identifier[file_format] == literal[string] : keyword[if] identifier[verbose] : identifier[print] >> identifier[sys] . identifier[stderr] , literal[string] identifier[m] = identifier[re] . identifier[search] ( literal[string] , identifier[rough_header] , identifier[flags] = identifier[re] . identifier[S] ) keyword[elif] identifier[file_format] == literal[string] : keyword[if] identifier[verbose] : identifier[print] >> identifier[sys] . identifier[stderr] , literal[string] identifier[m] = identifier[re] . identifier[search] ( literal[string] , identifier[rough_header] , identifier[flags] = identifier[re] . identifier[S] ) keyword[elif] identifier[file_format] == literal[string] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[data] = identifier[m] . identifier[group] ( literal[string] ) keyword[return] identifier[data]
def get_header(fn, file_format, header_bytes=20000, verbose=False, *args, **kwargs): """Apply rules for detecting the boundary of the header :param str fn: file name :param str file_format: either ``AmiraMesh`` or ``HyperSurface`` :param int header_bytes: number of bytes in which to search for the header [default: 20000] :return str data: the header as per the ``file_format`` """ assert header_bytes > 0 assert file_format in ['AmiraMesh', 'HyperSurface'] with open(fn, 'rb') as f: rough_header = f.read(header_bytes) if file_format == 'AmiraMesh': if verbose: (print >> sys.stderr, 'Using pattern: (?P<data>.*)\\n@1') # depends on [control=['if'], data=[]] m = re.search('(?P<data>.*)\\n@1', rough_header, flags=re.S) # depends on [control=['if'], data=[]] elif file_format == 'HyperSurface': if verbose: (print >> sys.stderr, 'Using pattern: (?P<data>.*)\\nVertices [0-9]*\\n') # depends on [control=['if'], data=[]] m = re.search('(?P<data>.*)\\nVertices [0-9]*\\n', rough_header, flags=re.S) # depends on [control=['if'], data=[]] elif file_format == 'Undefined': raise ValueError('Unable to parse undefined file') # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['f']] # select the data data = m.group('data') # print data # print return data
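The AmiraMesh boundary rule above, in isolation on a made-up header (note the pattern is greedy under re.S, so it captures up to the last '@1' inside the scanned window):

import re

rough_header = '# AmiraMesh BINARY-LITTLE-ENDIAN 2.1\nParameters { Content "demo" }\n@1\n<raw data>'
m = re.search(r'(?P<data>.*)\n@1', rough_header, flags=re.S)
print(m.group('data'))  # everything before the '@1' data marker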
def info(self, id): """Return the ``Package`` or ``Collection`` record for the given item.""" #self._update_index() # This is commented because it leads to # excessive network load if id in self._packages: return self._packages[id] if id in self._collections: return self._collections[id] self._update_index() # If package is not found, most probably we did not # warm up the cache if id in self._packages: return self._packages[id] if id in self._collections: return self._collections[id] raise ValueError('Package %r not found in index' % id)
def function[info, parameter[self, id]]: constant[Return the ``Package`` or ``Collection`` record for the given item.] if compare[name[id] in name[self]._packages] begin[:] return[call[name[self]._packages][name[id]]] if compare[name[id] in name[self]._collections] begin[:] return[call[name[self]._collections][name[id]]] call[name[self]._update_index, parameter[]] if compare[name[id] in name[self]._packages] begin[:] return[call[name[self]._packages][name[id]]] if compare[name[id] in name[self]._collections] begin[:] return[call[name[self]._collections][name[id]]] <ast.Raise object at 0x7da20c6a88e0>
keyword[def] identifier[info] ( identifier[self] , identifier[id] ): literal[string] keyword[if] identifier[id] keyword[in] identifier[self] . identifier[_packages] : keyword[return] identifier[self] . identifier[_packages] [ identifier[id] ] keyword[if] identifier[id] keyword[in] identifier[self] . identifier[_collections] : keyword[return] identifier[self] . identifier[_collections] [ identifier[id] ] identifier[self] . identifier[_update_index] () keyword[if] identifier[id] keyword[in] identifier[self] . identifier[_packages] : keyword[return] identifier[self] . identifier[_packages] [ identifier[id] ] keyword[if] identifier[id] keyword[in] identifier[self] . identifier[_collections] : keyword[return] identifier[self] . identifier[_collections] [ identifier[id] ] keyword[raise] identifier[ValueError] ( literal[string] % identifier[id] )
def info(self, id): """Return the ``Package`` or ``Collection`` record for the given item.""" #self._update_index() # This is commented because it leads to # excessive network load if id in self._packages: return self._packages[id] # depends on [control=['if'], data=['id']] if id in self._collections: return self._collections[id] # depends on [control=['if'], data=['id']] self._update_index() # If package is not found, most probably we did not # warm up the cache if id in self._packages: return self._packages[id] # depends on [control=['if'], data=['id']] if id in self._collections: return self._collections[id] # depends on [control=['if'], data=['id']] raise ValueError('Package %r not found in index' % id)
def x_pos(self, xval): "Return the x position (on the canvas) corresponding to XVAL." off = self.x_coord.get_canvas_pos(self.size[0], xval, self.x_range[0], self.x_range[1]) return self.loc[0] + off
def function[x_pos, parameter[self, xval]]: constant[Return the x position (on the canvas) corresponding to XVAL.] variable[off] assign[=] call[name[self].x_coord.get_canvas_pos, parameter[call[name[self].size][constant[0]], name[xval], call[name[self].x_range][constant[0]], call[name[self].x_range][constant[1]]]] return[binary_operation[call[name[self].loc][constant[0]] + name[off]]]
keyword[def] identifier[x_pos] ( identifier[self] , identifier[xval] ): literal[string] identifier[off] = identifier[self] . identifier[x_coord] . identifier[get_canvas_pos] ( identifier[self] . identifier[size] [ literal[int] ], identifier[xval] , identifier[self] . identifier[x_range] [ literal[int] ], identifier[self] . identifier[x_range] [ literal[int] ]) keyword[return] identifier[self] . identifier[loc] [ literal[int] ]+ identifier[off]
def x_pos(self, xval): """Return the x position (on the canvas) corresponding to XVAL.""" off = self.x_coord.get_canvas_pos(self.size[0], xval, self.x_range[0], self.x_range[1]) return self.loc[0] + off
async def create_websocket_client(sock: anyio.abc.SocketStream, addr, path: str, headers: Optional[List] = None, subprotocols: Optional[List[str]] = None): """ A more low-level form of create_websocket_client. You are responsible for closing this websocket. """ ws = Websocket() await ws.start_client( sock, addr=addr, path=path, headers=headers, subprotocols=subprotocols) return ws
<ast.AsyncFunctionDef object at 0x7da2046204f0>
keyword[async] keyword[def] identifier[create_websocket_client] ( identifier[sock] : identifier[anyio] . identifier[abc] . identifier[SocketStream] , identifier[addr] , identifier[path] : identifier[str] , identifier[headers] : identifier[Optional] [ identifier[List] ]= keyword[None] , identifier[subprotocols] : identifier[Optional] [ identifier[List] [ identifier[str] ]]= keyword[None] ): literal[string] identifier[ws] = identifier[Websocket] () keyword[await] identifier[ws] . identifier[start_client] ( identifier[sock] , identifier[addr] = identifier[addr] , identifier[path] = identifier[path] , identifier[headers] = identifier[headers] , identifier[subprotocols] = identifier[subprotocols] ) keyword[return] identifier[ws]
async def create_websocket_client(sock: anyio.abc.SocketStream, addr, path: str, headers: Optional[List]=None, subprotocols: Optional[List[str]]=None): """ A more low-level form of create_websocket_client. You are responsible for closing this websocket. """ ws = Websocket() await ws.start_client(sock, addr=addr, path=path, headers=headers, subprotocols=subprotocols) return ws
def video_search(self, entity_type, query, **kwargs):
        """ Search the TV schedule database

        Where ``entity_type`` is a comma-separated list of:

        ``movie``
            Movie

        ``tvseries``
            TV series

        ``episode``
            Episode titles

        ``onetimeonly``
            TV programs

        ``credit``
            People working in TV or movies
        """
        return self.make_request('video', entity_type, query, kwargs)
def function[video_search, parameter[self, entity_type, query]]: constant[ Search the TV schedule database Where ``entity_type`` is a comma-separated list of: ``movie`` Movie ``tvseries`` TV series ``episode`` Episode titles ``onetimeonly`` TV programs ``credit`` People working in TV or movies ] return[call[name[self].make_request, parameter[constant[video], name[entity_type], name[query], name[kwargs]]]]
keyword[def] identifier[video_search] ( identifier[self] , identifier[entity_type] , identifier[query] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[self] . identifier[make_request] ( literal[string] , identifier[entity_type] , identifier[query] , identifier[kwargs] )
def video_search(self, entity_type, query, **kwargs): """ Search the TV schedule database Where ``entity_type`` is a comma-separated list of: ``movie`` Movie ``tvseries`` TV series ``episode`` Episode titles ``onetimeonly`` TV programs ``credit`` People working in TV or movies """ return self.make_request('video', entity_type, query, kwargs)
def replace_all(text, dic): """Takes a string and dictionary. replaces all occurrences of i with j""" for i, j in dic.iteritems(): text = text.replace(i, j) return text
def function[replace_all, parameter[text, dic]]: constant[Takes a string and dictionary. replaces all occurrences of i with j] for taget[tuple[[<ast.Name object at 0x7da1b1f39de0>, <ast.Name object at 0x7da1b1f3bd60>]]] in starred[call[name[dic].iteritems, parameter[]]] begin[:] variable[text] assign[=] call[name[text].replace, parameter[name[i], name[j]]] return[name[text]]
keyword[def] identifier[replace_all] ( identifier[text] , identifier[dic] ): literal[string] keyword[for] identifier[i] , identifier[j] keyword[in] identifier[dic] . identifier[iteritems] (): identifier[text] = identifier[text] . identifier[replace] ( identifier[i] , identifier[j] ) keyword[return] identifier[text]
def replace_all(text, dic): """Takes a string and dictionary. replaces all occurrences of i with j""" for (i, j) in dic.iteritems(): text = text.replace(i, j) # depends on [control=['for'], data=[]] return text
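Usage sketch for the helper above (kept on Python 2 to match the iteritems() call):

print(replace_all('the quick brown fox', {'quick': 'slow', 'brown': 'red'}))
# -> 'the slow red fox'
# Replacements run in dict iteration order, so overlapping keys and values
# can interact; on Python 3 the loop would use dic.items() instead.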
def host(environ): # pragma: no cover """ Reconstruct host from environment. A modified version of http://www.python.org/dev/peps/pep-0333/#url-reconstruction """ url = environ['wsgi.url_scheme'] + '://' if environ.get('HTTP_HOST'): url += environ['HTTP_HOST'] else: url += environ['SERVER_NAME'] if environ['wsgi.url_scheme'] == 'https': if environ['SERVER_PORT'] != '443': url += ':' + environ['SERVER_PORT'] else: if environ['SERVER_PORT'] != '80': url += ':' + environ['SERVER_PORT'] return url + quote(environ.get('SCRIPT_NAME', ''))
def function[host, parameter[environ]]: constant[ Reconstruct host from environment. A modified version of http://www.python.org/dev/peps/pep-0333/#url-reconstruction ] variable[url] assign[=] binary_operation[call[name[environ]][constant[wsgi.url_scheme]] + constant[://]] if call[name[environ].get, parameter[constant[HTTP_HOST]]] begin[:] <ast.AugAssign object at 0x7da20c6c6ce0> return[binary_operation[name[url] + call[name[quote], parameter[call[name[environ].get, parameter[constant[SCRIPT_NAME], constant[]]]]]]]
keyword[def] identifier[host] ( identifier[environ] ): literal[string] identifier[url] = identifier[environ] [ literal[string] ]+ literal[string] keyword[if] identifier[environ] . identifier[get] ( literal[string] ): identifier[url] += identifier[environ] [ literal[string] ] keyword[else] : identifier[url] += identifier[environ] [ literal[string] ] keyword[if] identifier[environ] [ literal[string] ]== literal[string] : keyword[if] identifier[environ] [ literal[string] ]!= literal[string] : identifier[url] += literal[string] + identifier[environ] [ literal[string] ] keyword[else] : keyword[if] identifier[environ] [ literal[string] ]!= literal[string] : identifier[url] += literal[string] + identifier[environ] [ literal[string] ] keyword[return] identifier[url] + identifier[quote] ( identifier[environ] . identifier[get] ( literal[string] , literal[string] ))
def host(environ): # pragma: no cover '\n Reconstruct host from environment. A modified version\n of http://www.python.org/dev/peps/pep-0333/#url-reconstruction\n ' url = environ['wsgi.url_scheme'] + '://' if environ.get('HTTP_HOST'): url += environ['HTTP_HOST'] # depends on [control=['if'], data=[]] else: url += environ['SERVER_NAME'] if environ['wsgi.url_scheme'] == 'https': if environ['SERVER_PORT'] != '443': url += ':' + environ['SERVER_PORT'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif environ['SERVER_PORT'] != '80': url += ':' + environ['SERVER_PORT'] # depends on [control=['if'], data=[]] return url + quote(environ.get('SCRIPT_NAME', ''))
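A hypothetical WSGI environ to trace the branches above (when HTTP_HOST is present the port logic is skipped entirely; quote is assumed to come from urllib):

environ = {
    'wsgi.url_scheme': 'https',
    'HTTP_HOST': 'example.com',
    'SERVER_NAME': 'backend-1',
    'SERVER_PORT': '8443',
    'SCRIPT_NAME': '/app',
}
print(host(environ))  # 'https://example.com/app'
# Without HTTP_HOST the result would be 'https://backend-1:8443/app',
# since 8443 is not the default https port.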
def update(self):
        """Actual update process goes here using auxiliary
        ``get_currencies`` and ``get_exchangerates`` methods. This method
        creates or updates corresponding ``Currency`` and ``ExchangeRate``
        models
        """
        currencies = self.get_currencies()

        currency_objects = {}
        for code, name in currencies:
            currency_objects[code], created = Currency.objects.get_or_create(
                code=code, defaults={'name': name})
            if created:
                logger.info('currency: %s created', code)

        existing = ExchangeRate.objects.values('source__code',
                                               'target__code',
                                               'id')
        existing = {(d['source__code'], d['target__code']): d['id']
                    for d in existing}

        usd_exchange_rates = dict(self.get_exchangerates('USD'))

        updates = []
        inserts = []
        for source in currencies:
            for target in currencies:
                rate = self._get_rate_through_usd(source.code, target.code,
                                                  usd_exchange_rates)
                exchange_rate = ExchangeRate(source=currency_objects[source.code],
                                             target=currency_objects[target.code],
                                             rate=rate)
                if (source.code, target.code) in existing:
                    exchange_rate.id = existing[(source.code, target.code)]
                    updates.append(exchange_rate)
                    logger.debug('exchange rate updated %s/%s=%s'
                                 % (source, target, rate))
                else:
                    inserts.append(exchange_rate)
                    logger.debug('exchange rate created %s/%s=%s'
                                 % (source, target, rate))
            logger.info('exchange rates updated for %s' % source.code)

        logger.info("Updating %s rows" % len(updates))
        update_many(updates)
        logger.info("Inserting %s rows" % len(inserts))
        insert_many(inserts)

        logger.info('saved rates to db')
def function[update, parameter[self]]: constant[Actual update process goes here using auxiliary ``get_currencies`` and ``get_exchangerates`` methods. This method creates or updates corresponding ``Currency`` and ``ExchangeRate`` models ] variable[currencies] assign[=] call[name[self].get_currencies, parameter[]] variable[currency_objects] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da20e954430>, <ast.Name object at 0x7da20e956b60>]]] in starred[name[currencies]] begin[:] <ast.Tuple object at 0x7da20e954520> assign[=] call[name[Currency].objects.get_or_create, parameter[]] if name[created] begin[:] call[name[logger].info, parameter[constant[currency: %s created], name[code]]] variable[existing] assign[=] call[name[ExchangeRate].objects.values, parameter[constant[source__code], constant[target__code], constant[id]]] variable[existing] assign[=] <ast.DictComp object at 0x7da20e9575b0> variable[usd_exchange_rates] assign[=] call[name[dict], parameter[call[name[self].get_exchangerates, parameter[constant[USD]]]]] variable[updates] assign[=] list[[]] variable[inserts] assign[=] list[[]] for taget[name[source]] in starred[name[currencies]] begin[:] for taget[name[target]] in starred[name[currencies]] begin[:] variable[rate] assign[=] call[name[self]._get_rate_through_usd, parameter[name[source].code, name[target].code, name[usd_exchange_rates]]] variable[exchange_rate] assign[=] call[name[ExchangeRate], parameter[]] if compare[tuple[[<ast.Attribute object at 0x7da1b1be9390>, <ast.Attribute object at 0x7da1b26af010>]] in name[existing]] begin[:] name[exchange_rate].id assign[=] call[name[existing]][tuple[[<ast.Attribute object at 0x7da1b26acaf0>, <ast.Attribute object at 0x7da1b26ac820>]]] call[name[updates].append, parameter[name[exchange_rate]]] call[name[logger].debug, parameter[binary_operation[constant[exchange rate updated %s/%s=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e9564d0>, <ast.Name object at 0x7da20e957490>, <ast.Name object at 0x7da20e957430>]]]]] call[name[logger].info, parameter[binary_operation[constant[exchange rates updated for %s] <ast.Mod object at 0x7da2590d6920> name[source].code]]] call[name[logger].info, parameter[binary_operation[constant[Updating %s rows] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[updates]]]]]] call[name[update_many], parameter[name[updates]]] call[name[logger].info, parameter[binary_operation[constant[Inserting %s rows] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[inserts]]]]]] call[name[insert_many], parameter[name[inserts]]] call[name[logger].info, parameter[constant[saved rates to db]]]
keyword[def] identifier[update] ( identifier[self] ): literal[string] identifier[currencies] = identifier[self] . identifier[get_currencies] () identifier[currency_objects] ={} keyword[for] identifier[code] , identifier[name] keyword[in] identifier[currencies] : identifier[currency_objects] [ identifier[code] ], identifier[created] = identifier[Currency] . identifier[objects] . identifier[get_or_create] ( identifier[code] = identifier[code] , identifier[defaults] ={ literal[string] : identifier[name] }) keyword[if] identifier[created] : identifier[logger] . identifier[info] ( literal[string] , identifier[code] ) identifier[existing] = identifier[ExchangeRate] . identifier[objects] . identifier[values] ( literal[string] , literal[string] , literal[string] ) identifier[existing] ={( identifier[d] [ literal[string] ], identifier[d] [ literal[string] ]): identifier[d] [ literal[string] ] keyword[for] identifier[d] keyword[in] identifier[existing] } identifier[usd_exchange_rates] = identifier[dict] ( identifier[self] . identifier[get_exchangerates] ( literal[string] )) identifier[updates] =[] identifier[inserts] =[] keyword[for] identifier[source] keyword[in] identifier[currencies] : keyword[for] identifier[target] keyword[in] identifier[currencies] : identifier[rate] = identifier[self] . identifier[_get_rate_through_usd] ( identifier[source] . identifier[code] , identifier[target] . identifier[code] , identifier[usd_exchange_rates] ) identifier[exchange_rate] = identifier[ExchangeRate] ( identifier[source] = identifier[currency_objects] [ identifier[source] . identifier[code] ], identifier[target] = identifier[currency_objects] [ identifier[target] . identifier[code] ], identifier[rate] = identifier[rate] ) keyword[if] ( identifier[source] . identifier[code] , identifier[target] . identifier[code] ) keyword[in] identifier[existing] : identifier[exchange_rate] . identifier[id] = identifier[existing] [( identifier[source] . identifier[code] , identifier[target] . identifier[code] )] identifier[updates] . identifier[append] ( identifier[exchange_rate] ) identifier[logger] . identifier[debug] ( literal[string] %( identifier[source] , identifier[target] , identifier[rate] )) keyword[else] : identifier[inserts] . identifier[append] ( identifier[exchange_rate] ) identifier[logger] . identifier[debug] ( literal[string] %( identifier[source] , identifier[target] , identifier[rate] )) identifier[logger] . identifier[info] ( literal[string] % identifier[source] . identifier[code] ) identifier[logger] . identifier[info] ( literal[string] % identifier[len] ( identifier[updates] )) identifier[update_many] ( identifier[updates] ) identifier[logger] . identifier[info] ( literal[string] % identifier[len] ( identifier[inserts] )) identifier[insert_many] ( identifier[inserts] ) identifier[logger] . identifier[info] ( literal[string] )
def update(self): """Actual update process goes here using auxiliary ``get_currencies`` and ``get_exchangerates`` methods. This method creates or updates corresponding ``Currency`` and ``ExchangeRate`` models """ currencies = self.get_currencies() currency_objects = {} for (code, name) in currencies: (currency_objects[code], created) = Currency.objects.get_or_create(code=code, defaults={'name': name}) if created: logger.info('currency: %s created', code) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] existing = ExchangeRate.objects.values('source__code', 'target__code', 'id') existing = {(d['source__code'], d['target__code']): d['id'] for d in existing} usd_exchange_rates = dict(self.get_exchangerates('USD')) updates = [] inserts = [] for source in currencies: for target in currencies: rate = self._get_rate_through_usd(source.code, target.code, usd_exchange_rates) exchange_rate = ExchangeRate(source=currency_objects[source.code], target=currency_objects[target.code], rate=rate) if (source.code, target.code) in existing: exchange_rate.id = existing[source.code, target.code] updates.append(exchange_rate) logger.debug('exchange rate updated %s/%s=%s' % (source, target, rate)) # depends on [control=['if'], data=['existing']] else: inserts.append(exchange_rate) logger.debug('exchange rate created %s/%s=%s' % (source, target, rate)) # depends on [control=['for'], data=['target']] logger.info('exchange rates updated for %s' % source.code) # depends on [control=['for'], data=['source']] logger.info('Updating %s rows' % len(updates)) update_many(updates) logger.info('Inserting %s rows' % len(inserts)) insert_many(inserts) logger.info('saved rates to db')
def export_vms( self, vms_names=None, standalone=False, export_dir='.', compress=False, init_file_name='LagoInitFile', out_format=YAMLOutFormatPlugin(), collect_only=False, with_threads=True, ): """ Export vm images disks and init file. The exported images and init file can be used to recreate the environment. Args: vms_names(list of str): Names of the vms to export, if None export all the vms in the env (default=None) standalone(bool): If false, export a layered image (default=False) export_dir(str): Dir to place the exported images and init file compress(bool): If True compress the images with xz (default=False) init_file_name(str): The name of the exported init file (default='LagoInitfile') out_format(:class:`lago.plugins.output.OutFormatPlugin`): The type of the exported init file (the default is yaml) collect_only(bool): If True, return only a mapping from vm name to the disks that will be exported. (default=False) with_threads(bool): If True, run the export in parallel (default=True) Returns Unless collect_only == True, a mapping between vms' disks. """ return self.virt_env.export_vms( vms_names, standalone, export_dir, compress, init_file_name, out_format, collect_only, with_threads )
def function[export_vms, parameter[self, vms_names, standalone, export_dir, compress, init_file_name, out_format, collect_only, with_threads]]: constant[ Export vm images disks and init file. The exported images and init file can be used to recreate the environment. Args: vms_names(list of str): Names of the vms to export, if None export all the vms in the env (default=None) standalone(bool): If false, export a layered image (default=False) export_dir(str): Dir to place the exported images and init file compress(bool): If True compress the images with xz (default=False) init_file_name(str): The name of the exported init file (default='LagoInitfile') out_format(:class:`lago.plugins.output.OutFormatPlugin`): The type of the exported init file (the default is yaml) collect_only(bool): If True, return only a mapping from vm name to the disks that will be exported. (default=False) with_threads(bool): If True, run the export in parallel (default=True) Returns Unless collect_only == True, a mapping between vms' disks. ] return[call[name[self].virt_env.export_vms, parameter[name[vms_names], name[standalone], name[export_dir], name[compress], name[init_file_name], name[out_format], name[collect_only], name[with_threads]]]]
keyword[def] identifier[export_vms] ( identifier[self] , identifier[vms_names] = keyword[None] , identifier[standalone] = keyword[False] , identifier[export_dir] = literal[string] , identifier[compress] = keyword[False] , identifier[init_file_name] = literal[string] , identifier[out_format] = identifier[YAMLOutFormatPlugin] (), identifier[collect_only] = keyword[False] , identifier[with_threads] = keyword[True] , ): literal[string] keyword[return] identifier[self] . identifier[virt_env] . identifier[export_vms] ( identifier[vms_names] , identifier[standalone] , identifier[export_dir] , identifier[compress] , identifier[init_file_name] , identifier[out_format] , identifier[collect_only] , identifier[with_threads] )
def export_vms(self, vms_names=None, standalone=False, export_dir='.', compress=False, init_file_name='LagoInitFile', out_format=YAMLOutFormatPlugin(), collect_only=False, with_threads=True): """ Export vm images disks and init file. The exported images and init file can be used to recreate the environment. Args: vms_names(list of str): Names of the vms to export, if None export all the vms in the env (default=None) standalone(bool): If false, export a layered image (default=False) export_dir(str): Dir to place the exported images and init file compress(bool): If True compress the images with xz (default=False) init_file_name(str): The name of the exported init file (default='LagoInitfile') out_format(:class:`lago.plugins.output.OutFormatPlugin`): The type of the exported init file (the default is yaml) collect_only(bool): If True, return only a mapping from vm name to the disks that will be exported. (default=False) with_threads(bool): If True, run the export in parallel (default=True) Returns Unless collect_only == True, a mapping between vms' disks. """ return self.virt_env.export_vms(vms_names, standalone, export_dir, compress, init_file_name, out_format, collect_only, with_threads)
def to_binary(decimal, length): """ Given a decimal, generate the binary equivalent string of given length. e.g. binary(2, 5) = 00010 """ b = bitstring.Bits(int=decimal, length=length) return b.bin
def function[to_binary, parameter[decimal, length]]: constant[ Given a decimal, generate the binary equivalent string of given length. e.g. binary(2, 5) = 00010 ] variable[b] assign[=] call[name[bitstring].Bits, parameter[]] return[name[b].bin]
keyword[def] identifier[to_binary] ( identifier[decimal] , identifier[length] ): literal[string] identifier[b] = identifier[bitstring] . identifier[Bits] ( identifier[int] = identifier[decimal] , identifier[length] = identifier[length] ) keyword[return] identifier[b] . identifier[bin]
def to_binary(decimal, length): """ Given a decimal, generate the binary equivalent string of given length. e.g. binary(2, 5) = 00010 """ b = bitstring.Bits(int=decimal, length=length) return b.bin
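Spot checks for the conversion above (assumes the bitstring package; its int= initializer is signed two's complement, which is why length matters):

print(to_binary(2, 5))   # '00010', as in the docstring
print(to_binary(-1, 5))  # '11111' -- two's complement
print(to_binary(-2, 8))  # '11111110'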
def format(self, show_enabled=True): '''format display item''' what = '' if show_enabled: if self.enabled: what += ' y ' else: what += ' n ' pass pass if self.fmt: what += self.fmt + ' ' pass what += self.arg return '%3d: %s' % (self.number, what)
def function[format, parameter[self, show_enabled]]: constant[format display item] variable[what] assign[=] constant[] if name[show_enabled] begin[:] if name[self].enabled begin[:] <ast.AugAssign object at 0x7da18c4cd390> pass if name[self].fmt begin[:] <ast.AugAssign object at 0x7da18c4cc220> pass <ast.AugAssign object at 0x7da1b05bd5a0> return[binary_operation[constant[%3d: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b05bfe20>, <ast.Name object at 0x7da1b05bf070>]]]]
keyword[def] identifier[format] ( identifier[self] , identifier[show_enabled] = keyword[True] ): literal[string] identifier[what] = literal[string] keyword[if] identifier[show_enabled] : keyword[if] identifier[self] . identifier[enabled] : identifier[what] += literal[string] keyword[else] : identifier[what] += literal[string] keyword[pass] keyword[pass] keyword[if] identifier[self] . identifier[fmt] : identifier[what] += identifier[self] . identifier[fmt] + literal[string] keyword[pass] identifier[what] += identifier[self] . identifier[arg] keyword[return] literal[string] %( identifier[self] . identifier[number] , identifier[what] )
def format(self, show_enabled=True): """format display item""" what = '' if show_enabled: if self.enabled: what += ' y ' # depends on [control=['if'], data=[]] else: what += ' n ' pass pass # depends on [control=['if'], data=[]] if self.fmt: what += self.fmt + ' ' pass # depends on [control=['if'], data=[]] what += self.arg return '%3d: %s' % (self.number, what)
async def login(self, token, *, bot=True): """|coro| Logs in the client with the specified credentials. This function can be used in two different ways. .. warning:: Logging on with a user token is against the Discord `Terms of Service <https://support.discordapp.com/hc/en-us/articles/115002192352>`_ and doing so might potentially get your account banned. Use this at your own risk. Parameters ----------- token: :class:`str` The authentication token. Do not prefix this token with anything as the library will do it for you. bot: :class:`bool` Keyword argument that specifies if the account logging on is a bot token or not. Raises ------ LoginFailure The wrong credentials are passed. HTTPException An unknown HTTP related error occurred, usually when it isn't 200 or the known incorrect credentials passing status code. """ log.info('logging in using static token') await self.http.static_login(token, bot=bot) self._connection.is_bot = bot
<ast.AsyncFunctionDef object at 0x7da1b1f8b580>
keyword[async] keyword[def] identifier[login] ( identifier[self] , identifier[token] ,*, identifier[bot] = keyword[True] ): literal[string] identifier[log] . identifier[info] ( literal[string] ) keyword[await] identifier[self] . identifier[http] . identifier[static_login] ( identifier[token] , identifier[bot] = identifier[bot] ) identifier[self] . identifier[_connection] . identifier[is_bot] = identifier[bot]
async def login(self, token, *, bot=True): """|coro| Logs in the client with the specified credentials. This function can be used in two different ways. .. warning:: Logging on with a user token is against the Discord `Terms of Service <https://support.discordapp.com/hc/en-us/articles/115002192352>`_ and doing so might potentially get your account banned. Use this at your own risk. Parameters ----------- token: :class:`str` The authentication token. Do not prefix this token with anything as the library will do it for you. bot: :class:`bool` Keyword argument that specifies if the account logging on is a bot token or not. Raises ------ LoginFailure The wrong credentials are passed. HTTPException An unknown HTTP related error occurred, usually when it isn't 200 or the known incorrect credentials passing status code. """ log.info('logging in using static token') await self.http.static_login(token, bot=bot) self._connection.is_bot = bot
def check_ipv4(ip_str): """ Return True if is a valid IP v4 """ try: socket.inet_pton(socket.AF_INET, ip_str) except AttributeError: try: socket.inet_aton(ip_str) except socket.error: return False return ip_str.count('.') == 3 except socket.error: return False return True
def function[check_ipv4, parameter[ip_str]]: constant[ Return True if is a valid IP v4 ] <ast.Try object at 0x7da1b15f4340> return[constant[True]]
keyword[def] identifier[check_ipv4] ( identifier[ip_str] ): literal[string] keyword[try] : identifier[socket] . identifier[inet_pton] ( identifier[socket] . identifier[AF_INET] , identifier[ip_str] ) keyword[except] identifier[AttributeError] : keyword[try] : identifier[socket] . identifier[inet_aton] ( identifier[ip_str] ) keyword[except] identifier[socket] . identifier[error] : keyword[return] keyword[False] keyword[return] identifier[ip_str] . identifier[count] ( literal[string] )== literal[int] keyword[except] identifier[socket] . identifier[error] : keyword[return] keyword[False] keyword[return] keyword[True]
def check_ipv4(ip_str): """ Return True if is a valid IP v4 """ try: socket.inet_pton(socket.AF_INET, ip_str) # depends on [control=['try'], data=[]] except AttributeError: try: socket.inet_aton(ip_str) # depends on [control=['try'], data=[]] except socket.error: return False # depends on [control=['except'], data=[]] return ip_str.count('.') == 3 # depends on [control=['except'], data=[]] except socket.error: return False # depends on [control=['except'], data=[]] return True
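Illustrative results for the validator above (socket import assumed):

print(check_ipv4('192.168.0.1'))  # True
print(check_ipv4('256.1.1.1'))    # False -- inet_pton rejects out-of-range octets
print(check_ipv4('::1'))          # False -- IPv6 is not AF_INET
print(check_ipv4('1.2.3'))        # False -- the inet_aton fallback would also
                                  #   reject it via the '.'-count check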
def arp_access_list_permit_permit_list_host_mac(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") arp = ET.SubElement(config, "arp", xmlns="urn:brocade.com:mgmt:brocade-dai") access_list = ET.SubElement(arp, "access-list") acl_name_key = ET.SubElement(access_list, "acl-name") acl_name_key.text = kwargs.pop('acl_name') permit = ET.SubElement(access_list, "permit") permit_list = ET.SubElement(permit, "permit-list") ip_type_key = ET.SubElement(permit_list, "ip-type") ip_type_key.text = kwargs.pop('ip_type') host_ip_key = ET.SubElement(permit_list, "host-ip") host_ip_key.text = kwargs.pop('host_ip') mac_type_key = ET.SubElement(permit_list, "mac-type") mac_type_key.text = kwargs.pop('mac_type') host_mac = ET.SubElement(permit_list, "host-mac") host_mac.text = kwargs.pop('host_mac') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[arp_access_list_permit_permit_list_host_mac, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[arp] assign[=] call[name[ET].SubElement, parameter[name[config], constant[arp]]] variable[access_list] assign[=] call[name[ET].SubElement, parameter[name[arp], constant[access-list]]] variable[acl_name_key] assign[=] call[name[ET].SubElement, parameter[name[access_list], constant[acl-name]]] name[acl_name_key].text assign[=] call[name[kwargs].pop, parameter[constant[acl_name]]] variable[permit] assign[=] call[name[ET].SubElement, parameter[name[access_list], constant[permit]]] variable[permit_list] assign[=] call[name[ET].SubElement, parameter[name[permit], constant[permit-list]]] variable[ip_type_key] assign[=] call[name[ET].SubElement, parameter[name[permit_list], constant[ip-type]]] name[ip_type_key].text assign[=] call[name[kwargs].pop, parameter[constant[ip_type]]] variable[host_ip_key] assign[=] call[name[ET].SubElement, parameter[name[permit_list], constant[host-ip]]] name[host_ip_key].text assign[=] call[name[kwargs].pop, parameter[constant[host_ip]]] variable[mac_type_key] assign[=] call[name[ET].SubElement, parameter[name[permit_list], constant[mac-type]]] name[mac_type_key].text assign[=] call[name[kwargs].pop, parameter[constant[mac_type]]] variable[host_mac] assign[=] call[name[ET].SubElement, parameter[name[permit_list], constant[host-mac]]] name[host_mac].text assign[=] call[name[kwargs].pop, parameter[constant[host_mac]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[arp_access_list_permit_permit_list_host_mac] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[arp] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[access_list] = identifier[ET] . identifier[SubElement] ( identifier[arp] , literal[string] ) identifier[acl_name_key] = identifier[ET] . identifier[SubElement] ( identifier[access_list] , literal[string] ) identifier[acl_name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[permit] = identifier[ET] . identifier[SubElement] ( identifier[access_list] , literal[string] ) identifier[permit_list] = identifier[ET] . identifier[SubElement] ( identifier[permit] , literal[string] ) identifier[ip_type_key] = identifier[ET] . identifier[SubElement] ( identifier[permit_list] , literal[string] ) identifier[ip_type_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[host_ip_key] = identifier[ET] . identifier[SubElement] ( identifier[permit_list] , literal[string] ) identifier[host_ip_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[mac_type_key] = identifier[ET] . identifier[SubElement] ( identifier[permit_list] , literal[string] ) identifier[mac_type_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[host_mac] = identifier[ET] . identifier[SubElement] ( identifier[permit_list] , literal[string] ) identifier[host_mac] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def arp_access_list_permit_permit_list_host_mac(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') arp = ET.SubElement(config, 'arp', xmlns='urn:brocade.com:mgmt:brocade-dai') access_list = ET.SubElement(arp, 'access-list') acl_name_key = ET.SubElement(access_list, 'acl-name') acl_name_key.text = kwargs.pop('acl_name') permit = ET.SubElement(access_list, 'permit') permit_list = ET.SubElement(permit, 'permit-list') ip_type_key = ET.SubElement(permit_list, 'ip-type') ip_type_key.text = kwargs.pop('ip_type') host_ip_key = ET.SubElement(permit_list, 'host-ip') host_ip_key.text = kwargs.pop('host_ip') mac_type_key = ET.SubElement(permit_list, 'mac-type') mac_type_key.text = kwargs.pop('mac_type') host_mac = ET.SubElement(permit_list, 'host-mac') host_mac.text = kwargs.pop('host_mac') callback = kwargs.pop('callback', self._callback) return callback(config)
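An illustrative invocation of the builder above, capturing the generated payload instead of sending it. The wrapper class, the values, and the module-level availability of the function are all assumptions for this sketch:

import xml.etree.ElementTree as ET

class _Demo(object):
    _callback = None  # unused here; we pass callback explicitly
    build = arp_access_list_permit_permit_list_host_mac  # the function above

payload = _Demo().build(
    acl_name='acl1', ip_type='ip', host_ip='10.0.0.5',
    mac_type='mac', host_mac='0011.2233.4455',
    callback=ET.tostring)  # override the default callback to inspect the XML
print(payload)  # <config><arp ...><access-list><acl-name>acl1</acl-name>...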
def label_store(self): '''Return a thread local :class:`dossier.label.LabelStore` client.''' if self._label_store is None: config = global_config('dossier.label') if 'kvlayer' in config: kvl = kvlayer.client(config=config['kvlayer']) self._label_store = LabelStore(kvl) else: self._label_store = self.create(LabelStore, config=config) return self._label_store
def function[label_store, parameter[self]]: constant[Return a thread local :class:`dossier.label.LabelStore` client.] if compare[name[self]._label_store is constant[None]] begin[:] variable[config] assign[=] call[name[global_config], parameter[constant[dossier.label]]] if compare[constant[kvlayer] in name[config]] begin[:] variable[kvl] assign[=] call[name[kvlayer].client, parameter[]] name[self]._label_store assign[=] call[name[LabelStore], parameter[name[kvl]]] return[name[self]._label_store]
keyword[def] identifier[label_store] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_label_store] keyword[is] keyword[None] : identifier[config] = identifier[global_config] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[config] : identifier[kvl] = identifier[kvlayer] . identifier[client] ( identifier[config] = identifier[config] [ literal[string] ]) identifier[self] . identifier[_label_store] = identifier[LabelStore] ( identifier[kvl] ) keyword[else] : identifier[self] . identifier[_label_store] = identifier[self] . identifier[create] ( identifier[LabelStore] , identifier[config] = identifier[config] ) keyword[return] identifier[self] . identifier[_label_store]
def label_store(self): """Return a thread local :class:`dossier.label.LabelStore` client.""" if self._label_store is None: config = global_config('dossier.label') if 'kvlayer' in config: kvl = kvlayer.client(config=config['kvlayer']) self._label_store = LabelStore(kvl) # depends on [control=['if'], data=['config']] else: self._label_store = self.create(LabelStore, config=config) # depends on [control=['if'], data=[]] return self._label_store
def purge(self, obj, email_addresses=None): """ Removes a CDN-enabled object from public access before the TTL expires. Please note that there is a limit (at this time) of 25 such requests; if you need to purge more than that, you must contact support. If one or more email_addresses are included, an email confirming the purge is sent to each address. """ cname = utils.get_name(self.container) oname = utils.get_name(obj) headers = {} if email_addresses: email_addresses = utils.coerce_to_list(email_addresses) headers["X-Purge-Email"] = ", ".join(email_addresses) uri = "/%s/%s" % (cname, oname) resp, resp_body = self.api.cdn_request(uri, method="DELETE", headers=headers)
def function[purge, parameter[self, obj, email_addresses]]: constant[ Removes a CDN-enabled object from public access before the TTL expires. Please note that there is a limit (at this time) of 25 such requests; if you need to purge more than that, you must contact support. If one or more email_addresses are included, an email confirming the purge is sent to each address. ] variable[cname] assign[=] call[name[utils].get_name, parameter[name[self].container]] variable[oname] assign[=] call[name[utils].get_name, parameter[name[obj]]] variable[headers] assign[=] dictionary[[], []] if name[email_addresses] begin[:] variable[email_addresses] assign[=] call[name[utils].coerce_to_list, parameter[name[email_addresses]]] call[name[headers]][constant[X-Purge-Email]] assign[=] call[constant[, ].join, parameter[name[email_addresses]]] variable[uri] assign[=] binary_operation[constant[/%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0558eb0>, <ast.Name object at 0x7da1b055a7d0>]]] <ast.Tuple object at 0x7da1b055b940> assign[=] call[name[self].api.cdn_request, parameter[name[uri]]]
keyword[def] identifier[purge] ( identifier[self] , identifier[obj] , identifier[email_addresses] = keyword[None] ): literal[string] identifier[cname] = identifier[utils] . identifier[get_name] ( identifier[self] . identifier[container] ) identifier[oname] = identifier[utils] . identifier[get_name] ( identifier[obj] ) identifier[headers] ={} keyword[if] identifier[email_addresses] : identifier[email_addresses] = identifier[utils] . identifier[coerce_to_list] ( identifier[email_addresses] ) identifier[headers] [ literal[string] ]= literal[string] . identifier[join] ( identifier[email_addresses] ) identifier[uri] = literal[string] %( identifier[cname] , identifier[oname] ) identifier[resp] , identifier[resp_body] = identifier[self] . identifier[api] . identifier[cdn_request] ( identifier[uri] , identifier[method] = literal[string] , identifier[headers] = identifier[headers] )
def purge(self, obj, email_addresses=None): """ Removes a CDN-enabled object from public access before the TTL expires. Please note that there is a limit (at this time) of 25 such requests; if you need to purge more than that, you must contact support. If one or more email_addresses are included, an email confirming the purge is sent to each address. """ cname = utils.get_name(self.container) oname = utils.get_name(obj) headers = {} if email_addresses: email_addresses = utils.coerce_to_list(email_addresses) headers['X-Purge-Email'] = ', '.join(email_addresses) # depends on [control=['if'], data=[]] uri = '/%s/%s' % (cname, oname) (resp, resp_body) = self.api.cdn_request(uri, method='DELETE', headers=headers)
def listThirdPartyLibs(self, configuration = 'Development'): """ Lists the supported Unreal-bundled third-party libraries """ interrogator = self._getUE4BuildInterrogator() return interrogator.list(self.getPlatformIdentifier(), configuration, self._getLibraryOverrides())
def function[listThirdPartyLibs, parameter[self, configuration]]: constant[ Lists the supported Unreal-bundled third-party libraries ] variable[interrogator] assign[=] call[name[self]._getUE4BuildInterrogator, parameter[]] return[call[name[interrogator].list, parameter[call[name[self].getPlatformIdentifier, parameter[]], name[configuration], call[name[self]._getLibraryOverrides, parameter[]]]]]
keyword[def] identifier[listThirdPartyLibs] ( identifier[self] , identifier[configuration] = literal[string] ): literal[string] identifier[interrogator] = identifier[self] . identifier[_getUE4BuildInterrogator] () keyword[return] identifier[interrogator] . identifier[list] ( identifier[self] . identifier[getPlatformIdentifier] (), identifier[configuration] , identifier[self] . identifier[_getLibraryOverrides] ())
def listThirdPartyLibs(self, configuration='Development'): """ Lists the supported Unreal-bundled third-party libraries """ interrogator = self._getUE4BuildInterrogator() return interrogator.list(self.getPlatformIdentifier(), configuration, self._getLibraryOverrides())
def WriteFlowLogEntries(self, entries):
    """Writes flow log entries for a given flow."""
    flow_ids = [(e.client_id, e.flow_id) for e in entries]
    for f in flow_ids:
      if f not in self.flows:
        raise db.AtLeastOneUnknownFlowError(flow_ids)

    for e in entries:
      dest = self.flow_log_entries.setdefault((e.client_id, e.flow_id), [])
      to_write = e.Copy()
      to_write.timestamp = rdfvalue.RDFDatetime.Now()
      dest.append(to_write)
def function[WriteFlowLogEntries, parameter[self, entries]]: constant[Writes flow log entries for a given flow.] variable[flow_ids] assign[=] <ast.ListComp object at 0x7da1b1b47f40> for taget[name[f]] in starred[name[flow_ids]] begin[:] if compare[name[f] <ast.NotIn object at 0x7da2590d7190> name[self].flows] begin[:] <ast.Raise object at 0x7da1b1b440d0> for taget[name[e]] in starred[name[entries]] begin[:] variable[dest] assign[=] call[name[self].flow_log_entries.setdefault, parameter[tuple[[<ast.Attribute object at 0x7da1b1c0df90>, <ast.Attribute object at 0x7da1b1c0df00>]], list[[]]]] variable[to_write] assign[=] call[name[e].Copy, parameter[]] name[to_write].timestamp assign[=] call[name[rdfvalue].RDFDatetime.Now, parameter[]] call[name[dest].append, parameter[name[to_write]]]
keyword[def] identifier[WriteFlowLogEntries] ( identifier[self] , identifier[entries] ): literal[string] identifier[flow_ids] =[( identifier[e] . identifier[client_id] , identifier[e] . identifier[flow_id] ) keyword[for] identifier[e] keyword[in] identifier[entries] ] keyword[for] identifier[f] keyword[in] identifier[flow_ids] : keyword[if] identifier[f] keyword[not] keyword[in] identifier[self] . identifier[flows] : keyword[raise] identifier[db] . identifier[AtLeastOneUnknownFlowError] ( identifier[flow_ids] ) keyword[for] identifier[e] keyword[in] identifier[entries] : identifier[dest] = identifier[self] . identifier[flow_log_entries] . identifier[setdefault] (( identifier[e] . identifier[client_id] , identifier[e] . identifier[flow_id] ),[]) identifier[to_write] = identifier[e] . identifier[Copy] () identifier[to_write] . identifier[timestamp] = identifier[rdfvalue] . identifier[RDFDatetime] . identifier[Now] () identifier[dest] . identifier[append] ( identifier[to_write] )
def WriteFlowLogEntries(self, entries): """Writes flow log entries for a given flow.""" flow_ids = [(e.client_id, e.flow_id) for e in entries] for f in flow_ids: if f not in self.flows: raise db.AtLeastOneUnknownFlowError(flow_ids) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']] for e in entries: dest = self.flow_log_entries.setdefault((e.client_id, e.flow_id), []) to_write = e.Copy() to_write.timestamp = rdfvalue.RDFDatetime.Now() dest.append(to_write) # depends on [control=['for'], data=['e']]
def ensure_newline(self, n): """Make sure there are 'n' line breaks at the end.""" assert n >= 0 text = self._output.getvalue().rstrip('\n') if not text: return self._output = StringIO() self._output.write(text) self._output.write('\n' * n) text = self._output.getvalue() assert text[-n-1] != '\n' assert text[-n:] == '\n' * n
def function[ensure_newline, parameter[self, n]]: constant[Make sure there are 'n' line breaks at the end.] assert[compare[name[n] greater_or_equal[>=] constant[0]]] variable[text] assign[=] call[call[name[self]._output.getvalue, parameter[]].rstrip, parameter[constant[ ]]] if <ast.UnaryOp object at 0x7da18f722710> begin[:] return[None] name[self]._output assign[=] call[name[StringIO], parameter[]] call[name[self]._output.write, parameter[name[text]]] call[name[self]._output.write, parameter[binary_operation[constant[ ] * name[n]]]] variable[text] assign[=] call[name[self]._output.getvalue, parameter[]] assert[compare[call[name[text]][binary_operation[<ast.UnaryOp object at 0x7da18f723070> - constant[1]]] not_equal[!=] constant[ ]]] assert[compare[call[name[text]][<ast.Slice object at 0x7da18f721c30>] equal[==] binary_operation[constant[ ] * name[n]]]]
keyword[def] identifier[ensure_newline] ( identifier[self] , identifier[n] ): literal[string] keyword[assert] identifier[n] >= literal[int] identifier[text] = identifier[self] . identifier[_output] . identifier[getvalue] (). identifier[rstrip] ( literal[string] ) keyword[if] keyword[not] identifier[text] : keyword[return] identifier[self] . identifier[_output] = identifier[StringIO] () identifier[self] . identifier[_output] . identifier[write] ( identifier[text] ) identifier[self] . identifier[_output] . identifier[write] ( literal[string] * identifier[n] ) identifier[text] = identifier[self] . identifier[_output] . identifier[getvalue] () keyword[assert] identifier[text] [- identifier[n] - literal[int] ]!= literal[string] keyword[assert] identifier[text] [- identifier[n] :]== literal[string] * identifier[n]
def ensure_newline(self, n): """Make sure there are 'n' line breaks at the end.""" assert n >= 0 text = self._output.getvalue().rstrip('\n') if not text: return # depends on [control=['if'], data=[]] self._output = StringIO() self._output.write(text) self._output.write('\n' * n) text = self._output.getvalue() assert text[-n - 1] != '\n' assert text[-n:] == '\n' * n
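A minimal runnable sketch of ensure_newline in a hypothetical host class, assuming only that the original object keeps its buffer in self._output as an io.StringIO. Note the trailing asserts above hold only for n >= 1 (text[-0:] is the whole string), so the demo uses n = 2.

from io import StringIO

class Buffer:
    """Hypothetical host class; only the _output attribute is assumed from context."""

    def __init__(self, text=''):
        self._output = StringIO()
        self._output.write(text)

    def ensure_newline(self, n):
        assert n >= 1
        text = self._output.getvalue().rstrip('\n')
        if not text:
            return
        self._output = StringIO()
        self._output.write(text)
        self._output.write('\n' * n)  # buffer now ends with exactly n newlines

buf = Buffer('hello\n\n\n\n')
buf.ensure_newline(2)                 # collapses the run of newlines to two
assert buf._output.getvalue() == 'hello\n\n'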
def list_members(self, retrieve_all=True, **_params): """Fetches a list of all load balancer members for a project.""" # Pass filters in "params" argument to do_request return self.list('members', self.members_path, retrieve_all, **_params)
def function[list_members, parameter[self, retrieve_all]]: constant[Fetches a list of all load balancer members for a project.] return[call[name[self].list, parameter[constant[members], name[self].members_path, name[retrieve_all]]]]
keyword[def] identifier[list_members] ( identifier[self] , identifier[retrieve_all] = keyword[True] ,** identifier[_params] ): literal[string] keyword[return] identifier[self] . identifier[list] ( literal[string] , identifier[self] . identifier[members_path] , identifier[retrieve_all] , ** identifier[_params] )
def list_members(self, retrieve_all=True, **_params): """Fetches a list of all load balancer members for a project.""" # Pass filters in "params" argument to do_request return self.list('members', self.members_path, retrieve_all, **_params)
def _TryPrintAsAnyMessage(self, message): """Serializes if message is a google.protobuf.Any field.""" packed_message = _BuildMessageFromTypeName(message.TypeName(), self.descriptor_pool) if packed_message: packed_message.MergeFromString(message.value) self.out.write('%s[%s]' % (self.indent * ' ', message.type_url)) self._PrintMessageFieldValue(packed_message) self.out.write(' ' if self.as_one_line else '\n') return True else: return False
def function[_TryPrintAsAnyMessage, parameter[self, message]]: constant[Serializes if message is a google.protobuf.Any field.] variable[packed_message] assign[=] call[name[_BuildMessageFromTypeName], parameter[call[name[message].TypeName, parameter[]], name[self].descriptor_pool]] if name[packed_message] begin[:] call[name[packed_message].MergeFromString, parameter[name[message].value]] call[name[self].out.write, parameter[binary_operation[constant[%s[%s]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b204abf0>, <ast.Attribute object at 0x7da1b2048ee0>]]]]] call[name[self]._PrintMessageFieldValue, parameter[name[packed_message]]] call[name[self].out.write, parameter[<ast.IfExp object at 0x7da1b204ab60>]] return[constant[True]]
keyword[def] identifier[_TryPrintAsAnyMessage] ( identifier[self] , identifier[message] ): literal[string] identifier[packed_message] = identifier[_BuildMessageFromTypeName] ( identifier[message] . identifier[TypeName] (), identifier[self] . identifier[descriptor_pool] ) keyword[if] identifier[packed_message] : identifier[packed_message] . identifier[MergeFromString] ( identifier[message] . identifier[value] ) identifier[self] . identifier[out] . identifier[write] ( literal[string] %( identifier[self] . identifier[indent] * literal[string] , identifier[message] . identifier[type_url] )) identifier[self] . identifier[_PrintMessageFieldValue] ( identifier[packed_message] ) identifier[self] . identifier[out] . identifier[write] ( literal[string] keyword[if] identifier[self] . identifier[as_one_line] keyword[else] literal[string] ) keyword[return] keyword[True] keyword[else] : keyword[return] keyword[False]
def _TryPrintAsAnyMessage(self, message): """Serializes if message is a google.protobuf.Any field.""" packed_message = _BuildMessageFromTypeName(message.TypeName(), self.descriptor_pool) if packed_message: packed_message.MergeFromString(message.value) self.out.write('%s[%s]' % (self.indent * ' ', message.type_url)) self._PrintMessageFieldValue(packed_message) self.out.write(' ' if self.as_one_line else '\n') return True # depends on [control=['if'], data=[]] else: return False
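A small sketch of the Any round-trip this method relies on, using the real google.protobuf API (requires the protobuf package). Here the target type is known statically rather than resolved from type_url via a descriptor pool, as _BuildMessageFromTypeName does above.

from google.protobuf import any_pb2, timestamp_pb2

original = timestamp_pb2.Timestamp(seconds=1)
packed = any_pb2.Any()
packed.Pack(original)                 # fills packed.type_url and packed.value

# The printer resolves packed.type_url to a message class, then merges:
rebuilt = timestamp_pb2.Timestamp()   # type known statically in this sketch
rebuilt.MergeFromString(packed.value)
assert rebuilt == original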
def get_binaries(): """Download and return paths of all platform-specific binaries""" paths = [] for arp in [False, True]: paths.append(get_binary(arp=arp)) return paths
def function[get_binaries, parameter[]]: constant[Download and return paths of all platform-specific binaries] variable[paths] assign[=] list[[]] for taget[name[arp]] in starred[list[[<ast.Constant object at 0x7da18fe922c0>, <ast.Constant object at 0x7da18fe929b0>]]] begin[:] call[name[paths].append, parameter[call[name[get_binary], parameter[]]]] return[name[paths]]
keyword[def] identifier[get_binaries] (): literal[string] identifier[paths] =[] keyword[for] identifier[arp] keyword[in] [ keyword[False] , keyword[True] ]: identifier[paths] . identifier[append] ( identifier[get_binary] ( identifier[arp] = identifier[arp] )) keyword[return] identifier[paths]
def get_binaries(): """Download and return paths of all platform-specific binaries""" paths = [] for arp in [False, True]: paths.append(get_binary(arp=arp)) # depends on [control=['for'], data=['arp']] return paths
def set_current_context(self, name): """Set the current context in kubeconfig.""" if self.context_exists(name): self.data['current-context'] = name else: raise KubeConfError("Context does not exist.")
def function[set_current_context, parameter[self, name]]: constant[Set the current context in kubeconfig.] if call[name[self].context_exists, parameter[name[name]]] begin[:] call[name[self].data][constant[current-context]] assign[=] name[name]
keyword[def] identifier[set_current_context] ( identifier[self] , identifier[name] ): literal[string] keyword[if] identifier[self] . identifier[context_exists] ( identifier[name] ): identifier[self] . identifier[data] [ literal[string] ]= identifier[name] keyword[else] : keyword[raise] identifier[KubeConfError] ( literal[string] )
def set_current_context(self, name): """Set the current context in kubeconfig.""" if self.context_exists(name): self.data['current-context'] = name # depends on [control=['if'], data=[]] else: raise KubeConfError('Context does not exist.')
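A self-contained sketch of the surrounding class, assuming self.data mirrors a parsed kubeconfig mapping; context_exists and KubeConfError are stand-ins, since their definitions are not shown here.

class KubeConfError(Exception):
    pass

class KubeConf:
    def __init__(self, data):
        self.data = data  # parsed kubeconfig mapping

    def context_exists(self, name):
        return any(c.get('name') == name for c in self.data.get('contexts', []))

    def set_current_context(self, name):
        if self.context_exists(name):
            self.data['current-context'] = name
        else:
            raise KubeConfError("Context does not exist.")

conf = KubeConf({'contexts': [{'name': 'dev'}], 'current-context': ''})
conf.set_current_context('dev')
assert conf.data['current-context'] == 'dev'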
def delete_collection_namespaced_lease(self, namespace, **kwargs): """ delete collection of Lease This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_namespaced_lease(namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. 
Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1Status If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_collection_namespaced_lease_with_http_info(namespace, **kwargs) else: (data) = self.delete_collection_namespaced_lease_with_http_info(namespace, **kwargs) return data
def function[delete_collection_namespaced_lease, parameter[self, namespace]]: constant[ delete collection of Lease This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_namespaced_lease(namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. 
Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1Status If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async_req]]] begin[:] return[call[name[self].delete_collection_namespaced_lease_with_http_info, parameter[name[namespace]]]]
keyword[def] identifier[delete_collection_namespaced_lease] ( identifier[self] , identifier[namespace] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[delete_collection_namespaced_lease_with_http_info] ( identifier[namespace] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[delete_collection_namespaced_lease_with_http_info] ( identifier[namespace] ,** identifier[kwargs] ) keyword[return] identifier[data]
def delete_collection_namespaced_lease(self, namespace, **kwargs): """ delete collection of Lease This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_namespaced_lease(namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. 
When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1Status If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_collection_namespaced_lease_with_http_info(namespace, **kwargs) # depends on [control=['if'], data=[]] else: data = self.delete_collection_namespaced_lease_with_http_info(namespace, **kwargs) return data
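An illustrative call through the official kubernetes Python client; the namespace and label selector are placeholders, and this only runs against a reachable cluster.

from kubernetes import client, config

config.load_kube_config()              # reads credentials from ~/.kube/config
api = client.CoordinationV1Api()       # Lease lives in coordination.k8s.io/v1
status = api.delete_collection_namespaced_lease(
    namespace='default',
    label_selector='app=demo',         # optional server-side filter
)
print(status)                          # V1Status summarizing the bulk delete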
def play(self): """Change state to playing.""" if self.state == STATE_PAUSED: self._player.set_state(Gst.State.PLAYING) self.state = STATE_PLAYING
def function[play, parameter[self]]: constant[Change state to playing.] if compare[name[self].state equal[==] name[STATE_PAUSED]] begin[:] call[name[self]._player.set_state, parameter[name[Gst].State.PLAYING]] name[self].state assign[=] name[STATE_PLAYING]
keyword[def] identifier[play] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[state] == identifier[STATE_PAUSED] : identifier[self] . identifier[_player] . identifier[set_state] ( identifier[Gst] . identifier[State] . identifier[PLAYING] ) identifier[self] . identifier[state] = identifier[STATE_PLAYING]
def play(self): """Change state to playing.""" if self.state == STATE_PAUSED: self._player.set_state(Gst.State.PLAYING) self.state = STATE_PLAYING # depends on [control=['if'], data=[]]
def make_niche_grid(res_dict, world_size=(60, 60)):
    """
    Converts a dictionary specifying where resources are to nested lists
    specifying what sets of resources are where.

    res_dict - a dictionary in which keys are resources in the environment
    and values are lists of tuples representing the cells they're in.
    world_size - a tuple indicating the dimensions of the world.
    Default = 60x60, because that's the default Avida world size

    Returns a list of lists of sets indicating the set of resources
    available at each x,y location in the Avida grid.
    """
    # Initialize array to represent world
    world = initialize_grid(world_size, set())

    # Fill in data on niches present in each cell of the world
    for res in res_dict:
        for cell in res_dict[res]:
            world[cell[1]][cell[0]].add(res)

    return world
def function[make_niche_grid, parameter[res_dict, world_size]]: constant[ Converts dictionary specifying where resources are to nested lists specifying what sets of resources are where. res_dict - a dictionary in which keys are resources in the environment and values are list of tuples representing the cells they're in. world_size - a tuple indicating the dimensions of the world. Default = 60x60, because that's the default Avida world size Returns a list of lists of sets indicating the set of resources available at each x,y location in the Avida grid. ] variable[world] assign[=] call[name[initialize_grid], parameter[name[world_size], call[name[set], parameter[]]]] for taget[name[res]] in starred[name[res_dict]] begin[:] for taget[name[cell]] in starred[call[name[res_dict]][name[res]]] begin[:] call[call[call[name[world]][call[name[cell]][constant[1]]]][call[name[cell]][constant[0]]].add, parameter[name[res]]] return[name[world]]
keyword[def] identifier[make_niche_grid] ( identifier[res_dict] , identifier[world_size] =( literal[int] , literal[int] )): literal[string] identifier[world] = identifier[initialize_grid] ( identifier[world_size] , identifier[set] ()) keyword[for] identifier[res] keyword[in] identifier[res_dict] : keyword[for] identifier[cell] keyword[in] identifier[res_dict] [ identifier[res] ]: identifier[world] [ identifier[cell] [ literal[int] ]][ identifier[cell] [ literal[int] ]]. identifier[add] ( identifier[res] ) keyword[return] identifier[world]
def make_niche_grid(res_dict, world_size=(60, 60)):
    """
    Converts a dictionary specifying where resources are to nested lists
    specifying what sets of resources are where.

    res_dict - a dictionary in which keys are resources in the environment
    and values are lists of tuples representing the cells they're in.
    world_size - a tuple indicating the dimensions of the world.
    Default = 60x60, because that's the default Avida world size

    Returns a list of lists of sets indicating the set of resources
    available at each x,y location in the Avida grid.
    """
    # Initialize array to represent world
    world = initialize_grid(world_size, set())
    # Fill in data on niches present in each cell of the world
    for res in res_dict:
        for cell in res_dict[res]:
            world[cell[1]][cell[0]].add(res) # depends on [control=['for'], data=['cell']] # depends on [control=['for'], data=['res']]
    return world
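A runnable check using a hypothetical initialize_grid (its real definition is not shown); whatever the real one does, each cell needs its own copy of the default, or every cell would share the single set passed in.

import copy

def initialize_grid(world_size, default):
    # world_size is (width, height); deep-copy so cells don't share state
    return [[copy.deepcopy(default) for _ in range(world_size[0])]
            for _ in range(world_size[1])]

world = make_niche_grid({'res0': [(0, 0), (1, 0)], 'res1': [(1, 0)]},
                        world_size=(3, 2))
assert world[0][0] == {'res0'}
assert world[0][1] == {'res0', 'res1'}  # cell (1, 0) holds both resources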
def getrawfile(self, project_id, sha1, filepath):
    """
    Get the raw file contents for a file by commit SHA and path.

    :param project_id: The ID of a project
    :param sha1: The commit or branch name
    :param filepath: The path to the file
    :return: raw file contents
    """
    data = {'filepath': filepath}

    request = requests.get(
        '{0}/{1}/repository/blobs/{2}'.format(self.projects_url, project_id, sha1),
        params=data, verify=self.verify_ssl,
        auth=self.auth, timeout=self.timeout, headers=self.headers)

    if request.status_code == 200:
        return request.content
    else:
        return False
def function[getrawfile, parameter[self, project_id, sha1, filepath]]: constant[ Get the raw file contents for a file by commit SHA and path. :param project_id: The ID of a project :param sha1: The commit or branch name :param filepath: The path the file :return: raw file contents ] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b28b48b0>], [<ast.Name object at 0x7da1b28b7850>]] variable[request] assign[=] call[name[requests].get, parameter[call[constant[{0}/{1}/repository/blobs/{2}].format, parameter[name[self].projects_url, name[project_id], name[sha1]]]]] if compare[name[request].status_code equal[==] constant[200]] begin[:] return[name[request].content]
keyword[def] identifier[getrawfile] ( identifier[self] , identifier[project_id] , identifier[sha1] , identifier[filepath] ): literal[string] identifier[data] ={ literal[string] : identifier[filepath] } identifier[request] = identifier[requests] . identifier[get] ( literal[string] . identifier[format] ( identifier[self] . identifier[projects_url] , identifier[project_id] , identifier[sha1] ), identifier[params] = identifier[data] , identifier[verify] = identifier[self] . identifier[verify_ssl] , identifier[auth] = identifier[self] . identifier[auth] , identifier[timeout] = identifier[self] . identifier[timeout] , identifier[headers] = identifier[self] . identifier[headers] ) keyword[if] identifier[request] . identifier[status_code] == literal[int] : keyword[return] identifier[request] . identifier[content] keyword[else] : keyword[return] keyword[False]
def getrawfile(self, project_id, sha1, filepath):
    """
    Get the raw file contents for a file by commit SHA and path.

    :param project_id: The ID of a project
    :param sha1: The commit or branch name
    :param filepath: The path to the file
    :return: raw file contents
    """
    data = {'filepath': filepath}
    request = requests.get('{0}/{1}/repository/blobs/{2}'.format(self.projects_url, project_id, sha1), params=data, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout, headers=self.headers)
    if request.status_code == 200:
        return request.content # depends on [control=['if'], data=[]]
    else:
        return False
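The wrapper is a thin layer over a single GET; an equivalent raw request for illustration, assuming the v3-era blobs endpoint this URL shape matches. The host, token, project id, and path are placeholders, so a reachable GitLab instance is needed to run it.

import requests

resp = requests.get(
    'https://gitlab.example.com/api/v3/projects/42/repository/blobs/master',
    params={'filepath': 'README.md'},
    headers={'PRIVATE-TOKEN': 'xxxx'},  # placeholder credentials
    timeout=10,
)
raw = resp.content if resp.status_code == 200 else False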
def human_xor_01(X, y, model_generator, method_name): """ XOR (false/true) This tests how well a feature attribution method agrees with human intuition for an eXclusive OR operation combined with linear effects. This metric deals specifically with the question of credit allocation for the following function when all three inputs are true: if fever: +2 points if cough: +2 points if fever or cough but not both: +6 points transform = "identity" sort_order = 4 """ return _human_xor(X, model_generator, method_name, False, True)
def function[human_xor_01, parameter[X, y, model_generator, method_name]]: constant[ XOR (false/true) This tests how well a feature attribution method agrees with human intuition for an eXclusive OR operation combined with linear effects. This metric deals specifically with the question of credit allocation for the following function when all three inputs are true: if fever: +2 points if cough: +2 points if fever or cough but not both: +6 points transform = "identity" sort_order = 4 ] return[call[name[_human_xor], parameter[name[X], name[model_generator], name[method_name], constant[False], constant[True]]]]
keyword[def] identifier[human_xor_01] ( identifier[X] , identifier[y] , identifier[model_generator] , identifier[method_name] ): literal[string] keyword[return] identifier[_human_xor] ( identifier[X] , identifier[model_generator] , identifier[method_name] , keyword[False] , keyword[True] )
def human_xor_01(X, y, model_generator, method_name): """ XOR (false/true) This tests how well a feature attribution method agrees with human intuition for an eXclusive OR operation combined with linear effects. This metric deals specifically with the question of credit allocation for the following function when all three inputs are true: if fever: +2 points if cough: +2 points if fever or cough but not both: +6 points transform = "identity" sort_order = 4 """ return _human_xor(X, model_generator, method_name, False, True)
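The scoring rule from the docstring, written out directly as a sanity check of the credit-allocation scenario; the docstring mentions three inputs, but only fever and cough carry points.

def xor_points(fever, cough):
    points = 0
    if fever:
        points += 2
    if cough:
        points += 2
    if fever != cough:   # fever or cough but not both
        points += 6
    return points

assert xor_points(True, True) == 4    # both linear effects, no XOR bonus
assert xor_points(True, False) == 8   # one linear effect plus the XOR bonus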
def upper(string):  # pragma: no cover
    """Upper."""

    new_string = []
    for c in string:
        o = ord(c)
        new_string.append(chr(o - 32) if LC_A <= o <= LC_Z else c)
    return ''.join(new_string)
def function[upper, parameter[string]]: constant[Lower.] variable[new_string] assign[=] list[[]] for taget[name[c]] in starred[name[string]] begin[:] variable[o] assign[=] call[name[ord], parameter[name[c]]] call[name[new_string].append, parameter[<ast.IfExp object at 0x7da204961bd0>]] return[call[constant[].join, parameter[name[new_string]]]]
keyword[def] identifier[upper] ( identifier[string] ): literal[string] identifier[new_string] =[] keyword[for] identifier[c] keyword[in] identifier[string] : identifier[o] = identifier[ord] ( identifier[c] ) identifier[new_string] . identifier[append] ( identifier[chr] ( identifier[o] - literal[int] ) keyword[if] identifier[LC_A] <= identifier[o] <= identifier[LC_Z] keyword[else] identifier[c] ) keyword[return] literal[string] . identifier[join] ( identifier[new_string] )
def upper(string):  # pragma: no cover
    'Upper.'
    new_string = []
    for c in string:
        o = ord(c)
        new_string.append(chr(o - 32) if LC_A <= o <= LC_Z else c) # depends on [control=['for'], data=['c']]
    return ''.join(new_string)
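A quick self-contained check, assuming LC_A and LC_Z are the lowercase ASCII bounds (their definitions are not shown here); non-ASCII letters fall outside the range and pass through unchanged.

LC_A, LC_Z = ord('a'), ord('z')

assert upper('hello, World!') == 'HELLO, WORLD!'
assert upper('déjà vu') == 'DéJà VU'  # é and à are left as-is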
def ignore_cxx(self) -> bool: """Consume comments and whitespace characters.""" self._stream.save_context() while not self.read_eof(): idxref = self._stream.index if self._stream.peek_char in " \t\v\f\r\n": while (not self.read_eof() and self._stream.peek_char in " \t\v\f\r\n"): self._stream.incpos() if self.peek_text("//"): while not self.read_eof() and not self.peek_char("\n"): self._stream.incpos() if not self.read_char("\n") and self.read_eof(): return self._stream.validate_context() if self.peek_text("/*"): while not self.read_eof() and not self.peek_text("*/"): self._stream.incpos() if not self.read_text("*/") and self.read_eof(): return self._stream.restore_context() if idxref == self._stream.index: break return self._stream.validate_context()
def function[ignore_cxx, parameter[self]]: constant[Consume comments and whitespace characters.] call[name[self]._stream.save_context, parameter[]] while <ast.UnaryOp object at 0x7da1b01197b0> begin[:] variable[idxref] assign[=] name[self]._stream.index if compare[name[self]._stream.peek_char in constant[ ]] begin[:] while <ast.BoolOp object at 0x7da1b0118310> begin[:] call[name[self]._stream.incpos, parameter[]] if call[name[self].peek_text, parameter[constant[//]]] begin[:] while <ast.BoolOp object at 0x7da1b0118490> begin[:] call[name[self]._stream.incpos, parameter[]] if <ast.BoolOp object at 0x7da1b0119210> begin[:] return[call[name[self]._stream.validate_context, parameter[]]] if call[name[self].peek_text, parameter[constant[/*]]] begin[:] while <ast.BoolOp object at 0x7da1b01d9ba0> begin[:] call[name[self]._stream.incpos, parameter[]] if <ast.BoolOp object at 0x7da1b01dada0> begin[:] return[call[name[self]._stream.restore_context, parameter[]]] if compare[name[idxref] equal[==] name[self]._stream.index] begin[:] break return[call[name[self]._stream.validate_context, parameter[]]]
keyword[def] identifier[ignore_cxx] ( identifier[self] )-> identifier[bool] : literal[string] identifier[self] . identifier[_stream] . identifier[save_context] () keyword[while] keyword[not] identifier[self] . identifier[read_eof] (): identifier[idxref] = identifier[self] . identifier[_stream] . identifier[index] keyword[if] identifier[self] . identifier[_stream] . identifier[peek_char] keyword[in] literal[string] : keyword[while] ( keyword[not] identifier[self] . identifier[read_eof] () keyword[and] identifier[self] . identifier[_stream] . identifier[peek_char] keyword[in] literal[string] ): identifier[self] . identifier[_stream] . identifier[incpos] () keyword[if] identifier[self] . identifier[peek_text] ( literal[string] ): keyword[while] keyword[not] identifier[self] . identifier[read_eof] () keyword[and] keyword[not] identifier[self] . identifier[peek_char] ( literal[string] ): identifier[self] . identifier[_stream] . identifier[incpos] () keyword[if] keyword[not] identifier[self] . identifier[read_char] ( literal[string] ) keyword[and] identifier[self] . identifier[read_eof] (): keyword[return] identifier[self] . identifier[_stream] . identifier[validate_context] () keyword[if] identifier[self] . identifier[peek_text] ( literal[string] ): keyword[while] keyword[not] identifier[self] . identifier[read_eof] () keyword[and] keyword[not] identifier[self] . identifier[peek_text] ( literal[string] ): identifier[self] . identifier[_stream] . identifier[incpos] () keyword[if] keyword[not] identifier[self] . identifier[read_text] ( literal[string] ) keyword[and] identifier[self] . identifier[read_eof] (): keyword[return] identifier[self] . identifier[_stream] . identifier[restore_context] () keyword[if] identifier[idxref] == identifier[self] . identifier[_stream] . identifier[index] : keyword[break] keyword[return] identifier[self] . identifier[_stream] . identifier[validate_context] ()
def ignore_cxx(self) -> bool: """Consume comments and whitespace characters.""" self._stream.save_context() while not self.read_eof(): idxref = self._stream.index if self._stream.peek_char in ' \t\x0b\x0c\r\n': while not self.read_eof() and self._stream.peek_char in ' \t\x0b\x0c\r\n': self._stream.incpos() # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]] if self.peek_text('//'): while not self.read_eof() and (not self.peek_char('\n')): self._stream.incpos() # depends on [control=['while'], data=[]] if not self.read_char('\n') and self.read_eof(): return self._stream.validate_context() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if self.peek_text('/*'): while not self.read_eof() and (not self.peek_text('*/')): self._stream.incpos() # depends on [control=['while'], data=[]] if not self.read_text('*/') and self.read_eof(): return self._stream.restore_context() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if idxref == self._stream.index: break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] return self._stream.validate_context()
def fulfill_access_secret_store_condition(event, agreement_id, did, service_agreement, consumer_address, publisher_account): """ Fulfill the access condition. :param event: AttributeDict with the event data. :param agreement_id: id of the agreement, hex str :param did: DID, str :param service_agreement: ServiceAgreement instance :param consumer_address: ethereum account address of consumer, hex str :param publisher_account: Account instance of the publisher """ logger.debug(f"release reward after event {event}.") name_to_parameter = {param.name: param for param in service_agreement.condition_by_name['accessSecretStore'].parameters} document_id = add_0x_prefix(name_to_parameter['_documentId'].value) asset_id = add_0x_prefix(did_to_id(did)) assert document_id == asset_id, f'document_id {document_id} <=> asset_id {asset_id} mismatch.' try: tx_hash = Keeper.get_instance().access_secret_store_condition.fulfill( agreement_id, document_id, consumer_address, publisher_account ) process_tx_receipt( tx_hash, Keeper.get_instance().access_secret_store_condition.FULFILLED_EVENT, 'AccessSecretStoreCondition.Fulfilled' ) except Exception as e: # logger.error(f'Error when calling grantAccess condition function: {e}') raise e
def function[fulfill_access_secret_store_condition, parameter[event, agreement_id, did, service_agreement, consumer_address, publisher_account]]: constant[ Fulfill the access condition. :param event: AttributeDict with the event data. :param agreement_id: id of the agreement, hex str :param did: DID, str :param service_agreement: ServiceAgreement instance :param consumer_address: ethereum account address of consumer, hex str :param publisher_account: Account instance of the publisher ] call[name[logger].debug, parameter[<ast.JoinedStr object at 0x7da204620e50>]] variable[name_to_parameter] assign[=] <ast.DictComp object at 0x7da204621b40> variable[document_id] assign[=] call[name[add_0x_prefix], parameter[call[name[name_to_parameter]][constant[_documentId]].value]] variable[asset_id] assign[=] call[name[add_0x_prefix], parameter[call[name[did_to_id], parameter[name[did]]]]] assert[compare[name[document_id] equal[==] name[asset_id]]] <ast.Try object at 0x7da20c76e350>
keyword[def] identifier[fulfill_access_secret_store_condition] ( identifier[event] , identifier[agreement_id] , identifier[did] , identifier[service_agreement] , identifier[consumer_address] , identifier[publisher_account] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] ) identifier[name_to_parameter] ={ identifier[param] . identifier[name] : identifier[param] keyword[for] identifier[param] keyword[in] identifier[service_agreement] . identifier[condition_by_name] [ literal[string] ]. identifier[parameters] } identifier[document_id] = identifier[add_0x_prefix] ( identifier[name_to_parameter] [ literal[string] ]. identifier[value] ) identifier[asset_id] = identifier[add_0x_prefix] ( identifier[did_to_id] ( identifier[did] )) keyword[assert] identifier[document_id] == identifier[asset_id] , literal[string] keyword[try] : identifier[tx_hash] = identifier[Keeper] . identifier[get_instance] (). identifier[access_secret_store_condition] . identifier[fulfill] ( identifier[agreement_id] , identifier[document_id] , identifier[consumer_address] , identifier[publisher_account] ) identifier[process_tx_receipt] ( identifier[tx_hash] , identifier[Keeper] . identifier[get_instance] (). identifier[access_secret_store_condition] . identifier[FULFILLED_EVENT] , literal[string] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[raise] identifier[e]
def fulfill_access_secret_store_condition(event, agreement_id, did, service_agreement, consumer_address, publisher_account): """ Fulfill the access condition. :param event: AttributeDict with the event data. :param agreement_id: id of the agreement, hex str :param did: DID, str :param service_agreement: ServiceAgreement instance :param consumer_address: ethereum account address of consumer, hex str :param publisher_account: Account instance of the publisher """ logger.debug(f'release reward after event {event}.') name_to_parameter = {param.name: param for param in service_agreement.condition_by_name['accessSecretStore'].parameters} document_id = add_0x_prefix(name_to_parameter['_documentId'].value) asset_id = add_0x_prefix(did_to_id(did)) assert document_id == asset_id, f'document_id {document_id} <=> asset_id {asset_id} mismatch.' try: tx_hash = Keeper.get_instance().access_secret_store_condition.fulfill(agreement_id, document_id, consumer_address, publisher_account) process_tx_receipt(tx_hash, Keeper.get_instance().access_secret_store_condition.FULFILLED_EVENT, 'AccessSecretStoreCondition.Fulfilled') # depends on [control=['try'], data=[]] except Exception as e: # logger.error(f'Error when calling grantAccess condition function: {e}') raise e # depends on [control=['except'], data=['e']]
def _read_json(self, schema=None, maxlen=JSONRPC_MAX_SIZE): """ Read a JSON payload from the requester Return the parsed payload on success Return None on error """ # JSON post? request_type = self.headers.get('content-type', None) client_address_str = "{}:{}".format(self.client_address[0], self.client_address[1]) if request_type != 'application/json': log.error("Invalid request of type {} from {}".format(request_type, client_address_str)) return None request_str = self._read_payload(maxlen=maxlen) if request_str is None: log.error("Failed to read request") return None # parse the payload request = None try: request = json.loads( request_str ) if schema is not None: jsonschema.validate( request, schema ) except ValidationError as ve: if BLOCKSTACK_DEBUG: log.exception(ve) log.error("Validation error on request {}...".format(request_str[:15])) if ve.validator == "maxLength": return {"error" : "maxLength"} except (TypeError, ValueError) as ve: if BLOCKSTACK_DEBUG: log.exception(ve) return None return request
def function[_read_json, parameter[self, schema, maxlen]]: constant[ Read a JSON payload from the requester Return the parsed payload on success Return None on error ] variable[request_type] assign[=] call[name[self].headers.get, parameter[constant[content-type], constant[None]]] variable[client_address_str] assign[=] call[constant[{}:{}].format, parameter[call[name[self].client_address][constant[0]], call[name[self].client_address][constant[1]]]] if compare[name[request_type] not_equal[!=] constant[application/json]] begin[:] call[name[log].error, parameter[call[constant[Invalid request of type {} from {}].format, parameter[name[request_type], name[client_address_str]]]]] return[constant[None]] variable[request_str] assign[=] call[name[self]._read_payload, parameter[]] if compare[name[request_str] is constant[None]] begin[:] call[name[log].error, parameter[constant[Failed to read request]]] return[constant[None]] variable[request] assign[=] constant[None] <ast.Try object at 0x7da20c6e7d90> return[name[request]]
keyword[def] identifier[_read_json] ( identifier[self] , identifier[schema] = keyword[None] , identifier[maxlen] = identifier[JSONRPC_MAX_SIZE] ): literal[string] identifier[request_type] = identifier[self] . identifier[headers] . identifier[get] ( literal[string] , keyword[None] ) identifier[client_address_str] = literal[string] . identifier[format] ( identifier[self] . identifier[client_address] [ literal[int] ], identifier[self] . identifier[client_address] [ literal[int] ]) keyword[if] identifier[request_type] != literal[string] : identifier[log] . identifier[error] ( literal[string] . identifier[format] ( identifier[request_type] , identifier[client_address_str] )) keyword[return] keyword[None] identifier[request_str] = identifier[self] . identifier[_read_payload] ( identifier[maxlen] = identifier[maxlen] ) keyword[if] identifier[request_str] keyword[is] keyword[None] : identifier[log] . identifier[error] ( literal[string] ) keyword[return] keyword[None] identifier[request] = keyword[None] keyword[try] : identifier[request] = identifier[json] . identifier[loads] ( identifier[request_str] ) keyword[if] identifier[schema] keyword[is] keyword[not] keyword[None] : identifier[jsonschema] . identifier[validate] ( identifier[request] , identifier[schema] ) keyword[except] identifier[ValidationError] keyword[as] identifier[ve] : keyword[if] identifier[BLOCKSTACK_DEBUG] : identifier[log] . identifier[exception] ( identifier[ve] ) identifier[log] . identifier[error] ( literal[string] . identifier[format] ( identifier[request_str] [: literal[int] ])) keyword[if] identifier[ve] . identifier[validator] == literal[string] : keyword[return] { literal[string] : literal[string] } keyword[except] ( identifier[TypeError] , identifier[ValueError] ) keyword[as] identifier[ve] : keyword[if] identifier[BLOCKSTACK_DEBUG] : identifier[log] . identifier[exception] ( identifier[ve] ) keyword[return] keyword[None] keyword[return] identifier[request]
def _read_json(self, schema=None, maxlen=JSONRPC_MAX_SIZE): """ Read a JSON payload from the requester Return the parsed payload on success Return None on error """ # JSON post? request_type = self.headers.get('content-type', None) client_address_str = '{}:{}'.format(self.client_address[0], self.client_address[1]) if request_type != 'application/json': log.error('Invalid request of type {} from {}'.format(request_type, client_address_str)) return None # depends on [control=['if'], data=['request_type']] request_str = self._read_payload(maxlen=maxlen) if request_str is None: log.error('Failed to read request') return None # depends on [control=['if'], data=[]] # parse the payload request = None try: request = json.loads(request_str) if schema is not None: jsonschema.validate(request, schema) # depends on [control=['if'], data=['schema']] # depends on [control=['try'], data=[]] except ValidationError as ve: if BLOCKSTACK_DEBUG: log.exception(ve) # depends on [control=['if'], data=[]] log.error('Validation error on request {}...'.format(request_str[:15])) if ve.validator == 'maxLength': return {'error': 'maxLength'} # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['ve']] except (TypeError, ValueError) as ve: if BLOCKSTACK_DEBUG: log.exception(ve) # depends on [control=['if'], data=[]] return None # depends on [control=['except'], data=['ve']] return request
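The parse-and-validate core in isolation, with an illustrative schema standing in for the caller-supplied one; jsonschema.validate raises a ValidationError whose .validator attribute names the failing keyword.

import json
import jsonschema
from jsonschema import ValidationError

schema = {'type': 'object',
          'properties': {'name': {'type': 'string', 'maxLength': 8}}}

def parse(request_str):
    try:
        request = json.loads(request_str)
        jsonschema.validate(request, schema)
    except ValidationError as ve:
        return {'error': 'maxLength'} if ve.validator == 'maxLength' else None
    except (TypeError, ValueError):
        return None
    return request

assert parse('{"name": "ok"}') == {'name': 'ok'}
assert parse('{"name": "way way too long"}') == {'error': 'maxLength'}
assert parse('not json') is None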
def subscribe_to_data( self, subscriber: Callable[[bytes], bool], ) -> None: """ Not thread-safe. """ self._data_subscribers.append(subscriber)
def function[subscribe_to_data, parameter[self, subscriber]]: constant[ Not thread-safe. ] call[name[self]._data_subscribers.append, parameter[name[subscriber]]]
keyword[def] identifier[subscribe_to_data] ( identifier[self] , identifier[subscriber] : identifier[Callable] [[ identifier[bytes] ], identifier[bool] ], )-> keyword[None] : literal[string] identifier[self] . identifier[_data_subscribers] . identifier[append] ( identifier[subscriber] )
def subscribe_to_data(self, subscriber: Callable[[bytes], bool]) -> None: """ Not thread-safe. """ self._data_subscribers.append(subscriber)