code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def ll(self, folder="", begin_from_file="", num=-1, all_grant_data=False):
    """
    List files on S3 together with their permissions (grants).

    Comparable to ``ll`` (``ls -lah``) in Linux: a listing of files that
    includes permission information. This is a thin wrapper around
    ``ls`` called with ``get_grants=True``.

    Parameters
    ----------
    folder : string
        Path to file on S3.
    num : integer, optional
        Number of results to return; by default all results are returned.
    begin_from_file : string, optional
        Which file to start from on S3. Useful when iterating over lists
        of files and paging the result: start listing from a certain file
        and fetch a certain num (number) of files.
    all_grant_data : Boolean, optional
        If True, more detailed file permission data is returned.

    Examples
    --------
    >>> from s3utils import S3utils
    >>> s3utils = S3utils(
    ...     AWS_ACCESS_KEY_ID='your access key',
    ...     AWS_SECRET_ACCESS_KEY='your secret key',
    ...     AWS_STORAGE_BUCKET_NAME='your bucket name',
    ...     S3UTILS_DEBUG_LEVEL=1,  # change it to 0 for less verbose
    ... )
    >>> import json
    >>> # json.dumps makes the result easier to read:
    >>> my_folder_stuff = s3utils.ll("/test/")
    >>> print(json.dumps(my_folder_stuff, indent=2))
    """
    return self.ls(
        folder=folder,
        begin_from_file=begin_from_file,
        num=num,
        get_grants=True,
        all_grant_data=all_grant_data,
    )
def function[ll, parameter[self, folder, begin_from_file, num, all_grant_data]]: constant[ Get the list of files and permissions from S3. This is similar to LL (ls -lah) in Linux: List of files with permissions. Parameters ---------- folder : string Path to file on S3 num: integer, optional number of results to return, by default it returns all results. begin_from_file : string, optional which file to start from on S3. This is usedful in case you are iterating over lists of files and you need to page the result by starting listing from a certain file and fetching certain num (number) of files. all_grant_data : Boolean, optional More detailed file permission data will be returned. Examples -------- >>> from s3utils import S3utils >>> s3utils = S3utils( ... AWS_ACCESS_KEY_ID = 'your access key', ... AWS_SECRET_ACCESS_KEY = 'your secret key', ... AWS_STORAGE_BUCKET_NAME = 'your bucket name', ... S3UTILS_DEBUG_LEVEL = 1, #change it to 0 for less verbose ... ) >>> import json >>> # We use json.dumps to print the results more readable: >>> my_folder_stuff = s3utils.ll("/test/") >>> print(json.dumps(my_folder_stuff, indent=2)) { "test/myfolder/": [ { "name": "owner's name", "permission": "FULL_CONTROL" } ], "test/myfolder/em/": [ { "name": "owner's name", "permission": "FULL_CONTROL" } ], "test/myfolder/hoho/": [ { "name": "owner's name", "permission": "FULL_CONTROL" } ], "test/myfolder/hoho/.DS_Store": [ { "name": "owner's name", "permission": "FULL_CONTROL" }, { "name": null, "permission": "READ" } ], "test/myfolder/hoho/haha/": [ { "name": "owner's name", "permission": "FULL_CONTROL" } ], "test/myfolder/hoho/haha/ff": [ { "name": "owner's name", "permission": "FULL_CONTROL" }, { "name": null, "permission": "READ" } ], "test/myfolder/hoho/photo.JPG": [ { "name": "owner's name", "permission": "FULL_CONTROL" }, { "name": null, "permission": "READ" } ], } ] return[call[name[self].ls, parameter[]]]
keyword[def] identifier[ll] ( identifier[self] , identifier[folder] = literal[string] , identifier[begin_from_file] = literal[string] , identifier[num] =- literal[int] , identifier[all_grant_data] = keyword[False] ): literal[string] keyword[return] identifier[self] . identifier[ls] ( identifier[folder] = identifier[folder] , identifier[begin_from_file] = identifier[begin_from_file] , identifier[num] = identifier[num] , identifier[get_grants] = keyword[True] , identifier[all_grant_data] = identifier[all_grant_data] )
def ll(self, folder='', begin_from_file='', num=-1, all_grant_data=False): """ Get the list of files and permissions from S3. This is similar to LL (ls -lah) in Linux: List of files with permissions. Parameters ---------- folder : string Path to file on S3 num: integer, optional number of results to return, by default it returns all results. begin_from_file : string, optional which file to start from on S3. This is usedful in case you are iterating over lists of files and you need to page the result by starting listing from a certain file and fetching certain num (number) of files. all_grant_data : Boolean, optional More detailed file permission data will be returned. Examples -------- >>> from s3utils import S3utils >>> s3utils = S3utils( ... AWS_ACCESS_KEY_ID = 'your access key', ... AWS_SECRET_ACCESS_KEY = 'your secret key', ... AWS_STORAGE_BUCKET_NAME = 'your bucket name', ... S3UTILS_DEBUG_LEVEL = 1, #change it to 0 for less verbose ... ) >>> import json >>> # We use json.dumps to print the results more readable: >>> my_folder_stuff = s3utils.ll("/test/") >>> print(json.dumps(my_folder_stuff, indent=2)) { "test/myfolder/": [ { "name": "owner's name", "permission": "FULL_CONTROL" } ], "test/myfolder/em/": [ { "name": "owner's name", "permission": "FULL_CONTROL" } ], "test/myfolder/hoho/": [ { "name": "owner's name", "permission": "FULL_CONTROL" } ], "test/myfolder/hoho/.DS_Store": [ { "name": "owner's name", "permission": "FULL_CONTROL" }, { "name": null, "permission": "READ" } ], "test/myfolder/hoho/haha/": [ { "name": "owner's name", "permission": "FULL_CONTROL" } ], "test/myfolder/hoho/haha/ff": [ { "name": "owner's name", "permission": "FULL_CONTROL" }, { "name": null, "permission": "READ" } ], "test/myfolder/hoho/photo.JPG": [ { "name": "owner's name", "permission": "FULL_CONTROL" }, { "name": null, "permission": "READ" } ], } """ return self.ls(folder=folder, begin_from_file=begin_from_file, num=num, get_grants=True, all_grant_data=all_grant_data)
def _task_directory(self, job_id, task_id, task_attempt): """The local dir for staging files for that particular task.""" dir_name = 'task' if task_id is None else str(task_id) if task_attempt: dir_name = '%s.%s' % (dir_name, task_attempt) return self._provider_root() + '/' + job_id + '/' + dir_name
def function[_task_directory, parameter[self, job_id, task_id, task_attempt]]: constant[The local dir for staging files for that particular task.] variable[dir_name] assign[=] <ast.IfExp object at 0x7da1b012f040> if name[task_attempt] begin[:] variable[dir_name] assign[=] binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b012df60>, <ast.Name object at 0x7da1b012c1c0>]]] return[binary_operation[binary_operation[binary_operation[binary_operation[call[name[self]._provider_root, parameter[]] + constant[/]] + name[job_id]] + constant[/]] + name[dir_name]]]
keyword[def] identifier[_task_directory] ( identifier[self] , identifier[job_id] , identifier[task_id] , identifier[task_attempt] ): literal[string] identifier[dir_name] = literal[string] keyword[if] identifier[task_id] keyword[is] keyword[None] keyword[else] identifier[str] ( identifier[task_id] ) keyword[if] identifier[task_attempt] : identifier[dir_name] = literal[string] %( identifier[dir_name] , identifier[task_attempt] ) keyword[return] identifier[self] . identifier[_provider_root] ()+ literal[string] + identifier[job_id] + literal[string] + identifier[dir_name]
def _task_directory(self, job_id, task_id, task_attempt): """The local dir for staging files for that particular task.""" dir_name = 'task' if task_id is None else str(task_id) if task_attempt: dir_name = '%s.%s' % (dir_name, task_attempt) # depends on [control=['if'], data=[]] return self._provider_root() + '/' + job_id + '/' + dir_name
def _limit_and_df(self, query, limit, as_df=False): """adds a limit (limit==None := no limit) to any query and allow a return as pandas.DataFrame :param bool as_df: if is set to True results return as pandas.DataFrame :param `sqlalchemy.orm.query.Query` query: SQL Alchemy query :param int,tuple limit: maximum number of results :return: query result of pyhgnc.manager.models.XY objects """ if limit: if isinstance(limit, int): query = query.limit(limit) if isinstance(limit, Iterable) and len(limit) == 2 and [int, int] == [type(x) for x in limit]: page, page_size = limit query = query.limit(page_size) query = query.offset(page * page_size) if as_df: results = read_sql(query.statement, self.engine) else: try: results = query.all() except: query.session.rollback() results = query.all() return results
def function[_limit_and_df, parameter[self, query, limit, as_df]]: constant[adds a limit (limit==None := no limit) to any query and allow a return as pandas.DataFrame :param bool as_df: if is set to True results return as pandas.DataFrame :param `sqlalchemy.orm.query.Query` query: SQL Alchemy query :param int,tuple limit: maximum number of results :return: query result of pyhgnc.manager.models.XY objects ] if name[limit] begin[:] if call[name[isinstance], parameter[name[limit], name[int]]] begin[:] variable[query] assign[=] call[name[query].limit, parameter[name[limit]]] if <ast.BoolOp object at 0x7da1b1da5b70> begin[:] <ast.Tuple object at 0x7da1b1da50c0> assign[=] name[limit] variable[query] assign[=] call[name[query].limit, parameter[name[page_size]]] variable[query] assign[=] call[name[query].offset, parameter[binary_operation[name[page] * name[page_size]]]] if name[as_df] begin[:] variable[results] assign[=] call[name[read_sql], parameter[name[query].statement, name[self].engine]] return[name[results]]
keyword[def] identifier[_limit_and_df] ( identifier[self] , identifier[query] , identifier[limit] , identifier[as_df] = keyword[False] ): literal[string] keyword[if] identifier[limit] : keyword[if] identifier[isinstance] ( identifier[limit] , identifier[int] ): identifier[query] = identifier[query] . identifier[limit] ( identifier[limit] ) keyword[if] identifier[isinstance] ( identifier[limit] , identifier[Iterable] ) keyword[and] identifier[len] ( identifier[limit] )== literal[int] keyword[and] [ identifier[int] , identifier[int] ]==[ identifier[type] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[limit] ]: identifier[page] , identifier[page_size] = identifier[limit] identifier[query] = identifier[query] . identifier[limit] ( identifier[page_size] ) identifier[query] = identifier[query] . identifier[offset] ( identifier[page] * identifier[page_size] ) keyword[if] identifier[as_df] : identifier[results] = identifier[read_sql] ( identifier[query] . identifier[statement] , identifier[self] . identifier[engine] ) keyword[else] : keyword[try] : identifier[results] = identifier[query] . identifier[all] () keyword[except] : identifier[query] . identifier[session] . identifier[rollback] () identifier[results] = identifier[query] . identifier[all] () keyword[return] identifier[results]
def _limit_and_df(self, query, limit, as_df=False): """adds a limit (limit==None := no limit) to any query and allow a return as pandas.DataFrame :param bool as_df: if is set to True results return as pandas.DataFrame :param `sqlalchemy.orm.query.Query` query: SQL Alchemy query :param int,tuple limit: maximum number of results :return: query result of pyhgnc.manager.models.XY objects """ if limit: if isinstance(limit, int): query = query.limit(limit) # depends on [control=['if'], data=[]] if isinstance(limit, Iterable) and len(limit) == 2 and ([int, int] == [type(x) for x in limit]): (page, page_size) = limit query = query.limit(page_size) query = query.offset(page * page_size) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if as_df: results = read_sql(query.statement, self.engine) # depends on [control=['if'], data=[]] else: try: results = query.all() # depends on [control=['try'], data=[]] except: query.session.rollback() results = query.all() # depends on [control=['except'], data=[]] return results
def get_named_tensor(self, name):
    """
    Look up a tensor registered under ``name``, if available.

    Returns:
        valid: True if the named tensor was found, False otherwise
        tensor: the tensor when valid, otherwise None
    """
    found = name in self.named_tensors
    tensor = self.named_tensors[name] if found else None
    return found, tensor
def function[get_named_tensor, parameter[self, name]]: constant[ Returns a named tensor if available. Returns: valid: True if named tensor found, False otherwise tensor: If valid, will be a tensor, otherwise None ] if compare[name[name] in name[self].named_tensors] begin[:] return[tuple[[<ast.Constant object at 0x7da1b26adea0>, <ast.Subscript object at 0x7da1b26ad900>]]]
keyword[def] identifier[get_named_tensor] ( identifier[self] , identifier[name] ): literal[string] keyword[if] identifier[name] keyword[in] identifier[self] . identifier[named_tensors] : keyword[return] keyword[True] , identifier[self] . identifier[named_tensors] [ identifier[name] ] keyword[else] : keyword[return] keyword[False] , keyword[None]
def get_named_tensor(self, name): """ Returns a named tensor if available. Returns: valid: True if named tensor found, False otherwise tensor: If valid, will be a tensor, otherwise None """ if name in self.named_tensors: return (True, self.named_tensors[name]) # depends on [control=['if'], data=['name']] else: return (False, None)
def parse_iptables_rule(line):
    '''
    Parse one iptables rule.

    Returns a dict where each iptables code argument is mapped to a name
    using IPTABLES_ARGS.
    '''
    definition = {}

    def record(flag, words, negated):
        # Flush one "-flag arg arg ..." group into the definition dict.
        value = ' '.join(words)
        if flag in IPTABLES_ARGS:
            mapped = IPTABLES_ARGS[flag]
            definition['not_{0}'.format(mapped) if negated else mapped] = value
        else:
            # Unknown flags are collected verbatim under 'extras'.
            definition.setdefault('extras', []).extend((flag, value))

    current_key = None
    current_args = []
    negate = False

    for token in line.split():
        if token == '!':
            # '!' negates the *next* flag; flush any pending one first.
            if current_key:
                record(current_key, current_args, negate)
                current_args = []
                current_key = None
            negate = True
        elif token.startswith('-'):
            # New flag starts; flush the previous one and reset negation.
            if current_key:
                record(current_key, current_args, negate)
                current_args = []
                negate = False
            current_key = token
        else:
            current_args.append(token)

    if current_key:
        record(current_key, current_args, negate)

    if 'extras' in definition:
        definition['extras'] = set(definition['extras'])

    return definition
def function[parse_iptables_rule, parameter[line]]: constant[ Parse one iptables rule. Returns a dict where each iptables code argument is mapped to a name using IPTABLES_ARGS. ] variable[bits] assign[=] call[name[line].split, parameter[]] variable[definition] assign[=] dictionary[[], []] variable[key] assign[=] constant[None] variable[args] assign[=] list[[]] variable[not_arg] assign[=] constant[False] def function[add_args, parameter[]]: variable[arg_string] assign[=] call[constant[ ].join, parameter[name[args]]] if compare[name[key] in name[IPTABLES_ARGS]] begin[:] variable[definition_key] assign[=] <ast.IfExp object at 0x7da18fe91f00> call[name[definition]][name[definition_key]] assign[=] name[arg_string] for taget[name[bit]] in starred[name[bits]] begin[:] if compare[name[bit] equal[==] constant[!]] begin[:] if name[key] begin[:] call[name[add_args], parameter[]] variable[args] assign[=] list[[]] variable[key] assign[=] constant[None] variable[not_arg] assign[=] constant[True] if name[key] begin[:] call[name[add_args], parameter[]] if compare[constant[extras] in name[definition]] begin[:] call[name[definition]][constant[extras]] assign[=] call[name[set], parameter[call[name[definition]][constant[extras]]]] return[name[definition]]
keyword[def] identifier[parse_iptables_rule] ( identifier[line] ): literal[string] identifier[bits] = identifier[line] . identifier[split] () identifier[definition] ={} identifier[key] = keyword[None] identifier[args] =[] identifier[not_arg] = keyword[False] keyword[def] identifier[add_args] (): identifier[arg_string] = literal[string] . identifier[join] ( identifier[args] ) keyword[if] identifier[key] keyword[in] identifier[IPTABLES_ARGS] : identifier[definition_key] =( literal[string] . identifier[format] ( identifier[IPTABLES_ARGS] [ identifier[key] ]) keyword[if] identifier[not_arg] keyword[else] identifier[IPTABLES_ARGS] [ identifier[key] ] ) identifier[definition] [ identifier[definition_key] ]= identifier[arg_string] keyword[else] : identifier[definition] . identifier[setdefault] ( literal[string] ,[]). identifier[extend] (( identifier[key] , identifier[arg_string] )) keyword[for] identifier[bit] keyword[in] identifier[bits] : keyword[if] identifier[bit] == literal[string] : keyword[if] identifier[key] : identifier[add_args] () identifier[args] =[] identifier[key] = keyword[None] identifier[not_arg] = keyword[True] keyword[elif] identifier[bit] . identifier[startswith] ( literal[string] ): keyword[if] identifier[key] : identifier[add_args] () identifier[args] =[] identifier[not_arg] = keyword[False] identifier[key] = identifier[bit] keyword[else] : identifier[args] . identifier[append] ( identifier[bit] ) keyword[if] identifier[key] : identifier[add_args] () keyword[if] literal[string] keyword[in] identifier[definition] : identifier[definition] [ literal[string] ]= identifier[set] ( identifier[definition] [ literal[string] ]) keyword[return] identifier[definition]
def parse_iptables_rule(line): """ Parse one iptables rule. Returns a dict where each iptables code argument is mapped to a name using IPTABLES_ARGS. """ bits = line.split() definition = {} key = None args = [] not_arg = False def add_args(): arg_string = ' '.join(args) if key in IPTABLES_ARGS: definition_key = 'not_{0}'.format(IPTABLES_ARGS[key]) if not_arg else IPTABLES_ARGS[key] definition[definition_key] = arg_string # depends on [control=['if'], data=['key', 'IPTABLES_ARGS']] else: definition.setdefault('extras', []).extend((key, arg_string)) for bit in bits: if bit == '!': if key: add_args() args = [] key = None # depends on [control=['if'], data=[]] not_arg = True # depends on [control=['if'], data=[]] elif bit.startswith('-'): if key: add_args() args = [] not_arg = False # depends on [control=['if'], data=[]] key = bit # depends on [control=['if'], data=[]] else: args.append(bit) # depends on [control=['for'], data=['bit']] if key: add_args() # depends on [control=['if'], data=[]] if 'extras' in definition: definition['extras'] = set(definition['extras']) # depends on [control=['if'], data=['definition']] return definition
def edit_team(self, name, memberships=None, filter=None, description=None, show=None, theme=None,
              perm_capture=None, perm_custom_events=None, perm_aws_data=None):
    '''
    **Description**
        Edits an existing team. All arguments are optional. Team settings for any arguments unspecified
        will remain at their current settings.

    **Arguments**
        - **name**: the name of the team to edit.
        - **memberships**: dictionary of (user-name, team-role) pairs that should describe new memberships of the team.
        - **filter**: the scope that this team is able to access within Sysdig Monitor.
        - **description**: describes the team that will be created.
        - **show**: possible values are *host*, *container*.
        - **theme**: the color theme that Sysdig Monitor will use when displaying the team.
        - **perm_capture**: if True, this team will be allowed to take sysdig captures.
        - **perm_custom_events**: if True, this team will be allowed to view all custom events from every user and agent.
        - **perm_aws_data**: if True, this team will have access to all AWS metrics and tags, regardless of the team's scope.

    **Success Return Value**
        The edited team.

    **Example**
        `examples/user_team_mgmt.py <https://github.com/draios/python-sdc-client/blob/master/examples/user_team_mgmt.py>`_
    '''
    res = self.get_team(name)
    if not res[0]:
        return res

    t = res[1]

    # Use explicit `is not None` checks so that falsy-but-valid values
    # (e.g. perm_capture=False to revoke capture permission, or an empty
    # theme string) are honored instead of silently falling back to the
    # team's current setting, which the old truthiness test did.
    reqbody = {
        'name': name,
        'theme': theme if theme is not None else t['theme'],
        'show': show if show is not None else t['show'],
        'canUseSysdigCapture': perm_capture if perm_capture is not None else t['canUseSysdigCapture'],
        'canUseCustomEvents': perm_custom_events if perm_custom_events is not None else t['canUseCustomEvents'],
        'canUseAwsMetrics': perm_aws_data if perm_aws_data is not None else t['canUseAwsMetrics'],
        'id': t['id'],
        'version': t['version']
    }

    # Handling team description
    if description is not None:
        reqbody['description'] = description
    elif 'description' in t:
        reqbody['description'] = t['description']

    # Map (user-name, team-role) pairs to membership records.
    if memberships is not None:
        res = self._get_user_id_dict(list(memberships.keys()))
        if not res[0]:
            return [False, 'Could not convert user names to IDs']
        reqbody['userRoles'] = [
            {'userId': user_id, 'role': memberships[user_name]}
            for (user_name, user_id) in res[1].items()
        ]
    elif 'userRoles' in t:
        reqbody['userRoles'] = t['userRoles']
    else:
        reqbody['userRoles'] = []

    # Special handling for filters since we don't support blank filters.
    if filter is not None:
        reqbody['filter'] = filter
    elif 'filter' in t:
        reqbody['filter'] = t['filter']

    res = requests.put(self.url + '/api/teams/' + str(t['id']), headers=self.hdrs,
                       data=json.dumps(reqbody), verify=self.ssl_verify)
    return self._request_result(res)
def function[edit_team, parameter[self, name, memberships, filter, description, show, theme, perm_capture, perm_custom_events, perm_aws_data]]: constant[ **Description** Edits an existing team. All arguments are optional. Team settings for any arguments unspecified will remain at their current settings. **Arguments** - **name**: the name of the team to edit. - **memberships**: dictionary of (user-name, team-role) pairs that should describe new memberships of the team. - **filter**: the scope that this team is able to access within Sysdig Monitor. - **description**: describes the team that will be created. - **show**: possible values are *host*, *container*. - **theme**: the color theme that Sysdig Monitor will use when displaying the team. - **perm_capture**: if True, this team will be allowed to take sysdig captures. - **perm_custom_events**: if True, this team will be allowed to view all custom events from every user and agent. - **perm_aws_data**: if True, this team will have access to all AWS metrics and tags, regardless of the team's scope. **Success Return Value** The edited team. 
**Example** `examples/user_team_mgmt.py <https://github.com/draios/python-sdc-client/blob/master/examples/user_team_mgmt.py>`_ ] variable[res] assign[=] call[name[self].get_team, parameter[name[name]]] if compare[call[name[res]][constant[0]] equal[==] constant[False]] begin[:] return[name[res]] variable[t] assign[=] call[name[res]][constant[1]] variable[reqbody] assign[=] dictionary[[<ast.Constant object at 0x7da18f7226e0>, <ast.Constant object at 0x7da18f7235e0>, <ast.Constant object at 0x7da18f722c20>, <ast.Constant object at 0x7da18f722dd0>, <ast.Constant object at 0x7da18f7218d0>, <ast.Constant object at 0x7da18f723340>, <ast.Constant object at 0x7da18f7201c0>, <ast.Constant object at 0x7da18f723130>], [<ast.Name object at 0x7da18f722f20>, <ast.IfExp object at 0x7da18f720ee0>, <ast.IfExp object at 0x7da18f723bb0>, <ast.IfExp object at 0x7da18f722c80>, <ast.IfExp object at 0x7da18f7225c0>, <ast.IfExp object at 0x7da18f723e80>, <ast.Subscript object at 0x7da18f7204c0>, <ast.Subscript object at 0x7da18f723be0>]] if compare[name[description] is_not constant[None]] begin[:] call[name[reqbody]][constant[description]] assign[=] name[description] if compare[name[memberships] not_equal[!=] constant[None]] begin[:] variable[res] assign[=] call[name[self]._get_user_id_dict, parameter[call[name[list], parameter[call[name[memberships].keys, parameter[]]]]]] if compare[call[name[res]][constant[0]] equal[==] constant[False]] begin[:] return[list[[<ast.Constant object at 0x7da18f720df0>, <ast.Constant object at 0x7da18f723070>]]] call[name[reqbody]][constant[userRoles]] assign[=] <ast.ListComp object at 0x7da18f721510> if compare[name[filter] not_equal[!=] constant[None]] begin[:] call[name[reqbody]][constant[filter]] assign[=] name[filter] variable[res] assign[=] call[name[requests].put, parameter[binary_operation[binary_operation[name[self].url + constant[/api/teams/]] + call[name[str], parameter[call[name[t]][constant[id]]]]]]] return[call[name[self]._request_result, 
parameter[name[res]]]]
keyword[def] identifier[edit_team] ( identifier[self] , identifier[name] , identifier[memberships] = keyword[None] , identifier[filter] = keyword[None] , identifier[description] = keyword[None] , identifier[show] = keyword[None] , identifier[theme] = keyword[None] , identifier[perm_capture] = keyword[None] , identifier[perm_custom_events] = keyword[None] , identifier[perm_aws_data] = keyword[None] ): literal[string] identifier[res] = identifier[self] . identifier[get_team] ( identifier[name] ) keyword[if] identifier[res] [ literal[int] ]== keyword[False] : keyword[return] identifier[res] identifier[t] = identifier[res] [ literal[int] ] identifier[reqbody] ={ literal[string] : identifier[name] , literal[string] : identifier[theme] keyword[if] identifier[theme] keyword[else] identifier[t] [ literal[string] ], literal[string] : identifier[show] keyword[if] identifier[show] keyword[else] identifier[t] [ literal[string] ], literal[string] : identifier[perm_capture] keyword[if] identifier[perm_capture] keyword[else] identifier[t] [ literal[string] ], literal[string] : identifier[perm_custom_events] keyword[if] identifier[perm_custom_events] keyword[else] identifier[t] [ literal[string] ], literal[string] : identifier[perm_aws_data] keyword[if] identifier[perm_aws_data] keyword[else] identifier[t] [ literal[string] ], literal[string] : identifier[t] [ literal[string] ], literal[string] : identifier[t] [ literal[string] ] } keyword[if] identifier[description] keyword[is] keyword[not] keyword[None] : identifier[reqbody] [ literal[string] ]= identifier[description] keyword[elif] literal[string] keyword[in] identifier[list] ( identifier[t] . identifier[keys] ()): identifier[reqbody] [ literal[string] ]= identifier[t] [ literal[string] ] keyword[if] identifier[memberships] != keyword[None] : identifier[res] = identifier[self] . identifier[_get_user_id_dict] ( identifier[list] ( identifier[memberships] . 
identifier[keys] ())) keyword[if] identifier[res] [ literal[int] ]== keyword[False] : keyword[return] [ keyword[False] , literal[string] ] identifier[reqbody] [ literal[string] ]=[ { literal[string] : identifier[user_id] , literal[string] : identifier[memberships] [ identifier[user_name] ] } keyword[for] ( identifier[user_name] , identifier[user_id] ) keyword[in] identifier[res] [ literal[int] ]. identifier[items] () ] keyword[elif] literal[string] keyword[in] identifier[list] ( identifier[t] . identifier[keys] ()): identifier[reqbody] [ literal[string] ]= identifier[t] [ literal[string] ] keyword[else] : identifier[reqbody] [ literal[string] ]=[] keyword[if] identifier[filter] != keyword[None] : identifier[reqbody] [ literal[string] ]= identifier[filter] keyword[elif] literal[string] keyword[in] identifier[list] ( identifier[t] . identifier[keys] ()): identifier[reqbody] [ literal[string] ]= identifier[t] [ literal[string] ] identifier[res] = identifier[requests] . identifier[put] ( identifier[self] . identifier[url] + literal[string] + identifier[str] ( identifier[t] [ literal[string] ]), identifier[headers] = identifier[self] . identifier[hdrs] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[reqbody] ), identifier[verify] = identifier[self] . identifier[ssl_verify] ) keyword[return] identifier[self] . identifier[_request_result] ( identifier[res] )
def edit_team(self, name, memberships=None, filter=None, description=None, show=None, theme=None, perm_capture=None, perm_custom_events=None, perm_aws_data=None): """ **Description** Edits an existing team. All arguments are optional. Team settings for any arguments unspecified will remain at their current settings. **Arguments** - **name**: the name of the team to edit. - **memberships**: dictionary of (user-name, team-role) pairs that should describe new memberships of the team. - **filter**: the scope that this team is able to access within Sysdig Monitor. - **description**: describes the team that will be created. - **show**: possible values are *host*, *container*. - **theme**: the color theme that Sysdig Monitor will use when displaying the team. - **perm_capture**: if True, this team will be allowed to take sysdig captures. - **perm_custom_events**: if True, this team will be allowed to view all custom events from every user and agent. - **perm_aws_data**: if True, this team will have access to all AWS metrics and tags, regardless of the team's scope. **Success Return Value** The edited team. 
**Example** `examples/user_team_mgmt.py <https://github.com/draios/python-sdc-client/blob/master/examples/user_team_mgmt.py>`_ """ res = self.get_team(name) if res[0] == False: return res # depends on [control=['if'], data=[]] t = res[1] reqbody = {'name': name, 'theme': theme if theme else t['theme'], 'show': show if show else t['show'], 'canUseSysdigCapture': perm_capture if perm_capture else t['canUseSysdigCapture'], 'canUseCustomEvents': perm_custom_events if perm_custom_events else t['canUseCustomEvents'], 'canUseAwsMetrics': perm_aws_data if perm_aws_data else t['canUseAwsMetrics'], 'id': t['id'], 'version': t['version']} # Handling team description if description is not None: reqbody['description'] = description # depends on [control=['if'], data=['description']] elif 'description' in list(t.keys()): reqbody['description'] = t['description'] # depends on [control=['if'], data=[]] # Handling for users to map (user-name, team-role) pairs to memberships if memberships != None: res = self._get_user_id_dict(list(memberships.keys())) if res[0] == False: return [False, 'Could not convert user names to IDs'] # depends on [control=['if'], data=[]] reqbody['userRoles'] = [{'userId': user_id, 'role': memberships[user_name]} for (user_name, user_id) in res[1].items()] # depends on [control=['if'], data=['memberships']] elif 'userRoles' in list(t.keys()): reqbody['userRoles'] = t['userRoles'] # depends on [control=['if'], data=[]] else: reqbody['userRoles'] = [] # Special handling for filters since we don't support blank filters if filter != None: reqbody['filter'] = filter # depends on [control=['if'], data=['filter']] elif 'filter' in list(t.keys()): reqbody['filter'] = t['filter'] # depends on [control=['if'], data=[]] res = requests.put(self.url + '/api/teams/' + str(t['id']), headers=self.hdrs, data=json.dumps(reqbody), verify=self.ssl_verify) return self._request_result(res)
def update_annotations(self):
    """Update annotations made by the user, including bookmarks and events.

    Depending on the settings, it might add the bookmarks to overview
    and traces.
    """
    start_time = self.parent.overview.start_time

    if self.parent.notes.annot is None:
        all_annot = []
    else:
        bookmarks = self.parent.notes.annot.get_bookmarks()
        events = self.get_selected_events()
        all_annot = bookmarks + events
        # show annotations in chronological order in the table
        all_annot = sorted(all_annot, key=lambda x: x['start'])

    self.idx_annot_list.clearContents()
    self.idx_annot_list.setRowCount(len(all_annot))

    # NOTE(review): `bookmarks` is only bound in the else-branch above; the
    # loop body is still safe because all_annot is empty when annot is None.
    for i, mrk in enumerate(all_annot):
        # wall-clock time of the annotation (recording start + offset)
        abs_time = (start_time +
                    timedelta(seconds=mrk['start'])).strftime('%H:%M:%S')
        dur = timedelta(seconds=mrk['end'] - mrk['start'])
        # duration rendered as "seconds.milliseconds"
        duration = '{0:02d}.{1:03d}'.format(dur.seconds,
                                            round(dur.microseconds / 1000))
        item_time = QTableWidgetItem(abs_time)
        item_duration = QTableWidgetItem(duration)
        item_name = QTableWidgetItem(mrk['name'])
        # bookmarks use the configured color; events derive a color
        # deterministically from their name
        if mrk in bookmarks:
            item_type = QTableWidgetItem('bookmark')
            color = self.parent.value('annot_bookmark_color')
        else:
            item_type = QTableWidgetItem('event')
            color = convert_name_to_color(mrk['name'])
        chan = mrk['chan']
        # channel may be a single string or a sequence of channel names
        if isinstance(chan, (tuple, list)):
            chan = ', '.join(chan)
        item_chan = QTableWidgetItem(chan)
        item_time.setForeground(QColor(color))
        item_duration.setForeground(QColor(color))
        item_name.setForeground(QColor(color))
        item_type.setForeground(QColor(color))
        item_chan.setForeground(QColor(color))
        self.idx_annot_list.setItem(i, 0, item_time)
        self.idx_annot_list.setItem(i, 1, item_duration)
        self.idx_annot_list.setItem(i, 2, item_name)
        self.idx_annot_list.setItem(i, 3, item_type)
        self.idx_annot_list.setItem(i, 4, item_chan)

    # store information about the time as list (easy to access)
    annot_start = [ann['start'] for ann in all_annot]
    annot_end = [ann['end'] for ann in all_annot]
    annot_name = [ann['name'] for ann in all_annot]
    self.idx_annot_list.setProperty('start', annot_start)
    self.idx_annot_list.setProperty('end', annot_end)
    self.idx_annot_list.setProperty('name', annot_name)

    if self.parent.traces.data is not None:
        self.parent.traces.display_annotations()
    self.parent.overview.display_annotations()
def function[update_annotations, parameter[self]]: constant[Update annotations made by the user, including bookmarks and events. Depending on the settings, it might add the bookmarks to overview and traces. ] variable[start_time] assign[=] name[self].parent.overview.start_time if compare[name[self].parent.notes.annot is constant[None]] begin[:] variable[all_annot] assign[=] list[[]] call[name[self].idx_annot_list.clearContents, parameter[]] call[name[self].idx_annot_list.setRowCount, parameter[call[name[len], parameter[name[all_annot]]]]] for taget[tuple[[<ast.Name object at 0x7da1b0e8fee0>, <ast.Name object at 0x7da1b0e8efe0>]]] in starred[call[name[enumerate], parameter[name[all_annot]]]] begin[:] variable[abs_time] assign[=] call[binary_operation[name[start_time] + call[name[timedelta], parameter[]]].strftime, parameter[constant[%H:%M:%S]]] variable[dur] assign[=] call[name[timedelta], parameter[]] variable[duration] assign[=] call[constant[{0:02d}.{1:03d}].format, parameter[name[dur].seconds, call[name[round], parameter[binary_operation[name[dur].microseconds / constant[1000]]]]]] variable[item_time] assign[=] call[name[QTableWidgetItem], parameter[name[abs_time]]] variable[item_duration] assign[=] call[name[QTableWidgetItem], parameter[name[duration]]] variable[item_name] assign[=] call[name[QTableWidgetItem], parameter[call[name[mrk]][constant[name]]]] if compare[name[mrk] in name[bookmarks]] begin[:] variable[item_type] assign[=] call[name[QTableWidgetItem], parameter[constant[bookmark]]] variable[color] assign[=] call[name[self].parent.value, parameter[constant[annot_bookmark_color]]] variable[chan] assign[=] call[name[mrk]][constant[chan]] if call[name[isinstance], parameter[name[chan], tuple[[<ast.Name object at 0x7da1b0e8f5b0>, <ast.Name object at 0x7da1b0e8f610>]]]] begin[:] variable[chan] assign[=] call[constant[, ].join, parameter[name[chan]]] variable[item_chan] assign[=] call[name[QTableWidgetItem], parameter[name[chan]]] 
call[name[item_time].setForeground, parameter[call[name[QColor], parameter[name[color]]]]] call[name[item_duration].setForeground, parameter[call[name[QColor], parameter[name[color]]]]] call[name[item_name].setForeground, parameter[call[name[QColor], parameter[name[color]]]]] call[name[item_type].setForeground, parameter[call[name[QColor], parameter[name[color]]]]] call[name[item_chan].setForeground, parameter[call[name[QColor], parameter[name[color]]]]] call[name[self].idx_annot_list.setItem, parameter[name[i], constant[0], name[item_time]]] call[name[self].idx_annot_list.setItem, parameter[name[i], constant[1], name[item_duration]]] call[name[self].idx_annot_list.setItem, parameter[name[i], constant[2], name[item_name]]] call[name[self].idx_annot_list.setItem, parameter[name[i], constant[3], name[item_type]]] call[name[self].idx_annot_list.setItem, parameter[name[i], constant[4], name[item_chan]]] variable[annot_start] assign[=] <ast.ListComp object at 0x7da1b0e8c130> variable[annot_end] assign[=] <ast.ListComp object at 0x7da1b0e8f970> variable[annot_name] assign[=] <ast.ListComp object at 0x7da1b0e8f4f0> call[name[self].idx_annot_list.setProperty, parameter[constant[start], name[annot_start]]] call[name[self].idx_annot_list.setProperty, parameter[constant[end], name[annot_end]]] call[name[self].idx_annot_list.setProperty, parameter[constant[name], name[annot_name]]] if compare[name[self].parent.traces.data is_not constant[None]] begin[:] call[name[self].parent.traces.display_annotations, parameter[]] call[name[self].parent.overview.display_annotations, parameter[]]
keyword[def] identifier[update_annotations] ( identifier[self] ): literal[string] identifier[start_time] = identifier[self] . identifier[parent] . identifier[overview] . identifier[start_time] keyword[if] identifier[self] . identifier[parent] . identifier[notes] . identifier[annot] keyword[is] keyword[None] : identifier[all_annot] =[] keyword[else] : identifier[bookmarks] = identifier[self] . identifier[parent] . identifier[notes] . identifier[annot] . identifier[get_bookmarks] () identifier[events] = identifier[self] . identifier[get_selected_events] () identifier[all_annot] = identifier[bookmarks] + identifier[events] identifier[all_annot] = identifier[sorted] ( identifier[all_annot] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[string] ]) identifier[self] . identifier[idx_annot_list] . identifier[clearContents] () identifier[self] . identifier[idx_annot_list] . identifier[setRowCount] ( identifier[len] ( identifier[all_annot] )) keyword[for] identifier[i] , identifier[mrk] keyword[in] identifier[enumerate] ( identifier[all_annot] ): identifier[abs_time] =( identifier[start_time] + identifier[timedelta] ( identifier[seconds] = identifier[mrk] [ literal[string] ])). identifier[strftime] ( literal[string] ) identifier[dur] = identifier[timedelta] ( identifier[seconds] = identifier[mrk] [ literal[string] ]- identifier[mrk] [ literal[string] ]) identifier[duration] = literal[string] . identifier[format] ( identifier[dur] . identifier[seconds] , identifier[round] ( identifier[dur] . 
identifier[microseconds] / literal[int] )) identifier[item_time] = identifier[QTableWidgetItem] ( identifier[abs_time] ) identifier[item_duration] = identifier[QTableWidgetItem] ( identifier[duration] ) identifier[item_name] = identifier[QTableWidgetItem] ( identifier[mrk] [ literal[string] ]) keyword[if] identifier[mrk] keyword[in] identifier[bookmarks] : identifier[item_type] = identifier[QTableWidgetItem] ( literal[string] ) identifier[color] = identifier[self] . identifier[parent] . identifier[value] ( literal[string] ) keyword[else] : identifier[item_type] = identifier[QTableWidgetItem] ( literal[string] ) identifier[color] = identifier[convert_name_to_color] ( identifier[mrk] [ literal[string] ]) identifier[chan] = identifier[mrk] [ literal[string] ] keyword[if] identifier[isinstance] ( identifier[chan] ,( identifier[tuple] , identifier[list] )): identifier[chan] = literal[string] . identifier[join] ( identifier[chan] ) identifier[item_chan] = identifier[QTableWidgetItem] ( identifier[chan] ) identifier[item_time] . identifier[setForeground] ( identifier[QColor] ( identifier[color] )) identifier[item_duration] . identifier[setForeground] ( identifier[QColor] ( identifier[color] )) identifier[item_name] . identifier[setForeground] ( identifier[QColor] ( identifier[color] )) identifier[item_type] . identifier[setForeground] ( identifier[QColor] ( identifier[color] )) identifier[item_chan] . identifier[setForeground] ( identifier[QColor] ( identifier[color] )) identifier[self] . identifier[idx_annot_list] . identifier[setItem] ( identifier[i] , literal[int] , identifier[item_time] ) identifier[self] . identifier[idx_annot_list] . identifier[setItem] ( identifier[i] , literal[int] , identifier[item_duration] ) identifier[self] . identifier[idx_annot_list] . identifier[setItem] ( identifier[i] , literal[int] , identifier[item_name] ) identifier[self] . identifier[idx_annot_list] . 
identifier[setItem] ( identifier[i] , literal[int] , identifier[item_type] ) identifier[self] . identifier[idx_annot_list] . identifier[setItem] ( identifier[i] , literal[int] , identifier[item_chan] ) identifier[annot_start] =[ identifier[ann] [ literal[string] ] keyword[for] identifier[ann] keyword[in] identifier[all_annot] ] identifier[annot_end] =[ identifier[ann] [ literal[string] ] keyword[for] identifier[ann] keyword[in] identifier[all_annot] ] identifier[annot_name] =[ identifier[ann] [ literal[string] ] keyword[for] identifier[ann] keyword[in] identifier[all_annot] ] identifier[self] . identifier[idx_annot_list] . identifier[setProperty] ( literal[string] , identifier[annot_start] ) identifier[self] . identifier[idx_annot_list] . identifier[setProperty] ( literal[string] , identifier[annot_end] ) identifier[self] . identifier[idx_annot_list] . identifier[setProperty] ( literal[string] , identifier[annot_name] ) keyword[if] identifier[self] . identifier[parent] . identifier[traces] . identifier[data] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[parent] . identifier[traces] . identifier[display_annotations] () identifier[self] . identifier[parent] . identifier[overview] . identifier[display_annotations] ()
def update_annotations(self): """Update annotations made by the user, including bookmarks and events. Depending on the settings, it might add the bookmarks to overview and traces. """ start_time = self.parent.overview.start_time if self.parent.notes.annot is None: all_annot = [] # depends on [control=['if'], data=[]] else: bookmarks = self.parent.notes.annot.get_bookmarks() events = self.get_selected_events() all_annot = bookmarks + events all_annot = sorted(all_annot, key=lambda x: x['start']) self.idx_annot_list.clearContents() self.idx_annot_list.setRowCount(len(all_annot)) for (i, mrk) in enumerate(all_annot): abs_time = (start_time + timedelta(seconds=mrk['start'])).strftime('%H:%M:%S') dur = timedelta(seconds=mrk['end'] - mrk['start']) duration = '{0:02d}.{1:03d}'.format(dur.seconds, round(dur.microseconds / 1000)) item_time = QTableWidgetItem(abs_time) item_duration = QTableWidgetItem(duration) item_name = QTableWidgetItem(mrk['name']) if mrk in bookmarks: item_type = QTableWidgetItem('bookmark') color = self.parent.value('annot_bookmark_color') # depends on [control=['if'], data=[]] else: item_type = QTableWidgetItem('event') color = convert_name_to_color(mrk['name']) chan = mrk['chan'] if isinstance(chan, (tuple, list)): chan = ', '.join(chan) # depends on [control=['if'], data=[]] item_chan = QTableWidgetItem(chan) item_time.setForeground(QColor(color)) item_duration.setForeground(QColor(color)) item_name.setForeground(QColor(color)) item_type.setForeground(QColor(color)) item_chan.setForeground(QColor(color)) self.idx_annot_list.setItem(i, 0, item_time) self.idx_annot_list.setItem(i, 1, item_duration) self.idx_annot_list.setItem(i, 2, item_name) self.idx_annot_list.setItem(i, 3, item_type) self.idx_annot_list.setItem(i, 4, item_chan) # depends on [control=['for'], data=[]] # store information about the time as list (easy to access) annot_start = [ann['start'] for ann in all_annot] annot_end = [ann['end'] for ann in all_annot] annot_name = [ann['name'] 
for ann in all_annot] self.idx_annot_list.setProperty('start', annot_start) self.idx_annot_list.setProperty('end', annot_end) self.idx_annot_list.setProperty('name', annot_name) if self.parent.traces.data is not None: self.parent.traces.display_annotations() # depends on [control=['if'], data=[]] self.parent.overview.display_annotations()
def update_virtual_meta(self):
    """Will read back the virtual column etc, written by
    :func:`DataFrame.write_virtual_meta`. This will be done when opening
    a DataFrame.
    """
    import astropy.units
    try:
        path = os.path.join(self.get_private_dir(create=False), "virtual_meta.yaml")
        if os.path.exists(path):
            meta_info = vaex.utils.read_json_or_yaml(path)
            # Older metadata files may lack virtual columns; nothing to restore.
            if 'virtual_columns' not in meta_info:
                return
            self.virtual_columns.update(meta_info["virtual_columns"])
            self.variables.update(meta_info["variables"])
            self.ucds.update(meta_info["ucds"])
            self.descriptions.update(meta_info["descriptions"])
            # Units are persisted as strings; rebuild astropy Unit objects.
            units = {key: astropy.units.Unit(value)
                     for key, value in meta_info["units"].items()}
            self.units.update(units)
    except Exception:
        # Restoring metadata is best-effort: a corrupt or unreadable file must
        # not prevent the DataFrame from opening.  Narrowed from a bare
        # `except:` so KeyboardInterrupt/SystemExit still propagate.
        logger.exception("non fatal error")
def function[update_virtual_meta, parameter[self]]: constant[Will read back the virtual column etc, written by :func:`DataFrame.write_virtual_meta`. This will be done when opening a DataFrame.] import module[astropy.units] <ast.Try object at 0x7da18dc04400>
keyword[def] identifier[update_virtual_meta] ( identifier[self] ): literal[string] keyword[import] identifier[astropy] . identifier[units] keyword[try] : identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[get_private_dir] ( identifier[create] = keyword[False] ), literal[string] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ): identifier[meta_info] = identifier[vaex] . identifier[utils] . identifier[read_json_or_yaml] ( identifier[path] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[meta_info] : keyword[return] identifier[self] . identifier[virtual_columns] . identifier[update] ( identifier[meta_info] [ literal[string] ]) identifier[self] . identifier[variables] . identifier[update] ( identifier[meta_info] [ literal[string] ]) identifier[self] . identifier[ucds] . identifier[update] ( identifier[meta_info] [ literal[string] ]) identifier[self] . identifier[descriptions] . identifier[update] ( identifier[meta_info] [ literal[string] ]) identifier[units] ={ identifier[key] : identifier[astropy] . identifier[units] . identifier[Unit] ( identifier[value] ) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[meta_info] [ literal[string] ]. identifier[items] ()} identifier[self] . identifier[units] . identifier[update] ( identifier[units] ) keyword[except] : identifier[logger] . identifier[exception] ( literal[string] )
def update_virtual_meta(self): """Will read back the virtual column etc, written by :func:`DataFrame.write_virtual_meta`. This will be done when opening a DataFrame.""" import astropy.units try: path = os.path.join(self.get_private_dir(create=False), 'virtual_meta.yaml') if os.path.exists(path): meta_info = vaex.utils.read_json_or_yaml(path) if 'virtual_columns' not in meta_info: return # depends on [control=['if'], data=[]] self.virtual_columns.update(meta_info['virtual_columns']) self.variables.update(meta_info['variables']) self.ucds.update(meta_info['ucds']) self.descriptions.update(meta_info['descriptions']) units = {key: astropy.units.Unit(value) for (key, value) in meta_info['units'].items()} self.units.update(units) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except: logger.exception('non fatal error') # depends on [control=['except'], data=[]]
def forge_relationship(self, left_id, left_type, right_id, right_type,
                       rel_type='Related To', rel_date=None,
                       rel_confidence='high', rel_reason=''):
    """
    Forge a relationship between two TLOs.

    Args:
        left_id: The CRITs ID of the first indicator
        left_type: The CRITs TLO type of the first indicator
        right_id: The CRITs ID of the second indicator
        right_type: The CRITs TLO type of the second indicator
        rel_type: The relationships type ("Related To", etc)
        rel_date: datetime.datetime object for the date of the
                  relationship. If left blank, it will be
                  datetime.datetime.now()
        rel_confidence: The relationship confidence (high, medium, low)
        rel_reason: Reason for the relationship.

    Returns:
        True if the relationship was created. False otherwise.
    """
    effective_date = rel_date if rel_date else datetime.datetime.now()

    endpoint = '{}/{}/{}/'.format(self.url,
                                  self._type_translation(left_type),
                                  left_id)
    auth_params = {
        'api_key': self.api_key,
        'username': self.username,
    }
    payload = {
        'action': 'forge_relationship',
        'right_type': right_type,
        'right_id': right_id,
        'rel_type': rel_type,
        'rel_date': effective_date,
        'rel_confidence': rel_confidence,
        'rel_reason': rel_reason,
    }

    response = requests.patch(endpoint, params=auth_params, data=payload,
                              proxies=self.proxies, verify=self.verify)

    # Guard clause: anything but a 200 is treated as a failure.
    if response.status_code != 200:
        log.error('Error with status code {0} and message {1} between '
                  'these indicators: {2} <-> '
                  '{3}'.format(response.status_code, response.text,
                               left_id, right_id))
        return False

    log.debug('Relationship built successfully: {0} <-> '
              '{1}'.format(left_id, right_id))
    return True
def function[forge_relationship, parameter[self, left_id, left_type, right_id, right_type, rel_type, rel_date, rel_confidence, rel_reason]]: constant[ Forges a relationship between two TLOs. Args: left_id: The CRITs ID of the first indicator left_type: The CRITs TLO type of the first indicator right_id: The CRITs ID of the second indicator right_type: The CRITs TLO type of the second indicator rel_type: The relationships type ("Related To", etc) rel_date: datetime.datetime object for the date of the relationship. If left blank, it will be datetime.datetime.now() rel_confidence: The relationship confidence (high, medium, low) rel_reason: Reason for the relationship. Returns: True if the relationship was created. False otherwise. ] if <ast.UnaryOp object at 0x7da2045678b0> begin[:] variable[rel_date] assign[=] call[name[datetime].datetime.now, parameter[]] variable[type_trans] assign[=] call[name[self]._type_translation, parameter[name[left_type]]] variable[submit_url] assign[=] call[constant[{}/{}/{}/].format, parameter[name[self].url, name[type_trans], name[left_id]]] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da204566140>, <ast.Constant object at 0x7da2045655a0>], [<ast.Attribute object at 0x7da204565840>, <ast.Attribute object at 0x7da204566110>]] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da204566fb0>, <ast.Constant object at 0x7da2045665c0>, <ast.Constant object at 0x7da204564700>, <ast.Constant object at 0x7da204565240>, <ast.Constant object at 0x7da204564160>, <ast.Constant object at 0x7da204567f40>, <ast.Constant object at 0x7da204566620>], [<ast.Constant object at 0x7da204567940>, <ast.Name object at 0x7da204566740>, <ast.Name object at 0x7da204567c70>, <ast.Name object at 0x7da18dc990c0>, <ast.Name object at 0x7da18dc99150>, <ast.Name object at 0x7da18dc9a050>, <ast.Name object at 0x7da18dc9a2f0>]] variable[r] assign[=] call[name[requests].patch, parameter[name[submit_url]]] if compare[name[r].status_code 
equal[==] constant[200]] begin[:] call[name[log].debug, parameter[call[constant[Relationship built successfully: {0} <-> {1}].format, parameter[name[left_id], name[right_id]]]]] return[constant[True]]
keyword[def] identifier[forge_relationship] ( identifier[self] , identifier[left_id] , identifier[left_type] , identifier[right_id] , identifier[right_type] , identifier[rel_type] = literal[string] , identifier[rel_date] = keyword[None] , identifier[rel_confidence] = literal[string] , identifier[rel_reason] = literal[string] ): literal[string] keyword[if] keyword[not] identifier[rel_date] : identifier[rel_date] = identifier[datetime] . identifier[datetime] . identifier[now] () identifier[type_trans] = identifier[self] . identifier[_type_translation] ( identifier[left_type] ) identifier[submit_url] = literal[string] . identifier[format] ( identifier[self] . identifier[url] , identifier[type_trans] , identifier[left_id] ) identifier[params] ={ literal[string] : identifier[self] . identifier[api_key] , literal[string] : identifier[self] . identifier[username] , } identifier[data] ={ literal[string] : literal[string] , literal[string] : identifier[right_type] , literal[string] : identifier[right_id] , literal[string] : identifier[rel_type] , literal[string] : identifier[rel_date] , literal[string] : identifier[rel_confidence] , literal[string] : identifier[rel_reason] } identifier[r] = identifier[requests] . identifier[patch] ( identifier[submit_url] , identifier[params] = identifier[params] , identifier[data] = identifier[data] , identifier[proxies] = identifier[self] . identifier[proxies] , identifier[verify] = identifier[self] . identifier[verify] ) keyword[if] identifier[r] . identifier[status_code] == literal[int] : identifier[log] . identifier[debug] ( literal[string] literal[string] . identifier[format] ( identifier[left_id] , identifier[right_id] )) keyword[return] keyword[True] keyword[else] : identifier[log] . identifier[error] ( literal[string] literal[string] literal[string] . identifier[format] ( identifier[r] . identifier[status_code] , identifier[r] . identifier[text] , identifier[left_id] , identifier[right_id] )) keyword[return] keyword[False]
def forge_relationship(self, left_id, left_type, right_id, right_type, rel_type='Related To', rel_date=None, rel_confidence='high', rel_reason=''): """ Forges a relationship between two TLOs. Args: left_id: The CRITs ID of the first indicator left_type: The CRITs TLO type of the first indicator right_id: The CRITs ID of the second indicator right_type: The CRITs TLO type of the second indicator rel_type: The relationships type ("Related To", etc) rel_date: datetime.datetime object for the date of the relationship. If left blank, it will be datetime.datetime.now() rel_confidence: The relationship confidence (high, medium, low) rel_reason: Reason for the relationship. Returns: True if the relationship was created. False otherwise. """ if not rel_date: rel_date = datetime.datetime.now() # depends on [control=['if'], data=[]] type_trans = self._type_translation(left_type) submit_url = '{}/{}/{}/'.format(self.url, type_trans, left_id) params = {'api_key': self.api_key, 'username': self.username} data = {'action': 'forge_relationship', 'right_type': right_type, 'right_id': right_id, 'rel_type': rel_type, 'rel_date': rel_date, 'rel_confidence': rel_confidence, 'rel_reason': rel_reason} r = requests.patch(submit_url, params=params, data=data, proxies=self.proxies, verify=self.verify) if r.status_code == 200: log.debug('Relationship built successfully: {0} <-> {1}'.format(left_id, right_id)) return True # depends on [control=['if'], data=[]] else: log.error('Error with status code {0} and message {1} between these indicators: {2} <-> {3}'.format(r.status_code, r.text, left_id, right_id)) return False
def round_sig_error(num, uncert, pm=False):
    """
    Return a string of the number and its uncertainty to the right sig figs
    via uncertainty's print methods. The uncertainty determines the sig fig
    rounding of the number.

    https://pythonhosted.org/uncertainties/user_guide.html
    """
    quantity = ufloat(num, uncert)
    # '{:.1uL}' renders an explicit "±" LaTeX form; '{:.1uLS}' the shorthand
    # parenthesised form.  Both round to one significant uncertainty digit.
    template = '{:.1uL}' if pm else '{:.1uLS}'
    return template.format(quantity)
def function[round_sig_error, parameter[num, uncert, pm]]: constant[ Return a string of the number and its uncertainty to the right sig figs via uncertainty's print methods. The uncertainty determines the sig fig rounding of the number. https://pythonhosted.org/uncertainties/user_guide.html ] variable[u] assign[=] call[name[ufloat], parameter[name[num], name[uncert]]] if name[pm] begin[:] return[call[constant[{:.1uL}].format, parameter[name[u]]]]
keyword[def] identifier[round_sig_error] ( identifier[num] , identifier[uncert] , identifier[pm] = keyword[False] ): literal[string] identifier[u] = identifier[ufloat] ( identifier[num] , identifier[uncert] ) keyword[if] identifier[pm] : keyword[return] literal[string] . identifier[format] ( identifier[u] ) keyword[else] : keyword[return] literal[string] . identifier[format] ( identifier[u] )
def round_sig_error(num, uncert, pm=False): """ Return a string of the number and its uncertainty to the right sig figs via uncertainty's print methods. The uncertainty determines the sig fig rounding of the number. https://pythonhosted.org/uncertainties/user_guide.html """ u = ufloat(num, uncert) if pm: return '{:.1uL}'.format(u) # depends on [control=['if'], data=[]] else: return '{:.1uLS}'.format(u)
def object_get(self, multihash, **kwargs):
    """Get and serialize the DAG node named by multihash.

    .. code-block:: python

            >>> c.object_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
            {'Data': '\x08\x01',
             'Links': [
                {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV',
                 'Name': 'Makefile',          'Size': 174},
                {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX',
                 'Name': 'example',           'Size': 1474},
                {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK',
                 'Name': 'home',              'Size': 3947},
                {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso',
                 'Name': 'lib',               'Size': 268261},
                {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY',
                 'Name': 'published-version', 'Size': 55}]}

    Parameters
    ----------
    multihash : str
            Key of the object to retrieve, in base58-encoded multihash format

    Returns
    -------
        dict : Object data and links
    """
    # Thin wrapper: forward the key as a single positional API argument and
    # let the client decode the JSON response.
    return self._client.request('/object/get', (multihash,),
                                decoder='json', **kwargs)
def function[object_get, parameter[self, multihash]]: constant[Get and serialize the DAG node named by multihash. .. code-block:: python >>> c.object_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Data': '', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1474}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55}]} Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- dict : Object data and links ] variable[args] assign[=] tuple[[<ast.Name object at 0x7da18f7216f0>]] return[call[name[self]._client.request, parameter[constant[/object/get], name[args]]]]
keyword[def] identifier[object_get] ( identifier[self] , identifier[multihash] ,** identifier[kwargs] ): literal[string] identifier[args] =( identifier[multihash] ,) keyword[return] identifier[self] . identifier[_client] . identifier[request] ( literal[string] , identifier[args] , identifier[decoder] = literal[string] ,** identifier[kwargs] )
def object_get(self, multihash, **kwargs): """Get and serialize the DAG node named by multihash. .. code-block:: python >>> c.object_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Data': '\x08\x01', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1474}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55}]} Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- dict : Object data and links """ args = (multihash,) return self._client.request('/object/get', args, decoder='json', **kwargs)
def run(self, request_cb, notification_cb):
    """Run the event loop to receive requests and notifications from Nvim.

    While the event loop is running, `request_cb` and `_notification_cb`
    will be called whenever requests or notifications are respectively
    available.
    """
    # Install the callbacks for the duration of the loop ...
    self._request_cb, self._notification_cb = request_cb, notification_cb
    self._msgpack_stream.run(self._on_message)
    # ... and drop the references once the stream loop has returned.
    self._request_cb = None
    self._notification_cb = None
def function[run, parameter[self, request_cb, notification_cb]]: constant[Run the event loop to receive requests and notifications from Nvim. While the event loop is running, `request_cb` and `_notification_cb` will be called whenever requests or notifications are respectively available. ] name[self]._request_cb assign[=] name[request_cb] name[self]._notification_cb assign[=] name[notification_cb] call[name[self]._msgpack_stream.run, parameter[name[self]._on_message]] name[self]._request_cb assign[=] constant[None] name[self]._notification_cb assign[=] constant[None]
keyword[def] identifier[run] ( identifier[self] , identifier[request_cb] , identifier[notification_cb] ): literal[string] identifier[self] . identifier[_request_cb] = identifier[request_cb] identifier[self] . identifier[_notification_cb] = identifier[notification_cb] identifier[self] . identifier[_msgpack_stream] . identifier[run] ( identifier[self] . identifier[_on_message] ) identifier[self] . identifier[_request_cb] = keyword[None] identifier[self] . identifier[_notification_cb] = keyword[None]
def run(self, request_cb, notification_cb): """Run the event loop to receive requests and notifications from Nvim. While the event loop is running, `request_cb` and `_notification_cb` will be called whenever requests or notifications are respectively available. """ self._request_cb = request_cb self._notification_cb = notification_cb self._msgpack_stream.run(self._on_message) self._request_cb = None self._notification_cb = None
def getLorenzShares(data, weights=None, percentiles=(0.5,), presorted=False):
    '''
    Calculates the Lorenz curve at the requested percentiles of (weighted) data.
    Median by default.

    Parameters
    ----------
    data : numpy.array
        A 1D array of float data.
    weights : numpy.array
        A weighting vector for the data.
    percentiles : iterable of float
        Percentiles to calculate for the data.  Each element should be
        in (0,1).  (Default changed from a mutable list to a tuple; the
        value is unchanged.)
    presorted : boolean
        Indicator for whether data has already been sorted.

    Returns
    -------
    lorenz_out : numpy.array
        The requested Lorenz curve points of the data.
    '''
    if weights is None:  # Set equiprobable weights if none were given
        weights = np.ones(data.size)

    if presorted:  # Sort the data if it is not already
        data_sorted = data
        weights_sorted = weights
    else:
        order = np.argsort(data)
        data_sorted = data[order]
        weights_sorted = weights[order]

    # Cumulative probability distribution
    cum_dist = np.cumsum(weights_sorted) / np.sum(weights_sorted)
    temp = data_sorted * weights_sorted
    # Cumulative ownership shares (np.sum for consistency with the line above)
    cum_data = np.cumsum(temp) / np.sum(temp)

    # Calculate the requested Lorenz shares by interpolating the cumulative
    # ownership shares over the cumulative distribution, then evaluating at
    # the requested points.  Percentiles outside the observed cum_dist range
    # yield nan (bounds_error=False).
    lorenzFunc = interp1d(cum_dist, cum_data, bounds_error=False,
                          assume_sorted=True)
    lorenz_out = lorenzFunc(percentiles)
    return lorenz_out
def function[getLorenzShares, parameter[data, weights, percentiles, presorted]]: constant[ Calculates the Lorenz curve at the requested percentiles of (weighted) data. Median by default. Parameters ---------- data : numpy.array A 1D array of float data. weights : numpy.array A weighting vector for the data. percentiles : [float] A list of percentiles to calculate for the data. Each element should be in (0,1). presorted : boolean Indicator for whether data has already been sorted. Returns ------- lorenz_out : numpy.array The requested Lorenz curve points of the data. ] if compare[name[weights] is constant[None]] begin[:] variable[weights] assign[=] call[name[np].ones, parameter[name[data].size]] if name[presorted] begin[:] variable[data_sorted] assign[=] name[data] variable[weights_sorted] assign[=] name[weights] variable[cum_dist] assign[=] binary_operation[call[name[np].cumsum, parameter[name[weights_sorted]]] / call[name[np].sum, parameter[name[weights_sorted]]]] variable[temp] assign[=] binary_operation[name[data_sorted] * name[weights_sorted]] variable[cum_data] assign[=] binary_operation[call[name[np].cumsum, parameter[name[temp]]] / call[name[sum], parameter[name[temp]]]] variable[lorenzFunc] assign[=] call[name[interp1d], parameter[name[cum_dist], name[cum_data]]] variable[lorenz_out] assign[=] call[name[lorenzFunc], parameter[name[percentiles]]] return[name[lorenz_out]]
keyword[def] identifier[getLorenzShares] ( identifier[data] , identifier[weights] = keyword[None] , identifier[percentiles] =[ literal[int] ], identifier[presorted] = keyword[False] ): literal[string] keyword[if] identifier[weights] keyword[is] keyword[None] : identifier[weights] = identifier[np] . identifier[ones] ( identifier[data] . identifier[size] ) keyword[if] identifier[presorted] : identifier[data_sorted] = identifier[data] identifier[weights_sorted] = identifier[weights] keyword[else] : identifier[order] = identifier[np] . identifier[argsort] ( identifier[data] ) identifier[data_sorted] = identifier[data] [ identifier[order] ] identifier[weights_sorted] = identifier[weights] [ identifier[order] ] identifier[cum_dist] = identifier[np] . identifier[cumsum] ( identifier[weights_sorted] )/ identifier[np] . identifier[sum] ( identifier[weights_sorted] ) identifier[temp] = identifier[data_sorted] * identifier[weights_sorted] identifier[cum_data] = identifier[np] . identifier[cumsum] ( identifier[temp] )/ identifier[sum] ( identifier[temp] ) identifier[lorenzFunc] = identifier[interp1d] ( identifier[cum_dist] , identifier[cum_data] , identifier[bounds_error] = keyword[False] , identifier[assume_sorted] = keyword[True] ) identifier[lorenz_out] = identifier[lorenzFunc] ( identifier[percentiles] ) keyword[return] identifier[lorenz_out]
def getLorenzShares(data, weights=None, percentiles=[0.5], presorted=False): """ Calculates the Lorenz curve at the requested percentiles of (weighted) data. Median by default. Parameters ---------- data : numpy.array A 1D array of float data. weights : numpy.array A weighting vector for the data. percentiles : [float] A list of percentiles to calculate for the data. Each element should be in (0,1). presorted : boolean Indicator for whether data has already been sorted. Returns ------- lorenz_out : numpy.array The requested Lorenz curve points of the data. """ if weights is None: # Set equiprobable weights if none were given weights = np.ones(data.size) # depends on [control=['if'], data=['weights']] if presorted: # Sort the data if it is not already data_sorted = data weights_sorted = weights # depends on [control=['if'], data=[]] else: order = np.argsort(data) data_sorted = data[order] weights_sorted = weights[order] cum_dist = np.cumsum(weights_sorted) / np.sum(weights_sorted) # cumulative probability distribution temp = data_sorted * weights_sorted cum_data = np.cumsum(temp) / sum(temp) # cumulative ownership shares # Calculate the requested Lorenz shares by interpolating the cumulative ownership # shares over the cumulative distribution, then evaluating at requested points lorenzFunc = interp1d(cum_dist, cum_data, bounds_error=False, assume_sorted=True) lorenz_out = lorenzFunc(percentiles) return lorenz_out
def Nu_plate_Muley_Manglik(Re, Pr, chevron_angle, plate_enlargement_factor): r'''Calculates Nusselt number for single-phase flow in a Chevron-style plate heat exchanger according to [1]_, also shown in [2]_ and [3]_. .. math:: Nu = [0.2668 - 0.006967(\beta) + 7.244\times 10^{-5}(\beta)^2] \times[20.7803 - 50.9372\phi + 41.1585\phi^2 - 10.1507\phi^3] \times Re^{[0.728 + 0.0543\sin[(2\pi\beta/90) + 3.7]]} Pr^{1/3} Parameters ---------- Re : float Reynolds number with respect to the hydraulic diameter of the channels, [-] Pr : float Prandtl number calculated with bulk fluid properties, [-] chevron_angle : float Angle of the plate corrugations with respect to the vertical axis (the direction of flow if the plates were straight), between 0 and 90. Many plate exchangers use two alternating patterns; use their average angle for that situation [degrees] plate_enlargement_factor : float The extra surface area multiplier as compared to a flat plate caused the corrugations, [-] Returns ------- Nu : float Nusselt number with respect to `Dh`, [-] Notes ----- The correlation as presented in [1]_ suffers from a typo, with a coefficient of 10.51 instead of 10.15. Several more decimal places were published along with the corrected typo in [2]_. This has a *very large* difference if not implemented. The viscosity correction power is recommended to be the blanket Sieder and Tate (1936) value of 0.14. The correlation is recommended in the range of Reynolds numbers above 1000, chevron angles between 30 and 60 degrees, and enlargement factors from 1 to 1.5. Due to its cubic nature it is not likely to give good results if the chevron angle or enlargement factors are out of those ranges. Examples -------- >>> Nu_plate_Muley_Manglik(Re=2000, Pr=.7, chevron_angle=45, ... plate_enlargement_factor=1.18) 36.49087100602062 References ---------- .. [1] Muley, A., and R. M. Manglik. 
"Experimental Study of Turbulent Flow Heat Transfer and Pressure Drop in a Plate Heat Exchanger With Chevron Plates." Journal of Heat Transfer 121, no. 1 (February 1, 1999): 110-17. doi:10.1115/1.2825923. .. [2] Palm, Björn, and Joachim Claesson. "Plate Heat Exchangers: Calculation Methods for Single- and Two-Phase Flow (Keynote)," January 1, 2005, 103-13. https://doi.org/10.1115/ICMM2005-75092. ''' beta, phi = chevron_angle, plate_enlargement_factor t1 = (0.2668 - 0.006967*beta + 7.244E-5*beta**2) #t2 = (20.78 - 50.94*phi + 41.16*phi**2 - 10.51*phi**3) # It was the extra decimals which were needed t2 = (20.7803 - 50.9372*phi + 41.1585*phi**2 - 10.1507*phi**3) t3 = (0.728 + 0.0543*sin((2*pi*beta/90) + 3.7)) return t1*t2*Re**t3*Pr**(1/3.)
def function[Nu_plate_Muley_Manglik, parameter[Re, Pr, chevron_angle, plate_enlargement_factor]]: constant[Calculates Nusselt number for single-phase flow in a Chevron-style plate heat exchanger according to [1]_, also shown in [2]_ and [3]_. .. math:: Nu = [0.2668 - 0.006967(\beta) + 7.244\times 10^{-5}(\beta)^2] \times[20.7803 - 50.9372\phi + 41.1585\phi^2 - 10.1507\phi^3] \times Re^{[0.728 + 0.0543\sin[(2\pi\beta/90) + 3.7]]} Pr^{1/3} Parameters ---------- Re : float Reynolds number with respect to the hydraulic diameter of the channels, [-] Pr : float Prandtl number calculated with bulk fluid properties, [-] chevron_angle : float Angle of the plate corrugations with respect to the vertical axis (the direction of flow if the plates were straight), between 0 and 90. Many plate exchangers use two alternating patterns; use their average angle for that situation [degrees] plate_enlargement_factor : float The extra surface area multiplier as compared to a flat plate caused the corrugations, [-] Returns ------- Nu : float Nusselt number with respect to `Dh`, [-] Notes ----- The correlation as presented in [1]_ suffers from a typo, with a coefficient of 10.51 instead of 10.15. Several more decimal places were published along with the corrected typo in [2]_. This has a *very large* difference if not implemented. The viscosity correction power is recommended to be the blanket Sieder and Tate (1936) value of 0.14. The correlation is recommended in the range of Reynolds numbers above 1000, chevron angles between 30 and 60 degrees, and enlargement factors from 1 to 1.5. Due to its cubic nature it is not likely to give good results if the chevron angle or enlargement factors are out of those ranges. Examples -------- >>> Nu_plate_Muley_Manglik(Re=2000, Pr=.7, chevron_angle=45, ... plate_enlargement_factor=1.18) 36.49087100602062 References ---------- .. [1] Muley, A., and R. M. Manglik. 
"Experimental Study of Turbulent Flow Heat Transfer and Pressure Drop in a Plate Heat Exchanger With Chevron Plates." Journal of Heat Transfer 121, no. 1 (February 1, 1999): 110-17. doi:10.1115/1.2825923. .. [2] Palm, Björn, and Joachim Claesson. "Plate Heat Exchangers: Calculation Methods for Single- and Two-Phase Flow (Keynote)," January 1, 2005, 103-13. https://doi.org/10.1115/ICMM2005-75092. ] <ast.Tuple object at 0x7da18ede46a0> assign[=] tuple[[<ast.Name object at 0x7da18ede78b0>, <ast.Name object at 0x7da18ede6ec0>]] variable[t1] assign[=] binary_operation[binary_operation[constant[0.2668] - binary_operation[constant[0.006967] * name[beta]]] + binary_operation[constant[7.244e-05] * binary_operation[name[beta] ** constant[2]]]] variable[t2] assign[=] binary_operation[binary_operation[binary_operation[constant[20.7803] - binary_operation[constant[50.9372] * name[phi]]] + binary_operation[constant[41.1585] * binary_operation[name[phi] ** constant[2]]]] - binary_operation[constant[10.1507] * binary_operation[name[phi] ** constant[3]]]] variable[t3] assign[=] binary_operation[constant[0.728] + binary_operation[constant[0.0543] * call[name[sin], parameter[binary_operation[binary_operation[binary_operation[binary_operation[constant[2] * name[pi]] * name[beta]] / constant[90]] + constant[3.7]]]]]] return[binary_operation[binary_operation[binary_operation[name[t1] * name[t2]] * binary_operation[name[Re] ** name[t3]]] * binary_operation[name[Pr] ** binary_operation[constant[1] / constant[3.0]]]]]
keyword[def] identifier[Nu_plate_Muley_Manglik] ( identifier[Re] , identifier[Pr] , identifier[chevron_angle] , identifier[plate_enlargement_factor] ): literal[string] identifier[beta] , identifier[phi] = identifier[chevron_angle] , identifier[plate_enlargement_factor] identifier[t1] =( literal[int] - literal[int] * identifier[beta] + literal[int] * identifier[beta] ** literal[int] ) identifier[t2] =( literal[int] - literal[int] * identifier[phi] + literal[int] * identifier[phi] ** literal[int] - literal[int] * identifier[phi] ** literal[int] ) identifier[t3] =( literal[int] + literal[int] * identifier[sin] (( literal[int] * identifier[pi] * identifier[beta] / literal[int] )+ literal[int] )) keyword[return] identifier[t1] * identifier[t2] * identifier[Re] ** identifier[t3] * identifier[Pr] **( literal[int] / literal[int] )
def Nu_plate_Muley_Manglik(Re, Pr, chevron_angle, plate_enlargement_factor): """Calculates Nusselt number for single-phase flow in a Chevron-style plate heat exchanger according to [1]_, also shown in [2]_ and [3]_. .. math:: Nu = [0.2668 - 0.006967(\\beta) + 7.244\\times 10^{-5}(\\beta)^2] \\times[20.7803 - 50.9372\\phi + 41.1585\\phi^2 - 10.1507\\phi^3] \\times Re^{[0.728 + 0.0543\\sin[(2\\pi\\beta/90) + 3.7]]} Pr^{1/3} Parameters ---------- Re : float Reynolds number with respect to the hydraulic diameter of the channels, [-] Pr : float Prandtl number calculated with bulk fluid properties, [-] chevron_angle : float Angle of the plate corrugations with respect to the vertical axis (the direction of flow if the plates were straight), between 0 and 90. Many plate exchangers use two alternating patterns; use their average angle for that situation [degrees] plate_enlargement_factor : float The extra surface area multiplier as compared to a flat plate caused the corrugations, [-] Returns ------- Nu : float Nusselt number with respect to `Dh`, [-] Notes ----- The correlation as presented in [1]_ suffers from a typo, with a coefficient of 10.51 instead of 10.15. Several more decimal places were published along with the corrected typo in [2]_. This has a *very large* difference if not implemented. The viscosity correction power is recommended to be the blanket Sieder and Tate (1936) value of 0.14. The correlation is recommended in the range of Reynolds numbers above 1000, chevron angles between 30 and 60 degrees, and enlargement factors from 1 to 1.5. Due to its cubic nature it is not likely to give good results if the chevron angle or enlargement factors are out of those ranges. Examples -------- >>> Nu_plate_Muley_Manglik(Re=2000, Pr=.7, chevron_angle=45, ... plate_enlargement_factor=1.18) 36.49087100602062 References ---------- .. [1] Muley, A., and R. M. Manglik. 
"Experimental Study of Turbulent Flow Heat Transfer and Pressure Drop in a Plate Heat Exchanger With Chevron Plates." Journal of Heat Transfer 121, no. 1 (February 1, 1999): 110-17. doi:10.1115/1.2825923. .. [2] Palm, Björn, and Joachim Claesson. "Plate Heat Exchangers: Calculation Methods for Single- and Two-Phase Flow (Keynote)," January 1, 2005, 103-13. https://doi.org/10.1115/ICMM2005-75092. """ (beta, phi) = (chevron_angle, plate_enlargement_factor) t1 = 0.2668 - 0.006967 * beta + 7.244e-05 * beta ** 2 #t2 = (20.78 - 50.94*phi + 41.16*phi**2 - 10.51*phi**3) # It was the extra decimals which were needed t2 = 20.7803 - 50.9372 * phi + 41.1585 * phi ** 2 - 10.1507 * phi ** 3 t3 = 0.728 + 0.0543 * sin(2 * pi * beta / 90 + 3.7) return t1 * t2 * Re ** t3 * Pr ** (1 / 3.0)
def cdf(self, x): """ Computes the cdf of a specific value, ie. computes F(x) where F denotes the CDF of the distribution. """ t = 0 N = float(self.n) if len(self) == 1: # only one centroid return int(x >= self.C.min_key()) for i, key in enumerate(self.C.keys()): c_i = self.C[key] if i == len(self) - 1: delta = (c_i.mean - self.C.prev_item(key)[1].mean) / 2. else: delta = (self.C.succ_item(key)[1].mean - c_i.mean) / 2. z = max(-1, (x - c_i.mean) / delta) if z < 1: return t / N + c_i.count / N * (z + 1) / 2 t += c_i.count return 1
def function[cdf, parameter[self, x]]: constant[ Computes the cdf of a specific value, ie. computes F(x) where F denotes the CDF of the distribution. ] variable[t] assign[=] constant[0] variable[N] assign[=] call[name[float], parameter[name[self].n]] if compare[call[name[len], parameter[name[self]]] equal[==] constant[1]] begin[:] return[call[name[int], parameter[compare[name[x] greater_or_equal[>=] call[name[self].C.min_key, parameter[]]]]]] for taget[tuple[[<ast.Name object at 0x7da1b0716c20>, <ast.Name object at 0x7da1b07144c0>]]] in starred[call[name[enumerate], parameter[call[name[self].C.keys, parameter[]]]]] begin[:] variable[c_i] assign[=] call[name[self].C][name[key]] if compare[name[i] equal[==] binary_operation[call[name[len], parameter[name[self]]] - constant[1]]] begin[:] variable[delta] assign[=] binary_operation[binary_operation[name[c_i].mean - call[call[name[self].C.prev_item, parameter[name[key]]]][constant[1]].mean] / constant[2.0]] variable[z] assign[=] call[name[max], parameter[<ast.UnaryOp object at 0x7da1b07176d0>, binary_operation[binary_operation[name[x] - name[c_i].mean] / name[delta]]]] if compare[name[z] less[<] constant[1]] begin[:] return[binary_operation[binary_operation[name[t] / name[N]] + binary_operation[binary_operation[binary_operation[name[c_i].count / name[N]] * binary_operation[name[z] + constant[1]]] / constant[2]]]] <ast.AugAssign object at 0x7da1b0714460> return[constant[1]]
keyword[def] identifier[cdf] ( identifier[self] , identifier[x] ): literal[string] identifier[t] = literal[int] identifier[N] = identifier[float] ( identifier[self] . identifier[n] ) keyword[if] identifier[len] ( identifier[self] )== literal[int] : keyword[return] identifier[int] ( identifier[x] >= identifier[self] . identifier[C] . identifier[min_key] ()) keyword[for] identifier[i] , identifier[key] keyword[in] identifier[enumerate] ( identifier[self] . identifier[C] . identifier[keys] ()): identifier[c_i] = identifier[self] . identifier[C] [ identifier[key] ] keyword[if] identifier[i] == identifier[len] ( identifier[self] )- literal[int] : identifier[delta] =( identifier[c_i] . identifier[mean] - identifier[self] . identifier[C] . identifier[prev_item] ( identifier[key] )[ literal[int] ]. identifier[mean] )/ literal[int] keyword[else] : identifier[delta] =( identifier[self] . identifier[C] . identifier[succ_item] ( identifier[key] )[ literal[int] ]. identifier[mean] - identifier[c_i] . identifier[mean] )/ literal[int] identifier[z] = identifier[max] (- literal[int] ,( identifier[x] - identifier[c_i] . identifier[mean] )/ identifier[delta] ) keyword[if] identifier[z] < literal[int] : keyword[return] identifier[t] / identifier[N] + identifier[c_i] . identifier[count] / identifier[N] *( identifier[z] + literal[int] )/ literal[int] identifier[t] += identifier[c_i] . identifier[count] keyword[return] literal[int]
def cdf(self, x): """ Computes the cdf of a specific value, ie. computes F(x) where F denotes the CDF of the distribution. """ t = 0 N = float(self.n) if len(self) == 1: # only one centroid return int(x >= self.C.min_key()) # depends on [control=['if'], data=[]] for (i, key) in enumerate(self.C.keys()): c_i = self.C[key] if i == len(self) - 1: delta = (c_i.mean - self.C.prev_item(key)[1].mean) / 2.0 # depends on [control=['if'], data=[]] else: delta = (self.C.succ_item(key)[1].mean - c_i.mean) / 2.0 z = max(-1, (x - c_i.mean) / delta) if z < 1: return t / N + c_i.count / N * (z + 1) / 2 # depends on [control=['if'], data=['z']] t += c_i.count # depends on [control=['for'], data=[]] return 1
def get_json_feed_content(url, offset=0, limit=None): """ Get the entries in a JSON feed """ end = limit + offset if limit is not None else None response = _get(url) try: content = json.loads(response.text) except Exception as parse_error: logger.warning( 'Failed to parse feed from {}: {}'.format(url, str(parse_error)) ) return False return content[offset:end]
def function[get_json_feed_content, parameter[url, offset, limit]]: constant[ Get the entries in a JSON feed ] variable[end] assign[=] <ast.IfExp object at 0x7da1b09be1d0> variable[response] assign[=] call[name[_get], parameter[name[url]]] <ast.Try object at 0x7da1b09bd270> return[call[name[content]][<ast.Slice object at 0x7da1b09bc760>]]
keyword[def] identifier[get_json_feed_content] ( identifier[url] , identifier[offset] = literal[int] , identifier[limit] = keyword[None] ): literal[string] identifier[end] = identifier[limit] + identifier[offset] keyword[if] identifier[limit] keyword[is] keyword[not] keyword[None] keyword[else] keyword[None] identifier[response] = identifier[_get] ( identifier[url] ) keyword[try] : identifier[content] = identifier[json] . identifier[loads] ( identifier[response] . identifier[text] ) keyword[except] identifier[Exception] keyword[as] identifier[parse_error] : identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[url] , identifier[str] ( identifier[parse_error] )) ) keyword[return] keyword[False] keyword[return] identifier[content] [ identifier[offset] : identifier[end] ]
def get_json_feed_content(url, offset=0, limit=None): """ Get the entries in a JSON feed """ end = limit + offset if limit is not None else None response = _get(url) try: content = json.loads(response.text) # depends on [control=['try'], data=[]] except Exception as parse_error: logger.warning('Failed to parse feed from {}: {}'.format(url, str(parse_error))) return False # depends on [control=['except'], data=['parse_error']] return content[offset:end]
def gw_get(object_dict, name=None, plugin=None): """ Getter function to retrieve objects from a given object dictionary. Used mainly to provide get() inside patterns. :param object_dict: objects, which must have 'name' and 'plugin' as attribute :type object_dict: dictionary :param name: name of the object :type name: str :param plugin: plugin name, which registers the object :return: None, single object or dict of objects """ if plugin is not None: if name is None: object_list = {} for key in object_dict.keys(): if object_dict[key].plugin == plugin: object_list[key] = object_dict[key] return object_list else: if name in object_dict.keys(): if object_dict[name].plugin == plugin: return object_dict[name] else: return None else: return None else: if name is None: return object_dict else: if name in object_dict.keys(): return object_dict[name] else: return None
def function[gw_get, parameter[object_dict, name, plugin]]: constant[ Getter function to retrieve objects from a given object dictionary. Used mainly to provide get() inside patterns. :param object_dict: objects, which must have 'name' and 'plugin' as attribute :type object_dict: dictionary :param name: name of the object :type name: str :param plugin: plugin name, which registers the object :return: None, single object or dict of objects ] if compare[name[plugin] is_not constant[None]] begin[:] if compare[name[name] is constant[None]] begin[:] variable[object_list] assign[=] dictionary[[], []] for taget[name[key]] in starred[call[name[object_dict].keys, parameter[]]] begin[:] if compare[call[name[object_dict]][name[key]].plugin equal[==] name[plugin]] begin[:] call[name[object_list]][name[key]] assign[=] call[name[object_dict]][name[key]] return[name[object_list]]
keyword[def] identifier[gw_get] ( identifier[object_dict] , identifier[name] = keyword[None] , identifier[plugin] = keyword[None] ): literal[string] keyword[if] identifier[plugin] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[name] keyword[is] keyword[None] : identifier[object_list] ={} keyword[for] identifier[key] keyword[in] identifier[object_dict] . identifier[keys] (): keyword[if] identifier[object_dict] [ identifier[key] ]. identifier[plugin] == identifier[plugin] : identifier[object_list] [ identifier[key] ]= identifier[object_dict] [ identifier[key] ] keyword[return] identifier[object_list] keyword[else] : keyword[if] identifier[name] keyword[in] identifier[object_dict] . identifier[keys] (): keyword[if] identifier[object_dict] [ identifier[name] ]. identifier[plugin] == identifier[plugin] : keyword[return] identifier[object_dict] [ identifier[name] ] keyword[else] : keyword[return] keyword[None] keyword[else] : keyword[return] keyword[None] keyword[else] : keyword[if] identifier[name] keyword[is] keyword[None] : keyword[return] identifier[object_dict] keyword[else] : keyword[if] identifier[name] keyword[in] identifier[object_dict] . identifier[keys] (): keyword[return] identifier[object_dict] [ identifier[name] ] keyword[else] : keyword[return] keyword[None]
def gw_get(object_dict, name=None, plugin=None): """ Getter function to retrieve objects from a given object dictionary. Used mainly to provide get() inside patterns. :param object_dict: objects, which must have 'name' and 'plugin' as attribute :type object_dict: dictionary :param name: name of the object :type name: str :param plugin: plugin name, which registers the object :return: None, single object or dict of objects """ if plugin is not None: if name is None: object_list = {} for key in object_dict.keys(): if object_dict[key].plugin == plugin: object_list[key] = object_dict[key] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] return object_list # depends on [control=['if'], data=[]] elif name in object_dict.keys(): if object_dict[name].plugin == plugin: return object_dict[name] # depends on [control=['if'], data=[]] else: return None # depends on [control=['if'], data=['name']] else: return None # depends on [control=['if'], data=['plugin']] elif name is None: return object_dict # depends on [control=['if'], data=[]] elif name in object_dict.keys(): return object_dict[name] # depends on [control=['if'], data=['name']] else: return None
def build(sub_parser, cmds): """todo: Docstring for build :param sub_parser: arg description :type sub_parser: type description :return: :rtype: """ res = {} for cmd in cmds: res[cmd.name] = cmd(sub_parser) # end for cmd in cmds return res
def function[build, parameter[sub_parser, cmds]]: constant[todo: Docstring for build :param sub_parser: arg description :type sub_parser: type description :return: :rtype: ] variable[res] assign[=] dictionary[[], []] for taget[name[cmd]] in starred[name[cmds]] begin[:] call[name[res]][name[cmd].name] assign[=] call[name[cmd], parameter[name[sub_parser]]] return[name[res]]
keyword[def] identifier[build] ( identifier[sub_parser] , identifier[cmds] ): literal[string] identifier[res] ={} keyword[for] identifier[cmd] keyword[in] identifier[cmds] : identifier[res] [ identifier[cmd] . identifier[name] ]= identifier[cmd] ( identifier[sub_parser] ) keyword[return] identifier[res]
def build(sub_parser, cmds): """todo: Docstring for build :param sub_parser: arg description :type sub_parser: type description :return: :rtype: """ res = {} for cmd in cmds: res[cmd.name] = cmd(sub_parser) # depends on [control=['for'], data=['cmd']] # end for cmd in cmds return res
def _rle(self, a): ''' rle implementation credit to Thomas Browne from his SOF post Sept 2015 Parameters ---------- a : array, shape[n,] input vector Returns ------- z : array, shape[nt,] run lengths p : array, shape[nt,] start positions of each run ar : array, shape[nt,] values for each run ''' ia = np.asarray(a) n = len(ia) y = np.array(ia[1:] != ia[:-1]) # pairwise unequal (string safe) i = np.append(np.where(y), n - 1) # must include last element posi z = np.diff(np.append(-1, i)) # run lengths p = np.cumsum(np.append(0, z))[:-1] # positions return (z, p, ia[i])
def function[_rle, parameter[self, a]]: constant[ rle implementation credit to Thomas Browne from his SOF post Sept 2015 Parameters ---------- a : array, shape[n,] input vector Returns ------- z : array, shape[nt,] run lengths p : array, shape[nt,] start positions of each run ar : array, shape[nt,] values for each run ] variable[ia] assign[=] call[name[np].asarray, parameter[name[a]]] variable[n] assign[=] call[name[len], parameter[name[ia]]] variable[y] assign[=] call[name[np].array, parameter[compare[call[name[ia]][<ast.Slice object at 0x7da1b17f88e0>] not_equal[!=] call[name[ia]][<ast.Slice object at 0x7da1b17fbeb0>]]]] variable[i] assign[=] call[name[np].append, parameter[call[name[np].where, parameter[name[y]]], binary_operation[name[n] - constant[1]]]] variable[z] assign[=] call[name[np].diff, parameter[call[name[np].append, parameter[<ast.UnaryOp object at 0x7da1b17f9840>, name[i]]]]] variable[p] assign[=] call[call[name[np].cumsum, parameter[call[name[np].append, parameter[constant[0], name[z]]]]]][<ast.Slice object at 0x7da1b15f2fe0>] return[tuple[[<ast.Name object at 0x7da1b15f0430>, <ast.Name object at 0x7da1b15f0e50>, <ast.Subscript object at 0x7da1b15f22c0>]]]
keyword[def] identifier[_rle] ( identifier[self] , identifier[a] ): literal[string] identifier[ia] = identifier[np] . identifier[asarray] ( identifier[a] ) identifier[n] = identifier[len] ( identifier[ia] ) identifier[y] = identifier[np] . identifier[array] ( identifier[ia] [ literal[int] :]!= identifier[ia] [:- literal[int] ]) identifier[i] = identifier[np] . identifier[append] ( identifier[np] . identifier[where] ( identifier[y] ), identifier[n] - literal[int] ) identifier[z] = identifier[np] . identifier[diff] ( identifier[np] . identifier[append] (- literal[int] , identifier[i] )) identifier[p] = identifier[np] . identifier[cumsum] ( identifier[np] . identifier[append] ( literal[int] , identifier[z] ))[:- literal[int] ] keyword[return] ( identifier[z] , identifier[p] , identifier[ia] [ identifier[i] ])
def _rle(self, a): """ rle implementation credit to Thomas Browne from his SOF post Sept 2015 Parameters ---------- a : array, shape[n,] input vector Returns ------- z : array, shape[nt,] run lengths p : array, shape[nt,] start positions of each run ar : array, shape[nt,] values for each run """ ia = np.asarray(a) n = len(ia) y = np.array(ia[1:] != ia[:-1]) # pairwise unequal (string safe) i = np.append(np.where(y), n - 1) # must include last element posi z = np.diff(np.append(-1, i)) # run lengths p = np.cumsum(np.append(0, z))[:-1] # positions return (z, p, ia[i])
def on_need_compare(self, pair): """Re-classify pair based on file attributes and options.""" # print("on_need_compare", pair) # If no metadata is available, we could only classify file entries as # 'existing'. # Now we use peer information to improve this classification. c_pair = (pair.local_classification, pair.remote_classification) org_pair = c_pair org_operation = pair.operation # print("need_compare", pair) if pair.is_dir: # For directores, we cannot compare existing peer entries. # Instead, we simply log (and traverse the children later). pair.local_classification = pair.remote_classification = "existing" pair.operation = "equal" self._log_action("", "visit", "?", pair.local, min_level=4) # self._log_action("", "equal", "=", pair.local, min_level=4) return elif c_pair == ("existing", "existing"): # Naive classification derived from file time and size time_cmp = eps_compare( pair.local.mtime, pair.remote.mtime, FileEntry.EPS_TIME ) if time_cmp < 0: c_pair = ("unmodified", "modified") # remote is newer elif time_cmp > 0: c_pair = ("modified", "unmodified") # local is newer elif pair.local.size == pair.remote.size: c_pair = ("unmodified", "unmodified") # equal else: c_pair = ("modified", "modified") # conflict! elif c_pair == ("new", "new"): # Naive classification derived from file time and size time_cmp = eps_compare( pair.local.mtime, pair.remote.mtime, FileEntry.EPS_TIME ) if time_cmp == 0 and pair.local.size == pair.remote.size: c_pair = ("unmodified", "unmodified") # equal else: c_pair = ("modified", "modified") # conflict! 
# elif c_pair == ("unmodified", "unmodified"): pair.local_classification = c_pair[0] pair.remote_classification = c_pair[1] pair.operation = operation_map.get(c_pair) # print("on_need_compare {} => {}".format(org_pair, pair)) if not pair.operation: raise RuntimeError( "Undefined operation for pair classification {}".format(c_pair) ) elif pair.operation == org_operation: raise RuntimeError("Could not re-classify {}".format(org_pair)) handler = getattr(self, "on_" + pair.operation, None) res = handler(pair) # self._log_action("", "different", "?", pair.local, min_level=2) return res
def function[on_need_compare, parameter[self, pair]]: constant[Re-classify pair based on file attributes and options.] variable[c_pair] assign[=] tuple[[<ast.Attribute object at 0x7da1b0418a60>, <ast.Attribute object at 0x7da1b0419e70>]] variable[org_pair] assign[=] name[c_pair] variable[org_operation] assign[=] name[pair].operation if name[pair].is_dir begin[:] name[pair].local_classification assign[=] constant[existing] name[pair].operation assign[=] constant[equal] call[name[self]._log_action, parameter[constant[], constant[visit], constant[?], name[pair].local]] return[None] name[pair].local_classification assign[=] call[name[c_pair]][constant[0]] name[pair].remote_classification assign[=] call[name[c_pair]][constant[1]] name[pair].operation assign[=] call[name[operation_map].get, parameter[name[c_pair]]] if <ast.UnaryOp object at 0x7da1b0538250> begin[:] <ast.Raise object at 0x7da1b053ba90> variable[handler] assign[=] call[name[getattr], parameter[name[self], binary_operation[constant[on_] + name[pair].operation], constant[None]]] variable[res] assign[=] call[name[handler], parameter[name[pair]]] return[name[res]]
keyword[def] identifier[on_need_compare] ( identifier[self] , identifier[pair] ): literal[string] identifier[c_pair] =( identifier[pair] . identifier[local_classification] , identifier[pair] . identifier[remote_classification] ) identifier[org_pair] = identifier[c_pair] identifier[org_operation] = identifier[pair] . identifier[operation] keyword[if] identifier[pair] . identifier[is_dir] : identifier[pair] . identifier[local_classification] = identifier[pair] . identifier[remote_classification] = literal[string] identifier[pair] . identifier[operation] = literal[string] identifier[self] . identifier[_log_action] ( literal[string] , literal[string] , literal[string] , identifier[pair] . identifier[local] , identifier[min_level] = literal[int] ) keyword[return] keyword[elif] identifier[c_pair] ==( literal[string] , literal[string] ): identifier[time_cmp] = identifier[eps_compare] ( identifier[pair] . identifier[local] . identifier[mtime] , identifier[pair] . identifier[remote] . identifier[mtime] , identifier[FileEntry] . identifier[EPS_TIME] ) keyword[if] identifier[time_cmp] < literal[int] : identifier[c_pair] =( literal[string] , literal[string] ) keyword[elif] identifier[time_cmp] > literal[int] : identifier[c_pair] =( literal[string] , literal[string] ) keyword[elif] identifier[pair] . identifier[local] . identifier[size] == identifier[pair] . identifier[remote] . identifier[size] : identifier[c_pair] =( literal[string] , literal[string] ) keyword[else] : identifier[c_pair] =( literal[string] , literal[string] ) keyword[elif] identifier[c_pair] ==( literal[string] , literal[string] ): identifier[time_cmp] = identifier[eps_compare] ( identifier[pair] . identifier[local] . identifier[mtime] , identifier[pair] . identifier[remote] . identifier[mtime] , identifier[FileEntry] . identifier[EPS_TIME] ) keyword[if] identifier[time_cmp] == literal[int] keyword[and] identifier[pair] . identifier[local] . identifier[size] == identifier[pair] . identifier[remote] . 
identifier[size] : identifier[c_pair] =( literal[string] , literal[string] ) keyword[else] : identifier[c_pair] =( literal[string] , literal[string] ) identifier[pair] . identifier[local_classification] = identifier[c_pair] [ literal[int] ] identifier[pair] . identifier[remote_classification] = identifier[c_pair] [ literal[int] ] identifier[pair] . identifier[operation] = identifier[operation_map] . identifier[get] ( identifier[c_pair] ) keyword[if] keyword[not] identifier[pair] . identifier[operation] : keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[c_pair] ) ) keyword[elif] identifier[pair] . identifier[operation] == identifier[org_operation] : keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[org_pair] )) identifier[handler] = identifier[getattr] ( identifier[self] , literal[string] + identifier[pair] . identifier[operation] , keyword[None] ) identifier[res] = identifier[handler] ( identifier[pair] ) keyword[return] identifier[res]
def on_need_compare(self, pair): """Re-classify pair based on file attributes and options.""" # print("on_need_compare", pair) # If no metadata is available, we could only classify file entries as # 'existing'. # Now we use peer information to improve this classification. c_pair = (pair.local_classification, pair.remote_classification) org_pair = c_pair org_operation = pair.operation # print("need_compare", pair) if pair.is_dir: # For directores, we cannot compare existing peer entries. # Instead, we simply log (and traverse the children later). pair.local_classification = pair.remote_classification = 'existing' pair.operation = 'equal' self._log_action('', 'visit', '?', pair.local, min_level=4) # self._log_action("", "equal", "=", pair.local, min_level=4) return # depends on [control=['if'], data=[]] elif c_pair == ('existing', 'existing'): # Naive classification derived from file time and size time_cmp = eps_compare(pair.local.mtime, pair.remote.mtime, FileEntry.EPS_TIME) if time_cmp < 0: c_pair = ('unmodified', 'modified') # remote is newer # depends on [control=['if'], data=[]] elif time_cmp > 0: c_pair = ('modified', 'unmodified') # local is newer # depends on [control=['if'], data=[]] elif pair.local.size == pair.remote.size: c_pair = ('unmodified', 'unmodified') # equal # depends on [control=['if'], data=[]] else: c_pair = ('modified', 'modified') # conflict! # depends on [control=['if'], data=['c_pair']] elif c_pair == ('new', 'new'): # Naive classification derived from file time and size time_cmp = eps_compare(pair.local.mtime, pair.remote.mtime, FileEntry.EPS_TIME) if time_cmp == 0 and pair.local.size == pair.remote.size: c_pair = ('unmodified', 'unmodified') # equal # depends on [control=['if'], data=[]] else: c_pair = ('modified', 'modified') # conflict! 
# depends on [control=['if'], data=['c_pair']] # elif c_pair == ("unmodified", "unmodified"): pair.local_classification = c_pair[0] pair.remote_classification = c_pair[1] pair.operation = operation_map.get(c_pair) # print("on_need_compare {} => {}".format(org_pair, pair)) if not pair.operation: raise RuntimeError('Undefined operation for pair classification {}'.format(c_pair)) # depends on [control=['if'], data=[]] elif pair.operation == org_operation: raise RuntimeError('Could not re-classify {}'.format(org_pair)) # depends on [control=['if'], data=[]] handler = getattr(self, 'on_' + pair.operation, None) res = handler(pair) # self._log_action("", "different", "?", pair.local, min_level=2) return res
def hil_gps_encode(self, time_usec, fix_type, lat, lon, alt, eph, epv, vel, vn, ve, vd, cog, satellites_visible): ''' The global position, as returned by the Global Positioning System (GPS). This is NOT the global position estimate of the sytem, but rather a RAW sensor value. See message GLOBAL_POSITION for the global position estimate. Coordinate frame is right- handed, Z-axis up (GPS frame). time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) fix_type : 0-1: no fix, 2: 2D fix, 3: 3D fix. Some applications will not use the value of this field unless it is at least two, so always correctly fill in the fix. (uint8_t) lat : Latitude (WGS84), in degrees * 1E7 (int32_t) lon : Longitude (WGS84), in degrees * 1E7 (int32_t) alt : Altitude (AMSL, not WGS84), in meters * 1000 (positive for up) (int32_t) eph : GPS HDOP horizontal dilution of position in cm (m*100). If unknown, set to: 65535 (uint16_t) epv : GPS VDOP vertical dilution of position in cm (m*100). If unknown, set to: 65535 (uint16_t) vel : GPS ground speed (m/s * 100). If unknown, set to: 65535 (uint16_t) vn : GPS velocity in cm/s in NORTH direction in earth-fixed NED frame (int16_t) ve : GPS velocity in cm/s in EAST direction in earth-fixed NED frame (int16_t) vd : GPS velocity in cm/s in DOWN direction in earth-fixed NED frame (int16_t) cog : Course over ground (NOT heading, but direction of movement) in degrees * 100, 0.0..359.99 degrees. If unknown, set to: 65535 (uint16_t) satellites_visible : Number of satellites visible. If unknown, set to 255 (uint8_t) ''' return MAVLink_hil_gps_message(time_usec, fix_type, lat, lon, alt, eph, epv, vel, vn, ve, vd, cog, satellites_visible)
def function[hil_gps_encode, parameter[self, time_usec, fix_type, lat, lon, alt, eph, epv, vel, vn, ve, vd, cog, satellites_visible]]: constant[ The global position, as returned by the Global Positioning System (GPS). This is NOT the global position estimate of the sytem, but rather a RAW sensor value. See message GLOBAL_POSITION for the global position estimate. Coordinate frame is right- handed, Z-axis up (GPS frame). time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) fix_type : 0-1: no fix, 2: 2D fix, 3: 3D fix. Some applications will not use the value of this field unless it is at least two, so always correctly fill in the fix. (uint8_t) lat : Latitude (WGS84), in degrees * 1E7 (int32_t) lon : Longitude (WGS84), in degrees * 1E7 (int32_t) alt : Altitude (AMSL, not WGS84), in meters * 1000 (positive for up) (int32_t) eph : GPS HDOP horizontal dilution of position in cm (m*100). If unknown, set to: 65535 (uint16_t) epv : GPS VDOP vertical dilution of position in cm (m*100). If unknown, set to: 65535 (uint16_t) vel : GPS ground speed (m/s * 100). If unknown, set to: 65535 (uint16_t) vn : GPS velocity in cm/s in NORTH direction in earth-fixed NED frame (int16_t) ve : GPS velocity in cm/s in EAST direction in earth-fixed NED frame (int16_t) vd : GPS velocity in cm/s in DOWN direction in earth-fixed NED frame (int16_t) cog : Course over ground (NOT heading, but direction of movement) in degrees * 100, 0.0..359.99 degrees. If unknown, set to: 65535 (uint16_t) satellites_visible : Number of satellites visible. If unknown, set to 255 (uint8_t) ] return[call[name[MAVLink_hil_gps_message], parameter[name[time_usec], name[fix_type], name[lat], name[lon], name[alt], name[eph], name[epv], name[vel], name[vn], name[ve], name[vd], name[cog], name[satellites_visible]]]]
keyword[def] identifier[hil_gps_encode] ( identifier[self] , identifier[time_usec] , identifier[fix_type] , identifier[lat] , identifier[lon] , identifier[alt] , identifier[eph] , identifier[epv] , identifier[vel] , identifier[vn] , identifier[ve] , identifier[vd] , identifier[cog] , identifier[satellites_visible] ): literal[string] keyword[return] identifier[MAVLink_hil_gps_message] ( identifier[time_usec] , identifier[fix_type] , identifier[lat] , identifier[lon] , identifier[alt] , identifier[eph] , identifier[epv] , identifier[vel] , identifier[vn] , identifier[ve] , identifier[vd] , identifier[cog] , identifier[satellites_visible] )
def hil_gps_encode(self, time_usec, fix_type, lat, lon, alt, eph, epv, vel, vn, ve, vd, cog, satellites_visible): """ The global position, as returned by the Global Positioning System (GPS). This is NOT the global position estimate of the sytem, but rather a RAW sensor value. See message GLOBAL_POSITION for the global position estimate. Coordinate frame is right- handed, Z-axis up (GPS frame). time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) fix_type : 0-1: no fix, 2: 2D fix, 3: 3D fix. Some applications will not use the value of this field unless it is at least two, so always correctly fill in the fix. (uint8_t) lat : Latitude (WGS84), in degrees * 1E7 (int32_t) lon : Longitude (WGS84), in degrees * 1E7 (int32_t) alt : Altitude (AMSL, not WGS84), in meters * 1000 (positive for up) (int32_t) eph : GPS HDOP horizontal dilution of position in cm (m*100). If unknown, set to: 65535 (uint16_t) epv : GPS VDOP vertical dilution of position in cm (m*100). If unknown, set to: 65535 (uint16_t) vel : GPS ground speed (m/s * 100). If unknown, set to: 65535 (uint16_t) vn : GPS velocity in cm/s in NORTH direction in earth-fixed NED frame (int16_t) ve : GPS velocity in cm/s in EAST direction in earth-fixed NED frame (int16_t) vd : GPS velocity in cm/s in DOWN direction in earth-fixed NED frame (int16_t) cog : Course over ground (NOT heading, but direction of movement) in degrees * 100, 0.0..359.99 degrees. If unknown, set to: 65535 (uint16_t) satellites_visible : Number of satellites visible. If unknown, set to 255 (uint8_t) """ return MAVLink_hil_gps_message(time_usec, fix_type, lat, lon, alt, eph, epv, vel, vn, ve, vd, cog, satellites_visible)
def iter_subscriptions(self, login=None, number=-1, etag=None): """Iterate over repositories subscribed to by ``login`` or the authenticated user. :param str login: (optional), name of user whose subscriptions you want to see :param int number: (optional), number of repositories to return. Default: -1 returns all repositories :param str etag: (optional), ETag from a previous request to the same endpoint :returns: generator of :class:`Repository <github3.repos.Repository>` """ if login: return self.user(login).iter_subscriptions() url = self._build_url('user', 'subscriptions') return self._iter(int(number), url, Repository, etag=etag)
def function[iter_subscriptions, parameter[self, login, number, etag]]: constant[Iterate over repositories subscribed to by ``login`` or the authenticated user. :param str login: (optional), name of user whose subscriptions you want to see :param int number: (optional), number of repositories to return. Default: -1 returns all repositories :param str etag: (optional), ETag from a previous request to the same endpoint :returns: generator of :class:`Repository <github3.repos.Repository>` ] if name[login] begin[:] return[call[call[name[self].user, parameter[name[login]]].iter_subscriptions, parameter[]]] variable[url] assign[=] call[name[self]._build_url, parameter[constant[user], constant[subscriptions]]] return[call[name[self]._iter, parameter[call[name[int], parameter[name[number]]], name[url], name[Repository]]]]
keyword[def] identifier[iter_subscriptions] ( identifier[self] , identifier[login] = keyword[None] , identifier[number] =- literal[int] , identifier[etag] = keyword[None] ): literal[string] keyword[if] identifier[login] : keyword[return] identifier[self] . identifier[user] ( identifier[login] ). identifier[iter_subscriptions] () identifier[url] = identifier[self] . identifier[_build_url] ( literal[string] , literal[string] ) keyword[return] identifier[self] . identifier[_iter] ( identifier[int] ( identifier[number] ), identifier[url] , identifier[Repository] , identifier[etag] = identifier[etag] )
def iter_subscriptions(self, login=None, number=-1, etag=None): """Iterate over repositories subscribed to by ``login`` or the authenticated user. :param str login: (optional), name of user whose subscriptions you want to see :param int number: (optional), number of repositories to return. Default: -1 returns all repositories :param str etag: (optional), ETag from a previous request to the same endpoint :returns: generator of :class:`Repository <github3.repos.Repository>` """ if login: return self.user(login).iter_subscriptions() # depends on [control=['if'], data=[]] url = self._build_url('user', 'subscriptions') return self._iter(int(number), url, Repository, etag=etag)
def add_otp_style(self, zip_odp, style_file): """ takes the slide content and merges in the style_file """ style = zipwrap.Zippier(style_file) for picture_file in style.ls("Pictures"): zip_odp.write(picture_file, style.cat(picture_file, True)) xml_data = style.cat("styles.xml", False) # import pdb;pdb.set_trace() xml_data = self.override_styles(xml_data) zip_odp.write("styles.xml", xml_data)
def function[add_otp_style, parameter[self, zip_odp, style_file]]: constant[ takes the slide content and merges in the style_file ] variable[style] assign[=] call[name[zipwrap].Zippier, parameter[name[style_file]]] for taget[name[picture_file]] in starred[call[name[style].ls, parameter[constant[Pictures]]]] begin[:] call[name[zip_odp].write, parameter[name[picture_file], call[name[style].cat, parameter[name[picture_file], constant[True]]]]] variable[xml_data] assign[=] call[name[style].cat, parameter[constant[styles.xml], constant[False]]] variable[xml_data] assign[=] call[name[self].override_styles, parameter[name[xml_data]]] call[name[zip_odp].write, parameter[constant[styles.xml], name[xml_data]]]
keyword[def] identifier[add_otp_style] ( identifier[self] , identifier[zip_odp] , identifier[style_file] ): literal[string] identifier[style] = identifier[zipwrap] . identifier[Zippier] ( identifier[style_file] ) keyword[for] identifier[picture_file] keyword[in] identifier[style] . identifier[ls] ( literal[string] ): identifier[zip_odp] . identifier[write] ( identifier[picture_file] , identifier[style] . identifier[cat] ( identifier[picture_file] , keyword[True] )) identifier[xml_data] = identifier[style] . identifier[cat] ( literal[string] , keyword[False] ) identifier[xml_data] = identifier[self] . identifier[override_styles] ( identifier[xml_data] ) identifier[zip_odp] . identifier[write] ( literal[string] , identifier[xml_data] )
def add_otp_style(self, zip_odp, style_file): """ takes the slide content and merges in the style_file """ style = zipwrap.Zippier(style_file) for picture_file in style.ls('Pictures'): zip_odp.write(picture_file, style.cat(picture_file, True)) # depends on [control=['for'], data=['picture_file']] xml_data = style.cat('styles.xml', False) # import pdb;pdb.set_trace() xml_data = self.override_styles(xml_data) zip_odp.write('styles.xml', xml_data)
def get_delta(value): """ Return a timedelta object based on the value which can be a timedelta or a number of seconds (int or float). Raise an exception in all other cases. """ if isinstance(value, (int, float)): return timedelta(seconds=value) elif isinstance(value, timedelta): return value raise Exception('Invalid delta')
def function[get_delta, parameter[value]]: constant[ Return a timedelta object based on the value which can be a timedelta or a number of seconds (int or float). Raise an exception in all other cases. ] if call[name[isinstance], parameter[name[value], tuple[[<ast.Name object at 0x7da18f8131f0>, <ast.Name object at 0x7da18f8101c0>]]]] begin[:] return[call[name[timedelta], parameter[]]] <ast.Raise object at 0x7da18f8103d0>
keyword[def] identifier[get_delta] ( identifier[value] ): literal[string] keyword[if] identifier[isinstance] ( identifier[value] ,( identifier[int] , identifier[float] )): keyword[return] identifier[timedelta] ( identifier[seconds] = identifier[value] ) keyword[elif] identifier[isinstance] ( identifier[value] , identifier[timedelta] ): keyword[return] identifier[value] keyword[raise] identifier[Exception] ( literal[string] )
def get_delta(value): """ Return a timedelta object based on the value which can be a timedelta or a number of seconds (int or float). Raise an exception in all other cases. """ if isinstance(value, (int, float)): return timedelta(seconds=value) # depends on [control=['if'], data=[]] elif isinstance(value, timedelta): return value # depends on [control=['if'], data=[]] raise Exception('Invalid delta')
def imagetransformer_b12l_4h_b256_uncond_dr03_rel_tpu(): """works very well on 4x4.""" hparams = imagetransformer_b12l_4h_b256_uncond_dr03_tpu() hparams.shared_rel = True hparams.dec_attention_type = cia.AttentionType.RELATIVE_LOCAL_1D return hparams
def function[imagetransformer_b12l_4h_b256_uncond_dr03_rel_tpu, parameter[]]: constant[works very well on 4x4.] variable[hparams] assign[=] call[name[imagetransformer_b12l_4h_b256_uncond_dr03_tpu], parameter[]] name[hparams].shared_rel assign[=] constant[True] name[hparams].dec_attention_type assign[=] name[cia].AttentionType.RELATIVE_LOCAL_1D return[name[hparams]]
keyword[def] identifier[imagetransformer_b12l_4h_b256_uncond_dr03_rel_tpu] (): literal[string] identifier[hparams] = identifier[imagetransformer_b12l_4h_b256_uncond_dr03_tpu] () identifier[hparams] . identifier[shared_rel] = keyword[True] identifier[hparams] . identifier[dec_attention_type] = identifier[cia] . identifier[AttentionType] . identifier[RELATIVE_LOCAL_1D] keyword[return] identifier[hparams]
def imagetransformer_b12l_4h_b256_uncond_dr03_rel_tpu(): """works very well on 4x4.""" hparams = imagetransformer_b12l_4h_b256_uncond_dr03_tpu() hparams.shared_rel = True hparams.dec_attention_type = cia.AttentionType.RELATIVE_LOCAL_1D return hparams
def subtract(dict_a, dict_b, strict=False): """a stricter form of subtract_by_key(), this version will only remove an entry from dict_a if the key is in dict_b *and* the value at that key matches""" if not strict: return subtract_by_key(dict_a, dict_b) difference_dict = {} for key in dict_a: if key not in dict_b or dict_b[key] != dict_a[key]: difference_dict[key] = dict_a[key] return difference_dict
def function[subtract, parameter[dict_a, dict_b, strict]]: constant[a stricter form of subtract_by_key(), this version will only remove an entry from dict_a if the key is in dict_b *and* the value at that key matches] if <ast.UnaryOp object at 0x7da1b28b9210> begin[:] return[call[name[subtract_by_key], parameter[name[dict_a], name[dict_b]]]] variable[difference_dict] assign[=] dictionary[[], []] for taget[name[key]] in starred[name[dict_a]] begin[:] if <ast.BoolOp object at 0x7da1b28b9c60> begin[:] call[name[difference_dict]][name[key]] assign[=] call[name[dict_a]][name[key]] return[name[difference_dict]]
keyword[def] identifier[subtract] ( identifier[dict_a] , identifier[dict_b] , identifier[strict] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[strict] : keyword[return] identifier[subtract_by_key] ( identifier[dict_a] , identifier[dict_b] ) identifier[difference_dict] ={} keyword[for] identifier[key] keyword[in] identifier[dict_a] : keyword[if] identifier[key] keyword[not] keyword[in] identifier[dict_b] keyword[or] identifier[dict_b] [ identifier[key] ]!= identifier[dict_a] [ identifier[key] ]: identifier[difference_dict] [ identifier[key] ]= identifier[dict_a] [ identifier[key] ] keyword[return] identifier[difference_dict]
def subtract(dict_a, dict_b, strict=False): """a stricter form of subtract_by_key(), this version will only remove an entry from dict_a if the key is in dict_b *and* the value at that key matches""" if not strict: return subtract_by_key(dict_a, dict_b) # depends on [control=['if'], data=[]] difference_dict = {} for key in dict_a: if key not in dict_b or dict_b[key] != dict_a[key]: difference_dict[key] = dict_a[key] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] return difference_dict
def register_alias_type(klass, *args): """ This function allows you to map subclasses of L{ClassAlias} to classes listed in C{args}. When an object is read/written from/to the AMF stream, a paired L{ClassAlias} instance is created (or reused), based on the Python class of that object. L{ClassAlias} provides important metadata for the class and can also control how the equivalent Python object is created, how the attributes are applied etc. Use this function if you need to do something non-standard. @since: 0.4 @see: - L{pyamf.adapters._google_appengine_ext_db.DataStoreClassAlias} for a good example. - L{unregister_alias_type} @raise RuntimeError: alias is already registered @raise TypeError: Value supplied to C{klass} is not a class @raise ValueError: - New aliases must subclass L{pyamf.ClassAlias} - At least one type must be supplied """ def check_type_registered(arg): for k, v in ALIAS_TYPES.iteritems(): for kl in v: if arg is kl: raise RuntimeError('%r is already registered under %r' % ( arg, k)) if not isinstance(klass, python.class_types): raise TypeError('klass must be class') if not issubclass(klass, ClassAlias): raise ValueError('New aliases must subclass pyamf.ClassAlias') if len(args) == 0: raise ValueError('At least one type must be supplied') if len(args) == 1 and hasattr(args[0], '__call__'): c = args[0] check_type_registered(c) else: for arg in args: if not isinstance(arg, python.class_types): raise TypeError('%r must be class' % (arg,)) check_type_registered(arg) ALIAS_TYPES[klass] = args for k, v in CLASS_CACHE.copy().iteritems(): new_alias = util.get_class_alias(v.klass) if new_alias is klass: meta = util.get_class_meta(v.klass) meta['alias'] = v.alias alias_klass = klass(v.klass, **meta) CLASS_CACHE[k] = alias_klass CLASS_CACHE[v.klass] = alias_klass
def function[register_alias_type, parameter[klass]]: constant[ This function allows you to map subclasses of L{ClassAlias} to classes listed in C{args}. When an object is read/written from/to the AMF stream, a paired L{ClassAlias} instance is created (or reused), based on the Python class of that object. L{ClassAlias} provides important metadata for the class and can also control how the equivalent Python object is created, how the attributes are applied etc. Use this function if you need to do something non-standard. @since: 0.4 @see: - L{pyamf.adapters._google_appengine_ext_db.DataStoreClassAlias} for a good example. - L{unregister_alias_type} @raise RuntimeError: alias is already registered @raise TypeError: Value supplied to C{klass} is not a class @raise ValueError: - New aliases must subclass L{pyamf.ClassAlias} - At least one type must be supplied ] def function[check_type_registered, parameter[arg]]: for taget[tuple[[<ast.Name object at 0x7da20c6a85b0>, <ast.Name object at 0x7da20c6aadd0>]]] in starred[call[name[ALIAS_TYPES].iteritems, parameter[]]] begin[:] for taget[name[kl]] in starred[name[v]] begin[:] if compare[name[arg] is name[kl]] begin[:] <ast.Raise object at 0x7da20c6abf40> if <ast.UnaryOp object at 0x7da20c6aac50> begin[:] <ast.Raise object at 0x7da20c6a83d0> if <ast.UnaryOp object at 0x7da20c6ab6d0> begin[:] <ast.Raise object at 0x7da20c6a97b0> if compare[call[name[len], parameter[name[args]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da20c6a9ea0> if <ast.BoolOp object at 0x7da20c6aa590> begin[:] variable[c] assign[=] call[name[args]][constant[0]] call[name[check_type_registered], parameter[name[c]]] call[name[ALIAS_TYPES]][name[klass]] assign[=] name[args] for taget[tuple[[<ast.Name object at 0x7da20c6aaaa0>, <ast.Name object at 0x7da20c6a90c0>]]] in starred[call[call[name[CLASS_CACHE].copy, parameter[]].iteritems, parameter[]]] begin[:] variable[new_alias] assign[=] call[name[util].get_class_alias, parameter[name[v].klass]] if 
compare[name[new_alias] is name[klass]] begin[:] variable[meta] assign[=] call[name[util].get_class_meta, parameter[name[v].klass]] call[name[meta]][constant[alias]] assign[=] name[v].alias variable[alias_klass] assign[=] call[name[klass], parameter[name[v].klass]] call[name[CLASS_CACHE]][name[k]] assign[=] name[alias_klass] call[name[CLASS_CACHE]][name[v].klass] assign[=] name[alias_klass]
keyword[def] identifier[register_alias_type] ( identifier[klass] ,* identifier[args] ): literal[string] keyword[def] identifier[check_type_registered] ( identifier[arg] ): keyword[for] identifier[k] , identifier[v] keyword[in] identifier[ALIAS_TYPES] . identifier[iteritems] (): keyword[for] identifier[kl] keyword[in] identifier[v] : keyword[if] identifier[arg] keyword[is] identifier[kl] : keyword[raise] identifier[RuntimeError] ( literal[string] %( identifier[arg] , identifier[k] )) keyword[if] keyword[not] identifier[isinstance] ( identifier[klass] , identifier[python] . identifier[class_types] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] keyword[not] identifier[issubclass] ( identifier[klass] , identifier[ClassAlias] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[len] ( identifier[args] )== literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[len] ( identifier[args] )== literal[int] keyword[and] identifier[hasattr] ( identifier[args] [ literal[int] ], literal[string] ): identifier[c] = identifier[args] [ literal[int] ] identifier[check_type_registered] ( identifier[c] ) keyword[else] : keyword[for] identifier[arg] keyword[in] identifier[args] : keyword[if] keyword[not] identifier[isinstance] ( identifier[arg] , identifier[python] . identifier[class_types] ): keyword[raise] identifier[TypeError] ( literal[string] %( identifier[arg] ,)) identifier[check_type_registered] ( identifier[arg] ) identifier[ALIAS_TYPES] [ identifier[klass] ]= identifier[args] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[CLASS_CACHE] . identifier[copy] (). identifier[iteritems] (): identifier[new_alias] = identifier[util] . identifier[get_class_alias] ( identifier[v] . identifier[klass] ) keyword[if] identifier[new_alias] keyword[is] identifier[klass] : identifier[meta] = identifier[util] . identifier[get_class_meta] ( identifier[v] . 
identifier[klass] ) identifier[meta] [ literal[string] ]= identifier[v] . identifier[alias] identifier[alias_klass] = identifier[klass] ( identifier[v] . identifier[klass] ,** identifier[meta] ) identifier[CLASS_CACHE] [ identifier[k] ]= identifier[alias_klass] identifier[CLASS_CACHE] [ identifier[v] . identifier[klass] ]= identifier[alias_klass]
def register_alias_type(klass, *args): """ This function allows you to map subclasses of L{ClassAlias} to classes listed in C{args}. When an object is read/written from/to the AMF stream, a paired L{ClassAlias} instance is created (or reused), based on the Python class of that object. L{ClassAlias} provides important metadata for the class and can also control how the equivalent Python object is created, how the attributes are applied etc. Use this function if you need to do something non-standard. @since: 0.4 @see: - L{pyamf.adapters._google_appengine_ext_db.DataStoreClassAlias} for a good example. - L{unregister_alias_type} @raise RuntimeError: alias is already registered @raise TypeError: Value supplied to C{klass} is not a class @raise ValueError: - New aliases must subclass L{pyamf.ClassAlias} - At least one type must be supplied """ def check_type_registered(arg): for (k, v) in ALIAS_TYPES.iteritems(): for kl in v: if arg is kl: raise RuntimeError('%r is already registered under %r' % (arg, k)) # depends on [control=['if'], data=['arg']] # depends on [control=['for'], data=['kl']] # depends on [control=['for'], data=[]] if not isinstance(klass, python.class_types): raise TypeError('klass must be class') # depends on [control=['if'], data=[]] if not issubclass(klass, ClassAlias): raise ValueError('New aliases must subclass pyamf.ClassAlias') # depends on [control=['if'], data=[]] if len(args) == 0: raise ValueError('At least one type must be supplied') # depends on [control=['if'], data=[]] if len(args) == 1 and hasattr(args[0], '__call__'): c = args[0] check_type_registered(c) # depends on [control=['if'], data=[]] else: for arg in args: if not isinstance(arg, python.class_types): raise TypeError('%r must be class' % (arg,)) # depends on [control=['if'], data=[]] check_type_registered(arg) # depends on [control=['for'], data=['arg']] ALIAS_TYPES[klass] = args for (k, v) in CLASS_CACHE.copy().iteritems(): new_alias = util.get_class_alias(v.klass) if new_alias 
is klass: meta = util.get_class_meta(v.klass) meta['alias'] = v.alias alias_klass = klass(v.klass, **meta) CLASS_CACHE[k] = alias_klass CLASS_CACHE[v.klass] = alias_klass # depends on [control=['if'], data=['klass']] # depends on [control=['for'], data=[]]
def addOntology(self): """ Adds a new Ontology to this repo. """ self._openRepo() name = self._args.name filePath = self._getFilePath(self._args.filePath, self._args.relativePath) if name is None: name = getNameFromPath(filePath) ontology = ontologies.Ontology(name) ontology.populateFromFile(filePath) self._updateRepo(self._repo.insertOntology, ontology)
def function[addOntology, parameter[self]]: constant[ Adds a new Ontology to this repo. ] call[name[self]._openRepo, parameter[]] variable[name] assign[=] name[self]._args.name variable[filePath] assign[=] call[name[self]._getFilePath, parameter[name[self]._args.filePath, name[self]._args.relativePath]] if compare[name[name] is constant[None]] begin[:] variable[name] assign[=] call[name[getNameFromPath], parameter[name[filePath]]] variable[ontology] assign[=] call[name[ontologies].Ontology, parameter[name[name]]] call[name[ontology].populateFromFile, parameter[name[filePath]]] call[name[self]._updateRepo, parameter[name[self]._repo.insertOntology, name[ontology]]]
keyword[def] identifier[addOntology] ( identifier[self] ): literal[string] identifier[self] . identifier[_openRepo] () identifier[name] = identifier[self] . identifier[_args] . identifier[name] identifier[filePath] = identifier[self] . identifier[_getFilePath] ( identifier[self] . identifier[_args] . identifier[filePath] , identifier[self] . identifier[_args] . identifier[relativePath] ) keyword[if] identifier[name] keyword[is] keyword[None] : identifier[name] = identifier[getNameFromPath] ( identifier[filePath] ) identifier[ontology] = identifier[ontologies] . identifier[Ontology] ( identifier[name] ) identifier[ontology] . identifier[populateFromFile] ( identifier[filePath] ) identifier[self] . identifier[_updateRepo] ( identifier[self] . identifier[_repo] . identifier[insertOntology] , identifier[ontology] )
def addOntology(self): """ Adds a new Ontology to this repo. """ self._openRepo() name = self._args.name filePath = self._getFilePath(self._args.filePath, self._args.relativePath) if name is None: name = getNameFromPath(filePath) # depends on [control=['if'], data=['name']] ontology = ontologies.Ontology(name) ontology.populateFromFile(filePath) self._updateRepo(self._repo.insertOntology, ontology)
def _get_definitions(source): # type: (str) -> Tuple[Dict[str, str], int] """Extract a dictionary of arguments and definitions. Args: source: The source for a section of a usage string that contains definitions. Returns: A two-tuple containing a dictionary of all arguments and definitions as well as the length of the longest argument. """ max_len = 0 descs = collections.OrderedDict() # type: Dict[str, str] lines = (s.strip() for s in source.splitlines()) non_empty_lines = (s for s in lines if s) for line in non_empty_lines: if line: arg, desc = re.split(r'\s\s+', line.strip()) arg_len = len(arg) if arg_len > max_len: max_len = arg_len descs[arg] = desc return descs, max_len
def function[_get_definitions, parameter[source]]: constant[Extract a dictionary of arguments and definitions. Args: source: The source for a section of a usage string that contains definitions. Returns: A two-tuple containing a dictionary of all arguments and definitions as well as the length of the longest argument. ] variable[max_len] assign[=] constant[0] variable[descs] assign[=] call[name[collections].OrderedDict, parameter[]] variable[lines] assign[=] <ast.GeneratorExp object at 0x7da1b287df00> variable[non_empty_lines] assign[=] <ast.GeneratorExp object at 0x7da1b287ee90> for taget[name[line]] in starred[name[non_empty_lines]] begin[:] if name[line] begin[:] <ast.Tuple object at 0x7da1b287c250> assign[=] call[name[re].split, parameter[constant[\s\s+], call[name[line].strip, parameter[]]]] variable[arg_len] assign[=] call[name[len], parameter[name[arg]]] if compare[name[arg_len] greater[>] name[max_len]] begin[:] variable[max_len] assign[=] name[arg_len] call[name[descs]][name[arg]] assign[=] name[desc] return[tuple[[<ast.Name object at 0x7da1b287ef80>, <ast.Name object at 0x7da1b287ed40>]]]
keyword[def] identifier[_get_definitions] ( identifier[source] ): literal[string] identifier[max_len] = literal[int] identifier[descs] = identifier[collections] . identifier[OrderedDict] () identifier[lines] =( identifier[s] . identifier[strip] () keyword[for] identifier[s] keyword[in] identifier[source] . identifier[splitlines] ()) identifier[non_empty_lines] =( identifier[s] keyword[for] identifier[s] keyword[in] identifier[lines] keyword[if] identifier[s] ) keyword[for] identifier[line] keyword[in] identifier[non_empty_lines] : keyword[if] identifier[line] : identifier[arg] , identifier[desc] = identifier[re] . identifier[split] ( literal[string] , identifier[line] . identifier[strip] ()) identifier[arg_len] = identifier[len] ( identifier[arg] ) keyword[if] identifier[arg_len] > identifier[max_len] : identifier[max_len] = identifier[arg_len] identifier[descs] [ identifier[arg] ]= identifier[desc] keyword[return] identifier[descs] , identifier[max_len]
def _get_definitions(source): # type: (str) -> Tuple[Dict[str, str], int] 'Extract a dictionary of arguments and definitions.\n\n Args:\n source: The source for a section of a usage string that contains\n definitions.\n\n Returns:\n A two-tuple containing a dictionary of all arguments and definitions as\n well as the length of the longest argument.\n ' max_len = 0 descs = collections.OrderedDict() # type: Dict[str, str] lines = (s.strip() for s in source.splitlines()) non_empty_lines = (s for s in lines if s) for line in non_empty_lines: if line: (arg, desc) = re.split('\\s\\s+', line.strip()) arg_len = len(arg) if arg_len > max_len: max_len = arg_len # depends on [control=['if'], data=['arg_len', 'max_len']] descs[arg] = desc # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] return (descs, max_len)
def fix_e722(self, result): """fix bare except""" (line_index, _, target) = get_index_offset_contents(result, self.source) if BARE_EXCEPT_REGEX.search(target): self.source[line_index] = '{0}{1}'.format( target[:result['column'] - 1], "except Exception:")
def function[fix_e722, parameter[self, result]]: constant[fix bare except] <ast.Tuple object at 0x7da1b08a5c30> assign[=] call[name[get_index_offset_contents], parameter[name[result], name[self].source]] if call[name[BARE_EXCEPT_REGEX].search, parameter[name[target]]] begin[:] call[name[self].source][name[line_index]] assign[=] call[constant[{0}{1}].format, parameter[call[name[target]][<ast.Slice object at 0x7da1b08a67a0>], constant[except Exception:]]]
keyword[def] identifier[fix_e722] ( identifier[self] , identifier[result] ): literal[string] ( identifier[line_index] , identifier[_] , identifier[target] )= identifier[get_index_offset_contents] ( identifier[result] , identifier[self] . identifier[source] ) keyword[if] identifier[BARE_EXCEPT_REGEX] . identifier[search] ( identifier[target] ): identifier[self] . identifier[source] [ identifier[line_index] ]= literal[string] . identifier[format] ( identifier[target] [: identifier[result] [ literal[string] ]- literal[int] ], literal[string] )
def fix_e722(self, result): """fix bare except""" (line_index, _, target) = get_index_offset_contents(result, self.source) if BARE_EXCEPT_REGEX.search(target): self.source[line_index] = '{0}{1}'.format(target[:result['column'] - 1], 'except Exception:') # depends on [control=['if'], data=[]]
def _streamSSE(url, on_data=print, accrue=False): '''internal''' messages = SSEClient(url) if accrue: ret = [] for msg in messages: data = msg.data on_data(json.loads(data)) if accrue: ret.append(msg) return ret
def function[_streamSSE, parameter[url, on_data, accrue]]: constant[internal] variable[messages] assign[=] call[name[SSEClient], parameter[name[url]]] if name[accrue] begin[:] variable[ret] assign[=] list[[]] for taget[name[msg]] in starred[name[messages]] begin[:] variable[data] assign[=] name[msg].data call[name[on_data], parameter[call[name[json].loads, parameter[name[data]]]]] if name[accrue] begin[:] call[name[ret].append, parameter[name[msg]]] return[name[ret]]
keyword[def] identifier[_streamSSE] ( identifier[url] , identifier[on_data] = identifier[print] , identifier[accrue] = keyword[False] ): literal[string] identifier[messages] = identifier[SSEClient] ( identifier[url] ) keyword[if] identifier[accrue] : identifier[ret] =[] keyword[for] identifier[msg] keyword[in] identifier[messages] : identifier[data] = identifier[msg] . identifier[data] identifier[on_data] ( identifier[json] . identifier[loads] ( identifier[data] )) keyword[if] identifier[accrue] : identifier[ret] . identifier[append] ( identifier[msg] ) keyword[return] identifier[ret]
def _streamSSE(url, on_data=print, accrue=False): """internal""" messages = SSEClient(url) if accrue: ret = [] # depends on [control=['if'], data=[]] for msg in messages: data = msg.data on_data(json.loads(data)) if accrue: ret.append(msg) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['msg']] return ret
def get_content(session, urls): """ Loads the content from URLs, ignoring connection errors. :param session: requests Session instance :param urls: list, str URLs :return: str, content """ for url in urls: resp = session.get(url) if resp.ok: return resp.json() return {}
def function[get_content, parameter[session, urls]]: constant[ Loads the content from URLs, ignoring connection errors. :param session: requests Session instance :param urls: list, str URLs :return: str, content ] for taget[name[url]] in starred[name[urls]] begin[:] variable[resp] assign[=] call[name[session].get, parameter[name[url]]] if name[resp].ok begin[:] return[call[name[resp].json, parameter[]]] return[dictionary[[], []]]
keyword[def] identifier[get_content] ( identifier[session] , identifier[urls] ): literal[string] keyword[for] identifier[url] keyword[in] identifier[urls] : identifier[resp] = identifier[session] . identifier[get] ( identifier[url] ) keyword[if] identifier[resp] . identifier[ok] : keyword[return] identifier[resp] . identifier[json] () keyword[return] {}
def get_content(session, urls): """ Loads the content from URLs, ignoring connection errors. :param session: requests Session instance :param urls: list, str URLs :return: str, content """ for url in urls: resp = session.get(url) if resp.ok: return resp.json() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['url']] return {}
async def queue_declare(self): """ Override this method to change how a queue is declared """ await self.channel.queue_declare( self.queue, durable=self.durable, exclusive=self.exclusive, no_wait=self.no_wait )
<ast.AsyncFunctionDef object at 0x7da1b1bbad10>
keyword[async] keyword[def] identifier[queue_declare] ( identifier[self] ): literal[string] keyword[await] identifier[self] . identifier[channel] . identifier[queue_declare] ( identifier[self] . identifier[queue] , identifier[durable] = identifier[self] . identifier[durable] , identifier[exclusive] = identifier[self] . identifier[exclusive] , identifier[no_wait] = identifier[self] . identifier[no_wait] )
async def queue_declare(self): """ Override this method to change how a queue is declared """ await self.channel.queue_declare(self.queue, durable=self.durable, exclusive=self.exclusive, no_wait=self.no_wait)
def dispatch(self): 'Perform dispatch, using request embedded within flask global state' import flask body = flask.request.get_json() return self. dispatch_with_args(body, argMap=dict())
def function[dispatch, parameter[self]]: constant[Perform dispatch, using request embedded within flask global state] import module[flask] variable[body] assign[=] call[name[flask].request.get_json, parameter[]] return[call[name[self].dispatch_with_args, parameter[name[body]]]]
keyword[def] identifier[dispatch] ( identifier[self] ): literal[string] keyword[import] identifier[flask] identifier[body] = identifier[flask] . identifier[request] . identifier[get_json] () keyword[return] identifier[self] . identifier[dispatch_with_args] ( identifier[body] , identifier[argMap] = identifier[dict] ())
def dispatch(self): """Perform dispatch, using request embedded within flask global state""" import flask body = flask.request.get_json() return self.dispatch_with_args(body, argMap=dict())
def _is_empty_cache_record(self, rec): """ Return True if the specified cache record has no data, False otherwise. :param rec: cache record returned by :py:meth:`~._cache_get` :type rec: dict :return: True if record is empty, False otherwise :rtype: bool """ # these are taken from DataQuery.query_one_table() for k in [ 'by_version', 'by_file_type', 'by_installer', 'by_implementation', 'by_system', 'by_distro', 'by_country' ]: if k in rec and len(rec[k]) > 0: return False return True
def function[_is_empty_cache_record, parameter[self, rec]]: constant[ Return True if the specified cache record has no data, False otherwise. :param rec: cache record returned by :py:meth:`~._cache_get` :type rec: dict :return: True if record is empty, False otherwise :rtype: bool ] for taget[name[k]] in starred[list[[<ast.Constant object at 0x7da2044c21d0>, <ast.Constant object at 0x7da2044c0c70>, <ast.Constant object at 0x7da2044c22c0>, <ast.Constant object at 0x7da2044c0070>, <ast.Constant object at 0x7da2044c1b70>, <ast.Constant object at 0x7da2044c2590>, <ast.Constant object at 0x7da2044c1960>]]] begin[:] if <ast.BoolOp object at 0x7da2044c1c60> begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[_is_empty_cache_record] ( identifier[self] , identifier[rec] ): literal[string] keyword[for] identifier[k] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]: keyword[if] identifier[k] keyword[in] identifier[rec] keyword[and] identifier[len] ( identifier[rec] [ identifier[k] ])> literal[int] : keyword[return] keyword[False] keyword[return] keyword[True]
def _is_empty_cache_record(self, rec): """ Return True if the specified cache record has no data, False otherwise. :param rec: cache record returned by :py:meth:`~._cache_get` :type rec: dict :return: True if record is empty, False otherwise :rtype: bool """ # these are taken from DataQuery.query_one_table() for k in ['by_version', 'by_file_type', 'by_installer', 'by_implementation', 'by_system', 'by_distro', 'by_country']: if k in rec and len(rec[k]) > 0: return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']] return True
def url(self): """ Full URL as requested by the client (computed). This value is constructed out of different environment variables and includes scheme, host, port, scriptname, path and query string. """ scheme = self.environ.get('wsgi.url_scheme', 'http') host = self.environ.get('HTTP_X_FORWARDED_HOST', self.environ.get('HTTP_HOST', None)) if not host: host = self.environ.get('SERVER_NAME') port = self.environ.get('SERVER_PORT', '80') if scheme + port not in ('https443', 'http80'): host += ':' + port parts = (scheme, host, urlquote(self.fullpath), self.query_string, '') return urlunsplit(parts)
def function[url, parameter[self]]: constant[ Full URL as requested by the client (computed). This value is constructed out of different environment variables and includes scheme, host, port, scriptname, path and query string. ] variable[scheme] assign[=] call[name[self].environ.get, parameter[constant[wsgi.url_scheme], constant[http]]] variable[host] assign[=] call[name[self].environ.get, parameter[constant[HTTP_X_FORWARDED_HOST], call[name[self].environ.get, parameter[constant[HTTP_HOST], constant[None]]]]] if <ast.UnaryOp object at 0x7da18bcc87f0> begin[:] variable[host] assign[=] call[name[self].environ.get, parameter[constant[SERVER_NAME]]] variable[port] assign[=] call[name[self].environ.get, parameter[constant[SERVER_PORT], constant[80]]] if compare[binary_operation[name[scheme] + name[port]] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da20c9927d0>, <ast.Constant object at 0x7da20c9927a0>]]] begin[:] <ast.AugAssign object at 0x7da20c992d40> variable[parts] assign[=] tuple[[<ast.Name object at 0x7da20c9925f0>, <ast.Name object at 0x7da20c991330>, <ast.Call object at 0x7da20c991960>, <ast.Attribute object at 0x7da20c992ec0>, <ast.Constant object at 0x7da20c993580>]] return[call[name[urlunsplit], parameter[name[parts]]]]
keyword[def] identifier[url] ( identifier[self] ): literal[string] identifier[scheme] = identifier[self] . identifier[environ] . identifier[get] ( literal[string] , literal[string] ) identifier[host] = identifier[self] . identifier[environ] . identifier[get] ( literal[string] , identifier[self] . identifier[environ] . identifier[get] ( literal[string] , keyword[None] )) keyword[if] keyword[not] identifier[host] : identifier[host] = identifier[self] . identifier[environ] . identifier[get] ( literal[string] ) identifier[port] = identifier[self] . identifier[environ] . identifier[get] ( literal[string] , literal[string] ) keyword[if] identifier[scheme] + identifier[port] keyword[not] keyword[in] ( literal[string] , literal[string] ): identifier[host] += literal[string] + identifier[port] identifier[parts] =( identifier[scheme] , identifier[host] , identifier[urlquote] ( identifier[self] . identifier[fullpath] ), identifier[self] . identifier[query_string] , literal[string] ) keyword[return] identifier[urlunsplit] ( identifier[parts] )
def url(self): """ Full URL as requested by the client (computed). This value is constructed out of different environment variables and includes scheme, host, port, scriptname, path and query string. """ scheme = self.environ.get('wsgi.url_scheme', 'http') host = self.environ.get('HTTP_X_FORWARDED_HOST', self.environ.get('HTTP_HOST', None)) if not host: host = self.environ.get('SERVER_NAME') port = self.environ.get('SERVER_PORT', '80') if scheme + port not in ('https443', 'http80'): host += ':' + port # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] parts = (scheme, host, urlquote(self.fullpath), self.query_string, '') return urlunsplit(parts)
def dump(self, output, close_after_write=True): """Write data to the output with tabular format. Args: output (file descriptor or str): file descriptor or path to the output file. close_after_write (bool, optional): Close the output after write. Defaults to |True|. """ try: output.write self.stream = output except AttributeError: self.stream = io.open(output, "w", encoding="utf-8") try: self.write_table() finally: if close_after_write: self.stream.close() self.stream = sys.stdout
def function[dump, parameter[self, output, close_after_write]]: constant[Write data to the output with tabular format. Args: output (file descriptor or str): file descriptor or path to the output file. close_after_write (bool, optional): Close the output after write. Defaults to |True|. ] <ast.Try object at 0x7da20e9618a0> <ast.Try object at 0x7da20e962890>
keyword[def] identifier[dump] ( identifier[self] , identifier[output] , identifier[close_after_write] = keyword[True] ): literal[string] keyword[try] : identifier[output] . identifier[write] identifier[self] . identifier[stream] = identifier[output] keyword[except] identifier[AttributeError] : identifier[self] . identifier[stream] = identifier[io] . identifier[open] ( identifier[output] , literal[string] , identifier[encoding] = literal[string] ) keyword[try] : identifier[self] . identifier[write_table] () keyword[finally] : keyword[if] identifier[close_after_write] : identifier[self] . identifier[stream] . identifier[close] () identifier[self] . identifier[stream] = identifier[sys] . identifier[stdout]
def dump(self, output, close_after_write=True): """Write data to the output with tabular format. Args: output (file descriptor or str): file descriptor or path to the output file. close_after_write (bool, optional): Close the output after write. Defaults to |True|. """ try: output.write self.stream = output # depends on [control=['try'], data=[]] except AttributeError: self.stream = io.open(output, 'w', encoding='utf-8') # depends on [control=['except'], data=[]] try: self.write_table() # depends on [control=['try'], data=[]] finally: if close_after_write: self.stream.close() self.stream = sys.stdout # depends on [control=['if'], data=[]]
def register_user_type(self, keyspace, user_type, klass): """ Registers a class to use to represent a particular user-defined type. Query parameters for this user-defined type will be assumed to be instances of `klass`. Result sets for this user-defined type will be instances of `klass`. If no class is registered for a user-defined type, a namedtuple will be used for result sets, and non-prepared statements may not encode parameters for this type correctly. `keyspace` is the name of the keyspace that the UDT is defined in. `user_type` is the string name of the UDT to register the mapping for. `klass` should be a class with attributes whose names match the fields of the user-defined type. The constructor must accepts kwargs for each of the fields in the UDT. This method should only be called after the type has been created within Cassandra. Example:: cluster = Cluster(protocol_version=3) session = cluster.connect() session.set_keyspace('mykeyspace') session.execute("CREATE TYPE address (street text, zipcode int)") session.execute("CREATE TABLE users (id int PRIMARY KEY, location address)") # create a class to map to the "address" UDT class Address(object): def __init__(self, street, zipcode): self.street = street self.zipcode = zipcode cluster.register_user_type('mykeyspace', 'address', Address) # insert a row using an instance of Address session.execute("INSERT INTO users (id, location) VALUES (%s, %s)", (0, Address("123 Main St.", 78723))) # results will include Address instances results = session.execute("SELECT * FROM users") row = results[0] print row.id, row.location.street, row.location.zipcode """ if self.protocol_version < 3: log.warning("User Type serialization is only supported in native protocol version 3+ (%d in use). 
" "CQL encoding for simple statements will still work, but named tuples will " "be returned when reading type %s.%s.", self.protocol_version, keyspace, user_type) self._user_types[keyspace][user_type] = klass for session in tuple(self.sessions): session.user_type_registered(keyspace, user_type, klass) UserType.evict_udt_class(keyspace, user_type)
def function[register_user_type, parameter[self, keyspace, user_type, klass]]: constant[ Registers a class to use to represent a particular user-defined type. Query parameters for this user-defined type will be assumed to be instances of `klass`. Result sets for this user-defined type will be instances of `klass`. If no class is registered for a user-defined type, a namedtuple will be used for result sets, and non-prepared statements may not encode parameters for this type correctly. `keyspace` is the name of the keyspace that the UDT is defined in. `user_type` is the string name of the UDT to register the mapping for. `klass` should be a class with attributes whose names match the fields of the user-defined type. The constructor must accepts kwargs for each of the fields in the UDT. This method should only be called after the type has been created within Cassandra. Example:: cluster = Cluster(protocol_version=3) session = cluster.connect() session.set_keyspace('mykeyspace') session.execute("CREATE TYPE address (street text, zipcode int)") session.execute("CREATE TABLE users (id int PRIMARY KEY, location address)") # create a class to map to the "address" UDT class Address(object): def __init__(self, street, zipcode): self.street = street self.zipcode = zipcode cluster.register_user_type('mykeyspace', 'address', Address) # insert a row using an instance of Address session.execute("INSERT INTO users (id, location) VALUES (%s, %s)", (0, Address("123 Main St.", 78723))) # results will include Address instances results = session.execute("SELECT * FROM users") row = results[0] print row.id, row.location.street, row.location.zipcode ] if compare[name[self].protocol_version less[<] constant[3]] begin[:] call[name[log].warning, parameter[constant[User Type serialization is only supported in native protocol version 3+ (%d in use). 
CQL encoding for simple statements will still work, but named tuples will be returned when reading type %s.%s.], name[self].protocol_version, name[keyspace], name[user_type]]] call[call[name[self]._user_types][name[keyspace]]][name[user_type]] assign[=] name[klass] for taget[name[session]] in starred[call[name[tuple], parameter[name[self].sessions]]] begin[:] call[name[session].user_type_registered, parameter[name[keyspace], name[user_type], name[klass]]] call[name[UserType].evict_udt_class, parameter[name[keyspace], name[user_type]]]
keyword[def] identifier[register_user_type] ( identifier[self] , identifier[keyspace] , identifier[user_type] , identifier[klass] ): literal[string] keyword[if] identifier[self] . identifier[protocol_version] < literal[int] : identifier[log] . identifier[warning] ( literal[string] literal[string] literal[string] , identifier[self] . identifier[protocol_version] , identifier[keyspace] , identifier[user_type] ) identifier[self] . identifier[_user_types] [ identifier[keyspace] ][ identifier[user_type] ]= identifier[klass] keyword[for] identifier[session] keyword[in] identifier[tuple] ( identifier[self] . identifier[sessions] ): identifier[session] . identifier[user_type_registered] ( identifier[keyspace] , identifier[user_type] , identifier[klass] ) identifier[UserType] . identifier[evict_udt_class] ( identifier[keyspace] , identifier[user_type] )
def register_user_type(self, keyspace, user_type, klass): """ Registers a class to use to represent a particular user-defined type. Query parameters for this user-defined type will be assumed to be instances of `klass`. Result sets for this user-defined type will be instances of `klass`. If no class is registered for a user-defined type, a namedtuple will be used for result sets, and non-prepared statements may not encode parameters for this type correctly. `keyspace` is the name of the keyspace that the UDT is defined in. `user_type` is the string name of the UDT to register the mapping for. `klass` should be a class with attributes whose names match the fields of the user-defined type. The constructor must accepts kwargs for each of the fields in the UDT. This method should only be called after the type has been created within Cassandra. Example:: cluster = Cluster(protocol_version=3) session = cluster.connect() session.set_keyspace('mykeyspace') session.execute("CREATE TYPE address (street text, zipcode int)") session.execute("CREATE TABLE users (id int PRIMARY KEY, location address)") # create a class to map to the "address" UDT class Address(object): def __init__(self, street, zipcode): self.street = street self.zipcode = zipcode cluster.register_user_type('mykeyspace', 'address', Address) # insert a row using an instance of Address session.execute("INSERT INTO users (id, location) VALUES (%s, %s)", (0, Address("123 Main St.", 78723))) # results will include Address instances results = session.execute("SELECT * FROM users") row = results[0] print row.id, row.location.street, row.location.zipcode """ if self.protocol_version < 3: log.warning('User Type serialization is only supported in native protocol version 3+ (%d in use). 
CQL encoding for simple statements will still work, but named tuples will be returned when reading type %s.%s.', self.protocol_version, keyspace, user_type) # depends on [control=['if'], data=[]] self._user_types[keyspace][user_type] = klass for session in tuple(self.sessions): session.user_type_registered(keyspace, user_type, klass) # depends on [control=['for'], data=['session']] UserType.evict_udt_class(keyspace, user_type)
def get_device_stats(self): """Get GPU stats.""" stats = [] for index, device_handle in enumerate(self.device_handles): device_stats = {} # Dictionnary key is the GPU_ID device_stats['key'] = self.get_key() # GPU id (for multiple GPU, start at 0) device_stats['gpu_id'] = index # GPU name device_stats['name'] = get_device_name(device_handle) # Memory consumption in % (not available on all GPU) device_stats['mem'] = get_mem(device_handle) # Processor consumption in % device_stats['proc'] = get_proc(device_handle) stats.append(device_stats) return stats
def function[get_device_stats, parameter[self]]: constant[Get GPU stats.] variable[stats] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18f810610>, <ast.Name object at 0x7da18f58d210>]]] in starred[call[name[enumerate], parameter[name[self].device_handles]]] begin[:] variable[device_stats] assign[=] dictionary[[], []] call[name[device_stats]][constant[key]] assign[=] call[name[self].get_key, parameter[]] call[name[device_stats]][constant[gpu_id]] assign[=] name[index] call[name[device_stats]][constant[name]] assign[=] call[name[get_device_name], parameter[name[device_handle]]] call[name[device_stats]][constant[mem]] assign[=] call[name[get_mem], parameter[name[device_handle]]] call[name[device_stats]][constant[proc]] assign[=] call[name[get_proc], parameter[name[device_handle]]] call[name[stats].append, parameter[name[device_stats]]] return[name[stats]]
keyword[def] identifier[get_device_stats] ( identifier[self] ): literal[string] identifier[stats] =[] keyword[for] identifier[index] , identifier[device_handle] keyword[in] identifier[enumerate] ( identifier[self] . identifier[device_handles] ): identifier[device_stats] ={} identifier[device_stats] [ literal[string] ]= identifier[self] . identifier[get_key] () identifier[device_stats] [ literal[string] ]= identifier[index] identifier[device_stats] [ literal[string] ]= identifier[get_device_name] ( identifier[device_handle] ) identifier[device_stats] [ literal[string] ]= identifier[get_mem] ( identifier[device_handle] ) identifier[device_stats] [ literal[string] ]= identifier[get_proc] ( identifier[device_handle] ) identifier[stats] . identifier[append] ( identifier[device_stats] ) keyword[return] identifier[stats]
def get_device_stats(self): """Get GPU stats.""" stats = [] for (index, device_handle) in enumerate(self.device_handles): device_stats = {} # Dictionnary key is the GPU_ID device_stats['key'] = self.get_key() # GPU id (for multiple GPU, start at 0) device_stats['gpu_id'] = index # GPU name device_stats['name'] = get_device_name(device_handle) # Memory consumption in % (not available on all GPU) device_stats['mem'] = get_mem(device_handle) # Processor consumption in % device_stats['proc'] = get_proc(device_handle) stats.append(device_stats) # depends on [control=['for'], data=[]] return stats
def tensor_dim_to_mesh_dim_size(layout, mesh_shape, tensor_dim): """How many ways does a tensor dimension get split. This is used to "cheat" when building the mtf graph and peek at how a tensor dimension will be split. Returns 1 if the tensor dimension is not split. Args: layout: an input to convert_to_layout_rules mesh_shape: an input to convert_to_shape tensor_dim: a Dimension Returns: an integer """ layout_rules = convert_to_layout_rules(layout) mesh_shape = convert_to_shape(mesh_shape) mesh_axis = layout_rules.tensor_dimension_to_mesh_axis(tensor_dim, mesh_shape) if mesh_axis is None: return 1 else: return mesh_shape.dims[mesh_axis].size
def function[tensor_dim_to_mesh_dim_size, parameter[layout, mesh_shape, tensor_dim]]: constant[How many ways does a tensor dimension get split. This is used to "cheat" when building the mtf graph and peek at how a tensor dimension will be split. Returns 1 if the tensor dimension is not split. Args: layout: an input to convert_to_layout_rules mesh_shape: an input to convert_to_shape tensor_dim: a Dimension Returns: an integer ] variable[layout_rules] assign[=] call[name[convert_to_layout_rules], parameter[name[layout]]] variable[mesh_shape] assign[=] call[name[convert_to_shape], parameter[name[mesh_shape]]] variable[mesh_axis] assign[=] call[name[layout_rules].tensor_dimension_to_mesh_axis, parameter[name[tensor_dim], name[mesh_shape]]] if compare[name[mesh_axis] is constant[None]] begin[:] return[constant[1]]
keyword[def] identifier[tensor_dim_to_mesh_dim_size] ( identifier[layout] , identifier[mesh_shape] , identifier[tensor_dim] ): literal[string] identifier[layout_rules] = identifier[convert_to_layout_rules] ( identifier[layout] ) identifier[mesh_shape] = identifier[convert_to_shape] ( identifier[mesh_shape] ) identifier[mesh_axis] = identifier[layout_rules] . identifier[tensor_dimension_to_mesh_axis] ( identifier[tensor_dim] , identifier[mesh_shape] ) keyword[if] identifier[mesh_axis] keyword[is] keyword[None] : keyword[return] literal[int] keyword[else] : keyword[return] identifier[mesh_shape] . identifier[dims] [ identifier[mesh_axis] ]. identifier[size]
def tensor_dim_to_mesh_dim_size(layout, mesh_shape, tensor_dim): """How many ways does a tensor dimension get split. This is used to "cheat" when building the mtf graph and peek at how a tensor dimension will be split. Returns 1 if the tensor dimension is not split. Args: layout: an input to convert_to_layout_rules mesh_shape: an input to convert_to_shape tensor_dim: a Dimension Returns: an integer """ layout_rules = convert_to_layout_rules(layout) mesh_shape = convert_to_shape(mesh_shape) mesh_axis = layout_rules.tensor_dimension_to_mesh_axis(tensor_dim, mesh_shape) if mesh_axis is None: return 1 # depends on [control=['if'], data=[]] else: return mesh_shape.dims[mesh_axis].size
def reexport_tf_summary(): """Re-export all symbols from the original tf.summary. This function finds the original tf.summary V2 API and re-exports all the symbols from it within this module as well, so that when this module is patched into the TF API namespace as the new tf.summary, the effect is an overlay that just adds TensorBoard-provided symbols to the module. Finding the original tf.summary V2 API module reliably is a challenge, since this code runs *during* the overall TF API import process and depending on the order of imports (which is subject to change), different parts of the API may or may not be defined at the point in time we attempt to access them. This code also may be inserted into two places in the API (tf and tf.compat.v2) and may be re-executed multiple times even for the same place in the API (due to the TF module import system not populating sys.modules properly), so it needs to be robust to many different scenarios. The one constraint we can count on is that everywhere this module is loaded (via the component_api_helper mechanism in TF), it's going to be the 'summary' submodule of a larger API package that already has a 'summary' attribute that contains the TF-only summary API symbols we need to re-export. This may either be the original TF-only summary module (the first time we load this module) or a pre-existing copy of this module (if we're re-loading this module again). We don't actually need to differentiate those two cases, because it's okay if we re-import our own TensorBoard-provided symbols; they will just be overwritten later on in this file. So given that guarantee, the approach we take is to first attempt to locate a TF V2 API package that already has a 'summary' attribute (most likely this is the parent package into which we're being imported, but not necessarily), and then do the dynamic version of "from tf_api_package.summary import *". Lastly, this logic is encapsulated in a function to avoid symbol leakage. 
""" import sys # pylint: disable=g-import-not-at-top # API packages to check for the original V2 summary API, in preference order # to avoid going "under the hood" to the _api packages unless necessary. packages = [ 'tensorflow', 'tensorflow.compat.v2', 'tensorflow._api.v2', 'tensorflow._api.v2.compat.v2', 'tensorflow._api.v1.compat.v2', ] # If we aren't sure we're on V2, don't use tf.summary since it could be V1. # Note there may be false positives since the __version__ attribute may not be # defined at this point in the import process. if not getattr(tf, '__version__', '').startswith('2.'): # noqa: F821 packages.remove('tensorflow') def dynamic_wildcard_import(module): """Implements the logic of "from module import *" for the given module.""" symbols = getattr(module, '__all__', None) if symbols is None: symbols = [k for k in module.__dict__.keys() if not k.startswith('_')] globals().update({symbol: getattr(module, symbol) for symbol in symbols}) notfound = object() # sentinel value for package_name in packages: package = sys.modules.get(package_name, notfound) if package is notfound: # Either it isn't in this installation at all (e.g. the _api.vX packages # are only in API version X), it isn't imported yet, or it was imported # but not inserted into sys.modules under its user-facing name (for the # non-'_api' packages), at which point we continue down the list to look # "under the hood" for it via its '_api' package name. continue module = getattr(package, 'summary', None) if module is None: # This happens if the package hasn't been fully imported yet. For example, # the 'tensorflow' package won't yet have 'summary' attribute if we are # loading this code via the 'tensorflow.compat...' path and 'compat' is # imported before 'summary' in the 'tensorflow' __init__.py file. continue # Success, we hope. Import all the public symbols into this module. dynamic_wildcard_import(module) return
def function[reexport_tf_summary, parameter[]]: constant[Re-export all symbols from the original tf.summary. This function finds the original tf.summary V2 API and re-exports all the symbols from it within this module as well, so that when this module is patched into the TF API namespace as the new tf.summary, the effect is an overlay that just adds TensorBoard-provided symbols to the module. Finding the original tf.summary V2 API module reliably is a challenge, since this code runs *during* the overall TF API import process and depending on the order of imports (which is subject to change), different parts of the API may or may not be defined at the point in time we attempt to access them. This code also may be inserted into two places in the API (tf and tf.compat.v2) and may be re-executed multiple times even for the same place in the API (due to the TF module import system not populating sys.modules properly), so it needs to be robust to many different scenarios. The one constraint we can count on is that everywhere this module is loaded (via the component_api_helper mechanism in TF), it's going to be the 'summary' submodule of a larger API package that already has a 'summary' attribute that contains the TF-only summary API symbols we need to re-export. This may either be the original TF-only summary module (the first time we load this module) or a pre-existing copy of this module (if we're re-loading this module again). We don't actually need to differentiate those two cases, because it's okay if we re-import our own TensorBoard-provided symbols; they will just be overwritten later on in this file. So given that guarantee, the approach we take is to first attempt to locate a TF V2 API package that already has a 'summary' attribute (most likely this is the parent package into which we're being imported, but not necessarily), and then do the dynamic version of "from tf_api_package.summary import *". 
Lastly, this logic is encapsulated in a function to avoid symbol leakage. ] import module[sys] variable[packages] assign[=] list[[<ast.Constant object at 0x7da1b1f460e0>, <ast.Constant object at 0x7da1b1f45600>, <ast.Constant object at 0x7da1b1f47eb0>, <ast.Constant object at 0x7da1b1f448e0>, <ast.Constant object at 0x7da1b1f451e0>]] if <ast.UnaryOp object at 0x7da1b1f47ee0> begin[:] call[name[packages].remove, parameter[constant[tensorflow]]] def function[dynamic_wildcard_import, parameter[module]]: constant[Implements the logic of "from module import *" for the given module.] variable[symbols] assign[=] call[name[getattr], parameter[name[module], constant[__all__], constant[None]]] if compare[name[symbols] is constant[None]] begin[:] variable[symbols] assign[=] <ast.ListComp object at 0x7da1b1f46530> call[call[name[globals], parameter[]].update, parameter[<ast.DictComp object at 0x7da1b1f45210>]] variable[notfound] assign[=] call[name[object], parameter[]] for taget[name[package_name]] in starred[name[packages]] begin[:] variable[package] assign[=] call[name[sys].modules.get, parameter[name[package_name], name[notfound]]] if compare[name[package] is name[notfound]] begin[:] continue variable[module] assign[=] call[name[getattr], parameter[name[package], constant[summary], constant[None]]] if compare[name[module] is constant[None]] begin[:] continue call[name[dynamic_wildcard_import], parameter[name[module]]] return[None]
keyword[def] identifier[reexport_tf_summary] (): literal[string] keyword[import] identifier[sys] identifier[packages] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , ] keyword[if] keyword[not] identifier[getattr] ( identifier[tf] , literal[string] , literal[string] ). identifier[startswith] ( literal[string] ): identifier[packages] . identifier[remove] ( literal[string] ) keyword[def] identifier[dynamic_wildcard_import] ( identifier[module] ): literal[string] identifier[symbols] = identifier[getattr] ( identifier[module] , literal[string] , keyword[None] ) keyword[if] identifier[symbols] keyword[is] keyword[None] : identifier[symbols] =[ identifier[k] keyword[for] identifier[k] keyword[in] identifier[module] . identifier[__dict__] . identifier[keys] () keyword[if] keyword[not] identifier[k] . identifier[startswith] ( literal[string] )] identifier[globals] (). identifier[update] ({ identifier[symbol] : identifier[getattr] ( identifier[module] , identifier[symbol] ) keyword[for] identifier[symbol] keyword[in] identifier[symbols] }) identifier[notfound] = identifier[object] () keyword[for] identifier[package_name] keyword[in] identifier[packages] : identifier[package] = identifier[sys] . identifier[modules] . identifier[get] ( identifier[package_name] , identifier[notfound] ) keyword[if] identifier[package] keyword[is] identifier[notfound] : keyword[continue] identifier[module] = identifier[getattr] ( identifier[package] , literal[string] , keyword[None] ) keyword[if] identifier[module] keyword[is] keyword[None] : keyword[continue] identifier[dynamic_wildcard_import] ( identifier[module] ) keyword[return]
def reexport_tf_summary(): """Re-export all symbols from the original tf.summary. This function finds the original tf.summary V2 API and re-exports all the symbols from it within this module as well, so that when this module is patched into the TF API namespace as the new tf.summary, the effect is an overlay that just adds TensorBoard-provided symbols to the module. Finding the original tf.summary V2 API module reliably is a challenge, since this code runs *during* the overall TF API import process and depending on the order of imports (which is subject to change), different parts of the API may or may not be defined at the point in time we attempt to access them. This code also may be inserted into two places in the API (tf and tf.compat.v2) and may be re-executed multiple times even for the same place in the API (due to the TF module import system not populating sys.modules properly), so it needs to be robust to many different scenarios. The one constraint we can count on is that everywhere this module is loaded (via the component_api_helper mechanism in TF), it's going to be the 'summary' submodule of a larger API package that already has a 'summary' attribute that contains the TF-only summary API symbols we need to re-export. This may either be the original TF-only summary module (the first time we load this module) or a pre-existing copy of this module (if we're re-loading this module again). We don't actually need to differentiate those two cases, because it's okay if we re-import our own TensorBoard-provided symbols; they will just be overwritten later on in this file. So given that guarantee, the approach we take is to first attempt to locate a TF V2 API package that already has a 'summary' attribute (most likely this is the parent package into which we're being imported, but not necessarily), and then do the dynamic version of "from tf_api_package.summary import *". Lastly, this logic is encapsulated in a function to avoid symbol leakage. 
""" import sys # pylint: disable=g-import-not-at-top # API packages to check for the original V2 summary API, in preference order # to avoid going "under the hood" to the _api packages unless necessary. packages = ['tensorflow', 'tensorflow.compat.v2', 'tensorflow._api.v2', 'tensorflow._api.v2.compat.v2', 'tensorflow._api.v1.compat.v2'] # If we aren't sure we're on V2, don't use tf.summary since it could be V1. # Note there may be false positives since the __version__ attribute may not be # defined at this point in the import process. if not getattr(tf, '__version__', '').startswith('2.'): # noqa: F821 packages.remove('tensorflow') # depends on [control=['if'], data=[]] def dynamic_wildcard_import(module): """Implements the logic of "from module import *" for the given module.""" symbols = getattr(module, '__all__', None) if symbols is None: symbols = [k for k in module.__dict__.keys() if not k.startswith('_')] # depends on [control=['if'], data=['symbols']] globals().update({symbol: getattr(module, symbol) for symbol in symbols}) notfound = object() # sentinel value for package_name in packages: package = sys.modules.get(package_name, notfound) if package is notfound: # Either it isn't in this installation at all (e.g. the _api.vX packages # are only in API version X), it isn't imported yet, or it was imported # but not inserted into sys.modules under its user-facing name (for the # non-'_api' packages), at which point we continue down the list to look # "under the hood" for it via its '_api' package name. continue # depends on [control=['if'], data=[]] module = getattr(package, 'summary', None) if module is None: # This happens if the package hasn't been fully imported yet. For example, # the 'tensorflow' package won't yet have 'summary' attribute if we are # loading this code via the 'tensorflow.compat...' path and 'compat' is # imported before 'summary' in the 'tensorflow' __init__.py file. continue # depends on [control=['if'], data=[]] # Success, we hope. 
Import all the public symbols into this module. dynamic_wildcard_import(module) return # depends on [control=['for'], data=['package_name']]
def configure_error_handlers(app): """ Configure application error handlers """ def render_error(error): return (render_template('errors/%s.html' % error.code, title=error_messages[error.code], code=error.code), error.code) for (errcode, title) in error_messages.iteritems(): app.errorhandler(errcode)(render_error)
def function[configure_error_handlers, parameter[app]]: constant[ Configure application error handlers ] def function[render_error, parameter[error]]: return[tuple[[<ast.Call object at 0x7da1b1616740>, <ast.Attribute object at 0x7da1b1616ec0>]]] for taget[tuple[[<ast.Name object at 0x7da1b1614580>, <ast.Name object at 0x7da1b16144c0>]]] in starred[call[name[error_messages].iteritems, parameter[]]] begin[:] call[call[name[app].errorhandler, parameter[name[errcode]]], parameter[name[render_error]]]
keyword[def] identifier[configure_error_handlers] ( identifier[app] ): literal[string] keyword[def] identifier[render_error] ( identifier[error] ): keyword[return] ( identifier[render_template] ( literal[string] % identifier[error] . identifier[code] , identifier[title] = identifier[error_messages] [ identifier[error] . identifier[code] ], identifier[code] = identifier[error] . identifier[code] ), identifier[error] . identifier[code] ) keyword[for] ( identifier[errcode] , identifier[title] ) keyword[in] identifier[error_messages] . identifier[iteritems] (): identifier[app] . identifier[errorhandler] ( identifier[errcode] )( identifier[render_error] )
def configure_error_handlers(app): """ Configure application error handlers """ def render_error(error): return (render_template('errors/%s.html' % error.code, title=error_messages[error.code], code=error.code), error.code) for (errcode, title) in error_messages.iteritems(): app.errorhandler(errcode)(render_error) # depends on [control=['for'], data=[]]
def get_url(cls, url, uid, **kwargs): """ Construct the URL for talking to an individual resource. http://myapi.com/api/resource/1 Args: url: The url for this resource uid: The unique identifier for an individual resource kwargs: Additional keyword argueents returns: final_url: The URL for this individual resource """ if uid: url = '{}/{}'.format(url, uid) else: url = url return cls._parse_url_and_validate(url)
def function[get_url, parameter[cls, url, uid]]: constant[ Construct the URL for talking to an individual resource. http://myapi.com/api/resource/1 Args: url: The url for this resource uid: The unique identifier for an individual resource kwargs: Additional keyword argueents returns: final_url: The URL for this individual resource ] if name[uid] begin[:] variable[url] assign[=] call[constant[{}/{}].format, parameter[name[url], name[uid]]] return[call[name[cls]._parse_url_and_validate, parameter[name[url]]]]
keyword[def] identifier[get_url] ( identifier[cls] , identifier[url] , identifier[uid] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[uid] : identifier[url] = literal[string] . identifier[format] ( identifier[url] , identifier[uid] ) keyword[else] : identifier[url] = identifier[url] keyword[return] identifier[cls] . identifier[_parse_url_and_validate] ( identifier[url] )
def get_url(cls, url, uid, **kwargs): """ Construct the URL for talking to an individual resource. http://myapi.com/api/resource/1 Args: url: The url for this resource uid: The unique identifier for an individual resource kwargs: Additional keyword argueents returns: final_url: The URL for this individual resource """ if uid: url = '{}/{}'.format(url, uid) # depends on [control=['if'], data=[]] else: url = url return cls._parse_url_and_validate(url)
def _posix_get_window_size(): """Return (width, height) of console terminal on POSIX system. (0, 0) on IOError, i.e. when no console is allocated. """ # see README.txt for reference information # http://www.kernel.org/doc/man-pages/online/pages/man4/tty_ioctl.4.html from fcntl import ioctl from termios import TIOCGWINSZ from array import array """ struct winsize { unsigned short ws_row; unsigned short ws_col; unsigned short ws_xpixel; /* unused */ unsigned short ws_ypixel; /* unused */ }; """ winsize = array("H", [0] * 4) try: ioctl(sys.stdout.fileno(), TIOCGWINSZ, winsize) except IOError: # for example IOError: [Errno 25] Inappropriate ioctl for device # when output is redirected # [ ] TODO: check fd with os.isatty pass return (winsize[1], winsize[0])
def function[_posix_get_window_size, parameter[]]: constant[Return (width, height) of console terminal on POSIX system. (0, 0) on IOError, i.e. when no console is allocated. ] from relative_module[fcntl] import module[ioctl] from relative_module[termios] import module[TIOCGWINSZ] from relative_module[array] import module[array] constant[ struct winsize { unsigned short ws_row; unsigned short ws_col; unsigned short ws_xpixel; /* unused */ unsigned short ws_ypixel; /* unused */ }; ] variable[winsize] assign[=] call[name[array], parameter[constant[H], binary_operation[list[[<ast.Constant object at 0x7da1b05e2710>]] * constant[4]]]] <ast.Try object at 0x7da1b05e3c10> return[tuple[[<ast.Subscript object at 0x7da1b05ff1c0>, <ast.Subscript object at 0x7da1b05fd330>]]]
keyword[def] identifier[_posix_get_window_size] (): literal[string] keyword[from] identifier[fcntl] keyword[import] identifier[ioctl] keyword[from] identifier[termios] keyword[import] identifier[TIOCGWINSZ] keyword[from] identifier[array] keyword[import] identifier[array] literal[string] identifier[winsize] = identifier[array] ( literal[string] ,[ literal[int] ]* literal[int] ) keyword[try] : identifier[ioctl] ( identifier[sys] . identifier[stdout] . identifier[fileno] (), identifier[TIOCGWINSZ] , identifier[winsize] ) keyword[except] identifier[IOError] : keyword[pass] keyword[return] ( identifier[winsize] [ literal[int] ], identifier[winsize] [ literal[int] ])
def _posix_get_window_size(): """Return (width, height) of console terminal on POSIX system. (0, 0) on IOError, i.e. when no console is allocated. """ # see README.txt for reference information # http://www.kernel.org/doc/man-pages/online/pages/man4/tty_ioctl.4.html from fcntl import ioctl from termios import TIOCGWINSZ from array import array '\n struct winsize {\n unsigned short ws_row;\n unsigned short ws_col;\n unsigned short ws_xpixel; /* unused */\n unsigned short ws_ypixel; /* unused */\n };\n ' winsize = array('H', [0] * 4) try: ioctl(sys.stdout.fileno(), TIOCGWINSZ, winsize) # depends on [control=['try'], data=[]] except IOError: # for example IOError: [Errno 25] Inappropriate ioctl for device # when output is redirected # [ ] TODO: check fd with os.isatty pass # depends on [control=['except'], data=[]] return (winsize[1], winsize[0])
def get_seconds(value, scale): """Convert time scale dict to seconds Given a dictionary with keys for scale and value, convert value into seconds based on scale. """ scales = { 'seconds': lambda x: x, 'minutes': lambda x: x * 60, 'hours': lambda x: x * 60 * 60, 'days': lambda x: x * 60 * 60 * 24, 'weeks': lambda x: x * 60 * 60 * 24 * 7, 'months': lambda x: x * 60 * 60 * 24 * 30, 'years': lambda x: x * 60 * 60 * 24 * 365, } return scales[scale](value)
def function[get_seconds, parameter[value, scale]]: constant[Convert time scale dict to seconds Given a dictionary with keys for scale and value, convert value into seconds based on scale. ] variable[scales] assign[=] dictionary[[<ast.Constant object at 0x7da20c76c850>, <ast.Constant object at 0x7da20c76d810>, <ast.Constant object at 0x7da20c76cd90>, <ast.Constant object at 0x7da20c76ec50>, <ast.Constant object at 0x7da20c76d930>, <ast.Constant object at 0x7da20c76c760>, <ast.Constant object at 0x7da20c76cbb0>], [<ast.Lambda object at 0x7da20c76c670>, <ast.Lambda object at 0x7da20c76f9a0>, <ast.Lambda object at 0x7da20c76e9e0>, <ast.Lambda object at 0x7da20c76dc60>, <ast.Lambda object at 0x7da20c76f100>, <ast.Lambda object at 0x7da20c76eb00>, <ast.Lambda object at 0x7da20c76e590>]] return[call[call[name[scales]][name[scale]], parameter[name[value]]]]
keyword[def] identifier[get_seconds] ( identifier[value] , identifier[scale] ): literal[string] identifier[scales] ={ literal[string] : keyword[lambda] identifier[x] : identifier[x] , literal[string] : keyword[lambda] identifier[x] : identifier[x] * literal[int] , literal[string] : keyword[lambda] identifier[x] : identifier[x] * literal[int] * literal[int] , literal[string] : keyword[lambda] identifier[x] : identifier[x] * literal[int] * literal[int] * literal[int] , literal[string] : keyword[lambda] identifier[x] : identifier[x] * literal[int] * literal[int] * literal[int] * literal[int] , literal[string] : keyword[lambda] identifier[x] : identifier[x] * literal[int] * literal[int] * literal[int] * literal[int] , literal[string] : keyword[lambda] identifier[x] : identifier[x] * literal[int] * literal[int] * literal[int] * literal[int] , } keyword[return] identifier[scales] [ identifier[scale] ]( identifier[value] )
def get_seconds(value, scale): """Convert time scale dict to seconds Given a dictionary with keys for scale and value, convert value into seconds based on scale. """ scales = {'seconds': lambda x: x, 'minutes': lambda x: x * 60, 'hours': lambda x: x * 60 * 60, 'days': lambda x: x * 60 * 60 * 24, 'weeks': lambda x: x * 60 * 60 * 24 * 7, 'months': lambda x: x * 60 * 60 * 24 * 30, 'years': lambda x: x * 60 * 60 * 24 * 365} return scales[scale](value)
def _check_samples_line(klass, arr): """Peform additional check on samples line""" if len(arr) <= len(REQUIRE_NO_SAMPLE_HEADER): if tuple(arr) != REQUIRE_NO_SAMPLE_HEADER: raise exceptions.IncorrectVCFFormat( "Sample header line indicates no sample but does not " "equal required prefix {}".format("\t".join(REQUIRE_NO_SAMPLE_HEADER)) ) elif tuple(arr[: len(REQUIRE_SAMPLE_HEADER)]) != REQUIRE_SAMPLE_HEADER: raise exceptions.IncorrectVCFFormat( 'Sample header line (starting with "#CHROM") does not ' "start with required prefix {}".format("\t".join(REQUIRE_SAMPLE_HEADER)) )
def function[_check_samples_line, parameter[klass, arr]]: constant[Peform additional check on samples line] if compare[call[name[len], parameter[name[arr]]] less_or_equal[<=] call[name[len], parameter[name[REQUIRE_NO_SAMPLE_HEADER]]]] begin[:] if compare[call[name[tuple], parameter[name[arr]]] not_equal[!=] name[REQUIRE_NO_SAMPLE_HEADER]] begin[:] <ast.Raise object at 0x7da18ede6da0>
keyword[def] identifier[_check_samples_line] ( identifier[klass] , identifier[arr] ): literal[string] keyword[if] identifier[len] ( identifier[arr] )<= identifier[len] ( identifier[REQUIRE_NO_SAMPLE_HEADER] ): keyword[if] identifier[tuple] ( identifier[arr] )!= identifier[REQUIRE_NO_SAMPLE_HEADER] : keyword[raise] identifier[exceptions] . identifier[IncorrectVCFFormat] ( literal[string] literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[REQUIRE_NO_SAMPLE_HEADER] )) ) keyword[elif] identifier[tuple] ( identifier[arr] [: identifier[len] ( identifier[REQUIRE_SAMPLE_HEADER] )])!= identifier[REQUIRE_SAMPLE_HEADER] : keyword[raise] identifier[exceptions] . identifier[IncorrectVCFFormat] ( literal[string] literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[REQUIRE_SAMPLE_HEADER] )) )
def _check_samples_line(klass, arr): """Peform additional check on samples line""" if len(arr) <= len(REQUIRE_NO_SAMPLE_HEADER): if tuple(arr) != REQUIRE_NO_SAMPLE_HEADER: raise exceptions.IncorrectVCFFormat('Sample header line indicates no sample but does not equal required prefix {}'.format('\t'.join(REQUIRE_NO_SAMPLE_HEADER))) # depends on [control=['if'], data=['REQUIRE_NO_SAMPLE_HEADER']] # depends on [control=['if'], data=[]] elif tuple(arr[:len(REQUIRE_SAMPLE_HEADER)]) != REQUIRE_SAMPLE_HEADER: raise exceptions.IncorrectVCFFormat('Sample header line (starting with "#CHROM") does not start with required prefix {}'.format('\t'.join(REQUIRE_SAMPLE_HEADER))) # depends on [control=['if'], data=['REQUIRE_SAMPLE_HEADER']]
def main(): """Main CLI application.""" parser = get_parser() argcomplete.autocomplete(parser, always_complete_options=False) args = parser.parse_args() setup_logger( level=args.log_level ) try: if args.config and \ args.command in \ ('aggregate', 'show-closed-prs', 'show-all-prs'): run(args) else: parser.print_help() except KeyboardInterrupt: pass
def function[main, parameter[]]: constant[Main CLI application.] variable[parser] assign[=] call[name[get_parser], parameter[]] call[name[argcomplete].autocomplete, parameter[name[parser]]] variable[args] assign[=] call[name[parser].parse_args, parameter[]] call[name[setup_logger], parameter[]] <ast.Try object at 0x7da1b03288b0>
keyword[def] identifier[main] (): literal[string] identifier[parser] = identifier[get_parser] () identifier[argcomplete] . identifier[autocomplete] ( identifier[parser] , identifier[always_complete_options] = keyword[False] ) identifier[args] = identifier[parser] . identifier[parse_args] () identifier[setup_logger] ( identifier[level] = identifier[args] . identifier[log_level] ) keyword[try] : keyword[if] identifier[args] . identifier[config] keyword[and] identifier[args] . identifier[command] keyword[in] ( literal[string] , literal[string] , literal[string] ): identifier[run] ( identifier[args] ) keyword[else] : identifier[parser] . identifier[print_help] () keyword[except] identifier[KeyboardInterrupt] : keyword[pass]
def main(): """Main CLI application.""" parser = get_parser() argcomplete.autocomplete(parser, always_complete_options=False) args = parser.parse_args() setup_logger(level=args.log_level) try: if args.config and args.command in ('aggregate', 'show-closed-prs', 'show-all-prs'): run(args) # depends on [control=['if'], data=[]] else: parser.print_help() # depends on [control=['try'], data=[]] except KeyboardInterrupt: pass # depends on [control=['except'], data=[]]
def _get_hgroup(name, array): '''Private function to check hostgroup''' hostgroup = None for temp in array.list_hgroups(): if temp['name'] == name: hostgroup = temp break return hostgroup
def function[_get_hgroup, parameter[name, array]]: constant[Private function to check hostgroup] variable[hostgroup] assign[=] constant[None] for taget[name[temp]] in starred[call[name[array].list_hgroups, parameter[]]] begin[:] if compare[call[name[temp]][constant[name]] equal[==] name[name]] begin[:] variable[hostgroup] assign[=] name[temp] break return[name[hostgroup]]
keyword[def] identifier[_get_hgroup] ( identifier[name] , identifier[array] ): literal[string] identifier[hostgroup] = keyword[None] keyword[for] identifier[temp] keyword[in] identifier[array] . identifier[list_hgroups] (): keyword[if] identifier[temp] [ literal[string] ]== identifier[name] : identifier[hostgroup] = identifier[temp] keyword[break] keyword[return] identifier[hostgroup]
def _get_hgroup(name, array): """Private function to check hostgroup""" hostgroup = None for temp in array.list_hgroups(): if temp['name'] == name: hostgroup = temp break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['temp']] return hostgroup
def get(self, entry): """ get the value from vault secret backend """ if self.apiVersion == 1: path = self.secretsmount + '/' + entry else: path = self.secretsmount + '/data/' + entry # note that the HTTP path contains v1 for both versions of the key-value # secret engine. Different versions of the key-value engine are # effectively separate secret engines in vault, with the same base HTTP # API, but with different paths within it. proj = yield self._http.get('/v1/{0}'.format(path)) code = yield proj.code if code != 200: raise KeyError("The key %s does not exist in Vault provider: request" " return code:%d." % (entry, code)) json = yield proj.json() if self.apiVersion == 1: ret = json.get('data', {}).get('value') else: ret = json.get('data', {}).get('data', {}).get('value') return ret
def function[get, parameter[self, entry]]: constant[ get the value from vault secret backend ] if compare[name[self].apiVersion equal[==] constant[1]] begin[:] variable[path] assign[=] binary_operation[binary_operation[name[self].secretsmount + constant[/]] + name[entry]] variable[proj] assign[=] <ast.Yield object at 0x7da2041d86a0> variable[code] assign[=] <ast.Yield object at 0x7da204565870> if compare[name[code] not_equal[!=] constant[200]] begin[:] <ast.Raise object at 0x7da20eb29fc0> variable[json] assign[=] <ast.Yield object at 0x7da20eb2ae60> if compare[name[self].apiVersion equal[==] constant[1]] begin[:] variable[ret] assign[=] call[call[name[json].get, parameter[constant[data], dictionary[[], []]]].get, parameter[constant[value]]] return[name[ret]]
keyword[def] identifier[get] ( identifier[self] , identifier[entry] ): literal[string] keyword[if] identifier[self] . identifier[apiVersion] == literal[int] : identifier[path] = identifier[self] . identifier[secretsmount] + literal[string] + identifier[entry] keyword[else] : identifier[path] = identifier[self] . identifier[secretsmount] + literal[string] + identifier[entry] identifier[proj] = keyword[yield] identifier[self] . identifier[_http] . identifier[get] ( literal[string] . identifier[format] ( identifier[path] )) identifier[code] = keyword[yield] identifier[proj] . identifier[code] keyword[if] identifier[code] != literal[int] : keyword[raise] identifier[KeyError] ( literal[string] literal[string] %( identifier[entry] , identifier[code] )) identifier[json] = keyword[yield] identifier[proj] . identifier[json] () keyword[if] identifier[self] . identifier[apiVersion] == literal[int] : identifier[ret] = identifier[json] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ) keyword[else] : identifier[ret] = identifier[json] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ) keyword[return] identifier[ret]
def get(self, entry): """ get the value from vault secret backend """ if self.apiVersion == 1: path = self.secretsmount + '/' + entry # depends on [control=['if'], data=[]] else: path = self.secretsmount + '/data/' + entry # note that the HTTP path contains v1 for both versions of the key-value # secret engine. Different versions of the key-value engine are # effectively separate secret engines in vault, with the same base HTTP # API, but with different paths within it. proj = (yield self._http.get('/v1/{0}'.format(path))) code = (yield proj.code) if code != 200: raise KeyError('The key %s does not exist in Vault provider: request return code:%d.' % (entry, code)) # depends on [control=['if'], data=['code']] json = (yield proj.json()) if self.apiVersion == 1: ret = json.get('data', {}).get('value') # depends on [control=['if'], data=[]] else: ret = json.get('data', {}).get('data', {}).get('value') return ret
def allowed(self, url, agent): '''Return true if the provided URL is allowed to agent.''' return self.get(url).allowed(url, agent)
def function[allowed, parameter[self, url, agent]]: constant[Return true if the provided URL is allowed to agent.] return[call[call[name[self].get, parameter[name[url]]].allowed, parameter[name[url], name[agent]]]]
keyword[def] identifier[allowed] ( identifier[self] , identifier[url] , identifier[agent] ): literal[string] keyword[return] identifier[self] . identifier[get] ( identifier[url] ). identifier[allowed] ( identifier[url] , identifier[agent] )
def allowed(self, url, agent): """Return true if the provided URL is allowed to agent.""" return self.get(url).allowed(url, agent)
def to_string(mnemonic): """Return the string representation of the given mnemonic. """ strings = { # Arithmetic Instructions ReilMnemonic.ADD: "add", ReilMnemonic.SUB: "sub", ReilMnemonic.MUL: "mul", ReilMnemonic.DIV: "div", ReilMnemonic.MOD: "mod", ReilMnemonic.BSH: "bsh", # Bitwise Instructions ReilMnemonic.AND: "and", ReilMnemonic.OR: "or", ReilMnemonic.XOR: "xor", # Data Transfer Instructions ReilMnemonic.LDM: "ldm", ReilMnemonic.STM: "stm", ReilMnemonic.STR: "str", # Conditional Instructions ReilMnemonic.BISZ: "bisz", ReilMnemonic.JCC: "jcc", # Other Instructions ReilMnemonic.UNKN: "unkn", ReilMnemonic.UNDEF: "undef", ReilMnemonic.NOP: "nop", # Extensions ReilMnemonic.SEXT: "sext", ReilMnemonic.SDIV: "sdiv", ReilMnemonic.SMOD: "smod", ReilMnemonic.SMUL: "smul", } return strings[mnemonic]
def function[to_string, parameter[mnemonic]]: constant[Return the string representation of the given mnemonic. ] variable[strings] assign[=] dictionary[[<ast.Attribute object at 0x7da18eb57400>, <ast.Attribute object at 0x7da18eb56a10>, <ast.Attribute object at 0x7da18eb54370>, <ast.Attribute object at 0x7da18eb562f0>, <ast.Attribute object at 0x7da18eb560e0>, <ast.Attribute object at 0x7da18eb564d0>, <ast.Attribute object at 0x7da18eb574f0>, <ast.Attribute object at 0x7da18eb55a50>, <ast.Attribute object at 0x7da18eb548e0>, <ast.Attribute object at 0x7da18eb57970>, <ast.Attribute object at 0x7da18eb572b0>, <ast.Attribute object at 0x7da18eb56b30>, <ast.Attribute object at 0x7da18eb56020>, <ast.Attribute object at 0x7da18eb57340>, <ast.Attribute object at 0x7da18eb54460>, <ast.Attribute object at 0x7da18eb57940>, <ast.Attribute object at 0x7da18eb57c70>, <ast.Attribute object at 0x7da18eb55c90>, <ast.Attribute object at 0x7da18eb56170>, <ast.Attribute object at 0x7da18eb54f10>, <ast.Attribute object at 0x7da18eb54f70>], [<ast.Constant object at 0x7da18eb55000>, <ast.Constant object at 0x7da18eb56530>, <ast.Constant object at 0x7da18eb55cf0>, <ast.Constant object at 0x7da18eb57910>, <ast.Constant object at 0x7da18eb56830>, <ast.Constant object at 0x7da18eb57850>, <ast.Constant object at 0x7da18eb54190>, <ast.Constant object at 0x7da18eb57cd0>, <ast.Constant object at 0x7da18eb54490>, <ast.Constant object at 0x7da18eb56e00>, <ast.Constant object at 0x7da18eb578e0>, <ast.Constant object at 0x7da18eb561d0>, <ast.Constant object at 0x7da18eb55ff0>, <ast.Constant object at 0x7da18eb55e40>, <ast.Constant object at 0x7da18eb54520>, <ast.Constant object at 0x7da18eb56350>, <ast.Constant object at 0x7da18eb57490>, <ast.Constant object at 0x7da18eb54250>, <ast.Constant object at 0x7da18eb569e0>, <ast.Constant object at 0x7da18eb54a60>, <ast.Constant object at 0x7da1b26afd60>]] return[call[name[strings]][name[mnemonic]]]
keyword[def] identifier[to_string] ( identifier[mnemonic] ): literal[string] identifier[strings] ={ identifier[ReilMnemonic] . identifier[ADD] : literal[string] , identifier[ReilMnemonic] . identifier[SUB] : literal[string] , identifier[ReilMnemonic] . identifier[MUL] : literal[string] , identifier[ReilMnemonic] . identifier[DIV] : literal[string] , identifier[ReilMnemonic] . identifier[MOD] : literal[string] , identifier[ReilMnemonic] . identifier[BSH] : literal[string] , identifier[ReilMnemonic] . identifier[AND] : literal[string] , identifier[ReilMnemonic] . identifier[OR] : literal[string] , identifier[ReilMnemonic] . identifier[XOR] : literal[string] , identifier[ReilMnemonic] . identifier[LDM] : literal[string] , identifier[ReilMnemonic] . identifier[STM] : literal[string] , identifier[ReilMnemonic] . identifier[STR] : literal[string] , identifier[ReilMnemonic] . identifier[BISZ] : literal[string] , identifier[ReilMnemonic] . identifier[JCC] : literal[string] , identifier[ReilMnemonic] . identifier[UNKN] : literal[string] , identifier[ReilMnemonic] . identifier[UNDEF] : literal[string] , identifier[ReilMnemonic] . identifier[NOP] : literal[string] , identifier[ReilMnemonic] . identifier[SEXT] : literal[string] , identifier[ReilMnemonic] . identifier[SDIV] : literal[string] , identifier[ReilMnemonic] . identifier[SMOD] : literal[string] , identifier[ReilMnemonic] . identifier[SMUL] : literal[string] , } keyword[return] identifier[strings] [ identifier[mnemonic] ]
def to_string(mnemonic): """Return the string representation of the given mnemonic. """ # Arithmetic Instructions # Bitwise Instructions # Data Transfer Instructions # Conditional Instructions # Other Instructions # Extensions strings = {ReilMnemonic.ADD: 'add', ReilMnemonic.SUB: 'sub', ReilMnemonic.MUL: 'mul', ReilMnemonic.DIV: 'div', ReilMnemonic.MOD: 'mod', ReilMnemonic.BSH: 'bsh', ReilMnemonic.AND: 'and', ReilMnemonic.OR: 'or', ReilMnemonic.XOR: 'xor', ReilMnemonic.LDM: 'ldm', ReilMnemonic.STM: 'stm', ReilMnemonic.STR: 'str', ReilMnemonic.BISZ: 'bisz', ReilMnemonic.JCC: 'jcc', ReilMnemonic.UNKN: 'unkn', ReilMnemonic.UNDEF: 'undef', ReilMnemonic.NOP: 'nop', ReilMnemonic.SEXT: 'sext', ReilMnemonic.SDIV: 'sdiv', ReilMnemonic.SMOD: 'smod', ReilMnemonic.SMUL: 'smul'} return strings[mnemonic]
def reset_coords(self, names=None, drop=False, inplace=None): """Given names of coordinates, reset them to become variables Parameters ---------- names : str or list of str, optional Name(s) of non-index coordinates in this dataset to reset into variables. By default, all non-index coordinates are reset. drop : bool, optional If True, remove coordinates instead of converting them into variables. inplace : bool, optional If True, modify this dataset inplace. Otherwise, create a new object. Returns ------- Dataset """ inplace = _check_inplace(inplace) if names is None: names = self._coord_names - set(self.dims) else: if isinstance(names, str): names = [names] self._assert_all_in_dataset(names) bad_coords = set(names) & set(self.dims) if bad_coords: raise ValueError( 'cannot remove index coordinates with reset_coords: %s' % bad_coords) obj = self if inplace else self.copy() obj._coord_names.difference_update(names) if drop: for name in names: del obj._variables[name] return obj
def function[reset_coords, parameter[self, names, drop, inplace]]: constant[Given names of coordinates, reset them to become variables Parameters ---------- names : str or list of str, optional Name(s) of non-index coordinates in this dataset to reset into variables. By default, all non-index coordinates are reset. drop : bool, optional If True, remove coordinates instead of converting them into variables. inplace : bool, optional If True, modify this dataset inplace. Otherwise, create a new object. Returns ------- Dataset ] variable[inplace] assign[=] call[name[_check_inplace], parameter[name[inplace]]] if compare[name[names] is constant[None]] begin[:] variable[names] assign[=] binary_operation[name[self]._coord_names - call[name[set], parameter[name[self].dims]]] variable[obj] assign[=] <ast.IfExp object at 0x7da18f09ed10> call[name[obj]._coord_names.difference_update, parameter[name[names]]] if name[drop] begin[:] for taget[name[name]] in starred[name[names]] begin[:] <ast.Delete object at 0x7da18f09f220> return[name[obj]]
keyword[def] identifier[reset_coords] ( identifier[self] , identifier[names] = keyword[None] , identifier[drop] = keyword[False] , identifier[inplace] = keyword[None] ): literal[string] identifier[inplace] = identifier[_check_inplace] ( identifier[inplace] ) keyword[if] identifier[names] keyword[is] keyword[None] : identifier[names] = identifier[self] . identifier[_coord_names] - identifier[set] ( identifier[self] . identifier[dims] ) keyword[else] : keyword[if] identifier[isinstance] ( identifier[names] , identifier[str] ): identifier[names] =[ identifier[names] ] identifier[self] . identifier[_assert_all_in_dataset] ( identifier[names] ) identifier[bad_coords] = identifier[set] ( identifier[names] )& identifier[set] ( identifier[self] . identifier[dims] ) keyword[if] identifier[bad_coords] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[bad_coords] ) identifier[obj] = identifier[self] keyword[if] identifier[inplace] keyword[else] identifier[self] . identifier[copy] () identifier[obj] . identifier[_coord_names] . identifier[difference_update] ( identifier[names] ) keyword[if] identifier[drop] : keyword[for] identifier[name] keyword[in] identifier[names] : keyword[del] identifier[obj] . identifier[_variables] [ identifier[name] ] keyword[return] identifier[obj]
def reset_coords(self, names=None, drop=False, inplace=None): """Given names of coordinates, reset them to become variables Parameters ---------- names : str or list of str, optional Name(s) of non-index coordinates in this dataset to reset into variables. By default, all non-index coordinates are reset. drop : bool, optional If True, remove coordinates instead of converting them into variables. inplace : bool, optional If True, modify this dataset inplace. Otherwise, create a new object. Returns ------- Dataset """ inplace = _check_inplace(inplace) if names is None: names = self._coord_names - set(self.dims) # depends on [control=['if'], data=['names']] else: if isinstance(names, str): names = [names] # depends on [control=['if'], data=[]] self._assert_all_in_dataset(names) bad_coords = set(names) & set(self.dims) if bad_coords: raise ValueError('cannot remove index coordinates with reset_coords: %s' % bad_coords) # depends on [control=['if'], data=[]] obj = self if inplace else self.copy() obj._coord_names.difference_update(names) if drop: for name in names: del obj._variables[name] # depends on [control=['for'], data=['name']] # depends on [control=['if'], data=[]] return obj
def reconstruct_dict(dot_paths, values): ''' a method for reconstructing a dictionary from the values along dot paths ''' output_dict = {} for i in range(len(dot_paths)): if i + 1 <= len(values): path_segments = segment_path(dot_paths[i]) current_nest = output_dict for j in range(len(path_segments)): key_name = path_segments[j] try: key_name = int(key_name) except: pass if j + 1 == len(path_segments): if isinstance(key_name, int): current_nest.append(values[i]) else: current_nest[key_name] = values[i] else: next_key = path_segments[j+1] try: next_key = int(next_key) except: pass if isinstance(next_key, int): if not key_name in current_nest.keys(): current_nest[key_name] = [] current_nest = current_nest[key_name] else: if isinstance(key_name, int): current_nest.append({}) current_nest = current_nest[len(current_nest) - 1] else: if not key_name in current_nest.keys(): current_nest[key_name] = {} current_nest = current_nest[key_name] return output_dict
def function[reconstruct_dict, parameter[dot_paths, values]]: constant[ a method for reconstructing a dictionary from the values along dot paths ] variable[output_dict] assign[=] dictionary[[], []] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[dot_paths]]]]]] begin[:] if compare[binary_operation[name[i] + constant[1]] less_or_equal[<=] call[name[len], parameter[name[values]]]] begin[:] variable[path_segments] assign[=] call[name[segment_path], parameter[call[name[dot_paths]][name[i]]]] variable[current_nest] assign[=] name[output_dict] for taget[name[j]] in starred[call[name[range], parameter[call[name[len], parameter[name[path_segments]]]]]] begin[:] variable[key_name] assign[=] call[name[path_segments]][name[j]] <ast.Try object at 0x7da20c6c7610> if compare[binary_operation[name[j] + constant[1]] equal[==] call[name[len], parameter[name[path_segments]]]] begin[:] if call[name[isinstance], parameter[name[key_name], name[int]]] begin[:] call[name[current_nest].append, parameter[call[name[values]][name[i]]]] return[name[output_dict]]
keyword[def] identifier[reconstruct_dict] ( identifier[dot_paths] , identifier[values] ): literal[string] identifier[output_dict] ={} keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[dot_paths] )): keyword[if] identifier[i] + literal[int] <= identifier[len] ( identifier[values] ): identifier[path_segments] = identifier[segment_path] ( identifier[dot_paths] [ identifier[i] ]) identifier[current_nest] = identifier[output_dict] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[path_segments] )): identifier[key_name] = identifier[path_segments] [ identifier[j] ] keyword[try] : identifier[key_name] = identifier[int] ( identifier[key_name] ) keyword[except] : keyword[pass] keyword[if] identifier[j] + literal[int] == identifier[len] ( identifier[path_segments] ): keyword[if] identifier[isinstance] ( identifier[key_name] , identifier[int] ): identifier[current_nest] . identifier[append] ( identifier[values] [ identifier[i] ]) keyword[else] : identifier[current_nest] [ identifier[key_name] ]= identifier[values] [ identifier[i] ] keyword[else] : identifier[next_key] = identifier[path_segments] [ identifier[j] + literal[int] ] keyword[try] : identifier[next_key] = identifier[int] ( identifier[next_key] ) keyword[except] : keyword[pass] keyword[if] identifier[isinstance] ( identifier[next_key] , identifier[int] ): keyword[if] keyword[not] identifier[key_name] keyword[in] identifier[current_nest] . identifier[keys] (): identifier[current_nest] [ identifier[key_name] ]=[] identifier[current_nest] = identifier[current_nest] [ identifier[key_name] ] keyword[else] : keyword[if] identifier[isinstance] ( identifier[key_name] , identifier[int] ): identifier[current_nest] . identifier[append] ({}) identifier[current_nest] = identifier[current_nest] [ identifier[len] ( identifier[current_nest] )- literal[int] ] keyword[else] : keyword[if] keyword[not] identifier[key_name] keyword[in] identifier[current_nest] . 
identifier[keys] (): identifier[current_nest] [ identifier[key_name] ]={} identifier[current_nest] = identifier[current_nest] [ identifier[key_name] ] keyword[return] identifier[output_dict]
def reconstruct_dict(dot_paths, values): """ a method for reconstructing a dictionary from the values along dot paths """ output_dict = {} for i in range(len(dot_paths)): if i + 1 <= len(values): path_segments = segment_path(dot_paths[i]) current_nest = output_dict for j in range(len(path_segments)): key_name = path_segments[j] try: key_name = int(key_name) # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] if j + 1 == len(path_segments): if isinstance(key_name, int): current_nest.append(values[i]) # depends on [control=['if'], data=[]] else: current_nest[key_name] = values[i] # depends on [control=['if'], data=[]] else: next_key = path_segments[j + 1] try: next_key = int(next_key) # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] if isinstance(next_key, int): if not key_name in current_nest.keys(): current_nest[key_name] = [] # depends on [control=['if'], data=[]] current_nest = current_nest[key_name] # depends on [control=['if'], data=[]] elif isinstance(key_name, int): current_nest.append({}) current_nest = current_nest[len(current_nest) - 1] # depends on [control=['if'], data=[]] else: if not key_name in current_nest.keys(): current_nest[key_name] = {} # depends on [control=['if'], data=[]] current_nest = current_nest[key_name] # depends on [control=['for'], data=['j']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] return output_dict
def choose_form(self, number=None, xpath=None, name=None, **kwargs): """ Set the default form. :param number: number of form (starting from zero) :param id: value of "id" attribute :param name: value of "name" attribute :param xpath: XPath query :raises: :class:`DataNotFound` if form not found :raises: :class:`GrabMisuseError` if method is called without parameters Selected form will be available via `form` attribute of `Grab` instance. All form methods will work with default form. Examples:: # Select second form g.choose_form(1) # Select by id g.choose_form(id="register") # Select by name g.choose_form(name="signup") # Select by xpath g.choose_form(xpath='//form[contains(@action, "/submit")]') """ id_ = kwargs.pop('id', None) if id_ is not None: try: self._lxml_form = self.select('//form[@id="%s"]' % id_).node() except IndexError: raise DataNotFound("There is no form with id: %s" % id_) elif name is not None: try: self._lxml_form = self.select( '//form[@name="%s"]' % name).node() except IndexError: raise DataNotFound('There is no form with name: %s' % name) elif number is not None: try: self._lxml_form = self.tree.forms[number] except IndexError: raise DataNotFound('There is no form with number: %s' % number) elif xpath is not None: try: self._lxml_form = self.select(xpath).node() except IndexError: raise DataNotFound( 'Could not find form with xpath: %s' % xpath) else: raise GrabMisuseError('choose_form methods requires one of ' '[number, id, name, xpath] arguments')
def function[choose_form, parameter[self, number, xpath, name]]: constant[ Set the default form. :param number: number of form (starting from zero) :param id: value of "id" attribute :param name: value of "name" attribute :param xpath: XPath query :raises: :class:`DataNotFound` if form not found :raises: :class:`GrabMisuseError` if method is called without parameters Selected form will be available via `form` attribute of `Grab` instance. All form methods will work with default form. Examples:: # Select second form g.choose_form(1) # Select by id g.choose_form(id="register") # Select by name g.choose_form(name="signup") # Select by xpath g.choose_form(xpath='//form[contains(@action, "/submit")]') ] variable[id_] assign[=] call[name[kwargs].pop, parameter[constant[id], constant[None]]] if compare[name[id_] is_not constant[None]] begin[:] <ast.Try object at 0x7da1b18dc4c0>
keyword[def] identifier[choose_form] ( identifier[self] , identifier[number] = keyword[None] , identifier[xpath] = keyword[None] , identifier[name] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[id_] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] ) keyword[if] identifier[id_] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[self] . identifier[_lxml_form] = identifier[self] . identifier[select] ( literal[string] % identifier[id_] ). identifier[node] () keyword[except] identifier[IndexError] : keyword[raise] identifier[DataNotFound] ( literal[string] % identifier[id_] ) keyword[elif] identifier[name] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[self] . identifier[_lxml_form] = identifier[self] . identifier[select] ( literal[string] % identifier[name] ). identifier[node] () keyword[except] identifier[IndexError] : keyword[raise] identifier[DataNotFound] ( literal[string] % identifier[name] ) keyword[elif] identifier[number] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[self] . identifier[_lxml_form] = identifier[self] . identifier[tree] . identifier[forms] [ identifier[number] ] keyword[except] identifier[IndexError] : keyword[raise] identifier[DataNotFound] ( literal[string] % identifier[number] ) keyword[elif] identifier[xpath] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[self] . identifier[_lxml_form] = identifier[self] . identifier[select] ( identifier[xpath] ). identifier[node] () keyword[except] identifier[IndexError] : keyword[raise] identifier[DataNotFound] ( literal[string] % identifier[xpath] ) keyword[else] : keyword[raise] identifier[GrabMisuseError] ( literal[string] literal[string] )
def choose_form(self, number=None, xpath=None, name=None, **kwargs): """ Set the default form. :param number: number of form (starting from zero) :param id: value of "id" attribute :param name: value of "name" attribute :param xpath: XPath query :raises: :class:`DataNotFound` if form not found :raises: :class:`GrabMisuseError` if method is called without parameters Selected form will be available via `form` attribute of `Grab` instance. All form methods will work with default form. Examples:: # Select second form g.choose_form(1) # Select by id g.choose_form(id="register") # Select by name g.choose_form(name="signup") # Select by xpath g.choose_form(xpath='//form[contains(@action, "/submit")]') """ id_ = kwargs.pop('id', None) if id_ is not None: try: self._lxml_form = self.select('//form[@id="%s"]' % id_).node() # depends on [control=['try'], data=[]] except IndexError: raise DataNotFound('There is no form with id: %s' % id_) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['id_']] elif name is not None: try: self._lxml_form = self.select('//form[@name="%s"]' % name).node() # depends on [control=['try'], data=[]] except IndexError: raise DataNotFound('There is no form with name: %s' % name) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['name']] elif number is not None: try: self._lxml_form = self.tree.forms[number] # depends on [control=['try'], data=[]] except IndexError: raise DataNotFound('There is no form with number: %s' % number) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['number']] elif xpath is not None: try: self._lxml_form = self.select(xpath).node() # depends on [control=['try'], data=[]] except IndexError: raise DataNotFound('Could not find form with xpath: %s' % xpath) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['xpath']] else: raise GrabMisuseError('choose_form methods requires one of [number, id, name, xpath] 
arguments')
def configure(self, name=None, rules=None, query=None, **options): """Configure the alert.""" self.name = name if not name: raise AssertionError("Alert's name should be defined and not empty.") if not rules: raise AssertionError("%s: Alert's rules is invalid" % name) self.rules = [parse_rule(rule) for rule in rules] self.rules = list(sorted(self.rules, key=lambda r: LEVELS.get(r.get('level'), 99))) assert query, "%s: Alert's query is invalid" % self.name self.query = query interval_raw = options.get('interval', self.reactor.options['interval']) self.interval = TimeUnit.from_interval(interval_raw) time_window_raw = options.get( 'time_window', self.reactor.options.get('time_window', interval_raw), ) self.time_window = TimeUnit.from_interval(time_window_raw) until_raw = options.get('until', self.reactor.options['until']) self.until = TimeUnit.from_interval(until_raw) # Adjust the start time to cater for `until` self.from_time = self.time_window + self.until self._format = options.get('format', self.reactor.options['format']) self.request_timeout = options.get( 'request_timeout', self.reactor.options['request_timeout']) self.connect_timeout = options.get( 'connect_timeout', self.reactor.options['connect_timeout']) interval_ms = self.interval.convert_to(units.MILLISECOND) history_size_raw = options.get('history_size', self.reactor.options['history_size']) history_size_unit = TimeUnit.from_interval(history_size_raw) history_size_ms = history_size_unit.convert_to(MILLISECOND) self.history_size = int(math.ceil(history_size_ms / interval_ms)) self.no_data = options.get('no_data', self.reactor.options['no_data']) self.loading_error = options.get('loading_error', self.reactor.options['loading_error']) if self.reactor.options.get('debug'): self.callback = ioloop.PeriodicCallback(self.load, 5000) else: self.callback = ioloop.PeriodicCallback(self.load, interval_ms)
def function[configure, parameter[self, name, rules, query]]: constant[Configure the alert.] name[self].name assign[=] name[name] if <ast.UnaryOp object at 0x7da1b0e67e80> begin[:] <ast.Raise object at 0x7da1b0e64a60> if <ast.UnaryOp object at 0x7da1b0e67b80> begin[:] <ast.Raise object at 0x7da1b0e67c40> name[self].rules assign[=] <ast.ListComp object at 0x7da1b0e67400> name[self].rules assign[=] call[name[list], parameter[call[name[sorted], parameter[name[self].rules]]]] assert[name[query]] name[self].query assign[=] name[query] variable[interval_raw] assign[=] call[name[options].get, parameter[constant[interval], call[name[self].reactor.options][constant[interval]]]] name[self].interval assign[=] call[name[TimeUnit].from_interval, parameter[name[interval_raw]]] variable[time_window_raw] assign[=] call[name[options].get, parameter[constant[time_window], call[name[self].reactor.options.get, parameter[constant[time_window], name[interval_raw]]]]] name[self].time_window assign[=] call[name[TimeUnit].from_interval, parameter[name[time_window_raw]]] variable[until_raw] assign[=] call[name[options].get, parameter[constant[until], call[name[self].reactor.options][constant[until]]]] name[self].until assign[=] call[name[TimeUnit].from_interval, parameter[name[until_raw]]] name[self].from_time assign[=] binary_operation[name[self].time_window + name[self].until] name[self]._format assign[=] call[name[options].get, parameter[constant[format], call[name[self].reactor.options][constant[format]]]] name[self].request_timeout assign[=] call[name[options].get, parameter[constant[request_timeout], call[name[self].reactor.options][constant[request_timeout]]]] name[self].connect_timeout assign[=] call[name[options].get, parameter[constant[connect_timeout], call[name[self].reactor.options][constant[connect_timeout]]]] variable[interval_ms] assign[=] call[name[self].interval.convert_to, parameter[name[units].MILLISECOND]] variable[history_size_raw] assign[=] call[name[options].get, 
parameter[constant[history_size], call[name[self].reactor.options][constant[history_size]]]] variable[history_size_unit] assign[=] call[name[TimeUnit].from_interval, parameter[name[history_size_raw]]] variable[history_size_ms] assign[=] call[name[history_size_unit].convert_to, parameter[name[MILLISECOND]]] name[self].history_size assign[=] call[name[int], parameter[call[name[math].ceil, parameter[binary_operation[name[history_size_ms] / name[interval_ms]]]]]] name[self].no_data assign[=] call[name[options].get, parameter[constant[no_data], call[name[self].reactor.options][constant[no_data]]]] name[self].loading_error assign[=] call[name[options].get, parameter[constant[loading_error], call[name[self].reactor.options][constant[loading_error]]]] if call[name[self].reactor.options.get, parameter[constant[debug]]] begin[:] name[self].callback assign[=] call[name[ioloop].PeriodicCallback, parameter[name[self].load, constant[5000]]]
keyword[def] identifier[configure] ( identifier[self] , identifier[name] = keyword[None] , identifier[rules] = keyword[None] , identifier[query] = keyword[None] ,** identifier[options] ): literal[string] identifier[self] . identifier[name] = identifier[name] keyword[if] keyword[not] identifier[name] : keyword[raise] identifier[AssertionError] ( literal[string] ) keyword[if] keyword[not] identifier[rules] : keyword[raise] identifier[AssertionError] ( literal[string] % identifier[name] ) identifier[self] . identifier[rules] =[ identifier[parse_rule] ( identifier[rule] ) keyword[for] identifier[rule] keyword[in] identifier[rules] ] identifier[self] . identifier[rules] = identifier[list] ( identifier[sorted] ( identifier[self] . identifier[rules] , identifier[key] = keyword[lambda] identifier[r] : identifier[LEVELS] . identifier[get] ( identifier[r] . identifier[get] ( literal[string] ), literal[int] ))) keyword[assert] identifier[query] , literal[string] % identifier[self] . identifier[name] identifier[self] . identifier[query] = identifier[query] identifier[interval_raw] = identifier[options] . identifier[get] ( literal[string] , identifier[self] . identifier[reactor] . identifier[options] [ literal[string] ]) identifier[self] . identifier[interval] = identifier[TimeUnit] . identifier[from_interval] ( identifier[interval_raw] ) identifier[time_window_raw] = identifier[options] . identifier[get] ( literal[string] , identifier[self] . identifier[reactor] . identifier[options] . identifier[get] ( literal[string] , identifier[interval_raw] ), ) identifier[self] . identifier[time_window] = identifier[TimeUnit] . identifier[from_interval] ( identifier[time_window_raw] ) identifier[until_raw] = identifier[options] . identifier[get] ( literal[string] , identifier[self] . identifier[reactor] . identifier[options] [ literal[string] ]) identifier[self] . identifier[until] = identifier[TimeUnit] . identifier[from_interval] ( identifier[until_raw] ) identifier[self] . 
identifier[from_time] = identifier[self] . identifier[time_window] + identifier[self] . identifier[until] identifier[self] . identifier[_format] = identifier[options] . identifier[get] ( literal[string] , identifier[self] . identifier[reactor] . identifier[options] [ literal[string] ]) identifier[self] . identifier[request_timeout] = identifier[options] . identifier[get] ( literal[string] , identifier[self] . identifier[reactor] . identifier[options] [ literal[string] ]) identifier[self] . identifier[connect_timeout] = identifier[options] . identifier[get] ( literal[string] , identifier[self] . identifier[reactor] . identifier[options] [ literal[string] ]) identifier[interval_ms] = identifier[self] . identifier[interval] . identifier[convert_to] ( identifier[units] . identifier[MILLISECOND] ) identifier[history_size_raw] = identifier[options] . identifier[get] ( literal[string] , identifier[self] . identifier[reactor] . identifier[options] [ literal[string] ]) identifier[history_size_unit] = identifier[TimeUnit] . identifier[from_interval] ( identifier[history_size_raw] ) identifier[history_size_ms] = identifier[history_size_unit] . identifier[convert_to] ( identifier[MILLISECOND] ) identifier[self] . identifier[history_size] = identifier[int] ( identifier[math] . identifier[ceil] ( identifier[history_size_ms] / identifier[interval_ms] )) identifier[self] . identifier[no_data] = identifier[options] . identifier[get] ( literal[string] , identifier[self] . identifier[reactor] . identifier[options] [ literal[string] ]) identifier[self] . identifier[loading_error] = identifier[options] . identifier[get] ( literal[string] , identifier[self] . identifier[reactor] . identifier[options] [ literal[string] ]) keyword[if] identifier[self] . identifier[reactor] . identifier[options] . identifier[get] ( literal[string] ): identifier[self] . identifier[callback] = identifier[ioloop] . identifier[PeriodicCallback] ( identifier[self] . 
identifier[load] , literal[int] ) keyword[else] : identifier[self] . identifier[callback] = identifier[ioloop] . identifier[PeriodicCallback] ( identifier[self] . identifier[load] , identifier[interval_ms] )
def configure(self, name=None, rules=None, query=None, **options): """Configure the alert.""" self.name = name if not name: raise AssertionError("Alert's name should be defined and not empty.") # depends on [control=['if'], data=[]] if not rules: raise AssertionError("%s: Alert's rules is invalid" % name) # depends on [control=['if'], data=[]] self.rules = [parse_rule(rule) for rule in rules] self.rules = list(sorted(self.rules, key=lambda r: LEVELS.get(r.get('level'), 99))) assert query, "%s: Alert's query is invalid" % self.name self.query = query interval_raw = options.get('interval', self.reactor.options['interval']) self.interval = TimeUnit.from_interval(interval_raw) time_window_raw = options.get('time_window', self.reactor.options.get('time_window', interval_raw)) self.time_window = TimeUnit.from_interval(time_window_raw) until_raw = options.get('until', self.reactor.options['until']) self.until = TimeUnit.from_interval(until_raw) # Adjust the start time to cater for `until` self.from_time = self.time_window + self.until self._format = options.get('format', self.reactor.options['format']) self.request_timeout = options.get('request_timeout', self.reactor.options['request_timeout']) self.connect_timeout = options.get('connect_timeout', self.reactor.options['connect_timeout']) interval_ms = self.interval.convert_to(units.MILLISECOND) history_size_raw = options.get('history_size', self.reactor.options['history_size']) history_size_unit = TimeUnit.from_interval(history_size_raw) history_size_ms = history_size_unit.convert_to(MILLISECOND) self.history_size = int(math.ceil(history_size_ms / interval_ms)) self.no_data = options.get('no_data', self.reactor.options['no_data']) self.loading_error = options.get('loading_error', self.reactor.options['loading_error']) if self.reactor.options.get('debug'): self.callback = ioloop.PeriodicCallback(self.load, 5000) # depends on [control=['if'], data=[]] else: self.callback = ioloop.PeriodicCallback(self.load, interval_ms)
def delete_attribute(self, name): ''' Delete an attribute with a given *name* from the list of attributes. ''' for idx, attr in enumerate(self.attributes): attr_name, _ = attr if attr_name == name: del self.attributes[idx] return
def function[delete_attribute, parameter[self, name]]: constant[ Delete an attribute with a given *name* from the list of attributes. ] for taget[tuple[[<ast.Name object at 0x7da1b0293bb0>, <ast.Name object at 0x7da1b0293790>]]] in starred[call[name[enumerate], parameter[name[self].attributes]]] begin[:] <ast.Tuple object at 0x7da1b020fc40> assign[=] name[attr] if compare[name[attr_name] equal[==] name[name]] begin[:] <ast.Delete object at 0x7da1b020d450> return[None]
keyword[def] identifier[delete_attribute] ( identifier[self] , identifier[name] ): literal[string] keyword[for] identifier[idx] , identifier[attr] keyword[in] identifier[enumerate] ( identifier[self] . identifier[attributes] ): identifier[attr_name] , identifier[_] = identifier[attr] keyword[if] identifier[attr_name] == identifier[name] : keyword[del] identifier[self] . identifier[attributes] [ identifier[idx] ] keyword[return]
def delete_attribute(self, name): """ Delete an attribute with a given *name* from the list of attributes. """ for (idx, attr) in enumerate(self.attributes): (attr_name, _) = attr if attr_name == name: del self.attributes[idx] return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def flag_to_list(flagval, flagtype): """Convert a string of comma-separated tf flags to a list of values.""" if flagtype == 'int': return [int(_) for _ in flagval.split(',') if _] elif flagtype == 'float': return [float(_) for _ in flagval.split(',') if _] elif flagtype == 'str': return [_ for _ in flagval.split(',') if _] else: raise Exception("incorrect type")
def function[flag_to_list, parameter[flagval, flagtype]]: constant[Convert a string of comma-separated tf flags to a list of values.] if compare[name[flagtype] equal[==] constant[int]] begin[:] return[<ast.ListComp object at 0x7da18f00c730>]
keyword[def] identifier[flag_to_list] ( identifier[flagval] , identifier[flagtype] ): literal[string] keyword[if] identifier[flagtype] == literal[string] : keyword[return] [ identifier[int] ( identifier[_] ) keyword[for] identifier[_] keyword[in] identifier[flagval] . identifier[split] ( literal[string] ) keyword[if] identifier[_] ] keyword[elif] identifier[flagtype] == literal[string] : keyword[return] [ identifier[float] ( identifier[_] ) keyword[for] identifier[_] keyword[in] identifier[flagval] . identifier[split] ( literal[string] ) keyword[if] identifier[_] ] keyword[elif] identifier[flagtype] == literal[string] : keyword[return] [ identifier[_] keyword[for] identifier[_] keyword[in] identifier[flagval] . identifier[split] ( literal[string] ) keyword[if] identifier[_] ] keyword[else] : keyword[raise] identifier[Exception] ( literal[string] )
def flag_to_list(flagval, flagtype): """Convert a string of comma-separated tf flags to a list of values.""" if flagtype == 'int': return [int(_) for _ in flagval.split(',') if _] # depends on [control=['if'], data=[]] elif flagtype == 'float': return [float(_) for _ in flagval.split(',') if _] # depends on [control=['if'], data=[]] elif flagtype == 'str': return [_ for _ in flagval.split(',') if _] # depends on [control=['if'], data=[]] else: raise Exception('incorrect type')
def _check_has_primary(sds): """Current topology type is ReplicaSetWithPrimary. Is primary still known? Pass in a dict of ServerDescriptions. Returns new topology type. """ for s in sds.values(): if s.server_type == SERVER_TYPE.RSPrimary: return TOPOLOGY_TYPE.ReplicaSetWithPrimary else: return TOPOLOGY_TYPE.ReplicaSetNoPrimary
def function[_check_has_primary, parameter[sds]]: constant[Current topology type is ReplicaSetWithPrimary. Is primary still known? Pass in a dict of ServerDescriptions. Returns new topology type. ] for taget[name[s]] in starred[call[name[sds].values, parameter[]]] begin[:] if compare[name[s].server_type equal[==] name[SERVER_TYPE].RSPrimary] begin[:] return[name[TOPOLOGY_TYPE].ReplicaSetWithPrimary]
keyword[def] identifier[_check_has_primary] ( identifier[sds] ): literal[string] keyword[for] identifier[s] keyword[in] identifier[sds] . identifier[values] (): keyword[if] identifier[s] . identifier[server_type] == identifier[SERVER_TYPE] . identifier[RSPrimary] : keyword[return] identifier[TOPOLOGY_TYPE] . identifier[ReplicaSetWithPrimary] keyword[else] : keyword[return] identifier[TOPOLOGY_TYPE] . identifier[ReplicaSetNoPrimary]
def _check_has_primary(sds): """Current topology type is ReplicaSetWithPrimary. Is primary still known? Pass in a dict of ServerDescriptions. Returns new topology type. """ for s in sds.values(): if s.server_type == SERVER_TYPE.RSPrimary: return TOPOLOGY_TYPE.ReplicaSetWithPrimary # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['s']] else: return TOPOLOGY_TYPE.ReplicaSetNoPrimary
def remove_programmer(programmer_id):
    """remove programmer.

    Drops every line belonging to *programmer_id* (i.e. lines whose
    stripped text starts with ``"<programmer_id>."``) from programmers.txt
    and rewrites the file with the remaining lines.

    :param programmer_id: programmer id (e.g. 'avrisp')
    :rtype: None
    """
    log.debug('remove %s', programmer_id)
    prefix = programmer_id + '.'
    remaining = [line for line in programmers_txt().lines()
                 if not line.strip().startswith(prefix)]
    programmers_txt().write_lines(remaining)
def function[remove_programmer, parameter[programmer_id]]: constant[remove programmer. :param programmer_id: programmer id (e.g. 'avrisp') :rtype: None ] call[name[log].debug, parameter[constant[remove %s], name[programmer_id]]] variable[lines] assign[=] call[call[name[programmers_txt], parameter[]].lines, parameter[]] variable[lines] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da20c76f430>, name[lines]]] call[call[name[programmers_txt], parameter[]].write_lines, parameter[name[lines]]]
keyword[def] identifier[remove_programmer] ( identifier[programmer_id] ): literal[string] identifier[log] . identifier[debug] ( literal[string] , identifier[programmer_id] ) identifier[lines] = identifier[programmers_txt] (). identifier[lines] () identifier[lines] = identifier[filter] ( keyword[lambda] identifier[x] : keyword[not] identifier[x] . identifier[strip] (). identifier[startswith] ( identifier[programmer_id] + literal[string] ), identifier[lines] ) identifier[programmers_txt] (). identifier[write_lines] ( identifier[lines] )
def remove_programmer(programmer_id): """remove programmer. :param programmer_id: programmer id (e.g. 'avrisp') :rtype: None """ log.debug('remove %s', programmer_id) lines = programmers_txt().lines() lines = filter(lambda x: not x.strip().startswith(programmer_id + '.'), lines) programmers_txt().write_lines(lines)
def dump(ra, from_date, with_json=True, latest_only=False, **kwargs):
    """Dump the remote accounts as a list of dictionaries.

    :param ra: Remote account to be dumped.
    :type ra: `invenio_oauthclient.models.RemoteAccount [Invenio2.x]`
    :returns: Remote accounts serialized to dictionary.
    :rtype: dict
    """
    # Only the four identifying columns are serialized; the remaining
    # parameters exist for dumper-interface compatibility.
    return {
        'id': ra.id,
        'user_id': ra.user_id,
        'client_id': ra.client_id,
        'extra_data': ra.extra_data,
    }
def function[dump, parameter[ra, from_date, with_json, latest_only]]: constant[Dump the remote accounts as a list of dictionaries. :param ra: Remote account to be dumped. :type ra: `invenio_oauthclient.models.RemoteAccount [Invenio2.x]` :returns: Remote accounts serialized to dictionary. :rtype: dict ] return[call[name[dict], parameter[]]]
keyword[def] identifier[dump] ( identifier[ra] , identifier[from_date] , identifier[with_json] = keyword[True] , identifier[latest_only] = keyword[False] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[dict] ( identifier[id] = identifier[ra] . identifier[id] , identifier[user_id] = identifier[ra] . identifier[user_id] , identifier[client_id] = identifier[ra] . identifier[client_id] , identifier[extra_data] = identifier[ra] . identifier[extra_data] )
def dump(ra, from_date, with_json=True, latest_only=False, **kwargs): """Dump the remote accounts as a list of dictionaries. :param ra: Remote account to be dumped. :type ra: `invenio_oauthclient.models.RemoteAccount [Invenio2.x]` :returns: Remote accounts serialized to dictionary. :rtype: dict """ return dict(id=ra.id, user_id=ra.user_id, client_id=ra.client_id, extra_data=ra.extra_data)
def delete_snapshot(name, snap_name, runas=None, all=False):
    '''
    Delete a snapshot

    .. note::

        Deleting a snapshot from which other snapshots are dervied will not
        delete the derived snapshots

    :param str name:
        Name/ID of VM whose snapshot will be deleted

    :param str snap_name:
        Name/ID of snapshot to delete

    :param str runas:
        The user that the prlctl command will be run as

    :param bool all:
        Delete all snapshots having the name given

        .. versionadded:: 2016.11.0

    Example:

    .. code-block:: bash

        salt '*' parallels.delete_snapshot macvm 'unneeded snapshot' runas=macdev
        salt '*' parallels.delete_snapshot macvm 'Snapshot for linked clone' all=True runas=macdev
    '''
    # When deleting every snapshot of this name, duplicate IDs are
    # acceptable; otherwise an ambiguous name must raise (strict mode).
    strict = not all

    # Validate VM and snapshot names
    name = salt.utils.data.decode(name)
    snap_ids = _validate_snap_name(name, snap_name, strict=strict, runas=runas)
    if isinstance(snap_ids, six.string_types):
        snap_ids = [snap_ids]

    # Delete each snapshot, keyed by its bare (brace-stripped) ID
    results = {}
    for raw_id in snap_ids:
        bare_id = raw_id.strip('{}')
        results[bare_id] = prlctl('snapshot-delete',
                                  [name, '--id', bare_id],
                                  runas=runas)

    # A single deletion returns its result directly; multiple deletions
    # return the whole mapping.
    ids = list(results)
    if len(ids) == 1:
        return results[ids[0]]
    return results
def function[delete_snapshot, parameter[name, snap_name, runas, all]]: constant[ Delete a snapshot .. note:: Deleting a snapshot from which other snapshots are dervied will not delete the derived snapshots :param str name: Name/ID of VM whose snapshot will be deleted :param str snap_name: Name/ID of snapshot to delete :param str runas: The user that the prlctl command will be run as :param bool all: Delete all snapshots having the name given .. versionadded:: 2016.11.0 Example: .. code-block:: bash salt '*' parallels.delete_snapshot macvm 'unneeded snapshot' runas=macdev salt '*' parallels.delete_snapshot macvm 'Snapshot for linked clone' all=True runas=macdev ] variable[strict] assign[=] <ast.UnaryOp object at 0x7da1b2169420> variable[name] assign[=] call[name[salt].utils.data.decode, parameter[name[name]]] variable[snap_ids] assign[=] call[name[_validate_snap_name], parameter[name[name], name[snap_name]]] if call[name[isinstance], parameter[name[snap_ids], name[six].string_types]] begin[:] variable[snap_ids] assign[=] list[[<ast.Name object at 0x7da1b2168700>]] variable[ret] assign[=] dictionary[[], []] for taget[name[snap_id]] in starred[name[snap_ids]] begin[:] variable[snap_id] assign[=] call[name[snap_id].strip, parameter[constant[{}]]] variable[args] assign[=] list[[<ast.Name object at 0x7da1b216aa10>, <ast.Constant object at 0x7da1b216b610>, <ast.Name object at 0x7da1b216a200>]] call[name[ret]][name[snap_id]] assign[=] call[name[prlctl], parameter[constant[snapshot-delete], name[args]]] variable[ret_keys] assign[=] call[name[list], parameter[call[name[ret].keys, parameter[]]]] if compare[call[name[len], parameter[name[ret_keys]]] equal[==] constant[1]] begin[:] return[call[name[ret]][call[name[ret_keys]][constant[0]]]]
keyword[def] identifier[delete_snapshot] ( identifier[name] , identifier[snap_name] , identifier[runas] = keyword[None] , identifier[all] = keyword[False] ): literal[string] identifier[strict] = keyword[not] identifier[all] identifier[name] = identifier[salt] . identifier[utils] . identifier[data] . identifier[decode] ( identifier[name] ) identifier[snap_ids] = identifier[_validate_snap_name] ( identifier[name] , identifier[snap_name] , identifier[strict] = identifier[strict] , identifier[runas] = identifier[runas] ) keyword[if] identifier[isinstance] ( identifier[snap_ids] , identifier[six] . identifier[string_types] ): identifier[snap_ids] =[ identifier[snap_ids] ] identifier[ret] ={} keyword[for] identifier[snap_id] keyword[in] identifier[snap_ids] : identifier[snap_id] = identifier[snap_id] . identifier[strip] ( literal[string] ) identifier[args] =[ identifier[name] , literal[string] , identifier[snap_id] ] identifier[ret] [ identifier[snap_id] ]= identifier[prlctl] ( literal[string] , identifier[args] , identifier[runas] = identifier[runas] ) identifier[ret_keys] = identifier[list] ( identifier[ret] . identifier[keys] ()) keyword[if] identifier[len] ( identifier[ret_keys] )== literal[int] : keyword[return] identifier[ret] [ identifier[ret_keys] [ literal[int] ]] keyword[else] : keyword[return] identifier[ret]
def delete_snapshot(name, snap_name, runas=None, all=False): """ Delete a snapshot .. note:: Deleting a snapshot from which other snapshots are dervied will not delete the derived snapshots :param str name: Name/ID of VM whose snapshot will be deleted :param str snap_name: Name/ID of snapshot to delete :param str runas: The user that the prlctl command will be run as :param bool all: Delete all snapshots having the name given .. versionadded:: 2016.11.0 Example: .. code-block:: bash salt '*' parallels.delete_snapshot macvm 'unneeded snapshot' runas=macdev salt '*' parallels.delete_snapshot macvm 'Snapshot for linked clone' all=True runas=macdev """ # strict means raise an error if multiple snapshot IDs found for the name given strict = not all # Validate VM and snapshot names name = salt.utils.data.decode(name) snap_ids = _validate_snap_name(name, snap_name, strict=strict, runas=runas) if isinstance(snap_ids, six.string_types): snap_ids = [snap_ids] # depends on [control=['if'], data=[]] # Delete snapshot(s) ret = {} for snap_id in snap_ids: snap_id = snap_id.strip('{}') # Construct argument list args = [name, '--id', snap_id] # Execute command ret[snap_id] = prlctl('snapshot-delete', args, runas=runas) # depends on [control=['for'], data=['snap_id']] # Return results ret_keys = list(ret.keys()) if len(ret_keys) == 1: return ret[ret_keys[0]] # depends on [control=['if'], data=[]] else: return ret
def filter_from_options(key, options):
    """
    :param key: Key str in options
    :param options: Mapping object
    :return: New mapping object from 'options' in which the item with 'key'
        filtered

    >>> filter_from_options('a', dict(a=1, b=2))
    {'b': 2}
    """
    # Iterating the mapping yields its keys; keep everything except `key`.
    keep = [opt for opt in options if opt != key]
    return anyconfig.utils.filter_options(keep, options)
def function[filter_from_options, parameter[key, options]]: constant[ :param key: Key str in options :param options: Mapping object :return: New mapping object from 'options' in which the item with 'key' filtered >>> filter_from_options('a', dict(a=1, b=2)) {'b': 2} ] return[call[name[anyconfig].utils.filter_options, parameter[<ast.ListComp object at 0x7da1b0667b20>, name[options]]]]
keyword[def] identifier[filter_from_options] ( identifier[key] , identifier[options] ): literal[string] keyword[return] identifier[anyconfig] . identifier[utils] . identifier[filter_options] ([ identifier[k] keyword[for] identifier[k] keyword[in] identifier[options] . identifier[keys] () keyword[if] identifier[k] != identifier[key] ], identifier[options] )
def filter_from_options(key, options): """ :param key: Key str in options :param options: Mapping object :return: New mapping object from 'options' in which the item with 'key' filtered >>> filter_from_options('a', dict(a=1, b=2)) {'b': 2} """ return anyconfig.utils.filter_options([k for k in options.keys() if k != key], options)
def _create_temp_requirements(self): """Create a temporary requirements.txt. This allows testing again a git branch instead of pulling from pypi. """ self.use_temp_requirements_file = True # Replace tcex version with develop branch of tcex with open(self.requirements_file, 'r') as fh: current_requirements = fh.read().strip().split('\n') self.requirements_file = 'temp-{}'.format(self.requirements_file) with open(self.requirements_file, 'w') as fh: new_requirements = '' for line in current_requirements: if not line: continue if line.startswith('tcex'): line = 'git+https://github.com/ThreatConnect-Inc/tcex.git@{}#egg=tcex' line = line.format(self.args.branch) # print('line', line) new_requirements += '{}\n'.format(line) fh.write(new_requirements)
def function[_create_temp_requirements, parameter[self]]: constant[Create a temporary requirements.txt. This allows testing again a git branch instead of pulling from pypi. ] name[self].use_temp_requirements_file assign[=] constant[True] with call[name[open], parameter[name[self].requirements_file, constant[r]]] begin[:] variable[current_requirements] assign[=] call[call[call[name[fh].read, parameter[]].strip, parameter[]].split, parameter[constant[ ]]] name[self].requirements_file assign[=] call[constant[temp-{}].format, parameter[name[self].requirements_file]] with call[name[open], parameter[name[self].requirements_file, constant[w]]] begin[:] variable[new_requirements] assign[=] constant[] for taget[name[line]] in starred[name[current_requirements]] begin[:] if <ast.UnaryOp object at 0x7da18f58c550> begin[:] continue if call[name[line].startswith, parameter[constant[tcex]]] begin[:] variable[line] assign[=] constant[git+https://github.com/ThreatConnect-Inc/tcex.git@{}#egg=tcex] variable[line] assign[=] call[name[line].format, parameter[name[self].args.branch]] <ast.AugAssign object at 0x7da18f58f520> call[name[fh].write, parameter[name[new_requirements]]]
keyword[def] identifier[_create_temp_requirements] ( identifier[self] ): literal[string] identifier[self] . identifier[use_temp_requirements_file] = keyword[True] keyword[with] identifier[open] ( identifier[self] . identifier[requirements_file] , literal[string] ) keyword[as] identifier[fh] : identifier[current_requirements] = identifier[fh] . identifier[read] (). identifier[strip] (). identifier[split] ( literal[string] ) identifier[self] . identifier[requirements_file] = literal[string] . identifier[format] ( identifier[self] . identifier[requirements_file] ) keyword[with] identifier[open] ( identifier[self] . identifier[requirements_file] , literal[string] ) keyword[as] identifier[fh] : identifier[new_requirements] = literal[string] keyword[for] identifier[line] keyword[in] identifier[current_requirements] : keyword[if] keyword[not] identifier[line] : keyword[continue] keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): identifier[line] = literal[string] identifier[line] = identifier[line] . identifier[format] ( identifier[self] . identifier[args] . identifier[branch] ) identifier[new_requirements] += literal[string] . identifier[format] ( identifier[line] ) identifier[fh] . identifier[write] ( identifier[new_requirements] )
def _create_temp_requirements(self): """Create a temporary requirements.txt. This allows testing again a git branch instead of pulling from pypi. """ self.use_temp_requirements_file = True # Replace tcex version with develop branch of tcex with open(self.requirements_file, 'r') as fh: current_requirements = fh.read().strip().split('\n') # depends on [control=['with'], data=['fh']] self.requirements_file = 'temp-{}'.format(self.requirements_file) with open(self.requirements_file, 'w') as fh: new_requirements = '' for line in current_requirements: if not line: continue # depends on [control=['if'], data=[]] if line.startswith('tcex'): line = 'git+https://github.com/ThreatConnect-Inc/tcex.git@{}#egg=tcex' line = line.format(self.args.branch) # depends on [control=['if'], data=[]] # print('line', line) new_requirements += '{}\n'.format(line) # depends on [control=['for'], data=['line']] fh.write(new_requirements) # depends on [control=['with'], data=['fh']]
def generate_data_type(self, data_type):
    """Output a data type definition (a struct or union)."""
    if isinstance(data_type, Struct):
        # Struct definition.
        self.emit('')
        self.emit('struct %s' % data_type.name)
        with self.indent():
            if data_type.doc is not None:
                self.emit(self.format_string(data_type.doc))
            for member in data_type.fields:
                rendered = self.format_data_type(member.data_type)
                if member.has_default:
                    self.emit('%s %s = %s' % (member.name, rendered,
                                              self.format_value(member.default)))
                    # NOTE(review): field docs are emitted only for
                    # defaulted fields in the struct branch -- confirm
                    # this asymmetry with the union branch is intended.
                    if member.doc is not None:
                        with self.indent():
                            self.emit(self.format_value(member.doc))
                else:
                    self.emit('%s %s' % (member.name, rendered))
    elif isinstance(data_type, Union):
        # Union definition.
        self.emit('')
        self.emit('union %s' % data_type.name)
        with self.indent():
            if data_type.doc is not None:
                self.emit(self.format_string(data_type.doc))
            for member in data_type.fields:
                label = member.name
                # Add a star for a catch-all field.
                # (There are two ways to recognize these.)
                if member.catch_all or member is data_type.catch_all_field:
                    label += '*'
                if isinstance(member.data_type, Void):
                    self.emit('%s' % (label))
                else:
                    self.emit('%s %s' % (label,
                                         self.format_data_type(member.data_type)))
                if member.doc is not None:
                    with self.indent():
                        self.emit(self.format_value(member.doc))
    else:
        # Don't know what this is.
        self.emit('')
        self.emit('# ??? %s' % repr(data_type))
def function[generate_data_type, parameter[self, data_type]]: constant[Output a data type definition (a struct or union).] if call[name[isinstance], parameter[name[data_type], name[Struct]]] begin[:] call[name[self].emit, parameter[constant[]]] call[name[self].emit, parameter[binary_operation[constant[struct %s] <ast.Mod object at 0x7da2590d6920> name[data_type].name]]] with call[name[self].indent, parameter[]] begin[:] if compare[name[data_type].doc is_not constant[None]] begin[:] call[name[self].emit, parameter[call[name[self].format_string, parameter[name[data_type].doc]]]] for taget[name[field]] in starred[name[data_type].fields] begin[:] variable[type_repr] assign[=] call[name[self].format_data_type, parameter[name[field].data_type]] if <ast.UnaryOp object at 0x7da18f8113f0> begin[:] call[name[self].emit, parameter[binary_operation[constant[%s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18f812a40>, <ast.Name object at 0x7da18f813e20>]]]]] if compare[name[field].doc is_not constant[None]] begin[:] with call[name[self].indent, parameter[]] begin[:] call[name[self].emit, parameter[call[name[self].format_value, parameter[name[field].doc]]]]
keyword[def] identifier[generate_data_type] ( identifier[self] , identifier[data_type] ): literal[string] keyword[if] identifier[isinstance] ( identifier[data_type] , identifier[Struct] ): identifier[self] . identifier[emit] ( literal[string] ) identifier[self] . identifier[emit] ( literal[string] % identifier[data_type] . identifier[name] ) keyword[with] identifier[self] . identifier[indent] (): keyword[if] identifier[data_type] . identifier[doc] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[emit] ( identifier[self] . identifier[format_string] ( identifier[data_type] . identifier[doc] )) keyword[for] identifier[field] keyword[in] identifier[data_type] . identifier[fields] : identifier[type_repr] = identifier[self] . identifier[format_data_type] ( identifier[field] . identifier[data_type] ) keyword[if] keyword[not] identifier[field] . identifier[has_default] : identifier[self] . identifier[emit] ( literal[string] %( identifier[field] . identifier[name] , identifier[type_repr] )) keyword[else] : identifier[self] . identifier[emit] ( literal[string] % ( identifier[field] . identifier[name] , identifier[type_repr] , identifier[self] . identifier[format_value] ( identifier[field] . identifier[default] ))) keyword[if] identifier[field] . identifier[doc] keyword[is] keyword[not] keyword[None] : keyword[with] identifier[self] . identifier[indent] (): identifier[self] . identifier[emit] ( identifier[self] . identifier[format_value] ( identifier[field] . identifier[doc] )) keyword[elif] identifier[isinstance] ( identifier[data_type] , identifier[Union] ): identifier[self] . identifier[emit] ( literal[string] ) identifier[self] . identifier[emit] ( literal[string] % identifier[data_type] . identifier[name] ) keyword[with] identifier[self] . identifier[indent] (): keyword[if] identifier[data_type] . identifier[doc] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[emit] ( identifier[self] . 
identifier[format_string] ( identifier[data_type] . identifier[doc] )) keyword[for] identifier[field] keyword[in] identifier[data_type] . identifier[fields] : identifier[name] = identifier[field] . identifier[name] keyword[if] identifier[field] . identifier[catch_all] keyword[or] identifier[field] keyword[is] identifier[data_type] . identifier[catch_all_field] : identifier[name] += literal[string] keyword[if] identifier[isinstance] ( identifier[field] . identifier[data_type] , identifier[Void] ): identifier[self] . identifier[emit] ( literal[string] %( identifier[name] )) keyword[else] : identifier[type_repr] = identifier[self] . identifier[format_data_type] ( identifier[field] . identifier[data_type] ) identifier[self] . identifier[emit] ( literal[string] %( identifier[name] , identifier[type_repr] )) keyword[if] identifier[field] . identifier[doc] keyword[is] keyword[not] keyword[None] : keyword[with] identifier[self] . identifier[indent] (): identifier[self] . identifier[emit] ( identifier[self] . identifier[format_value] ( identifier[field] . identifier[doc] )) keyword[else] : identifier[self] . identifier[emit] ( literal[string] ) identifier[self] . identifier[emit] ( literal[string] % identifier[repr] ( identifier[data_type] ))
def generate_data_type(self, data_type): """Output a data type definition (a struct or union).""" if isinstance(data_type, Struct): # Output a struct definition. self.emit('') self.emit('struct %s' % data_type.name) with self.indent(): if data_type.doc is not None: self.emit(self.format_string(data_type.doc)) # depends on [control=['if'], data=[]] for field in data_type.fields: type_repr = self.format_data_type(field.data_type) if not field.has_default: self.emit('%s %s' % (field.name, type_repr)) # depends on [control=['if'], data=[]] else: self.emit('%s %s = %s' % (field.name, type_repr, self.format_value(field.default))) if field.doc is not None: with self.indent(): self.emit(self.format_value(field.doc)) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']] # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(data_type, Union): # Output a union definition. self.emit('') self.emit('union %s' % data_type.name) with self.indent(): if data_type.doc is not None: self.emit(self.format_string(data_type.doc)) # depends on [control=['if'], data=[]] for field in data_type.fields: name = field.name # Add a star for a catch-all field. # (There are two ways to recognize these.) if field.catch_all or field is data_type.catch_all_field: name += '*' # depends on [control=['if'], data=[]] if isinstance(field.data_type, Void): self.emit('%s' % name) # depends on [control=['if'], data=[]] else: type_repr = self.format_data_type(field.data_type) self.emit('%s %s' % (name, type_repr)) if field.doc is not None: with self.indent(): self.emit(self.format_value(field.doc)) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']] # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] else: # Don't know what this is. self.emit('') self.emit('# ??? %s' % repr(data_type))
def _add_return_site(self, return_site):
    """
    Registers a basic block as a site for control flow to return from this
    function.

    :param CodeNode return_site: The block node that ends with a return.
    """
    self._register_nodes(True, return_site)
    self._ret_sites.add(return_site)
    # Execution cannot continue in this function after a return, so a
    # return site is necessarily also an endpoint of the function.
    self._add_endpoint(return_site, 'return')
def function[_add_return_site, parameter[self, return_site]]: constant[ Registers a basic block as a site for control flow to return from this function. :param CodeNode return_site: The block node that ends with a return. ] call[name[self]._register_nodes, parameter[constant[True], name[return_site]]] call[name[self]._ret_sites.add, parameter[name[return_site]]] call[name[self]._add_endpoint, parameter[name[return_site], constant[return]]]
keyword[def] identifier[_add_return_site] ( identifier[self] , identifier[return_site] ): literal[string] identifier[self] . identifier[_register_nodes] ( keyword[True] , identifier[return_site] ) identifier[self] . identifier[_ret_sites] . identifier[add] ( identifier[return_site] ) identifier[self] . identifier[_add_endpoint] ( identifier[return_site] , literal[string] )
def _add_return_site(self, return_site): """ Registers a basic block as a site for control flow to return from this function. :param CodeNode return_site: The block node that ends with a return. """ self._register_nodes(True, return_site) self._ret_sites.add(return_site) # A return site must be an endpoint of the function - you cannot continue execution of the current function # after returning self._add_endpoint(return_site, 'return')
def viewport_changed(self, screen_id, x, y, width, height):
    """Signals that framebuffer window viewport has changed.

    in screen_id of type int
        Monitor to take the screenshot from.

    in x of type int
        Framebuffer x offset.

    in y of type int
        Framebuffer y offset.

    in width of type int
        Viewport width.

    in height of type int
        Viewport height.

    raises :class:`OleErrorInvalidarg`
        The specified viewport data is invalid.

    """
    # Validate every argument up front; the per-parameter error text
    # matches the generated-binding convention.
    for param_name, value in (('screen_id', screen_id),
                              ('x', x),
                              ('y', y),
                              ('width', width),
                              ('height', height)):
        if not isinstance(value, baseinteger):
            raise TypeError(
                "%s can only be an instance of type baseinteger" % param_name)
    self._call("viewportChanged",
               in_p=[screen_id, x, y, width, height])
def function[viewport_changed, parameter[self, screen_id, x, y, width, height]]: constant[Signals that framebuffer window viewport has changed. in screen_id of type int Monitor to take the screenshot from. in x of type int Framebuffer x offset. in y of type int Framebuffer y offset. in width of type int Viewport width. in height of type int Viewport height. raises :class:`OleErrorInvalidarg` The specified viewport data is invalid. ] if <ast.UnaryOp object at 0x7da20e9b3130> begin[:] <ast.Raise object at 0x7da20e9b01c0> if <ast.UnaryOp object at 0x7da20e9b06a0> begin[:] <ast.Raise object at 0x7da20e9b0e80> if <ast.UnaryOp object at 0x7da20e9b3910> begin[:] <ast.Raise object at 0x7da20e9b1d80> if <ast.UnaryOp object at 0x7da20e9b3160> begin[:] <ast.Raise object at 0x7da20e9b34c0> if <ast.UnaryOp object at 0x7da20e9b39a0> begin[:] <ast.Raise object at 0x7da20e9b3730> call[name[self]._call, parameter[constant[viewportChanged]]]
keyword[def] identifier[viewport_changed] ( identifier[self] , identifier[screen_id] , identifier[x] , identifier[y] , identifier[width] , identifier[height] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[screen_id] , identifier[baseinteger] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[x] , identifier[baseinteger] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[y] , identifier[baseinteger] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[width] , identifier[baseinteger] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[height] , identifier[baseinteger] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[self] . identifier[_call] ( literal[string] , identifier[in_p] =[ identifier[screen_id] , identifier[x] , identifier[y] , identifier[width] , identifier[height] ])
def viewport_changed(self, screen_id, x, y, width, height): """Signals that framebuffer window viewport has changed. in screen_id of type int Monitor to take the screenshot from. in x of type int Framebuffer x offset. in y of type int Framebuffer y offset. in width of type int Viewport width. in height of type int Viewport height. raises :class:`OleErrorInvalidarg` The specified viewport data is invalid. """ if not isinstance(screen_id, baseinteger): raise TypeError('screen_id can only be an instance of type baseinteger') # depends on [control=['if'], data=[]] if not isinstance(x, baseinteger): raise TypeError('x can only be an instance of type baseinteger') # depends on [control=['if'], data=[]] if not isinstance(y, baseinteger): raise TypeError('y can only be an instance of type baseinteger') # depends on [control=['if'], data=[]] if not isinstance(width, baseinteger): raise TypeError('width can only be an instance of type baseinteger') # depends on [control=['if'], data=[]] if not isinstance(height, baseinteger): raise TypeError('height can only be an instance of type baseinteger') # depends on [control=['if'], data=[]] self._call('viewportChanged', in_p=[screen_id, x, y, width, height])
def live_source_load(self, source):
    """
    Send new source code to the bot

    :param source: new source code; trailing newlines are stripped and the
        code is only transmitted when it differs from the current source
    :return: None
    """
    source = source.rstrip('\n')
    if source == self.source:
        # Source unchanged: nothing to send.
        return
    self.source = source
    # ASCII-encode and base64-wrap the payload for transport.
    b64_source = base64.b64encode(source.encode("ascii"))
    self.send_command(CMD_LOAD_BASE64, b64_source)
def function[live_source_load, parameter[self, source]]: constant[ Send new source code to the bot :param source: :param good_cb: callback called if code was good :param bad_cb: callback called if code was bad (will get contents of exception) :return: ] variable[source] assign[=] call[name[source].rstrip, parameter[constant[ ]]] if compare[name[source] not_equal[!=] name[self].source] begin[:] name[self].source assign[=] name[source] variable[b64_source] assign[=] call[name[base64].b64encode, parameter[call[name[bytes], parameter[call[name[bytearray], parameter[name[source], constant[ascii]]]]]]] call[name[self].send_command, parameter[name[CMD_LOAD_BASE64], name[b64_source]]]
keyword[def] identifier[live_source_load] ( identifier[self] , identifier[source] ): literal[string] identifier[source] = identifier[source] . identifier[rstrip] ( literal[string] ) keyword[if] identifier[source] != identifier[self] . identifier[source] : identifier[self] . identifier[source] = identifier[source] identifier[b64_source] = identifier[base64] . identifier[b64encode] ( identifier[bytes] ( identifier[bytearray] ( identifier[source] , literal[string] ))) identifier[self] . identifier[send_command] ( identifier[CMD_LOAD_BASE64] , identifier[b64_source] )
def live_source_load(self, source): """ Send new source code to the bot :param source: :param good_cb: callback called if code was good :param bad_cb: callback called if code was bad (will get contents of exception) :return: """ source = source.rstrip('\n') if source != self.source: self.source = source b64_source = base64.b64encode(bytes(bytearray(source, 'ascii'))) self.send_command(CMD_LOAD_BASE64, b64_source) # depends on [control=['if'], data=['source']]
def standard_sc_expr_str(sc):
    """
    Standard symbol/choice printing function. Uses plain Kconfig syntax, and
    displays choices as <choice> (or <choice NAME>, for named choices).

    See expr_str().
    """
    if sc.__class__ is Symbol:
        # Constant symbols are quoted (with escaping); others print bare.
        if sc.is_constant:
            return '"{}"'.format(escape(sc.name))
        return sc.name

    # Choice
    if sc.name:
        return "<choice {}>".format(sc.name)
    return "<choice>"
def function[standard_sc_expr_str, parameter[sc]]: constant[ Standard symbol/choice printing function. Uses plain Kconfig syntax, and displays choices as <choice> (or <choice NAME>, for named choices). See expr_str(). ] if compare[name[sc].__class__ is name[Symbol]] begin[:] return[<ast.IfExp object at 0x7da1b206b730>] return[<ast.IfExp object at 0x7da1b2068760>]
keyword[def] identifier[standard_sc_expr_str] ( identifier[sc] ): literal[string] keyword[if] identifier[sc] . identifier[__class__] keyword[is] identifier[Symbol] : keyword[return] literal[string] . identifier[format] ( identifier[escape] ( identifier[sc] . identifier[name] )) keyword[if] identifier[sc] . identifier[is_constant] keyword[else] identifier[sc] . identifier[name] keyword[return] literal[string] . identifier[format] ( identifier[sc] . identifier[name] ) keyword[if] identifier[sc] . identifier[name] keyword[else] literal[string]
def standard_sc_expr_str(sc): """ Standard symbol/choice printing function. Uses plain Kconfig syntax, and displays choices as <choice> (or <choice NAME>, for named choices). See expr_str(). """ if sc.__class__ is Symbol: return '"{}"'.format(escape(sc.name)) if sc.is_constant else sc.name # depends on [control=['if'], data=[]] # Choice return '<choice {}>'.format(sc.name) if sc.name else '<choice>'
def get_name( self, name, lastblock=None, include_expired=False, include_history=True ): """ Given a name, return the latest version and history of the metadata gleaned from the blockchain. Name must be fully-qualified (i.e. name.ns_id) Return None if no such name is currently registered. NOTE: returns names that are revoked """ if lastblock is None: lastblock = self.lastblock cur = self.db.cursor() name_rec = namedb_get_name( cur, name, lastblock, include_expired=include_expired, include_history=include_history ) return name_rec
def function[get_name, parameter[self, name, lastblock, include_expired, include_history]]: constant[ Given a name, return the latest version and history of the metadata gleaned from the blockchain. Name must be fully-qualified (i.e. name.ns_id) Return None if no such name is currently registered. NOTE: returns names that are revoked ] if compare[name[lastblock] is constant[None]] begin[:] variable[lastblock] assign[=] name[self].lastblock variable[cur] assign[=] call[name[self].db.cursor, parameter[]] variable[name_rec] assign[=] call[name[namedb_get_name], parameter[name[cur], name[name], name[lastblock]]] return[name[name_rec]]
keyword[def] identifier[get_name] ( identifier[self] , identifier[name] , identifier[lastblock] = keyword[None] , identifier[include_expired] = keyword[False] , identifier[include_history] = keyword[True] ): literal[string] keyword[if] identifier[lastblock] keyword[is] keyword[None] : identifier[lastblock] = identifier[self] . identifier[lastblock] identifier[cur] = identifier[self] . identifier[db] . identifier[cursor] () identifier[name_rec] = identifier[namedb_get_name] ( identifier[cur] , identifier[name] , identifier[lastblock] , identifier[include_expired] = identifier[include_expired] , identifier[include_history] = identifier[include_history] ) keyword[return] identifier[name_rec]
def get_name(self, name, lastblock=None, include_expired=False, include_history=True): """ Given a name, return the latest version and history of the metadata gleaned from the blockchain. Name must be fully-qualified (i.e. name.ns_id) Return None if no such name is currently registered. NOTE: returns names that are revoked """ if lastblock is None: lastblock = self.lastblock # depends on [control=['if'], data=['lastblock']] cur = self.db.cursor() name_rec = namedb_get_name(cur, name, lastblock, include_expired=include_expired, include_history=include_history) return name_rec
def is_correlated(self, threshold=0): """ Compare with a threshold to determine whether two timeseries correlate to each other. :return: a CorrelationResult object if two time series correlate otherwise false. """ return self.correlation_result if self.correlation_result.coefficient >= threshold else False
def function[is_correlated, parameter[self, threshold]]: constant[ Compare with a threshold to determine whether two timeseries correlate to each other. :return: a CorrelationResult object if two time series correlate otherwise false. ] return[<ast.IfExp object at 0x7da2054a6170>]
keyword[def] identifier[is_correlated] ( identifier[self] , identifier[threshold] = literal[int] ): literal[string] keyword[return] identifier[self] . identifier[correlation_result] keyword[if] identifier[self] . identifier[correlation_result] . identifier[coefficient] >= identifier[threshold] keyword[else] keyword[False]
def is_correlated(self, threshold=0): """ Compare with a threshold to determine whether two timeseries correlate to each other. :return: a CorrelationResult object if two time series correlate otherwise false. """ return self.correlation_result if self.correlation_result.coefficient >= threshold else False
def sf(f, dirpath, jottapath): """Create and return a SyncFile tuple from filename. localpath will be a byte string with utf8 code points jottapath will be a unicode string""" log.debug('Create SyncFile from %s', repr(f)) log.debug('Got encoded filename %r, joining with dirpath %r', _encode_filename_to_filesystem(f), dirpath) return SyncFile(localpath=os.path.join(dirpath, _encode_filename_to_filesystem(f)), jottapath=posixpath.join(_decode_filename_to_unicode(jottapath), _decode_filename_to_unicode(f)))
def function[sf, parameter[f, dirpath, jottapath]]: constant[Create and return a SyncFile tuple from filename. localpath will be a byte string with utf8 code points jottapath will be a unicode string] call[name[log].debug, parameter[constant[Create SyncFile from %s], call[name[repr], parameter[name[f]]]]] call[name[log].debug, parameter[constant[Got encoded filename %r, joining with dirpath %r], call[name[_encode_filename_to_filesystem], parameter[name[f]]], name[dirpath]]] return[call[name[SyncFile], parameter[]]]
keyword[def] identifier[sf] ( identifier[f] , identifier[dirpath] , identifier[jottapath] ): literal[string] identifier[log] . identifier[debug] ( literal[string] , identifier[repr] ( identifier[f] )) identifier[log] . identifier[debug] ( literal[string] , identifier[_encode_filename_to_filesystem] ( identifier[f] ), identifier[dirpath] ) keyword[return] identifier[SyncFile] ( identifier[localpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[dirpath] , identifier[_encode_filename_to_filesystem] ( identifier[f] )), identifier[jottapath] = identifier[posixpath] . identifier[join] ( identifier[_decode_filename_to_unicode] ( identifier[jottapath] ), identifier[_decode_filename_to_unicode] ( identifier[f] )))
def sf(f, dirpath, jottapath): """Create and return a SyncFile tuple from filename. localpath will be a byte string with utf8 code points jottapath will be a unicode string""" log.debug('Create SyncFile from %s', repr(f)) log.debug('Got encoded filename %r, joining with dirpath %r', _encode_filename_to_filesystem(f), dirpath) return SyncFile(localpath=os.path.join(dirpath, _encode_filename_to_filesystem(f)), jottapath=posixpath.join(_decode_filename_to_unicode(jottapath), _decode_filename_to_unicode(f)))
def K(self, X, X2, target): """Return covariance between X and X2.""" self._K_computations(X, X2) target += self.variance*self._K_dvar
def function[K, parameter[self, X, X2, target]]: constant[Return covariance between X and X2.] call[name[self]._K_computations, parameter[name[X], name[X2]]] <ast.AugAssign object at 0x7da1b1c2f460>
keyword[def] identifier[K] ( identifier[self] , identifier[X] , identifier[X2] , identifier[target] ): literal[string] identifier[self] . identifier[_K_computations] ( identifier[X] , identifier[X2] ) identifier[target] += identifier[self] . identifier[variance] * identifier[self] . identifier[_K_dvar]
def K(self, X, X2, target): """Return covariance between X and X2.""" self._K_computations(X, X2) target += self.variance * self._K_dvar
def update_or_create(self, model, **kwargs): '''Update or create a new instance of ``model``. This method can raise an exception if the ``kwargs`` dictionary contains field data that does not validate. :param model: a :class:`StdModel` :param kwargs: dictionary of parameters. :returns: A two elements tuple containing the instance and a boolean indicating if the instance was created or not. ''' backend = self.model(model).backend return backend.execute(self._update_or_create(model, **kwargs))
def function[update_or_create, parameter[self, model]]: constant[Update or create a new instance of ``model``. This method can raise an exception if the ``kwargs`` dictionary contains field data that does not validate. :param model: a :class:`StdModel` :param kwargs: dictionary of parameters. :returns: A two elements tuple containing the instance and a boolean indicating if the instance was created or not. ] variable[backend] assign[=] call[name[self].model, parameter[name[model]]].backend return[call[name[backend].execute, parameter[call[name[self]._update_or_create, parameter[name[model]]]]]]
keyword[def] identifier[update_or_create] ( identifier[self] , identifier[model] ,** identifier[kwargs] ): literal[string] identifier[backend] = identifier[self] . identifier[model] ( identifier[model] ). identifier[backend] keyword[return] identifier[backend] . identifier[execute] ( identifier[self] . identifier[_update_or_create] ( identifier[model] ,** identifier[kwargs] ))
def update_or_create(self, model, **kwargs): """Update or create a new instance of ``model``. This method can raise an exception if the ``kwargs`` dictionary contains field data that does not validate. :param model: a :class:`StdModel` :param kwargs: dictionary of parameters. :returns: A two elements tuple containing the instance and a boolean indicating if the instance was created or not. """ backend = self.model(model).backend return backend.execute(self._update_or_create(model, **kwargs))
def update_http_method_params(self): """ Update HTTP url parameters based on msm_id and query filters if there are any. """ url_params = {} if self.start: url_params.update( {"start": int(calendar.timegm(self.start.timetuple()))} ) if self.stop: url_params.update( {"stop": int(calendar.timegm(self.stop.timetuple()))} ) if self.probe_ids: url_params.update({"probe_ids": self.probe_ids}) self.http_method_args["params"].update(url_params)
def function[update_http_method_params, parameter[self]]: constant[ Update HTTP url parameters based on msm_id and query filters if there are any. ] variable[url_params] assign[=] dictionary[[], []] if name[self].start begin[:] call[name[url_params].update, parameter[dictionary[[<ast.Constant object at 0x7da1b034b580>], [<ast.Call object at 0x7da1b0348250>]]]] if name[self].stop begin[:] call[name[url_params].update, parameter[dictionary[[<ast.Constant object at 0x7da1b0349510>], [<ast.Call object at 0x7da1b034b190>]]]] if name[self].probe_ids begin[:] call[name[url_params].update, parameter[dictionary[[<ast.Constant object at 0x7da1b04f6830>], [<ast.Attribute object at 0x7da1b04f70d0>]]]] call[call[name[self].http_method_args][constant[params]].update, parameter[name[url_params]]]
keyword[def] identifier[update_http_method_params] ( identifier[self] ): literal[string] identifier[url_params] ={} keyword[if] identifier[self] . identifier[start] : identifier[url_params] . identifier[update] ( { literal[string] : identifier[int] ( identifier[calendar] . identifier[timegm] ( identifier[self] . identifier[start] . identifier[timetuple] ()))} ) keyword[if] identifier[self] . identifier[stop] : identifier[url_params] . identifier[update] ( { literal[string] : identifier[int] ( identifier[calendar] . identifier[timegm] ( identifier[self] . identifier[stop] . identifier[timetuple] ()))} ) keyword[if] identifier[self] . identifier[probe_ids] : identifier[url_params] . identifier[update] ({ literal[string] : identifier[self] . identifier[probe_ids] }) identifier[self] . identifier[http_method_args] [ literal[string] ]. identifier[update] ( identifier[url_params] )
def update_http_method_params(self): """ Update HTTP url parameters based on msm_id and query filters if there are any. """ url_params = {} if self.start: url_params.update({'start': int(calendar.timegm(self.start.timetuple()))}) # depends on [control=['if'], data=[]] if self.stop: url_params.update({'stop': int(calendar.timegm(self.stop.timetuple()))}) # depends on [control=['if'], data=[]] if self.probe_ids: url_params.update({'probe_ids': self.probe_ids}) # depends on [control=['if'], data=[]] self.http_method_args['params'].update(url_params)
def shell2json(s): """Convert shell syntax to json.""" replace = { r'BinData\(.+?\)': '1', r'(new )?Date\(.+?\)': '1', r'Timestamp\(.+?\)': '1', r'ObjectId\(.+?\)': '1', r'DBRef\(.+?\)': '1', r'undefined': '1', r'MinKey': '1', r'MaxKey': '1', r'NumberLong\(.+?\)': '1', r'/.+?/\w*': '1' } for key, value in replace.items(): s = re.sub(key, value, s) return s
def function[shell2json, parameter[s]]: constant[Convert shell syntax to json.] variable[replace] assign[=] dictionary[[<ast.Constant object at 0x7da1b175efb0>, <ast.Constant object at 0x7da1b175f190>, <ast.Constant object at 0x7da1b175d720>, <ast.Constant object at 0x7da1b175e260>, <ast.Constant object at 0x7da1b175d3c0>, <ast.Constant object at 0x7da1b175e0e0>, <ast.Constant object at 0x7da1b175fa30>, <ast.Constant object at 0x7da1b175eb30>, <ast.Constant object at 0x7da1b175c220>, <ast.Constant object at 0x7da1b175e890>], [<ast.Constant object at 0x7da1b175d7b0>, <ast.Constant object at 0x7da1b175ccd0>, <ast.Constant object at 0x7da1b175eda0>, <ast.Constant object at 0x7da1b175f610>, <ast.Constant object at 0x7da1b175ef50>, <ast.Constant object at 0x7da1b175dba0>, <ast.Constant object at 0x7da1b175d660>, <ast.Constant object at 0x7da1b175c2e0>, <ast.Constant object at 0x7da1b175f0a0>, <ast.Constant object at 0x7da1b175e830>]] for taget[tuple[[<ast.Name object at 0x7da1b175d9c0>, <ast.Name object at 0x7da1b175f670>]]] in starred[call[name[replace].items, parameter[]]] begin[:] variable[s] assign[=] call[name[re].sub, parameter[name[key], name[value], name[s]]] return[name[s]]
keyword[def] identifier[shell2json] ( identifier[s] ): literal[string] identifier[replace] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] } keyword[for] identifier[key] , identifier[value] keyword[in] identifier[replace] . identifier[items] (): identifier[s] = identifier[re] . identifier[sub] ( identifier[key] , identifier[value] , identifier[s] ) keyword[return] identifier[s]
def shell2json(s): """Convert shell syntax to json.""" replace = {'BinData\\(.+?\\)': '1', '(new )?Date\\(.+?\\)': '1', 'Timestamp\\(.+?\\)': '1', 'ObjectId\\(.+?\\)': '1', 'DBRef\\(.+?\\)': '1', 'undefined': '1', 'MinKey': '1', 'MaxKey': '1', 'NumberLong\\(.+?\\)': '1', '/.+?/\\w*': '1'} for (key, value) in replace.items(): s = re.sub(key, value, s) # depends on [control=['for'], data=[]] return s
def reverse_axis(self, axis_to_reverse): """Reverse an axis in all figure plots. This will reverse the tick marks on an axis for each plot in the figure. It can be overridden in SinglePlot class. Args: axis_to_reverse (str): Axis to reverse. Supports `x` and `y`. Raises: ValueError: The string representing the axis to reverse is not `x` or `y`. """ if axis_to_reverse.lower() == 'x': self.general.reverse_x_axis = True if axis_to_reverse.lower() == 'y': self.general.reverse_y_axis = True if axis_to_reverse.lower() != 'x' or axis_to_reverse.lower() != 'y': raise ValueError('Axis for reversing needs to be either x or y.') return
def function[reverse_axis, parameter[self, axis_to_reverse]]: constant[Reverse an axis in all figure plots. This will reverse the tick marks on an axis for each plot in the figure. It can be overridden in SinglePlot class. Args: axis_to_reverse (str): Axis to reverse. Supports `x` and `y`. Raises: ValueError: The string representing the axis to reverse is not `x` or `y`. ] if compare[call[name[axis_to_reverse].lower, parameter[]] equal[==] constant[x]] begin[:] name[self].general.reverse_x_axis assign[=] constant[True] if compare[call[name[axis_to_reverse].lower, parameter[]] equal[==] constant[y]] begin[:] name[self].general.reverse_y_axis assign[=] constant[True] if <ast.BoolOp object at 0x7da18c4ccc40> begin[:] <ast.Raise object at 0x7da18c4cfb50> return[None]
keyword[def] identifier[reverse_axis] ( identifier[self] , identifier[axis_to_reverse] ): literal[string] keyword[if] identifier[axis_to_reverse] . identifier[lower] ()== literal[string] : identifier[self] . identifier[general] . identifier[reverse_x_axis] = keyword[True] keyword[if] identifier[axis_to_reverse] . identifier[lower] ()== literal[string] : identifier[self] . identifier[general] . identifier[reverse_y_axis] = keyword[True] keyword[if] identifier[axis_to_reverse] . identifier[lower] ()!= literal[string] keyword[or] identifier[axis_to_reverse] . identifier[lower] ()!= literal[string] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return]
def reverse_axis(self, axis_to_reverse): """Reverse an axis in all figure plots. This will reverse the tick marks on an axis for each plot in the figure. It can be overridden in SinglePlot class. Args: axis_to_reverse (str): Axis to reverse. Supports `x` and `y`. Raises: ValueError: The string representing the axis to reverse is not `x` or `y`. """ if axis_to_reverse.lower() == 'x': self.general.reverse_x_axis = True # depends on [control=['if'], data=[]] if axis_to_reverse.lower() == 'y': self.general.reverse_y_axis = True # depends on [control=['if'], data=[]] if axis_to_reverse.lower() != 'x' or axis_to_reverse.lower() != 'y': raise ValueError('Axis for reversing needs to be either x or y.') # depends on [control=['if'], data=[]] return
def today_as_utc_datetime(): """Datetime/Date comparisons aren't great, and someone might configure TODAY, to be a date.""" now = today() if not isinstance(now, datetime) and isinstance(now, date): now = datetime.combine(now, datetime.min.time()) now = now.replace(tzinfo=tz.gettz('UTC')) return now
def function[today_as_utc_datetime, parameter[]]: constant[Datetime/Date comparisons aren't great, and someone might configure TODAY, to be a date.] variable[now] assign[=] call[name[today], parameter[]] if <ast.BoolOp object at 0x7da1b0bf35b0> begin[:] variable[now] assign[=] call[name[datetime].combine, parameter[name[now], call[name[datetime].min.time, parameter[]]]] variable[now] assign[=] call[name[now].replace, parameter[]] return[name[now]]
keyword[def] identifier[today_as_utc_datetime] (): literal[string] identifier[now] = identifier[today] () keyword[if] keyword[not] identifier[isinstance] ( identifier[now] , identifier[datetime] ) keyword[and] identifier[isinstance] ( identifier[now] , identifier[date] ): identifier[now] = identifier[datetime] . identifier[combine] ( identifier[now] , identifier[datetime] . identifier[min] . identifier[time] ()) identifier[now] = identifier[now] . identifier[replace] ( identifier[tzinfo] = identifier[tz] . identifier[gettz] ( literal[string] )) keyword[return] identifier[now]
def today_as_utc_datetime(): """Datetime/Date comparisons aren't great, and someone might configure TODAY, to be a date.""" now = today() if not isinstance(now, datetime) and isinstance(now, date): now = datetime.combine(now, datetime.min.time()) now = now.replace(tzinfo=tz.gettz('UTC')) # depends on [control=['if'], data=[]] return now
def translate(self, package, into=None): """From a binary package, translate to a local binary distribution.""" if not package.local: raise ValueError('BinaryTranslator cannot translate remote packages.') if not isinstance(package, self._package_type): return None if not package.compatible(self._supported_tags): TRACER.log('Target package %s is not compatible with %s' % ( package, self._supported_tags)) return None into = into or safe_mkdtemp() target_path = os.path.join(into, package.filename) safe_copy(package.local_path, target_path) return DistributionHelper.distribution_from_path(target_path)
def function[translate, parameter[self, package, into]]: constant[From a binary package, translate to a local binary distribution.] if <ast.UnaryOp object at 0x7da204620370> begin[:] <ast.Raise object at 0x7da204620a30> if <ast.UnaryOp object at 0x7da204622230> begin[:] return[constant[None]] if <ast.UnaryOp object at 0x7da204622650> begin[:] call[name[TRACER].log, parameter[binary_operation[constant[Target package %s is not compatible with %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204621ae0>, <ast.Attribute object at 0x7da204622dd0>]]]]] return[constant[None]] variable[into] assign[=] <ast.BoolOp object at 0x7da204622f50> variable[target_path] assign[=] call[name[os].path.join, parameter[name[into], name[package].filename]] call[name[safe_copy], parameter[name[package].local_path, name[target_path]]] return[call[name[DistributionHelper].distribution_from_path, parameter[name[target_path]]]]
keyword[def] identifier[translate] ( identifier[self] , identifier[package] , identifier[into] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[package] . identifier[local] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[package] , identifier[self] . identifier[_package_type] ): keyword[return] keyword[None] keyword[if] keyword[not] identifier[package] . identifier[compatible] ( identifier[self] . identifier[_supported_tags] ): identifier[TRACER] . identifier[log] ( literal[string] %( identifier[package] , identifier[self] . identifier[_supported_tags] )) keyword[return] keyword[None] identifier[into] = identifier[into] keyword[or] identifier[safe_mkdtemp] () identifier[target_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[into] , identifier[package] . identifier[filename] ) identifier[safe_copy] ( identifier[package] . identifier[local_path] , identifier[target_path] ) keyword[return] identifier[DistributionHelper] . identifier[distribution_from_path] ( identifier[target_path] )
def translate(self, package, into=None): """From a binary package, translate to a local binary distribution.""" if not package.local: raise ValueError('BinaryTranslator cannot translate remote packages.') # depends on [control=['if'], data=[]] if not isinstance(package, self._package_type): return None # depends on [control=['if'], data=[]] if not package.compatible(self._supported_tags): TRACER.log('Target package %s is not compatible with %s' % (package, self._supported_tags)) return None # depends on [control=['if'], data=[]] into = into or safe_mkdtemp() target_path = os.path.join(into, package.filename) safe_copy(package.local_path, target_path) return DistributionHelper.distribution_from_path(target_path)
def stop(self, timeout=None): """ Stops the task thread. Synchronous! """ with self._lock: if self._thread: self._queue.put_nowait(self._terminator) self._thread.join(timeout=timeout) self._thread = None self._thread_for_pid = None
def function[stop, parameter[self, timeout]]: constant[ Stops the task thread. Synchronous! ] with name[self]._lock begin[:] if name[self]._thread begin[:] call[name[self]._queue.put_nowait, parameter[name[self]._terminator]] call[name[self]._thread.join, parameter[]] name[self]._thread assign[=] constant[None] name[self]._thread_for_pid assign[=] constant[None]
keyword[def] identifier[stop] ( identifier[self] , identifier[timeout] = keyword[None] ): literal[string] keyword[with] identifier[self] . identifier[_lock] : keyword[if] identifier[self] . identifier[_thread] : identifier[self] . identifier[_queue] . identifier[put_nowait] ( identifier[self] . identifier[_terminator] ) identifier[self] . identifier[_thread] . identifier[join] ( identifier[timeout] = identifier[timeout] ) identifier[self] . identifier[_thread] = keyword[None] identifier[self] . identifier[_thread_for_pid] = keyword[None]
def stop(self, timeout=None): """ Stops the task thread. Synchronous! """ with self._lock: if self._thread: self._queue.put_nowait(self._terminator) self._thread.join(timeout=timeout) self._thread = None self._thread_for_pid = None # depends on [control=['if'], data=[]] # depends on [control=['with'], data=[]]
def snake_to_camel(value): """ Converts a snake_case_string to a camelCaseString. >>> snake_to_camel("foo_bar_baz") 'fooBarBaz' """ camel = "".join(word.title() for word in value.split("_")) return value[:1].lower() + camel[1:]
def function[snake_to_camel, parameter[value]]: constant[ Converts a snake_case_string to a camelCaseString. >>> snake_to_camel("foo_bar_baz") 'fooBarBaz' ] variable[camel] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da204622080>]] return[binary_operation[call[call[name[value]][<ast.Slice object at 0x7da2046232b0>].lower, parameter[]] + call[name[camel]][<ast.Slice object at 0x7da204621450>]]]
keyword[def] identifier[snake_to_camel] ( identifier[value] ): literal[string] identifier[camel] = literal[string] . identifier[join] ( identifier[word] . identifier[title] () keyword[for] identifier[word] keyword[in] identifier[value] . identifier[split] ( literal[string] )) keyword[return] identifier[value] [: literal[int] ]. identifier[lower] ()+ identifier[camel] [ literal[int] :]
def snake_to_camel(value): """ Converts a snake_case_string to a camelCaseString. >>> snake_to_camel("foo_bar_baz") 'fooBarBaz' """ camel = ''.join((word.title() for word in value.split('_'))) return value[:1].lower() + camel[1:]
def _sampleLocationOnSide(self): """ Helper method to sample from the lateral surface of a cylinder. """ z = random.uniform(-1, 1) * self.height / 2. sampledAngle = 2 * random.random() * pi x, y = self.radius * cos(sampledAngle), self.radius * sin(sampledAngle) return [x, y, z]
def function[_sampleLocationOnSide, parameter[self]]: constant[ Helper method to sample from the lateral surface of a cylinder. ] variable[z] assign[=] binary_operation[binary_operation[call[name[random].uniform, parameter[<ast.UnaryOp object at 0x7da1b0831d20>, constant[1]]] * name[self].height] / constant[2.0]] variable[sampledAngle] assign[=] binary_operation[binary_operation[constant[2] * call[name[random].random, parameter[]]] * name[pi]] <ast.Tuple object at 0x7da1b0832020> assign[=] tuple[[<ast.BinOp object at 0x7da1b0832380>, <ast.BinOp object at 0x7da1b08324a0>]] return[list[[<ast.Name object at 0x7da1b0832620>, <ast.Name object at 0x7da1b0832650>, <ast.Name object at 0x7da1b0832680>]]]
keyword[def] identifier[_sampleLocationOnSide] ( identifier[self] ): literal[string] identifier[z] = identifier[random] . identifier[uniform] (- literal[int] , literal[int] )* identifier[self] . identifier[height] / literal[int] identifier[sampledAngle] = literal[int] * identifier[random] . identifier[random] ()* identifier[pi] identifier[x] , identifier[y] = identifier[self] . identifier[radius] * identifier[cos] ( identifier[sampledAngle] ), identifier[self] . identifier[radius] * identifier[sin] ( identifier[sampledAngle] ) keyword[return] [ identifier[x] , identifier[y] , identifier[z] ]
def _sampleLocationOnSide(self): """ Helper method to sample from the lateral surface of a cylinder. """ z = random.uniform(-1, 1) * self.height / 2.0 sampledAngle = 2 * random.random() * pi (x, y) = (self.radius * cos(sampledAngle), self.radius * sin(sampledAngle)) return [x, y, z]
def build_register_credit_card_parameters(client_ref: str) -> PaymentParameters: """ Builds the parameters needed to present the user with a datatrans form to register a credit card. Contrary to a payment form, datatrans will not show an amount. :param client_ref: A unique reference for this alias capture. :return: The parameters needed to display the datatrans form """ amount = 0 currency = 'CHF' # Datatrans requires this value to be filled, so we use this arbitrary currency. merchant_id = web_merchant_id refno = client_ref sign = sign_web(merchant_id, amount, currency, refno) parameters = PaymentParameters( merchant_id=merchant_id, amount=amount, currency=currency, refno=refno, sign=sign, use_alias=True, ) logger.info('building-payment-parameters', parameters=parameters) return parameters
def function[build_register_credit_card_parameters, parameter[client_ref]]: constant[ Builds the parameters needed to present the user with a datatrans form to register a credit card. Contrary to a payment form, datatrans will not show an amount. :param client_ref: A unique reference for this alias capture. :return: The parameters needed to display the datatrans form ] variable[amount] assign[=] constant[0] variable[currency] assign[=] constant[CHF] variable[merchant_id] assign[=] name[web_merchant_id] variable[refno] assign[=] name[client_ref] variable[sign] assign[=] call[name[sign_web], parameter[name[merchant_id], name[amount], name[currency], name[refno]]] variable[parameters] assign[=] call[name[PaymentParameters], parameter[]] call[name[logger].info, parameter[constant[building-payment-parameters]]] return[name[parameters]]
keyword[def] identifier[build_register_credit_card_parameters] ( identifier[client_ref] : identifier[str] )-> identifier[PaymentParameters] : literal[string] identifier[amount] = literal[int] identifier[currency] = literal[string] identifier[merchant_id] = identifier[web_merchant_id] identifier[refno] = identifier[client_ref] identifier[sign] = identifier[sign_web] ( identifier[merchant_id] , identifier[amount] , identifier[currency] , identifier[refno] ) identifier[parameters] = identifier[PaymentParameters] ( identifier[merchant_id] = identifier[merchant_id] , identifier[amount] = identifier[amount] , identifier[currency] = identifier[currency] , identifier[refno] = identifier[refno] , identifier[sign] = identifier[sign] , identifier[use_alias] = keyword[True] , ) identifier[logger] . identifier[info] ( literal[string] , identifier[parameters] = identifier[parameters] ) keyword[return] identifier[parameters]
def build_register_credit_card_parameters(client_ref: str) -> PaymentParameters: """ Builds the parameters needed to present the user with a datatrans form to register a credit card. Contrary to a payment form, datatrans will not show an amount. :param client_ref: A unique reference for this alias capture. :return: The parameters needed to display the datatrans form """ amount = 0 currency = 'CHF' # Datatrans requires this value to be filled, so we use this arbitrary currency. merchant_id = web_merchant_id refno = client_ref sign = sign_web(merchant_id, amount, currency, refno) parameters = PaymentParameters(merchant_id=merchant_id, amount=amount, currency=currency, refno=refno, sign=sign, use_alias=True) logger.info('building-payment-parameters', parameters=parameters) return parameters
def align_two_alignments(aln1, aln2, moltype, params=None): """Returns an Alignment object from two existing Alignments. aln1, aln2: cogent.core.alignment.Alignment objects, or data that can be used to build them. - Mafft profile alignment only works with aligned sequences. Alignment object used to handle unaligned sequences. params: dict of parameters to pass in to the Mafft app controller. """ #create SequenceCollection object from seqs aln1 = Alignment(aln1,MolType=moltype) #Create mapping between abbreviated IDs and full IDs aln1_int_map, aln1_int_keys = aln1.getIntMap() #Create SequenceCollection from int_map. aln1_int_map = Alignment(aln1_int_map,MolType=moltype) #create Alignment object from aln aln2 = Alignment(aln2,MolType=moltype) #Create mapping between abbreviated IDs and full IDs aln2_int_map, aln2_int_keys = aln2.getIntMap(prefix='seqn_') #Create SequenceCollection from int_map. aln2_int_map = Alignment(aln2_int_map,MolType=moltype) #Update aln1_int_keys with aln2_int_keys aln1_int_keys.update(aln2_int_keys) #Create Mafft app. app = Mafft(InputHandler='_input_as_paths',\ params=params, SuppressStderr=False) app._command = 'mafft-profile' aln1_path = app._tempfile_as_multiline_string(aln1_int_map.toFasta()) aln2_path = app._tempfile_as_multiline_string(aln2_int_map.toFasta()) filepaths = [aln1_path,aln2_path] #Get results using int_map as input to app res = app(filepaths) #Get alignment as dict out of results alignment = dict(parse_fasta(res['StdOut'])) #Make new dict mapping original IDs new_alignment = {} for k,v in alignment.items(): key = k.replace('_seed_','') new_alignment[aln1_int_keys[key]]=v #Create an Alignment object from alignment dict new_alignment = Alignment(new_alignment,MolType=moltype) #Clean up res.cleanUp() remove(aln1_path) remove(aln2_path) remove('pre') remove('trace') del(aln1,aln1_int_map,aln1_int_keys,\ aln2,aln2_int_map,aln2_int_keys,app,res,alignment) return new_alignment
def function[align_two_alignments, parameter[aln1, aln2, moltype, params]]: constant[Returns an Alignment object from two existing Alignments. aln1, aln2: cogent.core.alignment.Alignment objects, or data that can be used to build them. - Mafft profile alignment only works with aligned sequences. Alignment object used to handle unaligned sequences. params: dict of parameters to pass in to the Mafft app controller. ] variable[aln1] assign[=] call[name[Alignment], parameter[name[aln1]]] <ast.Tuple object at 0x7da2047eabf0> assign[=] call[name[aln1].getIntMap, parameter[]] variable[aln1_int_map] assign[=] call[name[Alignment], parameter[name[aln1_int_map]]] variable[aln2] assign[=] call[name[Alignment], parameter[name[aln2]]] <ast.Tuple object at 0x7da2047e98d0> assign[=] call[name[aln2].getIntMap, parameter[]] variable[aln2_int_map] assign[=] call[name[Alignment], parameter[name[aln2_int_map]]] call[name[aln1_int_keys].update, parameter[name[aln2_int_keys]]] variable[app] assign[=] call[name[Mafft], parameter[]] name[app]._command assign[=] constant[mafft-profile] variable[aln1_path] assign[=] call[name[app]._tempfile_as_multiline_string, parameter[call[name[aln1_int_map].toFasta, parameter[]]]] variable[aln2_path] assign[=] call[name[app]._tempfile_as_multiline_string, parameter[call[name[aln2_int_map].toFasta, parameter[]]]] variable[filepaths] assign[=] list[[<ast.Name object at 0x7da1b0a6a3b0>, <ast.Name object at 0x7da1b0a6a920>]] variable[res] assign[=] call[name[app], parameter[name[filepaths]]] variable[alignment] assign[=] call[name[dict], parameter[call[name[parse_fasta], parameter[call[name[res]][constant[StdOut]]]]]] variable[new_alignment] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da18fe90c70>, <ast.Name object at 0x7da18fe92c50>]]] in starred[call[name[alignment].items, parameter[]]] begin[:] variable[key] assign[=] call[name[k].replace, parameter[constant[_seed_], constant[]]] 
call[name[new_alignment]][call[name[aln1_int_keys]][name[key]]] assign[=] name[v] variable[new_alignment] assign[=] call[name[Alignment], parameter[name[new_alignment]]] call[name[res].cleanUp, parameter[]] call[name[remove], parameter[name[aln1_path]]] call[name[remove], parameter[name[aln2_path]]] call[name[remove], parameter[constant[pre]]] call[name[remove], parameter[constant[trace]]] <ast.Delete object at 0x7da1b0bac220> return[name[new_alignment]]
keyword[def] identifier[align_two_alignments] ( identifier[aln1] , identifier[aln2] , identifier[moltype] , identifier[params] = keyword[None] ): literal[string] identifier[aln1] = identifier[Alignment] ( identifier[aln1] , identifier[MolType] = identifier[moltype] ) identifier[aln1_int_map] , identifier[aln1_int_keys] = identifier[aln1] . identifier[getIntMap] () identifier[aln1_int_map] = identifier[Alignment] ( identifier[aln1_int_map] , identifier[MolType] = identifier[moltype] ) identifier[aln2] = identifier[Alignment] ( identifier[aln2] , identifier[MolType] = identifier[moltype] ) identifier[aln2_int_map] , identifier[aln2_int_keys] = identifier[aln2] . identifier[getIntMap] ( identifier[prefix] = literal[string] ) identifier[aln2_int_map] = identifier[Alignment] ( identifier[aln2_int_map] , identifier[MolType] = identifier[moltype] ) identifier[aln1_int_keys] . identifier[update] ( identifier[aln2_int_keys] ) identifier[app] = identifier[Mafft] ( identifier[InputHandler] = literal[string] , identifier[params] = identifier[params] , identifier[SuppressStderr] = keyword[False] ) identifier[app] . identifier[_command] = literal[string] identifier[aln1_path] = identifier[app] . identifier[_tempfile_as_multiline_string] ( identifier[aln1_int_map] . identifier[toFasta] ()) identifier[aln2_path] = identifier[app] . identifier[_tempfile_as_multiline_string] ( identifier[aln2_int_map] . identifier[toFasta] ()) identifier[filepaths] =[ identifier[aln1_path] , identifier[aln2_path] ] identifier[res] = identifier[app] ( identifier[filepaths] ) identifier[alignment] = identifier[dict] ( identifier[parse_fasta] ( identifier[res] [ literal[string] ])) identifier[new_alignment] ={} keyword[for] identifier[k] , identifier[v] keyword[in] identifier[alignment] . identifier[items] (): identifier[key] = identifier[k] . 
identifier[replace] ( literal[string] , literal[string] ) identifier[new_alignment] [ identifier[aln1_int_keys] [ identifier[key] ]]= identifier[v] identifier[new_alignment] = identifier[Alignment] ( identifier[new_alignment] , identifier[MolType] = identifier[moltype] ) identifier[res] . identifier[cleanUp] () identifier[remove] ( identifier[aln1_path] ) identifier[remove] ( identifier[aln2_path] ) identifier[remove] ( literal[string] ) identifier[remove] ( literal[string] ) keyword[del] ( identifier[aln1] , identifier[aln1_int_map] , identifier[aln1_int_keys] , identifier[aln2] , identifier[aln2_int_map] , identifier[aln2_int_keys] , identifier[app] , identifier[res] , identifier[alignment] ) keyword[return] identifier[new_alignment]
def align_two_alignments(aln1, aln2, moltype, params=None): """Returns an Alignment object from two existing Alignments. aln1, aln2: cogent.core.alignment.Alignment objects, or data that can be used to build them. - Mafft profile alignment only works with aligned sequences. Alignment object used to handle unaligned sequences. params: dict of parameters to pass in to the Mafft app controller. """ #create SequenceCollection object from seqs aln1 = Alignment(aln1, MolType=moltype) #Create mapping between abbreviated IDs and full IDs (aln1_int_map, aln1_int_keys) = aln1.getIntMap() #Create SequenceCollection from int_map. aln1_int_map = Alignment(aln1_int_map, MolType=moltype) #create Alignment object from aln aln2 = Alignment(aln2, MolType=moltype) #Create mapping between abbreviated IDs and full IDs (aln2_int_map, aln2_int_keys) = aln2.getIntMap(prefix='seqn_') #Create SequenceCollection from int_map. aln2_int_map = Alignment(aln2_int_map, MolType=moltype) #Update aln1_int_keys with aln2_int_keys aln1_int_keys.update(aln2_int_keys) #Create Mafft app. 
app = Mafft(InputHandler='_input_as_paths', params=params, SuppressStderr=False) app._command = 'mafft-profile' aln1_path = app._tempfile_as_multiline_string(aln1_int_map.toFasta()) aln2_path = app._tempfile_as_multiline_string(aln2_int_map.toFasta()) filepaths = [aln1_path, aln2_path] #Get results using int_map as input to app res = app(filepaths) #Get alignment as dict out of results alignment = dict(parse_fasta(res['StdOut'])) #Make new dict mapping original IDs new_alignment = {} for (k, v) in alignment.items(): key = k.replace('_seed_', '') new_alignment[aln1_int_keys[key]] = v # depends on [control=['for'], data=[]] #Create an Alignment object from alignment dict new_alignment = Alignment(new_alignment, MolType=moltype) #Clean up res.cleanUp() remove(aln1_path) remove(aln2_path) remove('pre') remove('trace') del (aln1, aln1_int_map, aln1_int_keys, aln2, aln2_int_map, aln2_int_keys, app, res, alignment) return new_alignment
def apply_statusbar_settings(self):
    """Update status bar widgets settings"""
    show_status_bar = CONF.get('main', 'show_status_bar')
    self.statusBar().setVisible(show_status_bar)
    if not show_status_bar:
        # Status bar hidden: nothing else to configure.
        return
    widget_specs = ((self.mem_status, 'memory_usage'),
                    (self.cpu_status, 'cpu_usage'))
    for widget, name in widget_specs:
        if widget is None:
            continue
        widget.setVisible(CONF.get('main', '%s/enable' % name))
        widget.set_interval(CONF.get('main', '%s/timeout' % name))
def function[apply_statusbar_settings, parameter[self]]: constant[Update status bar widgets settings] variable[show_status_bar] assign[=] call[name[CONF].get, parameter[constant[main], constant[show_status_bar]]] call[call[name[self].statusBar, parameter[]].setVisible, parameter[name[show_status_bar]]] if name[show_status_bar] begin[:] for taget[tuple[[<ast.Name object at 0x7da2054a52a0>, <ast.Name object at 0x7da2054a5c30>]]] in starred[tuple[[<ast.Tuple object at 0x7da2054a5bd0>, <ast.Tuple object at 0x7da2054a5960>]]] begin[:] if compare[name[widget] is_not constant[None]] begin[:] call[name[widget].setVisible, parameter[call[name[CONF].get, parameter[constant[main], binary_operation[constant[%s/enable] <ast.Mod object at 0x7da2590d6920> name[name]]]]]] call[name[widget].set_interval, parameter[call[name[CONF].get, parameter[constant[main], binary_operation[constant[%s/timeout] <ast.Mod object at 0x7da2590d6920> name[name]]]]]]
keyword[def] identifier[apply_statusbar_settings] ( identifier[self] ): literal[string] identifier[show_status_bar] = identifier[CONF] . identifier[get] ( literal[string] , literal[string] ) identifier[self] . identifier[statusBar] (). identifier[setVisible] ( identifier[show_status_bar] ) keyword[if] identifier[show_status_bar] : keyword[for] identifier[widget] , identifier[name] keyword[in] (( identifier[self] . identifier[mem_status] , literal[string] ), ( identifier[self] . identifier[cpu_status] , literal[string] )): keyword[if] identifier[widget] keyword[is] keyword[not] keyword[None] : identifier[widget] . identifier[setVisible] ( identifier[CONF] . identifier[get] ( literal[string] , literal[string] % identifier[name] )) identifier[widget] . identifier[set_interval] ( identifier[CONF] . identifier[get] ( literal[string] , literal[string] % identifier[name] )) keyword[else] : keyword[return]
def apply_statusbar_settings(self): """Update status bar widgets settings""" show_status_bar = CONF.get('main', 'show_status_bar') self.statusBar().setVisible(show_status_bar) if show_status_bar: for (widget, name) in ((self.mem_status, 'memory_usage'), (self.cpu_status, 'cpu_usage')): if widget is not None: widget.setVisible(CONF.get('main', '%s/enable' % name)) widget.set_interval(CONF.get('main', '%s/timeout' % name)) # depends on [control=['if'], data=['widget']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: return
def group_exists(name, region=None, key=None, keyid=None, profile=None):
    '''
    Check to see if a replication group exists.

    CLI example::

        salt myminion boto_elasticache.group_exists myelasticache
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    try:
        # Any successful describe call means the group exists.
        conn.describe_replication_groups(name)
    except boto.exception.BotoServerError as e:
        log.debug(e)
        return False
    return True
def function[group_exists, parameter[name, region, key, keyid, profile]]: constant[ Check to see if a replication group exists. CLI example:: salt myminion boto_elasticache.group_exists myelasticache ] variable[conn] assign[=] call[name[_get_conn], parameter[]] <ast.Try object at 0x7da1b1fa1f30>
keyword[def] identifier[group_exists] ( identifier[name] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ): literal[string] identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) keyword[try] : identifier[conn] . identifier[describe_replication_groups] ( identifier[name] ) keyword[return] keyword[True] keyword[except] identifier[boto] . identifier[exception] . identifier[BotoServerError] keyword[as] identifier[e] : identifier[log] . identifier[debug] ( identifier[e] ) keyword[return] keyword[False]
def group_exists(name, region=None, key=None, keyid=None, profile=None): """ Check to see if a replication group exists. CLI example:: salt myminion boto_elasticache.group_exists myelasticache """ conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: conn.describe_replication_groups(name) return True # depends on [control=['try'], data=[]] except boto.exception.BotoServerError as e: log.debug(e) return False # depends on [control=['except'], data=['e']]
def releasers(cls):
    """
    Returns all of the supported releasers.
    """
    supported = [
        HookReleaser,
        VersionFileReleaser,
        PythonReleaser,
        CocoaPodsReleaser,
        NPMReleaser,
        CReleaser,
        ChangelogReleaser,
        GitHubReleaser,
        GitReleaser,
    ]
    return supported
def function[releasers, parameter[cls]]: constant[ Returns all of the supported releasers. ] return[list[[<ast.Name object at 0x7da20c6c6230>, <ast.Name object at 0x7da20c6c4dc0>, <ast.Name object at 0x7da20c6c7490>, <ast.Name object at 0x7da20c6c6cb0>, <ast.Name object at 0x7da20c6c7eb0>, <ast.Name object at 0x7da20c6c6860>, <ast.Name object at 0x7da20c6c4ca0>, <ast.Name object at 0x7da20c6c7be0>, <ast.Name object at 0x7da20c6c54b0>]]]
keyword[def] identifier[releasers] ( identifier[cls] ): literal[string] keyword[return] [ identifier[HookReleaser] , identifier[VersionFileReleaser] , identifier[PythonReleaser] , identifier[CocoaPodsReleaser] , identifier[NPMReleaser] , identifier[CReleaser] , identifier[ChangelogReleaser] , identifier[GitHubReleaser] , identifier[GitReleaser] , ]
def releasers(cls): """ Returns all of the supported releasers. """ return [HookReleaser, VersionFileReleaser, PythonReleaser, CocoaPodsReleaser, NPMReleaser, CReleaser, ChangelogReleaser, GitHubReleaser, GitReleaser]
def generate_orders(events, sell_delay=5, sep=','):
    """Generate CSV orders based on events indicated in a DataFrame

    Arguments:
      events (pandas.DataFrame): Table of NaNs or 1's, one column for each symbol.
        1 indicates a BUY event. -1 a SELL event. nan or 0 is a nonevent.
        Mutated in place: a matching SELL event is injected ``sell_delay``
        rows after each BUY event.
      sell_delay (float): Number of days to wait before selling back the shares bought
      sep (str or None): if sep is None, orders will be yielded as tuples of
        `int`s, `float`s, and `str`s; otherwise the separator will be used to
        join the order parameters into the yielded str

    Returns:
      generator of str or tuple: yielded CSV rows in the format
        (yr, mo, day, symbol, Buy/Sell, shares)
    """
    # Positional indices must be ints; also guard against a zero/empty delay.
    sell_delay = int(float(sell_delay)) or 1
    for i, (t, row) in enumerate(events.iterrows()):
        for sym, event in row.to_dict().items():
            if event and not np.isnan(event):
                # For a BUY, schedule the offsetting SELL `sell_delay` rows in
                # the future (clamped to the last row) within `events` itself,
                # taking care not to clobber an event already present there.
                if event > 0:
                    sell_event_i = min(i + sell_delay, len(events) - 1)
                    sell_event_t = events.index[sell_event_i]
                    sell_event = events[sym].iloc[sell_event_i]
                    if np.isnan(sell_event):
                        events.loc[sell_event_t, sym] = -1
                    else:
                        events.loc[sell_event_t, sym] -= 1
                order = (t.year, t.month, t.day, sym,
                         'Buy' if event > 0 else 'Sell', abs(event) * 100)
                if isinstance(sep, str):
                    # str.join requires strings, so stringify each field.
                    yield sep.join(str(field) for field in order)
                else:
                    # No separator: hand back the raw tuple (see docstring).
                    yield order
def function[generate_orders, parameter[events, sell_delay, sep]]: constant[Generate CSV orders based on events indicated in a DataFrame Arguments: events (pandas.DataFrame): Table of NaNs or 1's, one column for each symbol. 1 indicates a BUY event. -1 a SELL event. nan or 0 is a nonevent. sell_delay (float): Number of days to wait before selling back the shares bought sep (str or None): if sep is None, orders will be returns as tuples of `int`s, `float`s, and `str`s otherwise the separator will be used to join the order parameters into the yielded str Returns: generator of str: yielded CSV rows in the format (yr, mo, day, symbol, Buy/Sell, shares) ] variable[sell_delay] assign[=] <ast.BoolOp object at 0x7da1b16144c0> for taget[tuple[[<ast.Name object at 0x7da1b1616800>, <ast.Tuple object at 0x7da1b1614580>]]] in starred[call[name[enumerate], parameter[call[name[events].iterrows, parameter[]]]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b16159c0>, <ast.Name object at 0x7da1b1616830>]]] in starred[call[call[name[row].to_dict, parameter[]].iteritems, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da1b1617100> begin[:] if compare[name[event] greater[>] constant[0]] begin[:] variable[sell_event_i] assign[=] call[name[min], parameter[binary_operation[name[i] + name[sell_delay]], binary_operation[call[name[len], parameter[name[events]]] - constant[1]]]] variable[sell_event_t] assign[=] call[name[events].index][name[sell_event_i]] variable[sell_event] assign[=] call[call[name[events]][name[sym]]][name[sell_event_i]] if call[name[np].isnan, parameter[name[sell_event]]] begin[:] call[call[name[events]][name[sym]]][name[sell_event_t]] assign[=] <ast.UnaryOp object at 0x7da1b1615270> variable[order] assign[=] tuple[[<ast.Attribute object at 0x7da1b16168c0>, <ast.Attribute object at 0x7da1b1614610>, <ast.Attribute object at 0x7da1b1617310>, <ast.Name object at 0x7da1b1616860>, <ast.IfExp object at 0x7da1b16174c0>, <ast.BinOp object at 0x7da1b1614e20>]] if 
call[name[isinstance], parameter[name[sep], name[basestring]]] begin[:] <ast.Yield object at 0x7da1b1614310> <ast.Yield object at 0x7da1b16167d0>
keyword[def] identifier[generate_orders] ( identifier[events] , identifier[sell_delay] = literal[int] , identifier[sep] = literal[string] ): literal[string] identifier[sell_delay] = identifier[float] ( identifier[unicode] ( identifier[sell_delay] )) keyword[or] literal[int] keyword[for] identifier[i] ,( identifier[t] , identifier[row] ) keyword[in] identifier[enumerate] ( identifier[events] . identifier[iterrows] ()): keyword[for] identifier[sym] , identifier[event] keyword[in] identifier[row] . identifier[to_dict] (). identifier[iteritems] (): keyword[if] identifier[event] keyword[and] keyword[not] identifier[np] . identifier[isnan] ( identifier[event] ): keyword[if] identifier[event] > literal[int] : identifier[sell_event_i] = identifier[min] ( identifier[i] + identifier[sell_delay] , identifier[len] ( identifier[events] )- literal[int] ) identifier[sell_event_t] = identifier[events] . identifier[index] [ identifier[sell_event_i] ] identifier[sell_event] = identifier[events] [ identifier[sym] ][ identifier[sell_event_i] ] keyword[if] identifier[np] . identifier[isnan] ( identifier[sell_event] ): identifier[events] [ identifier[sym] ][ identifier[sell_event_t] ]=- literal[int] keyword[else] : identifier[events] [ identifier[sym] ][ identifier[sell_event_t] ]+=- literal[int] identifier[order] =( identifier[t] . identifier[year] , identifier[t] . identifier[month] , identifier[t] . identifier[day] , identifier[sym] , literal[string] keyword[if] identifier[event] > literal[int] keyword[else] literal[string] , identifier[abs] ( identifier[event] )* literal[int] ) keyword[if] identifier[isinstance] ( identifier[sep] , identifier[basestring] ): keyword[yield] identifier[sep] . identifier[join] ( identifier[order] ) keyword[yield] identifier[order]
def generate_orders(events, sell_delay=5, sep=','): """Generate CSV orders based on events indicated in a DataFrame Arguments: events (pandas.DataFrame): Table of NaNs or 1's, one column for each symbol. 1 indicates a BUY event. -1 a SELL event. nan or 0 is a nonevent. sell_delay (float): Number of days to wait before selling back the shares bought sep (str or None): if sep is None, orders will be returns as tuples of `int`s, `float`s, and `str`s otherwise the separator will be used to join the order parameters into the yielded str Returns: generator of str: yielded CSV rows in the format (yr, mo, day, symbol, Buy/Sell, shares) """ sell_delay = float(unicode(sell_delay)) or 1 for (i, (t, row)) in enumerate(events.iterrows()): for (sym, event) in row.to_dict().iteritems(): # print sym, event, type(event) # return events if event and (not np.isnan(event)): # add a sell event `sell_delay` in the future within the existing `events` DataFrame # modify the series, but only in the future and be careful not to step on existing events if event > 0: sell_event_i = min(i + sell_delay, len(events) - 1) sell_event_t = events.index[sell_event_i] sell_event = events[sym][sell_event_i] if np.isnan(sell_event): events[sym][sell_event_t] = -1 # depends on [control=['if'], data=[]] else: events[sym][sell_event_t] += -1 # depends on [control=['if'], data=[]] order = (t.year, t.month, t.day, sym, 'Buy' if event > 0 else 'Sell', abs(event) * 100) if isinstance(sep, basestring): yield sep.join(order) # depends on [control=['if'], data=[]] yield order # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
def gps_input_encode(self, time_usec, gps_id, ignore_flags, time_week_ms, time_week, fix_type, lat, lon, alt, hdop, vdop, vn, ve, vd, speed_accuracy, horiz_accuracy, vert_accuracy, satellites_visible):
        '''
        GPS sensor input message.  This is a raw sensor value sent by
        the GPS. This is NOT the global position estimate of the
        sytem.

        time_usec                 : Timestamp (micros since boot or Unix epoch) (uint64_t)
        gps_id                    : ID of the GPS for multiple GPS inputs (uint8_t)
        ignore_flags              : Flags indicating which fields to ignore (see GPS_INPUT_IGNORE_FLAGS enum).  All other fields must be provided. (uint16_t)
        time_week_ms              : GPS time (milliseconds from start of GPS week) (uint32_t)
        time_week                 : GPS week number (uint16_t)
        fix_type                  : 0-1: no fix, 2: 2D fix, 3: 3D fix. 4: 3D with DGPS. 5: 3D with RTK (uint8_t)
        lat                       : Latitude (WGS84), in degrees * 1E7 (int32_t)
        lon                       : Longitude (WGS84), in degrees * 1E7 (int32_t)
        alt                       : Altitude (AMSL, not WGS84), in m (positive for up) (float)
        hdop                      : GPS HDOP horizontal dilution of position in m (float)
        vdop                      : GPS VDOP vertical dilution of position in m (float)
        vn                        : GPS velocity in m/s in NORTH direction in earth-fixed NED frame (float)
        ve                        : GPS velocity in m/s in EAST direction in earth-fixed NED frame (float)
        vd                        : GPS velocity in m/s in DOWN direction in earth-fixed NED frame (float)
        speed_accuracy            : GPS speed accuracy in m/s (float)
        horiz_accuracy            : GPS horizontal accuracy in m (float)
        vert_accuracy             : GPS vertical accuracy in m (float)
        satellites_visible        : Number of satellites visible. (uint8_t)

        '''
        # Pure constructor wrapper: arguments map 1:1, in order, onto the
        # message fields; no packing or sending happens here.
        return MAVLink_gps_input_message(time_usec, gps_id, ignore_flags, time_week_ms, time_week, fix_type, lat, lon, alt, hdop, vdop, vn, ve, vd, speed_accuracy, horiz_accuracy, vert_accuracy, satellites_visible)
def function[gps_input_encode, parameter[self, time_usec, gps_id, ignore_flags, time_week_ms, time_week, fix_type, lat, lon, alt, hdop, vdop, vn, ve, vd, speed_accuracy, horiz_accuracy, vert_accuracy, satellites_visible]]: constant[ GPS sensor input message. This is a raw sensor value sent by the GPS. This is NOT the global position estimate of the sytem. time_usec : Timestamp (micros since boot or Unix epoch) (uint64_t) gps_id : ID of the GPS for multiple GPS inputs (uint8_t) ignore_flags : Flags indicating which fields to ignore (see GPS_INPUT_IGNORE_FLAGS enum). All other fields must be provided. (uint16_t) time_week_ms : GPS time (milliseconds from start of GPS week) (uint32_t) time_week : GPS week number (uint16_t) fix_type : 0-1: no fix, 2: 2D fix, 3: 3D fix. 4: 3D with DGPS. 5: 3D with RTK (uint8_t) lat : Latitude (WGS84), in degrees * 1E7 (int32_t) lon : Longitude (WGS84), in degrees * 1E7 (int32_t) alt : Altitude (AMSL, not WGS84), in m (positive for up) (float) hdop : GPS HDOP horizontal dilution of position in m (float) vdop : GPS VDOP vertical dilution of position in m (float) vn : GPS velocity in m/s in NORTH direction in earth-fixed NED frame (float) ve : GPS velocity in m/s in EAST direction in earth-fixed NED frame (float) vd : GPS velocity in m/s in DOWN direction in earth-fixed NED frame (float) speed_accuracy : GPS speed accuracy in m/s (float) horiz_accuracy : GPS horizontal accuracy in m (float) vert_accuracy : GPS vertical accuracy in m (float) satellites_visible : Number of satellites visible. (uint8_t) ] return[call[name[MAVLink_gps_input_message], parameter[name[time_usec], name[gps_id], name[ignore_flags], name[time_week_ms], name[time_week], name[fix_type], name[lat], name[lon], name[alt], name[hdop], name[vdop], name[vn], name[ve], name[vd], name[speed_accuracy], name[horiz_accuracy], name[vert_accuracy], name[satellites_visible]]]]
keyword[def] identifier[gps_input_encode] ( identifier[self] , identifier[time_usec] , identifier[gps_id] , identifier[ignore_flags] , identifier[time_week_ms] , identifier[time_week] , identifier[fix_type] , identifier[lat] , identifier[lon] , identifier[alt] , identifier[hdop] , identifier[vdop] , identifier[vn] , identifier[ve] , identifier[vd] , identifier[speed_accuracy] , identifier[horiz_accuracy] , identifier[vert_accuracy] , identifier[satellites_visible] ): literal[string] keyword[return] identifier[MAVLink_gps_input_message] ( identifier[time_usec] , identifier[gps_id] , identifier[ignore_flags] , identifier[time_week_ms] , identifier[time_week] , identifier[fix_type] , identifier[lat] , identifier[lon] , identifier[alt] , identifier[hdop] , identifier[vdop] , identifier[vn] , identifier[ve] , identifier[vd] , identifier[speed_accuracy] , identifier[horiz_accuracy] , identifier[vert_accuracy] , identifier[satellites_visible] )
def gps_input_encode(self, time_usec, gps_id, ignore_flags, time_week_ms, time_week, fix_type, lat, lon, alt, hdop, vdop, vn, ve, vd, speed_accuracy, horiz_accuracy, vert_accuracy, satellites_visible): """ GPS sensor input message. This is a raw sensor value sent by the GPS. This is NOT the global position estimate of the sytem. time_usec : Timestamp (micros since boot or Unix epoch) (uint64_t) gps_id : ID of the GPS for multiple GPS inputs (uint8_t) ignore_flags : Flags indicating which fields to ignore (see GPS_INPUT_IGNORE_FLAGS enum). All other fields must be provided. (uint16_t) time_week_ms : GPS time (milliseconds from start of GPS week) (uint32_t) time_week : GPS week number (uint16_t) fix_type : 0-1: no fix, 2: 2D fix, 3: 3D fix. 4: 3D with DGPS. 5: 3D with RTK (uint8_t) lat : Latitude (WGS84), in degrees * 1E7 (int32_t) lon : Longitude (WGS84), in degrees * 1E7 (int32_t) alt : Altitude (AMSL, not WGS84), in m (positive for up) (float) hdop : GPS HDOP horizontal dilution of position in m (float) vdop : GPS VDOP vertical dilution of position in m (float) vn : GPS velocity in m/s in NORTH direction in earth-fixed NED frame (float) ve : GPS velocity in m/s in EAST direction in earth-fixed NED frame (float) vd : GPS velocity in m/s in DOWN direction in earth-fixed NED frame (float) speed_accuracy : GPS speed accuracy in m/s (float) horiz_accuracy : GPS horizontal accuracy in m (float) vert_accuracy : GPS vertical accuracy in m (float) satellites_visible : Number of satellites visible. (uint8_t) """ return MAVLink_gps_input_message(time_usec, gps_id, ignore_flags, time_week_ms, time_week, fix_type, lat, lon, alt, hdop, vdop, vn, ve, vd, speed_accuracy, horiz_accuracy, vert_accuracy, satellites_visible)
def ilx_conv(graph, prefix, ilx_start):
    """ convert a set of temporary identifiers to ilx and modify the graph in place """
    namespace = PREFIXES[prefix]
    # Collect every owl:Class subject that lives in the temporary namespace.
    to_sub = {subject
              for subject in graph.subjects(rdflib.RDF.type, rdflib.OWL.Class)
              if namespace in subject}

    ilx_base = 'ilx_{:0>7}'
    ILX_base = 'ILX:{:0>7}'  # ah rdflib/owlapi, you infuriate me

    ilx_labels = {}
    replace = {}
    for sub in sorted(to_sub):
        ilx_format = ilx_base.format(ilx_start)
        ILX_format = ILX_base.format(ilx_start)
        ilx_start += 1
        # Record old-curie -> new-ILX-curie so callers can rewrite references.
        qname_prefix, url, suffix = graph.namespace_manager.compute_qname(sub)
        replace[qname_prefix + ':' + suffix] = ILX_format
        label = [o for o in graph.objects(sub, rdflib.RDFS.label)][0]
        ilx_labels[ilx_format] = label
        new_sub = expand('ilx:' + ilx_format)
        # Re-point every triple where the old subject appears, in either
        # position, at the new ILX identifier.
        for p, o in graph.predicate_objects(sub):
            graph.remove((sub, p, o))
            graph.add((new_sub, p, o))
        for s, p in graph.subject_predicates(sub):
            graph.remove((s, p, sub))
            graph.add((s, p, new_sub))

    return ilx_labels, replace
def function[ilx_conv, parameter[graph, prefix, ilx_start]]: constant[ convert a set of temporary identifiers to ilx and modify the graph in place ] variable[to_sub] assign[=] call[name[set], parameter[]] for taget[name[subject]] in starred[call[name[graph].subjects, parameter[name[rdflib].RDF.type, name[rdflib].OWL.Class]]] begin[:] if compare[call[name[PREFIXES]][name[prefix]] in name[subject]] begin[:] call[name[to_sub].add, parameter[name[subject]]] variable[ilx_base] assign[=] constant[ilx_{:0>7}] variable[ILX_base] assign[=] constant[ILX:{:0>7}] variable[ilx_labels] assign[=] dictionary[[], []] variable[replace] assign[=] dictionary[[], []] for taget[name[sub]] in starred[call[name[sorted], parameter[name[to_sub]]]] begin[:] variable[ilx_format] assign[=] call[name[ilx_base].format, parameter[name[ilx_start]]] variable[ILX_format] assign[=] call[name[ILX_base].format, parameter[name[ilx_start]]] <ast.AugAssign object at 0x7da1b1a47ee0> <ast.Tuple object at 0x7da1b1a453c0> assign[=] call[name[graph].namespace_manager.compute_qname, parameter[name[sub]]] variable[curie] assign[=] binary_operation[binary_operation[name[prefix] + constant[:]] + name[suffix]] call[name[replace]][name[curie]] assign[=] name[ILX_format] variable[label] assign[=] call[<ast.ListComp object at 0x7da1b1a45f90>][constant[0]] call[name[ilx_labels]][name[ilx_format]] assign[=] name[label] variable[new_sub] assign[=] call[name[expand], parameter[binary_operation[constant[ilx:] + name[ilx_format]]]] for taget[tuple[[<ast.Name object at 0x7da1b1a45570>, <ast.Name object at 0x7da1b1a47ac0>]]] in starred[call[name[graph].predicate_objects, parameter[name[sub]]]] begin[:] call[name[graph].remove, parameter[tuple[[<ast.Name object at 0x7da1b1a442b0>, <ast.Name object at 0x7da1b1a46950>, <ast.Name object at 0x7da1b1a459c0>]]]] call[name[graph].add, parameter[tuple[[<ast.Name object at 0x7da1b1a45db0>, <ast.Name object at 0x7da1b1a47850>, <ast.Name object at 0x7da1b1a45690>]]]] for 
taget[tuple[[<ast.Name object at 0x7da1b1a454b0>, <ast.Name object at 0x7da1b1a456f0>]]] in starred[call[name[graph].subject_predicates, parameter[name[sub]]]] begin[:] call[name[graph].remove, parameter[tuple[[<ast.Name object at 0x7da1b1a45630>, <ast.Name object at 0x7da1b1a45b10>, <ast.Name object at 0x7da1b1a44820>]]]] call[name[graph].add, parameter[tuple[[<ast.Name object at 0x7da1b1a448e0>, <ast.Name object at 0x7da1b1a47d30>, <ast.Name object at 0x7da1b1a466b0>]]]] return[tuple[[<ast.Name object at 0x7da1b1a444f0>, <ast.Name object at 0x7da1b1a44490>]]]
keyword[def] identifier[ilx_conv] ( identifier[graph] , identifier[prefix] , identifier[ilx_start] ): literal[string] identifier[to_sub] = identifier[set] () keyword[for] identifier[subject] keyword[in] identifier[graph] . identifier[subjects] ( identifier[rdflib] . identifier[RDF] . identifier[type] , identifier[rdflib] . identifier[OWL] . identifier[Class] ): keyword[if] identifier[PREFIXES] [ identifier[prefix] ] keyword[in] identifier[subject] : identifier[to_sub] . identifier[add] ( identifier[subject] ) identifier[ilx_base] = literal[string] identifier[ILX_base] = literal[string] identifier[ilx_labels] ={} identifier[replace] ={} keyword[for] identifier[sub] keyword[in] identifier[sorted] ( identifier[to_sub] ): identifier[ilx_format] = identifier[ilx_base] . identifier[format] ( identifier[ilx_start] ) identifier[ILX_format] = identifier[ILX_base] . identifier[format] ( identifier[ilx_start] ) identifier[ilx_start] += literal[int] identifier[prefix] , identifier[url] , identifier[suffix] = identifier[graph] . identifier[namespace_manager] . identifier[compute_qname] ( identifier[sub] ) identifier[curie] = identifier[prefix] + literal[string] + identifier[suffix] identifier[replace] [ identifier[curie] ]= identifier[ILX_format] identifier[label] =[ identifier[_] keyword[for] identifier[_] keyword[in] identifier[graph] . identifier[objects] ( identifier[sub] , identifier[rdflib] . identifier[RDFS] . identifier[label] )][ literal[int] ] identifier[ilx_labels] [ identifier[ilx_format] ]= identifier[label] identifier[new_sub] = identifier[expand] ( literal[string] + identifier[ilx_format] ) keyword[for] identifier[p] , identifier[o] keyword[in] identifier[graph] . identifier[predicate_objects] ( identifier[sub] ): identifier[graph] . identifier[remove] (( identifier[sub] , identifier[p] , identifier[o] )) identifier[graph] . 
identifier[add] (( identifier[new_sub] , identifier[p] , identifier[o] )) keyword[for] identifier[s] , identifier[p] keyword[in] identifier[graph] . identifier[subject_predicates] ( identifier[sub] ): identifier[graph] . identifier[remove] (( identifier[s] , identifier[p] , identifier[sub] )) identifier[graph] . identifier[add] (( identifier[s] , identifier[p] , identifier[new_sub] )) keyword[return] identifier[ilx_labels] , identifier[replace]
def ilx_conv(graph, prefix, ilx_start): """ convert a set of temporary identifiers to ilx and modify the graph in place """ to_sub = set() for subject in graph.subjects(rdflib.RDF.type, rdflib.OWL.Class): if PREFIXES[prefix] in subject: to_sub.add(subject) # depends on [control=['if'], data=['subject']] # depends on [control=['for'], data=['subject']] ilx_base = 'ilx_{:0>7}' ILX_base = 'ILX:{:0>7}' # ah rdflib/owlapi, you infuriate me ilx_labels = {} replace = {} for sub in sorted(to_sub): ilx_format = ilx_base.format(ilx_start) ILX_format = ILX_base.format(ilx_start) ilx_start += 1 (prefix, url, suffix) = graph.namespace_manager.compute_qname(sub) curie = prefix + ':' + suffix replace[curie] = ILX_format label = [_ for _ in graph.objects(sub, rdflib.RDFS.label)][0] ilx_labels[ilx_format] = label new_sub = expand('ilx:' + ilx_format) for (p, o) in graph.predicate_objects(sub): graph.remove((sub, p, o)) graph.add((new_sub, p, o)) # depends on [control=['for'], data=[]] for (s, p) in graph.subject_predicates(sub): graph.remove((s, p, sub)) graph.add((s, p, new_sub)) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['sub']] return (ilx_labels, replace)
def _get_input_data(self, var, start_date, end_date): """Get the data for a single variable over the desired date range.""" logging.info(self._print_verbose("Getting input data:", var)) if isinstance(var, (float, int)): return var else: cond_pfull = ((not hasattr(self, internal_names.PFULL_STR)) and var.def_vert and self.dtype_in_vert == internal_names.ETA_STR) data = self.data_loader.recursively_compute_variable( var, start_date, end_date, self.time_offset, self.model, **self.data_loader_attrs) name = data.name data = self._add_grid_attributes(data.to_dataset(name=data.name)) data = data[name] if cond_pfull: try: self.pfull_coord = data[internal_names.PFULL_STR] except KeyError: pass # Force all data to be at full pressure levels, not half levels. bool_to_pfull = (self.dtype_in_vert == internal_names.ETA_STR and var.def_vert == internal_names.PHALF_STR) if bool_to_pfull: data = utils.vertcoord.to_pfull_from_phalf(data, self.pfull_coord) if var.def_time: # Restrict to the desired dates within each year. if self.dtype_in_time != 'av': return self._to_desired_dates(data) else: return data
def function[_get_input_data, parameter[self, var, start_date, end_date]]: constant[Get the data for a single variable over the desired date range.] call[name[logging].info, parameter[call[name[self]._print_verbose, parameter[constant[Getting input data:], name[var]]]]] if call[name[isinstance], parameter[name[var], tuple[[<ast.Name object at 0x7da1b0669210>, <ast.Name object at 0x7da1b0669330>]]]] begin[:] return[name[var]] if name[var].def_time begin[:] if compare[name[self].dtype_in_time not_equal[!=] constant[av]] begin[:] return[call[name[self]._to_desired_dates, parameter[name[data]]]]
keyword[def] identifier[_get_input_data] ( identifier[self] , identifier[var] , identifier[start_date] , identifier[end_date] ): literal[string] identifier[logging] . identifier[info] ( identifier[self] . identifier[_print_verbose] ( literal[string] , identifier[var] )) keyword[if] identifier[isinstance] ( identifier[var] ,( identifier[float] , identifier[int] )): keyword[return] identifier[var] keyword[else] : identifier[cond_pfull] =(( keyword[not] identifier[hasattr] ( identifier[self] , identifier[internal_names] . identifier[PFULL_STR] )) keyword[and] identifier[var] . identifier[def_vert] keyword[and] identifier[self] . identifier[dtype_in_vert] == identifier[internal_names] . identifier[ETA_STR] ) identifier[data] = identifier[self] . identifier[data_loader] . identifier[recursively_compute_variable] ( identifier[var] , identifier[start_date] , identifier[end_date] , identifier[self] . identifier[time_offset] , identifier[self] . identifier[model] , ** identifier[self] . identifier[data_loader_attrs] ) identifier[name] = identifier[data] . identifier[name] identifier[data] = identifier[self] . identifier[_add_grid_attributes] ( identifier[data] . identifier[to_dataset] ( identifier[name] = identifier[data] . identifier[name] )) identifier[data] = identifier[data] [ identifier[name] ] keyword[if] identifier[cond_pfull] : keyword[try] : identifier[self] . identifier[pfull_coord] = identifier[data] [ identifier[internal_names] . identifier[PFULL_STR] ] keyword[except] identifier[KeyError] : keyword[pass] identifier[bool_to_pfull] =( identifier[self] . identifier[dtype_in_vert] == identifier[internal_names] . identifier[ETA_STR] keyword[and] identifier[var] . identifier[def_vert] == identifier[internal_names] . identifier[PHALF_STR] ) keyword[if] identifier[bool_to_pfull] : identifier[data] = identifier[utils] . identifier[vertcoord] . identifier[to_pfull_from_phalf] ( identifier[data] , identifier[self] . identifier[pfull_coord] ) keyword[if] identifier[var] . 
identifier[def_time] : keyword[if] identifier[self] . identifier[dtype_in_time] != literal[string] : keyword[return] identifier[self] . identifier[_to_desired_dates] ( identifier[data] ) keyword[else] : keyword[return] identifier[data]
def _get_input_data(self, var, start_date, end_date): """Get the data for a single variable over the desired date range.""" logging.info(self._print_verbose('Getting input data:', var)) if isinstance(var, (float, int)): return var # depends on [control=['if'], data=[]] else: cond_pfull = not hasattr(self, internal_names.PFULL_STR) and var.def_vert and (self.dtype_in_vert == internal_names.ETA_STR) data = self.data_loader.recursively_compute_variable(var, start_date, end_date, self.time_offset, self.model, **self.data_loader_attrs) name = data.name data = self._add_grid_attributes(data.to_dataset(name=data.name)) data = data[name] if cond_pfull: try: self.pfull_coord = data[internal_names.PFULL_STR] # depends on [control=['try'], data=[]] except KeyError: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # Force all data to be at full pressure levels, not half levels. bool_to_pfull = self.dtype_in_vert == internal_names.ETA_STR and var.def_vert == internal_names.PHALF_STR if bool_to_pfull: data = utils.vertcoord.to_pfull_from_phalf(data, self.pfull_coord) # depends on [control=['if'], data=[]] if var.def_time: # Restrict to the desired dates within each year. if self.dtype_in_time != 'av': return self._to_desired_dates(data) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: return data
def _head(self, client_kwargs): """ Returns object HTTP header. Args: client_kwargs (dict): Client arguments. Returns: dict: HTTP header. """ with _handle_client_exception(): # Object if 'obj' in client_kwargs: return self.client.head_object(**client_kwargs) # Container return self.client.head_container(**client_kwargs)
def function[_head, parameter[self, client_kwargs]]: constant[ Returns object HTTP header. Args: client_kwargs (dict): Client arguments. Returns: dict: HTTP header. ] with call[name[_handle_client_exception], parameter[]] begin[:] if compare[constant[obj] in name[client_kwargs]] begin[:] return[call[name[self].client.head_object, parameter[]]] return[call[name[self].client.head_container, parameter[]]]
keyword[def] identifier[_head] ( identifier[self] , identifier[client_kwargs] ): literal[string] keyword[with] identifier[_handle_client_exception] (): keyword[if] literal[string] keyword[in] identifier[client_kwargs] : keyword[return] identifier[self] . identifier[client] . identifier[head_object] (** identifier[client_kwargs] ) keyword[return] identifier[self] . identifier[client] . identifier[head_container] (** identifier[client_kwargs] )
def _head(self, client_kwargs): """ Returns object HTTP header. Args: client_kwargs (dict): Client arguments. Returns: dict: HTTP header. """ with _handle_client_exception(): # Object if 'obj' in client_kwargs: return self.client.head_object(**client_kwargs) # depends on [control=['if'], data=['client_kwargs']] # Container return self.client.head_container(**client_kwargs) # depends on [control=['with'], data=[]]
def validateLogicalInterfaceConfiguration(self, logicalInterfaceId): """ Validate the logical interface configuration. Parameters: - logicalInterfaceId (string) Throws APIException on failure. """ req = ApiClient.oneLogicalInterfaceUrl % (self.host, "/draft", logicalInterfaceId) body = {"operation" : "validate-configuration"} resp = requests.patch(req, auth=self.credentials, headers={"Content-Type":"application/json"}, data=json.dumps(body), verify=self.verify) if resp.status_code == 200: self.logger.debug("Validation for logical interface configuration succeeded") else: raise ibmiotf.APIException(resp.status_code, "Validation for logical interface configuration failed", resp) return resp.json()
def function[validateLogicalInterfaceConfiguration, parameter[self, logicalInterfaceId]]: constant[ Validate the logical interface configuration. Parameters: - logicalInterfaceId (string) Throws APIException on failure. ] variable[req] assign[=] binary_operation[name[ApiClient].oneLogicalInterfaceUrl <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c6e4e50>, <ast.Constant object at 0x7da20cabe050>, <ast.Name object at 0x7da1b05cab90>]]] variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da1b05cafb0>], [<ast.Constant object at 0x7da1b05c98a0>]] variable[resp] assign[=] call[name[requests].patch, parameter[name[req]]] if compare[name[resp].status_code equal[==] constant[200]] begin[:] call[name[self].logger.debug, parameter[constant[Validation for logical interface configuration succeeded]]] return[call[name[resp].json, parameter[]]]
keyword[def] identifier[validateLogicalInterfaceConfiguration] ( identifier[self] , identifier[logicalInterfaceId] ): literal[string] identifier[req] = identifier[ApiClient] . identifier[oneLogicalInterfaceUrl] %( identifier[self] . identifier[host] , literal[string] , identifier[logicalInterfaceId] ) identifier[body] ={ literal[string] : literal[string] } identifier[resp] = identifier[requests] . identifier[patch] ( identifier[req] , identifier[auth] = identifier[self] . identifier[credentials] , identifier[headers] ={ literal[string] : literal[string] }, identifier[data] = identifier[json] . identifier[dumps] ( identifier[body] ), identifier[verify] = identifier[self] . identifier[verify] ) keyword[if] identifier[resp] . identifier[status_code] == literal[int] : identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ) keyword[else] : keyword[raise] identifier[ibmiotf] . identifier[APIException] ( identifier[resp] . identifier[status_code] , literal[string] , identifier[resp] ) keyword[return] identifier[resp] . identifier[json] ()
def validateLogicalInterfaceConfiguration(self, logicalInterfaceId): """ Validate the logical interface configuration. Parameters: - logicalInterfaceId (string) Throws APIException on failure. """ req = ApiClient.oneLogicalInterfaceUrl % (self.host, '/draft', logicalInterfaceId) body = {'operation': 'validate-configuration'} resp = requests.patch(req, auth=self.credentials, headers={'Content-Type': 'application/json'}, data=json.dumps(body), verify=self.verify) if resp.status_code == 200: self.logger.debug('Validation for logical interface configuration succeeded') # depends on [control=['if'], data=[]] else: raise ibmiotf.APIException(resp.status_code, 'Validation for logical interface configuration failed', resp) return resp.json()
def are_in(items, collection): """Return True for each item in the collection :param items: a sub-collection :param collection: a collection :returns: a list of booleans >>> are_in(['Terry', 'James'], ['Terry', 'John', 'Eric']) [True, False] """ if not isinstance(items, (list, tuple)): items = (items, ) return map(lambda x: x in collection, items)
def function[are_in, parameter[items, collection]]: constant[Return True for each item in the collection :param items: a sub-collection :param collection: a collection :returns: a list of booleans >>> are_in(['Terry', 'James'], ['Terry', 'John', 'Eric']) [True, False] ] if <ast.UnaryOp object at 0x7da18bc73970> begin[:] variable[items] assign[=] tuple[[<ast.Name object at 0x7da18bc72440>]] return[call[name[map], parameter[<ast.Lambda object at 0x7da18bc70100>, name[items]]]]
keyword[def] identifier[are_in] ( identifier[items] , identifier[collection] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[items] ,( identifier[list] , identifier[tuple] )): identifier[items] =( identifier[items] ,) keyword[return] identifier[map] ( keyword[lambda] identifier[x] : identifier[x] keyword[in] identifier[collection] , identifier[items] )
def are_in(items, collection): """Return True for each item in the collection :param items: a sub-collection :param collection: a collection :returns: a list of booleans >>> are_in(['Terry', 'James'], ['Terry', 'John', 'Eric']) [True, False] """ if not isinstance(items, (list, tuple)): items = (items,) # depends on [control=['if'], data=[]] return map(lambda x: x in collection, items)
def custom_add_user_view(request): ''' The page to add a new user. ''' page_name = "Admin - Add User" add_user_form = AddUserForm(request.POST or None, initial={ 'status': UserProfile.RESIDENT, }) if add_user_form.is_valid(): add_user_form.save() message = MESSAGES['USER_ADDED'].format( username=add_user_form.cleaned_data["username"]) messages.add_message(request, messages.SUCCESS, message) return HttpResponseRedirect(reverse('custom_add_user')) return render_to_response('custom_add_user.html', { 'page_name': page_name, 'add_user_form': add_user_form, 'members': User.objects.all().exclude(username=ANONYMOUS_USERNAME), }, context_instance=RequestContext(request))
def function[custom_add_user_view, parameter[request]]: constant[ The page to add a new user. ] variable[page_name] assign[=] constant[Admin - Add User] variable[add_user_form] assign[=] call[name[AddUserForm], parameter[<ast.BoolOp object at 0x7da18f721b40>]] if call[name[add_user_form].is_valid, parameter[]] begin[:] call[name[add_user_form].save, parameter[]] variable[message] assign[=] call[call[name[MESSAGES]][constant[USER_ADDED]].format, parameter[]] call[name[messages].add_message, parameter[name[request], name[messages].SUCCESS, name[message]]] return[call[name[HttpResponseRedirect], parameter[call[name[reverse], parameter[constant[custom_add_user]]]]]] return[call[name[render_to_response], parameter[constant[custom_add_user.html], dictionary[[<ast.Constant object at 0x7da18bcc90f0>, <ast.Constant object at 0x7da18bccbd60>, <ast.Constant object at 0x7da18bcc88b0>], [<ast.Name object at 0x7da18bcc85e0>, <ast.Name object at 0x7da18bccb220>, <ast.Call object at 0x7da18bcc9c30>]]]]]
keyword[def] identifier[custom_add_user_view] ( identifier[request] ): literal[string] identifier[page_name] = literal[string] identifier[add_user_form] = identifier[AddUserForm] ( identifier[request] . identifier[POST] keyword[or] keyword[None] , identifier[initial] ={ literal[string] : identifier[UserProfile] . identifier[RESIDENT] , }) keyword[if] identifier[add_user_form] . identifier[is_valid] (): identifier[add_user_form] . identifier[save] () identifier[message] = identifier[MESSAGES] [ literal[string] ]. identifier[format] ( identifier[username] = identifier[add_user_form] . identifier[cleaned_data] [ literal[string] ]) identifier[messages] . identifier[add_message] ( identifier[request] , identifier[messages] . identifier[SUCCESS] , identifier[message] ) keyword[return] identifier[HttpResponseRedirect] ( identifier[reverse] ( literal[string] )) keyword[return] identifier[render_to_response] ( literal[string] ,{ literal[string] : identifier[page_name] , literal[string] : identifier[add_user_form] , literal[string] : identifier[User] . identifier[objects] . identifier[all] (). identifier[exclude] ( identifier[username] = identifier[ANONYMOUS_USERNAME] ), }, identifier[context_instance] = identifier[RequestContext] ( identifier[request] ))
def custom_add_user_view(request): """ The page to add a new user. """ page_name = 'Admin - Add User' add_user_form = AddUserForm(request.POST or None, initial={'status': UserProfile.RESIDENT}) if add_user_form.is_valid(): add_user_form.save() message = MESSAGES['USER_ADDED'].format(username=add_user_form.cleaned_data['username']) messages.add_message(request, messages.SUCCESS, message) return HttpResponseRedirect(reverse('custom_add_user')) # depends on [control=['if'], data=[]] return render_to_response('custom_add_user.html', {'page_name': page_name, 'add_user_form': add_user_form, 'members': User.objects.all().exclude(username=ANONYMOUS_USERNAME)}, context_instance=RequestContext(request))
def dims_knight(self, move): '''Knight on the rim is dim''' if self.board.piece_type_at(move.from_square) == chess.KNIGHT: rim = SquareSet( chess.BB_RANK_1 | \ chess.BB_RANK_8 | \ chess.BB_FILE_A | \ chess.BB_FILE_H) return move.to_square in rim
def function[dims_knight, parameter[self, move]]: constant[Knight on the rim is dim] if compare[call[name[self].board.piece_type_at, parameter[name[move].from_square]] equal[==] name[chess].KNIGHT] begin[:] variable[rim] assign[=] call[name[SquareSet], parameter[binary_operation[binary_operation[binary_operation[name[chess].BB_RANK_1 <ast.BitOr object at 0x7da2590d6aa0> name[chess].BB_RANK_8] <ast.BitOr object at 0x7da2590d6aa0> name[chess].BB_FILE_A] <ast.BitOr object at 0x7da2590d6aa0> name[chess].BB_FILE_H]]] return[compare[name[move].to_square in name[rim]]]
keyword[def] identifier[dims_knight] ( identifier[self] , identifier[move] ): literal[string] keyword[if] identifier[self] . identifier[board] . identifier[piece_type_at] ( identifier[move] . identifier[from_square] )== identifier[chess] . identifier[KNIGHT] : identifier[rim] = identifier[SquareSet] ( identifier[chess] . identifier[BB_RANK_1] | identifier[chess] . identifier[BB_RANK_8] | identifier[chess] . identifier[BB_FILE_A] | identifier[chess] . identifier[BB_FILE_H] ) keyword[return] identifier[move] . identifier[to_square] keyword[in] identifier[rim]
def dims_knight(self, move): """Knight on the rim is dim""" if self.board.piece_type_at(move.from_square) == chess.KNIGHT: rim = SquareSet(chess.BB_RANK_1 | chess.BB_RANK_8 | chess.BB_FILE_A | chess.BB_FILE_H) return move.to_square in rim # depends on [control=['if'], data=[]]
def extend_access_token(self, app_id, app_secret): """ Extends the expiration time of a valid OAuth access token. See <https://developers.facebook.com/docs/facebook-login/access-tokens/ expiration-and-extension> """ args = { "client_id": app_id, "client_secret": app_secret, "grant_type": "fb_exchange_token", "fb_exchange_token": self.access_token, } return self.request( "{0}/oauth/access_token".format(self.version), args=args )
def function[extend_access_token, parameter[self, app_id, app_secret]]: constant[ Extends the expiration time of a valid OAuth access token. See <https://developers.facebook.com/docs/facebook-login/access-tokens/ expiration-and-extension> ] variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da207f004c0>, <ast.Constant object at 0x7da207f02aa0>, <ast.Constant object at 0x7da207f02b00>, <ast.Constant object at 0x7da207f00820>], [<ast.Name object at 0x7da207f00670>, <ast.Name object at 0x7da207f020b0>, <ast.Constant object at 0x7da207f00df0>, <ast.Attribute object at 0x7da207f03250>]] return[call[name[self].request, parameter[call[constant[{0}/oauth/access_token].format, parameter[name[self].version]]]]]
keyword[def] identifier[extend_access_token] ( identifier[self] , identifier[app_id] , identifier[app_secret] ): literal[string] identifier[args] ={ literal[string] : identifier[app_id] , literal[string] : identifier[app_secret] , literal[string] : literal[string] , literal[string] : identifier[self] . identifier[access_token] , } keyword[return] identifier[self] . identifier[request] ( literal[string] . identifier[format] ( identifier[self] . identifier[version] ), identifier[args] = identifier[args] )
def extend_access_token(self, app_id, app_secret): """ Extends the expiration time of a valid OAuth access token. See <https://developers.facebook.com/docs/facebook-login/access-tokens/ expiration-and-extension> """ args = {'client_id': app_id, 'client_secret': app_secret, 'grant_type': 'fb_exchange_token', 'fb_exchange_token': self.access_token} return self.request('{0}/oauth/access_token'.format(self.version), args=args)
def __convertLongToString(self, iValue): """convert a long hex integer to string remove '0x' and 'L' return string Args: iValue: long integer in hex format Returns: string of this long integer without "0x" and "L" """ string = '' strValue = str(hex(iValue)) string = strValue.lstrip('0x') string = string.rstrip('L') return string
def function[__convertLongToString, parameter[self, iValue]]: constant[convert a long hex integer to string remove '0x' and 'L' return string Args: iValue: long integer in hex format Returns: string of this long integer without "0x" and "L" ] variable[string] assign[=] constant[] variable[strValue] assign[=] call[name[str], parameter[call[name[hex], parameter[name[iValue]]]]] variable[string] assign[=] call[name[strValue].lstrip, parameter[constant[0x]]] variable[string] assign[=] call[name[string].rstrip, parameter[constant[L]]] return[name[string]]
keyword[def] identifier[__convertLongToString] ( identifier[self] , identifier[iValue] ): literal[string] identifier[string] = literal[string] identifier[strValue] = identifier[str] ( identifier[hex] ( identifier[iValue] )) identifier[string] = identifier[strValue] . identifier[lstrip] ( literal[string] ) identifier[string] = identifier[string] . identifier[rstrip] ( literal[string] ) keyword[return] identifier[string]
def __convertLongToString(self, iValue): """convert a long hex integer to string remove '0x' and 'L' return string Args: iValue: long integer in hex format Returns: string of this long integer without "0x" and "L" """ string = '' strValue = str(hex(iValue)) string = strValue.lstrip('0x') string = string.rstrip('L') return string
def is_all_field_none(self): """ :rtype: bool """ if self._target_url is not None: return False if self._category is not None: return False if self._event_type is not None: return False if self._object_ is not None: return False return True
def function[is_all_field_none, parameter[self]]: constant[ :rtype: bool ] if compare[name[self]._target_url is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._category is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._event_type is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._object_ is_not constant[None]] begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[is_all_field_none] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_target_url] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_category] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_event_type] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_object_] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[return] keyword[True]
def is_all_field_none(self): """ :rtype: bool """ if self._target_url is not None: return False # depends on [control=['if'], data=[]] if self._category is not None: return False # depends on [control=['if'], data=[]] if self._event_type is not None: return False # depends on [control=['if'], data=[]] if self._object_ is not None: return False # depends on [control=['if'], data=[]] return True
def continuous_binary_search(f, lo, hi, gap=1e-4): """Binary search for a function :param f: boolean monotone function with f(hi) = True :param int lo: :param int hi: with hi >= lo :param float gap: :returns: first value x in [lo,hi] such that f(x), x is computed up to some precision :complexity: `O(log((hi-lo)/gap))` """ while hi - lo > gap: # in other languages you can force floating division by using 2.0 mid = (lo + hi) / 2. if f(mid): hi = mid else: lo = mid return lo
def function[continuous_binary_search, parameter[f, lo, hi, gap]]: constant[Binary search for a function :param f: boolean monotone function with f(hi) = True :param int lo: :param int hi: with hi >= lo :param float gap: :returns: first value x in [lo,hi] such that f(x), x is computed up to some precision :complexity: `O(log((hi-lo)/gap))` ] while compare[binary_operation[name[hi] - name[lo]] greater[>] name[gap]] begin[:] variable[mid] assign[=] binary_operation[binary_operation[name[lo] + name[hi]] / constant[2.0]] if call[name[f], parameter[name[mid]]] begin[:] variable[hi] assign[=] name[mid] return[name[lo]]
keyword[def] identifier[continuous_binary_search] ( identifier[f] , identifier[lo] , identifier[hi] , identifier[gap] = literal[int] ): literal[string] keyword[while] identifier[hi] - identifier[lo] > identifier[gap] : identifier[mid] =( identifier[lo] + identifier[hi] )/ literal[int] keyword[if] identifier[f] ( identifier[mid] ): identifier[hi] = identifier[mid] keyword[else] : identifier[lo] = identifier[mid] keyword[return] identifier[lo]
def continuous_binary_search(f, lo, hi, gap=0.0001): """Binary search for a function :param f: boolean monotone function with f(hi) = True :param int lo: :param int hi: with hi >= lo :param float gap: :returns: first value x in [lo,hi] such that f(x), x is computed up to some precision :complexity: `O(log((hi-lo)/gap))` """ while hi - lo > gap: # in other languages you can force floating division by using 2.0 mid = (lo + hi) / 2.0 if f(mid): hi = mid # depends on [control=['if'], data=[]] else: lo = mid # depends on [control=['while'], data=[]] return lo
def save(self, cfg, filename, print_ir=False, format='dot', options=None): """Save basic block graph into a file. """ if options is None: options = {} try: dot_graph = Dot(**self.graph_format) # Add nodes. nodes = {} for bb in cfg.basic_blocks: nodes[bb.address] = self._create_node(bb, cfg.name, print_ir, options) dot_graph.add_node(nodes[bb.address]) # Add edges. for bb_src in cfg.basic_blocks: for bb_dst_addr, branch_type in bb_src.branches: if bb_dst_addr in nodes: edge = self._create_edge(nodes[bb_src.address], nodes[bb_dst_addr], branch_type) dot_graph.add_edge(edge) else: logger.warning("Destination basic block not found! (0x%x)", bb_dst_addr) # Save graph. dot_graph.write("{}.{}".format(filename, format), format=format) except Exception: logger.error("Failed to save basic block graph: %s (%s)", filename, format, exc_info=True)
def function[save, parameter[self, cfg, filename, print_ir, format, options]]: constant[Save basic block graph into a file. ] if compare[name[options] is constant[None]] begin[:] variable[options] assign[=] dictionary[[], []] <ast.Try object at 0x7da1b0926e90>
keyword[def] identifier[save] ( identifier[self] , identifier[cfg] , identifier[filename] , identifier[print_ir] = keyword[False] , identifier[format] = literal[string] , identifier[options] = keyword[None] ): literal[string] keyword[if] identifier[options] keyword[is] keyword[None] : identifier[options] ={} keyword[try] : identifier[dot_graph] = identifier[Dot] (** identifier[self] . identifier[graph_format] ) identifier[nodes] ={} keyword[for] identifier[bb] keyword[in] identifier[cfg] . identifier[basic_blocks] : identifier[nodes] [ identifier[bb] . identifier[address] ]= identifier[self] . identifier[_create_node] ( identifier[bb] , identifier[cfg] . identifier[name] , identifier[print_ir] , identifier[options] ) identifier[dot_graph] . identifier[add_node] ( identifier[nodes] [ identifier[bb] . identifier[address] ]) keyword[for] identifier[bb_src] keyword[in] identifier[cfg] . identifier[basic_blocks] : keyword[for] identifier[bb_dst_addr] , identifier[branch_type] keyword[in] identifier[bb_src] . identifier[branches] : keyword[if] identifier[bb_dst_addr] keyword[in] identifier[nodes] : identifier[edge] = identifier[self] . identifier[_create_edge] ( identifier[nodes] [ identifier[bb_src] . identifier[address] ], identifier[nodes] [ identifier[bb_dst_addr] ], identifier[branch_type] ) identifier[dot_graph] . identifier[add_edge] ( identifier[edge] ) keyword[else] : identifier[logger] . identifier[warning] ( literal[string] , identifier[bb_dst_addr] ) identifier[dot_graph] . identifier[write] ( literal[string] . identifier[format] ( identifier[filename] , identifier[format] ), identifier[format] = identifier[format] ) keyword[except] identifier[Exception] : identifier[logger] . identifier[error] ( literal[string] , identifier[filename] , identifier[format] , identifier[exc_info] = keyword[True] )
def save(self, cfg, filename, print_ir=False, format='dot', options=None): """Save basic block graph into a file. """ if options is None: options = {} # depends on [control=['if'], data=['options']] try: dot_graph = Dot(**self.graph_format) # Add nodes. nodes = {} for bb in cfg.basic_blocks: nodes[bb.address] = self._create_node(bb, cfg.name, print_ir, options) dot_graph.add_node(nodes[bb.address]) # depends on [control=['for'], data=['bb']] # Add edges. for bb_src in cfg.basic_blocks: for (bb_dst_addr, branch_type) in bb_src.branches: if bb_dst_addr in nodes: edge = self._create_edge(nodes[bb_src.address], nodes[bb_dst_addr], branch_type) dot_graph.add_edge(edge) # depends on [control=['if'], data=['bb_dst_addr', 'nodes']] else: logger.warning('Destination basic block not found! (0x%x)', bb_dst_addr) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['bb_src']] # Save graph. dot_graph.write('{}.{}'.format(filename, format), format=format) # depends on [control=['try'], data=[]] except Exception: logger.error('Failed to save basic block graph: %s (%s)', filename, format, exc_info=True) # depends on [control=['except'], data=[]]