code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def print_collection(self, c, h, n):
    """Render collection ``c`` one item per line at indent level ``n``.

    ``h`` is the history stack of collections already being printed; when
    ``c`` is found there the cycle marker ``"[]..."`` is returned instead of
    recursing forever.
    """
    if c in h:
        return "[]..."
    h.append(c)
    pieces = []
    for element in c:
        # newline, indentation, the rendered element, trailing comma
        pieces.extend(("\n", self.indent(n), self.process(element, h, n - 2), ","))
    h.pop()
    return "".join(pieces)
def function[print_collection, parameter[self, c, h, n]]: constant[Print collection using the specified indent (n) and newline (nl).] if compare[name[c] in name[h]] begin[:] return[constant[[]...]] call[name[h].append, parameter[name[c]]] variable[s] assign[=] list[[]] for taget[name[item]] in starred[name[c]] begin[:] call[name[s].append, parameter[constant[ ]]] call[name[s].append, parameter[call[name[self].indent, parameter[name[n]]]]] call[name[s].append, parameter[call[name[self].process, parameter[name[item], name[h], binary_operation[name[n] - constant[2]]]]]] call[name[s].append, parameter[constant[,]]] call[name[h].pop, parameter[]] return[call[constant[].join, parameter[name[s]]]]
keyword[def] identifier[print_collection] ( identifier[self] , identifier[c] , identifier[h] , identifier[n] ): literal[string] keyword[if] identifier[c] keyword[in] identifier[h] : keyword[return] literal[string] identifier[h] . identifier[append] ( identifier[c] ) identifier[s] =[] keyword[for] identifier[item] keyword[in] identifier[c] : identifier[s] . identifier[append] ( literal[string] ) identifier[s] . identifier[append] ( identifier[self] . identifier[indent] ( identifier[n] )) identifier[s] . identifier[append] ( identifier[self] . identifier[process] ( identifier[item] , identifier[h] , identifier[n] - literal[int] )) identifier[s] . identifier[append] ( literal[string] ) identifier[h] . identifier[pop] () keyword[return] literal[string] . identifier[join] ( identifier[s] )
def print_collection(self, c, h, n): """Print collection using the specified indent (n) and newline (nl).""" if c in h: return '[]...' # depends on [control=['if'], data=[]] h.append(c) s = [] for item in c: s.append('\n') s.append(self.indent(n)) s.append(self.process(item, h, n - 2)) s.append(',') # depends on [control=['for'], data=['item']] h.pop() return ''.join(s)
def coerce_author(value):
    """
    Coerce strings to :class:`Author` objects.

    :param value: A string or :class:`Author` object.
    :returns: An :class:`Author` object.
    :raises: :exc:`~exceptions.ValueError` when `value` isn't a string or
             :class:`Author` object.
    """
    # Pass Author objects through unchanged.
    if isinstance(value, Author):
        return value
    # Anything else must be a string.
    if not isinstance(value, string_types):
        raise ValueError(
            "Expected Author object or string as argument, got %s instead!"
            % type(value)
        )
    # Split out the `name <email>' convention.
    parsed = re.match('^(.+?) <(.+?)>$', value)
    if parsed is None:
        raise ValueError(
            "Provided author information isn't in 'name <email>' format! (%r)"
            % value
        )
    return Author(
        name=parsed.group(1).strip(),
        email=parsed.group(2).strip(),
    )
def function[coerce_author, parameter[value]]: constant[ Coerce strings to :class:`Author` objects. :param value: A string or :class:`Author` object. :returns: An :class:`Author` object. :raises: :exc:`~exceptions.ValueError` when `value` isn't a string or :class:`Author` object. ] if call[name[isinstance], parameter[name[value], name[Author]]] begin[:] return[name[value]] if <ast.UnaryOp object at 0x7da1b0aa4790> begin[:] variable[msg] assign[=] constant[Expected Author object or string as argument, got %s instead!] <ast.Raise object at 0x7da1b0aa5c90> variable[match] assign[=] call[name[re].match, parameter[constant[^(.+?) <(.+?)>$], name[value]]] if <ast.UnaryOp object at 0x7da1b0aa6980> begin[:] variable[msg] assign[=] constant[Provided author information isn't in 'name <email>' format! (%r)] <ast.Raise object at 0x7da1b0aa6f20> return[call[name[Author], parameter[]]]
keyword[def] identifier[coerce_author] ( identifier[value] ): literal[string] keyword[if] identifier[isinstance] ( identifier[value] , identifier[Author] ): keyword[return] identifier[value] keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[string_types] ): identifier[msg] = literal[string] keyword[raise] identifier[ValueError] ( identifier[msg] % identifier[type] ( identifier[value] )) identifier[match] = identifier[re] . identifier[match] ( literal[string] , identifier[value] ) keyword[if] keyword[not] identifier[match] : identifier[msg] = literal[string] keyword[raise] identifier[ValueError] ( identifier[msg] % identifier[value] ) keyword[return] identifier[Author] ( identifier[name] = identifier[match] . identifier[group] ( literal[int] ). identifier[strip] (), identifier[email] = identifier[match] . identifier[group] ( literal[int] ). identifier[strip] (), )
def coerce_author(value): """ Coerce strings to :class:`Author` objects. :param value: A string or :class:`Author` object. :returns: An :class:`Author` object. :raises: :exc:`~exceptions.ValueError` when `value` isn't a string or :class:`Author` object. """ # Author objects pass through untouched. if isinstance(value, Author): return value # depends on [control=['if'], data=[]] # In all other cases we expect a string. if not isinstance(value, string_types): msg = 'Expected Author object or string as argument, got %s instead!' raise ValueError(msg % type(value)) # depends on [control=['if'], data=[]] # Try to parse the `name <email>' format. match = re.match('^(.+?) <(.+?)>$', value) if not match: msg = "Provided author information isn't in 'name <email>' format! (%r)" raise ValueError(msg % value) # depends on [control=['if'], data=[]] return Author(name=match.group(1).strip(), email=match.group(2).strip())
def setDefaultOptions(config):
    """
    Set default options for builtin batch systems. This is required if a
    Config object is not constructed from an Options object.
    """
    defaults = {
        'batchSystem': 'singleMachine',
        'disableAutoDeployment': False,
        'environment': {},
        # If not set, will default to seconds in getWaitDuration().
        'statePollingWait': None,
        'maxLocalJobs': multiprocessing.cpu_count(),
        # Single machine.
        'manualMemArgs': False,
        'scale': 1,
        'linkImports': False,
        # Mesos.
        'mesosMasterAddress': '%s:5050' % getPublicIP(),
        # Parasol.
        'parasolCommand': 'parasol',
        'parasolMaxBatches': 10000,
    }
    for attribute, value in defaults.items():
        setattr(config, attribute, value)
def function[setDefaultOptions, parameter[config]]: constant[ Set default options for builtin batch systems. This is required if a Config object is not constructed from an Options object. ] name[config].batchSystem assign[=] constant[singleMachine] name[config].disableAutoDeployment assign[=] constant[False] name[config].environment assign[=] dictionary[[], []] name[config].statePollingWait assign[=] constant[None] name[config].maxLocalJobs assign[=] call[name[multiprocessing].cpu_count, parameter[]] name[config].manualMemArgs assign[=] constant[False] name[config].scale assign[=] constant[1] name[config].linkImports assign[=] constant[False] name[config].mesosMasterAddress assign[=] binary_operation[constant[%s:5050] <ast.Mod object at 0x7da2590d6920> call[name[getPublicIP], parameter[]]] name[config].parasolCommand assign[=] constant[parasol] name[config].parasolMaxBatches assign[=] constant[10000]
keyword[def] identifier[setDefaultOptions] ( identifier[config] ): literal[string] identifier[config] . identifier[batchSystem] = literal[string] identifier[config] . identifier[disableAutoDeployment] = keyword[False] identifier[config] . identifier[environment] ={} identifier[config] . identifier[statePollingWait] = keyword[None] identifier[config] . identifier[maxLocalJobs] = identifier[multiprocessing] . identifier[cpu_count] () identifier[config] . identifier[manualMemArgs] = keyword[False] identifier[config] . identifier[scale] = literal[int] identifier[config] . identifier[linkImports] = keyword[False] identifier[config] . identifier[mesosMasterAddress] = literal[string] % identifier[getPublicIP] () identifier[config] . identifier[parasolCommand] = literal[string] identifier[config] . identifier[parasolMaxBatches] = literal[int]
def setDefaultOptions(config): """ Set default options for builtin batch systems. This is required if a Config object is not constructed from an Options object. """ config.batchSystem = 'singleMachine' config.disableAutoDeployment = False config.environment = {} config.statePollingWait = None # if not set, will default to seconds in getWaitDuration() config.maxLocalJobs = multiprocessing.cpu_count() config.manualMemArgs = False # single machine config.scale = 1 config.linkImports = False # mesos config.mesosMasterAddress = '%s:5050' % getPublicIP() # parasol config.parasolCommand = 'parasol' config.parasolMaxBatches = 10000
def _GenerateNames(name, fromlist, globals): """Generates the names of modules that might be loaded via this import. Args: name: Argument as passed to the importer. fromlist: Argument as passed to the importer. globals: Argument as passed to the importer. Returns: A set that contains the names of all modules that are loaded by the currently executing import statement, as they would show up in sys.modules. The returned set may contain module names that were already loaded before the execution of this import statement. The returned set may contain names that are not real modules. """ def GetCurrentPackage(globals): """Finds the name of the package for the currently executing module.""" if not globals: return None # Get the name of the module/package that the current import is being # executed in. current = globals.get('__name__') if not current: return None # Check if the current module is really a module, or a package. current_file = globals.get('__file__') if not current_file: return None root = os.path.splitext(os.path.basename(current_file))[0] if root == '__init__': # The current import happened from a package. Return the package. return current else: # The current import happened from a module. Return the package that # contains the module. return current.rpartition('.')[0] # A Python module can be addressed in two ways: # 1. Using a path relative to the currently executing module's path. For # instance, module p1/p2/m3.py imports p1/p2/p3/m4.py using 'import p3.m4'. # 2. Using a path relative to sys.path. For instance, module p1/p2/m3.py # imports p1/p2/p3/m4.py using 'import p1.p2.p3.m4'. # # The Python importer uses the 'globals' argument to identify the module that # the current import is being performed in. The actual logic is very # complicated, and we only approximate it here to limit the performance # overhead (See import.c in the interpreter for details). Here, we only use # the value of the globals['__name__'] for this purpose. 
# # Note: The Python importer prioritizes the current package over sys.path. For # instance, if 'p1.p2.m3' imports 'm4', then 'p1.p2.m4' is a better match than # the top level 'm4'. However, the debugger does not have to implement this, # because breakpoint paths are not described relative to some other file. They # are always assumed to be relative to the sys.path directories. If the user # sets breakpoint inside 'm4.py', then we can map it to either the top level # 'm4' or 'p1.p2.m4', i.e., both are valid matches. curpkg = GetCurrentPackage(globals) names = set() # A Python module can be imported using two syntaxes: # 1. import p1.p2.m3 # 2. from p1.p2 import m3 # # When the regular 'import p1.p2.m3' syntax is used, the name of the module # being imported is passed in the 'name' argument (e.g., name='p1.p2.m3', # fromlist=None). # # When the from-import syntax is used, then fromlist contains the leaf names # of the modules, and name contains the containing package. For instance, if # name='a.b', fromlist=['c', 'd'], then we add ['a.b.c', 'a.b.d']. # # Corner cases: # 1. The fromlist syntax can be used to import a function from a module. # For instance, 'from p1.p2.m3 import func'. # 2. Sometimes, the importer is passed a dummy fromlist=['__doc__'] (see # import.c in the interpreter for details). # Due to these corner cases, the returned set may contain entries that are not # names of real modules. for from_entry in fromlist or []: # Name relative to sys.path. # For relative imports such as 'from . import x', name will be the empty # string. Thus we should not prepend a '.' to the entry. entry = (name + '.' + from_entry) if name else from_entry names.add(entry) # Name relative to the currently executing module's package. if curpkg: names.add(curpkg + '.' + entry) # Generate all names from name. For instance, if name='a.b.c', then # we need to add ['a.b.c', 'a.b', 'a']. while name: # Name relative to sys.path. 
names.add(name) # Name relative to currently executing module's package. if curpkg: names.add(curpkg + '.' + name) name = name.rpartition('.')[0] return names
def function[_GenerateNames, parameter[name, fromlist, globals]]: constant[Generates the names of modules that might be loaded via this import. Args: name: Argument as passed to the importer. fromlist: Argument as passed to the importer. globals: Argument as passed to the importer. Returns: A set that contains the names of all modules that are loaded by the currently executing import statement, as they would show up in sys.modules. The returned set may contain module names that were already loaded before the execution of this import statement. The returned set may contain names that are not real modules. ] def function[GetCurrentPackage, parameter[globals]]: constant[Finds the name of the package for the currently executing module.] if <ast.UnaryOp object at 0x7da1b1e98640> begin[:] return[constant[None]] variable[current] assign[=] call[name[globals].get, parameter[constant[__name__]]] if <ast.UnaryOp object at 0x7da1b1e98850> begin[:] return[constant[None]] variable[current_file] assign[=] call[name[globals].get, parameter[constant[__file__]]] if <ast.UnaryOp object at 0x7da1b1e98af0> begin[:] return[constant[None]] variable[root] assign[=] call[call[name[os].path.splitext, parameter[call[name[os].path.basename, parameter[name[current_file]]]]]][constant[0]] if compare[name[root] equal[==] constant[__init__]] begin[:] return[name[current]] variable[curpkg] assign[=] call[name[GetCurrentPackage], parameter[name[globals]]] variable[names] assign[=] call[name[set], parameter[]] for taget[name[from_entry]] in starred[<ast.BoolOp object at 0x7da1b1e99ae0>] begin[:] variable[entry] assign[=] <ast.IfExp object at 0x7da1b1e9a7d0> call[name[names].add, parameter[name[entry]]] if name[curpkg] begin[:] call[name[names].add, parameter[binary_operation[binary_operation[name[curpkg] + constant[.]] + name[entry]]]] while name[name] begin[:] call[name[names].add, parameter[name[name]]] if name[curpkg] begin[:] call[name[names].add, 
parameter[binary_operation[binary_operation[name[curpkg] + constant[.]] + name[name]]]] variable[name] assign[=] call[call[name[name].rpartition, parameter[constant[.]]]][constant[0]] return[name[names]]
keyword[def] identifier[_GenerateNames] ( identifier[name] , identifier[fromlist] , identifier[globals] ): literal[string] keyword[def] identifier[GetCurrentPackage] ( identifier[globals] ): literal[string] keyword[if] keyword[not] identifier[globals] : keyword[return] keyword[None] identifier[current] = identifier[globals] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[current] : keyword[return] keyword[None] identifier[current_file] = identifier[globals] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[current_file] : keyword[return] keyword[None] identifier[root] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[current_file] ))[ literal[int] ] keyword[if] identifier[root] == literal[string] : keyword[return] identifier[current] keyword[else] : keyword[return] identifier[current] . identifier[rpartition] ( literal[string] )[ literal[int] ] identifier[curpkg] = identifier[GetCurrentPackage] ( identifier[globals] ) identifier[names] = identifier[set] () keyword[for] identifier[from_entry] keyword[in] identifier[fromlist] keyword[or] []: identifier[entry] =( identifier[name] + literal[string] + identifier[from_entry] ) keyword[if] identifier[name] keyword[else] identifier[from_entry] identifier[names] . identifier[add] ( identifier[entry] ) keyword[if] identifier[curpkg] : identifier[names] . identifier[add] ( identifier[curpkg] + literal[string] + identifier[entry] ) keyword[while] identifier[name] : identifier[names] . identifier[add] ( identifier[name] ) keyword[if] identifier[curpkg] : identifier[names] . identifier[add] ( identifier[curpkg] + literal[string] + identifier[name] ) identifier[name] = identifier[name] . identifier[rpartition] ( literal[string] )[ literal[int] ] keyword[return] identifier[names]
def _GenerateNames(name, fromlist, globals): """Generates the names of modules that might be loaded via this import. Args: name: Argument as passed to the importer. fromlist: Argument as passed to the importer. globals: Argument as passed to the importer. Returns: A set that contains the names of all modules that are loaded by the currently executing import statement, as they would show up in sys.modules. The returned set may contain module names that were already loaded before the execution of this import statement. The returned set may contain names that are not real modules. """ def GetCurrentPackage(globals): """Finds the name of the package for the currently executing module.""" if not globals: return None # depends on [control=['if'], data=[]] # Get the name of the module/package that the current import is being # executed in. current = globals.get('__name__') if not current: return None # depends on [control=['if'], data=[]] # Check if the current module is really a module, or a package. current_file = globals.get('__file__') if not current_file: return None # depends on [control=['if'], data=[]] root = os.path.splitext(os.path.basename(current_file))[0] if root == '__init__': # The current import happened from a package. Return the package. return current # depends on [control=['if'], data=[]] else: # The current import happened from a module. Return the package that # contains the module. return current.rpartition('.')[0] # A Python module can be addressed in two ways: # 1. Using a path relative to the currently executing module's path. For # instance, module p1/p2/m3.py imports p1/p2/p3/m4.py using 'import p3.m4'. # 2. Using a path relative to sys.path. For instance, module p1/p2/m3.py # imports p1/p2/p3/m4.py using 'import p1.p2.p3.m4'. # # The Python importer uses the 'globals' argument to identify the module that # the current import is being performed in. 
The actual logic is very # complicated, and we only approximate it here to limit the performance # overhead (See import.c in the interpreter for details). Here, we only use # the value of the globals['__name__'] for this purpose. # # Note: The Python importer prioritizes the current package over sys.path. For # instance, if 'p1.p2.m3' imports 'm4', then 'p1.p2.m4' is a better match than # the top level 'm4'. However, the debugger does not have to implement this, # because breakpoint paths are not described relative to some other file. They # are always assumed to be relative to the sys.path directories. If the user # sets breakpoint inside 'm4.py', then we can map it to either the top level # 'm4' or 'p1.p2.m4', i.e., both are valid matches. curpkg = GetCurrentPackage(globals) names = set() # A Python module can be imported using two syntaxes: # 1. import p1.p2.m3 # 2. from p1.p2 import m3 # # When the regular 'import p1.p2.m3' syntax is used, the name of the module # being imported is passed in the 'name' argument (e.g., name='p1.p2.m3', # fromlist=None). # # When the from-import syntax is used, then fromlist contains the leaf names # of the modules, and name contains the containing package. For instance, if # name='a.b', fromlist=['c', 'd'], then we add ['a.b.c', 'a.b.d']. # # Corner cases: # 1. The fromlist syntax can be used to import a function from a module. # For instance, 'from p1.p2.m3 import func'. # 2. Sometimes, the importer is passed a dummy fromlist=['__doc__'] (see # import.c in the interpreter for details). # Due to these corner cases, the returned set may contain entries that are not # names of real modules. for from_entry in fromlist or []: # Name relative to sys.path. # For relative imports such as 'from . import x', name will be the empty # string. Thus we should not prepend a '.' to the entry. entry = name + '.' + from_entry if name else from_entry names.add(entry) # Name relative to the currently executing module's package. 
if curpkg: names.add(curpkg + '.' + entry) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['from_entry']] # Generate all names from name. For instance, if name='a.b.c', then # we need to add ['a.b.c', 'a.b', 'a']. while name: # Name relative to sys.path. names.add(name) # Name relative to currently executing module's package. if curpkg: names.add(curpkg + '.' + name) # depends on [control=['if'], data=[]] name = name.rpartition('.')[0] # depends on [control=['while'], data=[]] return names
def enable_step_on_branch_mode(cls):
    """
    When tracing, call this on every single step event for step on branch
    mode.

    @raise WindowsError:
        Raises C{ERROR_DEBUGGER_INACTIVE} if the debugger is not attached to
        least one process.

    @raise NotImplementedError:
        Current architecture is not C{i386} or C{amd64}.

    @warning:
        This method uses the processor's machine specific registers (MSR).
        It could potentially brick your machine. It works on my machine, but
        your mileage may vary.

    @note:
        It doesn't seem to work in VMWare or VirtualBox machines. Maybe it
        fails in other virtualization/emulation environments, no extensive
        testing was made so far.
    """
    # Enable both branch trapping and the last-branch-record flags in the
    # debug control MSR.
    flags = DebugRegister.BranchTrapFlag | DebugRegister.LastBranchRecord
    cls.write_msr(DebugRegister.DebugCtlMSR, flags)
def function[enable_step_on_branch_mode, parameter[cls]]: constant[ When tracing, call this on every single step event for step on branch mode. @raise WindowsError: Raises C{ERROR_DEBUGGER_INACTIVE} if the debugger is not attached to least one process. @raise NotImplementedError: Current architecture is not C{i386} or C{amd64}. @warning: This method uses the processor's machine specific registers (MSR). It could potentially brick your machine. It works on my machine, but your mileage may vary. @note: It doesn't seem to work in VMWare or VirtualBox machines. Maybe it fails in other virtualization/emulation environments, no extensive testing was made so far. ] call[name[cls].write_msr, parameter[name[DebugRegister].DebugCtlMSR, binary_operation[name[DebugRegister].BranchTrapFlag <ast.BitOr object at 0x7da2590d6aa0> name[DebugRegister].LastBranchRecord]]]
keyword[def] identifier[enable_step_on_branch_mode] ( identifier[cls] ): literal[string] identifier[cls] . identifier[write_msr] ( identifier[DebugRegister] . identifier[DebugCtlMSR] , identifier[DebugRegister] . identifier[BranchTrapFlag] | identifier[DebugRegister] . identifier[LastBranchRecord] )
def enable_step_on_branch_mode(cls): """ When tracing, call this on every single step event for step on branch mode. @raise WindowsError: Raises C{ERROR_DEBUGGER_INACTIVE} if the debugger is not attached to least one process. @raise NotImplementedError: Current architecture is not C{i386} or C{amd64}. @warning: This method uses the processor's machine specific registers (MSR). It could potentially brick your machine. It works on my machine, but your mileage may vary. @note: It doesn't seem to work in VMWare or VirtualBox machines. Maybe it fails in other virtualization/emulation environments, no extensive testing was made so far. """ cls.write_msr(DebugRegister.DebugCtlMSR, DebugRegister.BranchTrapFlag | DebugRegister.LastBranchRecord)
def subreporter(self, subpath, entry):
    """
    create a reporter for a sub-report, with updated breadcrumbs and the
    same output formats
    """
    child = Reporter(join(self.basedir, subpath), entry, self.options)
    # Extend a copy of our breadcrumb trail with ourselves as the new tail.
    child.breadcrumbs = list(self.breadcrumbs) + [(self.basedir, self.entry)]
    child.formats = set(self.formats)
    return child
def function[subreporter, parameter[self, subpath, entry]]: constant[ create a reporter for a sub-report, with updated breadcrumbs and the same output formats ] variable[newbase] assign[=] call[name[join], parameter[name[self].basedir, name[subpath]]] variable[r] assign[=] call[name[Reporter], parameter[name[newbase], name[entry], name[self].options]] variable[crumbs] assign[=] call[name[list], parameter[name[self].breadcrumbs]] call[name[crumbs].append, parameter[tuple[[<ast.Attribute object at 0x7da1b0ebe9b0>, <ast.Attribute object at 0x7da1b0ebed70>]]]] name[r].breadcrumbs assign[=] name[crumbs] name[r].formats assign[=] call[name[set], parameter[name[self].formats]] return[name[r]]
keyword[def] identifier[subreporter] ( identifier[self] , identifier[subpath] , identifier[entry] ): literal[string] identifier[newbase] = identifier[join] ( identifier[self] . identifier[basedir] , identifier[subpath] ) identifier[r] = identifier[Reporter] ( identifier[newbase] , identifier[entry] , identifier[self] . identifier[options] ) identifier[crumbs] = identifier[list] ( identifier[self] . identifier[breadcrumbs] ) identifier[crumbs] . identifier[append] (( identifier[self] . identifier[basedir] , identifier[self] . identifier[entry] )) identifier[r] . identifier[breadcrumbs] = identifier[crumbs] identifier[r] . identifier[formats] = identifier[set] ( identifier[self] . identifier[formats] ) keyword[return] identifier[r]
def subreporter(self, subpath, entry): """ create a reporter for a sub-report, with updated breadcrumbs and the same output formats """ newbase = join(self.basedir, subpath) r = Reporter(newbase, entry, self.options) crumbs = list(self.breadcrumbs) crumbs.append((self.basedir, self.entry)) r.breadcrumbs = crumbs r.formats = set(self.formats) return r
def update_subscription(self, subscription_id, url=None, events=None):
    """
    Update an existing subscription.

    :param subscription_id: Subscription to update
    :param events: Events to subscribe
    :param url: Url to send events
    """
    # Only include the fields the caller actually supplied.
    payload = {}
    if url is not None:
        payload['url'] = url
    if events is not None:
        payload['events'] = events
    endpoint = self.SUBSCRIPTIONS_ID_URL % subscription_id
    connection = Connection(self.token)
    connection.set_url(self.production, endpoint)
    connection.add_header('Content-Type', 'application/json')
    connection.add_params(payload)
    return connection.patch_request()
def function[update_subscription, parameter[self, subscription_id, url, events]]: constant[ Create subscription :param subscription_id: Subscription to update :param events: Events to subscribe :param url: Url to send events ] variable[params] assign[=] dictionary[[], []] if compare[name[url] is_not constant[None]] begin[:] call[name[params]][constant[url]] assign[=] name[url] if compare[name[events] is_not constant[None]] begin[:] call[name[params]][constant[events]] assign[=] name[events] variable[url] assign[=] binary_operation[name[self].SUBSCRIPTIONS_ID_URL <ast.Mod object at 0x7da2590d6920> name[subscription_id]] variable[connection] assign[=] call[name[Connection], parameter[name[self].token]] call[name[connection].set_url, parameter[name[self].production, name[url]]] call[name[connection].add_header, parameter[constant[Content-Type], constant[application/json]]] call[name[connection].add_params, parameter[name[params]]] return[call[name[connection].patch_request, parameter[]]]
keyword[def] identifier[update_subscription] ( identifier[self] , identifier[subscription_id] , identifier[url] = keyword[None] , identifier[events] = keyword[None] ): literal[string] identifier[params] ={} keyword[if] identifier[url] keyword[is] keyword[not] keyword[None] : identifier[params] [ literal[string] ]= identifier[url] keyword[if] identifier[events] keyword[is] keyword[not] keyword[None] : identifier[params] [ literal[string] ]= identifier[events] identifier[url] = identifier[self] . identifier[SUBSCRIPTIONS_ID_URL] % identifier[subscription_id] identifier[connection] = identifier[Connection] ( identifier[self] . identifier[token] ) identifier[connection] . identifier[set_url] ( identifier[self] . identifier[production] , identifier[url] ) identifier[connection] . identifier[add_header] ( literal[string] , literal[string] ) identifier[connection] . identifier[add_params] ( identifier[params] ) keyword[return] identifier[connection] . identifier[patch_request] ()
def update_subscription(self, subscription_id, url=None, events=None): """ Create subscription :param subscription_id: Subscription to update :param events: Events to subscribe :param url: Url to send events """ params = {} if url is not None: params['url'] = url # depends on [control=['if'], data=['url']] if events is not None: params['events'] = events # depends on [control=['if'], data=['events']] url = self.SUBSCRIPTIONS_ID_URL % subscription_id connection = Connection(self.token) connection.set_url(self.production, url) connection.add_header('Content-Type', 'application/json') connection.add_params(params) return connection.patch_request()
def ssn(self): """ Returns a 13 digits Swiss SSN named AHV (German) or AVS (French and Italian) See: http://www.bsv.admin.ch/themen/ahv/00011/02185/ """ def _checksum(digits): evensum = sum(digits[:-1:2]) oddsum = sum(digits[1::2]) return (10 - ((evensum + oddsum * 3) % 10)) % 10 digits = [7, 5, 6] # create an array of first 9 elements initialized randomly digits += self.generator.random.sample(range(10), 9) # determine the last digit to make it qualify the test digits.append(_checksum(digits)) # repeat steps until it does qualify the test digits = ''.join([str(d) for d in digits]) ssn = digits[:3] + '.' \ + digits[3:7] + '.' \ + digits[7:11] + '.' \ + digits[11:] return ssn
def function[ssn, parameter[self]]: constant[ Returns a 13 digits Swiss SSN named AHV (German) or AVS (French and Italian) See: http://www.bsv.admin.ch/themen/ahv/00011/02185/ ] def function[_checksum, parameter[digits]]: variable[evensum] assign[=] call[name[sum], parameter[call[name[digits]][<ast.Slice object at 0x7da18dc9a4a0>]]] variable[oddsum] assign[=] call[name[sum], parameter[call[name[digits]][<ast.Slice object at 0x7da18dc99870>]]] return[binary_operation[binary_operation[constant[10] - binary_operation[binary_operation[name[evensum] + binary_operation[name[oddsum] * constant[3]]] <ast.Mod object at 0x7da2590d6920> constant[10]]] <ast.Mod object at 0x7da2590d6920> constant[10]]] variable[digits] assign[=] list[[<ast.Constant object at 0x7da18dc98b50>, <ast.Constant object at 0x7da18dc9a7d0>, <ast.Constant object at 0x7da18dc99960>]] <ast.AugAssign object at 0x7da18dc9a6b0> call[name[digits].append, parameter[call[name[_checksum], parameter[name[digits]]]]] variable[digits] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da18dc9ae00>]] variable[ssn] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[digits]][<ast.Slice object at 0x7da18dc99ea0>] + constant[.]] + call[name[digits]][<ast.Slice object at 0x7da1b26af010>]] + constant[.]] + call[name[digits]][<ast.Slice object at 0x7da18dc98e20>]] + constant[.]] + call[name[digits]][<ast.Slice object at 0x7da18dc9ac50>]] return[name[ssn]]
keyword[def] identifier[ssn] ( identifier[self] ): literal[string] keyword[def] identifier[_checksum] ( identifier[digits] ): identifier[evensum] = identifier[sum] ( identifier[digits] [:- literal[int] : literal[int] ]) identifier[oddsum] = identifier[sum] ( identifier[digits] [ literal[int] :: literal[int] ]) keyword[return] ( literal[int] -(( identifier[evensum] + identifier[oddsum] * literal[int] )% literal[int] ))% literal[int] identifier[digits] =[ literal[int] , literal[int] , literal[int] ] identifier[digits] += identifier[self] . identifier[generator] . identifier[random] . identifier[sample] ( identifier[range] ( literal[int] ), literal[int] ) identifier[digits] . identifier[append] ( identifier[_checksum] ( identifier[digits] )) identifier[digits] = literal[string] . identifier[join] ([ identifier[str] ( identifier[d] ) keyword[for] identifier[d] keyword[in] identifier[digits] ]) identifier[ssn] = identifier[digits] [: literal[int] ]+ literal[string] + identifier[digits] [ literal[int] : literal[int] ]+ literal[string] + identifier[digits] [ literal[int] : literal[int] ]+ literal[string] + identifier[digits] [ literal[int] :] keyword[return] identifier[ssn]
def ssn(self): """ Returns a 13 digits Swiss SSN named AHV (German) or AVS (French and Italian) See: http://www.bsv.admin.ch/themen/ahv/00011/02185/ """ def _checksum(digits): evensum = sum(digits[:-1:2]) oddsum = sum(digits[1::2]) return (10 - (evensum + oddsum * 3) % 10) % 10 digits = [7, 5, 6] # create an array of first 9 elements initialized randomly digits += self.generator.random.sample(range(10), 9) # determine the last digit to make it qualify the test digits.append(_checksum(digits)) # repeat steps until it does qualify the test digits = ''.join([str(d) for d in digits]) ssn = digits[:3] + '.' + digits[3:7] + '.' + digits[7:11] + '.' + digits[11:] return ssn
def _perspective_warp(c, magnitude:partial(uniform,size=8)=0, invert=False): "Apply warp of `magnitude` to `c`." magnitude = magnitude.view(4,2) targ_pts = [[x+m for x,m in zip(xs, ms)] for xs, ms in zip(_orig_pts, magnitude)] return _do_perspective_warp(c, targ_pts, invert)
def function[_perspective_warp, parameter[c, magnitude, invert]]: constant[Apply warp of `magnitude` to `c`.] variable[magnitude] assign[=] call[name[magnitude].view, parameter[constant[4], constant[2]]] variable[targ_pts] assign[=] <ast.ListComp object at 0x7da1b1dfa380> return[call[name[_do_perspective_warp], parameter[name[c], name[targ_pts], name[invert]]]]
keyword[def] identifier[_perspective_warp] ( identifier[c] , identifier[magnitude] : identifier[partial] ( identifier[uniform] , identifier[size] = literal[int] )= literal[int] , identifier[invert] = keyword[False] ): literal[string] identifier[magnitude] = identifier[magnitude] . identifier[view] ( literal[int] , literal[int] ) identifier[targ_pts] =[[ identifier[x] + identifier[m] keyword[for] identifier[x] , identifier[m] keyword[in] identifier[zip] ( identifier[xs] , identifier[ms] )] keyword[for] identifier[xs] , identifier[ms] keyword[in] identifier[zip] ( identifier[_orig_pts] , identifier[magnitude] )] keyword[return] identifier[_do_perspective_warp] ( identifier[c] , identifier[targ_pts] , identifier[invert] )
def _perspective_warp(c, magnitude: partial(uniform, size=8)=0, invert=False): """Apply warp of `magnitude` to `c`.""" magnitude = magnitude.view(4, 2) targ_pts = [[x + m for (x, m) in zip(xs, ms)] for (xs, ms) in zip(_orig_pts, magnitude)] return _do_perspective_warp(c, targ_pts, invert)
def make_fil_file(filename,out_dir='./', new_filename=None, max_load = None): ''' Converts file to Sigproc filterbank (.fil) format. Default saves output in current dir. ''' fil_file = Waterfall(filename, max_load = max_load) if not new_filename: new_filename = out_dir+filename.replace('.h5','.fil').split('/')[-1] if '.fil' not in new_filename: new_filename = new_filename+'.fil' fil_file.write_to_fil(new_filename)
def function[make_fil_file, parameter[filename, out_dir, new_filename, max_load]]: constant[ Converts file to Sigproc filterbank (.fil) format. Default saves output in current dir. ] variable[fil_file] assign[=] call[name[Waterfall], parameter[name[filename]]] if <ast.UnaryOp object at 0x7da1b26af010> begin[:] variable[new_filename] assign[=] binary_operation[name[out_dir] + call[call[call[name[filename].replace, parameter[constant[.h5], constant[.fil]]].split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da1b26ad300>]] if compare[constant[.fil] <ast.NotIn object at 0x7da2590d7190> name[new_filename]] begin[:] variable[new_filename] assign[=] binary_operation[name[new_filename] + constant[.fil]] call[name[fil_file].write_to_fil, parameter[name[new_filename]]]
keyword[def] identifier[make_fil_file] ( identifier[filename] , identifier[out_dir] = literal[string] , identifier[new_filename] = keyword[None] , identifier[max_load] = keyword[None] ): literal[string] identifier[fil_file] = identifier[Waterfall] ( identifier[filename] , identifier[max_load] = identifier[max_load] ) keyword[if] keyword[not] identifier[new_filename] : identifier[new_filename] = identifier[out_dir] + identifier[filename] . identifier[replace] ( literal[string] , literal[string] ). identifier[split] ( literal[string] )[- literal[int] ] keyword[if] literal[string] keyword[not] keyword[in] identifier[new_filename] : identifier[new_filename] = identifier[new_filename] + literal[string] identifier[fil_file] . identifier[write_to_fil] ( identifier[new_filename] )
def make_fil_file(filename, out_dir='./', new_filename=None, max_load=None): """ Converts file to Sigproc filterbank (.fil) format. Default saves output in current dir. """ fil_file = Waterfall(filename, max_load=max_load) if not new_filename: new_filename = out_dir + filename.replace('.h5', '.fil').split('/')[-1] # depends on [control=['if'], data=[]] if '.fil' not in new_filename: new_filename = new_filename + '.fil' # depends on [control=['if'], data=['new_filename']] fil_file.write_to_fil(new_filename)
def to_unit(C, val, unit=None): """convert a string measurement to a Unum""" md = re.match(r'^(?P<num>[\d\.]+)(?P<unit>.*)$', val) if md is not None: un = float(md.group('num')) * CSS.units[md.group('unit')] if unit is not None: return un.asUnit(unit) else: return un
def function[to_unit, parameter[C, val, unit]]: constant[convert a string measurement to a Unum] variable[md] assign[=] call[name[re].match, parameter[constant[^(?P<num>[\d\.]+)(?P<unit>.*)$], name[val]]] if compare[name[md] is_not constant[None]] begin[:] variable[un] assign[=] binary_operation[call[name[float], parameter[call[name[md].group, parameter[constant[num]]]]] * call[name[CSS].units][call[name[md].group, parameter[constant[unit]]]]] if compare[name[unit] is_not constant[None]] begin[:] return[call[name[un].asUnit, parameter[name[unit]]]]
keyword[def] identifier[to_unit] ( identifier[C] , identifier[val] , identifier[unit] = keyword[None] ): literal[string] identifier[md] = identifier[re] . identifier[match] ( literal[string] , identifier[val] ) keyword[if] identifier[md] keyword[is] keyword[not] keyword[None] : identifier[un] = identifier[float] ( identifier[md] . identifier[group] ( literal[string] ))* identifier[CSS] . identifier[units] [ identifier[md] . identifier[group] ( literal[string] )] keyword[if] identifier[unit] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[un] . identifier[asUnit] ( identifier[unit] ) keyword[else] : keyword[return] identifier[un]
def to_unit(C, val, unit=None): """convert a string measurement to a Unum""" md = re.match('^(?P<num>[\\d\\.]+)(?P<unit>.*)$', val) if md is not None: un = float(md.group('num')) * CSS.units[md.group('unit')] if unit is not None: return un.asUnit(unit) # depends on [control=['if'], data=['unit']] else: return un # depends on [control=['if'], data=['md']]
def categories_ref(self): """ The Excel worksheet reference to the categories for this chart (not including the column heading). """ categories = self._chart_data.categories if categories.depth == 0: raise ValueError('chart data contains no categories') right_col = chr(ord('A') + categories.depth - 1) bottom_row = categories.leaf_count + 1 return "Sheet1!$A$2:$%s$%d" % (right_col, bottom_row)
def function[categories_ref, parameter[self]]: constant[ The Excel worksheet reference to the categories for this chart (not including the column heading). ] variable[categories] assign[=] name[self]._chart_data.categories if compare[name[categories].depth equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da20c6ab100> variable[right_col] assign[=] call[name[chr], parameter[binary_operation[binary_operation[call[name[ord], parameter[constant[A]]] + name[categories].depth] - constant[1]]]] variable[bottom_row] assign[=] binary_operation[name[categories].leaf_count + constant[1]] return[binary_operation[constant[Sheet1!$A$2:$%s$%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6aa1a0>, <ast.Name object at 0x7da20c6a86a0>]]]]
keyword[def] identifier[categories_ref] ( identifier[self] ): literal[string] identifier[categories] = identifier[self] . identifier[_chart_data] . identifier[categories] keyword[if] identifier[categories] . identifier[depth] == literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[right_col] = identifier[chr] ( identifier[ord] ( literal[string] )+ identifier[categories] . identifier[depth] - literal[int] ) identifier[bottom_row] = identifier[categories] . identifier[leaf_count] + literal[int] keyword[return] literal[string] %( identifier[right_col] , identifier[bottom_row] )
def categories_ref(self): """ The Excel worksheet reference to the categories for this chart (not including the column heading). """ categories = self._chart_data.categories if categories.depth == 0: raise ValueError('chart data contains no categories') # depends on [control=['if'], data=[]] right_col = chr(ord('A') + categories.depth - 1) bottom_row = categories.leaf_count + 1 return 'Sheet1!$A$2:$%s$%d' % (right_col, bottom_row)
def dump_ifd(self, ifd, ifd_name, tag_dict=EXIF_TAGS, relative=0, stop_tag=DEFAULT_STOP_TAG): """ Return a list of entries in the given IFD. """ # make sure we can process the entries try: entries = self.s2n(ifd, 2) except TypeError: logger.warning("Possibly corrupted IFD: %s" % ifd) return for i in range(entries): # entry is index of start of this IFD in the file entry = ifd + 2 + 12 * i tag = self.s2n(entry, 2) # get tag name early to avoid errors, help debug tag_entry = tag_dict.get(tag) if tag_entry: tag_name = tag_entry[0] else: tag_name = 'Tag 0x%04X' % tag # ignore certain tags for faster processing if not (not self.detailed and tag in IGNORE_TAGS): field_type = self.s2n(entry + 2, 2) # unknown field type if not 0 < field_type < len(FIELD_TYPES): if not self.strict: continue else: raise ValueError('Unknown type %d in tag 0x%04X' % (field_type, tag)) type_length = FIELD_TYPES[field_type][0] count = self.s2n(entry + 4, 4) # Adjust for tag id/type/count (2+2+4 bytes) # Now we point at either the data or the 2nd level offset offset = entry + 8 # If the value fits in 4 bytes, it is inlined, else we # need to jump ahead again. if count * type_length > 4: # offset is not the value; it's a pointer to the value # if relative we set things up so s2n will seek to the right # place when it adds self.offset. Note that this 'relative' # is for the Nikon type 3 makernote. Other cameras may use # other relative offsets, which would have to be computed here # slightly differently. if relative: tmp_offset = self.s2n(offset, 4) offset = tmp_offset + ifd - 8 if self.fake_exif: offset += 18 else: offset = self.s2n(offset, 4) field_offset = offset values = None if field_type == 2: # special case: null-terminated ASCII string # XXX investigate # sometimes gets too big to fit in int value if count != 0: # and count < (2**31): # 2E31 is hardware dependant. 
--gd file_position = self.offset + offset try: self.file.seek(file_position) values = self.file.read(count) #print(values) # Drop any garbage after a null. values = values.split(b'\x00', 1)[0] if isinstance(values, bytes): try: values = values.decode("utf-8") except UnicodeDecodeError: logger.warning("Possibly corrupted field %s in %s IFD", tag_name, ifd_name) except OverflowError: logger.warn('OverflowError at position: %s, length: %s', file_position, count) values = '' except MemoryError: logger.warn('MemoryError at position: %s, length: %s', file_position, count) values = '' else: values = [] signed = (field_type in [6, 8, 9, 10]) # XXX investigate # some entries get too big to handle could be malformed # file or problem with self.s2n if count < 1000: for dummy in range(count): if field_type in (5, 10): # a ratio value = Ratio(self.s2n(offset, 4, signed), self.s2n(offset + 4, 4, signed)) else: value = self.s2n(offset, type_length, signed) values.append(value) offset = offset + type_length # The test above causes problems with tags that are # supposed to have long values! Fix up one important case. elif tag_name in ('MakerNote', makernote.canon.CAMERA_INFO_TAG_NAME): for dummy in range(count): value = self.s2n(offset, type_length, signed) values.append(value) offset = offset + type_length # now 'values' is either a string or an array if count == 1 and field_type != 2: printable = str(values[0]) elif count > 50 and len(values) > 20: printable = str(values[0:20])[0:-1] + ", ... 
]" else: try: printable = str(values) # fix for python2's handling of unicode values except UnicodeEncodeError: printable = unicode(values) # compute printable version of values if tag_entry: # optional 2nd tag element is present if len(tag_entry) != 1: if callable(tag_entry[1]): # call mapping function printable = tag_entry[1](values) elif type(tag_entry[1]) is tuple: ifd_info = tag_entry[1] try: logger.debug('%s SubIFD at offset %d:', ifd_info[0], values[0]) self.dump_ifd(values[0], ifd_info[0], tag_dict=ifd_info[1], stop_tag=stop_tag) except IndexError: logger.warn('No values found for %s SubIFD', ifd_info[0]) else: printable = '' for i in values: # use lookup table for this tag printable += tag_entry[1].get(i, repr(i)) self.tags[ifd_name + ' ' + tag_name] = IfdTag(printable, tag, field_type, values, field_offset, count * type_length) try: tag_value = repr(self.tags[ifd_name + ' ' + tag_name]) # fix for python2's handling of unicode values except UnicodeEncodeError: tag_value = unicode(self.tags[ifd_name + ' ' + tag_name]) logger.debug(' %s: %s', tag_name, tag_value) if tag_name == stop_tag: break
def function[dump_ifd, parameter[self, ifd, ifd_name, tag_dict, relative, stop_tag]]: constant[ Return a list of entries in the given IFD. ] <ast.Try object at 0x7da18f00c040> for taget[name[i]] in starred[call[name[range], parameter[name[entries]]]] begin[:] variable[entry] assign[=] binary_operation[binary_operation[name[ifd] + constant[2]] + binary_operation[constant[12] * name[i]]] variable[tag] assign[=] call[name[self].s2n, parameter[name[entry], constant[2]]] variable[tag_entry] assign[=] call[name[tag_dict].get, parameter[name[tag]]] if name[tag_entry] begin[:] variable[tag_name] assign[=] call[name[tag_entry]][constant[0]] if <ast.UnaryOp object at 0x7da18f00dc00> begin[:] variable[field_type] assign[=] call[name[self].s2n, parameter[binary_operation[name[entry] + constant[2]], constant[2]]] if <ast.UnaryOp object at 0x7da18bcc95d0> begin[:] if <ast.UnaryOp object at 0x7da18bcc8640> begin[:] continue variable[type_length] assign[=] call[call[name[FIELD_TYPES]][name[field_type]]][constant[0]] variable[count] assign[=] call[name[self].s2n, parameter[binary_operation[name[entry] + constant[4]], constant[4]]] variable[offset] assign[=] binary_operation[name[entry] + constant[8]] if compare[binary_operation[name[count] * name[type_length]] greater[>] constant[4]] begin[:] if name[relative] begin[:] variable[tmp_offset] assign[=] call[name[self].s2n, parameter[name[offset], constant[4]]] variable[offset] assign[=] binary_operation[binary_operation[name[tmp_offset] + name[ifd]] - constant[8]] if name[self].fake_exif begin[:] <ast.AugAssign object at 0x7da18bcc9480> variable[field_offset] assign[=] name[offset] variable[values] assign[=] constant[None] if compare[name[field_type] equal[==] constant[2]] begin[:] if compare[name[count] not_equal[!=] constant[0]] begin[:] variable[file_position] assign[=] binary_operation[name[self].offset + name[offset]] <ast.Try object at 0x7da18bccbdc0> if <ast.BoolOp object at 0x7da18fe92c50> begin[:] variable[printable] 
assign[=] call[name[str], parameter[call[name[values]][constant[0]]]] if name[tag_entry] begin[:] if compare[call[name[len], parameter[name[tag_entry]]] not_equal[!=] constant[1]] begin[:] if call[name[callable], parameter[call[name[tag_entry]][constant[1]]]] begin[:] variable[printable] assign[=] call[call[name[tag_entry]][constant[1]], parameter[name[values]]] call[name[self].tags][binary_operation[binary_operation[name[ifd_name] + constant[ ]] + name[tag_name]]] assign[=] call[name[IfdTag], parameter[name[printable], name[tag], name[field_type], name[values], name[field_offset], binary_operation[name[count] * name[type_length]]]] <ast.Try object at 0x7da1b2581750> call[name[logger].debug, parameter[constant[ %s: %s], name[tag_name], name[tag_value]]] if compare[name[tag_name] equal[==] name[stop_tag]] begin[:] break
keyword[def] identifier[dump_ifd] ( identifier[self] , identifier[ifd] , identifier[ifd_name] , identifier[tag_dict] = identifier[EXIF_TAGS] , identifier[relative] = literal[int] , identifier[stop_tag] = identifier[DEFAULT_STOP_TAG] ): literal[string] keyword[try] : identifier[entries] = identifier[self] . identifier[s2n] ( identifier[ifd] , literal[int] ) keyword[except] identifier[TypeError] : identifier[logger] . identifier[warning] ( literal[string] % identifier[ifd] ) keyword[return] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[entries] ): identifier[entry] = identifier[ifd] + literal[int] + literal[int] * identifier[i] identifier[tag] = identifier[self] . identifier[s2n] ( identifier[entry] , literal[int] ) identifier[tag_entry] = identifier[tag_dict] . identifier[get] ( identifier[tag] ) keyword[if] identifier[tag_entry] : identifier[tag_name] = identifier[tag_entry] [ literal[int] ] keyword[else] : identifier[tag_name] = literal[string] % identifier[tag] keyword[if] keyword[not] ( keyword[not] identifier[self] . identifier[detailed] keyword[and] identifier[tag] keyword[in] identifier[IGNORE_TAGS] ): identifier[field_type] = identifier[self] . identifier[s2n] ( identifier[entry] + literal[int] , literal[int] ) keyword[if] keyword[not] literal[int] < identifier[field_type] < identifier[len] ( identifier[FIELD_TYPES] ): keyword[if] keyword[not] identifier[self] . identifier[strict] : keyword[continue] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[field_type] , identifier[tag] )) identifier[type_length] = identifier[FIELD_TYPES] [ identifier[field_type] ][ literal[int] ] identifier[count] = identifier[self] . identifier[s2n] ( identifier[entry] + literal[int] , literal[int] ) identifier[offset] = identifier[entry] + literal[int] keyword[if] identifier[count] * identifier[type_length] > literal[int] : keyword[if] identifier[relative] : identifier[tmp_offset] = identifier[self] . 
identifier[s2n] ( identifier[offset] , literal[int] ) identifier[offset] = identifier[tmp_offset] + identifier[ifd] - literal[int] keyword[if] identifier[self] . identifier[fake_exif] : identifier[offset] += literal[int] keyword[else] : identifier[offset] = identifier[self] . identifier[s2n] ( identifier[offset] , literal[int] ) identifier[field_offset] = identifier[offset] identifier[values] = keyword[None] keyword[if] identifier[field_type] == literal[int] : keyword[if] identifier[count] != literal[int] : identifier[file_position] = identifier[self] . identifier[offset] + identifier[offset] keyword[try] : identifier[self] . identifier[file] . identifier[seek] ( identifier[file_position] ) identifier[values] = identifier[self] . identifier[file] . identifier[read] ( identifier[count] ) identifier[values] = identifier[values] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ] keyword[if] identifier[isinstance] ( identifier[values] , identifier[bytes] ): keyword[try] : identifier[values] = identifier[values] . identifier[decode] ( literal[string] ) keyword[except] identifier[UnicodeDecodeError] : identifier[logger] . identifier[warning] ( literal[string] , identifier[tag_name] , identifier[ifd_name] ) keyword[except] identifier[OverflowError] : identifier[logger] . identifier[warn] ( literal[string] , identifier[file_position] , identifier[count] ) identifier[values] = literal[string] keyword[except] identifier[MemoryError] : identifier[logger] . 
identifier[warn] ( literal[string] , identifier[file_position] , identifier[count] ) identifier[values] = literal[string] keyword[else] : identifier[values] =[] identifier[signed] =( identifier[field_type] keyword[in] [ literal[int] , literal[int] , literal[int] , literal[int] ]) keyword[if] identifier[count] < literal[int] : keyword[for] identifier[dummy] keyword[in] identifier[range] ( identifier[count] ): keyword[if] identifier[field_type] keyword[in] ( literal[int] , literal[int] ): identifier[value] = identifier[Ratio] ( identifier[self] . identifier[s2n] ( identifier[offset] , literal[int] , identifier[signed] ), identifier[self] . identifier[s2n] ( identifier[offset] + literal[int] , literal[int] , identifier[signed] )) keyword[else] : identifier[value] = identifier[self] . identifier[s2n] ( identifier[offset] , identifier[type_length] , identifier[signed] ) identifier[values] . identifier[append] ( identifier[value] ) identifier[offset] = identifier[offset] + identifier[type_length] keyword[elif] identifier[tag_name] keyword[in] ( literal[string] , identifier[makernote] . identifier[canon] . identifier[CAMERA_INFO_TAG_NAME] ): keyword[for] identifier[dummy] keyword[in] identifier[range] ( identifier[count] ): identifier[value] = identifier[self] . identifier[s2n] ( identifier[offset] , identifier[type_length] , identifier[signed] ) identifier[values] . 
identifier[append] ( identifier[value] ) identifier[offset] = identifier[offset] + identifier[type_length] keyword[if] identifier[count] == literal[int] keyword[and] identifier[field_type] != literal[int] : identifier[printable] = identifier[str] ( identifier[values] [ literal[int] ]) keyword[elif] identifier[count] > literal[int] keyword[and] identifier[len] ( identifier[values] )> literal[int] : identifier[printable] = identifier[str] ( identifier[values] [ literal[int] : literal[int] ])[ literal[int] :- literal[int] ]+ literal[string] keyword[else] : keyword[try] : identifier[printable] = identifier[str] ( identifier[values] ) keyword[except] identifier[UnicodeEncodeError] : identifier[printable] = identifier[unicode] ( identifier[values] ) keyword[if] identifier[tag_entry] : keyword[if] identifier[len] ( identifier[tag_entry] )!= literal[int] : keyword[if] identifier[callable] ( identifier[tag_entry] [ literal[int] ]): identifier[printable] = identifier[tag_entry] [ literal[int] ]( identifier[values] ) keyword[elif] identifier[type] ( identifier[tag_entry] [ literal[int] ]) keyword[is] identifier[tuple] : identifier[ifd_info] = identifier[tag_entry] [ literal[int] ] keyword[try] : identifier[logger] . identifier[debug] ( literal[string] , identifier[ifd_info] [ literal[int] ], identifier[values] [ literal[int] ]) identifier[self] . identifier[dump_ifd] ( identifier[values] [ literal[int] ], identifier[ifd_info] [ literal[int] ], identifier[tag_dict] = identifier[ifd_info] [ literal[int] ], identifier[stop_tag] = identifier[stop_tag] ) keyword[except] identifier[IndexError] : identifier[logger] . identifier[warn] ( literal[string] , identifier[ifd_info] [ literal[int] ]) keyword[else] : identifier[printable] = literal[string] keyword[for] identifier[i] keyword[in] identifier[values] : identifier[printable] += identifier[tag_entry] [ literal[int] ]. identifier[get] ( identifier[i] , identifier[repr] ( identifier[i] )) identifier[self] . 
identifier[tags] [ identifier[ifd_name] + literal[string] + identifier[tag_name] ]= identifier[IfdTag] ( identifier[printable] , identifier[tag] , identifier[field_type] , identifier[values] , identifier[field_offset] , identifier[count] * identifier[type_length] ) keyword[try] : identifier[tag_value] = identifier[repr] ( identifier[self] . identifier[tags] [ identifier[ifd_name] + literal[string] + identifier[tag_name] ]) keyword[except] identifier[UnicodeEncodeError] : identifier[tag_value] = identifier[unicode] ( identifier[self] . identifier[tags] [ identifier[ifd_name] + literal[string] + identifier[tag_name] ]) identifier[logger] . identifier[debug] ( literal[string] , identifier[tag_name] , identifier[tag_value] ) keyword[if] identifier[tag_name] == identifier[stop_tag] : keyword[break]
def dump_ifd(self, ifd, ifd_name, tag_dict=EXIF_TAGS, relative=0, stop_tag=DEFAULT_STOP_TAG): """ Return a list of entries in the given IFD. """ # make sure we can process the entries try: entries = self.s2n(ifd, 2) # depends on [control=['try'], data=[]] except TypeError: logger.warning('Possibly corrupted IFD: %s' % ifd) return # depends on [control=['except'], data=[]] for i in range(entries): # entry is index of start of this IFD in the file entry = ifd + 2 + 12 * i tag = self.s2n(entry, 2) # get tag name early to avoid errors, help debug tag_entry = tag_dict.get(tag) if tag_entry: tag_name = tag_entry[0] # depends on [control=['if'], data=[]] else: tag_name = 'Tag 0x%04X' % tag # ignore certain tags for faster processing if not (not self.detailed and tag in IGNORE_TAGS): field_type = self.s2n(entry + 2, 2) # unknown field type if not 0 < field_type < len(FIELD_TYPES): if not self.strict: continue # depends on [control=['if'], data=[]] else: raise ValueError('Unknown type %d in tag 0x%04X' % (field_type, tag)) # depends on [control=['if'], data=[]] type_length = FIELD_TYPES[field_type][0] count = self.s2n(entry + 4, 4) # Adjust for tag id/type/count (2+2+4 bytes) # Now we point at either the data or the 2nd level offset offset = entry + 8 # If the value fits in 4 bytes, it is inlined, else we # need to jump ahead again. if count * type_length > 4: # offset is not the value; it's a pointer to the value # if relative we set things up so s2n will seek to the right # place when it adds self.offset. Note that this 'relative' # is for the Nikon type 3 makernote. Other cameras may use # other relative offsets, which would have to be computed here # slightly differently. 
if relative: tmp_offset = self.s2n(offset, 4) offset = tmp_offset + ifd - 8 if self.fake_exif: offset += 18 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: offset = self.s2n(offset, 4) # depends on [control=['if'], data=[]] field_offset = offset values = None if field_type == 2: # special case: null-terminated ASCII string # XXX investigate # sometimes gets too big to fit in int value if count != 0: # and count < (2**31): # 2E31 is hardware dependant. --gd file_position = self.offset + offset try: self.file.seek(file_position) values = self.file.read(count) #print(values) # Drop any garbage after a null. values = values.split(b'\x00', 1)[0] if isinstance(values, bytes): try: values = values.decode('utf-8') # depends on [control=['try'], data=[]] except UnicodeDecodeError: logger.warning('Possibly corrupted field %s in %s IFD', tag_name, ifd_name) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except OverflowError: logger.warn('OverflowError at position: %s, length: %s', file_position, count) values = '' # depends on [control=['except'], data=[]] except MemoryError: logger.warn('MemoryError at position: %s, length: %s', file_position, count) values = '' # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['count']] # depends on [control=['if'], data=[]] else: values = [] signed = field_type in [6, 8, 9, 10] # XXX investigate # some entries get too big to handle could be malformed # file or problem with self.s2n if count < 1000: for dummy in range(count): if field_type in (5, 10): # a ratio value = Ratio(self.s2n(offset, 4, signed), self.s2n(offset + 4, 4, signed)) # depends on [control=['if'], data=[]] else: value = self.s2n(offset, type_length, signed) values.append(value) offset = offset + type_length # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['count']] # The test above causes problems with tags 
that are # supposed to have long values! Fix up one important case. elif tag_name in ('MakerNote', makernote.canon.CAMERA_INFO_TAG_NAME): for dummy in range(count): value = self.s2n(offset, type_length, signed) values.append(value) offset = offset + type_length # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # now 'values' is either a string or an array if count == 1 and field_type != 2: printable = str(values[0]) # depends on [control=['if'], data=[]] elif count > 50 and len(values) > 20: printable = str(values[0:20])[0:-1] + ', ... ]' # depends on [control=['if'], data=[]] else: try: printable = str(values) # depends on [control=['try'], data=[]] # fix for python2's handling of unicode values except UnicodeEncodeError: printable = unicode(values) # depends on [control=['except'], data=[]] # compute printable version of values if tag_entry: # optional 2nd tag element is present if len(tag_entry) != 1: if callable(tag_entry[1]): # call mapping function printable = tag_entry[1](values) # depends on [control=['if'], data=[]] elif type(tag_entry[1]) is tuple: ifd_info = tag_entry[1] try: logger.debug('%s SubIFD at offset %d:', ifd_info[0], values[0]) self.dump_ifd(values[0], ifd_info[0], tag_dict=ifd_info[1], stop_tag=stop_tag) # depends on [control=['try'], data=[]] except IndexError: logger.warn('No values found for %s SubIFD', ifd_info[0]) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: printable = '' for i in values: # use lookup table for this tag printable += tag_entry[1].get(i, repr(i)) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] self.tags[ifd_name + ' ' + tag_name] = IfdTag(printable, tag, field_type, values, field_offset, count * type_length) try: tag_value = repr(self.tags[ifd_name + ' ' + tag_name]) # depends on [control=['try'], data=[]] # fix for python2's handling of unicode values except UnicodeEncodeError: 
tag_value = unicode(self.tags[ifd_name + ' ' + tag_name]) # depends on [control=['except'], data=[]] logger.debug(' %s: %s', tag_name, tag_value) # depends on [control=['if'], data=[]] if tag_name == stop_tag: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
def delete(self, ids): """ Method to delete ipv4's by their ids :param ids: Identifiers of ipv4's :return: None """ url = build_uri_with_ids('api/v3/ipv4/%s/', ids) return super(ApiIPv4, self).delete(url)
def function[delete, parameter[self, ids]]: constant[ Method to delete ipv4's by their ids :param ids: Identifiers of ipv4's :return: None ] variable[url] assign[=] call[name[build_uri_with_ids], parameter[constant[api/v3/ipv4/%s/], name[ids]]] return[call[call[name[super], parameter[name[ApiIPv4], name[self]]].delete, parameter[name[url]]]]
keyword[def] identifier[delete] ( identifier[self] , identifier[ids] ): literal[string] identifier[url] = identifier[build_uri_with_ids] ( literal[string] , identifier[ids] ) keyword[return] identifier[super] ( identifier[ApiIPv4] , identifier[self] ). identifier[delete] ( identifier[url] )
def delete(self, ids): """ Method to delete ipv4's by their ids :param ids: Identifiers of ipv4's :return: None """ url = build_uri_with_ids('api/v3/ipv4/%s/', ids) return super(ApiIPv4, self).delete(url)
def wait(self, timeout=None): """pause the current coroutine until this event is set .. note:: this method will block the current coroutine if :meth:`set` has not been called. :param timeout: the maximum amount of time to block in seconds. the default of ``None`` allows indefinite blocking. :type timeout: number or None :returns: ``True`` if a timeout was provided and was hit, otherwise ``False`` """ if self._is_set: return False current = compat.getcurrent() # the waiting greenlet waketime = None if timeout is None else time.time() + timeout if timeout is not None: scheduler.schedule_at(waketime, current) self._waiters.append(current) scheduler.state.mainloop.switch() if timeout is not None: if not scheduler._remove_timer(waketime, current): scheduler.state.awoken_from_events.discard(current) if current in self._waiters: self._waiters.remove(current) return True return False
def function[wait, parameter[self, timeout]]: constant[pause the current coroutine until this event is set .. note:: this method will block the current coroutine if :meth:`set` has not been called. :param timeout: the maximum amount of time to block in seconds. the default of ``None`` allows indefinite blocking. :type timeout: number or None :returns: ``True`` if a timeout was provided and was hit, otherwise ``False`` ] if name[self]._is_set begin[:] return[constant[False]] variable[current] assign[=] call[name[compat].getcurrent, parameter[]] variable[waketime] assign[=] <ast.IfExp object at 0x7da2046211b0> if compare[name[timeout] is_not constant[None]] begin[:] call[name[scheduler].schedule_at, parameter[name[waketime], name[current]]] call[name[self]._waiters.append, parameter[name[current]]] call[name[scheduler].state.mainloop.switch, parameter[]] if compare[name[timeout] is_not constant[None]] begin[:] if <ast.UnaryOp object at 0x7da2046220b0> begin[:] call[name[scheduler].state.awoken_from_events.discard, parameter[name[current]]] if compare[name[current] in name[self]._waiters] begin[:] call[name[self]._waiters.remove, parameter[name[current]]] return[constant[True]] return[constant[False]]
keyword[def] identifier[wait] ( identifier[self] , identifier[timeout] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[_is_set] : keyword[return] keyword[False] identifier[current] = identifier[compat] . identifier[getcurrent] () identifier[waketime] = keyword[None] keyword[if] identifier[timeout] keyword[is] keyword[None] keyword[else] identifier[time] . identifier[time] ()+ identifier[timeout] keyword[if] identifier[timeout] keyword[is] keyword[not] keyword[None] : identifier[scheduler] . identifier[schedule_at] ( identifier[waketime] , identifier[current] ) identifier[self] . identifier[_waiters] . identifier[append] ( identifier[current] ) identifier[scheduler] . identifier[state] . identifier[mainloop] . identifier[switch] () keyword[if] identifier[timeout] keyword[is] keyword[not] keyword[None] : keyword[if] keyword[not] identifier[scheduler] . identifier[_remove_timer] ( identifier[waketime] , identifier[current] ): identifier[scheduler] . identifier[state] . identifier[awoken_from_events] . identifier[discard] ( identifier[current] ) keyword[if] identifier[current] keyword[in] identifier[self] . identifier[_waiters] : identifier[self] . identifier[_waiters] . identifier[remove] ( identifier[current] ) keyword[return] keyword[True] keyword[return] keyword[False]
def wait(self, timeout=None): """pause the current coroutine until this event is set .. note:: this method will block the current coroutine if :meth:`set` has not been called. :param timeout: the maximum amount of time to block in seconds. the default of ``None`` allows indefinite blocking. :type timeout: number or None :returns: ``True`` if a timeout was provided and was hit, otherwise ``False`` """ if self._is_set: return False # depends on [control=['if'], data=[]] current = compat.getcurrent() # the waiting greenlet waketime = None if timeout is None else time.time() + timeout if timeout is not None: scheduler.schedule_at(waketime, current) # depends on [control=['if'], data=[]] self._waiters.append(current) scheduler.state.mainloop.switch() if timeout is not None: if not scheduler._remove_timer(waketime, current): scheduler.state.awoken_from_events.discard(current) if current in self._waiters: self._waiters.remove(current) # depends on [control=['if'], data=['current']] return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return False
def setLegendData(self, *args, **kwargs):
    """Set or generate the legend data for this canteen.

    All arguments are forwarded to :py:func:`.buildLegend`, with this
    canteen's ``legendKeyFunc`` supplied as the ``key`` keyword.
    """
    generated = buildLegend(*args, key=self.legendKeyFunc, **kwargs)
    self.legendData = generated
def function[setLegendData, parameter[self]]: constant[ Set or genernate the legend data from this canteen. Uses :py:func:`.buildLegend` for genernating ] name[self].legendData assign[=] call[name[buildLegend], parameter[<ast.Starred object at 0x7da1b1ec3070>]]
keyword[def] identifier[setLegendData] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[legendData] = identifier[buildLegend] (* identifier[args] , identifier[key] = identifier[self] . identifier[legendKeyFunc] ,** identifier[kwargs] )
def setLegendData(self, *args, **kwargs): """ Set or genernate the legend data from this canteen. Uses :py:func:`.buildLegend` for genernating """ self.legendData = buildLegend(*args, key=self.legendKeyFunc, **kwargs)
def transform(self, X):
    r"""Maps the input data through the transformer to correspondingly shaped output data array/list.

    Parameters
    ----------
    X : ndarray(T, n) or list of ndarray(T_i, n)
        The input data, where T is the number of time steps and n is the
        number of dimensions. If a list is provided, the number of time
        steps is allowed to vary, but the number of dimensions are required
        to be consistent.

    Returns
    -------
    Y : ndarray(T, d) or list of ndarray(T_i, d)
        The mapped data, where T is the number of time steps of the input
        data and d is the output dimension of this transformer. If called
        with a list of trajectories, Y will also be a corresponding list of
        trajectories.

    Raises
    ------
    TypeError
        If X is neither a 2-dimensional ndarray nor a list/tuple of arrays.
    """
    if isinstance(X, np.ndarray):
        # a single trajectory must be a matrix: (time steps, dimensions)
        if X.ndim != 2:
            raise TypeError('Input has the wrong shape: %s with %i'
                            ' dimensions. Expecting a matrix (2 dimensions)'
                            % (str(X.shape), X.ndim))
        return self._transform_array(X)
    if isinstance(X, (list, tuple)):
        # map each trajectory independently; their lengths may differ
        return [self._transform_array(x) for x in X]
    raise TypeError('Input has the wrong type: %s. '
                    'Either accepting numpy arrays of dimension 2 '
                    'or lists of such arrays' % str(type(X)))
def function[transform, parameter[self, X]]: constant[Maps the input data through the transformer to correspondingly shaped output data array/list. Parameters ---------- X : ndarray(T, n) or list of ndarray(T_i, n) The input data, where T is the number of time steps and n is the number of dimensions. If a list is provided, the number of time steps is allowed to vary, but the number of dimensions are required to be to be consistent. Returns ------- Y : ndarray(T, d) or list of ndarray(T_i, d) The mapped data, where T is the number of time steps of the input data and d is the output dimension of this transformer. If called with a list of trajectories, Y will also be a corresponding list of trajectories ] if call[name[isinstance], parameter[name[X], name[np].ndarray]] begin[:] if compare[name[X].ndim equal[==] constant[2]] begin[:] variable[mapped] assign[=] call[name[self]._transform_array, parameter[name[X]]] return[name[mapped]]
keyword[def] identifier[transform] ( identifier[self] , identifier[X] ): literal[string] keyword[if] identifier[isinstance] ( identifier[X] , identifier[np] . identifier[ndarray] ): keyword[if] identifier[X] . identifier[ndim] == literal[int] : identifier[mapped] = identifier[self] . identifier[_transform_array] ( identifier[X] ) keyword[return] identifier[mapped] keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] literal[string] %( identifier[str] ( identifier[X] . identifier[shape] ), identifier[X] . identifier[ndim] )) keyword[elif] identifier[isinstance] ( identifier[X] ,( identifier[list] , identifier[tuple] )): identifier[out] =[] keyword[for] identifier[x] keyword[in] identifier[X] : identifier[mapped] = identifier[self] . identifier[_transform_array] ( identifier[x] ) identifier[out] . identifier[append] ( identifier[mapped] ) keyword[return] identifier[out] keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] literal[string] literal[string] %( identifier[str] ( identifier[type] ( identifier[X] ))))
def transform(self, X): """Maps the input data through the transformer to correspondingly shaped output data array/list. Parameters ---------- X : ndarray(T, n) or list of ndarray(T_i, n) The input data, where T is the number of time steps and n is the number of dimensions. If a list is provided, the number of time steps is allowed to vary, but the number of dimensions are required to be to be consistent. Returns ------- Y : ndarray(T, d) or list of ndarray(T_i, d) The mapped data, where T is the number of time steps of the input data and d is the output dimension of this transformer. If called with a list of trajectories, Y will also be a corresponding list of trajectories """ if isinstance(X, np.ndarray): if X.ndim == 2: mapped = self._transform_array(X) return mapped # depends on [control=['if'], data=[]] else: raise TypeError('Input has the wrong shape: %s with %i dimensions. Expecting a matrix (2 dimensions)' % (str(X.shape), X.ndim)) # depends on [control=['if'], data=[]] elif isinstance(X, (list, tuple)): out = [] for x in X: mapped = self._transform_array(x) out.append(mapped) # depends on [control=['for'], data=['x']] return out # depends on [control=['if'], data=[]] else: raise TypeError('Input has the wrong type: %s . Either accepting numpy arrays of dimension 2 or lists of such arrays' % str(type(X)))
def id_pools(self):
    """Lazily create and return the IdPools API client.

    Returns:
        IdPools: client bound to this connection; cached after first use.
    """
    client = self.__id_pools
    if not client:
        # first access: build the client once and memoize it
        client = IdPools(self.__connection)
        self.__id_pools = client
    return client
def function[id_pools, parameter[self]]: constant[ Gets the IdPools API client. Returns: IdPools: ] if <ast.UnaryOp object at 0x7da20c76cf10> begin[:] name[self].__id_pools assign[=] call[name[IdPools], parameter[name[self].__connection]] return[name[self].__id_pools]
keyword[def] identifier[id_pools] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[__id_pools] : identifier[self] . identifier[__id_pools] = identifier[IdPools] ( identifier[self] . identifier[__connection] ) keyword[return] identifier[self] . identifier[__id_pools]
def id_pools(self): """ Gets the IdPools API client. Returns: IdPools: """ if not self.__id_pools: self.__id_pools = IdPools(self.__connection) # depends on [control=['if'], data=[]] return self.__id_pools
def expand_effect_repertoire(self, new_purview=None):
    """See |Subsystem.expand_repertoire()|."""
    # delegate to the subsystem, expanding this object's effect repertoire
    repertoire = self.effect.repertoire
    return self.subsystem.expand_effect_repertoire(repertoire, new_purview)
def function[expand_effect_repertoire, parameter[self, new_purview]]: constant[See |Subsystem.expand_repertoire()|.] return[call[name[self].subsystem.expand_effect_repertoire, parameter[name[self].effect.repertoire, name[new_purview]]]]
keyword[def] identifier[expand_effect_repertoire] ( identifier[self] , identifier[new_purview] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[subsystem] . identifier[expand_effect_repertoire] ( identifier[self] . identifier[effect] . identifier[repertoire] , identifier[new_purview] )
def expand_effect_repertoire(self, new_purview=None): """See |Subsystem.expand_repertoire()|.""" return self.subsystem.expand_effect_repertoire(self.effect.repertoire, new_purview)
def partial_dependence(self, term, X=None, width=None, quantiles=None,
                       meshgrid=False):
    """
    Computes the term functions for the GAM and possibly their confidence intervals.

    if both width=None and quantiles=None, then no confidence intervals are
    computed

    Parameters
    ----------
    term : int, optional
        Term for which to compute the partial dependence functions.

    X : array-like with input data, optional

        if `meshgrid=False`, then `X` should be an array-like of shape
        (n_samples, m_features).

        if `meshgrid=True`, then `X` should be a tuple containing an array
        for each feature in the term.

        if None, an equally spaced grid of points is generated.

    width : float on (0, 1), optional
        Width of the confidence interval.

    quantiles : array-like of floats on (0, 1), optional
        instead of specifying the prediciton width, one can specify the
        quantiles. so width=.95 is equivalent to quantiles=[.025, .975].
        if None, defaults to width.

    meshgrid : bool, whether to return and accept meshgrids.
        Useful for creating outputs that are suitable for 3D plotting.

        Note, for simple terms with no interactions, the output of this
        function will be the same for ``meshgrid=True`` and
        ``meshgrid=False``, but the inputs will need to be different.

    Returns
    -------
    pdeps : np.array of shape (n_samples,)
    conf_intervals : list of length len(term)
        containing np.arrays of shape (n_samples, 2 or len(quantiles))

    Raises
    ------
    ValueError :
        If the term requested is an intercept
        since it does not make sense to process the intercept term.

    See Also
    --------
    generate_X_grid : for help creating meshgrids.
    """
    if not self._is_fitted:
        raise AttributeError('GAM has not been fitted. Call fit first.')

    if not isinstance(term, int):
        raise ValueError('term must be an integer, but found term: {}'.format(term))

    # ensure term exists
    if (term >= len(self.terms)) or (term < -1):
        raise ValueError('Term {} out of range for model with {} terms'.format(term, len(self.terms)))

    # cant do Intercept
    if self.terms[term].isintercept:
        raise ValueError('cannot create grid for intercept term')

    # no X given: sample the term's feature space on an equally spaced grid
    if X is None:
        X = self.generate_X_grid(term=term, meshgrid=meshgrid)

    if meshgrid:
        if not isinstance(X, tuple):
            raise ValueError('X must be a tuple of grids if `meshgrid=True`, '
                             'but found X: {}'.format(X))
        # remember the grid shape so flat predictions can be folded back
        # into meshgrid form at the end
        shape = X[0].shape
        X = self._flatten_mesh(X, term=term)
        # NOTE(review): input validation (check_X) runs only on the meshgrid
        # path here -- confirm non-meshgrid X is validated upstream
        X = check_X(X, n_feats=self.statistics_['m_features'],
                    edge_knots=self.edge_knots_, dtypes=self.dtype,
                    features=self.feature, verbose=self.verbose)

    # evaluate only this term's contribution to the linear predictor
    modelmat = self._modelmat(X, term=term)
    pdep = self._linear_predictor(modelmat=modelmat, term=term)
    out = [pdep]

    compute_quantiles = (width is not None) or (quantiles is not None)
    if compute_quantiles:
        # xform=False: quantiles on the linear-predictor scale, untransformed
        conf_intervals = self._get_quantiles(X, width=width,
                                             quantiles=quantiles,
                                             modelmat=modelmat,
                                             lp=pdep,
                                             term=term,
                                             xform=False)

        out += [conf_intervals]

    if meshgrid:
        for i, array in enumerate(out):
            # add extra dimensions arising from multiple confidence intervals
            if array.ndim > 1:
                depth = array.shape[-1]
                shape += (depth,)
            out[i] = np.reshape(array, shape)

    if compute_quantiles:
        return out
    return out[0]
def function[partial_dependence, parameter[self, term, X, width, quantiles, meshgrid]]: constant[ Computes the term functions for the GAM and possibly their confidence intervals. if both width=None and quantiles=None, then no confidence intervals are computed Parameters ---------- term : int, optional Term for which to compute the partial dependence functions. X : array-like with input data, optional if `meshgrid=False`, then `X` should be an array-like of shape (n_samples, m_features). if `meshgrid=True`, then `X` should be a tuple containing an array for each feature in the term. if None, an equally spaced grid of points is generated. width : float on (0, 1), optional Width of the confidence interval. quantiles : array-like of floats on (0, 1), optional instead of specifying the prediciton width, one can specify the quantiles. so width=.95 is equivalent to quantiles=[.025, .975]. if None, defaults to width. meshgrid : bool, whether to return and accept meshgrids. Useful for creating outputs that are suitable for 3D plotting. Note, for simple terms with no interactions, the output of this function will be the same for ``meshgrid=True`` and ``meshgrid=False``, but the inputs will need to be different. Returns ------- pdeps : np.array of shape (n_samples,) conf_intervals : list of length len(term) containing np.arrays of shape (n_samples, 2 or len(quantiles)) Raises ------ ValueError : If the term requested is an intercept since it does not make sense to process the intercept term. See Also -------- generate_X_grid : for help creating meshgrids. 
] if <ast.UnaryOp object at 0x7da18c4ce770> begin[:] <ast.Raise object at 0x7da18c4cc9a0> if <ast.UnaryOp object at 0x7da18c4cf970> begin[:] <ast.Raise object at 0x7da18c4cf3d0> if <ast.BoolOp object at 0x7da18f58dc90> begin[:] <ast.Raise object at 0x7da18f58c220> if call[name[self].terms][name[term]].isintercept begin[:] <ast.Raise object at 0x7da18f58c7f0> if compare[name[X] is constant[None]] begin[:] variable[X] assign[=] call[name[self].generate_X_grid, parameter[]] if name[meshgrid] begin[:] if <ast.UnaryOp object at 0x7da18f58d900> begin[:] <ast.Raise object at 0x7da18f58d3c0> variable[shape] assign[=] call[name[X]][constant[0]].shape variable[X] assign[=] call[name[self]._flatten_mesh, parameter[name[X]]] variable[X] assign[=] call[name[check_X], parameter[name[X]]] variable[modelmat] assign[=] call[name[self]._modelmat, parameter[name[X]]] variable[pdep] assign[=] call[name[self]._linear_predictor, parameter[]] variable[out] assign[=] list[[<ast.Name object at 0x7da18f58d960>]] variable[compute_quantiles] assign[=] <ast.BoolOp object at 0x7da18f58d0f0> if name[compute_quantiles] begin[:] variable[conf_intervals] assign[=] call[name[self]._get_quantiles, parameter[name[X]]] <ast.AugAssign object at 0x7da18f58e260> if name[meshgrid] begin[:] for taget[tuple[[<ast.Name object at 0x7da18f58e020>, <ast.Name object at 0x7da18f58f250>]]] in starred[call[name[enumerate], parameter[name[out]]]] begin[:] if compare[name[array].ndim greater[>] constant[1]] begin[:] variable[depth] assign[=] call[name[array].shape][<ast.UnaryOp object at 0x7da18f58d7b0>] <ast.AugAssign object at 0x7da18f58c520> call[name[out]][name[i]] assign[=] call[name[np].reshape, parameter[name[array], name[shape]]] if name[compute_quantiles] begin[:] return[name[out]] return[call[name[out]][constant[0]]]
keyword[def] identifier[partial_dependence] ( identifier[self] , identifier[term] , identifier[X] = keyword[None] , identifier[width] = keyword[None] , identifier[quantiles] = keyword[None] , identifier[meshgrid] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_is_fitted] : keyword[raise] identifier[AttributeError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[term] , identifier[int] ): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[term] )) keyword[if] ( identifier[term] >= identifier[len] ( identifier[self] . identifier[terms] )) keyword[or] ( identifier[term] <- literal[int] ): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[term] , identifier[len] ( identifier[self] . identifier[terms] ))) keyword[if] identifier[self] . identifier[terms] [ identifier[term] ]. identifier[isintercept] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[X] keyword[is] keyword[None] : identifier[X] = identifier[self] . identifier[generate_X_grid] ( identifier[term] = identifier[term] , identifier[meshgrid] = identifier[meshgrid] ) keyword[if] identifier[meshgrid] : keyword[if] keyword[not] identifier[isinstance] ( identifier[X] , identifier[tuple] ): keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[X] )) identifier[shape] = identifier[X] [ literal[int] ]. identifier[shape] identifier[X] = identifier[self] . identifier[_flatten_mesh] ( identifier[X] , identifier[term] = identifier[term] ) identifier[X] = identifier[check_X] ( identifier[X] , identifier[n_feats] = identifier[self] . identifier[statistics_] [ literal[string] ], identifier[edge_knots] = identifier[self] . identifier[edge_knots_] , identifier[dtypes] = identifier[self] . identifier[dtype] , identifier[features] = identifier[self] . 
identifier[feature] , identifier[verbose] = identifier[self] . identifier[verbose] ) identifier[modelmat] = identifier[self] . identifier[_modelmat] ( identifier[X] , identifier[term] = identifier[term] ) identifier[pdep] = identifier[self] . identifier[_linear_predictor] ( identifier[modelmat] = identifier[modelmat] , identifier[term] = identifier[term] ) identifier[out] =[ identifier[pdep] ] identifier[compute_quantiles] =( identifier[width] keyword[is] keyword[not] keyword[None] ) keyword[or] ( identifier[quantiles] keyword[is] keyword[not] keyword[None] ) keyword[if] identifier[compute_quantiles] : identifier[conf_intervals] = identifier[self] . identifier[_get_quantiles] ( identifier[X] , identifier[width] = identifier[width] , identifier[quantiles] = identifier[quantiles] , identifier[modelmat] = identifier[modelmat] , identifier[lp] = identifier[pdep] , identifier[term] = identifier[term] , identifier[xform] = keyword[False] ) identifier[out] +=[ identifier[conf_intervals] ] keyword[if] identifier[meshgrid] : keyword[for] identifier[i] , identifier[array] keyword[in] identifier[enumerate] ( identifier[out] ): keyword[if] identifier[array] . identifier[ndim] > literal[int] : identifier[depth] = identifier[array] . identifier[shape] [- literal[int] ] identifier[shape] +=( identifier[depth] ,) identifier[out] [ identifier[i] ]= identifier[np] . identifier[reshape] ( identifier[array] , identifier[shape] ) keyword[if] identifier[compute_quantiles] : keyword[return] identifier[out] keyword[return] identifier[out] [ literal[int] ]
def partial_dependence(self, term, X=None, width=None, quantiles=None, meshgrid=False): """ Computes the term functions for the GAM and possibly their confidence intervals. if both width=None and quantiles=None, then no confidence intervals are computed Parameters ---------- term : int, optional Term for which to compute the partial dependence functions. X : array-like with input data, optional if `meshgrid=False`, then `X` should be an array-like of shape (n_samples, m_features). if `meshgrid=True`, then `X` should be a tuple containing an array for each feature in the term. if None, an equally spaced grid of points is generated. width : float on (0, 1), optional Width of the confidence interval. quantiles : array-like of floats on (0, 1), optional instead of specifying the prediciton width, one can specify the quantiles. so width=.95 is equivalent to quantiles=[.025, .975]. if None, defaults to width. meshgrid : bool, whether to return and accept meshgrids. Useful for creating outputs that are suitable for 3D plotting. Note, for simple terms with no interactions, the output of this function will be the same for ``meshgrid=True`` and ``meshgrid=False``, but the inputs will need to be different. Returns ------- pdeps : np.array of shape (n_samples,) conf_intervals : list of length len(term) containing np.arrays of shape (n_samples, 2 or len(quantiles)) Raises ------ ValueError : If the term requested is an intercept since it does not make sense to process the intercept term. See Also -------- generate_X_grid : for help creating meshgrids. """ if not self._is_fitted: raise AttributeError('GAM has not been fitted. 
Call fit first.') # depends on [control=['if'], data=[]] if not isinstance(term, int): raise ValueError('term must be an integer, but found term: {}'.format(term)) # depends on [control=['if'], data=[]] # ensure term exists if term >= len(self.terms) or term < -1: raise ValueError('Term {} out of range for model with {} terms'.format(term, len(self.terms))) # depends on [control=['if'], data=[]] # cant do Intercept if self.terms[term].isintercept: raise ValueError('cannot create grid for intercept term') # depends on [control=['if'], data=[]] if X is None: X = self.generate_X_grid(term=term, meshgrid=meshgrid) # depends on [control=['if'], data=['X']] if meshgrid: if not isinstance(X, tuple): raise ValueError('X must be a tuple of grids if `meshgrid=True`, but found X: {}'.format(X)) # depends on [control=['if'], data=[]] shape = X[0].shape X = self._flatten_mesh(X, term=term) X = check_X(X, n_feats=self.statistics_['m_features'], edge_knots=self.edge_knots_, dtypes=self.dtype, features=self.feature, verbose=self.verbose) # depends on [control=['if'], data=[]] modelmat = self._modelmat(X, term=term) pdep = self._linear_predictor(modelmat=modelmat, term=term) out = [pdep] compute_quantiles = width is not None or quantiles is not None if compute_quantiles: conf_intervals = self._get_quantiles(X, width=width, quantiles=quantiles, modelmat=modelmat, lp=pdep, term=term, xform=False) out += [conf_intervals] # depends on [control=['if'], data=[]] if meshgrid: for (i, array) in enumerate(out): # add extra dimensions arising from multiple confidence intervals if array.ndim > 1: depth = array.shape[-1] shape += (depth,) # depends on [control=['if'], data=[]] out[i] = np.reshape(array, shape) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] if compute_quantiles: return out # depends on [control=['if'], data=[]] return out[0]
def get_descriptor_defaults(self, api_info, hostname=None):
    """Gets a default configuration for a service.

    Args:
      api_info: _ApiInfo object for this service.
      hostname: string, Hostname of the API, to override the value set on the
        current service. Defaults to None.

    Returns:
      A dictionary with the default configuration.
    """
    # Prefer the live request's host/scheme; otherwise fall back to the
    # configured hostname and infer the scheme from the environment.
    if self.__request:
        hostname = self.__request.reconstruct_hostname()
        protocol = self.__request.url_scheme
    else:
        hostname = (hostname or util.get_app_hostname() or
                    api_info.hostname)
        on_localhost = hostname and hostname.startswith('localhost')
        protocol = ('http' if (on_localhost or
                               util.is_running_on_devserver()) else 'https')
    full_base_path = '{0}{1}/{2}/'.format(api_info.base_path,
                                          api_info.name,
                                          api_info.path_version)
    base_url = '{0}://{1}{2}'.format(protocol, hostname, full_base_path)
    root_url = '{0}://{1}{2}'.format(protocol, hostname, api_info.base_path)
    defaults = {
        'kind': 'discovery#restDescription',
        'discoveryVersion': 'v1',
        'id': '{0}:{1}'.format(api_info.name, api_info.path_version),
        'name': api_info.name,
        'version': api_info.api_version,
        'icons': {
            'x16': 'https://www.gstatic.com/images/branding/product/1x/googleg_16dp.png',
            'x32': 'https://www.gstatic.com/images/branding/product/1x/googleg_32dp.png'
        },
        'protocol': 'rest',
        'servicePath': '{0}/{1}/'.format(api_info.name, api_info.path_version),
        'batchPath': 'batch',
        'basePath': full_base_path,
        'rootUrl': root_url,
        'baseUrl': base_url,
        'description': 'This is an API',
    }
    # Optional metadata: copy each field only when the service supplied it.
    for attr_name, key in (('description', 'description'),
                           ('title', 'title'),
                           ('documentation', 'documentationLink'),
                           ('canonical_name', 'canonicalName')):
        value = getattr(api_info, attr_name)
        if value:
            defaults[key] = value
    return defaults
def function[get_descriptor_defaults, parameter[self, api_info, hostname]]: constant[Gets a default configuration for a service. Args: api_info: _ApiInfo object for this service. hostname: string, Hostname of the API, to override the value set on the current service. Defaults to None. Returns: A dictionary with the default configuration. ] if name[self].__request begin[:] variable[hostname] assign[=] call[name[self].__request.reconstruct_hostname, parameter[]] variable[protocol] assign[=] name[self].__request.url_scheme variable[full_base_path] assign[=] call[constant[{0}{1}/{2}/].format, parameter[name[api_info].base_path, name[api_info].name, name[api_info].path_version]] variable[base_url] assign[=] call[constant[{0}://{1}{2}].format, parameter[name[protocol], name[hostname], name[full_base_path]]] variable[root_url] assign[=] call[constant[{0}://{1}{2}].format, parameter[name[protocol], name[hostname], name[api_info].base_path]] variable[defaults] assign[=] dictionary[[<ast.Constant object at 0x7da1b0d57a60>, <ast.Constant object at 0x7da1b0d56a70>, <ast.Constant object at 0x7da1b0d576d0>, <ast.Constant object at 0x7da1b0d54d60>, <ast.Constant object at 0x7da1b0d541c0>, <ast.Constant object at 0x7da1b0d540a0>, <ast.Constant object at 0x7da1b0d55570>, <ast.Constant object at 0x7da1b0d548b0>, <ast.Constant object at 0x7da1b0d542e0>, <ast.Constant object at 0x7da1b0d54e20>, <ast.Constant object at 0x7da1b0d57a90>, <ast.Constant object at 0x7da1b0d54250>, <ast.Constant object at 0x7da1b0d54be0>], [<ast.Constant object at 0x7da1b0d55c60>, <ast.Constant object at 0x7da1b0d57100>, <ast.Call object at 0x7da1b0d54bb0>, <ast.Attribute object at 0x7da1b0d57a30>, <ast.Attribute object at 0x7da1b0d57f70>, <ast.Dict object at 0x7da1b0d54130>, <ast.Constant object at 0x7da1b0d57bb0>, <ast.Call object at 0x7da1b0d54c40>, <ast.Constant object at 0x7da1b0d54370>, <ast.Name object at 0x7da1b0d55e70>, <ast.Name object at 0x7da1b0d55540>, <ast.Name object at 0x7da1b0d555a0>, 
<ast.Constant object at 0x7da1b0d57760>]] if name[api_info].description begin[:] call[name[defaults]][constant[description]] assign[=] name[api_info].description if name[api_info].title begin[:] call[name[defaults]][constant[title]] assign[=] name[api_info].title if name[api_info].documentation begin[:] call[name[defaults]][constant[documentationLink]] assign[=] name[api_info].documentation if name[api_info].canonical_name begin[:] call[name[defaults]][constant[canonicalName]] assign[=] name[api_info].canonical_name return[name[defaults]]
keyword[def] identifier[get_descriptor_defaults] ( identifier[self] , identifier[api_info] , identifier[hostname] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[__request] : identifier[hostname] = identifier[self] . identifier[__request] . identifier[reconstruct_hostname] () identifier[protocol] = identifier[self] . identifier[__request] . identifier[url_scheme] keyword[else] : identifier[hostname] =( identifier[hostname] keyword[or] identifier[util] . identifier[get_app_hostname] () keyword[or] identifier[api_info] . identifier[hostname] ) identifier[protocol] = literal[string] keyword[if] (( identifier[hostname] keyword[and] identifier[hostname] . identifier[startswith] ( literal[string] )) keyword[or] identifier[util] . identifier[is_running_on_devserver] ()) keyword[else] literal[string] identifier[full_base_path] = literal[string] . identifier[format] ( identifier[api_info] . identifier[base_path] , identifier[api_info] . identifier[name] , identifier[api_info] . identifier[path_version] ) identifier[base_url] = literal[string] . identifier[format] ( identifier[protocol] , identifier[hostname] , identifier[full_base_path] ) identifier[root_url] = literal[string] . identifier[format] ( identifier[protocol] , identifier[hostname] , identifier[api_info] . identifier[base_path] ) identifier[defaults] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] . identifier[format] ( identifier[api_info] . identifier[name] , identifier[api_info] . identifier[path_version] ), literal[string] : identifier[api_info] . identifier[name] , literal[string] : identifier[api_info] . identifier[api_version] , literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] }, literal[string] : literal[string] , literal[string] : literal[string] . identifier[format] ( identifier[api_info] . identifier[name] , identifier[api_info] . 
identifier[path_version] ), literal[string] : literal[string] , literal[string] : identifier[full_base_path] , literal[string] : identifier[root_url] , literal[string] : identifier[base_url] , literal[string] : literal[string] , } keyword[if] identifier[api_info] . identifier[description] : identifier[defaults] [ literal[string] ]= identifier[api_info] . identifier[description] keyword[if] identifier[api_info] . identifier[title] : identifier[defaults] [ literal[string] ]= identifier[api_info] . identifier[title] keyword[if] identifier[api_info] . identifier[documentation] : identifier[defaults] [ literal[string] ]= identifier[api_info] . identifier[documentation] keyword[if] identifier[api_info] . identifier[canonical_name] : identifier[defaults] [ literal[string] ]= identifier[api_info] . identifier[canonical_name] keyword[return] identifier[defaults]
def get_descriptor_defaults(self, api_info, hostname=None): """Gets a default configuration for a service. Args: api_info: _ApiInfo object for this service. hostname: string, Hostname of the API, to override the value set on the current service. Defaults to None. Returns: A dictionary with the default configuration. """ if self.__request: hostname = self.__request.reconstruct_hostname() protocol = self.__request.url_scheme # depends on [control=['if'], data=[]] else: hostname = hostname or util.get_app_hostname() or api_info.hostname protocol = 'http' if hostname and hostname.startswith('localhost') or util.is_running_on_devserver() else 'https' full_base_path = '{0}{1}/{2}/'.format(api_info.base_path, api_info.name, api_info.path_version) base_url = '{0}://{1}{2}'.format(protocol, hostname, full_base_path) root_url = '{0}://{1}{2}'.format(protocol, hostname, api_info.base_path) defaults = {'kind': 'discovery#restDescription', 'discoveryVersion': 'v1', 'id': '{0}:{1}'.format(api_info.name, api_info.path_version), 'name': api_info.name, 'version': api_info.api_version, 'icons': {'x16': 'https://www.gstatic.com/images/branding/product/1x/googleg_16dp.png', 'x32': 'https://www.gstatic.com/images/branding/product/1x/googleg_32dp.png'}, 'protocol': 'rest', 'servicePath': '{0}/{1}/'.format(api_info.name, api_info.path_version), 'batchPath': 'batch', 'basePath': full_base_path, 'rootUrl': root_url, 'baseUrl': base_url, 'description': 'This is an API'} if api_info.description: defaults['description'] = api_info.description # depends on [control=['if'], data=[]] if api_info.title: defaults['title'] = api_info.title # depends on [control=['if'], data=[]] if api_info.documentation: defaults['documentationLink'] = api_info.documentation # depends on [control=['if'], data=[]] if api_info.canonical_name: defaults['canonicalName'] = api_info.canonical_name # depends on [control=['if'], data=[]] return defaults
def point_in_poly(p, poly):
    """Test whether a point lies inside a polygon (ray casting).

    Parameters
    ----------
    p: float
        Coordinates of the point
    poly: array_like of shape (N, 2)
        Polygon (`PolygonFilter.points`)

    Returns
    -------
    inside: bool
        `True`, if point is inside.

    Notes
    -----
    Points exactly on an edge follow the convention:

    - top or right edge counts as "inside"
    - bottom or left edge counts as "outside"
    """
    poly = np.array(poly)
    num_vertices = poly.shape[0]
    px, py = p
    inside = False
    # Cheap rejection: the point must fall inside the axis-aligned
    # bounding box before any edge is examined.
    xmin, xmax = poly[:, 0].min(), poly[:, 0].max()
    ymin, ymax = poly[:, 1].min(), poly[:, 1].max()
    if xmin < px <= xmax and ymin < py <= ymax:
        ax, ay = poly[0]
        for idx in range(num_vertices):
            # Next vertex, wrapping around so the final edge closes
            # the polygon back to vertex 0.
            bx, by = poly[(idx + 1) % num_vertices]
            # The horizontal ray from (px, py) can only cross this edge
            # if py lies within the edge's y-span (half-open, so shared
            # vertices are not double counted) and px is not strictly
            # right of both endpoints.
            if min(ay, by) < py <= max(ay, by) and px <= max(ax, bx):
                # For a vertical edge the crossing is certain; otherwise
                # compute the x-coordinate where the edge meets the ray.
                # by != ay is guaranteed by the y-span test above.
                if ax == bx or px <= (py - ay) * (bx - ax) / (by - ay) + ax:
                    # Each genuine crossing flips the parity.
                    inside = not inside
            # Advance: the current edge's end becomes the next start.
            ax, ay = bx, by
    return inside
def function[point_in_poly, parameter[p, poly]]: constant[Determine whether a point is within a polygon area Uses the ray casting algorithm. Parameters ---------- p: float Coordinates of the point poly: array_like of shape (N, 2) Polygon (`PolygonFilter.points`) Returns ------- inside: bool `True`, if point is inside. Notes ----- If `p` lies on a side of the polygon, it is defined as - "inside" if it is on the top or right - "outside" if it is on the lower or left ] variable[poly] assign[=] call[name[np].array, parameter[name[poly]]] variable[n] assign[=] call[name[poly].shape][constant[0]] variable[inside] assign[=] constant[False] <ast.Tuple object at 0x7da1b1833790> assign[=] name[p] if <ast.BoolOp object at 0x7da1b18324a0> begin[:] <ast.Tuple object at 0x7da1b1830dc0> assign[=] call[name[poly]][constant[0]] for taget[name[ii]] in starred[call[name[range], parameter[name[n]]]] begin[:] <ast.Tuple object at 0x7da1b18306a0> assign[=] call[name[poly]][binary_operation[binary_operation[name[ii] + constant[1]] <ast.Mod object at 0x7da2590d6920> name[n]]] if <ast.BoolOp object at 0x7da1b1833c40> begin[:] if <ast.BoolOp object at 0x7da1b18025f0> begin[:] variable[inside] assign[=] <ast.UnaryOp object at 0x7da1b1800190> <ast.Tuple object at 0x7da1b1800400> assign[=] tuple[[<ast.Name object at 0x7da1b18008e0>, <ast.Name object at 0x7da1b1802440>]] return[name[inside]]
keyword[def] identifier[point_in_poly] ( identifier[p] , identifier[poly] ): literal[string] identifier[poly] = identifier[np] . identifier[array] ( identifier[poly] ) identifier[n] = identifier[poly] . identifier[shape] [ literal[int] ] identifier[inside] = keyword[False] identifier[x] , identifier[y] = identifier[p] keyword[if] ( identifier[x] <= identifier[poly] [:, literal[int] ]. identifier[max] () keyword[and] identifier[x] > identifier[poly] [:, literal[int] ]. identifier[min] () keyword[and] identifier[y] <= identifier[poly] [:, literal[int] ]. identifier[max] () keyword[and] identifier[y] > identifier[poly] [:, literal[int] ]. identifier[min] ()): identifier[p1x] , identifier[p1y] = identifier[poly] [ literal[int] ] keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[n] ): identifier[p2x] , identifier[p2y] = identifier[poly] [( identifier[ii] + literal[int] )% identifier[n] ] keyword[if] ( identifier[y] > identifier[min] ( identifier[p1y] , identifier[p2y] ) keyword[and] identifier[y] <= identifier[max] ( identifier[p1y] , identifier[p2y] ) keyword[and] identifier[x] <= identifier[max] ( identifier[p1x] , identifier[p2x] )): keyword[if] identifier[p1x] == identifier[p2x] keyword[or] identifier[x] <=( identifier[y] - identifier[p1y] )*( identifier[p2x] - identifier[p1x] )/( identifier[p2y] - identifier[p1y] )+ identifier[p1x] : identifier[inside] = keyword[not] identifier[inside] identifier[p1x] , identifier[p1y] = identifier[p2x] , identifier[p2y] keyword[return] identifier[inside]
def point_in_poly(p, poly): """Determine whether a point is within a polygon area Uses the ray casting algorithm. Parameters ---------- p: float Coordinates of the point poly: array_like of shape (N, 2) Polygon (`PolygonFilter.points`) Returns ------- inside: bool `True`, if point is inside. Notes ----- If `p` lies on a side of the polygon, it is defined as - "inside" if it is on the top or right - "outside" if it is on the lower or left """ poly = np.array(poly) n = poly.shape[0] inside = False (x, y) = p # Coarse bounding box exclusion: if x <= poly[:, 0].max() and x > poly[:, 0].min() and (y <= poly[:, 1].max()) and (y > poly[:, 1].min()): # The point is within the coarse bounding box. (p1x, p1y) = poly[0] # point i in contour for ii in range(n): # also covers (n-1, 0) (circular) (p2x, p2y) = poly[(ii + 1) % n] # point ii+1 in contour (circular) # Edge-wise fine bounding-ray exclusion. # Determine whether point is in the current ray, # defined by the y-range of p1 and p2 and whether # it is left of p1 and p2. if y > min(p1y, p2y) and y <= max(p1y, p2y) and (x <= max(p1x, p2x)): # in y-range # left of p1 and p2 # Note that always p1y!=p2y due to the above test. # Only Compute the x-coordinate of the intersection # between line p1-p2 and the horizontal ray, # ((y-p1y)*(p2x-p1x)/(p2y-p1y) + p1x), # if x is not already known to be left of it # (p1x==p2x in combination with x<=max(p1x, p2x) above). if p1x == p2x or x <= (y - p1y) * (p2x - p1x) / (p2y - p1y) + p1x: # Toggle `inside` if the ray intersects # with the current edge. inside = not inside # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Move on to the next edge of the polygon. (p1x, p1y) = (p2x, p2y) # depends on [control=['for'], data=['ii']] # depends on [control=['if'], data=[]] return inside
def _do_update_packet(self, packet, ip, port):
    """
    React to update packet - people/person on a device have changed

    :param packet: Packet from client with changes
    :type packet: paps.si.app.message.APPUpdateMessage
    :param ip: Client ip address
    :type ip: unicode
    :param port: Client port
    :type port: int
    :rtype: None
    """
    self.debug("()")
    device_id = packet.header.device_id
    if device_id <= Id.SERVER:
        self.error("ProtocolViolation: Invalid device id")
        return
    client = self._clients.get(device_id, None)
    if not client:
        self.error("ProtocolViolation: Client is not registered")
        return
    # A client is pinned to the ip:port it registered from; a mismatch
    # means a different endpoint is impersonating the device id.
    key = u"{}:{}".format(ip, port)
    if client['key'] != key:
        self.error(
            u"ProtocolViolation: Client key ({}) has changed: {}".format(
                client['key'], key
            )
        )
        return

    # Packet info seems ok
    try:
        people = packet.people()
    except ProtocolViolation:
        self.exception("Failed to decode people from packet")
        return

    # Verify same number of people in update as registered to client
    # (APP specific)
    if len(people) != len(client['people']):
        self.error("ProtocolViolation: Incorrect number of people updated")
        # FIX: previously execution fell through after this error; if the
        # update contained more people than are registered, the index loop
        # below would raise IndexError. Abort instead.
        return

    changed = []
    # Add ids to all people
    # Assumes same order here as on the client (e.g from the join())
    for index, person in enumerate(people):
        old = client['people'][index]
        person.id = old.id
        if person != old:
            old.sitting = person.sitting
            # Maybe sent person to protect access to local saved state
            changed.append(old)
    if changed:
        # Only update if there is really a change
        try:
            self.changer.on_person_update(changed)
        except Exception:
            # Narrowed from a bare except so that e.g. KeyboardInterrupt
            # is not swallowed; the failure is still logged with traceback.
            self.exception("Failed to notify people update")
            return
    else:
        self.debug("No people updated")
def function[_do_update_packet, parameter[self, packet, ip, port]]: constant[ React to update packet - people/person on a device have changed :param packet: Packet from client with changes :type packet: paps.si.app.message.APPUpdateMessage :param ip: Client ip address :type ip: unicode :param port: Client port :type port: int :rtype: None ] call[name[self].debug, parameter[constant[()]]] variable[device_id] assign[=] name[packet].header.device_id if compare[name[device_id] less_or_equal[<=] name[Id].SERVER] begin[:] call[name[self].error, parameter[constant[ProtocolViolation: Invalid device id]]] return[None] variable[client] assign[=] call[name[self]._clients.get, parameter[name[device_id], constant[None]]] if <ast.UnaryOp object at 0x7da1b162ae30> begin[:] call[name[self].error, parameter[constant[ProtocolViolation: Client is not registered]]] return[None] variable[key] assign[=] call[constant[{}:{}].format, parameter[name[ip], name[port]]] if compare[call[name[client]][constant[key]] not_equal[!=] name[key]] begin[:] call[name[self].error, parameter[call[constant[ProtocolViolation: Client key ({}) has changed: {}].format, parameter[call[name[client]][constant[key]], name[key]]]]] return[None] <ast.Try object at 0x7da1b1602620> if compare[call[name[len], parameter[name[people]]] not_equal[!=] call[name[len], parameter[call[name[client]][constant[people]]]]] begin[:] call[name[self].error, parameter[constant[ProtocolViolation: Incorrect number of people updated]]] variable[changed] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b1601ab0>, <ast.Name object at 0x7da1b1600fd0>]]] in starred[call[name[enumerate], parameter[name[people]]]] begin[:] variable[old] assign[=] call[call[name[client]][constant[people]]][name[index]] name[person].id assign[=] name[old].id if compare[name[person] not_equal[!=] name[old]] begin[:] name[old].sitting assign[=] name[person].sitting call[name[changed].append, parameter[name[old]]] if name[changed] begin[:] <ast.Try 
object at 0x7da1b1602c20>
keyword[def] identifier[_do_update_packet] ( identifier[self] , identifier[packet] , identifier[ip] , identifier[port] ): literal[string] identifier[self] . identifier[debug] ( literal[string] ) identifier[device_id] = identifier[packet] . identifier[header] . identifier[device_id] keyword[if] identifier[device_id] <= identifier[Id] . identifier[SERVER] : identifier[self] . identifier[error] ( literal[string] ) keyword[return] identifier[client] = identifier[self] . identifier[_clients] . identifier[get] ( identifier[device_id] , keyword[None] ) keyword[if] keyword[not] identifier[client] : identifier[self] . identifier[error] ( literal[string] ) keyword[return] identifier[key] = literal[string] . identifier[format] ( identifier[ip] , identifier[port] ) keyword[if] identifier[client] [ literal[string] ]!= identifier[key] : identifier[self] . identifier[error] ( literal[string] . identifier[format] ( identifier[client] [ literal[string] ], identifier[key] ) ) keyword[return] keyword[try] : identifier[people] = identifier[packet] . identifier[people] () keyword[except] identifier[ProtocolViolation] : identifier[self] . identifier[exception] ( literal[string] ) keyword[return] keyword[if] identifier[len] ( identifier[people] )!= identifier[len] ( identifier[client] [ literal[string] ]): identifier[self] . identifier[error] ( literal[string] ) identifier[changed] =[] keyword[for] identifier[index] , identifier[person] keyword[in] identifier[enumerate] ( identifier[people] ): identifier[old] = identifier[client] [ literal[string] ][ identifier[index] ] identifier[person] . identifier[id] = identifier[old] . identifier[id] keyword[if] identifier[person] != identifier[old] : identifier[old] . identifier[sitting] = identifier[person] . identifier[sitting] identifier[changed] . identifier[append] ( identifier[old] ) keyword[if] identifier[changed] : keyword[try] : identifier[self] . identifier[changer] . 
identifier[on_person_update] ( identifier[changed] ) keyword[except] : identifier[self] . identifier[exception] ( literal[string] ) keyword[return] keyword[else] : identifier[self] . identifier[debug] ( literal[string] )
def _do_update_packet(self, packet, ip, port): """ React to update packet - people/person on a device have changed :param packet: Packet from client with changes :type packet: paps.si.app.message.APPUpdateMessage :param ip: Client ip address :type ip: unicode :param port: Client port :type port: int :rtype: None """ self.debug('()') device_id = packet.header.device_id if device_id <= Id.SERVER: self.error('ProtocolViolation: Invalid device id') return # depends on [control=['if'], data=[]] client = self._clients.get(device_id, None) if not client: self.error('ProtocolViolation: Client is not registered') return # depends on [control=['if'], data=[]] key = u'{}:{}'.format(ip, port) if client['key'] != key: self.error(u'ProtocolViolation: Client key ({}) has changed: {}'.format(client['key'], key)) return # depends on [control=['if'], data=['key']] # Packet info seems ok try: people = packet.people() # depends on [control=['try'], data=[]] except ProtocolViolation: self.exception('Failed to decode people from packet') return # depends on [control=['except'], data=[]] # Verify same number of people in update as registered to client # (APP specific) if len(people) != len(client['people']): self.error('ProtocolViolation: Incorrect number of people updated') # depends on [control=['if'], data=[]] changed = [] # Add ids to all people # Assumes same order here as on the client (e.g from the join()) for (index, person) in enumerate(people): old = client['people'][index] person.id = old.id if person != old: old.sitting = person.sitting # Maybe sent person to protect access to local saved state changed.append(old) # depends on [control=['if'], data=['person', 'old']] # depends on [control=['for'], data=[]] if changed: # Only update if there is really a change try: self.changer.on_person_update(changed) # depends on [control=['try'], data=[]] except: self.exception('Failed to notify people update') return # depends on [control=['except'], data=[]] # depends on [control=['if'], 
data=[]] else: self.debug('No people updated')
def rank_alternation(edm_missing, rank, niter=50, print_out=False, edm_true=None):
    """Complete and denoise an EDM by alternating rank and structure projections.

    Each iteration first projects the current estimate onto the set of
    rank-`rank` matrices, then restores the measured entries and the EDM
    structure (zero diagonal, non-negativity, symmetry).

    :param edm_missing: EDM with 0 where no measurement was taken.
    :param rank: expected rank of complete EDM.
    :param niter: maximum number of iterations.
    :param print_out: unused; kept for interface compatibility.
    :param edm_true: if given, the EDM error is tracked per iteration.

    :return: Completed matrix and list of errors (empty if no true edm is
        given). The matrix is of the correct structure, but might not have
        the right measured entries.
    """
    from pylocus.basics import low_rank_approximation

    errs = []
    num_points = edm_missing.shape[0]
    known_mask = edm_missing > 0
    diag_idx = range(num_points)

    # Initialize the unknown entries with the mean of the measured ones.
    edm_complete = edm_missing.copy()
    edm_complete[edm_complete == 0] = np.mean(edm_complete[known_mask])

    for _ in range(niter):
        # (1) impose the target rank
        edm_complete = low_rank_approximation(edm_complete, rank)
        # (2) restore the measured entries
        edm_complete[known_mask] = edm_missing[known_mask]
        # (3) impose EDM structure: zero diagonal, non-negative, symmetric
        edm_complete[diag_idx, diag_idx] = 0.0
        edm_complete[edm_complete < 0] = 0.0
        edm_complete = 0.5 * (edm_complete + edm_complete.T)
        if edm_true is not None:
            errs.append(np.linalg.norm(edm_complete - edm_true))
    return edm_complete, errs
def function[rank_alternation, parameter[edm_missing, rank, niter, print_out, edm_true]]: constant[Complete and denoise EDM using rank alternation. Iteratively impose rank and strucutre to complete marix entries :param edm_missing: EDM with 0 where no measurement was taken. :param rank: expected rank of complete EDM. :param niter: maximum number of iterations. :param edm: if given, the relative EDM error is tracked. :return: Completed matrix and array of errors (empty if no true edm is given). The matrix is of the correct structure, but might not have the right measured entries. ] from relative_module[pylocus.basics] import module[low_rank_approximation] variable[errs] assign[=] list[[]] variable[N] assign[=] call[name[edm_missing].shape][constant[0]] variable[edm_complete] assign[=] call[name[edm_missing].copy, parameter[]] call[name[edm_complete]][compare[name[edm_complete] equal[==] constant[0]]] assign[=] call[name[np].mean, parameter[call[name[edm_complete]][compare[name[edm_complete] greater[>] constant[0]]]]] for taget[name[i]] in starred[call[name[range], parameter[name[niter]]]] begin[:] variable[edm_complete] assign[=] call[name[low_rank_approximation], parameter[name[edm_complete], name[rank]]] call[name[edm_complete]][compare[name[edm_missing] greater[>] constant[0]]] assign[=] call[name[edm_missing]][compare[name[edm_missing] greater[>] constant[0]]] call[name[edm_complete]][tuple[[<ast.Call object at 0x7da1b1a3c9d0>, <ast.Call object at 0x7da1b1a3ebc0>]]] assign[=] constant[0.0] call[name[edm_complete]][compare[name[edm_complete] less[<] constant[0]]] assign[=] constant[0.0] variable[edm_complete] assign[=] binary_operation[constant[0.5] * binary_operation[name[edm_complete] + name[edm_complete].T]] if compare[name[edm_true] is_not constant[None]] begin[:] variable[err] assign[=] call[name[np].linalg.norm, parameter[binary_operation[name[edm_complete] - name[edm_true]]]] call[name[errs].append, parameter[name[err]]] return[tuple[[<ast.Name object at 
0x7da1b1a3d390>, <ast.Name object at 0x7da1b1a3cca0>]]]
keyword[def] identifier[rank_alternation] ( identifier[edm_missing] , identifier[rank] , identifier[niter] = literal[int] , identifier[print_out] = keyword[False] , identifier[edm_true] = keyword[None] ): literal[string] keyword[from] identifier[pylocus] . identifier[basics] keyword[import] identifier[low_rank_approximation] identifier[errs] =[] identifier[N] = identifier[edm_missing] . identifier[shape] [ literal[int] ] identifier[edm_complete] = identifier[edm_missing] . identifier[copy] () identifier[edm_complete] [ identifier[edm_complete] == literal[int] ]= identifier[np] . identifier[mean] ( identifier[edm_complete] [ identifier[edm_complete] > literal[int] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[niter] ): identifier[edm_complete] = identifier[low_rank_approximation] ( identifier[edm_complete] , identifier[rank] ) identifier[edm_complete] [ identifier[edm_missing] > literal[int] ]= identifier[edm_missing] [ identifier[edm_missing] > literal[int] ] identifier[edm_complete] [ identifier[range] ( identifier[N] ), identifier[range] ( identifier[N] )]= literal[int] identifier[edm_complete] [ identifier[edm_complete] < literal[int] ]= literal[int] identifier[edm_complete] = literal[int] *( identifier[edm_complete] + identifier[edm_complete] . identifier[T] ) keyword[if] identifier[edm_true] keyword[is] keyword[not] keyword[None] : identifier[err] = identifier[np] . identifier[linalg] . identifier[norm] ( identifier[edm_complete] - identifier[edm_true] ) identifier[errs] . identifier[append] ( identifier[err] ) keyword[return] identifier[edm_complete] , identifier[errs]
def rank_alternation(edm_missing, rank, niter=50, print_out=False, edm_true=None): """Complete and denoise EDM using rank alternation. Iteratively impose rank and strucutre to complete marix entries :param edm_missing: EDM with 0 where no measurement was taken. :param rank: expected rank of complete EDM. :param niter: maximum number of iterations. :param edm: if given, the relative EDM error is tracked. :return: Completed matrix and array of errors (empty if no true edm is given). The matrix is of the correct structure, but might not have the right measured entries. """ from pylocus.basics import low_rank_approximation errs = [] N = edm_missing.shape[0] edm_complete = edm_missing.copy() edm_complete[edm_complete == 0] = np.mean(edm_complete[edm_complete > 0]) for i in range(niter): # impose matrix rank edm_complete = low_rank_approximation(edm_complete, rank) # impose known entries edm_complete[edm_missing > 0] = edm_missing[edm_missing > 0] # impose matrix structure edm_complete[range(N), range(N)] = 0.0 edm_complete[edm_complete < 0] = 0.0 edm_complete = 0.5 * (edm_complete + edm_complete.T) if edm_true is not None: err = np.linalg.norm(edm_complete - edm_true) errs.append(err) # depends on [control=['if'], data=['edm_true']] # depends on [control=['for'], data=[]] return (edm_complete, errs)
def truncate(self, distance):
    """
    Return a truncated version of the path.

    Only one vertex (at the endpoint) will be added: the path is cut at
    arc length `distance`, and unless the cut falls (within tolerance) on
    an existing vertex, a new endpoint is interpolated on the cut segment.
    """
    # Index of the first vertex whose cumulative arc length reaches
    # `distance` (assumes self._cum_norm is the per-vertex cumulative
    # segment length of self._points -- TODO confirm against the class).
    position = np.searchsorted(self._cum_norm, distance)
    # Arc length remaining past the previous vertex.
    # NOTE(review): if position == 0, `position - 1` wraps to the last
    # element; presumably callers guarantee distance exceeds the first
    # cumulative value -- verify.
    offset = distance - self._cum_norm[position - 1]
    if offset < constants.tol_path.merge:
        # The cut lands on (or negligibly past) an existing vertex:
        # just slice, no new endpoint needed.
        truncated = self._points[:position + 1]
    else:
        # Unit direction of the segment being cut
        # (vertices position..position+1).
        vector = unitize(np.diff(self._points[np.arange(2) + position], axis=0).reshape(-1))
        # Scale to the leftover arc length and place the new endpoint.
        vector *= offset
        endpoint = self._points[position] + vector
        truncated = np.vstack((self._points[:position + 1], endpoint))
    # Sanity check: total length of the truncated path matches the
    # requested distance within merge tolerance.
    assert (np.linalg.norm(np.diff(truncated, axis=0), axis=1).sum() - distance) < constants.tol_path.merge
    return truncated
def function[truncate, parameter[self, distance]]: constant[ Return a truncated version of the path. Only one vertex (at the endpoint) will be added. ] variable[position] assign[=] call[name[np].searchsorted, parameter[name[self]._cum_norm, name[distance]]] variable[offset] assign[=] binary_operation[name[distance] - call[name[self]._cum_norm][binary_operation[name[position] - constant[1]]]] if compare[name[offset] less[<] name[constants].tol_path.merge] begin[:] variable[truncated] assign[=] call[name[self]._points][<ast.Slice object at 0x7da20c76f910>] assert[compare[binary_operation[call[call[name[np].linalg.norm, parameter[call[name[np].diff, parameter[name[truncated]]]]].sum, parameter[]] - name[distance]] less[<] name[constants].tol_path.merge]] return[name[truncated]]
keyword[def] identifier[truncate] ( identifier[self] , identifier[distance] ): literal[string] identifier[position] = identifier[np] . identifier[searchsorted] ( identifier[self] . identifier[_cum_norm] , identifier[distance] ) identifier[offset] = identifier[distance] - identifier[self] . identifier[_cum_norm] [ identifier[position] - literal[int] ] keyword[if] identifier[offset] < identifier[constants] . identifier[tol_path] . identifier[merge] : identifier[truncated] = identifier[self] . identifier[_points] [: identifier[position] + literal[int] ] keyword[else] : identifier[vector] = identifier[unitize] ( identifier[np] . identifier[diff] ( identifier[self] . identifier[_points] [ identifier[np] . identifier[arange] ( literal[int] )+ identifier[position] ], identifier[axis] = literal[int] ). identifier[reshape] (- literal[int] )) identifier[vector] *= identifier[offset] identifier[endpoint] = identifier[self] . identifier[_points] [ identifier[position] ]+ identifier[vector] identifier[truncated] = identifier[np] . identifier[vstack] (( identifier[self] . identifier[_points] [: identifier[position] + literal[int] ], identifier[endpoint] )) keyword[assert] ( identifier[np] . identifier[linalg] . identifier[norm] ( identifier[np] . identifier[diff] ( identifier[truncated] , identifier[axis] = literal[int] ), identifier[axis] = literal[int] ). identifier[sum] ()- identifier[distance] )< identifier[constants] . identifier[tol_path] . identifier[merge] keyword[return] identifier[truncated]
def truncate(self, distance): """ Return a truncated version of the path. Only one vertex (at the endpoint) will be added. """ position = np.searchsorted(self._cum_norm, distance) offset = distance - self._cum_norm[position - 1] if offset < constants.tol_path.merge: truncated = self._points[:position + 1] # depends on [control=['if'], data=[]] else: vector = unitize(np.diff(self._points[np.arange(2) + position], axis=0).reshape(-1)) vector *= offset endpoint = self._points[position] + vector truncated = np.vstack((self._points[:position + 1], endpoint)) assert np.linalg.norm(np.diff(truncated, axis=0), axis=1).sum() - distance < constants.tol_path.merge return truncated
def GetEntries(self, cut=None, weighted_cut=None, weighted=False):
    """
    Get the number of (weighted) entries in the Tree

    Parameters
    ----------

    cut : str or rootpy.tree.cut.Cut, optional (default=None)
        Only entries passing this cut will be included in the count

    weighted_cut : str or rootpy.tree.cut.Cut, optional (default=None)
        Apply a weighted selection and determine the
        weighted number of entries.

    weighted : bool, optional (default=False)
        Multiply the number of (weighted) entries by the Tree weight.
    """
    if weighted_cut:
        # Count weighted entries by drawing a trivially-true expression
        # into a 1-bin histogram; the integral is then the weighted sum.
        hist = Hist(1, -1, 2)
        branch = self.GetListOfBranches()[0].GetName()
        # Temporarily neutralize the tree weight so only the cut weight
        # contributes; restored below.
        weight = self.GetWeight()
        self.SetWeight(1)
        # NOTE(review): `weighted_cut * cut` presumably relies on Cut
        # arithmetic handling cut=None gracefully -- confirm.
        self.Draw('{0}=={1}>>{2}'.format(branch, branch, hist.GetName()),
                  weighted_cut * cut)
        self.SetWeight(weight)
        entries = hist.Integral()
    elif cut:
        # Unweighted count of entries passing the cut.
        entries = super(BaseTree, self).GetEntries(str(cut))
    else:
        # Plain entry count.
        entries = super(BaseTree, self).GetEntries()
    if weighted:
        entries *= self.GetWeight()
    return entries
def function[GetEntries, parameter[self, cut, weighted_cut, weighted]]: constant[ Get the number of (weighted) entries in the Tree Parameters ---------- cut : str or rootpy.tree.cut.Cut, optional (default=None) Only entries passing this cut will be included in the count weighted_cut : str or rootpy.tree.cut.Cut, optional (default=None) Apply a weighted selection and determine the weighted number of entries. weighted : bool, optional (default=False) Multiply the number of (weighted) entries by the Tree weight. ] if name[weighted_cut] begin[:] variable[hist] assign[=] call[name[Hist], parameter[constant[1], <ast.UnaryOp object at 0x7da1b11be6e0>, constant[2]]] variable[branch] assign[=] call[call[call[name[self].GetListOfBranches, parameter[]]][constant[0]].GetName, parameter[]] variable[weight] assign[=] call[name[self].GetWeight, parameter[]] call[name[self].SetWeight, parameter[constant[1]]] call[name[self].Draw, parameter[call[constant[{0}=={1}>>{2}].format, parameter[name[branch], name[branch], call[name[hist].GetName, parameter[]]]], binary_operation[name[weighted_cut] * name[cut]]]] call[name[self].SetWeight, parameter[name[weight]]] variable[entries] assign[=] call[name[hist].Integral, parameter[]] if name[weighted] begin[:] <ast.AugAssign object at 0x7da1b11bc130> return[name[entries]]
keyword[def] identifier[GetEntries] ( identifier[self] , identifier[cut] = keyword[None] , identifier[weighted_cut] = keyword[None] , identifier[weighted] = keyword[False] ): literal[string] keyword[if] identifier[weighted_cut] : identifier[hist] = identifier[Hist] ( literal[int] ,- literal[int] , literal[int] ) identifier[branch] = identifier[self] . identifier[GetListOfBranches] ()[ literal[int] ]. identifier[GetName] () identifier[weight] = identifier[self] . identifier[GetWeight] () identifier[self] . identifier[SetWeight] ( literal[int] ) identifier[self] . identifier[Draw] ( literal[string] . identifier[format] ( identifier[branch] , identifier[branch] , identifier[hist] . identifier[GetName] ()), identifier[weighted_cut] * identifier[cut] ) identifier[self] . identifier[SetWeight] ( identifier[weight] ) identifier[entries] = identifier[hist] . identifier[Integral] () keyword[elif] identifier[cut] : identifier[entries] = identifier[super] ( identifier[BaseTree] , identifier[self] ). identifier[GetEntries] ( identifier[str] ( identifier[cut] )) keyword[else] : identifier[entries] = identifier[super] ( identifier[BaseTree] , identifier[self] ). identifier[GetEntries] () keyword[if] identifier[weighted] : identifier[entries] *= identifier[self] . identifier[GetWeight] () keyword[return] identifier[entries]
def GetEntries(self, cut=None, weighted_cut=None, weighted=False): """ Get the number of (weighted) entries in the Tree Parameters ---------- cut : str or rootpy.tree.cut.Cut, optional (default=None) Only entries passing this cut will be included in the count weighted_cut : str or rootpy.tree.cut.Cut, optional (default=None) Apply a weighted selection and determine the weighted number of entries. weighted : bool, optional (default=False) Multiply the number of (weighted) entries by the Tree weight. """ if weighted_cut: hist = Hist(1, -1, 2) branch = self.GetListOfBranches()[0].GetName() weight = self.GetWeight() self.SetWeight(1) self.Draw('{0}=={1}>>{2}'.format(branch, branch, hist.GetName()), weighted_cut * cut) self.SetWeight(weight) entries = hist.Integral() # depends on [control=['if'], data=[]] elif cut: entries = super(BaseTree, self).GetEntries(str(cut)) # depends on [control=['if'], data=[]] else: entries = super(BaseTree, self).GetEntries() if weighted: entries *= self.GetWeight() # depends on [control=['if'], data=[]] return entries
def _id_map(minion_id, dns_name):
    '''
    Maintain a relationship between a minion and a dns name
    '''
    cache_bank = 'digicert/minions'
    minion_cache = salt.cache.Cache(__opts__, syspaths.CACHE_DIR)

    # Fetch the dns names already recorded for this minion; anything
    # other than a list (e.g. a cache miss) is treated as empty.
    known_names = minion_cache.fetch(cache_bank, minion_id)
    if not isinstance(known_names, list):
        known_names = []

    if dns_name not in known_names:
        known_names.append(dns_name)

    # Write back unconditionally, matching the original behavior even
    # when nothing was added.
    minion_cache.store(cache_bank, minion_id, known_names)
def function[_id_map, parameter[minion_id, dns_name]]: constant[ Maintain a relationship between a minion and a dns name ] variable[bank] assign[=] constant[digicert/minions] variable[cache] assign[=] call[name[salt].cache.Cache, parameter[name[__opts__], name[syspaths].CACHE_DIR]] variable[dns_names] assign[=] call[name[cache].fetch, parameter[name[bank], name[minion_id]]] if <ast.UnaryOp object at 0x7da18dc98a60> begin[:] variable[dns_names] assign[=] list[[]] if compare[name[dns_name] <ast.NotIn object at 0x7da2590d7190> name[dns_names]] begin[:] call[name[dns_names].append, parameter[name[dns_name]]] call[name[cache].store, parameter[name[bank], name[minion_id], name[dns_names]]]
keyword[def] identifier[_id_map] ( identifier[minion_id] , identifier[dns_name] ): literal[string] identifier[bank] = literal[string] identifier[cache] = identifier[salt] . identifier[cache] . identifier[Cache] ( identifier[__opts__] , identifier[syspaths] . identifier[CACHE_DIR] ) identifier[dns_names] = identifier[cache] . identifier[fetch] ( identifier[bank] , identifier[minion_id] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[dns_names] , identifier[list] ): identifier[dns_names] =[] keyword[if] identifier[dns_name] keyword[not] keyword[in] identifier[dns_names] : identifier[dns_names] . identifier[append] ( identifier[dns_name] ) identifier[cache] . identifier[store] ( identifier[bank] , identifier[minion_id] , identifier[dns_names] )
def _id_map(minion_id, dns_name): """ Maintain a relationship between a minion and a dns name """ bank = 'digicert/minions' cache = salt.cache.Cache(__opts__, syspaths.CACHE_DIR) dns_names = cache.fetch(bank, minion_id) if not isinstance(dns_names, list): dns_names = [] # depends on [control=['if'], data=[]] if dns_name not in dns_names: dns_names.append(dns_name) # depends on [control=['if'], data=['dns_name', 'dns_names']] cache.store(bank, minion_id, dns_names)
def _heat_deploy(self, stack, stack_name, template_path, parameters,
                 environments, timeout):
    """Verify the Baremetal nodes are available and do a stack update

    Creates the Heat stack when `stack` is None, otherwise updates the
    existing stack, then blocks until the stack reaches a ready state.

    :param stack: existing stack object, or None to create a new stack
    :param stack_name: name of the Heat stack to create/update
    :param template_path: path to the Heat template file
    :param parameters: dict of Heat stack parameters
    :param environments: list of environment file paths to merge
    :param timeout: stack timeout in minutes, or falsy to use the default
    :raises Exception: when the stack create/update does not reach ready
    """
    self.log.debug("Processing environment files")
    # Merge all environment files into one environment dict plus the
    # raw file contents Heat needs to resolve references.
    env_files, env = (
        template_utils.process_multiple_environments_and_files(
            environments))

    self.log.debug("Getting template contents")
    template_files, template = template_utils.get_template_contents(
        template_path)

    # Heat wants one combined files mapping covering both the template
    # and the environments.
    files = dict(list(template_files.items()) + list(env_files.items()))

    clients = self.app.client_manager
    orchestration_client = clients.rdomanager_oscplugin.orchestration()

    self.log.debug("Deploying stack: %s", stack_name)
    self.log.debug("Deploying template: %s", template)
    self.log.debug("Deploying parameters: %s", parameters)
    self.log.debug("Deploying environment: %s", env)
    self.log.debug("Deploying files: %s", files)

    stack_args = {
        'stack_name': stack_name,
        'template': template,
        'parameters': parameters,
        'environment': env,
        'files': files
    }

    if timeout:
        stack_args['timeout_mins'] = timeout

    if stack is None:
        self.log.info("Performing Heat stack create")
        orchestration_client.stacks.create(**stack_args)
    else:
        self.log.info("Performing Heat stack update")
        # Make sure existing parameters for stack are reused
        stack_args['existing'] = 'true'
        orchestration_client.stacks.update(stack.id, **stack_args)

    # Block until the stack settles; returns falsy on failure states.
    create_result = utils.wait_for_stack_ready(
        orchestration_client, stack_name)
    if not create_result:
        if stack is None:
            raise Exception("Heat Stack create failed.")
        else:
            raise Exception("Heat Stack update failed.")
def function[_heat_deploy, parameter[self, stack, stack_name, template_path, parameters, environments, timeout]]: constant[Verify the Baremetal nodes are available and do a stack update] call[name[self].log.debug, parameter[constant[Processing environment files]]] <ast.Tuple object at 0x7da18bcc9c30> assign[=] call[name[template_utils].process_multiple_environments_and_files, parameter[name[environments]]] call[name[self].log.debug, parameter[constant[Getting template contents]]] <ast.Tuple object at 0x7da18bcca4a0> assign[=] call[name[template_utils].get_template_contents, parameter[name[template_path]]] variable[files] assign[=] call[name[dict], parameter[binary_operation[call[name[list], parameter[call[name[template_files].items, parameter[]]]] + call[name[list], parameter[call[name[env_files].items, parameter[]]]]]]] variable[clients] assign[=] name[self].app.client_manager variable[orchestration_client] assign[=] call[name[clients].rdomanager_oscplugin.orchestration, parameter[]] call[name[self].log.debug, parameter[constant[Deploying stack: %s], name[stack_name]]] call[name[self].log.debug, parameter[constant[Deploying template: %s], name[template]]] call[name[self].log.debug, parameter[constant[Deploying parameters: %s], name[parameters]]] call[name[self].log.debug, parameter[constant[Deploying environment: %s], name[env]]] call[name[self].log.debug, parameter[constant[Deploying files: %s], name[files]]] variable[stack_args] assign[=] dictionary[[<ast.Constant object at 0x7da18bcc8190>, <ast.Constant object at 0x7da18bcc9480>, <ast.Constant object at 0x7da18bcca740>, <ast.Constant object at 0x7da18bcc9660>, <ast.Constant object at 0x7da18bccaec0>], [<ast.Name object at 0x7da18bcc98a0>, <ast.Name object at 0x7da18bcc9930>, <ast.Name object at 0x7da18bcc9f60>, <ast.Name object at 0x7da18bcc9d20>, <ast.Name object at 0x7da18bcca380>]] if name[timeout] begin[:] call[name[stack_args]][constant[timeout_mins]] assign[=] name[timeout] if compare[name[stack] is 
constant[None]] begin[:] call[name[self].log.info, parameter[constant[Performing Heat stack create]]] call[name[orchestration_client].stacks.create, parameter[]] variable[create_result] assign[=] call[name[utils].wait_for_stack_ready, parameter[name[orchestration_client], name[stack_name]]] if <ast.UnaryOp object at 0x7da18bcca080> begin[:] if compare[name[stack] is constant[None]] begin[:] <ast.Raise object at 0x7da18bcc8af0>
keyword[def] identifier[_heat_deploy] ( identifier[self] , identifier[stack] , identifier[stack_name] , identifier[template_path] , identifier[parameters] , identifier[environments] , identifier[timeout] ): literal[string] identifier[self] . identifier[log] . identifier[debug] ( literal[string] ) identifier[env_files] , identifier[env] =( identifier[template_utils] . identifier[process_multiple_environments_and_files] ( identifier[environments] )) identifier[self] . identifier[log] . identifier[debug] ( literal[string] ) identifier[template_files] , identifier[template] = identifier[template_utils] . identifier[get_template_contents] ( identifier[template_path] ) identifier[files] = identifier[dict] ( identifier[list] ( identifier[template_files] . identifier[items] ())+ identifier[list] ( identifier[env_files] . identifier[items] ())) identifier[clients] = identifier[self] . identifier[app] . identifier[client_manager] identifier[orchestration_client] = identifier[clients] . identifier[rdomanager_oscplugin] . identifier[orchestration] () identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[stack_name] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[template] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[parameters] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[env] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[files] ) identifier[stack_args] ={ literal[string] : identifier[stack_name] , literal[string] : identifier[template] , literal[string] : identifier[parameters] , literal[string] : identifier[env] , literal[string] : identifier[files] } keyword[if] identifier[timeout] : identifier[stack_args] [ literal[string] ]= identifier[timeout] keyword[if] identifier[stack] keyword[is] keyword[None] : identifier[self] . identifier[log] . 
identifier[info] ( literal[string] ) identifier[orchestration_client] . identifier[stacks] . identifier[create] (** identifier[stack_args] ) keyword[else] : identifier[self] . identifier[log] . identifier[info] ( literal[string] ) identifier[stack_args] [ literal[string] ]= literal[string] identifier[orchestration_client] . identifier[stacks] . identifier[update] ( identifier[stack] . identifier[id] ,** identifier[stack_args] ) identifier[create_result] = identifier[utils] . identifier[wait_for_stack_ready] ( identifier[orchestration_client] , identifier[stack_name] ) keyword[if] keyword[not] identifier[create_result] : keyword[if] identifier[stack] keyword[is] keyword[None] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[else] : keyword[raise] identifier[Exception] ( literal[string] )
def _heat_deploy(self, stack, stack_name, template_path, parameters, environments, timeout): """Verify the Baremetal nodes are available and do a stack update""" self.log.debug('Processing environment files') (env_files, env) = template_utils.process_multiple_environments_and_files(environments) self.log.debug('Getting template contents') (template_files, template) = template_utils.get_template_contents(template_path) files = dict(list(template_files.items()) + list(env_files.items())) clients = self.app.client_manager orchestration_client = clients.rdomanager_oscplugin.orchestration() self.log.debug('Deploying stack: %s', stack_name) self.log.debug('Deploying template: %s', template) self.log.debug('Deploying parameters: %s', parameters) self.log.debug('Deploying environment: %s', env) self.log.debug('Deploying files: %s', files) stack_args = {'stack_name': stack_name, 'template': template, 'parameters': parameters, 'environment': env, 'files': files} if timeout: stack_args['timeout_mins'] = timeout # depends on [control=['if'], data=[]] if stack is None: self.log.info('Performing Heat stack create') orchestration_client.stacks.create(**stack_args) # depends on [control=['if'], data=[]] else: self.log.info('Performing Heat stack update') # Make sure existing parameters for stack are reused stack_args['existing'] = 'true' orchestration_client.stacks.update(stack.id, **stack_args) create_result = utils.wait_for_stack_ready(orchestration_client, stack_name) if not create_result: if stack is None: raise Exception('Heat Stack create failed.') # depends on [control=['if'], data=[]] else: raise Exception('Heat Stack update failed.') # depends on [control=['if'], data=[]]
def update(self, search=None, **kwargs): """Updates the server with any changes you've made to the current saved search along with any additional arguments you specify. :param `search`: The search query (optional). :type search: ``string`` :param `kwargs`: Additional arguments (optional). For a list of available parameters, see `Saved search parameters <http://dev.splunk.com/view/SP-CAAAEE5#savedsearchparams>`_ on Splunk Developer Portal. :type kwargs: ``dict`` :return: The :class:`SavedSearch`. """ # Updates to a saved search *require* that the search string be # passed, so we pass the current search string if a value wasn't # provided by the caller. if search is None: search = self.content.search Entity.update(self, search=search, **kwargs) return self
def function[update, parameter[self, search]]: constant[Updates the server with any changes you've made to the current saved search along with any additional arguments you specify. :param `search`: The search query (optional). :type search: ``string`` :param `kwargs`: Additional arguments (optional). For a list of available parameters, see `Saved search parameters <http://dev.splunk.com/view/SP-CAAAEE5#savedsearchparams>`_ on Splunk Developer Portal. :type kwargs: ``dict`` :return: The :class:`SavedSearch`. ] if compare[name[search] is constant[None]] begin[:] variable[search] assign[=] name[self].content.search call[name[Entity].update, parameter[name[self]]] return[name[self]]
keyword[def] identifier[update] ( identifier[self] , identifier[search] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[search] keyword[is] keyword[None] : identifier[search] = identifier[self] . identifier[content] . identifier[search] identifier[Entity] . identifier[update] ( identifier[self] , identifier[search] = identifier[search] ,** identifier[kwargs] ) keyword[return] identifier[self]
def update(self, search=None, **kwargs): """Updates the server with any changes you've made to the current saved search along with any additional arguments you specify. :param `search`: The search query (optional). :type search: ``string`` :param `kwargs`: Additional arguments (optional). For a list of available parameters, see `Saved search parameters <http://dev.splunk.com/view/SP-CAAAEE5#savedsearchparams>`_ on Splunk Developer Portal. :type kwargs: ``dict`` :return: The :class:`SavedSearch`. """ # Updates to a saved search *require* that the search string be # passed, so we pass the current search string if a value wasn't # provided by the caller. if search is None: search = self.content.search # depends on [control=['if'], data=['search']] Entity.update(self, search=search, **kwargs) return self
def matches(self, txt: str) -> bool: """Determine whether txt matches pattern :param txt: text to check :return: True if match """ # rval = ref.getText()[1:-1].encode('utf-8').decode('unicode-escape') if r'\\u' in self.pattern_re.pattern: txt = txt.encode('utf-8').decode('unicode-escape') match = self.pattern_re.match(txt) return match is not None and match.end() == len(txt)
def function[matches, parameter[self, txt]]: constant[Determine whether txt matches pattern :param txt: text to check :return: True if match ] if compare[constant[\\u] in name[self].pattern_re.pattern] begin[:] variable[txt] assign[=] call[call[name[txt].encode, parameter[constant[utf-8]]].decode, parameter[constant[unicode-escape]]] variable[match] assign[=] call[name[self].pattern_re.match, parameter[name[txt]]] return[<ast.BoolOp object at 0x7da204345270>]
keyword[def] identifier[matches] ( identifier[self] , identifier[txt] : identifier[str] )-> identifier[bool] : literal[string] keyword[if] literal[string] keyword[in] identifier[self] . identifier[pattern_re] . identifier[pattern] : identifier[txt] = identifier[txt] . identifier[encode] ( literal[string] ). identifier[decode] ( literal[string] ) identifier[match] = identifier[self] . identifier[pattern_re] . identifier[match] ( identifier[txt] ) keyword[return] identifier[match] keyword[is] keyword[not] keyword[None] keyword[and] identifier[match] . identifier[end] ()== identifier[len] ( identifier[txt] )
def matches(self, txt: str) -> bool: """Determine whether txt matches pattern :param txt: text to check :return: True if match """ # rval = ref.getText()[1:-1].encode('utf-8').decode('unicode-escape') if '\\\\u' in self.pattern_re.pattern: txt = txt.encode('utf-8').decode('unicode-escape') # depends on [control=['if'], data=[]] match = self.pattern_re.match(txt) return match is not None and match.end() == len(txt)
def untlpy2highwirepy(untl_elements, **kwargs): """Convert a UNTL Python object to a highwire Python object.""" highwire_list = [] title = None publisher = None creation = None escape = kwargs.get('escape', False) for element in untl_elements.children: # If the UNTL element should be converted to highwire, # create highwire element. if element.tag in HIGHWIRE_CONVERSION_DISPATCH: highwire_element = HIGHWIRE_CONVERSION_DISPATCH[element.tag]( qualifier=element.qualifier, content=element.content, children=element.children, escape=escape, ) if highwire_element: if element.tag == 'title': if element.qualifier != 'officialtitle' and not title: title = highwire_element elif element.qualifier == 'officialtitle': title = highwire_element elif element.tag == 'publisher': if not publisher: # This is the first publisher element. publisher = highwire_element highwire_list.append(publisher) elif element.tag == 'date': # If a creation date hasn't been found yet, # verify this date is acceptable. if not creation and element.qualifier == 'creation': if highwire_element.content: creation = highwire_element if creation: highwire_list.append(creation) # Otherwise, add the element to the list if it has content. elif highwire_element.content: highwire_list.append(highwire_element) # If the title was found, add it to the list. if title: highwire_list.append(title) return highwire_list
def function[untlpy2highwirepy, parameter[untl_elements]]: constant[Convert a UNTL Python object to a highwire Python object.] variable[highwire_list] assign[=] list[[]] variable[title] assign[=] constant[None] variable[publisher] assign[=] constant[None] variable[creation] assign[=] constant[None] variable[escape] assign[=] call[name[kwargs].get, parameter[constant[escape], constant[False]]] for taget[name[element]] in starred[name[untl_elements].children] begin[:] if compare[name[element].tag in name[HIGHWIRE_CONVERSION_DISPATCH]] begin[:] variable[highwire_element] assign[=] call[call[name[HIGHWIRE_CONVERSION_DISPATCH]][name[element].tag], parameter[]] if name[highwire_element] begin[:] if compare[name[element].tag equal[==] constant[title]] begin[:] if <ast.BoolOp object at 0x7da1b225dc90> begin[:] variable[title] assign[=] name[highwire_element] if name[title] begin[:] call[name[highwire_list].append, parameter[name[title]]] return[name[highwire_list]]
keyword[def] identifier[untlpy2highwirepy] ( identifier[untl_elements] ,** identifier[kwargs] ): literal[string] identifier[highwire_list] =[] identifier[title] = keyword[None] identifier[publisher] = keyword[None] identifier[creation] = keyword[None] identifier[escape] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ) keyword[for] identifier[element] keyword[in] identifier[untl_elements] . identifier[children] : keyword[if] identifier[element] . identifier[tag] keyword[in] identifier[HIGHWIRE_CONVERSION_DISPATCH] : identifier[highwire_element] = identifier[HIGHWIRE_CONVERSION_DISPATCH] [ identifier[element] . identifier[tag] ]( identifier[qualifier] = identifier[element] . identifier[qualifier] , identifier[content] = identifier[element] . identifier[content] , identifier[children] = identifier[element] . identifier[children] , identifier[escape] = identifier[escape] , ) keyword[if] identifier[highwire_element] : keyword[if] identifier[element] . identifier[tag] == literal[string] : keyword[if] identifier[element] . identifier[qualifier] != literal[string] keyword[and] keyword[not] identifier[title] : identifier[title] = identifier[highwire_element] keyword[elif] identifier[element] . identifier[qualifier] == literal[string] : identifier[title] = identifier[highwire_element] keyword[elif] identifier[element] . identifier[tag] == literal[string] : keyword[if] keyword[not] identifier[publisher] : identifier[publisher] = identifier[highwire_element] identifier[highwire_list] . identifier[append] ( identifier[publisher] ) keyword[elif] identifier[element] . identifier[tag] == literal[string] : keyword[if] keyword[not] identifier[creation] keyword[and] identifier[element] . identifier[qualifier] == literal[string] : keyword[if] identifier[highwire_element] . identifier[content] : identifier[creation] = identifier[highwire_element] keyword[if] identifier[creation] : identifier[highwire_list] . 
identifier[append] ( identifier[creation] ) keyword[elif] identifier[highwire_element] . identifier[content] : identifier[highwire_list] . identifier[append] ( identifier[highwire_element] ) keyword[if] identifier[title] : identifier[highwire_list] . identifier[append] ( identifier[title] ) keyword[return] identifier[highwire_list]
def untlpy2highwirepy(untl_elements, **kwargs): """Convert a UNTL Python object to a highwire Python object.""" highwire_list = [] title = None publisher = None creation = None escape = kwargs.get('escape', False) for element in untl_elements.children: # If the UNTL element should be converted to highwire, # create highwire element. if element.tag in HIGHWIRE_CONVERSION_DISPATCH: highwire_element = HIGHWIRE_CONVERSION_DISPATCH[element.tag](qualifier=element.qualifier, content=element.content, children=element.children, escape=escape) if highwire_element: if element.tag == 'title': if element.qualifier != 'officialtitle' and (not title): title = highwire_element # depends on [control=['if'], data=[]] elif element.qualifier == 'officialtitle': title = highwire_element # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif element.tag == 'publisher': if not publisher: # This is the first publisher element. publisher = highwire_element highwire_list.append(publisher) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif element.tag == 'date': # If a creation date hasn't been found yet, # verify this date is acceptable. if not creation and element.qualifier == 'creation': if highwire_element.content: creation = highwire_element if creation: highwire_list.append(creation) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Otherwise, add the element to the list if it has content. elif highwire_element.content: highwire_list.append(highwire_element) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['HIGHWIRE_CONVERSION_DISPATCH']] # depends on [control=['for'], data=['element']] # If the title was found, add it to the list. if title: highwire_list.append(title) # depends on [control=['if'], data=[]] return highwire_list
def render(self, template, **data): """Renders the template using Jinja2 with given data arguments. """ if(type(template) != str): raise TypeError("String expected") env = Environment( loader=FileSystemLoader(os.getcwd() + '/View'), autoescape=select_autoescape() ) template = env.get_template(template) return self.finish(template.render(data))
def function[render, parameter[self, template]]: constant[Renders the template using Jinja2 with given data arguments. ] if compare[call[name[type], parameter[name[template]]] not_equal[!=] name[str]] begin[:] <ast.Raise object at 0x7da2054a4670> variable[env] assign[=] call[name[Environment], parameter[]] variable[template] assign[=] call[name[env].get_template, parameter[name[template]]] return[call[name[self].finish, parameter[call[name[template].render, parameter[name[data]]]]]]
keyword[def] identifier[render] ( identifier[self] , identifier[template] ,** identifier[data] ): literal[string] keyword[if] ( identifier[type] ( identifier[template] )!= identifier[str] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[env] = identifier[Environment] ( identifier[loader] = identifier[FileSystemLoader] ( identifier[os] . identifier[getcwd] ()+ literal[string] ), identifier[autoescape] = identifier[select_autoescape] () ) identifier[template] = identifier[env] . identifier[get_template] ( identifier[template] ) keyword[return] identifier[self] . identifier[finish] ( identifier[template] . identifier[render] ( identifier[data] ))
def render(self, template, **data): """Renders the template using Jinja2 with given data arguments. """ if type(template) != str: raise TypeError('String expected') # depends on [control=['if'], data=[]] env = Environment(loader=FileSystemLoader(os.getcwd() + '/View'), autoescape=select_autoescape()) template = env.get_template(template) return self.finish(template.render(data))
async def unsubscribe(self, topic): """ Unsubscribe the socket from the specified topic. :param topic: The topic to unsubscribe from. """ if self.socket_type not in {SUB, XSUB}: raise AssertionError( "A %s socket cannot unsubscribe." % self.socket_type.decode(), ) # Do this **BEFORE** awaiting so that new connections created during # the execution below honor the setting. self._subscriptions.remove(topic) tasks = [ asyncio.ensure_future( peer.connection.local_unsubscribe(topic), loop=self.loop, ) for peer in self._peers if peer.connection ] if tasks: try: await asyncio.wait(tasks, loop=self.loop) finally: for task in tasks: task.cancel()
<ast.AsyncFunctionDef object at 0x7da1b0aec0d0>
keyword[async] keyword[def] identifier[unsubscribe] ( identifier[self] , identifier[topic] ): literal[string] keyword[if] identifier[self] . identifier[socket_type] keyword[not] keyword[in] { identifier[SUB] , identifier[XSUB] }: keyword[raise] identifier[AssertionError] ( literal[string] % identifier[self] . identifier[socket_type] . identifier[decode] (), ) identifier[self] . identifier[_subscriptions] . identifier[remove] ( identifier[topic] ) identifier[tasks] =[ identifier[asyncio] . identifier[ensure_future] ( identifier[peer] . identifier[connection] . identifier[local_unsubscribe] ( identifier[topic] ), identifier[loop] = identifier[self] . identifier[loop] , ) keyword[for] identifier[peer] keyword[in] identifier[self] . identifier[_peers] keyword[if] identifier[peer] . identifier[connection] ] keyword[if] identifier[tasks] : keyword[try] : keyword[await] identifier[asyncio] . identifier[wait] ( identifier[tasks] , identifier[loop] = identifier[self] . identifier[loop] ) keyword[finally] : keyword[for] identifier[task] keyword[in] identifier[tasks] : identifier[task] . identifier[cancel] ()
async def unsubscribe(self, topic): """ Unsubscribe the socket from the specified topic. :param topic: The topic to unsubscribe from. """ if self.socket_type not in {SUB, XSUB}: raise AssertionError('A %s socket cannot unsubscribe.' % self.socket_type.decode()) # depends on [control=['if'], data=[]] # Do this **BEFORE** awaiting so that new connections created during # the execution below honor the setting. self._subscriptions.remove(topic) tasks = [asyncio.ensure_future(peer.connection.local_unsubscribe(topic), loop=self.loop) for peer in self._peers if peer.connection] if tasks: try: await asyncio.wait(tasks, loop=self.loop) # depends on [control=['try'], data=[]] finally: for task in tasks: task.cancel() # depends on [control=['for'], data=['task']] # depends on [control=['if'], data=[]]
def mount(self, source, target, options=[]): """ Mount partion on target :param source: Full partition path like /dev/sda1 :param target: Mount point :param options: Optional mount options """ if len(options) == 0: options = [''] args = { 'options': ','.join(options), 'source': source, 'target': target, } self._mount_chk.check(args) response = self._client.raw('disk.mount', args) result = response.get() if result.state != 'SUCCESS': raise RuntimeError('failed to mount partition: %s' % result.stderr)
def function[mount, parameter[self, source, target, options]]: constant[ Mount partion on target :param source: Full partition path like /dev/sda1 :param target: Mount point :param options: Optional mount options ] if compare[call[name[len], parameter[name[options]]] equal[==] constant[0]] begin[:] variable[options] assign[=] list[[<ast.Constant object at 0x7da1b04d9390>]] variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da1b04d97e0>, <ast.Constant object at 0x7da1b04da980>, <ast.Constant object at 0x7da1b04db040>], [<ast.Call object at 0x7da1b04d9cf0>, <ast.Name object at 0x7da1b04d9990>, <ast.Name object at 0x7da1b04dadd0>]] call[name[self]._mount_chk.check, parameter[name[args]]] variable[response] assign[=] call[name[self]._client.raw, parameter[constant[disk.mount], name[args]]] variable[result] assign[=] call[name[response].get, parameter[]] if compare[name[result].state not_equal[!=] constant[SUCCESS]] begin[:] <ast.Raise object at 0x7da1b04d8670>
keyword[def] identifier[mount] ( identifier[self] , identifier[source] , identifier[target] , identifier[options] =[]): literal[string] keyword[if] identifier[len] ( identifier[options] )== literal[int] : identifier[options] =[ literal[string] ] identifier[args] ={ literal[string] : literal[string] . identifier[join] ( identifier[options] ), literal[string] : identifier[source] , literal[string] : identifier[target] , } identifier[self] . identifier[_mount_chk] . identifier[check] ( identifier[args] ) identifier[response] = identifier[self] . identifier[_client] . identifier[raw] ( literal[string] , identifier[args] ) identifier[result] = identifier[response] . identifier[get] () keyword[if] identifier[result] . identifier[state] != literal[string] : keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[result] . identifier[stderr] )
def mount(self, source, target, options=[]): """ Mount partion on target :param source: Full partition path like /dev/sda1 :param target: Mount point :param options: Optional mount options """ if len(options) == 0: options = [''] # depends on [control=['if'], data=[]] args = {'options': ','.join(options), 'source': source, 'target': target} self._mount_chk.check(args) response = self._client.raw('disk.mount', args) result = response.get() if result.state != 'SUCCESS': raise RuntimeError('failed to mount partition: %s' % result.stderr) # depends on [control=['if'], data=[]]
def auth(self, user, pwd): """ Obtain connection parameters from the Microsoft account login page, and perform a login with the given email address or Skype username, and its password. This emulates a login to Skype for Web on ``login.live.com``. .. note:: Microsoft accounts with two-factor authentication enabled are not supported, and will cause a :class:`.SkypeAuthException` to be raised. See the exception definitions for other possible causes. Args: user (str): username or email address of the connecting account pwd (str): password of the connecting account Returns: (str, datetime.datetime) tuple: Skype token, and associated expiry if known Raises: .SkypeAuthException: if the login request is rejected .SkypeApiException: if the login form can't be processed """ # Do the authentication dance. params = self.getParams() t = self.sendCreds(user, pwd, params) return self.getToken(t)
def function[auth, parameter[self, user, pwd]]: constant[ Obtain connection parameters from the Microsoft account login page, and perform a login with the given email address or Skype username, and its password. This emulates a login to Skype for Web on ``login.live.com``. .. note:: Microsoft accounts with two-factor authentication enabled are not supported, and will cause a :class:`.SkypeAuthException` to be raised. See the exception definitions for other possible causes. Args: user (str): username or email address of the connecting account pwd (str): password of the connecting account Returns: (str, datetime.datetime) tuple: Skype token, and associated expiry if known Raises: .SkypeAuthException: if the login request is rejected .SkypeApiException: if the login form can't be processed ] variable[params] assign[=] call[name[self].getParams, parameter[]] variable[t] assign[=] call[name[self].sendCreds, parameter[name[user], name[pwd], name[params]]] return[call[name[self].getToken, parameter[name[t]]]]
keyword[def] identifier[auth] ( identifier[self] , identifier[user] , identifier[pwd] ): literal[string] identifier[params] = identifier[self] . identifier[getParams] () identifier[t] = identifier[self] . identifier[sendCreds] ( identifier[user] , identifier[pwd] , identifier[params] ) keyword[return] identifier[self] . identifier[getToken] ( identifier[t] )
def auth(self, user, pwd): """ Obtain connection parameters from the Microsoft account login page, and perform a login with the given email address or Skype username, and its password. This emulates a login to Skype for Web on ``login.live.com``. .. note:: Microsoft accounts with two-factor authentication enabled are not supported, and will cause a :class:`.SkypeAuthException` to be raised. See the exception definitions for other possible causes. Args: user (str): username or email address of the connecting account pwd (str): password of the connecting account Returns: (str, datetime.datetime) tuple: Skype token, and associated expiry if known Raises: .SkypeAuthException: if the login request is rejected .SkypeApiException: if the login form can't be processed """ # Do the authentication dance. params = self.getParams() t = self.sendCreds(user, pwd, params) return self.getToken(t)
def set_default_headers(self, *args, **kwargs): """Set the default headers for all requests.""" self.set_header('Access-Control-Allow-Origin', '*') self.set_header('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept') self.set_header('Access-Control-Allow-Methods', 'GET, HEAD, PUT, POST, DELETE')
def function[set_default_headers, parameter[self]]: constant[Set the default headers for all requests.] call[name[self].set_header, parameter[constant[Access-Control-Allow-Origin], constant[*]]] call[name[self].set_header, parameter[constant[Access-Control-Allow-Headers], constant[Origin, X-Requested-With, Content-Type, Accept]]] call[name[self].set_header, parameter[constant[Access-Control-Allow-Methods], constant[GET, HEAD, PUT, POST, DELETE]]]
keyword[def] identifier[set_default_headers] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[set_header] ( literal[string] , literal[string] ) identifier[self] . identifier[set_header] ( literal[string] , literal[string] ) identifier[self] . identifier[set_header] ( literal[string] , literal[string] )
def set_default_headers(self, *args, **kwargs): """Set the default headers for all requests.""" self.set_header('Access-Control-Allow-Origin', '*') self.set_header('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept') self.set_header('Access-Control-Allow-Methods', 'GET, HEAD, PUT, POST, DELETE')
def launch(self, monitor=False, wait=False, timeout=None, **kwargs): """Launch a new ad-hoc command. Runs a user-defined command from Ansible Tower, immediately starts it, and returns back an ID in order for its status to be monitored. =====API DOCS===== Launch a new ad-hoc command. :param monitor: Flag that if set, immediately calls ``monitor`` on the newly launched command rather than exiting with a success. :type monitor: bool :param wait: Flag that if set, monitor the status of the job, but do not print while job is in progress. :type wait: bool :param timeout: If provided with ``monitor`` flag set, this attempt will time out after the given number of seconds. :type timeout: int :param `**kwargs`: Fields needed to create and launch an ad hoc command. :returns: Result of subsequent ``monitor`` call if ``monitor`` flag is on; Result of subsequent ``wait`` call if ``wait`` flag is on; dictionary of "id" and "changed" if none of the two flags are on. :rtype: dict :raises tower_cli.exceptions.TowerCLIError: When ad hoc commands are not available in Tower backend. =====API DOCS===== """ # This feature only exists for versions 2.2 and up r = client.get('/') if 'ad_hoc_commands' not in r.json(): raise exc.TowerCLIError('Your host is running an outdated version' 'of Ansible Tower that can not run ' 'ad-hoc commands (2.2 or earlier)') # Pop the None arguments because we have no .write() method in # inheritance chain for this type of resource. This is needed self._pop_none(kwargs) # Actually start the command. debug.log('Launching the ad-hoc command.', header='details') result = client.post(self.endpoint, data=kwargs) command = result.json() command_id = command['id'] # If we were told to monitor the command once it started, then call # monitor from here. 
if monitor: return self.monitor(command_id, timeout=timeout) elif wait: return self.wait(command_id, timeout=timeout) # Return the command ID and other response data answer = OrderedDict(( ('changed', True), ('id', command_id), )) answer.update(result.json()) return answer
def function[launch, parameter[self, monitor, wait, timeout]]: constant[Launch a new ad-hoc command. Runs a user-defined command from Ansible Tower, immediately starts it, and returns back an ID in order for its status to be monitored. =====API DOCS===== Launch a new ad-hoc command. :param monitor: Flag that if set, immediately calls ``monitor`` on the newly launched command rather than exiting with a success. :type monitor: bool :param wait: Flag that if set, monitor the status of the job, but do not print while job is in progress. :type wait: bool :param timeout: If provided with ``monitor`` flag set, this attempt will time out after the given number of seconds. :type timeout: int :param `**kwargs`: Fields needed to create and launch an ad hoc command. :returns: Result of subsequent ``monitor`` call if ``monitor`` flag is on; Result of subsequent ``wait`` call if ``wait`` flag is on; dictionary of "id" and "changed" if none of the two flags are on. :rtype: dict :raises tower_cli.exceptions.TowerCLIError: When ad hoc commands are not available in Tower backend. =====API DOCS===== ] variable[r] assign[=] call[name[client].get, parameter[constant[/]]] if compare[constant[ad_hoc_commands] <ast.NotIn object at 0x7da2590d7190> call[name[r].json, parameter[]]] begin[:] <ast.Raise object at 0x7da2046238e0> call[name[self]._pop_none, parameter[name[kwargs]]] call[name[debug].log, parameter[constant[Launching the ad-hoc command.]]] variable[result] assign[=] call[name[client].post, parameter[name[self].endpoint]] variable[command] assign[=] call[name[result].json, parameter[]] variable[command_id] assign[=] call[name[command]][constant[id]] if name[monitor] begin[:] return[call[name[self].monitor, parameter[name[command_id]]]] variable[answer] assign[=] call[name[OrderedDict], parameter[tuple[[<ast.Tuple object at 0x7da204622c80>, <ast.Tuple object at 0x7da2046225c0>]]]] call[name[answer].update, parameter[call[name[result].json, parameter[]]]] return[name[answer]]
keyword[def] identifier[launch] ( identifier[self] , identifier[monitor] = keyword[False] , identifier[wait] = keyword[False] , identifier[timeout] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[r] = identifier[client] . identifier[get] ( literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[r] . identifier[json] (): keyword[raise] identifier[exc] . identifier[TowerCLIError] ( literal[string] literal[string] literal[string] ) identifier[self] . identifier[_pop_none] ( identifier[kwargs] ) identifier[debug] . identifier[log] ( literal[string] , identifier[header] = literal[string] ) identifier[result] = identifier[client] . identifier[post] ( identifier[self] . identifier[endpoint] , identifier[data] = identifier[kwargs] ) identifier[command] = identifier[result] . identifier[json] () identifier[command_id] = identifier[command] [ literal[string] ] keyword[if] identifier[monitor] : keyword[return] identifier[self] . identifier[monitor] ( identifier[command_id] , identifier[timeout] = identifier[timeout] ) keyword[elif] identifier[wait] : keyword[return] identifier[self] . identifier[wait] ( identifier[command_id] , identifier[timeout] = identifier[timeout] ) identifier[answer] = identifier[OrderedDict] (( ( literal[string] , keyword[True] ), ( literal[string] , identifier[command_id] ), )) identifier[answer] . identifier[update] ( identifier[result] . identifier[json] ()) keyword[return] identifier[answer]
def launch(self, monitor=False, wait=False, timeout=None, **kwargs): """Launch a new ad-hoc command. Runs a user-defined command from Ansible Tower, immediately starts it, and returns back an ID in order for its status to be monitored. =====API DOCS===== Launch a new ad-hoc command. :param monitor: Flag that if set, immediately calls ``monitor`` on the newly launched command rather than exiting with a success. :type monitor: bool :param wait: Flag that if set, monitor the status of the job, but do not print while job is in progress. :type wait: bool :param timeout: If provided with ``monitor`` flag set, this attempt will time out after the given number of seconds. :type timeout: int :param `**kwargs`: Fields needed to create and launch an ad hoc command. :returns: Result of subsequent ``monitor`` call if ``monitor`` flag is on; Result of subsequent ``wait`` call if ``wait`` flag is on; dictionary of "id" and "changed" if none of the two flags are on. :rtype: dict :raises tower_cli.exceptions.TowerCLIError: When ad hoc commands are not available in Tower backend. =====API DOCS===== """ # This feature only exists for versions 2.2 and up r = client.get('/') if 'ad_hoc_commands' not in r.json(): raise exc.TowerCLIError('Your host is running an outdated versionof Ansible Tower that can not run ad-hoc commands (2.2 or earlier)') # depends on [control=['if'], data=[]] # Pop the None arguments because we have no .write() method in # inheritance chain for this type of resource. This is needed self._pop_none(kwargs) # Actually start the command. debug.log('Launching the ad-hoc command.', header='details') result = client.post(self.endpoint, data=kwargs) command = result.json() command_id = command['id'] # If we were told to monitor the command once it started, then call # monitor from here. 
if monitor: return self.monitor(command_id, timeout=timeout) # depends on [control=['if'], data=[]] elif wait: return self.wait(command_id, timeout=timeout) # depends on [control=['if'], data=[]] # Return the command ID and other response data answer = OrderedDict((('changed', True), ('id', command_id))) answer.update(result.json()) return answer
def _parse_line_vars(self, line): """ Parse a line in a [XXXXX:vars] section. """ key_values = {} # Undocumented feature allows json in vars sections like so: # [prod:vars] # json_like_vars=[{'name': 'htpasswd_auth'}] # We'll try this first. If it fails, we'll fall back to normal var # lines. Since it's undocumented, we just assume some things. k, v = line.strip().split('=', 1) if v.startswith('['): try: list_res = ihateyaml.safe_load(v) if isinstance(list_res[0], dict): key_values = list_res[0] return key_values except ValueError: pass # Guess it's not YAML. Parse as normal host variables tokens = shlex.split(line.strip()) key_values = self._parse_vars(tokens) return key_values
def function[_parse_line_vars, parameter[self, line]]: constant[ Parse a line in a [XXXXX:vars] section. ] variable[key_values] assign[=] dictionary[[], []] <ast.Tuple object at 0x7da1b1d17f70> assign[=] call[call[name[line].strip, parameter[]].split, parameter[constant[=], constant[1]]] if call[name[v].startswith, parameter[constant[[]]] begin[:] <ast.Try object at 0x7da1b2295f00> variable[tokens] assign[=] call[name[shlex].split, parameter[call[name[line].strip, parameter[]]]] variable[key_values] assign[=] call[name[self]._parse_vars, parameter[name[tokens]]] return[name[key_values]]
keyword[def] identifier[_parse_line_vars] ( identifier[self] , identifier[line] ): literal[string] identifier[key_values] ={} identifier[k] , identifier[v] = identifier[line] . identifier[strip] (). identifier[split] ( literal[string] , literal[int] ) keyword[if] identifier[v] . identifier[startswith] ( literal[string] ): keyword[try] : identifier[list_res] = identifier[ihateyaml] . identifier[safe_load] ( identifier[v] ) keyword[if] identifier[isinstance] ( identifier[list_res] [ literal[int] ], identifier[dict] ): identifier[key_values] = identifier[list_res] [ literal[int] ] keyword[return] identifier[key_values] keyword[except] identifier[ValueError] : keyword[pass] identifier[tokens] = identifier[shlex] . identifier[split] ( identifier[line] . identifier[strip] ()) identifier[key_values] = identifier[self] . identifier[_parse_vars] ( identifier[tokens] ) keyword[return] identifier[key_values]
def _parse_line_vars(self, line): """ Parse a line in a [XXXXX:vars] section. """ key_values = {} # Undocumented feature allows json in vars sections like so: # [prod:vars] # json_like_vars=[{'name': 'htpasswd_auth'}] # We'll try this first. If it fails, we'll fall back to normal var # lines. Since it's undocumented, we just assume some things. (k, v) = line.strip().split('=', 1) if v.startswith('['): try: list_res = ihateyaml.safe_load(v) if isinstance(list_res[0], dict): key_values = list_res[0] return key_values # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except ValueError: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # Guess it's not YAML. Parse as normal host variables tokens = shlex.split(line.strip()) key_values = self._parse_vars(tokens) return key_values
def chunked(sentence): """ Returns a list of Chunk and Chink objects from the given sentence. Chink is a subclass of Chunk used for words that have Word.chunk == None (e.g., punctuation marks, conjunctions). """ # For example, to construct a training vector with the head of previous chunks as a feature. # Doing this with Sentence.chunks would discard the punctuation marks and conjunctions # (Sentence.chunks only yields Chunk objects), which amy be useful features. chunks = [] for word in sentence: if word.chunk is not None: if len(chunks) == 0 or chunks[-1] != word.chunk: chunks.append(word.chunk) else: ch = Chink(sentence) ch.append(word.copy(ch)) chunks.append(ch) return chunks
def function[chunked, parameter[sentence]]: constant[ Returns a list of Chunk and Chink objects from the given sentence. Chink is a subclass of Chunk used for words that have Word.chunk == None (e.g., punctuation marks, conjunctions). ] variable[chunks] assign[=] list[[]] for taget[name[word]] in starred[name[sentence]] begin[:] if compare[name[word].chunk is_not constant[None]] begin[:] if <ast.BoolOp object at 0x7da20c6e7f10> begin[:] call[name[chunks].append, parameter[name[word].chunk]] return[name[chunks]]
keyword[def] identifier[chunked] ( identifier[sentence] ): literal[string] identifier[chunks] =[] keyword[for] identifier[word] keyword[in] identifier[sentence] : keyword[if] identifier[word] . identifier[chunk] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[len] ( identifier[chunks] )== literal[int] keyword[or] identifier[chunks] [- literal[int] ]!= identifier[word] . identifier[chunk] : identifier[chunks] . identifier[append] ( identifier[word] . identifier[chunk] ) keyword[else] : identifier[ch] = identifier[Chink] ( identifier[sentence] ) identifier[ch] . identifier[append] ( identifier[word] . identifier[copy] ( identifier[ch] )) identifier[chunks] . identifier[append] ( identifier[ch] ) keyword[return] identifier[chunks]
def chunked(sentence): """ Returns a list of Chunk and Chink objects from the given sentence. Chink is a subclass of Chunk used for words that have Word.chunk == None (e.g., punctuation marks, conjunctions). """ # For example, to construct a training vector with the head of previous chunks as a feature. # Doing this with Sentence.chunks would discard the punctuation marks and conjunctions # (Sentence.chunks only yields Chunk objects), which amy be useful features. chunks = [] for word in sentence: if word.chunk is not None: if len(chunks) == 0 or chunks[-1] != word.chunk: chunks.append(word.chunk) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: ch = Chink(sentence) ch.append(word.copy(ch)) chunks.append(ch) # depends on [control=['for'], data=['word']] return chunks
def init(base_url, username=None, password=None, verify=True): """Initialize ubersmith API module with HTTP request handler.""" handler = RequestHandler(base_url, username, password, verify) set_default_request_handler(handler) return handler
def function[init, parameter[base_url, username, password, verify]]: constant[Initialize ubersmith API module with HTTP request handler.] variable[handler] assign[=] call[name[RequestHandler], parameter[name[base_url], name[username], name[password], name[verify]]] call[name[set_default_request_handler], parameter[name[handler]]] return[name[handler]]
keyword[def] identifier[init] ( identifier[base_url] , identifier[username] = keyword[None] , identifier[password] = keyword[None] , identifier[verify] = keyword[True] ): literal[string] identifier[handler] = identifier[RequestHandler] ( identifier[base_url] , identifier[username] , identifier[password] , identifier[verify] ) identifier[set_default_request_handler] ( identifier[handler] ) keyword[return] identifier[handler]
def init(base_url, username=None, password=None, verify=True): """Initialize ubersmith API module with HTTP request handler.""" handler = RequestHandler(base_url, username, password, verify) set_default_request_handler(handler) return handler
def unpack_ip(fourbytes): """Converts an ip address given in a four byte string in network byte order to a string in dotted notation. >>> unpack_ip(b"dead") '100.101.97.100' >>> unpack_ip(b"alive") Traceback (most recent call last): ... ValueError: given buffer is not exactly four bytes long @type fourbytes: bytes @rtype: str @raises ValueError: for bad input """ if not isinstance(fourbytes, bytes): raise ValueError("given buffer is not a string") if len(fourbytes) != 4: raise ValueError("given buffer is not exactly four bytes long") return ".".join([str(x) for x in bytes_to_int_seq(fourbytes)])
def function[unpack_ip, parameter[fourbytes]]: constant[Converts an ip address given in a four byte string in network byte order to a string in dotted notation. >>> unpack_ip(b"dead") '100.101.97.100' >>> unpack_ip(b"alive") Traceback (most recent call last): ... ValueError: given buffer is not exactly four bytes long @type fourbytes: bytes @rtype: str @raises ValueError: for bad input ] if <ast.UnaryOp object at 0x7da1b0382770> begin[:] <ast.Raise object at 0x7da1b03818a0> if compare[call[name[len], parameter[name[fourbytes]]] not_equal[!=] constant[4]] begin[:] <ast.Raise object at 0x7da1b0381090> return[call[constant[.].join, parameter[<ast.ListComp object at 0x7da204566bc0>]]]
keyword[def] identifier[unpack_ip] ( identifier[fourbytes] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[fourbytes] , identifier[bytes] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[len] ( identifier[fourbytes] )!= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] literal[string] . identifier[join] ([ identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[bytes_to_int_seq] ( identifier[fourbytes] )])
def unpack_ip(fourbytes): """Converts an ip address given in a four byte string in network byte order to a string in dotted notation. >>> unpack_ip(b"dead") '100.101.97.100' >>> unpack_ip(b"alive") Traceback (most recent call last): ... ValueError: given buffer is not exactly four bytes long @type fourbytes: bytes @rtype: str @raises ValueError: for bad input """ if not isinstance(fourbytes, bytes): raise ValueError('given buffer is not a string') # depends on [control=['if'], data=[]] if len(fourbytes) != 4: raise ValueError('given buffer is not exactly four bytes long') # depends on [control=['if'], data=[]] return '.'.join([str(x) for x in bytes_to_int_seq(fourbytes)])
def restore_organization_to_ckan(catalog, owner_org, portal_url, apikey, dataset_list=None, download_strategy=None, generate_new_access_url=None): """Restaura los datasets de la organización de un catálogo al portal pasado por parámetro. Si hay temas presentes en el DataJson que no están en el portal de CKAN, los genera. Args: catalog (DataJson): El catálogo de origen que se restaura. portal_url (str): La URL del portal CKAN de destino. apikey (str): La apikey de un usuario con los permisos que le permitan crear o actualizar el dataset. dataset_list(list(str)): Los ids de los datasets a restaurar. Si no se pasa una lista, todos los datasests se restauran. owner_org (str): La organización a la cual pertencen los datasets. download_strategy(callable): Una función (catálogo, distribución)-> bool. Sobre las distribuciones que evalúa True, descarga el recurso en el downloadURL y lo sube al portal de destino. Por default no sube ninguna distribución. generate_new_access_url(list): Se pasan los ids de las distribuciones cuyo accessURL se regenerar en el portal de destino. Para el resto, el portal debe mantiene el valor pasado en el DataJson. Returns: list(str): La lista de ids de datasets subidos. """ push_new_themes(catalog, portal_url, apikey) restored = [] if dataset_list is None: try: dataset_list = [ds['identifier'] for ds in catalog.datasets] except KeyError: logger.exception('Hay datasets sin identificadores') return restored for dataset_id in dataset_list: try: restored_id = restore_dataset_to_ckan(catalog, owner_org, dataset_id, portal_url, apikey, download_strategy, generate_new_access_url) restored.append(restored_id) except (CKANAPIError, KeyError, AttributeError) as e: logger.exception('Ocurrió un error restaurando el dataset {}: {}' .format(dataset_id, str(e))) return restored
def function[restore_organization_to_ckan, parameter[catalog, owner_org, portal_url, apikey, dataset_list, download_strategy, generate_new_access_url]]: constant[Restaura los datasets de la organización de un catálogo al portal pasado por parámetro. Si hay temas presentes en el DataJson que no están en el portal de CKAN, los genera. Args: catalog (DataJson): El catálogo de origen que se restaura. portal_url (str): La URL del portal CKAN de destino. apikey (str): La apikey de un usuario con los permisos que le permitan crear o actualizar el dataset. dataset_list(list(str)): Los ids de los datasets a restaurar. Si no se pasa una lista, todos los datasests se restauran. owner_org (str): La organización a la cual pertencen los datasets. download_strategy(callable): Una función (catálogo, distribución)-> bool. Sobre las distribuciones que evalúa True, descarga el recurso en el downloadURL y lo sube al portal de destino. Por default no sube ninguna distribución. generate_new_access_url(list): Se pasan los ids de las distribuciones cuyo accessURL se regenerar en el portal de destino. Para el resto, el portal debe mantiene el valor pasado en el DataJson. Returns: list(str): La lista de ids de datasets subidos. ] call[name[push_new_themes], parameter[name[catalog], name[portal_url], name[apikey]]] variable[restored] assign[=] list[[]] if compare[name[dataset_list] is constant[None]] begin[:] <ast.Try object at 0x7da1b04422f0> for taget[name[dataset_id]] in starred[name[dataset_list]] begin[:] <ast.Try object at 0x7da1b0443e80> return[name[restored]]
keyword[def] identifier[restore_organization_to_ckan] ( identifier[catalog] , identifier[owner_org] , identifier[portal_url] , identifier[apikey] , identifier[dataset_list] = keyword[None] , identifier[download_strategy] = keyword[None] , identifier[generate_new_access_url] = keyword[None] ): literal[string] identifier[push_new_themes] ( identifier[catalog] , identifier[portal_url] , identifier[apikey] ) identifier[restored] =[] keyword[if] identifier[dataset_list] keyword[is] keyword[None] : keyword[try] : identifier[dataset_list] =[ identifier[ds] [ literal[string] ] keyword[for] identifier[ds] keyword[in] identifier[catalog] . identifier[datasets] ] keyword[except] identifier[KeyError] : identifier[logger] . identifier[exception] ( literal[string] ) keyword[return] identifier[restored] keyword[for] identifier[dataset_id] keyword[in] identifier[dataset_list] : keyword[try] : identifier[restored_id] = identifier[restore_dataset_to_ckan] ( identifier[catalog] , identifier[owner_org] , identifier[dataset_id] , identifier[portal_url] , identifier[apikey] , identifier[download_strategy] , identifier[generate_new_access_url] ) identifier[restored] . identifier[append] ( identifier[restored_id] ) keyword[except] ( identifier[CKANAPIError] , identifier[KeyError] , identifier[AttributeError] ) keyword[as] identifier[e] : identifier[logger] . identifier[exception] ( literal[string] . identifier[format] ( identifier[dataset_id] , identifier[str] ( identifier[e] ))) keyword[return] identifier[restored]
def restore_organization_to_ckan(catalog, owner_org, portal_url, apikey, dataset_list=None, download_strategy=None, generate_new_access_url=None): """Restaura los datasets de la organización de un catálogo al portal pasado por parámetro. Si hay temas presentes en el DataJson que no están en el portal de CKAN, los genera. Args: catalog (DataJson): El catálogo de origen que se restaura. portal_url (str): La URL del portal CKAN de destino. apikey (str): La apikey de un usuario con los permisos que le permitan crear o actualizar el dataset. dataset_list(list(str)): Los ids de los datasets a restaurar. Si no se pasa una lista, todos los datasests se restauran. owner_org (str): La organización a la cual pertencen los datasets. download_strategy(callable): Una función (catálogo, distribución)-> bool. Sobre las distribuciones que evalúa True, descarga el recurso en el downloadURL y lo sube al portal de destino. Por default no sube ninguna distribución. generate_new_access_url(list): Se pasan los ids de las distribuciones cuyo accessURL se regenerar en el portal de destino. Para el resto, el portal debe mantiene el valor pasado en el DataJson. Returns: list(str): La lista de ids de datasets subidos. 
""" push_new_themes(catalog, portal_url, apikey) restored = [] if dataset_list is None: try: dataset_list = [ds['identifier'] for ds in catalog.datasets] # depends on [control=['try'], data=[]] except KeyError: logger.exception('Hay datasets sin identificadores') return restored # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['dataset_list']] for dataset_id in dataset_list: try: restored_id = restore_dataset_to_ckan(catalog, owner_org, dataset_id, portal_url, apikey, download_strategy, generate_new_access_url) restored.append(restored_id) # depends on [control=['try'], data=[]] except (CKANAPIError, KeyError, AttributeError) as e: logger.exception('Ocurrió un error restaurando el dataset {}: {}'.format(dataset_id, str(e))) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['dataset_id']] return restored
def add_or_replace_annotation(self, # pylint: disable=R0201 obj, annotation, agent, add_agent_only=False): """Takes an `annotation` dictionary which is expected to have a string as the value of annotation['author']['name'] This function will remove all annotations from obj that: 1. have the same author/name, and 2. have no messages that are flagged as messages to be preserved (values for 'preserve' that evaluate to true) """ nex = get_nexml_el(obj) nvers = detect_nexson_version(obj) _LOG.debug('detected version as ' + nvers) agents_obj = find_val_literal_meta_first(nex, 'ot:agents', nvers) if not agents_obj: agents_obj = add_literal_meta(nex, 'ot:agents', {'agent': []}, nvers) agents_list = agents_obj.setdefault('agent', []) found_agent = False aid = agent['@id'] for a in agents_list: if a.get('@id') == aid: found_agent = True break if not found_agent: agents_list.append(agent) if add_agent_only: delete_same_agent_annotation(obj, annotation) else: replace_same_agent_annotation(obj, annotation)
def function[add_or_replace_annotation, parameter[self, obj, annotation, agent, add_agent_only]]: constant[Takes an `annotation` dictionary which is expected to have a string as the value of annotation['author']['name'] This function will remove all annotations from obj that: 1. have the same author/name, and 2. have no messages that are flagged as messages to be preserved (values for 'preserve' that evaluate to true) ] variable[nex] assign[=] call[name[get_nexml_el], parameter[name[obj]]] variable[nvers] assign[=] call[name[detect_nexson_version], parameter[name[obj]]] call[name[_LOG].debug, parameter[binary_operation[constant[detected version as ] + name[nvers]]]] variable[agents_obj] assign[=] call[name[find_val_literal_meta_first], parameter[name[nex], constant[ot:agents], name[nvers]]] if <ast.UnaryOp object at 0x7da2045641f0> begin[:] variable[agents_obj] assign[=] call[name[add_literal_meta], parameter[name[nex], constant[ot:agents], dictionary[[<ast.Constant object at 0x7da204567dc0>], [<ast.List object at 0x7da204566d40>]], name[nvers]]] variable[agents_list] assign[=] call[name[agents_obj].setdefault, parameter[constant[agent], list[[]]]] variable[found_agent] assign[=] constant[False] variable[aid] assign[=] call[name[agent]][constant[@id]] for taget[name[a]] in starred[name[agents_list]] begin[:] if compare[call[name[a].get, parameter[constant[@id]]] equal[==] name[aid]] begin[:] variable[found_agent] assign[=] constant[True] break if <ast.UnaryOp object at 0x7da204567970> begin[:] call[name[agents_list].append, parameter[name[agent]]] if name[add_agent_only] begin[:] call[name[delete_same_agent_annotation], parameter[name[obj], name[annotation]]]
keyword[def] identifier[add_or_replace_annotation] ( identifier[self] , identifier[obj] , identifier[annotation] , identifier[agent] , identifier[add_agent_only] = keyword[False] ): literal[string] identifier[nex] = identifier[get_nexml_el] ( identifier[obj] ) identifier[nvers] = identifier[detect_nexson_version] ( identifier[obj] ) identifier[_LOG] . identifier[debug] ( literal[string] + identifier[nvers] ) identifier[agents_obj] = identifier[find_val_literal_meta_first] ( identifier[nex] , literal[string] , identifier[nvers] ) keyword[if] keyword[not] identifier[agents_obj] : identifier[agents_obj] = identifier[add_literal_meta] ( identifier[nex] , literal[string] ,{ literal[string] :[]}, identifier[nvers] ) identifier[agents_list] = identifier[agents_obj] . identifier[setdefault] ( literal[string] ,[]) identifier[found_agent] = keyword[False] identifier[aid] = identifier[agent] [ literal[string] ] keyword[for] identifier[a] keyword[in] identifier[agents_list] : keyword[if] identifier[a] . identifier[get] ( literal[string] )== identifier[aid] : identifier[found_agent] = keyword[True] keyword[break] keyword[if] keyword[not] identifier[found_agent] : identifier[agents_list] . identifier[append] ( identifier[agent] ) keyword[if] identifier[add_agent_only] : identifier[delete_same_agent_annotation] ( identifier[obj] , identifier[annotation] ) keyword[else] : identifier[replace_same_agent_annotation] ( identifier[obj] , identifier[annotation] )
def add_or_replace_annotation(self, obj, annotation, agent, add_agent_only=False): # pylint: disable=R0201 "Takes an `annotation` dictionary which is\n expected to have a string as the value of annotation['author']['name']\n This function will remove all annotations from obj that:\n 1. have the same author/name, and\n 2. have no messages that are flagged as messages to be preserved (values for 'preserve'\n that evaluate to true)\n " nex = get_nexml_el(obj) nvers = detect_nexson_version(obj) _LOG.debug('detected version as ' + nvers) agents_obj = find_val_literal_meta_first(nex, 'ot:agents', nvers) if not agents_obj: agents_obj = add_literal_meta(nex, 'ot:agents', {'agent': []}, nvers) # depends on [control=['if'], data=[]] agents_list = agents_obj.setdefault('agent', []) found_agent = False aid = agent['@id'] for a in agents_list: if a.get('@id') == aid: found_agent = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']] if not found_agent: agents_list.append(agent) # depends on [control=['if'], data=[]] if add_agent_only: delete_same_agent_annotation(obj, annotation) # depends on [control=['if'], data=[]] else: replace_same_agent_annotation(obj, annotation)
def scalarsToStr(self, scalarValues, scalarNames=None): """ Return a pretty print string representing the return values from :meth:`.getScalars` and :meth:`.getScalarNames`. :param scalarValues: input values to encode to string :param scalarNames: optional input of scalar names to convert. If None, gets scalar names from :meth:`.getScalarNames` :return: string representation of scalar values """ if scalarNames is None: scalarNames = self.getScalarNames() desc = '' for (name, value) in zip(scalarNames, scalarValues): if len(desc) > 0: desc += ", %s:%.2f" % (name, value) else: desc += "%s:%.2f" % (name, value) return desc
def function[scalarsToStr, parameter[self, scalarValues, scalarNames]]: constant[ Return a pretty print string representing the return values from :meth:`.getScalars` and :meth:`.getScalarNames`. :param scalarValues: input values to encode to string :param scalarNames: optional input of scalar names to convert. If None, gets scalar names from :meth:`.getScalarNames` :return: string representation of scalar values ] if compare[name[scalarNames] is constant[None]] begin[:] variable[scalarNames] assign[=] call[name[self].getScalarNames, parameter[]] variable[desc] assign[=] constant[] for taget[tuple[[<ast.Name object at 0x7da20e9b3880>, <ast.Name object at 0x7da20e9b0d00>]]] in starred[call[name[zip], parameter[name[scalarNames], name[scalarValues]]]] begin[:] if compare[call[name[len], parameter[name[desc]]] greater[>] constant[0]] begin[:] <ast.AugAssign object at 0x7da20e9b17e0> return[name[desc]]
keyword[def] identifier[scalarsToStr] ( identifier[self] , identifier[scalarValues] , identifier[scalarNames] = keyword[None] ): literal[string] keyword[if] identifier[scalarNames] keyword[is] keyword[None] : identifier[scalarNames] = identifier[self] . identifier[getScalarNames] () identifier[desc] = literal[string] keyword[for] ( identifier[name] , identifier[value] ) keyword[in] identifier[zip] ( identifier[scalarNames] , identifier[scalarValues] ): keyword[if] identifier[len] ( identifier[desc] )> literal[int] : identifier[desc] += literal[string] %( identifier[name] , identifier[value] ) keyword[else] : identifier[desc] += literal[string] %( identifier[name] , identifier[value] ) keyword[return] identifier[desc]
def scalarsToStr(self, scalarValues, scalarNames=None): """ Return a pretty print string representing the return values from :meth:`.getScalars` and :meth:`.getScalarNames`. :param scalarValues: input values to encode to string :param scalarNames: optional input of scalar names to convert. If None, gets scalar names from :meth:`.getScalarNames` :return: string representation of scalar values """ if scalarNames is None: scalarNames = self.getScalarNames() # depends on [control=['if'], data=['scalarNames']] desc = '' for (name, value) in zip(scalarNames, scalarValues): if len(desc) > 0: desc += ', %s:%.2f' % (name, value) # depends on [control=['if'], data=[]] else: desc += '%s:%.2f' % (name, value) # depends on [control=['for'], data=[]] return desc
def authorize_password(self, client_id, username, password): """Authorize to platform as regular user You must provide a valid client_id (same as web application), your password and your username. Username and password is not stored in client but refresh token is stored. The only valid scope for this authorization is "regular_user". :param client_id: Valid client_id :type client_id: String :param username: User email :type username: String :param password: User password :type password: String """ self.auth_data = { "grant_type": "password", "username": username, "password": password, "client_id": client_id, "scope": ["regular_user"] } self._do_authorize()
def function[authorize_password, parameter[self, client_id, username, password]]: constant[Authorize to platform as regular user You must provide a valid client_id (same as web application), your password and your username. Username and password is not stored in client but refresh token is stored. The only valid scope for this authorization is "regular_user". :param client_id: Valid client_id :type client_id: String :param username: User email :type username: String :param password: User password :type password: String ] name[self].auth_data assign[=] dictionary[[<ast.Constant object at 0x7da20c6c5b10>, <ast.Constant object at 0x7da20c6c6c50>, <ast.Constant object at 0x7da20c6c4dc0>, <ast.Constant object at 0x7da20e9543a0>, <ast.Constant object at 0x7da20e957040>], [<ast.Constant object at 0x7da20e9553f0>, <ast.Name object at 0x7da20e954af0>, <ast.Name object at 0x7da20e954730>, <ast.Name object at 0x7da20e955ed0>, <ast.List object at 0x7da20e956470>]] call[name[self]._do_authorize, parameter[]]
keyword[def] identifier[authorize_password] ( identifier[self] , identifier[client_id] , identifier[username] , identifier[password] ): literal[string] identifier[self] . identifier[auth_data] ={ literal[string] : literal[string] , literal[string] : identifier[username] , literal[string] : identifier[password] , literal[string] : identifier[client_id] , literal[string] :[ literal[string] ] } identifier[self] . identifier[_do_authorize] ()
def authorize_password(self, client_id, username, password): """Authorize to platform as regular user You must provide a valid client_id (same as web application), your password and your username. Username and password is not stored in client but refresh token is stored. The only valid scope for this authorization is "regular_user". :param client_id: Valid client_id :type client_id: String :param username: User email :type username: String :param password: User password :type password: String """ self.auth_data = {'grant_type': 'password', 'username': username, 'password': password, 'client_id': client_id, 'scope': ['regular_user']} self._do_authorize()
def create(self, properties): """ Create and configure a Partition in this CPC. Authorization requirements: * Object-access permission to this CPC. * Task permission to the "New Partition" task. Parameters: properties (dict): Initial property values. Allowable properties are defined in section 'Request body contents' in section 'Create Partition' in the :term:`HMC API` book. Returns: Partition: The resource object for the new Partition. The object will have its 'object-uri' property set as returned by the HMC, and will also have the input properties set. Raises: :exc:`~zhmcclient.HTTPError` :exc:`~zhmcclient.ParseError` :exc:`~zhmcclient.AuthError` :exc:`~zhmcclient.ConnectionError` """ result = self.session.post(self.cpc.uri + '/partitions', body=properties) # There should not be overlaps, but just in case there are, the # returned props should overwrite the input props: props = copy.deepcopy(properties) props.update(result) name = props.get(self._name_prop, None) uri = props[self._uri_prop] part = Partition(self, uri, name, props) self._name_uri_cache.update(name, uri) return part
def function[create, parameter[self, properties]]: constant[ Create and configure a Partition in this CPC. Authorization requirements: * Object-access permission to this CPC. * Task permission to the "New Partition" task. Parameters: properties (dict): Initial property values. Allowable properties are defined in section 'Request body contents' in section 'Create Partition' in the :term:`HMC API` book. Returns: Partition: The resource object for the new Partition. The object will have its 'object-uri' property set as returned by the HMC, and will also have the input properties set. Raises: :exc:`~zhmcclient.HTTPError` :exc:`~zhmcclient.ParseError` :exc:`~zhmcclient.AuthError` :exc:`~zhmcclient.ConnectionError` ] variable[result] assign[=] call[name[self].session.post, parameter[binary_operation[name[self].cpc.uri + constant[/partitions]]]] variable[props] assign[=] call[name[copy].deepcopy, parameter[name[properties]]] call[name[props].update, parameter[name[result]]] variable[name] assign[=] call[name[props].get, parameter[name[self]._name_prop, constant[None]]] variable[uri] assign[=] call[name[props]][name[self]._uri_prop] variable[part] assign[=] call[name[Partition], parameter[name[self], name[uri], name[name], name[props]]] call[name[self]._name_uri_cache.update, parameter[name[name], name[uri]]] return[name[part]]
keyword[def] identifier[create] ( identifier[self] , identifier[properties] ): literal[string] identifier[result] = identifier[self] . identifier[session] . identifier[post] ( identifier[self] . identifier[cpc] . identifier[uri] + literal[string] , identifier[body] = identifier[properties] ) identifier[props] = identifier[copy] . identifier[deepcopy] ( identifier[properties] ) identifier[props] . identifier[update] ( identifier[result] ) identifier[name] = identifier[props] . identifier[get] ( identifier[self] . identifier[_name_prop] , keyword[None] ) identifier[uri] = identifier[props] [ identifier[self] . identifier[_uri_prop] ] identifier[part] = identifier[Partition] ( identifier[self] , identifier[uri] , identifier[name] , identifier[props] ) identifier[self] . identifier[_name_uri_cache] . identifier[update] ( identifier[name] , identifier[uri] ) keyword[return] identifier[part]
def create(self, properties): """ Create and configure a Partition in this CPC. Authorization requirements: * Object-access permission to this CPC. * Task permission to the "New Partition" task. Parameters: properties (dict): Initial property values. Allowable properties are defined in section 'Request body contents' in section 'Create Partition' in the :term:`HMC API` book. Returns: Partition: The resource object for the new Partition. The object will have its 'object-uri' property set as returned by the HMC, and will also have the input properties set. Raises: :exc:`~zhmcclient.HTTPError` :exc:`~zhmcclient.ParseError` :exc:`~zhmcclient.AuthError` :exc:`~zhmcclient.ConnectionError` """ result = self.session.post(self.cpc.uri + '/partitions', body=properties) # There should not be overlaps, but just in case there are, the # returned props should overwrite the input props: props = copy.deepcopy(properties) props.update(result) name = props.get(self._name_prop, None) uri = props[self._uri_prop] part = Partition(self, uri, name, props) self._name_uri_cache.update(name, uri) return part
def file(self, path, status=200, set_content_length=True): """ Return a file as a response """ ct, encoding = mimetypes.guess_type(path) if not ct: ct = 'application/octet-stream' if encoding: self.headers[aiohttp.hdrs.CONTENT_ENCODING] = encoding self.content_type = ct if set_content_length: st = os.stat(path) self.last_modified = st.st_mtime self.headers[aiohttp.hdrs.CONTENT_LENGTH] = str(st.st_size) else: self.enable_chunked_encoding() self.set_status(status) try: with open(path, 'rb') as fobj: yield from self.prepare(self._request) while True: data = fobj.read(4096) if not data: break yield from self.write(data) yield from self.drain() except FileNotFoundError: raise aiohttp.web.HTTPNotFound() except PermissionError: raise aiohttp.web.HTTPForbidden()
def function[file, parameter[self, path, status, set_content_length]]: constant[ Return a file as a response ] <ast.Tuple object at 0x7da2049624d0> assign[=] call[name[mimetypes].guess_type, parameter[name[path]]] if <ast.UnaryOp object at 0x7da2049607c0> begin[:] variable[ct] assign[=] constant[application/octet-stream] if name[encoding] begin[:] call[name[self].headers][name[aiohttp].hdrs.CONTENT_ENCODING] assign[=] name[encoding] name[self].content_type assign[=] name[ct] if name[set_content_length] begin[:] variable[st] assign[=] call[name[os].stat, parameter[name[path]]] name[self].last_modified assign[=] name[st].st_mtime call[name[self].headers][name[aiohttp].hdrs.CONTENT_LENGTH] assign[=] call[name[str], parameter[name[st].st_size]] call[name[self].set_status, parameter[name[status]]] <ast.Try object at 0x7da204962ec0>
keyword[def] identifier[file] ( identifier[self] , identifier[path] , identifier[status] = literal[int] , identifier[set_content_length] = keyword[True] ): literal[string] identifier[ct] , identifier[encoding] = identifier[mimetypes] . identifier[guess_type] ( identifier[path] ) keyword[if] keyword[not] identifier[ct] : identifier[ct] = literal[string] keyword[if] identifier[encoding] : identifier[self] . identifier[headers] [ identifier[aiohttp] . identifier[hdrs] . identifier[CONTENT_ENCODING] ]= identifier[encoding] identifier[self] . identifier[content_type] = identifier[ct] keyword[if] identifier[set_content_length] : identifier[st] = identifier[os] . identifier[stat] ( identifier[path] ) identifier[self] . identifier[last_modified] = identifier[st] . identifier[st_mtime] identifier[self] . identifier[headers] [ identifier[aiohttp] . identifier[hdrs] . identifier[CONTENT_LENGTH] ]= identifier[str] ( identifier[st] . identifier[st_size] ) keyword[else] : identifier[self] . identifier[enable_chunked_encoding] () identifier[self] . identifier[set_status] ( identifier[status] ) keyword[try] : keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[fobj] : keyword[yield] keyword[from] identifier[self] . identifier[prepare] ( identifier[self] . identifier[_request] ) keyword[while] keyword[True] : identifier[data] = identifier[fobj] . identifier[read] ( literal[int] ) keyword[if] keyword[not] identifier[data] : keyword[break] keyword[yield] keyword[from] identifier[self] . identifier[write] ( identifier[data] ) keyword[yield] keyword[from] identifier[self] . identifier[drain] () keyword[except] identifier[FileNotFoundError] : keyword[raise] identifier[aiohttp] . identifier[web] . identifier[HTTPNotFound] () keyword[except] identifier[PermissionError] : keyword[raise] identifier[aiohttp] . identifier[web] . identifier[HTTPForbidden] ()
def file(self, path, status=200, set_content_length=True): """ Return a file as a response """ (ct, encoding) = mimetypes.guess_type(path) if not ct: ct = 'application/octet-stream' # depends on [control=['if'], data=[]] if encoding: self.headers[aiohttp.hdrs.CONTENT_ENCODING] = encoding # depends on [control=['if'], data=[]] self.content_type = ct if set_content_length: st = os.stat(path) self.last_modified = st.st_mtime self.headers[aiohttp.hdrs.CONTENT_LENGTH] = str(st.st_size) # depends on [control=['if'], data=[]] else: self.enable_chunked_encoding() self.set_status(status) try: with open(path, 'rb') as fobj: yield from self.prepare(self._request) while True: data = fobj.read(4096) if not data: break # depends on [control=['if'], data=[]] yield from self.write(data) yield from self.drain() # depends on [control=['while'], data=[]] # depends on [control=['with'], data=['fobj']] # depends on [control=['try'], data=[]] except FileNotFoundError: raise aiohttp.web.HTTPNotFound() # depends on [control=['except'], data=[]] except PermissionError: raise aiohttp.web.HTTPForbidden() # depends on [control=['except'], data=[]]
def normalizeInternalObjectType(value, cls, name): """ Normalizes an internal object type. * **value** must be a instance of **cls**. * Returned value is the same type as the input value. """ if not isinstance(value, cls): raise TypeError("%s must be a %s instance, not %s." % (name, name, type(value).__name__)) return value
def function[normalizeInternalObjectType, parameter[value, cls, name]]: constant[ Normalizes an internal object type. * **value** must be a instance of **cls**. * Returned value is the same type as the input value. ] if <ast.UnaryOp object at 0x7da20c993370> begin[:] <ast.Raise object at 0x7da20c9918d0> return[name[value]]
keyword[def] identifier[normalizeInternalObjectType] ( identifier[value] , identifier[cls] , identifier[name] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[cls] ): keyword[raise] identifier[TypeError] ( literal[string] %( identifier[name] , identifier[name] , identifier[type] ( identifier[value] ). identifier[__name__] )) keyword[return] identifier[value]
def normalizeInternalObjectType(value, cls, name): """ Normalizes an internal object type. * **value** must be a instance of **cls**. * Returned value is the same type as the input value. """ if not isinstance(value, cls): raise TypeError('%s must be a %s instance, not %s.' % (name, name, type(value).__name__)) # depends on [control=['if'], data=[]] return value
def _node_to_model(tree_or_item, package, parent=None, lucent_id=TRANSLUCENT_BINDER_ID): """Given a tree, parse to a set of models""" if 'contents' in tree_or_item: # It is a binder. tree = tree_or_item # Grab the package metadata, so we have required license info metadata = package.metadata.copy() if tree['id'] == lucent_id: metadata['title'] = tree['title'] binder = TranslucentBinder(metadata=metadata) else: try: package_item = package.grab_by_name(tree['id']) binder = BinderItem(package_item, package) except KeyError: # Translucent w/ id metadata.update({ 'title': tree['title'], 'cnx-archive-uri': tree['id'], 'cnx-archive-shortid': tree['shortId']}) binder = Binder(tree['id'], metadata=metadata) for item in tree['contents']: node = _node_to_model(item, package, parent=binder, lucent_id=lucent_id) if node.metadata['title'] != item['title']: binder.set_title_for_node(node, item['title']) result = binder else: # It is a document. item = tree_or_item package_item = package.grab_by_name(item['id']) result = adapt_item(package_item, package) if parent is not None: parent.append(result) return result
def function[_node_to_model, parameter[tree_or_item, package, parent, lucent_id]]: constant[Given a tree, parse to a set of models] if compare[constant[contents] in name[tree_or_item]] begin[:] variable[tree] assign[=] name[tree_or_item] variable[metadata] assign[=] call[name[package].metadata.copy, parameter[]] if compare[call[name[tree]][constant[id]] equal[==] name[lucent_id]] begin[:] call[name[metadata]][constant[title]] assign[=] call[name[tree]][constant[title]] variable[binder] assign[=] call[name[TranslucentBinder], parameter[]] for taget[name[item]] in starred[call[name[tree]][constant[contents]]] begin[:] variable[node] assign[=] call[name[_node_to_model], parameter[name[item], name[package]]] if compare[call[name[node].metadata][constant[title]] not_equal[!=] call[name[item]][constant[title]]] begin[:] call[name[binder].set_title_for_node, parameter[name[node], call[name[item]][constant[title]]]] variable[result] assign[=] name[binder] if compare[name[parent] is_not constant[None]] begin[:] call[name[parent].append, parameter[name[result]]] return[name[result]]
keyword[def] identifier[_node_to_model] ( identifier[tree_or_item] , identifier[package] , identifier[parent] = keyword[None] , identifier[lucent_id] = identifier[TRANSLUCENT_BINDER_ID] ): literal[string] keyword[if] literal[string] keyword[in] identifier[tree_or_item] : identifier[tree] = identifier[tree_or_item] identifier[metadata] = identifier[package] . identifier[metadata] . identifier[copy] () keyword[if] identifier[tree] [ literal[string] ]== identifier[lucent_id] : identifier[metadata] [ literal[string] ]= identifier[tree] [ literal[string] ] identifier[binder] = identifier[TranslucentBinder] ( identifier[metadata] = identifier[metadata] ) keyword[else] : keyword[try] : identifier[package_item] = identifier[package] . identifier[grab_by_name] ( identifier[tree] [ literal[string] ]) identifier[binder] = identifier[BinderItem] ( identifier[package_item] , identifier[package] ) keyword[except] identifier[KeyError] : identifier[metadata] . identifier[update] ({ literal[string] : identifier[tree] [ literal[string] ], literal[string] : identifier[tree] [ literal[string] ], literal[string] : identifier[tree] [ literal[string] ]}) identifier[binder] = identifier[Binder] ( identifier[tree] [ literal[string] ], identifier[metadata] = identifier[metadata] ) keyword[for] identifier[item] keyword[in] identifier[tree] [ literal[string] ]: identifier[node] = identifier[_node_to_model] ( identifier[item] , identifier[package] , identifier[parent] = identifier[binder] , identifier[lucent_id] = identifier[lucent_id] ) keyword[if] identifier[node] . identifier[metadata] [ literal[string] ]!= identifier[item] [ literal[string] ]: identifier[binder] . identifier[set_title_for_node] ( identifier[node] , identifier[item] [ literal[string] ]) identifier[result] = identifier[binder] keyword[else] : identifier[item] = identifier[tree_or_item] identifier[package_item] = identifier[package] . 
identifier[grab_by_name] ( identifier[item] [ literal[string] ]) identifier[result] = identifier[adapt_item] ( identifier[package_item] , identifier[package] ) keyword[if] identifier[parent] keyword[is] keyword[not] keyword[None] : identifier[parent] . identifier[append] ( identifier[result] ) keyword[return] identifier[result]
def _node_to_model(tree_or_item, package, parent=None, lucent_id=TRANSLUCENT_BINDER_ID): """Given a tree, parse to a set of models""" if 'contents' in tree_or_item: # It is a binder. tree = tree_or_item # Grab the package metadata, so we have required license info metadata = package.metadata.copy() if tree['id'] == lucent_id: metadata['title'] = tree['title'] binder = TranslucentBinder(metadata=metadata) # depends on [control=['if'], data=[]] else: try: package_item = package.grab_by_name(tree['id']) binder = BinderItem(package_item, package) # depends on [control=['try'], data=[]] except KeyError: # Translucent w/ id metadata.update({'title': tree['title'], 'cnx-archive-uri': tree['id'], 'cnx-archive-shortid': tree['shortId']}) binder = Binder(tree['id'], metadata=metadata) # depends on [control=['except'], data=[]] for item in tree['contents']: node = _node_to_model(item, package, parent=binder, lucent_id=lucent_id) if node.metadata['title'] != item['title']: binder.set_title_for_node(node, item['title']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] result = binder # depends on [control=['if'], data=['tree_or_item']] else: # It is a document. item = tree_or_item package_item = package.grab_by_name(item['id']) result = adapt_item(package_item, package) if parent is not None: parent.append(result) # depends on [control=['if'], data=['parent']] return result
def loadUiType(uifile, from_imports=False, resource_suffix='_rc', import_from='.'): """loadUiType(uifile, from_imports=False, resource_suffix='_rc', import_from='.') -> (form class, base class) Load a Qt Designer .ui file and return the generated form class and the Qt base class. uifile is a file name or file-like object containing the .ui file. from_imports is optionally set to generate relative import statements. At the moment this only applies to the import of resource modules. resource_suffix is the suffix appended to the basename of any resource file specified in the .ui file to create the name of the Python module generated from the resource file by pyrcc4. The default is '_rc', i.e. if the .ui file specified a resource file called foo.qrc then the corresponding Python module is foo_rc. import_from is optionally set to the package used for relative import statements. The default is ``'.'``. """ import sys from PyQt5 import QtWidgets if sys.hexversion >= 0x03000000: from .port_v3.string_io import StringIO else: from .port_v2.string_io import StringIO code_string = StringIO() winfo = compiler.UICompiler().compileUi(uifile, code_string, from_imports, resource_suffix, import_from) ui_globals = {} exec(code_string.getvalue(), ui_globals) return (ui_globals[winfo["uiclass"]], getattr(QtWidgets, winfo["baseclass"]))
def function[loadUiType, parameter[uifile, from_imports, resource_suffix, import_from]]: constant[loadUiType(uifile, from_imports=False, resource_suffix='_rc', import_from='.') -> (form class, base class) Load a Qt Designer .ui file and return the generated form class and the Qt base class. uifile is a file name or file-like object containing the .ui file. from_imports is optionally set to generate relative import statements. At the moment this only applies to the import of resource modules. resource_suffix is the suffix appended to the basename of any resource file specified in the .ui file to create the name of the Python module generated from the resource file by pyrcc4. The default is '_rc', i.e. if the .ui file specified a resource file called foo.qrc then the corresponding Python module is foo_rc. import_from is optionally set to the package used for relative import statements. The default is ``'.'``. ] import module[sys] from relative_module[PyQt5] import module[QtWidgets] if compare[name[sys].hexversion greater_or_equal[>=] constant[50331648]] begin[:] from relative_module[port_v3.string_io] import module[StringIO] variable[code_string] assign[=] call[name[StringIO], parameter[]] variable[winfo] assign[=] call[call[name[compiler].UICompiler, parameter[]].compileUi, parameter[name[uifile], name[code_string], name[from_imports], name[resource_suffix], name[import_from]]] variable[ui_globals] assign[=] dictionary[[], []] call[name[exec], parameter[call[name[code_string].getvalue, parameter[]], name[ui_globals]]] return[tuple[[<ast.Subscript object at 0x7da1b07ce2c0>, <ast.Call object at 0x7da1b07cef50>]]]
keyword[def] identifier[loadUiType] ( identifier[uifile] , identifier[from_imports] = keyword[False] , identifier[resource_suffix] = literal[string] , identifier[import_from] = literal[string] ): literal[string] keyword[import] identifier[sys] keyword[from] identifier[PyQt5] keyword[import] identifier[QtWidgets] keyword[if] identifier[sys] . identifier[hexversion] >= literal[int] : keyword[from] . identifier[port_v3] . identifier[string_io] keyword[import] identifier[StringIO] keyword[else] : keyword[from] . identifier[port_v2] . identifier[string_io] keyword[import] identifier[StringIO] identifier[code_string] = identifier[StringIO] () identifier[winfo] = identifier[compiler] . identifier[UICompiler] (). identifier[compileUi] ( identifier[uifile] , identifier[code_string] , identifier[from_imports] , identifier[resource_suffix] , identifier[import_from] ) identifier[ui_globals] ={} identifier[exec] ( identifier[code_string] . identifier[getvalue] (), identifier[ui_globals] ) keyword[return] ( identifier[ui_globals] [ identifier[winfo] [ literal[string] ]], identifier[getattr] ( identifier[QtWidgets] , identifier[winfo] [ literal[string] ]))
def loadUiType(uifile, from_imports=False, resource_suffix='_rc', import_from='.'): """loadUiType(uifile, from_imports=False, resource_suffix='_rc', import_from='.') -> (form class, base class) Load a Qt Designer .ui file and return the generated form class and the Qt base class. uifile is a file name or file-like object containing the .ui file. from_imports is optionally set to generate relative import statements. At the moment this only applies to the import of resource modules. resource_suffix is the suffix appended to the basename of any resource file specified in the .ui file to create the name of the Python module generated from the resource file by pyrcc4. The default is '_rc', i.e. if the .ui file specified a resource file called foo.qrc then the corresponding Python module is foo_rc. import_from is optionally set to the package used for relative import statements. The default is ``'.'``. """ import sys from PyQt5 import QtWidgets if sys.hexversion >= 50331648: from .port_v3.string_io import StringIO # depends on [control=['if'], data=[]] else: from .port_v2.string_io import StringIO code_string = StringIO() winfo = compiler.UICompiler().compileUi(uifile, code_string, from_imports, resource_suffix, import_from) ui_globals = {} exec(code_string.getvalue(), ui_globals) return (ui_globals[winfo['uiclass']], getattr(QtWidgets, winfo['baseclass']))
def get(path_or_file, default=SENTINAL, mime=None, name=None, backend=None, encoding=None, encoding_errors=None, kwargs=None, _wtitle=False): """ Get document full text. Accepts a path or file-like object. * If given, `default` is returned instead of an error. * `backend` is either a module object or a string specifying which default backend to use (e.g. "doc"); take a look at backends directory to see a list of default backends. * `mime` and `name` should be passed if the information is available to caller, otherwise a best guess is made. If both are specified `mime` takes precedence. * `encoding` and `encoding_errors` are used to handle text encoding. They are taken into consideration mostly only by pure-python backends which do not rely on CLI tools. Default to "utf8" and "strict" respectively. * `kwargs` are passed to the underlying backend. """ try: text, title = _get( path_or_file, default=default, mime=mime, name=name, backend=backend, kwargs=kwargs, encoding=encoding, encoding_errors=encoding_errors, _wtitle=_wtitle) if _wtitle: return (text, title) else: return text except Exception as e: if default is not SENTINAL: LOGGER.exception(e) return default raise
def function[get, parameter[path_or_file, default, mime, name, backend, encoding, encoding_errors, kwargs, _wtitle]]: constant[ Get document full text. Accepts a path or file-like object. * If given, `default` is returned instead of an error. * `backend` is either a module object or a string specifying which default backend to use (e.g. "doc"); take a look at backends directory to see a list of default backends. * `mime` and `name` should be passed if the information is available to caller, otherwise a best guess is made. If both are specified `mime` takes precedence. * `encoding` and `encoding_errors` are used to handle text encoding. They are taken into consideration mostly only by pure-python backends which do not rely on CLI tools. Default to "utf8" and "strict" respectively. * `kwargs` are passed to the underlying backend. ] <ast.Try object at 0x7da18f09fe20>
keyword[def] identifier[get] ( identifier[path_or_file] , identifier[default] = identifier[SENTINAL] , identifier[mime] = keyword[None] , identifier[name] = keyword[None] , identifier[backend] = keyword[None] , identifier[encoding] = keyword[None] , identifier[encoding_errors] = keyword[None] , identifier[kwargs] = keyword[None] , identifier[_wtitle] = keyword[False] ): literal[string] keyword[try] : identifier[text] , identifier[title] = identifier[_get] ( identifier[path_or_file] , identifier[default] = identifier[default] , identifier[mime] = identifier[mime] , identifier[name] = identifier[name] , identifier[backend] = identifier[backend] , identifier[kwargs] = identifier[kwargs] , identifier[encoding] = identifier[encoding] , identifier[encoding_errors] = identifier[encoding_errors] , identifier[_wtitle] = identifier[_wtitle] ) keyword[if] identifier[_wtitle] : keyword[return] ( identifier[text] , identifier[title] ) keyword[else] : keyword[return] identifier[text] keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[if] identifier[default] keyword[is] keyword[not] identifier[SENTINAL] : identifier[LOGGER] . identifier[exception] ( identifier[e] ) keyword[return] identifier[default] keyword[raise]
def get(path_or_file, default=SENTINAL, mime=None, name=None, backend=None, encoding=None, encoding_errors=None, kwargs=None, _wtitle=False): """ Get document full text. Accepts a path or file-like object. * If given, `default` is returned instead of an error. * `backend` is either a module object or a string specifying which default backend to use (e.g. "doc"); take a look at backends directory to see a list of default backends. * `mime` and `name` should be passed if the information is available to caller, otherwise a best guess is made. If both are specified `mime` takes precedence. * `encoding` and `encoding_errors` are used to handle text encoding. They are taken into consideration mostly only by pure-python backends which do not rely on CLI tools. Default to "utf8" and "strict" respectively. * `kwargs` are passed to the underlying backend. """ try: (text, title) = _get(path_or_file, default=default, mime=mime, name=name, backend=backend, kwargs=kwargs, encoding=encoding, encoding_errors=encoding_errors, _wtitle=_wtitle) if _wtitle: return (text, title) # depends on [control=['if'], data=[]] else: return text # depends on [control=['try'], data=[]] except Exception as e: if default is not SENTINAL: LOGGER.exception(e) return default # depends on [control=['if'], data=['default']] raise # depends on [control=['except'], data=['e']]
def set_group_whole_ban(self, *, group_id, enable=True): """ 群组全员禁言 ------------ :param int group_id: 群号 :param bool enable: 是否禁言 :return: None :rtype: None """ return super().__getattr__('set_group_whole_ban') \ (group_id=group_id, enable=enable)
def function[set_group_whole_ban, parameter[self]]: constant[ 群组全员禁言 ------------ :param int group_id: 群号 :param bool enable: 是否禁言 :return: None :rtype: None ] return[call[call[call[name[super], parameter[]].__getattr__, parameter[constant[set_group_whole_ban]]], parameter[]]]
keyword[def] identifier[set_group_whole_ban] ( identifier[self] ,*, identifier[group_id] , identifier[enable] = keyword[True] ): literal[string] keyword[return] identifier[super] (). identifier[__getattr__] ( literal[string] )( identifier[group_id] = identifier[group_id] , identifier[enable] = identifier[enable] )
def set_group_whole_ban(self, *, group_id, enable=True): """ 群组全员禁言 ------------ :param int group_id: 群号 :param bool enable: 是否禁言 :return: None :rtype: None """ return super().__getattr__('set_group_whole_ban')(group_id=group_id, enable=enable)
def distok(self): """ Boolean array showing which stars pass all distribution constraints. A "distribution constraint" is a constraint that affects the distribution of stars, rather than just the number. """ ok = np.ones(len(self.stars)).astype(bool) for name in self.constraints: c = self.constraints[name] if c.name not in self.distribution_skip: ok &= c.ok return ok
def function[distok, parameter[self]]: constant[ Boolean array showing which stars pass all distribution constraints. A "distribution constraint" is a constraint that affects the distribution of stars, rather than just the number. ] variable[ok] assign[=] call[call[name[np].ones, parameter[call[name[len], parameter[name[self].stars]]]].astype, parameter[name[bool]]] for taget[name[name]] in starred[name[self].constraints] begin[:] variable[c] assign[=] call[name[self].constraints][name[name]] if compare[name[c].name <ast.NotIn object at 0x7da2590d7190> name[self].distribution_skip] begin[:] <ast.AugAssign object at 0x7da1b26d5d20> return[name[ok]]
keyword[def] identifier[distok] ( identifier[self] ): literal[string] identifier[ok] = identifier[np] . identifier[ones] ( identifier[len] ( identifier[self] . identifier[stars] )). identifier[astype] ( identifier[bool] ) keyword[for] identifier[name] keyword[in] identifier[self] . identifier[constraints] : identifier[c] = identifier[self] . identifier[constraints] [ identifier[name] ] keyword[if] identifier[c] . identifier[name] keyword[not] keyword[in] identifier[self] . identifier[distribution_skip] : identifier[ok] &= identifier[c] . identifier[ok] keyword[return] identifier[ok]
def distok(self): """ Boolean array showing which stars pass all distribution constraints. A "distribution constraint" is a constraint that affects the distribution of stars, rather than just the number. """ ok = np.ones(len(self.stars)).astype(bool) for name in self.constraints: c = self.constraints[name] if c.name not in self.distribution_skip: ok &= c.ok # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']] return ok
def safe_listget(list_, index, default='?'): """ depricate """ if index >= len(list_): return default ret = list_[index] if ret is None: return default return ret
def function[safe_listget, parameter[list_, index, default]]: constant[ depricate ] if compare[name[index] greater_or_equal[>=] call[name[len], parameter[name[list_]]]] begin[:] return[name[default]] variable[ret] assign[=] call[name[list_]][name[index]] if compare[name[ret] is constant[None]] begin[:] return[name[default]] return[name[ret]]
keyword[def] identifier[safe_listget] ( identifier[list_] , identifier[index] , identifier[default] = literal[string] ): literal[string] keyword[if] identifier[index] >= identifier[len] ( identifier[list_] ): keyword[return] identifier[default] identifier[ret] = identifier[list_] [ identifier[index] ] keyword[if] identifier[ret] keyword[is] keyword[None] : keyword[return] identifier[default] keyword[return] identifier[ret]
def safe_listget(list_, index, default='?'): """ depricate """ if index >= len(list_): return default # depends on [control=['if'], data=[]] ret = list_[index] if ret is None: return default # depends on [control=['if'], data=[]] return ret
def _get_cmap_data(data, kwargs) -> Tuple[colors.Normalize, np.ndarray]:
    """Get normalized values to be used with a colormap.

    Parameters
    ----------
    data : array_like
    cmap_min : Optional[float] or "min"
        By default 0. If "min", minimum value of the data.
    cmap_max : Optional[float]
        By default, maximum value of the data
    cmap_normalize : str or colors.Normalize

    Returns
    -------
    normalizer : colors.Normalize
    normalized_data : array_like
    """
    normalizer = kwargs.pop("cmap_normalize", None)
    if normalizer == "log":
        # A log scale cannot contain zero, so the lower bound defaults to
        # the smallest strictly-positive value in the data.
        upper = kwargs.pop("cmap_max", data.max())
        lower = kwargs.pop("cmap_min", data[data > 0].min())
        normalizer = colors.LogNorm(lower, upper)
    elif not normalizer:
        upper = kwargs.pop("cmap_max", data.max())
        lower = kwargs.pop("cmap_min", 0)
        if lower == "min":
            lower = data.min()
        normalizer = colors.Normalize(lower, upper, clip=True)
    # A colors.Normalize instance passed by the caller is used as-is.
    return normalizer, normalizer(data)
def function[_get_cmap_data, parameter[data, kwargs]]: constant[Get normalized values to be used with a colormap. Parameters ---------- data : array_like cmap_min : Optional[float] or "min" By default 0. If "min", minimum value of the data. cmap_max : Optional[float] By default, maximum value of the data cmap_normalize : str or colors.Normalize Returns ------- normalizer : colors.Normalize normalized_data : array_like ] variable[norm] assign[=] call[name[kwargs].pop, parameter[constant[cmap_normalize], constant[None]]] if compare[name[norm] equal[==] constant[log]] begin[:] variable[cmap_max] assign[=] call[name[kwargs].pop, parameter[constant[cmap_max], call[name[data].max, parameter[]]]] variable[cmap_min] assign[=] call[name[kwargs].pop, parameter[constant[cmap_min], call[call[name[data]][compare[name[data] greater[>] constant[0]]].min, parameter[]]]] variable[norm] assign[=] call[name[colors].LogNorm, parameter[name[cmap_min], name[cmap_max]]] return[tuple[[<ast.Name object at 0x7da18f721060>, <ast.Call object at 0x7da18f720e80>]]]
keyword[def] identifier[_get_cmap_data] ( identifier[data] , identifier[kwargs] )-> identifier[Tuple] [ identifier[colors] . identifier[Normalize] , identifier[np] . identifier[ndarray] ]: literal[string] identifier[norm] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] ) keyword[if] identifier[norm] == literal[string] : identifier[cmap_max] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[data] . identifier[max] ()) identifier[cmap_min] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[data] [ identifier[data] > literal[int] ]. identifier[min] ()) identifier[norm] = identifier[colors] . identifier[LogNorm] ( identifier[cmap_min] , identifier[cmap_max] ) keyword[elif] keyword[not] identifier[norm] : identifier[cmap_max] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[data] . identifier[max] ()) identifier[cmap_min] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[int] ) keyword[if] identifier[cmap_min] == literal[string] : identifier[cmap_min] = identifier[data] . identifier[min] () identifier[norm] = identifier[colors] . identifier[Normalize] ( identifier[cmap_min] , identifier[cmap_max] , identifier[clip] = keyword[True] ) keyword[return] identifier[norm] , identifier[norm] ( identifier[data] )
def _get_cmap_data(data, kwargs) -> Tuple[colors.Normalize, np.ndarray]: """Get normalized values to be used with a colormap. Parameters ---------- data : array_like cmap_min : Optional[float] or "min" By default 0. If "min", minimum value of the data. cmap_max : Optional[float] By default, maximum value of the data cmap_normalize : str or colors.Normalize Returns ------- normalizer : colors.Normalize normalized_data : array_like """ norm = kwargs.pop('cmap_normalize', None) if norm == 'log': cmap_max = kwargs.pop('cmap_max', data.max()) cmap_min = kwargs.pop('cmap_min', data[data > 0].min()) norm = colors.LogNorm(cmap_min, cmap_max) # depends on [control=['if'], data=['norm']] elif not norm: cmap_max = kwargs.pop('cmap_max', data.max()) cmap_min = kwargs.pop('cmap_min', 0) if cmap_min == 'min': cmap_min = data.min() # depends on [control=['if'], data=['cmap_min']] norm = colors.Normalize(cmap_min, cmap_max, clip=True) # depends on [control=['if'], data=[]] return (norm, norm(data))
def import_journey_data_for_target_stop(self, target_stop_I, origin_stop_I_to_journey_labels, enforce_synchronous_writes=False):
    """
    Import journey labels for a single target stop into the database.

    Parameters
    ----------
    target_stop_I: int
    origin_stop_I_to_journey_labels: dict
        key: origin_stop_Is
        value: list of labels
    enforce_synchronous_writes: bool
        If False (default), SQLite synchronous writes are disabled for
        import speed; if True, the connection's synchronous setting is
        left untouched.
    """
    cur = self.conn.cursor()
    self.conn.isolation_level = 'EXCLUSIVE'
    # Bug fix: this guard was commented out, so enforce_synchronous_writes
    # was silently ignored and synchronous writes were always disabled.
    if not enforce_synchronous_writes:
        cur.execute('PRAGMA synchronous = 0;')
    if self.track_route:
        self._insert_journeys_with_route_into_db(origin_stop_I_to_journey_labels,
                                                 target_stop=int(target_stop_I))
    else:
        self._insert_journeys_into_db_no_route(origin_stop_I_to_journey_labels,
                                               target_stop=int(target_stop_I))
    print("Finished import process")
    self.conn.commit()
def function[import_journey_data_for_target_stop, parameter[self, target_stop_I, origin_stop_I_to_journey_labels, enforce_synchronous_writes]]: constant[ Parameters ---------- origin_stop_I_to_journey_labels: dict key: origin_stop_Is value: list of labels target_stop_I: int ] variable[cur] assign[=] call[name[self].conn.cursor, parameter[]] name[self].conn.isolation_level assign[=] constant[EXCLUSIVE] call[name[cur].execute, parameter[constant[PRAGMA synchronous = 0;]]] if name[self].track_route begin[:] call[name[self]._insert_journeys_with_route_into_db, parameter[name[origin_stop_I_to_journey_labels]]] call[name[print], parameter[constant[Finished import process]]] call[name[self].conn.commit, parameter[]]
keyword[def] identifier[import_journey_data_for_target_stop] ( identifier[self] , identifier[target_stop_I] , identifier[origin_stop_I_to_journey_labels] , identifier[enforce_synchronous_writes] = keyword[False] ): literal[string] identifier[cur] = identifier[self] . identifier[conn] . identifier[cursor] () identifier[self] . identifier[conn] . identifier[isolation_level] = literal[string] identifier[cur] . identifier[execute] ( literal[string] ) keyword[if] identifier[self] . identifier[track_route] : identifier[self] . identifier[_insert_journeys_with_route_into_db] ( identifier[origin_stop_I_to_journey_labels] , identifier[target_stop] = identifier[int] ( identifier[target_stop_I] )) keyword[else] : identifier[self] . identifier[_insert_journeys_into_db_no_route] ( identifier[origin_stop_I_to_journey_labels] , identifier[target_stop] = identifier[int] ( identifier[target_stop_I] )) identifier[print] ( literal[string] ) identifier[self] . identifier[conn] . identifier[commit] ()
def import_journey_data_for_target_stop(self, target_stop_I, origin_stop_I_to_journey_labels, enforce_synchronous_writes=False): """ Parameters ---------- origin_stop_I_to_journey_labels: dict key: origin_stop_Is value: list of labels target_stop_I: int """ cur = self.conn.cursor() self.conn.isolation_level = 'EXCLUSIVE' # if not enforce_synchronous_writes: cur.execute('PRAGMA synchronous = 0;') if self.track_route: self._insert_journeys_with_route_into_db(origin_stop_I_to_journey_labels, target_stop=int(target_stop_I)) # depends on [control=['if'], data=[]] else: self._insert_journeys_into_db_no_route(origin_stop_I_to_journey_labels, target_stop=int(target_stop_I)) print('Finished import process') self.conn.commit()
def clear(self):
    """
    Remove every pending Job from the queue and return them as a list.

    Note that #Job.cancel() is NOT called on any of the removed jobs;
    use #cancel_all() (or cancel them manually) if that is required.
    """
    with synchronized(self.__queue):
        # Snapshot before clearing so the caller receives the jobs that
        # were pending at the moment the lock was held.
        pending = self.__queue.snapshot()
        self.__queue.clear()
    return pending
def function[clear, parameter[self]]: constant[ Removes all pending Jobs from the queue and return them in a list. This method does **no**t call #Job.cancel() on any of the jobs. If you want that, use #cancel_all() or call it manually. ] with call[name[synchronized], parameter[name[self].__queue]] begin[:] variable[jobs] assign[=] call[name[self].__queue.snapshot, parameter[]] call[name[self].__queue.clear, parameter[]] return[name[jobs]]
keyword[def] identifier[clear] ( identifier[self] ): literal[string] keyword[with] identifier[synchronized] ( identifier[self] . identifier[__queue] ): identifier[jobs] = identifier[self] . identifier[__queue] . identifier[snapshot] () identifier[self] . identifier[__queue] . identifier[clear] () keyword[return] identifier[jobs]
def clear(self): """ Removes all pending Jobs from the queue and return them in a list. This method does **no**t call #Job.cancel() on any of the jobs. If you want that, use #cancel_all() or call it manually. """ with synchronized(self.__queue): jobs = self.__queue.snapshot() self.__queue.clear() # depends on [control=['with'], data=[]] return jobs
def _createLocalRouter(siteStore):
    """
    Create an L{IMessageRouter} provider for the default case, where no
    L{IMessageRouter} powerup is installed on the top-level store.

    It wraps a L{LocalMessageRouter} around the L{LoginSystem} installed
    on the given site store.  If no L{LoginSystem} is present, this
    returns a null router which will simply log an error but not deliver
    the message anywhere, until this configuration error can be
    corrected.

    @rtype: L{IMessageRouter}
    """
    ls = siteStore.findUnique(LoginSystem, default=None)
    if ls is None:
        # Raise and immediately catch so that Failure(), called with no
        # arguments inside the except block, captures the active exception
        # (with traceback) for logging.
        try:
            raise UnsatisfiedRequirement()
        except UnsatisfiedRequirement:
            log.err(Failure(),
                    "You have opened a substore from a site store with no "
                    "LoginSystem. Message routing is disabled.")
        # Misconfigured site: fall back to a router that drops messages.
        return _NullRouter()
    return LocalMessageRouter(ls)
def function[_createLocalRouter, parameter[siteStore]]: constant[ Create an L{IMessageRouter} provider for the default case, where no L{IMessageRouter} powerup is installed on the top-level store. It wraps a L{LocalMessageRouter} around the L{LoginSystem} installed on the given site store. If no L{LoginSystem} is present, this returns a null router which will simply log an error but not deliver the message anywhere, until this configuration error can be corrected. @rtype: L{IMessageRouter} ] variable[ls] assign[=] call[name[siteStore].findUnique, parameter[name[LoginSystem]]] if compare[name[ls] is constant[None]] begin[:] <ast.Try object at 0x7da1b0a31b10> return[call[name[_NullRouter], parameter[]]] return[call[name[LocalMessageRouter], parameter[name[ls]]]]
keyword[def] identifier[_createLocalRouter] ( identifier[siteStore] ): literal[string] identifier[ls] = identifier[siteStore] . identifier[findUnique] ( identifier[LoginSystem] , identifier[default] = keyword[None] ) keyword[if] identifier[ls] keyword[is] keyword[None] : keyword[try] : keyword[raise] identifier[UnsatisfiedRequirement] () keyword[except] identifier[UnsatisfiedRequirement] : identifier[log] . identifier[err] ( identifier[Failure] (), literal[string] literal[string] ) keyword[return] identifier[_NullRouter] () keyword[return] identifier[LocalMessageRouter] ( identifier[ls] )
def _createLocalRouter(siteStore): """ Create an L{IMessageRouter} provider for the default case, where no L{IMessageRouter} powerup is installed on the top-level store. It wraps a L{LocalMessageRouter} around the L{LoginSystem} installed on the given site store. If no L{LoginSystem} is present, this returns a null router which will simply log an error but not deliver the message anywhere, until this configuration error can be corrected. @rtype: L{IMessageRouter} """ ls = siteStore.findUnique(LoginSystem, default=None) if ls is None: try: raise UnsatisfiedRequirement() # depends on [control=['try'], data=[]] except UnsatisfiedRequirement: log.err(Failure(), 'You have opened a substore from a site store with no LoginSystem. Message routing is disabled.') # depends on [control=['except'], data=[]] return _NullRouter() # depends on [control=['if'], data=[]] return LocalMessageRouter(ls)
def bfs(self):
    """
    Breadth-first search generator.

    Yields `(node, parent)` for every node in the tree, beginning with
    `(self.root, None)`.
    """
    # Seed the FIFO queue with the root; each node is expanded only
    # after it has been yielded, preserving breadth-first order.
    queue = deque([(self.root, None)])
    while queue:
        node, parent = queue.popleft()
        yield (node, parent)
        for key in node:
            queue.append((node[key], node))
def function[bfs, parameter[self]]: constant[ Breadth-first search generator. Yields `(node, parent)` for every node in the tree, beginning with `(self.root, None)`. ] <ast.Yield object at 0x7da1b2428400> variable[todo] assign[=] call[name[deque], parameter[<ast.ListComp object at 0x7da1b2429ff0>]] while name[todo] begin[:] <ast.Tuple object at 0x7da1b24297b0> assign[=] call[name[todo].popleft, parameter[]] <ast.Yield object at 0x7da1b2428bb0> for taget[name[char]] in starred[name[current]] begin[:] call[name[todo].append, parameter[tuple[[<ast.Subscript object at 0x7da1b2429b10>, <ast.Name object at 0x7da1b2428eb0>]]]]
keyword[def] identifier[bfs] ( identifier[self] ): literal[string] keyword[yield] ( identifier[self] . identifier[root] , keyword[None] ) identifier[todo] = identifier[deque] ([( identifier[self] . identifier[root] [ identifier[char] ], identifier[self] . identifier[root] ) keyword[for] identifier[char] keyword[in] identifier[self] . identifier[root] ]) keyword[while] identifier[todo] : identifier[current] , identifier[parent] = identifier[todo] . identifier[popleft] () keyword[yield] ( identifier[current] , identifier[parent] ) keyword[for] identifier[char] keyword[in] identifier[current] : identifier[todo] . identifier[append] (( identifier[current] [ identifier[char] ], identifier[current] ))
def bfs(self): """ Breadth-first search generator. Yields `(node, parent)` for every node in the tree, beginning with `(self.root, None)`. """ yield (self.root, None) todo = deque([(self.root[char], self.root) for char in self.root]) while todo: (current, parent) = todo.popleft() yield (current, parent) for char in current: todo.append((current[char], current)) # depends on [control=['for'], data=['char']] # depends on [control=['while'], data=[]]
def confd_state_cli_listen_ssh_port(self, **kwargs):
    """Auto Generated Code

    Build the confd-state/cli/listen/ssh/port XML subtree (tail-f
    confd-monitoring namespace), set the port text from
    ``kwargs['port']``, and pass the document to the callback
    (``kwargs['callback']`` if given, otherwise ``self._callback``).
    """
    config = ET.Element("config")
    confd_state = ET.SubElement(config, "confd-state",
                                xmlns="http://tail-f.com/yang/confd-monitoring")
    cli_node = ET.SubElement(confd_state, "cli")
    listen_node = ET.SubElement(cli_node, "listen")
    ssh_node = ET.SubElement(listen_node, "ssh")
    port_node = ET.SubElement(ssh_node, "port")
    port_node.text = kwargs.pop('port')
    handler = kwargs.pop('callback', self._callback)
    return handler(config)
def function[confd_state_cli_listen_ssh_port, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[confd_state] assign[=] call[name[ET].SubElement, parameter[name[config], constant[confd-state]]] variable[cli] assign[=] call[name[ET].SubElement, parameter[name[confd_state], constant[cli]]] variable[listen] assign[=] call[name[ET].SubElement, parameter[name[cli], constant[listen]]] variable[ssh] assign[=] call[name[ET].SubElement, parameter[name[listen], constant[ssh]]] variable[port] assign[=] call[name[ET].SubElement, parameter[name[ssh], constant[port]]] name[port].text assign[=] call[name[kwargs].pop, parameter[constant[port]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[confd_state_cli_listen_ssh_port] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[confd_state] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[cli] = identifier[ET] . identifier[SubElement] ( identifier[confd_state] , literal[string] ) identifier[listen] = identifier[ET] . identifier[SubElement] ( identifier[cli] , literal[string] ) identifier[ssh] = identifier[ET] . identifier[SubElement] ( identifier[listen] , literal[string] ) identifier[port] = identifier[ET] . identifier[SubElement] ( identifier[ssh] , literal[string] ) identifier[port] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def confd_state_cli_listen_ssh_port(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') confd_state = ET.SubElement(config, 'confd-state', xmlns='http://tail-f.com/yang/confd-monitoring') cli = ET.SubElement(confd_state, 'cli') listen = ET.SubElement(cli, 'listen') ssh = ET.SubElement(listen, 'ssh') port = ET.SubElement(ssh, 'port') port.text = kwargs.pop('port') callback = kwargs.pop('callback', self._callback) return callback(config)
def union(rasters):
    """
    Union of rasters

    Usage:
        union(rasters)

    where:
        rasters is a list of GeoRaster objects

    All rasters must share cell sizes and projection; otherwise a
    RasterGeoError is raised.  Where rasters overlap, data cells from
    later rasters in the list overwrite earlier ones.
    """
    # All rasters must agree on both cell sizes and on the projection.
    if sum([rasters[0].x_cell_size == i.x_cell_size for i in rasters]) == len(rasters) \
            and sum([rasters[0].y_cell_size == i.y_cell_size for i in rasters]) == len(rasters) \
            and sum([rasters[0].projection.ExportToProj4() == i.projection.ExportToProj4()
                     for i in rasters]) == len(rasters):
        # Use the shared nodata value when every raster agrees, NaN otherwise.
        if sum([rasters[0].nodata_value == i.nodata_value for i in rasters]) == len(rasters):
            ndv = rasters[0].nodata_value
        else:
            ndv = np.nan
        if ndv is None:  # fixed: was `ndv == None` (PEP 8 E711; unsafe for array-likes)
            ndv = np.nan
        # Keep the common datatype, or None when rasters disagree.
        if sum([rasters[0].datatype == i.datatype for i in rasters]) == len(rasters):
            datatype = rasters[0].datatype
        else:
            datatype = None
        projection = rasters[0].projection
        # Bounding box of the union of all rasters.
        lonmin = min([i.xmin for i in rasters])
        lonmax = max([i.xmax for i in rasters])
        latmin = min([i.ymin for i in rasters])
        latmax = max([i.ymax for i in rasters])
        shape = (np.abs(np.floor((latmax - latmin) / rasters[0].y_cell_size)).astype(int),
                 np.floor((lonmax - lonmin) / rasters[0].x_cell_size).astype(int))
        out = ndv * np.ones(shape)
        outmask = np.ones(shape).astype(bool)
        for i in rasters:
            # Top-left pixel of this raster within the output grid.
            (row, col) = map_pixel(i.xmin, i.ymax, rasters[0].x_cell_size,
                                   rasters[0].y_cell_size, lonmin, latmax)
            # Copy data cells; keep existing output where this raster has nodata.
            out[row:row + i.shape[0], col:col + i.shape[1]] = np.where(
                i.raster.data != i.nodata_value, i.raster.data,
                out[row:row + i.shape[0], col:col + i.shape[1]])
            # Unmask output cells wherever this raster has unmasked data.
            outmask[row:row + i.shape[0], col:col + i.shape[1]] = np.where(
                i.raster.mask == False, False,
                outmask[row:row + i.shape[0], col:col + i.shape[1]])
        out = np.ma.masked_array(out, mask=outmask, fill_value=ndv)
        return GeoRaster(out, (lonmin, rasters[0].x_cell_size, 0.0, latmax, 0.0,
                               rasters[0].y_cell_size),
                         nodata_value=ndv, projection=projection, datatype=datatype)
    else:
        raise RasterGeoError('Rasters need to have same pixel sizes. Use the aggregate or dissolve functions to generate correct GeoRasters')
def function[union, parameter[rasters]]: constant[ Union of rasters Usage: union(rasters) where: rasters is a list of GeoRaster objects ] if <ast.BoolOp object at 0x7da1b28649a0> begin[:] if compare[call[name[sum], parameter[<ast.ListComp object at 0x7da1b282b250>]] equal[==] call[name[len], parameter[name[rasters]]]] begin[:] variable[ndv] assign[=] call[name[rasters]][constant[0]].nodata_value if compare[name[ndv] equal[==] constant[None]] begin[:] variable[ndv] assign[=] name[np].nan if compare[call[name[sum], parameter[<ast.ListComp object at 0x7da1b2829db0>]] equal[==] call[name[len], parameter[name[rasters]]]] begin[:] variable[datatype] assign[=] call[name[rasters]][constant[0]].datatype variable[projection] assign[=] call[name[rasters]][constant[0]].projection variable[lonmin] assign[=] call[name[min], parameter[<ast.ListComp object at 0x7da1b282ada0>]] variable[lonmax] assign[=] call[name[max], parameter[<ast.ListComp object at 0x7da1b282a530>]] variable[latmin] assign[=] call[name[min], parameter[<ast.ListComp object at 0x7da1b282ae60>]] variable[latmax] assign[=] call[name[max], parameter[<ast.ListComp object at 0x7da1b27e0820>]] variable[shape] assign[=] tuple[[<ast.Call object at 0x7da1b27e32e0>, <ast.Call object at 0x7da1b27e25c0>]] variable[out] assign[=] binary_operation[name[ndv] * call[name[np].ones, parameter[name[shape]]]] variable[outmask] assign[=] call[call[name[np].ones, parameter[name[shape]]].astype, parameter[name[bool]]] for taget[name[i]] in starred[name[rasters]] begin[:] <ast.Tuple object at 0x7da1b2872980> assign[=] call[name[map_pixel], parameter[name[i].xmin, name[i].ymax, call[name[rasters]][constant[0]].x_cell_size, call[name[rasters]][constant[0]].y_cell_size, name[lonmin], name[latmax]]] call[name[out]][tuple[[<ast.Slice object at 0x7da1b28728c0>, <ast.Slice object at 0x7da1b28709d0>]]] assign[=] call[name[np].where, parameter[compare[name[i].raster.data not_equal[!=] name[i].nodata_value], name[i].raster.data, 
call[name[out]][tuple[[<ast.Slice object at 0x7da1b28722c0>, <ast.Slice object at 0x7da1b2870610>]]]]] call[name[outmask]][tuple[[<ast.Slice object at 0x7da1b2872c80>, <ast.Slice object at 0x7da1b2873f10>]]] assign[=] call[name[np].where, parameter[compare[name[i].raster.mask equal[==] constant[False]], constant[False], call[name[outmask]][tuple[[<ast.Slice object at 0x7da1b28701c0>, <ast.Slice object at 0x7da1b2873a30>]]]]] variable[out] assign[=] call[name[np].ma.masked_array, parameter[name[out]]] return[call[name[GeoRaster], parameter[name[out], tuple[[<ast.Name object at 0x7da2041d8400>, <ast.Attribute object at 0x7da2041d9d20>, <ast.Constant object at 0x7da2041db850>, <ast.Name object at 0x7da2041daf80>, <ast.Constant object at 0x7da2041d85b0>, <ast.Attribute object at 0x7da2041d9c00>]]]]]
keyword[def] identifier[union] ( identifier[rasters] ): literal[string] keyword[if] identifier[sum] ([ identifier[rasters] [ literal[int] ]. identifier[x_cell_size] == identifier[i] . identifier[x_cell_size] keyword[for] identifier[i] keyword[in] identifier[rasters] ])== identifier[len] ( identifier[rasters] ) keyword[and] identifier[sum] ([ identifier[rasters] [ literal[int] ]. identifier[y_cell_size] == identifier[i] . identifier[y_cell_size] keyword[for] identifier[i] keyword[in] identifier[rasters] ])== identifier[len] ( identifier[rasters] ) keyword[and] identifier[sum] ([ identifier[rasters] [ literal[int] ]. identifier[projection] . identifier[ExportToProj4] ()== identifier[i] . identifier[projection] . identifier[ExportToProj4] () keyword[for] identifier[i] keyword[in] identifier[rasters] ])== identifier[len] ( identifier[rasters] ): keyword[if] identifier[sum] ([ identifier[rasters] [ literal[int] ]. identifier[nodata_value] == identifier[i] . identifier[nodata_value] keyword[for] identifier[i] keyword[in] identifier[rasters] ])== identifier[len] ( identifier[rasters] ): identifier[ndv] = identifier[rasters] [ literal[int] ]. identifier[nodata_value] keyword[else] : identifier[ndv] = identifier[np] . identifier[nan] keyword[if] identifier[ndv] == keyword[None] : identifier[ndv] = identifier[np] . identifier[nan] keyword[if] identifier[sum] ([ identifier[rasters] [ literal[int] ]. identifier[datatype] == identifier[i] . identifier[datatype] keyword[for] identifier[i] keyword[in] identifier[rasters] ])== identifier[len] ( identifier[rasters] ): identifier[datatype] = identifier[rasters] [ literal[int] ]. identifier[datatype] keyword[else] : identifier[datatype] = keyword[None] identifier[projection] = identifier[rasters] [ literal[int] ]. identifier[projection] identifier[lonmin] = identifier[min] ([ identifier[i] . identifier[xmin] keyword[for] identifier[i] keyword[in] identifier[rasters] ]) identifier[lonmax] = identifier[max] ([ identifier[i] . 
identifier[xmax] keyword[for] identifier[i] keyword[in] identifier[rasters] ]) identifier[latmin] = identifier[min] ([ identifier[i] . identifier[ymin] keyword[for] identifier[i] keyword[in] identifier[rasters] ]) identifier[latmax] = identifier[max] ([ identifier[i] . identifier[ymax] keyword[for] identifier[i] keyword[in] identifier[rasters] ]) identifier[shape] =( identifier[np] . identifier[abs] ( identifier[np] . identifier[floor] (( identifier[latmax] - identifier[latmin] )/ identifier[rasters] [ literal[int] ]. identifier[y_cell_size] )). identifier[astype] ( identifier[int] ), identifier[np] . identifier[floor] (( identifier[lonmax] - identifier[lonmin] )/ identifier[rasters] [ literal[int] ]. identifier[x_cell_size] ). identifier[astype] ( identifier[int] )) identifier[out] = identifier[ndv] * identifier[np] . identifier[ones] ( identifier[shape] ) identifier[outmask] = identifier[np] . identifier[ones] ( identifier[shape] ). identifier[astype] ( identifier[bool] ) keyword[for] identifier[i] keyword[in] identifier[rasters] : ( identifier[row] , identifier[col] )= identifier[map_pixel] ( identifier[i] . identifier[xmin] , identifier[i] . identifier[ymax] , identifier[rasters] [ literal[int] ]. identifier[x_cell_size] , identifier[rasters] [ literal[int] ]. identifier[y_cell_size] , identifier[lonmin] , identifier[latmax] ) identifier[out] [ identifier[row] : identifier[row] + identifier[i] . identifier[shape] [ literal[int] ], identifier[col] : identifier[col] + identifier[i] . identifier[shape] [ literal[int] ]]= identifier[np] . identifier[where] ( identifier[i] . identifier[raster] . identifier[data] != identifier[i] . identifier[nodata_value] , identifier[i] . identifier[raster] . identifier[data] , identifier[out] [ identifier[row] : identifier[row] + identifier[i] . identifier[shape] [ literal[int] ], identifier[col] : identifier[col] + identifier[i] . 
identifier[shape] [ literal[int] ]]) identifier[outmask] [ identifier[row] : identifier[row] + identifier[i] . identifier[shape] [ literal[int] ], identifier[col] : identifier[col] + identifier[i] . identifier[shape] [ literal[int] ]]= identifier[np] . identifier[where] ( identifier[i] . identifier[raster] . identifier[mask] == keyword[False] , keyword[False] , identifier[outmask] [ identifier[row] : identifier[row] + identifier[i] . identifier[shape] [ literal[int] ], identifier[col] : identifier[col] + identifier[i] . identifier[shape] [ literal[int] ]]) identifier[out] = identifier[np] . identifier[ma] . identifier[masked_array] ( identifier[out] , identifier[mask] = identifier[outmask] , identifier[fill_value] = identifier[ndv] ) keyword[return] identifier[GeoRaster] ( identifier[out] ,( identifier[lonmin] , identifier[rasters] [ literal[int] ]. identifier[x_cell_size] , literal[int] , identifier[latmax] , literal[int] , identifier[rasters] [ literal[int] ]. identifier[y_cell_size] ), identifier[nodata_value] = identifier[ndv] , identifier[projection] = identifier[projection] , identifier[datatype] = identifier[datatype] ) keyword[else] : keyword[raise] identifier[RasterGeoError] ( literal[string] )
def union(rasters): """ Union of rasters Usage: union(rasters) where: rasters is a list of GeoRaster objects """ if sum([rasters[0].x_cell_size == i.x_cell_size for i in rasters]) == len(rasters) and sum([rasters[0].y_cell_size == i.y_cell_size for i in rasters]) == len(rasters) and (sum([rasters[0].projection.ExportToProj4() == i.projection.ExportToProj4() for i in rasters]) == len(rasters)): if sum([rasters[0].nodata_value == i.nodata_value for i in rasters]) == len(rasters): ndv = rasters[0].nodata_value # depends on [control=['if'], data=[]] else: ndv = np.nan if ndv == None: ndv = np.nan # depends on [control=['if'], data=['ndv']] if sum([rasters[0].datatype == i.datatype for i in rasters]) == len(rasters): datatype = rasters[0].datatype # depends on [control=['if'], data=[]] else: datatype = None projection = rasters[0].projection lonmin = min([i.xmin for i in rasters]) lonmax = max([i.xmax for i in rasters]) latmin = min([i.ymin for i in rasters]) latmax = max([i.ymax for i in rasters]) shape = (np.abs(np.floor((latmax - latmin) / rasters[0].y_cell_size)).astype(int), np.floor((lonmax - lonmin) / rasters[0].x_cell_size).astype(int)) out = ndv * np.ones(shape) outmask = np.ones(shape).astype(bool) for i in rasters: (row, col) = map_pixel(i.xmin, i.ymax, rasters[0].x_cell_size, rasters[0].y_cell_size, lonmin, latmax) out[row:row + i.shape[0], col:col + i.shape[1]] = np.where(i.raster.data != i.nodata_value, i.raster.data, out[row:row + i.shape[0], col:col + i.shape[1]]) outmask[row:row + i.shape[0], col:col + i.shape[1]] = np.where(i.raster.mask == False, False, outmask[row:row + i.shape[0], col:col + i.shape[1]]) # depends on [control=['for'], data=['i']] out = np.ma.masked_array(out, mask=outmask, fill_value=ndv) return GeoRaster(out, (lonmin, rasters[0].x_cell_size, 0.0, latmax, 0.0, rasters[0].y_cell_size), nodata_value=ndv, projection=projection, datatype=datatype) # depends on [control=['if'], data=[]] else: raise RasterGeoError('Rasters need to have 
same pixel sizes. Use the aggregate or dissolve functions to generate correct GeoRasters')
def ping(self):
    """Attempts to ping the server using current credentials, and responds
    with the path of the currently authenticated device."""
    # The "q=this" query asks the server to identify the caller's device.
    response = self.r.get(self.url, params={"q": "this"})
    return self.handleresult(response).text
def function[ping, parameter[self]]: constant[Attempts to ping the server using current credentials, and responds with the path of the currently authenticated device] return[call[name[self].handleresult, parameter[call[name[self].r.get, parameter[name[self].url]]]].text]
keyword[def] identifier[ping] ( identifier[self] ): literal[string] keyword[return] identifier[self] . identifier[handleresult] ( identifier[self] . identifier[r] . identifier[get] ( identifier[self] . identifier[url] , identifier[params] ={ literal[string] : literal[string] })). identifier[text]
def ping(self): """Attempts to ping the server using current credentials, and responds with the path of the currently authenticated device""" return self.handleresult(self.r.get(self.url, params={'q': 'this'})).text
def backward_transfer_pair(
        backward_channel: NettingChannelState,
        payer_transfer: LockedTransferSignedState,
        pseudo_random_generator: random.Random,
        block_number: BlockNumber,
) -> Tuple[Optional[MediationPairState], List[Event]]:
    """ Sends a transfer backwards, allowing the previous hop to try a new
    route.

    When all the routes available for this node failed, send a transfer
    backwards with the same amount and secrethash, allowing the previous hop
    to do a retry.

    Args:
        backward_channel: The original channel which sent the mediated transfer
            to this node.
        payer_transfer: The *latest* payer transfer which is backing the
            mediation.
        block_number: The current block number.

    Returns:
        The mediator pair and the corresponding refund event.  The pair is
        None (and no events are produced) when the refund cannot be sent
        safely.
    """
    transfer_pair = None
    events: List[Event] = list()

    lock = payer_transfer.lock
    # Remaining lifetime of the received lock, measured in blocks.
    lock_timeout = BlockTimeout(lock.expiration - block_number)

    # Ensure the refund transfer's lock has a safe expiration, otherwise don't
    # do anything and wait for the received lock to expire.
    if is_channel_usable(backward_channel, lock.amount, lock_timeout):
        message_identifier = message_identifier_from_prng(pseudo_random_generator)
        # The refund reuses the payer transfer's expiration and secrethash so
        # the previous hop can retry the same payment over another route.
        refund_transfer = channel.send_refundtransfer(
            channel_state=backward_channel,
            initiator=payer_transfer.initiator,
            target=payer_transfer.target,
            amount=get_lock_amount_after_fees(lock, backward_channel),
            message_identifier=message_identifier,
            payment_identifier=payer_transfer.payment_identifier,
            expiration=lock.expiration,
            secrethash=lock.secrethash,
        )

        # Pair the incoming payer transfer with the outgoing refund so the
        # mediator state machine can track both sides.
        transfer_pair = MediationPairState(
            payer_transfer,
            backward_channel.partner_state.address,
            refund_transfer.transfer,
        )

        events.append(refund_transfer)

    return (transfer_pair, events)
def function[backward_transfer_pair, parameter[backward_channel, payer_transfer, pseudo_random_generator, block_number]]: constant[ Sends a transfer backwards, allowing the previous hop to try a new route. When all the routes available for this node failed, send a transfer backwards with the same amount and secrethash, allowing the previous hop to do a retry. Args: backward_channel: The original channel which sent the mediated transfer to this node. payer_transfer: The *latest* payer transfer which is backing the mediation. block_number: The current block number. Returns: The mediator pair and the correspoding refund event. ] variable[transfer_pair] assign[=] constant[None] <ast.AnnAssign object at 0x7da1b1986950> variable[lock] assign[=] name[payer_transfer].lock variable[lock_timeout] assign[=] call[name[BlockTimeout], parameter[binary_operation[name[lock].expiration - name[block_number]]]] if call[name[is_channel_usable], parameter[name[backward_channel], name[lock].amount, name[lock_timeout]]] begin[:] variable[message_identifier] assign[=] call[name[message_identifier_from_prng], parameter[name[pseudo_random_generator]]] variable[refund_transfer] assign[=] call[name[channel].send_refundtransfer, parameter[]] variable[transfer_pair] assign[=] call[name[MediationPairState], parameter[name[payer_transfer], name[backward_channel].partner_state.address, name[refund_transfer].transfer]] call[name[events].append, parameter[name[refund_transfer]]] return[tuple[[<ast.Name object at 0x7da1b170b490>, <ast.Name object at 0x7da1b170bdc0>]]]
keyword[def] identifier[backward_transfer_pair] ( identifier[backward_channel] : identifier[NettingChannelState] , identifier[payer_transfer] : identifier[LockedTransferSignedState] , identifier[pseudo_random_generator] : identifier[random] . identifier[Random] , identifier[block_number] : identifier[BlockNumber] , )-> identifier[Tuple] [ identifier[Optional] [ identifier[MediationPairState] ], identifier[List] [ identifier[Event] ]]: literal[string] identifier[transfer_pair] = keyword[None] identifier[events] : identifier[List] [ identifier[Event] ]= identifier[list] () identifier[lock] = identifier[payer_transfer] . identifier[lock] identifier[lock_timeout] = identifier[BlockTimeout] ( identifier[lock] . identifier[expiration] - identifier[block_number] ) keyword[if] identifier[is_channel_usable] ( identifier[backward_channel] , identifier[lock] . identifier[amount] , identifier[lock_timeout] ): identifier[message_identifier] = identifier[message_identifier_from_prng] ( identifier[pseudo_random_generator] ) identifier[refund_transfer] = identifier[channel] . identifier[send_refundtransfer] ( identifier[channel_state] = identifier[backward_channel] , identifier[initiator] = identifier[payer_transfer] . identifier[initiator] , identifier[target] = identifier[payer_transfer] . identifier[target] , identifier[amount] = identifier[get_lock_amount_after_fees] ( identifier[lock] , identifier[backward_channel] ), identifier[message_identifier] = identifier[message_identifier] , identifier[payment_identifier] = identifier[payer_transfer] . identifier[payment_identifier] , identifier[expiration] = identifier[lock] . identifier[expiration] , identifier[secrethash] = identifier[lock] . identifier[secrethash] , ) identifier[transfer_pair] = identifier[MediationPairState] ( identifier[payer_transfer] , identifier[backward_channel] . identifier[partner_state] . identifier[address] , identifier[refund_transfer] . identifier[transfer] , ) identifier[events] . 
identifier[append] ( identifier[refund_transfer] ) keyword[return] ( identifier[transfer_pair] , identifier[events] )
def backward_transfer_pair(backward_channel: NettingChannelState, payer_transfer: LockedTransferSignedState, pseudo_random_generator: random.Random, block_number: BlockNumber) -> Tuple[Optional[MediationPairState], List[Event]]: """ Sends a transfer backwards, allowing the previous hop to try a new route. When all the routes available for this node failed, send a transfer backwards with the same amount and secrethash, allowing the previous hop to do a retry. Args: backward_channel: The original channel which sent the mediated transfer to this node. payer_transfer: The *latest* payer transfer which is backing the mediation. block_number: The current block number. Returns: The mediator pair and the correspoding refund event. """ transfer_pair = None events: List[Event] = list() lock = payer_transfer.lock lock_timeout = BlockTimeout(lock.expiration - block_number) # Ensure the refund transfer's lock has a safe expiration, otherwise don't # do anything and wait for the received lock to expire. if is_channel_usable(backward_channel, lock.amount, lock_timeout): message_identifier = message_identifier_from_prng(pseudo_random_generator) refund_transfer = channel.send_refundtransfer(channel_state=backward_channel, initiator=payer_transfer.initiator, target=payer_transfer.target, amount=get_lock_amount_after_fees(lock, backward_channel), message_identifier=message_identifier, payment_identifier=payer_transfer.payment_identifier, expiration=lock.expiration, secrethash=lock.secrethash) transfer_pair = MediationPairState(payer_transfer, backward_channel.partner_state.address, refund_transfer.transfer) events.append(refund_transfer) # depends on [control=['if'], data=[]] return (transfer_pair, events)
def show_explorer(self): """Show the explorer""" if self.dockwidget is not None: if self.dockwidget.isHidden(): self.dockwidget.show() self.dockwidget.raise_() self.dockwidget.update()
def function[show_explorer, parameter[self]]: constant[Show the explorer] if compare[name[self].dockwidget is_not constant[None]] begin[:] if call[name[self].dockwidget.isHidden, parameter[]] begin[:] call[name[self].dockwidget.show, parameter[]] call[name[self].dockwidget.raise_, parameter[]] call[name[self].dockwidget.update, parameter[]]
keyword[def] identifier[show_explorer] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[dockwidget] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[self] . identifier[dockwidget] . identifier[isHidden] (): identifier[self] . identifier[dockwidget] . identifier[show] () identifier[self] . identifier[dockwidget] . identifier[raise_] () identifier[self] . identifier[dockwidget] . identifier[update] ()
def show_explorer(self): """Show the explorer""" if self.dockwidget is not None: if self.dockwidget.isHidden(): self.dockwidget.show() # depends on [control=['if'], data=[]] self.dockwidget.raise_() self.dockwidget.update() # depends on [control=['if'], data=[]]
def luhn_check(card_number): """ checks to make sure that the card passes a luhn mod-10 checksum """ sum = 0 num_digits = len(card_number) oddeven = num_digits & 1 for count in range(0, num_digits): digit = int(card_number[count]) if not ((count & 1) ^ oddeven): digit *= 2 if digit > 9: digit -= 9 sum += digit return (sum % 10) == 0
def function[luhn_check, parameter[card_number]]: constant[ checks to make sure that the card passes a luhn mod-10 checksum ] variable[sum] assign[=] constant[0] variable[num_digits] assign[=] call[name[len], parameter[name[card_number]]] variable[oddeven] assign[=] binary_operation[name[num_digits] <ast.BitAnd object at 0x7da2590d6b60> constant[1]] for taget[name[count]] in starred[call[name[range], parameter[constant[0], name[num_digits]]]] begin[:] variable[digit] assign[=] call[name[int], parameter[call[name[card_number]][name[count]]]] if <ast.UnaryOp object at 0x7da1b26af430> begin[:] <ast.AugAssign object at 0x7da2054a6440> if compare[name[digit] greater[>] constant[9]] begin[:] <ast.AugAssign object at 0x7da2054a7bb0> <ast.AugAssign object at 0x7da2054a52a0> return[compare[binary_operation[name[sum] <ast.Mod object at 0x7da2590d6920> constant[10]] equal[==] constant[0]]]
keyword[def] identifier[luhn_check] ( identifier[card_number] ): literal[string] identifier[sum] = literal[int] identifier[num_digits] = identifier[len] ( identifier[card_number] ) identifier[oddeven] = identifier[num_digits] & literal[int] keyword[for] identifier[count] keyword[in] identifier[range] ( literal[int] , identifier[num_digits] ): identifier[digit] = identifier[int] ( identifier[card_number] [ identifier[count] ]) keyword[if] keyword[not] (( identifier[count] & literal[int] )^ identifier[oddeven] ): identifier[digit] *= literal[int] keyword[if] identifier[digit] > literal[int] : identifier[digit] -= literal[int] identifier[sum] += identifier[digit] keyword[return] ( identifier[sum] % literal[int] )== literal[int]
def luhn_check(card_number): """ checks to make sure that the card passes a luhn mod-10 checksum """ sum = 0 num_digits = len(card_number) oddeven = num_digits & 1 for count in range(0, num_digits): digit = int(card_number[count]) if not count & 1 ^ oddeven: digit *= 2 # depends on [control=['if'], data=[]] if digit > 9: digit -= 9 # depends on [control=['if'], data=['digit']] sum += digit # depends on [control=['for'], data=['count']] return sum % 10 == 0
def set_droppable_order(self, droppable_ids): """ reorder droppables per the passed in list :param droppable_ids: :return: """ reordered_droppables = [] current_droppable_ids = [d['id'] for d in self.my_osid_object_form._my_map['droppables']] if set(droppable_ids) != set(current_droppable_ids): raise IllegalState('droppable_ids do not match existing droppables') for droppable_id in droppable_ids: for current_droppable in self.my_osid_object_form._my_map['droppables']: if droppable_id == current_droppable['id']: reordered_droppables.append(current_droppable) break self.my_osid_object_form._my_map['droppables'] = reordered_droppables
def function[set_droppable_order, parameter[self, droppable_ids]]: constant[ reorder droppables per the passed in list :param droppable_ids: :return: ] variable[reordered_droppables] assign[=] list[[]] variable[current_droppable_ids] assign[=] <ast.ListComp object at 0x7da1b0a22f80> if compare[call[name[set], parameter[name[droppable_ids]]] not_equal[!=] call[name[set], parameter[name[current_droppable_ids]]]] begin[:] <ast.Raise object at 0x7da1b0a212a0> for taget[name[droppable_id]] in starred[name[droppable_ids]] begin[:] for taget[name[current_droppable]] in starred[call[name[self].my_osid_object_form._my_map][constant[droppables]]] begin[:] if compare[name[droppable_id] equal[==] call[name[current_droppable]][constant[id]]] begin[:] call[name[reordered_droppables].append, parameter[name[current_droppable]]] break call[name[self].my_osid_object_form._my_map][constant[droppables]] assign[=] name[reordered_droppables]
keyword[def] identifier[set_droppable_order] ( identifier[self] , identifier[droppable_ids] ): literal[string] identifier[reordered_droppables] =[] identifier[current_droppable_ids] =[ identifier[d] [ literal[string] ] keyword[for] identifier[d] keyword[in] identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ]] keyword[if] identifier[set] ( identifier[droppable_ids] )!= identifier[set] ( identifier[current_droppable_ids] ): keyword[raise] identifier[IllegalState] ( literal[string] ) keyword[for] identifier[droppable_id] keyword[in] identifier[droppable_ids] : keyword[for] identifier[current_droppable] keyword[in] identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ]: keyword[if] identifier[droppable_id] == identifier[current_droppable] [ literal[string] ]: identifier[reordered_droppables] . identifier[append] ( identifier[current_droppable] ) keyword[break] identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ]= identifier[reordered_droppables]
def set_droppable_order(self, droppable_ids): """ reorder droppables per the passed in list :param droppable_ids: :return: """ reordered_droppables = [] current_droppable_ids = [d['id'] for d in self.my_osid_object_form._my_map['droppables']] if set(droppable_ids) != set(current_droppable_ids): raise IllegalState('droppable_ids do not match existing droppables') # depends on [control=['if'], data=[]] for droppable_id in droppable_ids: for current_droppable in self.my_osid_object_form._my_map['droppables']: if droppable_id == current_droppable['id']: reordered_droppables.append(current_droppable) break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['current_droppable']] # depends on [control=['for'], data=['droppable_id']] self.my_osid_object_form._my_map['droppables'] = reordered_droppables
def get_community_badge_progress(self, steamID, badgeID, format=None): """Gets all the quests needed to get the specified badge, and which are completed. steamID: The users ID badgeID: The badge we're asking about format: Return format. None defaults to json. (json, xml, vdf) """ parameters = {'steamid' : steamID, 'badgeid' : badgeID} if format is not None: parameters['format'] = format url = self.create_request_url(self.interface, 'GetCommunityBadgeProgress', 1, parameters) data = self.retrieve_request(url) return self.return_data(data, format=format)
def function[get_community_badge_progress, parameter[self, steamID, badgeID, format]]: constant[Gets all the quests needed to get the specified badge, and which are completed. steamID: The users ID badgeID: The badge we're asking about format: Return format. None defaults to json. (json, xml, vdf) ] variable[parameters] assign[=] dictionary[[<ast.Constant object at 0x7da18f722920>, <ast.Constant object at 0x7da18f722d40>], [<ast.Name object at 0x7da18f721c30>, <ast.Name object at 0x7da18f721c60>]] if compare[name[format] is_not constant[None]] begin[:] call[name[parameters]][constant[format]] assign[=] name[format] variable[url] assign[=] call[name[self].create_request_url, parameter[name[self].interface, constant[GetCommunityBadgeProgress], constant[1], name[parameters]]] variable[data] assign[=] call[name[self].retrieve_request, parameter[name[url]]] return[call[name[self].return_data, parameter[name[data]]]]
keyword[def] identifier[get_community_badge_progress] ( identifier[self] , identifier[steamID] , identifier[badgeID] , identifier[format] = keyword[None] ): literal[string] identifier[parameters] ={ literal[string] : identifier[steamID] , literal[string] : identifier[badgeID] } keyword[if] identifier[format] keyword[is] keyword[not] keyword[None] : identifier[parameters] [ literal[string] ]= identifier[format] identifier[url] = identifier[self] . identifier[create_request_url] ( identifier[self] . identifier[interface] , literal[string] , literal[int] , identifier[parameters] ) identifier[data] = identifier[self] . identifier[retrieve_request] ( identifier[url] ) keyword[return] identifier[self] . identifier[return_data] ( identifier[data] , identifier[format] = identifier[format] )
def get_community_badge_progress(self, steamID, badgeID, format=None): """Gets all the quests needed to get the specified badge, and which are completed. steamID: The users ID badgeID: The badge we're asking about format: Return format. None defaults to json. (json, xml, vdf) """ parameters = {'steamid': steamID, 'badgeid': badgeID} if format is not None: parameters['format'] = format # depends on [control=['if'], data=['format']] url = self.create_request_url(self.interface, 'GetCommunityBadgeProgress', 1, parameters) data = self.retrieve_request(url) return self.return_data(data, format=format)
def best_prediction(self): """The highest value from among the predictions made by the action sets in this match set.""" if self._best_prediction is None and self._action_sets: self._best_prediction = max( action_set.prediction for action_set in self._action_sets.values() ) return self._best_prediction
def function[best_prediction, parameter[self]]: constant[The highest value from among the predictions made by the action sets in this match set.] if <ast.BoolOp object at 0x7da1b0feb760> begin[:] name[self]._best_prediction assign[=] call[name[max], parameter[<ast.GeneratorExp object at 0x7da1b0fe9a80>]] return[name[self]._best_prediction]
keyword[def] identifier[best_prediction] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_best_prediction] keyword[is] keyword[None] keyword[and] identifier[self] . identifier[_action_sets] : identifier[self] . identifier[_best_prediction] = identifier[max] ( identifier[action_set] . identifier[prediction] keyword[for] identifier[action_set] keyword[in] identifier[self] . identifier[_action_sets] . identifier[values] () ) keyword[return] identifier[self] . identifier[_best_prediction]
def best_prediction(self): """The highest value from among the predictions made by the action sets in this match set.""" if self._best_prediction is None and self._action_sets: self._best_prediction = max((action_set.prediction for action_set in self._action_sets.values())) # depends on [control=['if'], data=[]] return self._best_prediction
def to_gnuplot_datafile(self, datafilepath): """Dumps the TimeSeries into a gnuplot compatible data file. :param string datafilepath: Path used to create the file. If that file already exists, it will be overwritten! :return: Returns :py:const:`True` if the data could be written, :py:const:`False` otherwise. :rtype: boolean """ try: datafile = file(datafilepath, "wb") except Exception: return False if self._timestampFormat is None: self._timestampFormat = _STR_EPOCHS datafile.write("# time_as_<%s> value\n" % self._timestampFormat) convert = TimeSeries.convert_epoch_to_timestamp for datapoint in self._timeseriesData: timestamp, value = datapoint if self._timestampFormat is not None: timestamp = convert(timestamp, self._timestampFormat) datafile.write("%s %s\n" % (timestamp, value)) datafile.close() return True
def function[to_gnuplot_datafile, parameter[self, datafilepath]]: constant[Dumps the TimeSeries into a gnuplot compatible data file. :param string datafilepath: Path used to create the file. If that file already exists, it will be overwritten! :return: Returns :py:const:`True` if the data could be written, :py:const:`False` otherwise. :rtype: boolean ] <ast.Try object at 0x7da18bcc9420> if compare[name[self]._timestampFormat is constant[None]] begin[:] name[self]._timestampFormat assign[=] name[_STR_EPOCHS] call[name[datafile].write, parameter[binary_operation[constant[# time_as_<%s> value ] <ast.Mod object at 0x7da2590d6920> name[self]._timestampFormat]]] variable[convert] assign[=] name[TimeSeries].convert_epoch_to_timestamp for taget[name[datapoint]] in starred[name[self]._timeseriesData] begin[:] <ast.Tuple object at 0x7da18bccb6d0> assign[=] name[datapoint] if compare[name[self]._timestampFormat is_not constant[None]] begin[:] variable[timestamp] assign[=] call[name[convert], parameter[name[timestamp], name[self]._timestampFormat]] call[name[datafile].write, parameter[binary_operation[constant[%s %s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bccbca0>, <ast.Name object at 0x7da18bccae90>]]]]] call[name[datafile].close, parameter[]] return[constant[True]]
keyword[def] identifier[to_gnuplot_datafile] ( identifier[self] , identifier[datafilepath] ): literal[string] keyword[try] : identifier[datafile] = identifier[file] ( identifier[datafilepath] , literal[string] ) keyword[except] identifier[Exception] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_timestampFormat] keyword[is] keyword[None] : identifier[self] . identifier[_timestampFormat] = identifier[_STR_EPOCHS] identifier[datafile] . identifier[write] ( literal[string] % identifier[self] . identifier[_timestampFormat] ) identifier[convert] = identifier[TimeSeries] . identifier[convert_epoch_to_timestamp] keyword[for] identifier[datapoint] keyword[in] identifier[self] . identifier[_timeseriesData] : identifier[timestamp] , identifier[value] = identifier[datapoint] keyword[if] identifier[self] . identifier[_timestampFormat] keyword[is] keyword[not] keyword[None] : identifier[timestamp] = identifier[convert] ( identifier[timestamp] , identifier[self] . identifier[_timestampFormat] ) identifier[datafile] . identifier[write] ( literal[string] %( identifier[timestamp] , identifier[value] )) identifier[datafile] . identifier[close] () keyword[return] keyword[True]
def to_gnuplot_datafile(self, datafilepath): """Dumps the TimeSeries into a gnuplot compatible data file. :param string datafilepath: Path used to create the file. If that file already exists, it will be overwritten! :return: Returns :py:const:`True` if the data could be written, :py:const:`False` otherwise. :rtype: boolean """ try: datafile = file(datafilepath, 'wb') # depends on [control=['try'], data=[]] except Exception: return False # depends on [control=['except'], data=[]] if self._timestampFormat is None: self._timestampFormat = _STR_EPOCHS # depends on [control=['if'], data=[]] datafile.write('# time_as_<%s> value\n' % self._timestampFormat) convert = TimeSeries.convert_epoch_to_timestamp for datapoint in self._timeseriesData: (timestamp, value) = datapoint if self._timestampFormat is not None: timestamp = convert(timestamp, self._timestampFormat) # depends on [control=['if'], data=[]] datafile.write('%s %s\n' % (timestamp, value)) # depends on [control=['for'], data=['datapoint']] datafile.close() return True
async def list(self, *args, **kwargs): ''' Corresponds to GET request without a resource identifier, fetching documents from the database ''' limit = int(kwargs.pop('limit', self.limit)) limit = 1000 if limit == 0 else limit # lets not go crazy here offset = int(kwargs.pop('offset', self.offset)) projection = None # perform full text search or standard filtering if self._meta.fts_operator in kwargs.keys(): filters = { '$text': {'$search': kwargs[self._meta.fts_operator]} } projection = {'score': {'$meta': 'textScore'}} sort = [('score', {'$meta': 'textScore'}, )] else: # build filters from query parameters filters = self.build_filters(**kwargs) # add custom query defined in resource meta, if exists if isinstance(self._meta.query, dict): filters.update(self._meta.query) # build sorts from query parameters sort = self.build_sort(**kwargs) if isinstance(self._meta.sort, list): sort.extend(self._meta.sort) cursor = self._meta.object_class.get_cursor(db=self.db, query=filters, projection=projection, sort=sort) cursor.skip(offset) cursor.limit(limit) total_count = await self._meta.object_class.count(db=self.db, filters=filters) object_list = await self._meta.object_class.find(cursor) # serialize results serialized_objects = await asyncio.gather(*[obj.serialize() for obj in object_list]) # signal post list asyncio.ensure_future(resource_post_list.send( sender=self._meta.object_class, db=self.db, instances=object_list) ) return { 'meta': { 'total_count': total_count, 'limit': limit, 'offset': offset }, 'objects': serialized_objects }
<ast.AsyncFunctionDef object at 0x7da18bc70700>
keyword[async] keyword[def] identifier[list] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[limit] = identifier[int] ( identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[limit] )) identifier[limit] = literal[int] keyword[if] identifier[limit] == literal[int] keyword[else] identifier[limit] identifier[offset] = identifier[int] ( identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[offset] )) identifier[projection] = keyword[None] keyword[if] identifier[self] . identifier[_meta] . identifier[fts_operator] keyword[in] identifier[kwargs] . identifier[keys] (): identifier[filters] ={ literal[string] :{ literal[string] : identifier[kwargs] [ identifier[self] . identifier[_meta] . identifier[fts_operator] ]} } identifier[projection] ={ literal[string] :{ literal[string] : literal[string] }} identifier[sort] =[( literal[string] ,{ literal[string] : literal[string] },)] keyword[else] : identifier[filters] = identifier[self] . identifier[build_filters] (** identifier[kwargs] ) keyword[if] identifier[isinstance] ( identifier[self] . identifier[_meta] . identifier[query] , identifier[dict] ): identifier[filters] . identifier[update] ( identifier[self] . identifier[_meta] . identifier[query] ) identifier[sort] = identifier[self] . identifier[build_sort] (** identifier[kwargs] ) keyword[if] identifier[isinstance] ( identifier[self] . identifier[_meta] . identifier[sort] , identifier[list] ): identifier[sort] . identifier[extend] ( identifier[self] . identifier[_meta] . identifier[sort] ) identifier[cursor] = identifier[self] . identifier[_meta] . identifier[object_class] . identifier[get_cursor] ( identifier[db] = identifier[self] . identifier[db] , identifier[query] = identifier[filters] , identifier[projection] = identifier[projection] , identifier[sort] = identifier[sort] ) identifier[cursor] . identifier[skip] ( identifier[offset] ) identifier[cursor] . 
identifier[limit] ( identifier[limit] ) identifier[total_count] = keyword[await] identifier[self] . identifier[_meta] . identifier[object_class] . identifier[count] ( identifier[db] = identifier[self] . identifier[db] , identifier[filters] = identifier[filters] ) identifier[object_list] = keyword[await] identifier[self] . identifier[_meta] . identifier[object_class] . identifier[find] ( identifier[cursor] ) identifier[serialized_objects] = keyword[await] identifier[asyncio] . identifier[gather] (*[ identifier[obj] . identifier[serialize] () keyword[for] identifier[obj] keyword[in] identifier[object_list] ]) identifier[asyncio] . identifier[ensure_future] ( identifier[resource_post_list] . identifier[send] ( identifier[sender] = identifier[self] . identifier[_meta] . identifier[object_class] , identifier[db] = identifier[self] . identifier[db] , identifier[instances] = identifier[object_list] ) ) keyword[return] { literal[string] :{ literal[string] : identifier[total_count] , literal[string] : identifier[limit] , literal[string] : identifier[offset] }, literal[string] : identifier[serialized_objects] }
async def list(self, *args, **kwargs): """ Corresponds to GET request without a resource identifier, fetching documents from the database """ limit = int(kwargs.pop('limit', self.limit)) limit = 1000 if limit == 0 else limit # lets not go crazy here offset = int(kwargs.pop('offset', self.offset)) projection = None # perform full text search or standard filtering if self._meta.fts_operator in kwargs.keys(): filters = {'$text': {'$search': kwargs[self._meta.fts_operator]}} projection = {'score': {'$meta': 'textScore'}} sort = [('score', {'$meta': 'textScore'})] # depends on [control=['if'], data=[]] else: # build filters from query parameters filters = self.build_filters(**kwargs) # add custom query defined in resource meta, if exists if isinstance(self._meta.query, dict): filters.update(self._meta.query) # depends on [control=['if'], data=[]] # build sorts from query parameters sort = self.build_sort(**kwargs) if isinstance(self._meta.sort, list): sort.extend(self._meta.sort) # depends on [control=['if'], data=[]] cursor = self._meta.object_class.get_cursor(db=self.db, query=filters, projection=projection, sort=sort) cursor.skip(offset) cursor.limit(limit) total_count = await self._meta.object_class.count(db=self.db, filters=filters) object_list = await self._meta.object_class.find(cursor) # serialize results serialized_objects = await asyncio.gather(*[obj.serialize() for obj in object_list]) # signal post list asyncio.ensure_future(resource_post_list.send(sender=self._meta.object_class, db=self.db, instances=object_list)) return {'meta': {'total_count': total_count, 'limit': limit, 'offset': offset}, 'objects': serialized_objects}
def edit(self, edits, data_reg): """ Edit data in :class:`Data_Source`. Sets :attr:`issaved` to ``False``. """ data_reg.update(edits) self._is_saved = False
def function[edit, parameter[self, edits, data_reg]]: constant[ Edit data in :class:`Data_Source`. Sets :attr:`issaved` to ``False``. ] call[name[data_reg].update, parameter[name[edits]]] name[self]._is_saved assign[=] constant[False]
keyword[def] identifier[edit] ( identifier[self] , identifier[edits] , identifier[data_reg] ): literal[string] identifier[data_reg] . identifier[update] ( identifier[edits] ) identifier[self] . identifier[_is_saved] = keyword[False]
def edit(self, edits, data_reg): """ Edit data in :class:`Data_Source`. Sets :attr:`issaved` to ``False``. """ data_reg.update(edits) self._is_saved = False
def get_pending_bios_settings(self, only_allowed_settings=True): """Get current BIOS settings. :param: only_allowed_settings: True when only allowed BIOS settings are to be returned. If False, All the BIOS settings supported by iLO are returned. :return: a dictionary of pending BIOS settings. Depending on the 'only_allowed_settings', either only the allowed settings are returned or all the supported settings are returned. :raises: IloError, on an error from iLO. :raises: IloCommandNotSupportedError, if the command is not supported on the server. """ headers, bios_uri, bios_settings = self._check_bios_resource() try: settings_config_uri = bios_settings['links']['Settings']['href'] except KeyError: msg = ("Settings resource not found. Couldn't get pending BIOS " "Settings.") raise exception.IloCommandNotSupportedError(msg) status, headers, config = self._rest_get(settings_config_uri) if status != 200: msg = self._get_extended_error(config) raise exception.IloError(msg) # Remove the "links" section config.pop("links", None) if only_allowed_settings: return utils.apply_bios_properties_filter( config, constants.SUPPORTED_BIOS_PROPERTIES) return config
def function[get_pending_bios_settings, parameter[self, only_allowed_settings]]: constant[Get current BIOS settings. :param: only_allowed_settings: True when only allowed BIOS settings are to be returned. If False, All the BIOS settings supported by iLO are returned. :return: a dictionary of pending BIOS settings. Depending on the 'only_allowed_settings', either only the allowed settings are returned or all the supported settings are returned. :raises: IloError, on an error from iLO. :raises: IloCommandNotSupportedError, if the command is not supported on the server. ] <ast.Tuple object at 0x7da1b1a8f4f0> assign[=] call[name[self]._check_bios_resource, parameter[]] <ast.Try object at 0x7da1b1a8f2b0> <ast.Tuple object at 0x7da1b1a8d840> assign[=] call[name[self]._rest_get, parameter[name[settings_config_uri]]] if compare[name[status] not_equal[!=] constant[200]] begin[:] variable[msg] assign[=] call[name[self]._get_extended_error, parameter[name[config]]] <ast.Raise object at 0x7da1b1a8e2f0> call[name[config].pop, parameter[constant[links], constant[None]]] if name[only_allowed_settings] begin[:] return[call[name[utils].apply_bios_properties_filter, parameter[name[config], name[constants].SUPPORTED_BIOS_PROPERTIES]]] return[name[config]]
keyword[def] identifier[get_pending_bios_settings] ( identifier[self] , identifier[only_allowed_settings] = keyword[True] ): literal[string] identifier[headers] , identifier[bios_uri] , identifier[bios_settings] = identifier[self] . identifier[_check_bios_resource] () keyword[try] : identifier[settings_config_uri] = identifier[bios_settings] [ literal[string] ][ literal[string] ][ literal[string] ] keyword[except] identifier[KeyError] : identifier[msg] =( literal[string] literal[string] ) keyword[raise] identifier[exception] . identifier[IloCommandNotSupportedError] ( identifier[msg] ) identifier[status] , identifier[headers] , identifier[config] = identifier[self] . identifier[_rest_get] ( identifier[settings_config_uri] ) keyword[if] identifier[status] != literal[int] : identifier[msg] = identifier[self] . identifier[_get_extended_error] ( identifier[config] ) keyword[raise] identifier[exception] . identifier[IloError] ( identifier[msg] ) identifier[config] . identifier[pop] ( literal[string] , keyword[None] ) keyword[if] identifier[only_allowed_settings] : keyword[return] identifier[utils] . identifier[apply_bios_properties_filter] ( identifier[config] , identifier[constants] . identifier[SUPPORTED_BIOS_PROPERTIES] ) keyword[return] identifier[config]
def get_pending_bios_settings(self, only_allowed_settings=True): """Get current BIOS settings. :param: only_allowed_settings: True when only allowed BIOS settings are to be returned. If False, All the BIOS settings supported by iLO are returned. :return: a dictionary of pending BIOS settings. Depending on the 'only_allowed_settings', either only the allowed settings are returned or all the supported settings are returned. :raises: IloError, on an error from iLO. :raises: IloCommandNotSupportedError, if the command is not supported on the server. """ (headers, bios_uri, bios_settings) = self._check_bios_resource() try: settings_config_uri = bios_settings['links']['Settings']['href'] # depends on [control=['try'], data=[]] except KeyError: msg = "Settings resource not found. Couldn't get pending BIOS Settings." raise exception.IloCommandNotSupportedError(msg) # depends on [control=['except'], data=[]] (status, headers, config) = self._rest_get(settings_config_uri) if status != 200: msg = self._get_extended_error(config) raise exception.IloError(msg) # depends on [control=['if'], data=[]] # Remove the "links" section config.pop('links', None) if only_allowed_settings: return utils.apply_bios_properties_filter(config, constants.SUPPORTED_BIOS_PROPERTIES) # depends on [control=['if'], data=[]] return config
def set_toplevel_object(self, instance, class_=None): """ Set the toplevel object to return from :meth:`get_toplevel_object` when asked for `class_` to `instance`. If `class_` is :data:`None`, the :func:`type` of the `instance` is used. """ if class_ is None: class_ = type(instance) self._toplevels[class_] = instance
def function[set_toplevel_object, parameter[self, instance, class_]]: constant[ Set the toplevel object to return from :meth:`get_toplevel_object` when asked for `class_` to `instance`. If `class_` is :data:`None`, the :func:`type` of the `instance` is used. ] if compare[name[class_] is constant[None]] begin[:] variable[class_] assign[=] call[name[type], parameter[name[instance]]] call[name[self]._toplevels][name[class_]] assign[=] name[instance]
keyword[def] identifier[set_toplevel_object] ( identifier[self] , identifier[instance] , identifier[class_] = keyword[None] ): literal[string] keyword[if] identifier[class_] keyword[is] keyword[None] : identifier[class_] = identifier[type] ( identifier[instance] ) identifier[self] . identifier[_toplevels] [ identifier[class_] ]= identifier[instance]
def set_toplevel_object(self, instance, class_=None): """ Set the toplevel object to return from :meth:`get_toplevel_object` when asked for `class_` to `instance`. If `class_` is :data:`None`, the :func:`type` of the `instance` is used. """ if class_ is None: class_ = type(instance) # depends on [control=['if'], data=['class_']] self._toplevels[class_] = instance
def add(self, key, value): """Add an entry to a list preference Add `value` to the list of entries for the `key` preference. """ if not key in self.prefs: self.prefs[key] = [] self.prefs[key].append(value)
def function[add, parameter[self, key, value]]: constant[Add an entry to a list preference Add `value` to the list of entries for the `key` preference. ] if <ast.UnaryOp object at 0x7da204622aa0> begin[:] call[name[self].prefs][name[key]] assign[=] list[[]] call[call[name[self].prefs][name[key]].append, parameter[name[value]]]
keyword[def] identifier[add] ( identifier[self] , identifier[key] , identifier[value] ): literal[string] keyword[if] keyword[not] identifier[key] keyword[in] identifier[self] . identifier[prefs] : identifier[self] . identifier[prefs] [ identifier[key] ]=[] identifier[self] . identifier[prefs] [ identifier[key] ]. identifier[append] ( identifier[value] )
def add(self, key, value): """Add an entry to a list preference Add `value` to the list of entries for the `key` preference. """ if not key in self.prefs: self.prefs[key] = [] # depends on [control=['if'], data=[]] self.prefs[key].append(value)
def from_dict(data, ctx): """ Instantiate a new Instrument from a dict (generally from loading a JSON response). The data used to instantiate the Instrument is a shallow copy of the dict passed in, with any complex child types instantiated appropriately. """ data = data.copy() if data.get('minimumTradeSize') is not None: data['minimumTradeSize'] = ctx.convert_decimal_number( data.get('minimumTradeSize') ) if data.get('maximumTrailingStopDistance') is not None: data['maximumTrailingStopDistance'] = ctx.convert_decimal_number( data.get('maximumTrailingStopDistance') ) if data.get('minimumTrailingStopDistance') is not None: data['minimumTrailingStopDistance'] = ctx.convert_decimal_number( data.get('minimumTrailingStopDistance') ) if data.get('maximumPositionSize') is not None: data['maximumPositionSize'] = ctx.convert_decimal_number( data.get('maximumPositionSize') ) if data.get('maximumOrderUnits') is not None: data['maximumOrderUnits'] = ctx.convert_decimal_number( data.get('maximumOrderUnits') ) if data.get('marginRate') is not None: data['marginRate'] = ctx.convert_decimal_number( data.get('marginRate') ) if data.get('commission') is not None: data['commission'] = \ ctx.primitives.InstrumentCommission.from_dict( data['commission'], ctx ) return Instrument(**data)
def function[from_dict, parameter[data, ctx]]: constant[ Instantiate a new Instrument from a dict (generally from loading a JSON response). The data used to instantiate the Instrument is a shallow copy of the dict passed in, with any complex child types instantiated appropriately. ] variable[data] assign[=] call[name[data].copy, parameter[]] if compare[call[name[data].get, parameter[constant[minimumTradeSize]]] is_not constant[None]] begin[:] call[name[data]][constant[minimumTradeSize]] assign[=] call[name[ctx].convert_decimal_number, parameter[call[name[data].get, parameter[constant[minimumTradeSize]]]]] if compare[call[name[data].get, parameter[constant[maximumTrailingStopDistance]]] is_not constant[None]] begin[:] call[name[data]][constant[maximumTrailingStopDistance]] assign[=] call[name[ctx].convert_decimal_number, parameter[call[name[data].get, parameter[constant[maximumTrailingStopDistance]]]]] if compare[call[name[data].get, parameter[constant[minimumTrailingStopDistance]]] is_not constant[None]] begin[:] call[name[data]][constant[minimumTrailingStopDistance]] assign[=] call[name[ctx].convert_decimal_number, parameter[call[name[data].get, parameter[constant[minimumTrailingStopDistance]]]]] if compare[call[name[data].get, parameter[constant[maximumPositionSize]]] is_not constant[None]] begin[:] call[name[data]][constant[maximumPositionSize]] assign[=] call[name[ctx].convert_decimal_number, parameter[call[name[data].get, parameter[constant[maximumPositionSize]]]]] if compare[call[name[data].get, parameter[constant[maximumOrderUnits]]] is_not constant[None]] begin[:] call[name[data]][constant[maximumOrderUnits]] assign[=] call[name[ctx].convert_decimal_number, parameter[call[name[data].get, parameter[constant[maximumOrderUnits]]]]] if compare[call[name[data].get, parameter[constant[marginRate]]] is_not constant[None]] begin[:] call[name[data]][constant[marginRate]] assign[=] call[name[ctx].convert_decimal_number, parameter[call[name[data].get, 
parameter[constant[marginRate]]]]] if compare[call[name[data].get, parameter[constant[commission]]] is_not constant[None]] begin[:] call[name[data]][constant[commission]] assign[=] call[name[ctx].primitives.InstrumentCommission.from_dict, parameter[call[name[data]][constant[commission]], name[ctx]]] return[call[name[Instrument], parameter[]]]
keyword[def] identifier[from_dict] ( identifier[data] , identifier[ctx] ): literal[string] identifier[data] = identifier[data] . identifier[copy] () keyword[if] identifier[data] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ]= identifier[ctx] . identifier[convert_decimal_number] ( identifier[data] . identifier[get] ( literal[string] ) ) keyword[if] identifier[data] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ]= identifier[ctx] . identifier[convert_decimal_number] ( identifier[data] . identifier[get] ( literal[string] ) ) keyword[if] identifier[data] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ]= identifier[ctx] . identifier[convert_decimal_number] ( identifier[data] . identifier[get] ( literal[string] ) ) keyword[if] identifier[data] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ]= identifier[ctx] . identifier[convert_decimal_number] ( identifier[data] . identifier[get] ( literal[string] ) ) keyword[if] identifier[data] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ]= identifier[ctx] . identifier[convert_decimal_number] ( identifier[data] . identifier[get] ( literal[string] ) ) keyword[if] identifier[data] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ]= identifier[ctx] . identifier[convert_decimal_number] ( identifier[data] . identifier[get] ( literal[string] ) ) keyword[if] identifier[data] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ]= identifier[ctx] . identifier[primitives] . identifier[InstrumentCommission] . 
identifier[from_dict] ( identifier[data] [ literal[string] ], identifier[ctx] ) keyword[return] identifier[Instrument] (** identifier[data] )
def from_dict(data, ctx): """ Instantiate a new Instrument from a dict (generally from loading a JSON response). The data used to instantiate the Instrument is a shallow copy of the dict passed in, with any complex child types instantiated appropriately. """ data = data.copy() if data.get('minimumTradeSize') is not None: data['minimumTradeSize'] = ctx.convert_decimal_number(data.get('minimumTradeSize')) # depends on [control=['if'], data=[]] if data.get('maximumTrailingStopDistance') is not None: data['maximumTrailingStopDistance'] = ctx.convert_decimal_number(data.get('maximumTrailingStopDistance')) # depends on [control=['if'], data=[]] if data.get('minimumTrailingStopDistance') is not None: data['minimumTrailingStopDistance'] = ctx.convert_decimal_number(data.get('minimumTrailingStopDistance')) # depends on [control=['if'], data=[]] if data.get('maximumPositionSize') is not None: data['maximumPositionSize'] = ctx.convert_decimal_number(data.get('maximumPositionSize')) # depends on [control=['if'], data=[]] if data.get('maximumOrderUnits') is not None: data['maximumOrderUnits'] = ctx.convert_decimal_number(data.get('maximumOrderUnits')) # depends on [control=['if'], data=[]] if data.get('marginRate') is not None: data['marginRate'] = ctx.convert_decimal_number(data.get('marginRate')) # depends on [control=['if'], data=[]] if data.get('commission') is not None: data['commission'] = ctx.primitives.InstrumentCommission.from_dict(data['commission'], ctx) # depends on [control=['if'], data=[]] return Instrument(**data)
def srfscc(srfstr, bodyid): """ Translate a surface string, together with a body ID code, to the corresponding surface ID code. The input surface string may contain a name or an integer ID code. https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/srfscc_c.html :param srfstr: Surface name or ID string. :type srfstr: str :param bodyid: ID code of body associated with surface. :type bodyid: int :return: Integer surface ID code. :rtype: int """ srfstr = stypes.stringToCharP(srfstr) bodyid = ctypes.c_int(bodyid) code = ctypes.c_int() isname = ctypes.c_int() libspice.srfscc_c(srfstr, bodyid, ctypes.byref(code), ctypes.byref(isname)) return code.value, bool(isname.value)
def function[srfscc, parameter[srfstr, bodyid]]: constant[ Translate a surface string, together with a body ID code, to the corresponding surface ID code. The input surface string may contain a name or an integer ID code. https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/srfscc_c.html :param srfstr: Surface name or ID string. :type srfstr: str :param bodyid: ID code of body associated with surface. :type bodyid: int :return: Integer surface ID code. :rtype: int ] variable[srfstr] assign[=] call[name[stypes].stringToCharP, parameter[name[srfstr]]] variable[bodyid] assign[=] call[name[ctypes].c_int, parameter[name[bodyid]]] variable[code] assign[=] call[name[ctypes].c_int, parameter[]] variable[isname] assign[=] call[name[ctypes].c_int, parameter[]] call[name[libspice].srfscc_c, parameter[name[srfstr], name[bodyid], call[name[ctypes].byref, parameter[name[code]]], call[name[ctypes].byref, parameter[name[isname]]]]] return[tuple[[<ast.Attribute object at 0x7da2054a4eb0>, <ast.Call object at 0x7da2054a7130>]]]
keyword[def] identifier[srfscc] ( identifier[srfstr] , identifier[bodyid] ): literal[string] identifier[srfstr] = identifier[stypes] . identifier[stringToCharP] ( identifier[srfstr] ) identifier[bodyid] = identifier[ctypes] . identifier[c_int] ( identifier[bodyid] ) identifier[code] = identifier[ctypes] . identifier[c_int] () identifier[isname] = identifier[ctypes] . identifier[c_int] () identifier[libspice] . identifier[srfscc_c] ( identifier[srfstr] , identifier[bodyid] , identifier[ctypes] . identifier[byref] ( identifier[code] ), identifier[ctypes] . identifier[byref] ( identifier[isname] )) keyword[return] identifier[code] . identifier[value] , identifier[bool] ( identifier[isname] . identifier[value] )
def srfscc(srfstr, bodyid): """ Translate a surface string, together with a body ID code, to the corresponding surface ID code. The input surface string may contain a name or an integer ID code. https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/srfscc_c.html :param srfstr: Surface name or ID string. :type srfstr: str :param bodyid: ID code of body associated with surface. :type bodyid: int :return: Integer surface ID code. :rtype: int """ srfstr = stypes.stringToCharP(srfstr) bodyid = ctypes.c_int(bodyid) code = ctypes.c_int() isname = ctypes.c_int() libspice.srfscc_c(srfstr, bodyid, ctypes.byref(code), ctypes.byref(isname)) return (code.value, bool(isname.value))
def is_func_decorator(node: astroid.node_classes.NodeNG) -> bool: """return true if the name is used in function decorator""" parent = node.parent while parent is not None: if isinstance(parent, astroid.Decorators): return True if parent.is_statement or isinstance( parent, (astroid.Lambda, scoped_nodes.ComprehensionScope, scoped_nodes.ListComp), ): break parent = parent.parent return False
def function[is_func_decorator, parameter[node]]: constant[return true if the name is used in function decorator] variable[parent] assign[=] name[node].parent while compare[name[parent] is_not constant[None]] begin[:] if call[name[isinstance], parameter[name[parent], name[astroid].Decorators]] begin[:] return[constant[True]] if <ast.BoolOp object at 0x7da1b020dfc0> begin[:] break variable[parent] assign[=] name[parent].parent return[constant[False]]
keyword[def] identifier[is_func_decorator] ( identifier[node] : identifier[astroid] . identifier[node_classes] . identifier[NodeNG] )-> identifier[bool] : literal[string] identifier[parent] = identifier[node] . identifier[parent] keyword[while] identifier[parent] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[isinstance] ( identifier[parent] , identifier[astroid] . identifier[Decorators] ): keyword[return] keyword[True] keyword[if] identifier[parent] . identifier[is_statement] keyword[or] identifier[isinstance] ( identifier[parent] , ( identifier[astroid] . identifier[Lambda] , identifier[scoped_nodes] . identifier[ComprehensionScope] , identifier[scoped_nodes] . identifier[ListComp] ), ): keyword[break] identifier[parent] = identifier[parent] . identifier[parent] keyword[return] keyword[False]
def is_func_decorator(node: astroid.node_classes.NodeNG) -> bool: """return true if the name is used in function decorator""" parent = node.parent while parent is not None: if isinstance(parent, astroid.Decorators): return True # depends on [control=['if'], data=[]] if parent.is_statement or isinstance(parent, (astroid.Lambda, scoped_nodes.ComprehensionScope, scoped_nodes.ListComp)): break # depends on [control=['if'], data=[]] parent = parent.parent # depends on [control=['while'], data=['parent']] return False
def to_template(template_name=None): """ Decorator for simple call TemplateResponse Examples: @to_template("test.html") def test(request): return {'test': 100} @to_template def test2(request): return {'test': 100, 'TEMPLATE': 'test.html'} @to_template def test2(request, template_name='test.html'): return {'test': 100, 'TEMPLATE': template_name} """ def decorator(view_func): def _wrapped_view(request, *args, **kwargs): result = view_func(request, *args, **kwargs) if isinstance(result, dict): return TemplateResponse(request, result.pop('TEMPLATE', template_name), result) return result return wraps(view_func, assigned=available_attrs(view_func))(_wrapped_view) return decorator
def function[to_template, parameter[template_name]]: constant[ Decorator for simple call TemplateResponse Examples: @to_template("test.html") def test(request): return {'test': 100} @to_template def test2(request): return {'test': 100, 'TEMPLATE': 'test.html'} @to_template def test2(request, template_name='test.html'): return {'test': 100, 'TEMPLATE': template_name} ] def function[decorator, parameter[view_func]]: def function[_wrapped_view, parameter[request]]: variable[result] assign[=] call[name[view_func], parameter[name[request], <ast.Starred object at 0x7da1b0a40250>]] if call[name[isinstance], parameter[name[result], name[dict]]] begin[:] return[call[name[TemplateResponse], parameter[name[request], call[name[result].pop, parameter[constant[TEMPLATE], name[template_name]]], name[result]]]] return[name[result]] return[call[call[name[wraps], parameter[name[view_func]]], parameter[name[_wrapped_view]]]] return[name[decorator]]
keyword[def] identifier[to_template] ( identifier[template_name] = keyword[None] ): literal[string] keyword[def] identifier[decorator] ( identifier[view_func] ): keyword[def] identifier[_wrapped_view] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ): identifier[result] = identifier[view_func] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ) keyword[if] identifier[isinstance] ( identifier[result] , identifier[dict] ): keyword[return] identifier[TemplateResponse] ( identifier[request] , identifier[result] . identifier[pop] ( literal[string] , identifier[template_name] ), identifier[result] ) keyword[return] identifier[result] keyword[return] identifier[wraps] ( identifier[view_func] , identifier[assigned] = identifier[available_attrs] ( identifier[view_func] ))( identifier[_wrapped_view] ) keyword[return] identifier[decorator]
def to_template(template_name=None): """ Decorator for simple call TemplateResponse Examples: @to_template("test.html") def test(request): return {'test': 100} @to_template def test2(request): return {'test': 100, 'TEMPLATE': 'test.html'} @to_template def test2(request, template_name='test.html'): return {'test': 100, 'TEMPLATE': template_name} """ def decorator(view_func): def _wrapped_view(request, *args, **kwargs): result = view_func(request, *args, **kwargs) if isinstance(result, dict): return TemplateResponse(request, result.pop('TEMPLATE', template_name), result) # depends on [control=['if'], data=[]] return result return wraps(view_func, assigned=available_attrs(view_func))(_wrapped_view) return decorator
def sort(self, order="asc"): """Getting the sorted result of the given list :@param order: "asc" :@type order: string :@return self """ self.__prepare() if isinstance(self._json_data, list): if order == "asc": self._json_data = sorted(self._json_data) else: self._json_data = sorted(self._json_data, reverse=True) return self
def function[sort, parameter[self, order]]: constant[Getting the sorted result of the given list :@param order: "asc" :@type order: string :@return self ] call[name[self].__prepare, parameter[]] if call[name[isinstance], parameter[name[self]._json_data, name[list]]] begin[:] if compare[name[order] equal[==] constant[asc]] begin[:] name[self]._json_data assign[=] call[name[sorted], parameter[name[self]._json_data]] return[name[self]]
keyword[def] identifier[sort] ( identifier[self] , identifier[order] = literal[string] ): literal[string] identifier[self] . identifier[__prepare] () keyword[if] identifier[isinstance] ( identifier[self] . identifier[_json_data] , identifier[list] ): keyword[if] identifier[order] == literal[string] : identifier[self] . identifier[_json_data] = identifier[sorted] ( identifier[self] . identifier[_json_data] ) keyword[else] : identifier[self] . identifier[_json_data] = identifier[sorted] ( identifier[self] . identifier[_json_data] , identifier[reverse] = keyword[True] ) keyword[return] identifier[self]
def sort(self, order='asc'): """Getting the sorted result of the given list :@param order: "asc" :@type order: string :@return self """ self.__prepare() if isinstance(self._json_data, list): if order == 'asc': self._json_data = sorted(self._json_data) # depends on [control=['if'], data=[]] else: self._json_data = sorted(self._json_data, reverse=True) # depends on [control=['if'], data=[]] return self
def get(self): """Get the contents of a GValue. The contents of the GValue are read out as a Python type. """ # logger.debug('GValue.get: self = %s', self) gtype = self.gvalue.g_type fundamental = gobject_lib.g_type_fundamental(gtype) result = None if gtype == GValue.gbool_type: result = bool(gobject_lib.g_value_get_boolean(self.gvalue)) elif gtype == GValue.gint_type: result = gobject_lib.g_value_get_int(self.gvalue) elif gtype == GValue.guint64_type: result = gobject_lib.g_value_get_uint64(self.gvalue) elif gtype == GValue.gdouble_type: result = gobject_lib.g_value_get_double(self.gvalue) elif fundamental == GValue.genum_type: return GValue.from_enum(gtype, gobject_lib.g_value_get_enum(self.gvalue)) elif fundamental == GValue.gflags_type: result = gobject_lib.g_value_get_flags(self.gvalue) elif gtype == GValue.gstr_type: pointer = gobject_lib.g_value_get_string(self.gvalue) if pointer != ffi.NULL: result = _to_string(pointer) elif gtype == GValue.refstr_type: psize = ffi.new('size_t *') pointer = vips_lib.vips_value_get_ref_string(self.gvalue, psize) # psize[0] will be number of bytes in string, but just assume it's # NULL-terminated result = _to_string(pointer) elif gtype == GValue.image_type: # g_value_get_object() will not add a ref ... 
that is # held by the gvalue go = gobject_lib.g_value_get_object(self.gvalue) vi = ffi.cast('VipsImage *', go) # we want a ref that will last with the life of the vimage: # this ref is matched by the unref that's attached to finalize # by Image() gobject_lib.g_object_ref(go) result = pyvips.Image(vi) elif gtype == GValue.array_int_type: pint = ffi.new('int *') array = vips_lib.vips_value_get_array_int(self.gvalue, pint) result = [] for i in range(0, pint[0]): result.append(array[i]) elif gtype == GValue.array_double_type: pint = ffi.new('int *') array = vips_lib.vips_value_get_array_double(self.gvalue, pint) result = [] for i in range(0, pint[0]): result.append(array[i]) elif gtype == GValue.array_image_type: pint = ffi.new('int *') array = vips_lib.vips_value_get_array_image(self.gvalue, pint) result = [] for i in range(0, pint[0]): vi = array[i] gobject_lib.g_object_ref(vi) image = pyvips.Image(vi) result.append(image) elif gtype == GValue.blob_type: psize = ffi.new('size_t *') array = vips_lib.vips_value_get_blob(self.gvalue, psize) buf = ffi.cast('char*', array) result = ffi.unpack(buf, psize[0]) else: raise Error('unsupported gtype for get {0}'. format(type_name(gtype))) return result
def function[get, parameter[self]]: constant[Get the contents of a GValue. The contents of the GValue are read out as a Python type. ] variable[gtype] assign[=] name[self].gvalue.g_type variable[fundamental] assign[=] call[name[gobject_lib].g_type_fundamental, parameter[name[gtype]]] variable[result] assign[=] constant[None] if compare[name[gtype] equal[==] name[GValue].gbool_type] begin[:] variable[result] assign[=] call[name[bool], parameter[call[name[gobject_lib].g_value_get_boolean, parameter[name[self].gvalue]]]] return[name[result]]
keyword[def] identifier[get] ( identifier[self] ): literal[string] identifier[gtype] = identifier[self] . identifier[gvalue] . identifier[g_type] identifier[fundamental] = identifier[gobject_lib] . identifier[g_type_fundamental] ( identifier[gtype] ) identifier[result] = keyword[None] keyword[if] identifier[gtype] == identifier[GValue] . identifier[gbool_type] : identifier[result] = identifier[bool] ( identifier[gobject_lib] . identifier[g_value_get_boolean] ( identifier[self] . identifier[gvalue] )) keyword[elif] identifier[gtype] == identifier[GValue] . identifier[gint_type] : identifier[result] = identifier[gobject_lib] . identifier[g_value_get_int] ( identifier[self] . identifier[gvalue] ) keyword[elif] identifier[gtype] == identifier[GValue] . identifier[guint64_type] : identifier[result] = identifier[gobject_lib] . identifier[g_value_get_uint64] ( identifier[self] . identifier[gvalue] ) keyword[elif] identifier[gtype] == identifier[GValue] . identifier[gdouble_type] : identifier[result] = identifier[gobject_lib] . identifier[g_value_get_double] ( identifier[self] . identifier[gvalue] ) keyword[elif] identifier[fundamental] == identifier[GValue] . identifier[genum_type] : keyword[return] identifier[GValue] . identifier[from_enum] ( identifier[gtype] , identifier[gobject_lib] . identifier[g_value_get_enum] ( identifier[self] . identifier[gvalue] )) keyword[elif] identifier[fundamental] == identifier[GValue] . identifier[gflags_type] : identifier[result] = identifier[gobject_lib] . identifier[g_value_get_flags] ( identifier[self] . identifier[gvalue] ) keyword[elif] identifier[gtype] == identifier[GValue] . identifier[gstr_type] : identifier[pointer] = identifier[gobject_lib] . identifier[g_value_get_string] ( identifier[self] . identifier[gvalue] ) keyword[if] identifier[pointer] != identifier[ffi] . identifier[NULL] : identifier[result] = identifier[_to_string] ( identifier[pointer] ) keyword[elif] identifier[gtype] == identifier[GValue] . 
identifier[refstr_type] : identifier[psize] = identifier[ffi] . identifier[new] ( literal[string] ) identifier[pointer] = identifier[vips_lib] . identifier[vips_value_get_ref_string] ( identifier[self] . identifier[gvalue] , identifier[psize] ) identifier[result] = identifier[_to_string] ( identifier[pointer] ) keyword[elif] identifier[gtype] == identifier[GValue] . identifier[image_type] : identifier[go] = identifier[gobject_lib] . identifier[g_value_get_object] ( identifier[self] . identifier[gvalue] ) identifier[vi] = identifier[ffi] . identifier[cast] ( literal[string] , identifier[go] ) identifier[gobject_lib] . identifier[g_object_ref] ( identifier[go] ) identifier[result] = identifier[pyvips] . identifier[Image] ( identifier[vi] ) keyword[elif] identifier[gtype] == identifier[GValue] . identifier[array_int_type] : identifier[pint] = identifier[ffi] . identifier[new] ( literal[string] ) identifier[array] = identifier[vips_lib] . identifier[vips_value_get_array_int] ( identifier[self] . identifier[gvalue] , identifier[pint] ) identifier[result] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[pint] [ literal[int] ]): identifier[result] . identifier[append] ( identifier[array] [ identifier[i] ]) keyword[elif] identifier[gtype] == identifier[GValue] . identifier[array_double_type] : identifier[pint] = identifier[ffi] . identifier[new] ( literal[string] ) identifier[array] = identifier[vips_lib] . identifier[vips_value_get_array_double] ( identifier[self] . identifier[gvalue] , identifier[pint] ) identifier[result] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[pint] [ literal[int] ]): identifier[result] . identifier[append] ( identifier[array] [ identifier[i] ]) keyword[elif] identifier[gtype] == identifier[GValue] . identifier[array_image_type] : identifier[pint] = identifier[ffi] . identifier[new] ( literal[string] ) identifier[array] = identifier[vips_lib] . 
identifier[vips_value_get_array_image] ( identifier[self] . identifier[gvalue] , identifier[pint] ) identifier[result] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[pint] [ literal[int] ]): identifier[vi] = identifier[array] [ identifier[i] ] identifier[gobject_lib] . identifier[g_object_ref] ( identifier[vi] ) identifier[image] = identifier[pyvips] . identifier[Image] ( identifier[vi] ) identifier[result] . identifier[append] ( identifier[image] ) keyword[elif] identifier[gtype] == identifier[GValue] . identifier[blob_type] : identifier[psize] = identifier[ffi] . identifier[new] ( literal[string] ) identifier[array] = identifier[vips_lib] . identifier[vips_value_get_blob] ( identifier[self] . identifier[gvalue] , identifier[psize] ) identifier[buf] = identifier[ffi] . identifier[cast] ( literal[string] , identifier[array] ) identifier[result] = identifier[ffi] . identifier[unpack] ( identifier[buf] , identifier[psize] [ literal[int] ]) keyword[else] : keyword[raise] identifier[Error] ( literal[string] . identifier[format] ( identifier[type_name] ( identifier[gtype] ))) keyword[return] identifier[result]
def get(self): """Get the contents of a GValue. The contents of the GValue are read out as a Python type. """ # logger.debug('GValue.get: self = %s', self) gtype = self.gvalue.g_type fundamental = gobject_lib.g_type_fundamental(gtype) result = None if gtype == GValue.gbool_type: result = bool(gobject_lib.g_value_get_boolean(self.gvalue)) # depends on [control=['if'], data=[]] elif gtype == GValue.gint_type: result = gobject_lib.g_value_get_int(self.gvalue) # depends on [control=['if'], data=[]] elif gtype == GValue.guint64_type: result = gobject_lib.g_value_get_uint64(self.gvalue) # depends on [control=['if'], data=[]] elif gtype == GValue.gdouble_type: result = gobject_lib.g_value_get_double(self.gvalue) # depends on [control=['if'], data=[]] elif fundamental == GValue.genum_type: return GValue.from_enum(gtype, gobject_lib.g_value_get_enum(self.gvalue)) # depends on [control=['if'], data=[]] elif fundamental == GValue.gflags_type: result = gobject_lib.g_value_get_flags(self.gvalue) # depends on [control=['if'], data=[]] elif gtype == GValue.gstr_type: pointer = gobject_lib.g_value_get_string(self.gvalue) if pointer != ffi.NULL: result = _to_string(pointer) # depends on [control=['if'], data=['pointer']] # depends on [control=['if'], data=[]] elif gtype == GValue.refstr_type: psize = ffi.new('size_t *') pointer = vips_lib.vips_value_get_ref_string(self.gvalue, psize) # psize[0] will be number of bytes in string, but just assume it's # NULL-terminated result = _to_string(pointer) # depends on [control=['if'], data=[]] elif gtype == GValue.image_type: # g_value_get_object() will not add a ref ... 
that is # held by the gvalue go = gobject_lib.g_value_get_object(self.gvalue) vi = ffi.cast('VipsImage *', go) # we want a ref that will last with the life of the vimage: # this ref is matched by the unref that's attached to finalize # by Image() gobject_lib.g_object_ref(go) result = pyvips.Image(vi) # depends on [control=['if'], data=[]] elif gtype == GValue.array_int_type: pint = ffi.new('int *') array = vips_lib.vips_value_get_array_int(self.gvalue, pint) result = [] for i in range(0, pint[0]): result.append(array[i]) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] elif gtype == GValue.array_double_type: pint = ffi.new('int *') array = vips_lib.vips_value_get_array_double(self.gvalue, pint) result = [] for i in range(0, pint[0]): result.append(array[i]) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] elif gtype == GValue.array_image_type: pint = ffi.new('int *') array = vips_lib.vips_value_get_array_image(self.gvalue, pint) result = [] for i in range(0, pint[0]): vi = array[i] gobject_lib.g_object_ref(vi) image = pyvips.Image(vi) result.append(image) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] elif gtype == GValue.blob_type: psize = ffi.new('size_t *') array = vips_lib.vips_value_get_blob(self.gvalue, psize) buf = ffi.cast('char*', array) result = ffi.unpack(buf, psize[0]) # depends on [control=['if'], data=[]] else: raise Error('unsupported gtype for get {0}'.format(type_name(gtype))) return result
def _find_new_ancestors(cls, db: BaseDB, header: BlockHeader) -> Iterable[BlockHeader]:
    """
    Returns the chain leading up from the given header until (but not including)
    the first ancestor it has in common with our canonical chain.

    If D is the canonical head in the following chain, and F is the new header,
    then this function returns (F, E).

    A - B - C - D
           \
            E - F
    """
    h = header
    while True:
        try:
            # Ask the canonical chain what header it has at this height.
            orig = cls._get_canonical_block_header_by_number(db, h.block_number)
        except HeaderNotFound:
            # This just means the block is not on the canonical chain.
            pass
        else:
            if orig.hash == h.hash:
                # Found the common ancestor, stop.
                break

        # Found a new ancestor
        yield h

        if h.parent_hash == GENESIS_PARENT_HASH:
            # Reached the genesis boundary: nothing further to walk.
            break
        else:
            # Step to the parent and repeat the canonical-chain comparison.
            h = cls._get_block_header_by_hash(db, h.parent_hash)
def function[_find_new_ancestors, parameter[cls, db, header]]: constant[ Returns the chain leading up from the given header until (but not including) the first ancestor it has in common with our canonical chain. If D is the canonical head in the following chain, and F is the new header, then this function returns (F, E). A - B - C - D E - F ] variable[h] assign[=] name[header] while constant[True] begin[:] <ast.Try object at 0x7da1b175dc00> <ast.Yield object at 0x7da1b175fb80> if compare[name[h].parent_hash equal[==] name[GENESIS_PARENT_HASH]] begin[:] break
keyword[def] identifier[_find_new_ancestors] ( identifier[cls] , identifier[db] : identifier[BaseDB] , identifier[header] : identifier[BlockHeader] )-> identifier[Iterable] [ identifier[BlockHeader] ]: literal[string] identifier[h] = identifier[header] keyword[while] keyword[True] : keyword[try] : identifier[orig] = identifier[cls] . identifier[_get_canonical_block_header_by_number] ( identifier[db] , identifier[h] . identifier[block_number] ) keyword[except] identifier[HeaderNotFound] : keyword[pass] keyword[else] : keyword[if] identifier[orig] . identifier[hash] == identifier[h] . identifier[hash] : keyword[break] keyword[yield] identifier[h] keyword[if] identifier[h] . identifier[parent_hash] == identifier[GENESIS_PARENT_HASH] : keyword[break] keyword[else] : identifier[h] = identifier[cls] . identifier[_get_block_header_by_hash] ( identifier[db] , identifier[h] . identifier[parent_hash] )
def _find_new_ancestors(cls, db: BaseDB, header: BlockHeader) -> Iterable[BlockHeader]: """ Returns the chain leading up from the given header until (but not including) the first ancestor it has in common with our canonical chain. If D is the canonical head in the following chain, and F is the new header, then this function returns (F, E). A - B - C - D E - F """ h = header while True: try: orig = cls._get_canonical_block_header_by_number(db, h.block_number) # depends on [control=['try'], data=[]] except HeaderNotFound: # This just means the block is not on the canonical chain. pass # depends on [control=['except'], data=[]] else: if orig.hash == h.hash: # Found the common ancestor, stop. break # depends on [control=['if'], data=[]] # Found a new ancestor yield h if h.parent_hash == GENESIS_PARENT_HASH: break # depends on [control=['if'], data=[]] else: h = cls._get_block_header_by_hash(db, h.parent_hash) # depends on [control=['while'], data=[]]
def get_widget_template(self, field_name, field):
    """
    Return the widget template override to use for a form field, if any.

    Lookup preference:

    1. Override registered under the field name.
    2. Override registered under the widget class.

    If neither is registered, ``None`` is returned, meaning "use Django's
    default widget template".

    :param field_name: The field name to select a widget template for.
    :param field: `Field` instance to return a widget template.
    :return: Template name to use when rendering the widget or `None`
    """
    overrides = self.widget_template_overrides or {}
    # Try the more specific key (field name) first, then the widget class.
    for lookup_key in (field_name, field.widget.__class__):
        template_name = overrides.get(lookup_key)
        if template_name:
            return template_name
    return None
def function[get_widget_template, parameter[self, field_name, field]]: constant[ Returns the optional widget template to use when rendering the widget for a form field. Preference of template selection: 1. Template from `widget_template_overrides` selected by field name 2. Template from `widget_template_overrides` selected by widget class By default, returns `None` which means "use Django's default widget template". :param field_name: The field name to select a widget template for. :param field: `Field` instance to return a widget template. :return: Template name to use when rendering the widget or `None` ] variable[templates] assign[=] <ast.BoolOp object at 0x7da1b2879600> variable[template_name] assign[=] call[name[templates].get, parameter[name[field_name], constant[None]]] if name[template_name] begin[:] return[name[template_name]] variable[template_name] assign[=] call[name[templates].get, parameter[name[field].widget.__class__, constant[None]]] if name[template_name] begin[:] return[name[template_name]] return[constant[None]]
keyword[def] identifier[get_widget_template] ( identifier[self] , identifier[field_name] , identifier[field] ): literal[string] identifier[templates] = identifier[self] . identifier[widget_template_overrides] keyword[or] {} identifier[template_name] = identifier[templates] . identifier[get] ( identifier[field_name] , keyword[None] ) keyword[if] identifier[template_name] : keyword[return] identifier[template_name] identifier[template_name] = identifier[templates] . identifier[get] ( identifier[field] . identifier[widget] . identifier[__class__] , keyword[None] ) keyword[if] identifier[template_name] : keyword[return] identifier[template_name] keyword[return] keyword[None]
def get_widget_template(self, field_name, field): """ Returns the optional widget template to use when rendering the widget for a form field. Preference of template selection: 1. Template from `widget_template_overrides` selected by field name 2. Template from `widget_template_overrides` selected by widget class By default, returns `None` which means "use Django's default widget template". :param field_name: The field name to select a widget template for. :param field: `Field` instance to return a widget template. :return: Template name to use when rendering the widget or `None` """ templates = self.widget_template_overrides or {} template_name = templates.get(field_name, None) if template_name: return template_name # depends on [control=['if'], data=[]] template_name = templates.get(field.widget.__class__, None) if template_name: return template_name # depends on [control=['if'], data=[]] return None
def grab_hotkey(self, item):
    """
    Grab a hotkey.

    A hotkey with no filter regex is global: it is grabbed directly on the
    root window (and recursively as well when the Mutter workaround is
    needed). A hotkey with a filter regex is grabbed recursively so that
    matching child windows of the root are covered.
    """
    if item.get_applicable_regex() is not None:
        # Window-filtered hotkey: recurse over the root's children and
        # grab from the matching windows.
        self.__enqueue(self.__grabRecurse, item, self.rootWindow)
        return

    # Global hotkey: grab it on the root window itself.
    self.__enqueue(self.__grabHotkey, item.hotKey, item.modifiers, self.rootWindow)
    if self.__needsMutterWorkaround(item):
        self.__enqueue(self.__grabRecurse, item, self.rootWindow, False)
def function[grab_hotkey, parameter[self, item]]: constant[ Grab a hotkey. If the hotkey has no filter regex, it is global and is grabbed recursively from the root window If it has a filter regex, iterate over all children of the root and grab from matching windows ] if compare[call[name[item].get_applicable_regex, parameter[]] is constant[None]] begin[:] call[name[self].__enqueue, parameter[name[self].__grabHotkey, name[item].hotKey, name[item].modifiers, name[self].rootWindow]] if call[name[self].__needsMutterWorkaround, parameter[name[item]]] begin[:] call[name[self].__enqueue, parameter[name[self].__grabRecurse, name[item], name[self].rootWindow, constant[False]]]
keyword[def] identifier[grab_hotkey] ( identifier[self] , identifier[item] ): literal[string] keyword[if] identifier[item] . identifier[get_applicable_regex] () keyword[is] keyword[None] : identifier[self] . identifier[__enqueue] ( identifier[self] . identifier[__grabHotkey] , identifier[item] . identifier[hotKey] , identifier[item] . identifier[modifiers] , identifier[self] . identifier[rootWindow] ) keyword[if] identifier[self] . identifier[__needsMutterWorkaround] ( identifier[item] ): identifier[self] . identifier[__enqueue] ( identifier[self] . identifier[__grabRecurse] , identifier[item] , identifier[self] . identifier[rootWindow] , keyword[False] ) keyword[else] : identifier[self] . identifier[__enqueue] ( identifier[self] . identifier[__grabRecurse] , identifier[item] , identifier[self] . identifier[rootWindow] )
def grab_hotkey(self, item): """ Grab a hotkey. If the hotkey has no filter regex, it is global and is grabbed recursively from the root window If it has a filter regex, iterate over all children of the root and grab from matching windows """ if item.get_applicable_regex() is None: self.__enqueue(self.__grabHotkey, item.hotKey, item.modifiers, self.rootWindow) if self.__needsMutterWorkaround(item): self.__enqueue(self.__grabRecurse, item, self.rootWindow, False) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: self.__enqueue(self.__grabRecurse, item, self.rootWindow)
def _prepare_io_handler(self, handler):
    """Call the `interfaces.IOHandler.prepare` method and remove the
    handler from unprepared handler list when done.

    Returns ``True`` when the handler is fully prepared, ``False`` when it
    asked to be prepared again later.
    """
    logger.debug(" preparing handler: {0!r}".format(handler))
    result = handler.prepare()
    logger.debug(" prepare result: {0!r}".format(result))
    if isinstance(result, HandlerReady):
        # Fully prepared: this handler never needs preparing again.
        del self._unprepared_handlers[handler]
        return True
    if isinstance(result, PrepareAgain):
        if result.timeout is not None:
            # Shrink the loop timeout so we wake up in time for the
            # earliest requested retry.
            if self._timeout is None:
                self._timeout = result.timeout
            else:
                self._timeout = min(self._timeout, result.timeout)
        return False
    raise TypeError("Unexpected result type from prepare()")
def function[_prepare_io_handler, parameter[self, handler]]: constant[Call the `interfaces.IOHandler.prepare` method and remove the handler from unprepared handler list when done. ] call[name[logger].debug, parameter[call[constant[ preparing handler: {0!r}].format, parameter[name[handler]]]]] variable[ret] assign[=] call[name[handler].prepare, parameter[]] call[name[logger].debug, parameter[call[constant[ prepare result: {0!r}].format, parameter[name[ret]]]]] if call[name[isinstance], parameter[name[ret], name[HandlerReady]]] begin[:] <ast.Delete object at 0x7da204347a90> variable[prepared] assign[=] constant[True] return[name[prepared]]
keyword[def] identifier[_prepare_io_handler] ( identifier[self] , identifier[handler] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[handler] )) identifier[ret] = identifier[handler] . identifier[prepare] () identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[ret] )) keyword[if] identifier[isinstance] ( identifier[ret] , identifier[HandlerReady] ): keyword[del] identifier[self] . identifier[_unprepared_handlers] [ identifier[handler] ] identifier[prepared] = keyword[True] keyword[elif] identifier[isinstance] ( identifier[ret] , identifier[PrepareAgain] ): keyword[if] identifier[ret] . identifier[timeout] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[self] . identifier[_timeout] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[_timeout] = identifier[min] ( identifier[self] . identifier[_timeout] , identifier[ret] . identifier[timeout] ) keyword[else] : identifier[self] . identifier[_timeout] = identifier[ret] . identifier[timeout] identifier[prepared] = keyword[False] keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] ) keyword[return] identifier[prepared]
def _prepare_io_handler(self, handler): """Call the `interfaces.IOHandler.prepare` method and remove the handler from unprepared handler list when done. """ logger.debug(' preparing handler: {0!r}'.format(handler)) ret = handler.prepare() logger.debug(' prepare result: {0!r}'.format(ret)) if isinstance(ret, HandlerReady): del self._unprepared_handlers[handler] prepared = True # depends on [control=['if'], data=[]] elif isinstance(ret, PrepareAgain): if ret.timeout is not None: if self._timeout is not None: self._timeout = min(self._timeout, ret.timeout) # depends on [control=['if'], data=[]] else: self._timeout = ret.timeout # depends on [control=['if'], data=[]] prepared = False # depends on [control=['if'], data=[]] else: raise TypeError('Unexpected result type from prepare()') return prepared
def _es_content(settings):
    """
    Extract content formating related subset of widget settings.
    """
    # Keys that control how the widget's content itself is laid out.
    content_keys = (
        ConsoleWidget.SETTING_WIDTH,
        ConsoleWidget.SETTING_ALIGN,
        ConsoleWidget.SETTING_PADDING,
        ConsoleWidget.SETTING_PADDING_LEFT,
        ConsoleWidget.SETTING_PADDING_RIGHT,
        ConsoleWidget.SETTING_PADDING_CHAR,
    )
    subset = {}
    for key in content_keys:
        subset[key] = settings[key]
    return subset
def function[_es_content, parameter[settings]]: constant[ Extract content formating related subset of widget settings. ] return[<ast.DictComp object at 0x7da2045679a0>]
keyword[def] identifier[_es_content] ( identifier[settings] ): literal[string] keyword[return] { identifier[k] : identifier[settings] [ identifier[k] ] keyword[for] identifier[k] keyword[in] ( identifier[ConsoleWidget] . identifier[SETTING_WIDTH] , identifier[ConsoleWidget] . identifier[SETTING_ALIGN] , identifier[ConsoleWidget] . identifier[SETTING_PADDING] , identifier[ConsoleWidget] . identifier[SETTING_PADDING_LEFT] , identifier[ConsoleWidget] . identifier[SETTING_PADDING_RIGHT] , identifier[ConsoleWidget] . identifier[SETTING_PADDING_CHAR] )}
def _es_content(settings): """ Extract content formating related subset of widget settings. """ return {k: settings[k] for k in (ConsoleWidget.SETTING_WIDTH, ConsoleWidget.SETTING_ALIGN, ConsoleWidget.SETTING_PADDING, ConsoleWidget.SETTING_PADDING_LEFT, ConsoleWidget.SETTING_PADDING_RIGHT, ConsoleWidget.SETTING_PADDING_CHAR)}
def p_functioncall(self, tree):
    # Where all the real interpreter action is
    # Note that things that should only be done at the top level
    # are performed in the interpret function defined below.
    #
    # NOTE: the docstring below is not documentation -- it is the grammar
    # rule consumed by the SPARK-style parser generator, so its text must
    # not be changed.
    ''' V ::= function_call ( V LPAREN V RPAREN )'''
    # Normalize the parsed argument node into a flat list of values.
    if type(tree[2].value) != type([]):
        args = [tree[2].value]
    else:
        args = tree[2].value

    fname = tree[0].value
    if fname not in syfunctions:
        print("Error: unknown function:", fname)
        self.error(fname)
    else:
        if fname == 'unit':
            # constant spectrum
            tree.value = spectrum.FlatSpectrum(args[0],fluxunits=args[1])
        elif fname == 'bb':
            # black body
            tree.value = spectrum.BlackBody(args[0])
        elif fname == 'pl':
            # power law
            if args[2] not in synforms:
                # Diagnostic only: construction proceeds regardless.
                print("Error: unrecognized units:", args[2])
            # code to create powerlaw spectrum object
            tree.value = spectrum.Powerlaw(args[0],args[1],fluxunits=args[2])
        elif fname == 'box':
            # box throughput
            tree.value = spectrum.Box(args[0],args[1])
        elif fname == 'spec':
            # spectrum from reference file (for now....)
            name = args[0]
            tree.value = spectrum.TabularSourceSpectrum(_handleIRAFName(name))
        elif fname == 'band':
            # passband -- uses the raw string value of the argument node
            args=tree[2].svalue
            tree.value = ObsBandpass(args)
        elif fname == 'em':
            # emission line
            tree.value = spectrum.GaussianSource(args[2],args[0],args[1],fluxunits=args[3])
        elif fname == 'icat':
            # catalog interpolation
            tree.value = catalog.Icat(*args)
        elif fname == 'rn':
            # renormalize
            sp = args[0]
            if not isinstance(sp,spectrum.SourceSpectrum):
                # First argument was a file name rather than a spectrum
                # object: load it as a tabular source spectrum.
                name=_handleIRAFName(args[0])
                sp = spectrum.TabularSourceSpectrum(name)
            # Force the renormalization in the case of partial overlap (OverlapError),
            # but raise an exception if the spectrum and bandpass are entirely
            # disjoint (DisjointError)
            try:
                tree.value = sp.renorm(args[2],args[3],args[1])
            except DisjointError:
                raise
            except OverlapError:
                tree.value = sp.renorm(args[2],args[3],args[1],force=True)
                tree.value.warnings['force_renorm'] = 'Warning: Renormalization of the spectrum, to the specified value, in the specified units, exceeds the limit of the specified passband.'
        elif fname == 'z':
            # redshift
            if args[0] != 'null':    # the ETC generates junk sometimes....
                try:
                    tree.value = args[0].redshift(args[1])
                except AttributeError:
                    # args[0] is not a spectrum object; try loading it as
                    # a file, and finally fall back to a flat spectrum.
                    try:
                        #name = getName(args[0])
                        sp = spectrum.TabularSourceSpectrum( \
                            _handleIRAFName(args[0]))
                        tree.value = sp.redshift(args[1])
                    except AttributeError:
                        tree.value = spectrum.FlatSpectrum(1.0)
            else:
                tree.value = spectrum.FlatSpectrum(1.0)
        elif fname == 'ebmvx':
            # extinction
            tree.value = reddening.Extinction(args[0],args[1])
        else:
            # Known function name with no handler yet: record a diagnostic.
            tree.value = "would call %s with the following args: %s" % (fname, repr(args))
def function[p_functioncall, parameter[self, tree]]: constant[ V ::= function_call ( V LPAREN V RPAREN )] if compare[call[name[type], parameter[call[name[tree]][constant[2]].value]] not_equal[!=] call[name[type], parameter[list[[]]]]] begin[:] variable[args] assign[=] list[[<ast.Attribute object at 0x7da204347ee0>]] variable[fname] assign[=] call[name[tree]][constant[0]].value if compare[name[fname] <ast.NotIn object at 0x7da2590d7190> name[syfunctions]] begin[:] call[name[print], parameter[constant[Error: unknown function:], name[fname]]] call[name[self].error, parameter[name[fname]]]
keyword[def] identifier[p_functioncall] ( identifier[self] , identifier[tree] ): literal[string] keyword[if] identifier[type] ( identifier[tree] [ literal[int] ]. identifier[value] )!= identifier[type] ([]): identifier[args] =[ identifier[tree] [ literal[int] ]. identifier[value] ] keyword[else] : identifier[args] = identifier[tree] [ literal[int] ]. identifier[value] identifier[fname] = identifier[tree] [ literal[int] ]. identifier[value] keyword[if] identifier[fname] keyword[not] keyword[in] identifier[syfunctions] : identifier[print] ( literal[string] , identifier[fname] ) identifier[self] . identifier[error] ( identifier[fname] ) keyword[else] : keyword[if] identifier[fname] == literal[string] : identifier[tree] . identifier[value] = identifier[spectrum] . identifier[FlatSpectrum] ( identifier[args] [ literal[int] ], identifier[fluxunits] = identifier[args] [ literal[int] ]) keyword[elif] identifier[fname] == literal[string] : identifier[tree] . identifier[value] = identifier[spectrum] . identifier[BlackBody] ( identifier[args] [ literal[int] ]) keyword[elif] identifier[fname] == literal[string] : keyword[if] identifier[args] [ literal[int] ] keyword[not] keyword[in] identifier[synforms] : identifier[print] ( literal[string] , identifier[args] [ literal[int] ]) identifier[tree] . identifier[value] = identifier[spectrum] . identifier[Powerlaw] ( identifier[args] [ literal[int] ], identifier[args] [ literal[int] ], identifier[fluxunits] = identifier[args] [ literal[int] ]) keyword[elif] identifier[fname] == literal[string] : identifier[tree] . identifier[value] = identifier[spectrum] . identifier[Box] ( identifier[args] [ literal[int] ], identifier[args] [ literal[int] ]) keyword[elif] identifier[fname] == literal[string] : identifier[name] = identifier[args] [ literal[int] ] identifier[tree] . identifier[value] = identifier[spectrum] . 
identifier[TabularSourceSpectrum] ( identifier[_handleIRAFName] ( identifier[name] )) keyword[elif] identifier[fname] == literal[string] : identifier[args] = identifier[tree] [ literal[int] ]. identifier[svalue] identifier[tree] . identifier[value] = identifier[ObsBandpass] ( identifier[args] ) keyword[elif] identifier[fname] == literal[string] : identifier[tree] . identifier[value] = identifier[spectrum] . identifier[GaussianSource] ( identifier[args] [ literal[int] ], identifier[args] [ literal[int] ], identifier[args] [ literal[int] ], identifier[fluxunits] = identifier[args] [ literal[int] ]) keyword[elif] identifier[fname] == literal[string] : identifier[tree] . identifier[value] = identifier[catalog] . identifier[Icat] (* identifier[args] ) keyword[elif] identifier[fname] == literal[string] : identifier[sp] = identifier[args] [ literal[int] ] keyword[if] keyword[not] identifier[isinstance] ( identifier[sp] , identifier[spectrum] . identifier[SourceSpectrum] ): identifier[name] = identifier[_handleIRAFName] ( identifier[args] [ literal[int] ]) identifier[sp] = identifier[spectrum] . identifier[TabularSourceSpectrum] ( identifier[name] ) keyword[try] : identifier[tree] . identifier[value] = identifier[sp] . identifier[renorm] ( identifier[args] [ literal[int] ], identifier[args] [ literal[int] ], identifier[args] [ literal[int] ]) keyword[except] identifier[DisjointError] : keyword[raise] keyword[except] identifier[OverlapError] : identifier[tree] . identifier[value] = identifier[sp] . identifier[renorm] ( identifier[args] [ literal[int] ], identifier[args] [ literal[int] ], identifier[args] [ literal[int] ], identifier[force] = keyword[True] ) identifier[tree] . identifier[value] . identifier[warnings] [ literal[string] ]= literal[string] keyword[elif] identifier[fname] == literal[string] : keyword[if] identifier[args] [ literal[int] ]!= literal[string] : keyword[try] : identifier[tree] . identifier[value] = identifier[args] [ literal[int] ]. 
identifier[redshift] ( identifier[args] [ literal[int] ]) keyword[except] identifier[AttributeError] : keyword[try] : identifier[sp] = identifier[spectrum] . identifier[TabularSourceSpectrum] ( identifier[_handleIRAFName] ( identifier[args] [ literal[int] ])) identifier[tree] . identifier[value] = identifier[sp] . identifier[redshift] ( identifier[args] [ literal[int] ]) keyword[except] identifier[AttributeError] : identifier[tree] . identifier[value] = identifier[spectrum] . identifier[FlatSpectrum] ( literal[int] ) keyword[else] : identifier[tree] . identifier[value] = identifier[spectrum] . identifier[FlatSpectrum] ( literal[int] ) keyword[elif] identifier[fname] == literal[string] : identifier[tree] . identifier[value] = identifier[reddening] . identifier[Extinction] ( identifier[args] [ literal[int] ], identifier[args] [ literal[int] ]) keyword[else] : identifier[tree] . identifier[value] = literal[string] %( identifier[fname] , identifier[repr] ( identifier[args] ))
def p_functioncall(self, tree): # Where all the real interpreter action is # Note that things that should only be done at the top level # are performed in the interpret function defined below. ' V ::= function_call ( V LPAREN V RPAREN )' if type(tree[2].value) != type([]): args = [tree[2].value] # depends on [control=['if'], data=[]] else: args = tree[2].value fname = tree[0].value if fname not in syfunctions: print('Error: unknown function:', fname) self.error(fname) # depends on [control=['if'], data=['fname']] elif fname == 'unit': # constant spectrum tree.value = spectrum.FlatSpectrum(args[0], fluxunits=args[1]) # depends on [control=['if'], data=[]] elif fname == 'bb': # black body tree.value = spectrum.BlackBody(args[0]) # depends on [control=['if'], data=[]] elif fname == 'pl': # power law if args[2] not in synforms: print('Error: unrecognized units:', args[2]) # depends on [control=['if'], data=[]] # code to create powerlaw spectrum object tree.value = spectrum.Powerlaw(args[0], args[1], fluxunits=args[2]) # depends on [control=['if'], data=[]] elif fname == 'box': # box throughput tree.value = spectrum.Box(args[0], args[1]) # depends on [control=['if'], data=[]] elif fname == 'spec': # spectrum from reference file (for now....) 
name = args[0] tree.value = spectrum.TabularSourceSpectrum(_handleIRAFName(name)) # depends on [control=['if'], data=[]] elif fname == 'band': # passband args = tree[2].svalue tree.value = ObsBandpass(args) # depends on [control=['if'], data=[]] elif fname == 'em': # emission line tree.value = spectrum.GaussianSource(args[2], args[0], args[1], fluxunits=args[3]) # depends on [control=['if'], data=[]] elif fname == 'icat': # catalog interpolation tree.value = catalog.Icat(*args) # depends on [control=['if'], data=[]] elif fname == 'rn': # renormalize sp = args[0] if not isinstance(sp, spectrum.SourceSpectrum): name = _handleIRAFName(args[0]) sp = spectrum.TabularSourceSpectrum(name) # depends on [control=['if'], data=[]] # # Always force the renormalization to occur: prevent exceptions #in case of partial overlap. Less robust but duplicates synphot. # Force the renormalization in the case of partial overlap (OverlapError), # but raise an exception if the spectrum and bandpass are entirely # disjoint (DisjointError) try: tree.value = sp.renorm(args[2], args[3], args[1]) # depends on [control=['try'], data=[]] except DisjointError: raise # depends on [control=['except'], data=[]] except OverlapError: tree.value = sp.renorm(args[2], args[3], args[1], force=True) tree.value.warnings['force_renorm'] = 'Warning: Renormalization of the spectrum, to the specified value, in the specified units, exceeds the limit of the specified passband.' # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] elif fname == 'z': # redshift if args[0] != 'null': # the ETC generates junk sometimes.... 
try: tree.value = args[0].redshift(args[1]) # depends on [control=['try'], data=[]] except AttributeError: try: #name = getName(args[0]) sp = spectrum.TabularSourceSpectrum(_handleIRAFName(args[0])) tree.value = sp.redshift(args[1]) # depends on [control=['try'], data=[]] except AttributeError: tree.value = spectrum.FlatSpectrum(1.0) # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: tree.value = spectrum.FlatSpectrum(1.0) # depends on [control=['if'], data=[]] elif fname == 'ebmvx': # extinction tree.value = reddening.Extinction(args[0], args[1]) # depends on [control=['if'], data=[]] else: tree.value = 'would call %s with the following args: %s' % (fname, repr(args))
import pandas


def get_ftype_counts(self):
    """Get the counts of ftypes in this object.

    Returns:
        pandas.Series mapping each ftype to its number of occurrences.
    """
    try:
        # Single-typed objects expose one ftype directly.
        single_ftype = self.ftype
    except AttributeError:
        # Otherwise tally the per-column ftypes, sorted for stable output.
        return self.ftypes.value_counts().sort_index()
    return pandas.Series({single_ftype: 1})
def function[get_ftype_counts, parameter[self]]: constant[Get the counts of ftypes in this object. Returns: The counts of ftypes in this object. ] if call[name[hasattr], parameter[name[self], constant[ftype]]] begin[:] return[call[name[pandas].Series, parameter[dictionary[[<ast.Attribute object at 0x7da18eb55e10>], [<ast.Constant object at 0x7da18eb551b0>]]]]] return[call[call[name[self].ftypes.value_counts, parameter[]].sort_index, parameter[]]]
keyword[def] identifier[get_ftype_counts] ( identifier[self] ): literal[string] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): keyword[return] identifier[pandas] . identifier[Series] ({ identifier[self] . identifier[ftype] : literal[int] }) keyword[return] identifier[self] . identifier[ftypes] . identifier[value_counts] (). identifier[sort_index] ()
def get_ftype_counts(self): """Get the counts of ftypes in this object. Returns: The counts of ftypes in this object. """ if hasattr(self, 'ftype'): return pandas.Series({self.ftype: 1}) # depends on [control=['if'], data=[]] return self.ftypes.value_counts().sort_index()
def _encode_fields(self, xfield, yfield, time_unit=None, scale=Scale(zero=False)):
    """
    Build the Altair X/Y encodings for the given field specifications.

    Each field spec is a sequence of ``(name, type[, axis_options])``; the
    optional third element is a dict of keyword arguments for ``Axis``.
    An explicit ``scale=None`` is replaced by a default ``Scale()``.
    """
    if scale is None:
        scale = Scale()

    xfieldtype = xfield[1]
    yfieldtype = yfield[1]
    x_options = xfield[2] if len(xfield) > 2 else None
    y_options = yfield[2] if len(yfield) > 2 else None

    # X encoding: axis options (when given) bring the scale along with them;
    # a time unit is attached whenever one was supplied.
    if x_options is None:
        if time_unit is None:
            xencode = X(xfieldtype)
        else:
            xencode = X(xfieldtype, timeUnit=time_unit)
    elif time_unit is None:
        xencode = X(xfieldtype, axis=Axis(**x_options), scale=scale)
    else:
        xencode = X(
            xfieldtype,
            axis=Axis(**x_options),
            timeUnit=time_unit,
            scale=scale,
        )

    # Y encoding always carries the scale.
    if y_options is None:
        yencode = Y(yfieldtype, scale=scale)
    else:
        yencode = Y(yfieldtype, axis=Axis(**y_options), scale=scale)

    return xencode, yencode
def function[_encode_fields, parameter[self, xfield, yfield, time_unit, scale]]: constant[ Encode the fields in Altair format ] if compare[name[scale] is constant[None]] begin[:] variable[scale] assign[=] call[name[Scale], parameter[]] variable[xfieldtype] assign[=] call[name[xfield]][constant[1]] variable[yfieldtype] assign[=] call[name[yfield]][constant[1]] variable[x_options] assign[=] constant[None] if compare[call[name[len], parameter[name[xfield]]] greater[>] constant[2]] begin[:] variable[x_options] assign[=] call[name[xfield]][constant[2]] variable[y_options] assign[=] constant[None] if compare[call[name[len], parameter[name[yfield]]] greater[>] constant[2]] begin[:] variable[y_options] assign[=] call[name[yfield]][constant[2]] if compare[name[time_unit] is_not constant[None]] begin[:] if compare[name[x_options] is constant[None]] begin[:] variable[xencode] assign[=] call[name[X], parameter[name[xfieldtype]]] if compare[name[y_options] is constant[None]] begin[:] variable[yencode] assign[=] call[name[Y], parameter[name[yfieldtype]]] return[tuple[[<ast.Name object at 0x7da1b02932b0>, <ast.Name object at 0x7da1b0292920>]]]
keyword[def] identifier[_encode_fields] ( identifier[self] , identifier[xfield] , identifier[yfield] , identifier[time_unit] = keyword[None] , identifier[scale] = identifier[Scale] ( identifier[zero] = keyword[False] )): literal[string] keyword[if] identifier[scale] keyword[is] keyword[None] : identifier[scale] = identifier[Scale] () identifier[xfieldtype] = identifier[xfield] [ literal[int] ] identifier[yfieldtype] = identifier[yfield] [ literal[int] ] identifier[x_options] = keyword[None] keyword[if] identifier[len] ( identifier[xfield] )> literal[int] : identifier[x_options] = identifier[xfield] [ literal[int] ] identifier[y_options] = keyword[None] keyword[if] identifier[len] ( identifier[yfield] )> literal[int] : identifier[y_options] = identifier[yfield] [ literal[int] ] keyword[if] identifier[time_unit] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[x_options] keyword[is] keyword[None] : identifier[xencode] = identifier[X] ( identifier[xfieldtype] , identifier[timeUnit] = identifier[time_unit] ) keyword[else] : identifier[xencode] = identifier[X] ( identifier[xfieldtype] , identifier[axis] = identifier[Axis] (** identifier[x_options] ), identifier[timeUnit] = identifier[time_unit] , identifier[scale] = identifier[scale] ) keyword[else] : keyword[if] identifier[x_options] keyword[is] keyword[None] : identifier[xencode] = identifier[X] ( identifier[xfieldtype] ) keyword[else] : identifier[xencode] = identifier[X] ( identifier[xfieldtype] , identifier[axis] = identifier[Axis] (** identifier[x_options] ), identifier[scale] = identifier[scale] ) keyword[if] identifier[y_options] keyword[is] keyword[None] : identifier[yencode] = identifier[Y] ( identifier[yfieldtype] , identifier[scale] = identifier[scale] ) keyword[else] : identifier[yencode] = identifier[Y] ( identifier[yfieldtype] , identifier[axis] = identifier[Axis] (** identifier[y_options] ), identifier[scale] = identifier[scale] ) keyword[return] identifier[xencode] , identifier[yencode]
def _encode_fields(self, xfield, yfield, time_unit=None, scale=Scale(zero=False)): """ Encode the fields in Altair format """ if scale is None: scale = Scale() # depends on [control=['if'], data=['scale']] xfieldtype = xfield[1] yfieldtype = yfield[1] x_options = None if len(xfield) > 2: x_options = xfield[2] # depends on [control=['if'], data=[]] y_options = None if len(yfield) > 2: y_options = yfield[2] # depends on [control=['if'], data=[]] if time_unit is not None: if x_options is None: xencode = X(xfieldtype, timeUnit=time_unit) # depends on [control=['if'], data=[]] else: xencode = X(xfieldtype, axis=Axis(**x_options), timeUnit=time_unit, scale=scale) # depends on [control=['if'], data=['time_unit']] elif x_options is None: xencode = X(xfieldtype) # depends on [control=['if'], data=[]] else: xencode = X(xfieldtype, axis=Axis(**x_options), scale=scale) if y_options is None: yencode = Y(yfieldtype, scale=scale) # depends on [control=['if'], data=[]] else: yencode = Y(yfieldtype, axis=Axis(**y_options), scale=scale) return (xencode, yencode)
def notify_done(self, error=False, run_done_callbacks=True): ''' if error clear all sessions otherwise check to see if all other sessions are complete then run the done callbacks ''' if error: for _session in self._sessions.values(): _session.set_done() self._session_count = 0 else: self._update_session_count(-1) for _session in self._sessions.values(): if not _session.is_done(): return if run_done_callbacks: self._run_done_callbacks() self._done_event.set()
def function[notify_done, parameter[self, error, run_done_callbacks]]: constant[ if error clear all sessions otherwise check to see if all other sessions are complete then run the done callbacks ] if name[error] begin[:] for taget[name[_session]] in starred[call[name[self]._sessions.values, parameter[]]] begin[:] call[name[_session].set_done, parameter[]] name[self]._session_count assign[=] constant[0] if name[run_done_callbacks] begin[:] call[name[self]._run_done_callbacks, parameter[]] call[name[self]._done_event.set, parameter[]]
keyword[def] identifier[notify_done] ( identifier[self] , identifier[error] = keyword[False] , identifier[run_done_callbacks] = keyword[True] ): literal[string] keyword[if] identifier[error] : keyword[for] identifier[_session] keyword[in] identifier[self] . identifier[_sessions] . identifier[values] (): identifier[_session] . identifier[set_done] () identifier[self] . identifier[_session_count] = literal[int] keyword[else] : identifier[self] . identifier[_update_session_count] (- literal[int] ) keyword[for] identifier[_session] keyword[in] identifier[self] . identifier[_sessions] . identifier[values] (): keyword[if] keyword[not] identifier[_session] . identifier[is_done] (): keyword[return] keyword[if] identifier[run_done_callbacks] : identifier[self] . identifier[_run_done_callbacks] () identifier[self] . identifier[_done_event] . identifier[set] ()
def notify_done(self, error=False, run_done_callbacks=True): """ if error clear all sessions otherwise check to see if all other sessions are complete then run the done callbacks """ if error: for _session in self._sessions.values(): _session.set_done() # depends on [control=['for'], data=['_session']] self._session_count = 0 # depends on [control=['if'], data=[]] else: self._update_session_count(-1) for _session in self._sessions.values(): if not _session.is_done(): return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['_session']] if run_done_callbacks: self._run_done_callbacks() # depends on [control=['if'], data=[]] self._done_event.set()
def dashboard(request): "Counts, aggregations and more!" end_time = now() start_time = end_time - timedelta(days=7) defaults = {'start': start_time, 'end': end_time} form = DashboardForm(data=request.GET or defaults) if form.is_valid(): start_time = form.cleaned_data['start'] end_time = form.cleaned_data['end'] # determine when tracking began try: obj = Visitor.objects.order_by('start_time')[0] track_start_time = obj.start_time except (IndexError, Visitor.DoesNotExist): track_start_time = now() # If the start_date is before tracking began, warn about incomplete data warn_incomplete = (start_time < track_start_time) # queries take `date` objects (for now) user_stats = Visitor.objects.user_stats(start_time, end_time) visitor_stats = Visitor.objects.stats(start_time, end_time) if TRACK_PAGEVIEWS: pageview_stats = Pageview.objects.stats(start_time, end_time) else: pageview_stats = None context = { 'form': form, 'track_start_time': track_start_time, 'warn_incomplete': warn_incomplete, 'user_stats': user_stats, 'visitor_stats': visitor_stats, 'pageview_stats': pageview_stats, } return render(request, 'tracking/dashboard.html', context)
def function[dashboard, parameter[request]]: constant[Counts, aggregations and more!] variable[end_time] assign[=] call[name[now], parameter[]] variable[start_time] assign[=] binary_operation[name[end_time] - call[name[timedelta], parameter[]]] variable[defaults] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c5de0>, <ast.Constant object at 0x7da20c6c5c30>], [<ast.Name object at 0x7da20c6c7f70>, <ast.Name object at 0x7da20c6c4490>]] variable[form] assign[=] call[name[DashboardForm], parameter[]] if call[name[form].is_valid, parameter[]] begin[:] variable[start_time] assign[=] call[name[form].cleaned_data][constant[start]] variable[end_time] assign[=] call[name[form].cleaned_data][constant[end]] <ast.Try object at 0x7da20c6c4f70> variable[warn_incomplete] assign[=] compare[name[start_time] less[<] name[track_start_time]] variable[user_stats] assign[=] call[name[Visitor].objects.user_stats, parameter[name[start_time], name[end_time]]] variable[visitor_stats] assign[=] call[name[Visitor].objects.stats, parameter[name[start_time], name[end_time]]] if name[TRACK_PAGEVIEWS] begin[:] variable[pageview_stats] assign[=] call[name[Pageview].objects.stats, parameter[name[start_time], name[end_time]]] variable[context] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c7b20>, <ast.Constant object at 0x7da20c6c6890>, <ast.Constant object at 0x7da20c6c70a0>, <ast.Constant object at 0x7da20c6c7010>, <ast.Constant object at 0x7da20c6c5750>, <ast.Constant object at 0x7da20c6c7640>], [<ast.Name object at 0x7da20c6c7730>, <ast.Name object at 0x7da20c6c4340>, <ast.Name object at 0x7da20c6c6980>, <ast.Name object at 0x7da20c6c42b0>, <ast.Name object at 0x7da20c6c7850>, <ast.Name object at 0x7da20c6c7a90>]] return[call[name[render], parameter[name[request], constant[tracking/dashboard.html], name[context]]]]
keyword[def] identifier[dashboard] ( identifier[request] ): literal[string] identifier[end_time] = identifier[now] () identifier[start_time] = identifier[end_time] - identifier[timedelta] ( identifier[days] = literal[int] ) identifier[defaults] ={ literal[string] : identifier[start_time] , literal[string] : identifier[end_time] } identifier[form] = identifier[DashboardForm] ( identifier[data] = identifier[request] . identifier[GET] keyword[or] identifier[defaults] ) keyword[if] identifier[form] . identifier[is_valid] (): identifier[start_time] = identifier[form] . identifier[cleaned_data] [ literal[string] ] identifier[end_time] = identifier[form] . identifier[cleaned_data] [ literal[string] ] keyword[try] : identifier[obj] = identifier[Visitor] . identifier[objects] . identifier[order_by] ( literal[string] )[ literal[int] ] identifier[track_start_time] = identifier[obj] . identifier[start_time] keyword[except] ( identifier[IndexError] , identifier[Visitor] . identifier[DoesNotExist] ): identifier[track_start_time] = identifier[now] () identifier[warn_incomplete] =( identifier[start_time] < identifier[track_start_time] ) identifier[user_stats] = identifier[Visitor] . identifier[objects] . identifier[user_stats] ( identifier[start_time] , identifier[end_time] ) identifier[visitor_stats] = identifier[Visitor] . identifier[objects] . identifier[stats] ( identifier[start_time] , identifier[end_time] ) keyword[if] identifier[TRACK_PAGEVIEWS] : identifier[pageview_stats] = identifier[Pageview] . identifier[objects] . 
identifier[stats] ( identifier[start_time] , identifier[end_time] ) keyword[else] : identifier[pageview_stats] = keyword[None] identifier[context] ={ literal[string] : identifier[form] , literal[string] : identifier[track_start_time] , literal[string] : identifier[warn_incomplete] , literal[string] : identifier[user_stats] , literal[string] : identifier[visitor_stats] , literal[string] : identifier[pageview_stats] , } keyword[return] identifier[render] ( identifier[request] , literal[string] , identifier[context] )
def dashboard(request): """Counts, aggregations and more!""" end_time = now() start_time = end_time - timedelta(days=7) defaults = {'start': start_time, 'end': end_time} form = DashboardForm(data=request.GET or defaults) if form.is_valid(): start_time = form.cleaned_data['start'] end_time = form.cleaned_data['end'] # depends on [control=['if'], data=[]] # determine when tracking began try: obj = Visitor.objects.order_by('start_time')[0] track_start_time = obj.start_time # depends on [control=['try'], data=[]] except (IndexError, Visitor.DoesNotExist): track_start_time = now() # depends on [control=['except'], data=[]] # If the start_date is before tracking began, warn about incomplete data warn_incomplete = start_time < track_start_time # queries take `date` objects (for now) user_stats = Visitor.objects.user_stats(start_time, end_time) visitor_stats = Visitor.objects.stats(start_time, end_time) if TRACK_PAGEVIEWS: pageview_stats = Pageview.objects.stats(start_time, end_time) # depends on [control=['if'], data=[]] else: pageview_stats = None context = {'form': form, 'track_start_time': track_start_time, 'warn_incomplete': warn_incomplete, 'user_stats': user_stats, 'visitor_stats': visitor_stats, 'pageview_stats': pageview_stats} return render(request, 'tracking/dashboard.html', context)
def rotvec(v1, angle, iaxis): """ Transform a vector to a new coordinate system rotated by angle radians about axis iaxis. This transformation rotates v1 by angle radians about the specified axis. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/rotvec_c.html :param v1: Vector whose coordinate system is to be rotated. :type v1: 3-Element Array of floats :param angle: Angle of rotation (radians). :type angle: float :param iaxis: Axis of rotation X=1, Y=2, Z=3. :type iaxis: int :return: the vector expressed in the new coordinate system. :rtype: 3-Element Array of floats """ v1 = stypes.toDoubleVector(v1) angle = ctypes.c_double(angle) iaxis = ctypes.c_int(iaxis) vout = stypes.emptyDoubleVector(3) libspice.rotvec_c(v1, angle, iaxis, vout) return stypes.cVectorToPython(vout)
def function[rotvec, parameter[v1, angle, iaxis]]: constant[ Transform a vector to a new coordinate system rotated by angle radians about axis iaxis. This transformation rotates v1 by angle radians about the specified axis. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/rotvec_c.html :param v1: Vector whose coordinate system is to be rotated. :type v1: 3-Element Array of floats :param angle: Angle of rotation (radians). :type angle: float :param iaxis: Axis of rotation X=1, Y=2, Z=3. :type iaxis: int :return: the vector expressed in the new coordinate system. :rtype: 3-Element Array of floats ] variable[v1] assign[=] call[name[stypes].toDoubleVector, parameter[name[v1]]] variable[angle] assign[=] call[name[ctypes].c_double, parameter[name[angle]]] variable[iaxis] assign[=] call[name[ctypes].c_int, parameter[name[iaxis]]] variable[vout] assign[=] call[name[stypes].emptyDoubleVector, parameter[constant[3]]] call[name[libspice].rotvec_c, parameter[name[v1], name[angle], name[iaxis], name[vout]]] return[call[name[stypes].cVectorToPython, parameter[name[vout]]]]
keyword[def] identifier[rotvec] ( identifier[v1] , identifier[angle] , identifier[iaxis] ): literal[string] identifier[v1] = identifier[stypes] . identifier[toDoubleVector] ( identifier[v1] ) identifier[angle] = identifier[ctypes] . identifier[c_double] ( identifier[angle] ) identifier[iaxis] = identifier[ctypes] . identifier[c_int] ( identifier[iaxis] ) identifier[vout] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] ) identifier[libspice] . identifier[rotvec_c] ( identifier[v1] , identifier[angle] , identifier[iaxis] , identifier[vout] ) keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[vout] )
def rotvec(v1, angle, iaxis): """ Transform a vector to a new coordinate system rotated by angle radians about axis iaxis. This transformation rotates v1 by angle radians about the specified axis. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/rotvec_c.html :param v1: Vector whose coordinate system is to be rotated. :type v1: 3-Element Array of floats :param angle: Angle of rotation (radians). :type angle: float :param iaxis: Axis of rotation X=1, Y=2, Z=3. :type iaxis: int :return: the vector expressed in the new coordinate system. :rtype: 3-Element Array of floats """ v1 = stypes.toDoubleVector(v1) angle = ctypes.c_double(angle) iaxis = ctypes.c_int(iaxis) vout = stypes.emptyDoubleVector(3) libspice.rotvec_c(v1, angle, iaxis, vout) return stypes.cVectorToPython(vout)
def gen_text(env: TextIOBase, package: str, tmpl: str): """Create output from Jinja template.""" if env: env_args = json_datetime.load(env) else: env_args = {} jinja_env = template.setup(package) echo(jinja_env.get_template(tmpl).render(**env_args))
def function[gen_text, parameter[env, package, tmpl]]: constant[Create output from Jinja template.] if name[env] begin[:] variable[env_args] assign[=] call[name[json_datetime].load, parameter[name[env]]] variable[jinja_env] assign[=] call[name[template].setup, parameter[name[package]]] call[name[echo], parameter[call[call[name[jinja_env].get_template, parameter[name[tmpl]]].render, parameter[]]]]
keyword[def] identifier[gen_text] ( identifier[env] : identifier[TextIOBase] , identifier[package] : identifier[str] , identifier[tmpl] : identifier[str] ): literal[string] keyword[if] identifier[env] : identifier[env_args] = identifier[json_datetime] . identifier[load] ( identifier[env] ) keyword[else] : identifier[env_args] ={} identifier[jinja_env] = identifier[template] . identifier[setup] ( identifier[package] ) identifier[echo] ( identifier[jinja_env] . identifier[get_template] ( identifier[tmpl] ). identifier[render] (** identifier[env_args] ))
def gen_text(env: TextIOBase, package: str, tmpl: str): """Create output from Jinja template.""" if env: env_args = json_datetime.load(env) # depends on [control=['if'], data=[]] else: env_args = {} jinja_env = template.setup(package) echo(jinja_env.get_template(tmpl).render(**env_args))
def check_redirect_uris(uris, client_type=None): """ This function checks all return uris provided and tries to deduce as what type of client we should register. :param uris: The redirect URIs to check. :type uris: list :param client_type: An indicator of which client type you are expecting to be used. If this does not match the deduced type, an error will be raised. :type client_type: str :returns: The deduced client type. :rtype: str :raises ValueError: An error occured while checking the redirect uris. .. versionadded:: 1.0 """ if client_type not in [None, 'native', 'web']: raise ValueError('Invalid client type indicator used') if not isinstance(uris, list): raise ValueError('uris needs to be a list of strings') if len(uris) < 1: raise ValueError('At least one return URI needs to be provided') for uri in uris: if uri.startswith('https://'): if client_type == 'native': raise ValueError('https url with native client') client_type = 'web' elif uri.startswith('http://localhost'): if client_type == 'web': raise ValueError('http://localhost url with web client') client_type = 'native' else: if (uri.startswith('http://') and not uri.startswith('http://localhost')): raise ValueError('http:// url with non-localhost is illegal') else: raise ValueError('Invalid uri provided: %s' % uri) return client_type
def function[check_redirect_uris, parameter[uris, client_type]]: constant[ This function checks all return uris provided and tries to deduce as what type of client we should register. :param uris: The redirect URIs to check. :type uris: list :param client_type: An indicator of which client type you are expecting to be used. If this does not match the deduced type, an error will be raised. :type client_type: str :returns: The deduced client type. :rtype: str :raises ValueError: An error occured while checking the redirect uris. .. versionadded:: 1.0 ] if compare[name[client_type] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da204347d90>, <ast.Constant object at 0x7da2043461a0>, <ast.Constant object at 0x7da204347280>]]] begin[:] <ast.Raise object at 0x7da204346680> if <ast.UnaryOp object at 0x7da204346da0> begin[:] <ast.Raise object at 0x7da204346ef0> if compare[call[name[len], parameter[name[uris]]] less[<] constant[1]] begin[:] <ast.Raise object at 0x7da204346380> for taget[name[uri]] in starred[name[uris]] begin[:] if call[name[uri].startswith, parameter[constant[https://]]] begin[:] if compare[name[client_type] equal[==] constant[native]] begin[:] <ast.Raise object at 0x7da2043469b0> variable[client_type] assign[=] constant[web] return[name[client_type]]
keyword[def] identifier[check_redirect_uris] ( identifier[uris] , identifier[client_type] = keyword[None] ): literal[string] keyword[if] identifier[client_type] keyword[not] keyword[in] [ keyword[None] , literal[string] , literal[string] ]: keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[uris] , identifier[list] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[len] ( identifier[uris] )< literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[for] identifier[uri] keyword[in] identifier[uris] : keyword[if] identifier[uri] . identifier[startswith] ( literal[string] ): keyword[if] identifier[client_type] == literal[string] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[client_type] = literal[string] keyword[elif] identifier[uri] . identifier[startswith] ( literal[string] ): keyword[if] identifier[client_type] == literal[string] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[client_type] = literal[string] keyword[else] : keyword[if] ( identifier[uri] . identifier[startswith] ( literal[string] ) keyword[and] keyword[not] identifier[uri] . identifier[startswith] ( literal[string] )): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[uri] ) keyword[return] identifier[client_type]
def check_redirect_uris(uris, client_type=None): """ This function checks all return uris provided and tries to deduce as what type of client we should register. :param uris: The redirect URIs to check. :type uris: list :param client_type: An indicator of which client type you are expecting to be used. If this does not match the deduced type, an error will be raised. :type client_type: str :returns: The deduced client type. :rtype: str :raises ValueError: An error occured while checking the redirect uris. .. versionadded:: 1.0 """ if client_type not in [None, 'native', 'web']: raise ValueError('Invalid client type indicator used') # depends on [control=['if'], data=[]] if not isinstance(uris, list): raise ValueError('uris needs to be a list of strings') # depends on [control=['if'], data=[]] if len(uris) < 1: raise ValueError('At least one return URI needs to be provided') # depends on [control=['if'], data=[]] for uri in uris: if uri.startswith('https://'): if client_type == 'native': raise ValueError('https url with native client') # depends on [control=['if'], data=[]] client_type = 'web' # depends on [control=['if'], data=[]] elif uri.startswith('http://localhost'): if client_type == 'web': raise ValueError('http://localhost url with web client') # depends on [control=['if'], data=[]] client_type = 'native' # depends on [control=['if'], data=[]] elif uri.startswith('http://') and (not uri.startswith('http://localhost')): raise ValueError('http:// url with non-localhost is illegal') # depends on [control=['if'], data=[]] else: raise ValueError('Invalid uri provided: %s' % uri) # depends on [control=['for'], data=['uri']] return client_type
def get_source_files(target, build_context) -> list: """Return list of source files for `target`.""" all_sources = list(target.props.sources) for proto_dep_name in target.props.protos: proto_dep = build_context.targets[proto_dep_name] all_sources.extend(proto_dep.artifacts.get(AT.gen_cc).keys()) return all_sources
def function[get_source_files, parameter[target, build_context]]: constant[Return list of source files for `target`.] variable[all_sources] assign[=] call[name[list], parameter[name[target].props.sources]] for taget[name[proto_dep_name]] in starred[name[target].props.protos] begin[:] variable[proto_dep] assign[=] call[name[build_context].targets][name[proto_dep_name]] call[name[all_sources].extend, parameter[call[call[name[proto_dep].artifacts.get, parameter[name[AT].gen_cc]].keys, parameter[]]]] return[name[all_sources]]
keyword[def] identifier[get_source_files] ( identifier[target] , identifier[build_context] )-> identifier[list] : literal[string] identifier[all_sources] = identifier[list] ( identifier[target] . identifier[props] . identifier[sources] ) keyword[for] identifier[proto_dep_name] keyword[in] identifier[target] . identifier[props] . identifier[protos] : identifier[proto_dep] = identifier[build_context] . identifier[targets] [ identifier[proto_dep_name] ] identifier[all_sources] . identifier[extend] ( identifier[proto_dep] . identifier[artifacts] . identifier[get] ( identifier[AT] . identifier[gen_cc] ). identifier[keys] ()) keyword[return] identifier[all_sources]
def get_source_files(target, build_context) -> list: """Return list of source files for `target`.""" all_sources = list(target.props.sources) for proto_dep_name in target.props.protos: proto_dep = build_context.targets[proto_dep_name] all_sources.extend(proto_dep.artifacts.get(AT.gen_cc).keys()) # depends on [control=['for'], data=['proto_dep_name']] return all_sources
def blacklist(ctx, blacklist_account, account): """ Add an account to a blacklist """ account = Account(account, blockchain_instance=ctx.blockchain) print_tx(account.blacklist(blacklist_account))
def function[blacklist, parameter[ctx, blacklist_account, account]]: constant[ Add an account to a blacklist ] variable[account] assign[=] call[name[Account], parameter[name[account]]] call[name[print_tx], parameter[call[name[account].blacklist, parameter[name[blacklist_account]]]]]
keyword[def] identifier[blacklist] ( identifier[ctx] , identifier[blacklist_account] , identifier[account] ): literal[string] identifier[account] = identifier[Account] ( identifier[account] , identifier[blockchain_instance] = identifier[ctx] . identifier[blockchain] ) identifier[print_tx] ( identifier[account] . identifier[blacklist] ( identifier[blacklist_account] ))
def blacklist(ctx, blacklist_account, account): """ Add an account to a blacklist """ account = Account(account, blockchain_instance=ctx.blockchain) print_tx(account.blacklist(blacklist_account))
def next_interval(self, interval): """ Given a value of an interval, this function returns the next interval value """ index = np.where(self.intervals == interval) if index[0][0] + 1 < len(self.intervals): return self.intervals[index[0][0] + 1] else: raise IndexError("Ran out of intervals!")
def function[next_interval, parameter[self, interval]]: constant[ Given a value of an interval, this function returns the next interval value ] variable[index] assign[=] call[name[np].where, parameter[compare[name[self].intervals equal[==] name[interval]]]] if compare[binary_operation[call[call[name[index]][constant[0]]][constant[0]] + constant[1]] less[<] call[name[len], parameter[name[self].intervals]]] begin[:] return[call[name[self].intervals][binary_operation[call[call[name[index]][constant[0]]][constant[0]] + constant[1]]]]
keyword[def] identifier[next_interval] ( identifier[self] , identifier[interval] ): literal[string] identifier[index] = identifier[np] . identifier[where] ( identifier[self] . identifier[intervals] == identifier[interval] ) keyword[if] identifier[index] [ literal[int] ][ literal[int] ]+ literal[int] < identifier[len] ( identifier[self] . identifier[intervals] ): keyword[return] identifier[self] . identifier[intervals] [ identifier[index] [ literal[int] ][ literal[int] ]+ literal[int] ] keyword[else] : keyword[raise] identifier[IndexError] ( literal[string] )
def next_interval(self, interval): """ Given a value of an interval, this function returns the next interval value """ index = np.where(self.intervals == interval) if index[0][0] + 1 < len(self.intervals): return self.intervals[index[0][0] + 1] # depends on [control=['if'], data=[]] else: raise IndexError('Ran out of intervals!')
def get(msg_or_dict, key, default=_SENTINEL): """Retrieve a key's value from a protobuf Message or dictionary. Args: mdg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the object. key (str): The key to retrieve from the object. default (Any): If the key is not present on the object, and a default is set, returns that default instead. A type-appropriate falsy default is generally recommended, as protobuf messages almost always have default values for unset values and it is not always possible to tell the difference between a falsy value and an unset one. If no default is set then :class:`KeyError` will be raised if the key is not present in the object. Returns: Any: The return value from the underlying Message or dict. Raises: KeyError: If the key is not found. Note that, for unset values, messages and dictionaries may not have consistent behavior. TypeError: If ``msg_or_dict`` is not a Message or Mapping. """ # We may need to get a nested key. Resolve this. key, subkey = _resolve_subkeys(key) # Attempt to get the value from the two types of objects we know about. # If we get something else, complain. if isinstance(msg_or_dict, message.Message): answer = getattr(msg_or_dict, key, default) elif isinstance(msg_or_dict, collections_abc.Mapping): answer = msg_or_dict.get(key, default) else: raise TypeError( "get() expected a dict or protobuf message, got {!r}.".format( type(msg_or_dict) ) ) # If the object we got back is our sentinel, raise KeyError; this is # a "not found" case. if answer is _SENTINEL: raise KeyError(key) # If a subkey exists, call this method recursively against the answer. if subkey is not None and answer is not default: return get(answer, subkey, default=default) return answer
def function[get, parameter[msg_or_dict, key, default]]: constant[Retrieve a key's value from a protobuf Message or dictionary. Args: mdg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the object. key (str): The key to retrieve from the object. default (Any): If the key is not present on the object, and a default is set, returns that default instead. A type-appropriate falsy default is generally recommended, as protobuf messages almost always have default values for unset values and it is not always possible to tell the difference between a falsy value and an unset one. If no default is set then :class:`KeyError` will be raised if the key is not present in the object. Returns: Any: The return value from the underlying Message or dict. Raises: KeyError: If the key is not found. Note that, for unset values, messages and dictionaries may not have consistent behavior. TypeError: If ``msg_or_dict`` is not a Message or Mapping. ] <ast.Tuple object at 0x7da20c6e73a0> assign[=] call[name[_resolve_subkeys], parameter[name[key]]] if call[name[isinstance], parameter[name[msg_or_dict], name[message].Message]] begin[:] variable[answer] assign[=] call[name[getattr], parameter[name[msg_or_dict], name[key], name[default]]] if compare[name[answer] is name[_SENTINEL]] begin[:] <ast.Raise object at 0x7da20c6e7d90> if <ast.BoolOp object at 0x7da20c6e5b40> begin[:] return[call[name[get], parameter[name[answer], name[subkey]]]] return[name[answer]]
keyword[def] identifier[get] ( identifier[msg_or_dict] , identifier[key] , identifier[default] = identifier[_SENTINEL] ): literal[string] identifier[key] , identifier[subkey] = identifier[_resolve_subkeys] ( identifier[key] ) keyword[if] identifier[isinstance] ( identifier[msg_or_dict] , identifier[message] . identifier[Message] ): identifier[answer] = identifier[getattr] ( identifier[msg_or_dict] , identifier[key] , identifier[default] ) keyword[elif] identifier[isinstance] ( identifier[msg_or_dict] , identifier[collections_abc] . identifier[Mapping] ): identifier[answer] = identifier[msg_or_dict] . identifier[get] ( identifier[key] , identifier[default] ) keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[msg_or_dict] ) ) ) keyword[if] identifier[answer] keyword[is] identifier[_SENTINEL] : keyword[raise] identifier[KeyError] ( identifier[key] ) keyword[if] identifier[subkey] keyword[is] keyword[not] keyword[None] keyword[and] identifier[answer] keyword[is] keyword[not] identifier[default] : keyword[return] identifier[get] ( identifier[answer] , identifier[subkey] , identifier[default] = identifier[default] ) keyword[return] identifier[answer]
def get(msg_or_dict, key, default=_SENTINEL): """Retrieve a key's value from a protobuf Message or dictionary. Args: mdg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the object. key (str): The key to retrieve from the object. default (Any): If the key is not present on the object, and a default is set, returns that default instead. A type-appropriate falsy default is generally recommended, as protobuf messages almost always have default values for unset values and it is not always possible to tell the difference between a falsy value and an unset one. If no default is set then :class:`KeyError` will be raised if the key is not present in the object. Returns: Any: The return value from the underlying Message or dict. Raises: KeyError: If the key is not found. Note that, for unset values, messages and dictionaries may not have consistent behavior. TypeError: If ``msg_or_dict`` is not a Message or Mapping. """ # We may need to get a nested key. Resolve this. (key, subkey) = _resolve_subkeys(key) # Attempt to get the value from the two types of objects we know about. # If we get something else, complain. if isinstance(msg_or_dict, message.Message): answer = getattr(msg_or_dict, key, default) # depends on [control=['if'], data=[]] elif isinstance(msg_or_dict, collections_abc.Mapping): answer = msg_or_dict.get(key, default) # depends on [control=['if'], data=[]] else: raise TypeError('get() expected a dict or protobuf message, got {!r}.'.format(type(msg_or_dict))) # If the object we got back is our sentinel, raise KeyError; this is # a "not found" case. if answer is _SENTINEL: raise KeyError(key) # depends on [control=['if'], data=[]] # If a subkey exists, call this method recursively against the answer. if subkey is not None and answer is not default: return get(answer, subkey, default=default) # depends on [control=['if'], data=[]] return answer
def get_irmc_firmware_version(snmp_client): """Get irmc firmware version of the node. :param snmp_client: an SNMP client object. :raises: SNMPFailure if SNMP operation failed. :returns: a string of bmc name and irmc firmware version. """ try: bmc_name = snmp_client.get(BMC_NAME_OID) irmc_firm_ver = snmp_client.get(IRMC_FW_VERSION_OID) return ('%(bmc)s%(sep)s%(firm_ver)s' % {'bmc': bmc_name if bmc_name else '', 'firm_ver': irmc_firm_ver if irmc_firm_ver else '', 'sep': '-' if bmc_name and irmc_firm_ver else ''}) except SNMPFailure as e: raise SNMPIRMCFirmwareFailure( SNMP_FAILURE_MSG % ("GET IRMC FIRMWARE VERSION", e))
def function[get_irmc_firmware_version, parameter[snmp_client]]: constant[Get irmc firmware version of the node. :param snmp_client: an SNMP client object. :raises: SNMPFailure if SNMP operation failed. :returns: a string of bmc name and irmc firmware version. ] <ast.Try object at 0x7da1b19d1930>
keyword[def] identifier[get_irmc_firmware_version] ( identifier[snmp_client] ): literal[string] keyword[try] : identifier[bmc_name] = identifier[snmp_client] . identifier[get] ( identifier[BMC_NAME_OID] ) identifier[irmc_firm_ver] = identifier[snmp_client] . identifier[get] ( identifier[IRMC_FW_VERSION_OID] ) keyword[return] ( literal[string] % { literal[string] : identifier[bmc_name] keyword[if] identifier[bmc_name] keyword[else] literal[string] , literal[string] : identifier[irmc_firm_ver] keyword[if] identifier[irmc_firm_ver] keyword[else] literal[string] , literal[string] : literal[string] keyword[if] identifier[bmc_name] keyword[and] identifier[irmc_firm_ver] keyword[else] literal[string] }) keyword[except] identifier[SNMPFailure] keyword[as] identifier[e] : keyword[raise] identifier[SNMPIRMCFirmwareFailure] ( identifier[SNMP_FAILURE_MSG] %( literal[string] , identifier[e] ))
def get_irmc_firmware_version(snmp_client): """Get irmc firmware version of the node. :param snmp_client: an SNMP client object. :raises: SNMPFailure if SNMP operation failed. :returns: a string of bmc name and irmc firmware version. """ try: bmc_name = snmp_client.get(BMC_NAME_OID) irmc_firm_ver = snmp_client.get(IRMC_FW_VERSION_OID) return '%(bmc)s%(sep)s%(firm_ver)s' % {'bmc': bmc_name if bmc_name else '', 'firm_ver': irmc_firm_ver if irmc_firm_ver else '', 'sep': '-' if bmc_name and irmc_firm_ver else ''} # depends on [control=['try'], data=[]] except SNMPFailure as e: raise SNMPIRMCFirmwareFailure(SNMP_FAILURE_MSG % ('GET IRMC FIRMWARE VERSION', e)) # depends on [control=['except'], data=['e']]
def consequence_level(self):
    """
    One of ``NONE``, ``WATCH``, ``WARNING``, ``DISTRESS``,
    ``CRITICAL`` or ``SEVERE``.
    """
    # An absent field means no significance level was ever set.
    if not self._proto.HasField('consequenceLevel'):
        return None
    # Map the numeric enum value back to its symbolic name.
    return mdb_pb2.SignificanceInfo.SignificanceLevelType.Name(
        self._proto.consequenceLevel)
def function[consequence_level, parameter[self]]: constant[ One of ``NONE``, ``WATCH``, ``WARNING``, ``DISTRESS``, ``CRITICAL`` or ``SEVERE``. ] if call[name[self]._proto.HasField, parameter[constant[consequenceLevel]]] begin[:] return[call[name[mdb_pb2].SignificanceInfo.SignificanceLevelType.Name, parameter[name[self]._proto.consequenceLevel]]] return[constant[None]]
keyword[def] identifier[consequence_level] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_proto] . identifier[HasField] ( literal[string] ): keyword[return] identifier[mdb_pb2] . identifier[SignificanceInfo] . identifier[SignificanceLevelType] . identifier[Name] ( identifier[self] . identifier[_proto] . identifier[consequenceLevel] ) keyword[return] keyword[None]
def consequence_level(self): """ One of ``NONE``, ``WATCH``, ``WARNING``, ``DISTRESS``, ``CRITICAL`` or ``SEVERE``. """ if self._proto.HasField('consequenceLevel'): return mdb_pb2.SignificanceInfo.SignificanceLevelType.Name(self._proto.consequenceLevel) # depends on [control=['if'], data=[]] return None
def find_by_reference_ids(reference_ids, _connection=None, page_size=100,
                          page_number=0, sort_by=enums.DEFAULT_SORT_BY,
                          sort_order=enums.DEFAULT_SORT_ORDER):
    """
    List all videos identified by a list of reference ids
    """
    # Only real sequences are accepted; a bare string would be joined
    # character-by-character, so it is rejected up front.
    if not isinstance(reference_ids, (list, tuple)):
        raise exceptions.PyBrightcoveError(
            "Video.find_by_reference_ids expects an iterable argument")
    return connection.ItemResultSet(
        'find_videos_by_reference_ids', Video, _connection, page_size,
        page_number, sort_by, sort_order,
        reference_ids=','.join(reference_ids))
def function[find_by_reference_ids, parameter[reference_ids, _connection, page_size, page_number, sort_by, sort_order]]: constant[ List all videos identified by a list of reference ids ] if <ast.UnaryOp object at 0x7da204566590> begin[:] variable[err] assign[=] constant[Video.find_by_reference_ids expects an iterable argument] <ast.Raise object at 0x7da2045658d0> variable[ids] assign[=] call[constant[,].join, parameter[name[reference_ids]]] return[call[name[connection].ItemResultSet, parameter[constant[find_videos_by_reference_ids], name[Video], name[_connection], name[page_size], name[page_number], name[sort_by], name[sort_order]]]]
keyword[def] identifier[find_by_reference_ids] ( identifier[reference_ids] , identifier[_connection] = keyword[None] , identifier[page_size] = literal[int] , identifier[page_number] = literal[int] , identifier[sort_by] = identifier[enums] . identifier[DEFAULT_SORT_BY] , identifier[sort_order] = identifier[enums] . identifier[DEFAULT_SORT_ORDER] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[reference_ids] ,( identifier[list] , identifier[tuple] )): identifier[err] = literal[string] keyword[raise] identifier[exceptions] . identifier[PyBrightcoveError] ( identifier[err] ) identifier[ids] = literal[string] . identifier[join] ( identifier[reference_ids] ) keyword[return] identifier[connection] . identifier[ItemResultSet] ( literal[string] , identifier[Video] , identifier[_connection] , identifier[page_size] , identifier[page_number] , identifier[sort_by] , identifier[sort_order] , identifier[reference_ids] = identifier[ids] )
def find_by_reference_ids(reference_ids, _connection=None, page_size=100, page_number=0, sort_by=enums.DEFAULT_SORT_BY, sort_order=enums.DEFAULT_SORT_ORDER): """ List all videos identified by a list of reference ids """ if not isinstance(reference_ids, (list, tuple)): err = 'Video.find_by_reference_ids expects an iterable argument' raise exceptions.PyBrightcoveError(err) # depends on [control=['if'], data=[]] ids = ','.join(reference_ids) return connection.ItemResultSet('find_videos_by_reference_ids', Video, _connection, page_size, page_number, sort_by, sort_order, reference_ids=ids)