Dataset schema: one row per Python function, four string columns (min/max string length per column as reported by the dataset viewer):

    code              string    75  .. 104k
    code_sememe       string    47  .. 309k
    token_type        string    215 .. 214k
    code_dependency   string    75  .. 155k
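Each row stores the same function in four parallel forms: code (the original source), code_sememe (an AST-walk rendering in which names, calls, constants and assignments are made explicit, with some nodes left as raw ast-object placeholders), token_type (the token stream with each token tagged as keyword, identifier or literal), and code_dependency (the source re-emitted with single-quoted strings and '# depends on [control=..., data=...]' annotations at the end of each controlled block). As a minimal hand-built sketch, not an actual dataset row, here is what the four fields might look like for a toy function; the exact serialization is inferred from the rows below:

    code:
        def add_one(x):
            """Return x plus one."""
            return x + 1

    code_sememe:
        def function[add_one, parameter[x]]:
            constant[Return x plus one.]
            return[binary_operation[name[x] + constant[1]]]

    token_type:
        keyword[def] identifier[add_one] ( identifier[x] ): literal[string] keyword[return] identifier[x] + literal[int]

    code_dependency (identical to code here, since there is no control flow to annotate):
        def add_one(x):
            """Return x plus one."""
            return x + 1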
def GetClientConfig(self, context, validate=True, deploy_timestamp=True):
  """Generates the client config file for inclusion in deployable binaries."""
  with utils.TempDirectory() as tmp_dir:
    # Make sure we write the file in yaml format.
    filename = os.path.join(
        tmp_dir,
        config.CONFIG.Get("ClientBuilder.config_filename", context=context))

    new_config = config.CONFIG.MakeNewConfig()
    new_config.Initialize(reset=True, data="")
    new_config.SetWriteBack(filename)

    # Only copy certain sections to the client. We enumerate all
    # defined options and then resolve those from the config in the
    # client's context. The result is the raw option as if the
    # client read our config file.
    client_context = context[:]
    while contexts.CLIENT_BUILD_CONTEXT in client_context:
      client_context.remove(contexts.CLIENT_BUILD_CONTEXT)
    for descriptor in sorted(config.CONFIG.type_infos, key=lambda x: x.name):
      if descriptor.name in self.SKIP_OPTION_LIST:
        continue

      if descriptor.section in self.CONFIG_SECTIONS:
        value = config.CONFIG.GetRaw(
            descriptor.name, context=client_context, default=None)

        if value is not None:
          logging.debug("Copying config option to client: %s",
                        descriptor.name)
          new_config.SetRaw(descriptor.name, value)

    if deploy_timestamp:
      deploy_time_string = str(rdfvalue.RDFDatetime.Now())
      new_config.Set("Client.deploy_time", deploy_time_string)
    new_config.Write()

    if validate:
      self.ValidateEndConfig(new_config)

    private_validator = config.CONFIG.Get(
        "ClientBuilder.private_config_validator_class", context=context)
    if private_validator:
      try:
        validator = config_validator_base.PrivateConfigValidator.classes[
            private_validator]()
      except KeyError:
        logging.error(
            "Couldn't find config validator class %s, "
            "you probably need to copy it into lib/local", private_validator)
        raise
      validator.ValidateEndConfig(new_config, self.context)

    return io.open(filename, "r").read()
def function[GetClientConfig, parameter[self, context, validate, deploy_timestamp]]: constant[Generates the client config file for inclusion in deployable binaries.] with call[name[utils].TempDirectory, parameter[]] begin[:] variable[filename] assign[=] call[name[os].path.join, parameter[name[tmp_dir], call[name[config].CONFIG.Get, parameter[constant[ClientBuilder.config_filename]]]]] variable[new_config] assign[=] call[name[config].CONFIG.MakeNewConfig, parameter[]] call[name[new_config].Initialize, parameter[]] call[name[new_config].SetWriteBack, parameter[name[filename]]] variable[client_context] assign[=] call[name[context]][<ast.Slice object at 0x7da18dc04070>] while compare[name[contexts].CLIENT_BUILD_CONTEXT in name[client_context]] begin[:] call[name[client_context].remove, parameter[name[contexts].CLIENT_BUILD_CONTEXT]] for taget[name[descriptor]] in starred[call[name[sorted], parameter[name[config].CONFIG.type_infos]]] begin[:] if compare[name[descriptor].name in name[self].SKIP_OPTION_LIST] begin[:] continue if compare[name[descriptor].section in name[self].CONFIG_SECTIONS] begin[:] variable[value] assign[=] call[name[config].CONFIG.GetRaw, parameter[name[descriptor].name]] if compare[name[value] is_not constant[None]] begin[:] call[name[logging].debug, parameter[constant[Copying config option to client: %s], name[descriptor].name]] call[name[new_config].SetRaw, parameter[name[descriptor].name, name[value]]] if name[deploy_timestamp] begin[:] variable[deploy_time_string] assign[=] call[name[str], parameter[call[name[rdfvalue].RDFDatetime.Now, parameter[]]]] call[name[new_config].Set, parameter[constant[Client.deploy_time], name[deploy_time_string]]] call[name[new_config].Write, parameter[]] if name[validate] begin[:] call[name[self].ValidateEndConfig, parameter[name[new_config]]] variable[private_validator] assign[=] call[name[config].CONFIG.Get, parameter[constant[ClientBuilder.private_config_validator_class]]] if name[private_validator] begin[:] <ast.Try object at 0x7da18dc07fa0> call[name[validator].ValidateEndConfig, parameter[name[new_config], name[self].context]] return[call[call[name[io].open, parameter[name[filename], constant[r]]].read, parameter[]]]
keyword[def] identifier[GetClientConfig] ( identifier[self] , identifier[context] , identifier[validate] = keyword[True] , identifier[deploy_timestamp] = keyword[True] ): literal[string] keyword[with] identifier[utils] . identifier[TempDirectory] () keyword[as] identifier[tmp_dir] : identifier[filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[tmp_dir] , identifier[config] . identifier[CONFIG] . identifier[Get] ( literal[string] , identifier[context] = identifier[context] )) identifier[new_config] = identifier[config] . identifier[CONFIG] . identifier[MakeNewConfig] () identifier[new_config] . identifier[Initialize] ( identifier[reset] = keyword[True] , identifier[data] = literal[string] ) identifier[new_config] . identifier[SetWriteBack] ( identifier[filename] ) identifier[client_context] = identifier[context] [:] keyword[while] identifier[contexts] . identifier[CLIENT_BUILD_CONTEXT] keyword[in] identifier[client_context] : identifier[client_context] . identifier[remove] ( identifier[contexts] . identifier[CLIENT_BUILD_CONTEXT] ) keyword[for] identifier[descriptor] keyword[in] identifier[sorted] ( identifier[config] . identifier[CONFIG] . identifier[type_infos] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] . identifier[name] ): keyword[if] identifier[descriptor] . identifier[name] keyword[in] identifier[self] . identifier[SKIP_OPTION_LIST] : keyword[continue] keyword[if] identifier[descriptor] . identifier[section] keyword[in] identifier[self] . identifier[CONFIG_SECTIONS] : identifier[value] = identifier[config] . identifier[CONFIG] . identifier[GetRaw] ( identifier[descriptor] . identifier[name] , identifier[context] = identifier[client_context] , identifier[default] = keyword[None] ) keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : identifier[logging] . identifier[debug] ( literal[string] , identifier[descriptor] . identifier[name] ) identifier[new_config] . identifier[SetRaw] ( identifier[descriptor] . identifier[name] , identifier[value] ) keyword[if] identifier[deploy_timestamp] : identifier[deploy_time_string] = identifier[str] ( identifier[rdfvalue] . identifier[RDFDatetime] . identifier[Now] ()) identifier[new_config] . identifier[Set] ( literal[string] , identifier[deploy_time_string] ) identifier[new_config] . identifier[Write] () keyword[if] identifier[validate] : identifier[self] . identifier[ValidateEndConfig] ( identifier[new_config] ) identifier[private_validator] = identifier[config] . identifier[CONFIG] . identifier[Get] ( literal[string] , identifier[context] = identifier[context] ) keyword[if] identifier[private_validator] : keyword[try] : identifier[validator] = identifier[config_validator_base] . identifier[PrivateConfigValidator] . identifier[classes] [ identifier[private_validator] ]() keyword[except] identifier[KeyError] : identifier[logging] . identifier[error] ( literal[string] literal[string] , identifier[private_validator] ) keyword[raise] identifier[validator] . identifier[ValidateEndConfig] ( identifier[new_config] , identifier[self] . identifier[context] ) keyword[return] identifier[io] . identifier[open] ( identifier[filename] , literal[string] ). identifier[read] ()
def GetClientConfig(self, context, validate=True, deploy_timestamp=True):
    """Generates the client config file for inclusion in deployable binaries."""
    with utils.TempDirectory() as tmp_dir:
        # Make sure we write the file in yaml format.
        filename = os.path.join(tmp_dir, config.CONFIG.Get('ClientBuilder.config_filename', context=context))
        new_config = config.CONFIG.MakeNewConfig()
        new_config.Initialize(reset=True, data='')
        new_config.SetWriteBack(filename)
        # Only copy certain sections to the client. We enumerate all
        # defined options and then resolve those from the config in the
        # client's context. The result is the raw option as if the
        # client read our config file.
        client_context = context[:]
        while contexts.CLIENT_BUILD_CONTEXT in client_context:
            client_context.remove(contexts.CLIENT_BUILD_CONTEXT) # depends on [control=['while'], data=['client_context']]
        for descriptor in sorted(config.CONFIG.type_infos, key=lambda x: x.name):
            if descriptor.name in self.SKIP_OPTION_LIST:
                continue # depends on [control=['if'], data=[]]
            if descriptor.section in self.CONFIG_SECTIONS:
                value = config.CONFIG.GetRaw(descriptor.name, context=client_context, default=None)
                if value is not None:
                    logging.debug('Copying config option to client: %s', descriptor.name)
                    new_config.SetRaw(descriptor.name, value) # depends on [control=['if'], data=['value']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['descriptor']]
        if deploy_timestamp:
            deploy_time_string = str(rdfvalue.RDFDatetime.Now())
            new_config.Set('Client.deploy_time', deploy_time_string) # depends on [control=['if'], data=[]]
        new_config.Write()
        if validate:
            self.ValidateEndConfig(new_config) # depends on [control=['if'], data=[]]
        private_validator = config.CONFIG.Get('ClientBuilder.private_config_validator_class', context=context)
        if private_validator:
            try:
                validator = config_validator_base.PrivateConfigValidator.classes[private_validator]() # depends on [control=['try'], data=[]]
            except KeyError:
                logging.error("Couldn't find config validator class %s, you probably need to copy it into lib/local", private_validator)
                raise # depends on [control=['except'], data=[]]
            validator.ValidateEndConfig(new_config, self.context) # depends on [control=['if'], data=[]]
        return io.open(filename, 'r').read() # depends on [control=['with'], data=['tmp_dir']]
def open_url(absolute_or_relative_url):
    """
    Loads a web page in the current browser session.

    :param absolute_or_relative_url:
        an absolute url to the web page when config.base_url is not specified,
        otherwise a relative url

    :Usage:
        open_url('http://mydomain.com/subpage1')
        open_url('http://mydomain.com/subpage2')
        # OR
        config.base_url = 'http://mydomain.com'
        open_url('/subpage1')
        open_url('/subpage2')
    """
    # todo: refactor next line when app_host is removed
    base_url = selene.config.app_host if selene.config.app_host else selene.config.base_url
    driver().get(base_url + absolute_or_relative_url)
def function[open_url, parameter[absolute_or_relative_url]]: constant[ Loads a web page in the current browser session. :param absolute_or_relative_url: an absolute url to the web page when config.base_url is not specified, otherwise a relative url :Usage: open_url('http://mydomain.com/subpage1') open_url('http://mydomain.com/subpage2') # OR config.base_url = 'http://mydomain.com' open_url('/subpage1') open_url('/subpage2') ] variable[base_url] assign[=] <ast.IfExp object at 0x7da18ede7190> call[call[name[driver], parameter[]].get, parameter[binary_operation[name[base_url] + name[absolute_or_relative_url]]]]
keyword[def] identifier[open_url] ( identifier[absolute_or_relative_url] ): literal[string] identifier[base_url] = identifier[selene] . identifier[config] . identifier[app_host] keyword[if] identifier[selene] . identifier[config] . identifier[app_host] keyword[else] identifier[selene] . identifier[config] . identifier[base_url] identifier[driver] (). identifier[get] ( identifier[base_url] + identifier[absolute_or_relative_url] )
def open_url(absolute_or_relative_url):
    """
    Loads a web page in the current browser session.

    :param absolute_or_relative_url:
        an absolute url to the web page when config.base_url is not specified,
        otherwise a relative url

    :Usage:
        open_url('http://mydomain.com/subpage1')
        open_url('http://mydomain.com/subpage2')
        # OR
        config.base_url = 'http://mydomain.com'
        open_url('/subpage1')
        open_url('/subpage2')
    """
    # todo: refactor next line when app_host is removed
    base_url = selene.config.app_host if selene.config.app_host else selene.config.base_url
    driver().get(base_url + absolute_or_relative_url)
def find_teradata_home():
    """
    Attempts to find the Teradata install directory with the defaults for
    a given platform. Should always return `None` when the defaults are not
    present and the TERADATA_HOME environment variable wasn't explicitly
    set to the correct install location.
    """
    if platform.system() == 'Windows':
        # The default installation path for Windows is split between the
        # Windows directories for 32-bit/64-bit applications. It is
        # worth noting that the Teradata architecture installed should
        # match the Python architecture being used (i.e. TTU 32-bit is
        # required /w Python 32-bit and TTU 64-bit is required for
        # Python 64-bit).
        if is_64bit():
            return latest_teradata_version("C:/Program Files/Teradata/Client")
        else:
            return latest_teradata_version("C:/Program Files (x86)/Teradata/Client")
    elif platform.system() == 'Linux':
        return latest_teradata_version("/opt/teradata/client")
    elif platform.system() == 'Darwin':
        return latest_teradata_version("/Library/Application Support/teradata/client")
    else:
        # In the case nothing is found, the default for Linux is
        # attempted as a last effort to find the correct install
        # directory.
        return latest_teradata_version("/opt/teradata/client")
def function[find_teradata_home, parameter[]]: constant[ Attempts to find the Teradata install directory with the defaults for a given platform. Should always return `None` when the defaults are not present and the TERADATA_HOME environment variable wasn't explicitly set to the correct install location. ] if compare[call[name[platform].system, parameter[]] equal[==] constant[Windows]] begin[:] if call[name[is_64bit], parameter[]] begin[:] return[call[name[latest_teradata_version], parameter[constant[C:/Program Files/Teradata/Client]]]]
keyword[def] identifier[find_teradata_home] (): literal[string] keyword[if] identifier[platform] . identifier[system] ()== literal[string] : keyword[if] identifier[is_64bit] (): keyword[return] identifier[latest_teradata_version] ( literal[string] ) keyword[else] : keyword[return] identifier[latest_teradata_version] ( literal[string] ) keyword[elif] identifier[platform] . identifier[system] ()== literal[string] : keyword[return] identifier[latest_teradata_version] ( literal[string] ) keyword[elif] identifier[platform] . identifier[system] ()== literal[string] : keyword[return] identifier[latest_teradata_version] ( literal[string] ) keyword[else] : keyword[return] identifier[latest_teradata_version] ( literal[string] )
def find_teradata_home():
    """
    Attempts to find the Teradata install directory with the defaults for
    a given platform. Should always return `None` when the defaults are not
    present and the TERADATA_HOME environment variable wasn't explicitly
    set to the correct install location.
    """
    if platform.system() == 'Windows':
        # The default installation path for Windows is split between the
        # Windows directories for 32-bit/64-bit applications. It is
        # worth noting that the Teradata architecture installed should
        # match the Python architecture being used (i.e. TTU 32-bit is
        # required /w Python 32-bit and TTU 64-bit is required for
        # Python 64-bit).
        if is_64bit():
            return latest_teradata_version('C:/Program Files/Teradata/Client') # depends on [control=['if'], data=[]]
        else:
            return latest_teradata_version('C:/Program Files (x86)/Teradata/Client') # depends on [control=['if'], data=[]]
    elif platform.system() == 'Linux':
        return latest_teradata_version('/opt/teradata/client') # depends on [control=['if'], data=[]]
    elif platform.system() == 'Darwin':
        return latest_teradata_version('/Library/Application Support/teradata/client') # depends on [control=['if'], data=[]]
    else:
        # In the case nothing is found, the default for Linux is
        # attempted as a last effort to find the correct install
        # directory.
        return latest_teradata_version('/opt/teradata/client')
def addFile(self, path, msg=""):
    """Adds a file to the version"""
    item = Item.from_path(repo=self.repo, path=path)
    self.addItem(item)
def function[addFile, parameter[self, path, msg]]: constant[Adds a file to the version] variable[item] assign[=] call[name[Item].from_path, parameter[]] call[name[self].addItem, parameter[name[item]]]
keyword[def] identifier[addFile] ( identifier[self] , identifier[path] , identifier[msg] = literal[string] ): literal[string] identifier[item] = identifier[Item] . identifier[from_path] ( identifier[repo] = identifier[self] . identifier[repo] , identifier[path] = identifier[path] ) identifier[self] . identifier[addItem] ( identifier[item] )
def addFile(self, path, msg=''):
    """Adds a file to the version"""
    item = Item.from_path(repo=self.repo, path=path)
    self.addItem(item)
def putch(self, char):
    """
    Prints the specific character, which must be a valid printable ASCII
    value in the range 32..127 only, or one of carriage return (\\r),
    newline (\\n), backspace (\\b) or tab (\\t).

    :param char: The character to print.
    """
    if char == '\r':
        self.carriage_return()
    elif char == '\n':
        self.newline()
    elif char == '\b':
        self.backspace()
    elif char == '\t':
        self.tab()
    else:
        w = self.font.getsize(char)[0]
        if self._cx + w >= self._device.width:
            self.newline()
        self.erase()
        self._canvas.text((self._cx, self._cy), text=char, font=self.font,
                          fill=self._fgcolor)
        self._cx += w
        if self.animate:
            self.flush()
def function[putch, parameter[self, char]]: constant[ Prints the specific character, which must be a valid printable ASCII value in the range 32..127 only, or one of carriage return (\r), newline (\n), backspace (\b) or tab (\t). :param char: The character to print. ] if compare[name[char] equal[==] constant[ ]] begin[:] call[name[self].carriage_return, parameter[]]
keyword[def] identifier[putch] ( identifier[self] , identifier[char] ): literal[string] keyword[if] identifier[char] == literal[string] : identifier[self] . identifier[carriage_return] () keyword[elif] identifier[char] == literal[string] : identifier[self] . identifier[newline] () keyword[elif] identifier[char] == literal[string] : identifier[self] . identifier[backspace] () keyword[elif] identifier[char] == literal[string] : identifier[self] . identifier[tab] () keyword[else] : identifier[w] = identifier[self] . identifier[font] . identifier[getsize] ( identifier[char] )[ literal[int] ] keyword[if] identifier[self] . identifier[_cx] + identifier[w] >= identifier[self] . identifier[_device] . identifier[width] : identifier[self] . identifier[newline] () identifier[self] . identifier[erase] () identifier[self] . identifier[_canvas] . identifier[text] (( identifier[self] . identifier[_cx] , identifier[self] . identifier[_cy] ), identifier[text] = identifier[char] , identifier[font] = identifier[self] . identifier[font] , identifier[fill] = identifier[self] . identifier[_fgcolor] ) identifier[self] . identifier[_cx] += identifier[w] keyword[if] identifier[self] . identifier[animate] : identifier[self] . identifier[flush] ()
def putch(self, char):
    """
    Prints the specific character, which must be a valid printable ASCII
    value in the range 32..127 only, or one of carriage return (\\r),
    newline (\\n), backspace (\\b) or tab (\\t).

    :param char: The character to print.
    """
    if char == '\r':
        self.carriage_return() # depends on [control=['if'], data=[]]
    elif char == '\n':
        self.newline() # depends on [control=['if'], data=[]]
    elif char == '\x08':
        self.backspace() # depends on [control=['if'], data=[]]
    elif char == '\t':
        self.tab() # depends on [control=['if'], data=[]]
    else:
        w = self.font.getsize(char)[0]
        if self._cx + w >= self._device.width:
            self.newline() # depends on [control=['if'], data=[]]
        self.erase()
        self._canvas.text((self._cx, self._cy), text=char, font=self.font, fill=self._fgcolor)
        self._cx += w
        if self.animate:
            self.flush() # depends on [control=['if'], data=[]]
def get_bareground_mask(bareground_ds, bareground_thresh=60, out_fn=None):
    """Generate raster mask for exposed bare ground from global bareground data
    """
    print("Loading bareground")
    b = bareground_ds.GetRasterBand(1)
    l = b.ReadAsArray()
    print("Masking pixels with <%0.1f%% bare ground" % bareground_thresh)
    if bareground_thresh < 0.0 or bareground_thresh > 100.0:
        sys.exit("Invalid bare ground percentage")
    mask = (l > bareground_thresh)
    #Write out original data
    if out_fn is not None:
        print("Writing out %s" % out_fn)
        iolib.writeGTiff(l, out_fn, bareground_ds)
    l = None
    return mask
def function[get_bareground_mask, parameter[bareground_ds, bareground_thresh, out_fn]]: constant[Generate raster mask for exposed bare ground from global bareground data ] call[name[print], parameter[constant[Loading bareground]]] variable[b] assign[=] call[name[bareground_ds].GetRasterBand, parameter[constant[1]]] variable[l] assign[=] call[name[b].ReadAsArray, parameter[]] call[name[print], parameter[binary_operation[constant[Masking pixels with <%0.1f%% bare ground] <ast.Mod object at 0x7da2590d6920> name[bareground_thresh]]]] if <ast.BoolOp object at 0x7da18eb57b20> begin[:] call[name[sys].exit, parameter[constant[Invalid bare ground percentage]]] variable[mask] assign[=] compare[name[l] greater[>] name[bareground_thresh]] if compare[name[out_fn] is_not constant[None]] begin[:] call[name[print], parameter[binary_operation[constant[Writing out %s] <ast.Mod object at 0x7da2590d6920> name[out_fn]]]] call[name[iolib].writeGTiff, parameter[name[l], name[out_fn], name[bareground_ds]]] variable[l] assign[=] constant[None] return[name[mask]]
keyword[def] identifier[get_bareground_mask] ( identifier[bareground_ds] , identifier[bareground_thresh] = literal[int] , identifier[out_fn] = keyword[None] ): literal[string] identifier[print] ( literal[string] ) identifier[b] = identifier[bareground_ds] . identifier[GetRasterBand] ( literal[int] ) identifier[l] = identifier[b] . identifier[ReadAsArray] () identifier[print] ( literal[string] % identifier[bareground_thresh] ) keyword[if] identifier[bareground_thresh] < literal[int] keyword[or] identifier[bareground_thresh] > literal[int] : identifier[sys] . identifier[exit] ( literal[string] ) identifier[mask] =( identifier[l] > identifier[bareground_thresh] ) keyword[if] identifier[out_fn] keyword[is] keyword[not] keyword[None] : identifier[print] ( literal[string] % identifier[out_fn] ) identifier[iolib] . identifier[writeGTiff] ( identifier[l] , identifier[out_fn] , identifier[bareground_ds] ) identifier[l] = keyword[None] keyword[return] identifier[mask]
def get_bareground_mask(bareground_ds, bareground_thresh=60, out_fn=None):
    """Generate raster mask for exposed bare ground from global bareground data
    """
    print('Loading bareground')
    b = bareground_ds.GetRasterBand(1)
    l = b.ReadAsArray()
    print('Masking pixels with <%0.1f%% bare ground' % bareground_thresh)
    if bareground_thresh < 0.0 or bareground_thresh > 100.0:
        sys.exit('Invalid bare ground percentage') # depends on [control=['if'], data=[]]
    mask = l > bareground_thresh
    #Write out original data
    if out_fn is not None:
        print('Writing out %s' % out_fn)
        iolib.writeGTiff(l, out_fn, bareground_ds) # depends on [control=['if'], data=['out_fn']]
    l = None
    return mask
def wrap(cls, public_key, algorithm):
    """
    Wraps a public key in a PublicKeyInfo structure

    :param public_key:
        A byte string or Asn1Value object of the public key

    :param algorithm:
        A unicode string of "rsa"

    :return:
        A PublicKeyInfo object
    """

    if not isinstance(public_key, byte_cls) and not isinstance(public_key, Asn1Value):
        raise TypeError(unwrap(
            '''
            public_key must be a byte string or Asn1Value, not %s
            ''',
            type_name(public_key)
        ))

    if algorithm != 'rsa':
        raise ValueError(unwrap(
            '''
            algorithm must be "rsa", not %s
            ''',
            repr(algorithm)
        ))

    algo = PublicKeyAlgorithm()
    algo['algorithm'] = PublicKeyAlgorithmId(algorithm)
    algo['parameters'] = Null()

    container = cls()
    container['algorithm'] = algo
    if isinstance(public_key, Asn1Value):
        public_key = public_key.untag().dump()
    container['public_key'] = ParsableOctetBitString(public_key)

    return container
def function[wrap, parameter[cls, public_key, algorithm]]: constant[ Wraps a public key in a PublicKeyInfo structure :param public_key: A byte string or Asn1Value object of the public key :param algorithm: A unicode string of "rsa" :return: A PublicKeyInfo object ] if <ast.BoolOp object at 0x7da1b08adb40> begin[:] <ast.Raise object at 0x7da1b08ae080> if compare[name[algorithm] not_equal[!=] constant[rsa]] begin[:] <ast.Raise object at 0x7da1b08ae200> variable[algo] assign[=] call[name[PublicKeyAlgorithm], parameter[]] call[name[algo]][constant[algorithm]] assign[=] call[name[PublicKeyAlgorithmId], parameter[name[algorithm]]] call[name[algo]][constant[parameters]] assign[=] call[name[Null], parameter[]] variable[container] assign[=] call[name[cls], parameter[]] call[name[container]][constant[algorithm]] assign[=] name[algo] if call[name[isinstance], parameter[name[public_key], name[Asn1Value]]] begin[:] variable[public_key] assign[=] call[call[name[public_key].untag, parameter[]].dump, parameter[]] call[name[container]][constant[public_key]] assign[=] call[name[ParsableOctetBitString], parameter[name[public_key]]] return[name[container]]
keyword[def] identifier[wrap] ( identifier[cls] , identifier[public_key] , identifier[algorithm] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[public_key] , identifier[byte_cls] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[public_key] , identifier[Asn1Value] ): keyword[raise] identifier[TypeError] ( identifier[unwrap] ( literal[string] , identifier[type_name] ( identifier[public_key] ) )) keyword[if] identifier[algorithm] != literal[string] : keyword[raise] identifier[ValueError] ( identifier[unwrap] ( literal[string] , identifier[repr] ( identifier[algorithm] ) )) identifier[algo] = identifier[PublicKeyAlgorithm] () identifier[algo] [ literal[string] ]= identifier[PublicKeyAlgorithmId] ( identifier[algorithm] ) identifier[algo] [ literal[string] ]= identifier[Null] () identifier[container] = identifier[cls] () identifier[container] [ literal[string] ]= identifier[algo] keyword[if] identifier[isinstance] ( identifier[public_key] , identifier[Asn1Value] ): identifier[public_key] = identifier[public_key] . identifier[untag] (). identifier[dump] () identifier[container] [ literal[string] ]= identifier[ParsableOctetBitString] ( identifier[public_key] ) keyword[return] identifier[container]
def wrap(cls, public_key, algorithm):
    """
    Wraps a public key in a PublicKeyInfo structure

    :param public_key:
        A byte string or Asn1Value object of the public key

    :param algorithm:
        A unicode string of "rsa"

    :return:
        A PublicKeyInfo object
    """
    if not isinstance(public_key, byte_cls) and (not isinstance(public_key, Asn1Value)):
        raise TypeError(unwrap('\n public_key must be a byte string or Asn1Value, not %s\n ', type_name(public_key))) # depends on [control=['if'], data=[]]
    if algorithm != 'rsa':
        raise ValueError(unwrap('\n algorithm must be "rsa", not %s\n ', repr(algorithm))) # depends on [control=['if'], data=['algorithm']]
    algo = PublicKeyAlgorithm()
    algo['algorithm'] = PublicKeyAlgorithmId(algorithm)
    algo['parameters'] = Null()
    container = cls()
    container['algorithm'] = algo
    if isinstance(public_key, Asn1Value):
        public_key = public_key.untag().dump() # depends on [control=['if'], data=[]]
    container['public_key'] = ParsableOctetBitString(public_key)
    return container
def _channel_loop(detection, template, min_cc, detection_id, interpolate, i,
                  pre_lag_ccsum=None, detect_chans=0,
                  horizontal_chans=['E', 'N', '1', '2'],
                  vertical_chans=['Z'], debug=0):
    """
    Inner loop for correlating and assigning picks.

    Utility function to take a stream of data for the detected event and
    write maximum correlation to absolute time as picks in an
    obspy.core.event.Event object.
    Only outputs picks for picks above min_cc.

    :type detection: obspy.core.stream.Stream
    :param detection:
        Stream of data for the slave event detected using template.
    :type template: obspy.core.stream.Stream
    :param template: Stream of data as the template for the detection.
    :type min_cc: float
    :param min_cc: Minimum cross-correlation value to allow a pick to be made.
    :type detection_id: str
    :param detection_id: Detection ID to associate the event with.
    :type interpolate: bool
    :param interpolate:
        Interpolate the correlation function to achieve sub-sample precision.
    :type i: int
    :param i:
        Used to track which process has occurred when running in parallel.
    :type pre_lag_ccsum: float
    :param pre_lag_ccsum:
        Cross-correlation sum before lag-calc, will check that the
        cross-correlation sum is increased by lag-calc (using all channels,
        ignoring min_cc)
    :type detect_chans: int
    :param detect_chans:
        Number of channels originally used in detections, must match the
        number used here to allow for cccsum checking.
    :type horizontal_chans: list
    :param horizontal_chans:
        List of channel endings for horizontal-channels, on which S-picks
        will be made.
    :type vertical_chans: list
    :param vertical_chans:
        List of channel endings for vertical-channels, on which P-picks
        will be made.
    :type debug: int
    :param debug: Debug output level 0-5.

    :returns:
        Event object containing network, station, channel and pick
        information.
    :rtype: :class:`obspy.core.event.Event`
    """
    from eqcorrscan.core.match_filter import normxcorr2
    import math
    event = Event()
    s_stachans = {}
    cccsum = 0
    checksum = 0
    used_chans = 0
    for tr in template:
        temp_net = tr.stats.network
        temp_sta = tr.stats.station
        temp_chan = tr.stats.channel
        debug_print('Working on: %s.%s.%s' %
                    (temp_net, temp_sta, temp_chan), 3, debug)
        image = detection.select(station=temp_sta, channel=temp_chan)
        if len(image) == 0 or sum(image[0].data) == 0:
            print('No match in image.')
            continue
        if interpolate:
            try:
                ccc = normxcorr2(tr.data, image[0].data)
            except Exception:
                print('Could not calculate cc')
                print('Image is %i long' % len(image[0].data))
                print('Template is %i long' % len(tr.data))
                continue
            try:
                shift, cc_max = _xcorr_interp(ccc=ccc, dt=image[0].stats.delta)
            except IndexError:
                print('Could not interpolate ccc, not smooth')
                ccc = normxcorr2(tr.data, image[0].data)
                cc_max = np.amax(ccc)
                shift = np.argmax(ccc) * image[0].stats.delta
            # Convert the maximum cross-correlation time to an actual time
            if math.isnan(cc_max):
                print('Problematic trace, no cross correlation possible')
                continue
            else:
                picktime = image[0].stats.starttime + shift
        else:
            # Convert the maximum cross-correlation time to an actual time
            try:
                ccc = normxcorr2(tr.data, image[0].data)
            except Exception:
                print('Could not calculate cc')
                print('Image is %i long' % len(image[0].data))
                print('Template is %i long' % len(tr.data))
                continue
            cc_max = np.amax(ccc)
            if math.isnan(cc_max):
                print('Problematic trace, no cross correlation possible')
                continue
            else:
                picktime = image[0].stats.starttime + (
                    np.argmax(ccc) * image[0].stats.delta)
        debug_print('Maximum cross-corr=%s' % cc_max, 3, debug)
        checksum += cc_max
        used_chans += 1
        if cc_max < min_cc:
            debug_print('Correlation below threshold, not used', 3, debug)
            continue
        cccsum += cc_max
        # Perhaps weight each pick by the cc val or cc val^2?
        # weight = np.amax(ccc) ** 2
        if temp_chan[-1] in vertical_chans:
            phase = 'P'
        # Only take the S-pick with the best correlation
        elif temp_chan[-1] in horizontal_chans:
            phase = 'S'
            debug_print('Making S-pick on: %s.%s.%s' %
                        (temp_net, temp_sta, temp_chan), 4, debug)
            if temp_sta not in s_stachans.keys():
                s_stachans[temp_sta] = ((temp_chan, np.amax(ccc), picktime))
            elif temp_sta in s_stachans.keys():
                if np.amax(ccc) > s_stachans[temp_sta][1]:
                    picktime = picktime
                else:
                    continue
        else:
            phase = None
        _waveform_id = WaveformStreamID(
            network_code=temp_net, station_code=temp_sta,
            channel_code=temp_chan)
        event.picks.append(Pick(
            waveform_id=_waveform_id, time=picktime,
            method_id=ResourceIdentifier('EQcorrscan'), phase_hint=phase,
            creation_info='eqcorrscan.core.lag_calc',
            evaluation_mode='automatic',
            comments=[Comment(text='cc_max=%s' % cc_max)]))
        event.resource_id = detection_id
    ccc_str = ("detect_val=%s" % cccsum)
    event.comments.append(Comment(text=ccc_str))
    if used_chans == detect_chans:
        if pre_lag_ccsum is not None and\
           checksum - pre_lag_ccsum < -(0.3 * pre_lag_ccsum):
            msg = ('lag-calc has decreased cccsum from %f to %f - ' %
                   (pre_lag_ccsum, checksum))
            raise LagCalcError(msg)
    else:
        warnings.warn('Cannot check if cccsum is better, used %i channels '
                      'for detection, but %i are used here' %
                      (detect_chans, used_chans))
    return i, event
def function[_channel_loop, parameter[detection, template, min_cc, detection_id, interpolate, i, pre_lag_ccsum, detect_chans, horizontal_chans, vertical_chans, debug]]: constant[ Inner loop for correlating and assigning picks. Utility function to take a stream of data for the detected event and write maximum correlation to absolute time as picks in an obspy.core.event.Event object. Only outputs picks for picks above min_cc. :type detection: obspy.core.stream.Stream :param detection: Stream of data for the slave event detected using template. :type template: obspy.core.stream.Stream :param template: Stream of data as the template for the detection. :type min_cc: float :param min_cc: Minimum cross-correlation value to allow a pick to be made. :type detection_id: str :param detection_id: Detection ID to associate the event with. :type interpolate: bool :param interpolate: Interpolate the correlation function to achieve sub-sample precision. :type i: int :param i: Used to track which process has occurred when running in parallel. :type pre_lag_ccsum: float :param pre_lag_ccsum: Cross-correlation sum before lag-calc, will check that the cross-correlation sum is increased by lag-calc (using all channels, ignoring min_cc) :type detect_chans: int :param detect_chans: Number of channels originally used in detections, must match the number used here to allow for cccsum checking. :type horizontal_chans: list :param horizontal_chans: List of channel endings for horizontal-channels, on which S-picks will be made. :type vertical_chans: list :param vertical_chans: List of channel endings for vertical-channels, on which P-picks will be made. :type debug: int :param debug: Debug output level 0-5. :returns: Event object containing network, station, channel and pick information. :rtype: :class:`obspy.core.event.Event` ] from relative_module[eqcorrscan.core.match_filter] import module[normxcorr2] import module[math] variable[event] assign[=] call[name[Event], parameter[]] variable[s_stachans] assign[=] dictionary[[], []] variable[cccsum] assign[=] constant[0] variable[checksum] assign[=] constant[0] variable[used_chans] assign[=] constant[0] for taget[name[tr]] in starred[name[template]] begin[:] variable[temp_net] assign[=] name[tr].stats.network variable[temp_sta] assign[=] name[tr].stats.station variable[temp_chan] assign[=] name[tr].stats.channel call[name[debug_print], parameter[binary_operation[constant[Working on: %s.%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6a9a50>, <ast.Name object at 0x7da20c6abb20>, <ast.Name object at 0x7da20c6aa3b0>]]], constant[3], name[debug]]] variable[image] assign[=] call[name[detection].select, parameter[]] if <ast.BoolOp object at 0x7da20c6aa650> begin[:] call[name[print], parameter[constant[No match in image.]]] continue if name[interpolate] begin[:] <ast.Try object at 0x7da20c6a8c40> <ast.Try object at 0x7da20c6aa9b0> if call[name[math].isnan, parameter[name[cc_max]]] begin[:] call[name[print], parameter[constant[Problematic trace, no cross correlation possible]]] continue call[name[debug_print], parameter[binary_operation[constant[Maximum cross-corr=%s] <ast.Mod object at 0x7da2590d6920> name[cc_max]], constant[3], name[debug]]] <ast.AugAssign object at 0x7da18f09fe50> <ast.AugAssign object at 0x7da18f09d7b0> if compare[name[cc_max] less[<] name[min_cc]] begin[:] call[name[debug_print], parameter[constant[Correlation below threshold, not used], constant[3], name[debug]]] continue <ast.AugAssign object at 0x7da18f09ea10> if compare[call[name[temp_chan]][<ast.UnaryOp object at 0x7da18f09d540>] in name[vertical_chans]] begin[:] variable[phase] assign[=] constant[P] variable[_waveform_id] assign[=] call[name[WaveformStreamID], parameter[]] call[name[event].picks.append, parameter[call[name[Pick], parameter[]]]] name[event].resource_id assign[=] name[detection_id] variable[ccc_str] assign[=] binary_operation[constant[detect_val=%s] <ast.Mod object at 0x7da2590d6920> name[cccsum]] call[name[event].comments.append, parameter[call[name[Comment], parameter[]]]] if compare[name[used_chans] equal[==] name[detect_chans]] begin[:] if <ast.BoolOp object at 0x7da18f09eef0> begin[:] variable[msg] assign[=] binary_operation[constant[lag-calc has decreased cccsum from %f to %f - ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f09ef80>, <ast.Name object at 0x7da18f09c3a0>]]] <ast.Raise object at 0x7da18f09cfa0> return[tuple[[<ast.Name object at 0x7da18f09fc40>, <ast.Name object at 0x7da18f09c850>]]]
keyword[def] identifier[_channel_loop] ( identifier[detection] , identifier[template] , identifier[min_cc] , identifier[detection_id] , identifier[interpolate] , identifier[i] , identifier[pre_lag_ccsum] = keyword[None] , identifier[detect_chans] = literal[int] , identifier[horizontal_chans] =[ literal[string] , literal[string] , literal[string] , literal[string] ], identifier[vertical_chans] =[ literal[string] ], identifier[debug] = literal[int] ): literal[string] keyword[from] identifier[eqcorrscan] . identifier[core] . identifier[match_filter] keyword[import] identifier[normxcorr2] keyword[import] identifier[math] identifier[event] = identifier[Event] () identifier[s_stachans] ={} identifier[cccsum] = literal[int] identifier[checksum] = literal[int] identifier[used_chans] = literal[int] keyword[for] identifier[tr] keyword[in] identifier[template] : identifier[temp_net] = identifier[tr] . identifier[stats] . identifier[network] identifier[temp_sta] = identifier[tr] . identifier[stats] . identifier[station] identifier[temp_chan] = identifier[tr] . identifier[stats] . identifier[channel] identifier[debug_print] ( literal[string] %( identifier[temp_net] , identifier[temp_sta] , identifier[temp_chan] ), literal[int] , identifier[debug] ) identifier[image] = identifier[detection] . identifier[select] ( identifier[station] = identifier[temp_sta] , identifier[channel] = identifier[temp_chan] ) keyword[if] identifier[len] ( identifier[image] )== literal[int] keyword[or] identifier[sum] ( identifier[image] [ literal[int] ]. identifier[data] )== literal[int] : identifier[print] ( literal[string] ) keyword[continue] keyword[if] identifier[interpolate] : keyword[try] : identifier[ccc] = identifier[normxcorr2] ( identifier[tr] . identifier[data] , identifier[image] [ literal[int] ]. identifier[data] ) keyword[except] identifier[Exception] : identifier[print] ( literal[string] ) identifier[print] ( literal[string] % identifier[len] ( identifier[image] [ literal[int] ]. identifier[data] )) identifier[print] ( literal[string] % identifier[len] ( identifier[tr] . identifier[data] )) keyword[continue] keyword[try] : identifier[shift] , identifier[cc_max] = identifier[_xcorr_interp] ( identifier[ccc] = identifier[ccc] , identifier[dt] = identifier[image] [ literal[int] ]. identifier[stats] . identifier[delta] ) keyword[except] identifier[IndexError] : identifier[print] ( literal[string] ) identifier[ccc] = identifier[normxcorr2] ( identifier[tr] . identifier[data] , identifier[image] [ literal[int] ]. identifier[data] ) identifier[cc_max] = identifier[np] . identifier[amax] ( identifier[ccc] ) identifier[shift] = identifier[np] . identifier[argmax] ( identifier[ccc] )* identifier[image] [ literal[int] ]. identifier[stats] . identifier[delta] keyword[if] identifier[math] . identifier[isnan] ( identifier[cc_max] ): identifier[print] ( literal[string] ) keyword[continue] keyword[else] : identifier[picktime] = identifier[image] [ literal[int] ]. identifier[stats] . identifier[starttime] + identifier[shift] keyword[else] : keyword[try] : identifier[ccc] = identifier[normxcorr2] ( identifier[tr] . identifier[data] , identifier[image] [ literal[int] ]. identifier[data] ) keyword[except] identifier[Exception] : identifier[print] ( literal[string] ) identifier[print] ( literal[string] % identifier[len] ( identifier[image] [ literal[int] ]. identifier[data] )) identifier[print] ( literal[string] % identifier[len] ( identifier[tr] . identifier[data] )) keyword[continue] identifier[cc_max] = identifier[np] . identifier[amax] ( identifier[ccc] ) keyword[if] identifier[math] . identifier[isnan] ( identifier[cc_max] ): identifier[print] ( literal[string] ) keyword[continue] keyword[else] : identifier[picktime] = identifier[image] [ literal[int] ]. identifier[stats] . identifier[starttime] +( identifier[np] . identifier[argmax] ( identifier[ccc] )* identifier[image] [ literal[int] ]. identifier[stats] . identifier[delta] ) identifier[debug_print] ( literal[string] % identifier[cc_max] , literal[int] , identifier[debug] ) identifier[checksum] += identifier[cc_max] identifier[used_chans] += literal[int] keyword[if] identifier[cc_max] < identifier[min_cc] : identifier[debug_print] ( literal[string] , literal[int] , identifier[debug] ) keyword[continue] identifier[cccsum] += identifier[cc_max] keyword[if] identifier[temp_chan] [- literal[int] ] keyword[in] identifier[vertical_chans] : identifier[phase] = literal[string] keyword[elif] identifier[temp_chan] [- literal[int] ] keyword[in] identifier[horizontal_chans] : identifier[phase] = literal[string] identifier[debug_print] ( literal[string] % ( identifier[temp_net] , identifier[temp_sta] , identifier[temp_chan] ), literal[int] , identifier[debug] ) keyword[if] identifier[temp_sta] keyword[not] keyword[in] identifier[s_stachans] . identifier[keys] (): identifier[s_stachans] [ identifier[temp_sta] ]=(( identifier[temp_chan] , identifier[np] . identifier[amax] ( identifier[ccc] ), identifier[picktime] )) keyword[elif] identifier[temp_sta] keyword[in] identifier[s_stachans] . identifier[keys] (): keyword[if] identifier[np] . identifier[amax] ( identifier[ccc] )> identifier[s_stachans] [ identifier[temp_sta] ][ literal[int] ]: identifier[picktime] = identifier[picktime] keyword[else] : keyword[continue] keyword[else] : identifier[phase] = keyword[None] identifier[_waveform_id] = identifier[WaveformStreamID] ( identifier[network_code] = identifier[temp_net] , identifier[station_code] = identifier[temp_sta] , identifier[channel_code] = identifier[temp_chan] ) identifier[event] . identifier[picks] . identifier[append] ( identifier[Pick] ( identifier[waveform_id] = identifier[_waveform_id] , identifier[time] = identifier[picktime] , identifier[method_id] = identifier[ResourceIdentifier] ( literal[string] ), identifier[phase_hint] = identifier[phase] , identifier[creation_info] = literal[string] , identifier[evaluation_mode] = literal[string] , identifier[comments] =[ identifier[Comment] ( identifier[text] = literal[string] % identifier[cc_max] )])) identifier[event] . identifier[resource_id] = identifier[detection_id] identifier[ccc_str] =( literal[string] % identifier[cccsum] ) identifier[event] . identifier[comments] . identifier[append] ( identifier[Comment] ( identifier[text] = identifier[ccc_str] )) keyword[if] identifier[used_chans] == identifier[detect_chans] : keyword[if] identifier[pre_lag_ccsum] keyword[is] keyword[not] keyword[None] keyword[and] identifier[checksum] - identifier[pre_lag_ccsum] <-( literal[int] * identifier[pre_lag_ccsum] ): identifier[msg] =( literal[string] %( identifier[pre_lag_ccsum] , identifier[checksum] )) keyword[raise] identifier[LagCalcError] ( identifier[msg] ) keyword[else] : identifier[warnings] . identifier[warn] ( literal[string] literal[string] %( identifier[detect_chans] , identifier[used_chans] )) keyword[return] identifier[i] , identifier[event]
def _channel_loop(detection, template, min_cc, detection_id, interpolate, i, pre_lag_ccsum=None, detect_chans=0, horizontal_chans=['E', 'N', '1', '2'], vertical_chans=['Z'], debug=0):
    """
    Inner loop for correlating and assigning picks.

    Utility function to take a stream of data for the detected event and
    write maximum correlation to absolute time as picks in an
    obspy.core.event.Event object.
    Only outputs picks for picks above min_cc.

    :type detection: obspy.core.stream.Stream
    :param detection:
        Stream of data for the slave event detected using template.
    :type template: obspy.core.stream.Stream
    :param template: Stream of data as the template for the detection.
    :type min_cc: float
    :param min_cc: Minimum cross-correlation value to allow a pick to be made.
    :type detection_id: str
    :param detection_id: Detection ID to associate the event with.
    :type interpolate: bool
    :param interpolate:
        Interpolate the correlation function to achieve sub-sample precision.
    :type i: int
    :param i:
        Used to track which process has occurred when running in parallel.
    :type pre_lag_ccsum: float
    :param pre_lag_ccsum:
        Cross-correlation sum before lag-calc, will check that the
        cross-correlation sum is increased by lag-calc (using all channels,
        ignoring min_cc)
    :type detect_chans: int
    :param detect_chans:
        Number of channels originally used in detections, must match the
        number used here to allow for cccsum checking.
    :type horizontal_chans: list
    :param horizontal_chans:
        List of channel endings for horizontal-channels, on which S-picks
        will be made.
    :type vertical_chans: list
    :param vertical_chans:
        List of channel endings for vertical-channels, on which P-picks
        will be made.
    :type debug: int
    :param debug: Debug output level 0-5.

    :returns:
        Event object containing network, station, channel and pick
        information.
    :rtype: :class:`obspy.core.event.Event`
    """
    from eqcorrscan.core.match_filter import normxcorr2
    import math
    event = Event()
    s_stachans = {}
    cccsum = 0
    checksum = 0
    used_chans = 0
    for tr in template:
        temp_net = tr.stats.network
        temp_sta = tr.stats.station
        temp_chan = tr.stats.channel
        debug_print('Working on: %s.%s.%s' % (temp_net, temp_sta, temp_chan), 3, debug)
        image = detection.select(station=temp_sta, channel=temp_chan)
        if len(image) == 0 or sum(image[0].data) == 0:
            print('No match in image.')
            continue # depends on [control=['if'], data=[]]
        if interpolate:
            try:
                ccc = normxcorr2(tr.data, image[0].data) # depends on [control=['try'], data=[]]
            except Exception:
                print('Could not calculate cc')
                print('Image is %i long' % len(image[0].data))
                print('Template is %i long' % len(tr.data))
                continue # depends on [control=['except'], data=[]]
            try:
                (shift, cc_max) = _xcorr_interp(ccc=ccc, dt=image[0].stats.delta) # depends on [control=['try'], data=[]]
            except IndexError:
                print('Could not interpolate ccc, not smooth')
                ccc = normxcorr2(tr.data, image[0].data)
                cc_max = np.amax(ccc)
                shift = np.argmax(ccc) * image[0].stats.delta # depends on [control=['except'], data=[]]
            # Convert the maximum cross-correlation time to an actual time
            if math.isnan(cc_max):
                print('Problematic trace, no cross correlation possible')
                continue # depends on [control=['if'], data=[]]
            else:
                picktime = image[0].stats.starttime + shift # depends on [control=['if'], data=[]]
        else:
            # Convert the maximum cross-correlation time to an actual time
            try:
                ccc = normxcorr2(tr.data, image[0].data) # depends on [control=['try'], data=[]]
            except Exception:
                print('Could not calculate cc')
                print('Image is %i long' % len(image[0].data))
                print('Template is %i long' % len(tr.data))
                continue # depends on [control=['except'], data=[]]
            cc_max = np.amax(ccc)
            if math.isnan(cc_max):
                print('Problematic trace, no cross correlation possible')
                continue # depends on [control=['if'], data=[]]
            else:
                picktime = image[0].stats.starttime + np.argmax(ccc) * image[0].stats.delta
        debug_print('Maximum cross-corr=%s' % cc_max, 3, debug)
        checksum += cc_max
        used_chans += 1
        if cc_max < min_cc:
            debug_print('Correlation below threshold, not used', 3, debug)
            continue # depends on [control=['if'], data=[]]
        cccsum += cc_max
        # Perhaps weight each pick by the cc val or cc val^2?
        # weight = np.amax(ccc) ** 2
        if temp_chan[-1] in vertical_chans:
            phase = 'P' # depends on [control=['if'], data=[]]
        # Only take the S-pick with the best correlation
        elif temp_chan[-1] in horizontal_chans:
            phase = 'S'
            debug_print('Making S-pick on: %s.%s.%s' % (temp_net, temp_sta, temp_chan), 4, debug)
            if temp_sta not in s_stachans.keys():
                s_stachans[temp_sta] = (temp_chan, np.amax(ccc), picktime) # depends on [control=['if'], data=['temp_sta']]
            elif temp_sta in s_stachans.keys():
                if np.amax(ccc) > s_stachans[temp_sta][1]:
                    picktime = picktime # depends on [control=['if'], data=[]]
                else:
                    continue # depends on [control=['if'], data=['temp_sta']] # depends on [control=['if'], data=[]]
        else:
            phase = None
        _waveform_id = WaveformStreamID(network_code=temp_net, station_code=temp_sta, channel_code=temp_chan)
        event.picks.append(Pick(waveform_id=_waveform_id, time=picktime, method_id=ResourceIdentifier('EQcorrscan'), phase_hint=phase, creation_info='eqcorrscan.core.lag_calc', evaluation_mode='automatic', comments=[Comment(text='cc_max=%s' % cc_max)]))
        event.resource_id = detection_id # depends on [control=['for'], data=['tr']]
    ccc_str = 'detect_val=%s' % cccsum
    event.comments.append(Comment(text=ccc_str))
    if used_chans == detect_chans:
        if pre_lag_ccsum is not None and checksum - pre_lag_ccsum < -(0.3 * pre_lag_ccsum):
            msg = 'lag-calc has decreased cccsum from %f to %f - ' % (pre_lag_ccsum, checksum)
            raise LagCalcError(msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    else:
        warnings.warn('Cannot check if cccsum is better, used %i channels for detection, but %i are used here' % (detect_chans, used_chans))
    return (i, event)
def _sanitize_values(arr):
    """
    return an ndarray for our input,
    in a platform independent manner
    """
    if hasattr(arr, 'values'):
        arr = arr.values
    else:
        # scalar
        if is_scalar(arr):
            arr = [arr]
        # ndarray
        if isinstance(arr, np.ndarray):
            pass
        elif is_list_like(arr) and len(arr) > 0:
            arr = maybe_convert_platform(arr)
        else:
            arr = np.asarray(arr)
    return arr
def function[_sanitize_values, parameter[arr]]: constant[ return an ndarray for our input, in a platform independent manner ] if call[name[hasattr], parameter[name[arr], constant[values]]] begin[:] variable[arr] assign[=] name[arr].values return[name[arr]]
keyword[def] identifier[_sanitize_values] ( identifier[arr] ): literal[string] keyword[if] identifier[hasattr] ( identifier[arr] , literal[string] ): identifier[arr] = identifier[arr] . identifier[values] keyword[else] : keyword[if] identifier[is_scalar] ( identifier[arr] ): identifier[arr] =[ identifier[arr] ] keyword[if] identifier[isinstance] ( identifier[arr] , identifier[np] . identifier[ndarray] ): keyword[pass] keyword[elif] identifier[is_list_like] ( identifier[arr] ) keyword[and] identifier[len] ( identifier[arr] )> literal[int] : identifier[arr] = identifier[maybe_convert_platform] ( identifier[arr] ) keyword[else] : identifier[arr] = identifier[np] . identifier[asarray] ( identifier[arr] ) keyword[return] identifier[arr]
def _sanitize_values(arr):
    """
    return an ndarray for our input,
    in a platform independent manner
    """
    if hasattr(arr, 'values'):
        arr = arr.values # depends on [control=['if'], data=[]]
    else:
        # scalar
        if is_scalar(arr):
            arr = [arr] # depends on [control=['if'], data=[]]
        # ndarray
        if isinstance(arr, np.ndarray):
            pass # depends on [control=['if'], data=[]]
        elif is_list_like(arr) and len(arr) > 0:
            arr = maybe_convert_platform(arr) # depends on [control=['if'], data=[]]
        else:
            arr = np.asarray(arr)
    return arr
def arrays(self):
    """Return an iterator over (name, value) pairs for arrays only.

    Examples
    --------
    >>> import zarr
    >>> g1 = zarr.group()
    >>> g2 = g1.create_group('foo')
    >>> g3 = g1.create_group('bar')
    >>> d1 = g1.create_dataset('baz', shape=100, chunks=10)
    >>> d2 = g1.create_dataset('quux', shape=200, chunks=20)
    >>> for n, v in g1.arrays():
    ...     print(n, type(v))
    baz <class 'zarr.core.Array'>
    quux <class 'zarr.core.Array'>

    """
    for key in sorted(listdir(self._store, self._path)):
        path = self._key_prefix + key
        if contains_array(self._store, path):
            yield key, Array(self._store, path=path,
                             read_only=self._read_only,
                             chunk_store=self._chunk_store,
                             cache_attrs=self.attrs.cache,
                             synchronizer=self._synchronizer)
def function[arrays, parameter[self]]: constant[Return an iterator over (name, value) pairs for arrays only. Examples -------- >>> import zarr >>> g1 = zarr.group() >>> g2 = g1.create_group('foo') >>> g3 = g1.create_group('bar') >>> d1 = g1.create_dataset('baz', shape=100, chunks=10) >>> d2 = g1.create_dataset('quux', shape=200, chunks=20) >>> for n, v in g1.arrays(): ... print(n, type(v)) baz <class 'zarr.core.Array'> quux <class 'zarr.core.Array'> ] for taget[name[key]] in starred[call[name[sorted], parameter[call[name[listdir], parameter[name[self]._store, name[self]._path]]]]] begin[:] variable[path] assign[=] binary_operation[name[self]._key_prefix + name[key]] if call[name[contains_array], parameter[name[self]._store, name[path]]] begin[:] <ast.Yield object at 0x7da1b19d9fc0>
keyword[def] identifier[arrays] ( identifier[self] ): literal[string] keyword[for] identifier[key] keyword[in] identifier[sorted] ( identifier[listdir] ( identifier[self] . identifier[_store] , identifier[self] . identifier[_path] )): identifier[path] = identifier[self] . identifier[_key_prefix] + identifier[key] keyword[if] identifier[contains_array] ( identifier[self] . identifier[_store] , identifier[path] ): keyword[yield] identifier[key] , identifier[Array] ( identifier[self] . identifier[_store] , identifier[path] = identifier[path] , identifier[read_only] = identifier[self] . identifier[_read_only] , identifier[chunk_store] = identifier[self] . identifier[_chunk_store] , identifier[cache_attrs] = identifier[self] . identifier[attrs] . identifier[cache] , identifier[synchronizer] = identifier[self] . identifier[_synchronizer] )
def arrays(self):
    """Return an iterator over (name, value) pairs for arrays only.

    Examples
    --------
    >>> import zarr
    >>> g1 = zarr.group()
    >>> g2 = g1.create_group('foo')
    >>> g3 = g1.create_group('bar')
    >>> d1 = g1.create_dataset('baz', shape=100, chunks=10)
    >>> d2 = g1.create_dataset('quux', shape=200, chunks=20)
    >>> for n, v in g1.arrays():
    ...     print(n, type(v))
    baz <class 'zarr.core.Array'>
    quux <class 'zarr.core.Array'>

    """
    for key in sorted(listdir(self._store, self._path)):
        path = self._key_prefix + key
        if contains_array(self._store, path):
            yield (key, Array(self._store, path=path, read_only=self._read_only, chunk_store=self._chunk_store, cache_attrs=self.attrs.cache, synchronizer=self._synchronizer)) # depends on [control=['if'], data=[]]
        # depends on [control=['for'], data=['key']]
def run(coro: 'Optional[Coroutine]' = None, *,
        loop: Optional[AbstractEventLoop] = None,
        shutdown_handler: Optional[Callable[[AbstractEventLoop], None]] = None,
        executor_workers: int = 10,
        executor: Optional[Executor] = None,
        use_uvloop: bool = False) -> None:
    """
    Start up the event loop, and wait for a signal to shut down.

    :param coro: Optionally supply a coroutine. The loop will still run if
        missing. The loop will continue to run after the supplied coroutine
        finishes. The supplied coroutine is typically a "main" coroutine
        from which all other work is spawned.
    :param loop: Optionally supply your own loop. If missing, the default
        loop attached to the current thread context will be used, i.e.,
        whatever ``asyncio.get_event_loop()`` returns.
    :param shutdown_handler: By default, SIGINT and SIGTERM will be handled
        and will stop the loop, thereby invoking the shutdown sequence.
        Alternatively you can supply your own shutdown handler function.
        It should conform to the type spec as shown in the function
        signature.
    :param executor_workers: The number of workers in the executor.
        (NOTE: ``run()`` creates a new executor instance internally,
        regardless of whether you supply your own loop.)
    :param executor: You can decide to use your own executor instance if
        you like.
    :param use_uvloop: The loop policy will be set to use uvloop. It is
        your responsibility to install uvloop. If missing, an
        ``ImportError`` will be raised.
    """
    logger.debug('Entering run()')
    assert not (loop and use_uvloop), (
        "'loop' and 'use_uvloop' parameters are mutually "
        "exclusive. (Just make your own uvloop and pass it in)."
    )
    if use_uvloop:
        import uvloop
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
    loop_was_supplied = bool(loop)
    if not loop_was_supplied:
        loop = get_event_loop()
    if coro:
        async def new_coro():
            """During shutdown, run_until_complete() will exit if a
            CancelledError bubbles up from anything in the group. To
            counteract that, we'll try to handle any CancelledErrors that
            bubble up from the given coro. This isn't fool-proof: if the
            user doesn't provide a coro, and instead creates their own with
            loop.create_task, that task might bubble a CancelledError into
            the run_until_complete()."""
            try:
                await coro
            except asyncio.CancelledError:
                pass
        loop.create_task(new_coro())
    shutdown_handler = shutdown_handler or _shutdown_handler
    if WINDOWS:  # pragma: no cover
        # This is to allow CTRL-C to be detected in a timely fashion,
        # see: https://bugs.python.org/issue23057#msg246316
        loop.create_task(windows_support_wakeup())

        # This is to be able to handle SIGBREAK.
        def windows_handler(sig, frame):
            # Disable the handler so it won't be called again.
            signame = signal.Signals(sig).name
            logger.critical('Received signal: %s. Stopping the loop.', signame)
            shutdown_handler(loop)

        signal.signal(signal.SIGBREAK, windows_handler)
        signal.signal(signal.SIGINT, windows_handler)
    else:
        loop.add_signal_handler(SIGINT, shutdown_handler, loop)
        loop.add_signal_handler(SIGTERM, shutdown_handler, loop)
    # TODO: We probably don't want to create a different executor if the
    # TODO: loop was supplied. (User might have put stuff on that loop's
    # TODO: executor).
    if not executor:
        logger.debug('Creating default executor')
        executor = ThreadPoolExecutor(max_workers=executor_workers)
    loop.set_default_executor(executor)
    try:
        loop.run_forever()
    except KeyboardInterrupt:  # pragma: no cover
        logger.info('Got KeyboardInterrupt')
        if WINDOWS:
            # Windows doesn't do any POSIX signal handling, and no
            # abstraction layer for signals is currently implemented in
            # asyncio. So we fall back to KeyboardInterrupt (triggered
            # by the user/environment sending CTRL-C, or signal.CTRL_C_EVENT)
            shutdown_handler()
    logger.info('Entering shutdown phase.')

    def sep():
        tasks = all_tasks(loop=loop)
        do_not_cancel = set()
        for t in tasks:
            # TODO: we don't need access to the coro. We could simply
            # TODO: store the task itself in the weakset.
            if t._coro in _DO_NOT_CANCEL_COROS:
                do_not_cancel.add(t)
        tasks -= do_not_cancel
        logger.info('Cancelling pending tasks.')
        for t in tasks:
            logger.debug('Cancelling task: %s', t)
            t.cancel()
        return tasks, do_not_cancel

    tasks, do_not_cancel = sep()
    # Here's a protip: if you group a bunch of tasks, and some of them
    # get cancelled, and they DON'T HANDLE THE CANCELLATION, then the
    # raised CancelledError will bubble up to, and stop the
    # loop.run_until_complete() line: meaning, not all the tasks in
    # the gathered group will actually be complete. You need to
    # enable this with the ``return_exceptions`` flag.
    group = gather(*tasks, *do_not_cancel, return_exceptions=True)
    logger.info('Running pending tasks till complete')
    # TODO: obtain all the results, and log any results that are exceptions
    # other than CancelledError. Will be useful for troubleshooting.
    loop.run_until_complete(group)
    logger.info('Waiting for executor shutdown.')
    executor.shutdown(wait=True)
    # If loop was supplied, it's up to the caller to close!
    if not loop_was_supplied:
        logger.info('Closing the loop.')
        loop.close()
    logger.critical('Leaving. Bye!')
def function[run, parameter[coro]]: constant[ Start up the event loop, and wait for a signal to shut down. :param coro: Optionally supply a coroutine. The loop will still run if missing. The loop will continue to run after the supplied coroutine finishes. The supplied coroutine is typically a "main" coroutine from which all other work is spawned. :param loop: Optionally supply your own loop. If missing, the default loop attached to the current thread context will be used, i.e., whatever ``asyncio.get_event_loop()`` returns. :param shutdown_handler: By default, SIGINT and SIGTERM will be handled and will stop the loop, thereby invoking the shutdown sequence. Alternatively you can supply your own shutdown handler function. It should conform to the type spec as shown in the function signature. :param executor_workers: The number of workers in the executor. (NOTE: ``run()`` creates a new executor instance internally, regardless of whether you supply your own loop.) :param executor: You can decide to use your own executor instance if you like. :param use_uvloop: The loop policy will be set to use uvloop. It is your responsibility to install uvloop. If missing, an ``ImportError`` will be raised. ] call[name[logger].debug, parameter[constant[Entering run()]]] assert[<ast.UnaryOp object at 0x7da1b12a86d0>] if name[use_uvloop] begin[:] import module[uvloop] call[name[asyncio].set_event_loop_policy, parameter[call[name[uvloop].EventLoopPolicy, parameter[]]]] variable[loop_was_supplied] assign[=] call[name[bool], parameter[name[loop]]] if <ast.UnaryOp object at 0x7da1b12a9b10> begin[:] variable[loop] assign[=] call[name[get_event_loop], parameter[]] if name[coro] begin[:] <ast.AsyncFunctionDef object at 0x7da1b12a8a30> call[name[loop].create_task, parameter[call[name[new_coro], parameter[]]]] variable[shutdown_handler] assign[=] <ast.BoolOp object at 0x7da1b12ab100> if name[WINDOWS] begin[:] call[name[loop].create_task, parameter[call[name[windows_support_wakeup], parameter[]]]] def function[windows_handler, parameter[sig, frame]]: variable[signame] assign[=] call[name[signal].Signals, parameter[name[sig]]].name call[name[logger].critical, parameter[constant[Received signal: %s. Stopping the loop.], name[signame]]] call[name[shutdown_handler], parameter[name[loop]]] call[name[signal].signal, parameter[name[signal].SIGBREAK, name[windows_handler]]] call[name[signal].signal, parameter[name[signal].SIGINT, name[windows_handler]]] if <ast.UnaryOp object at 0x7da1b12a89a0> begin[:] call[name[logger].debug, parameter[constant[Creating default executor]]] variable[executor] assign[=] call[name[ThreadPoolExecutor], parameter[]] call[name[loop].set_default_executor, parameter[name[executor]]] <ast.Try object at 0x7da1b12ab3a0> call[name[logger].info, parameter[constant[Entering shutdown phase.]]] def function[sep, parameter[]]: variable[tasks] assign[=] call[name[all_tasks], parameter[]] variable[do_not_cancel] assign[=] call[name[set], parameter[]] for taget[name[t]] in starred[name[tasks]] begin[:] if compare[name[t]._coro in name[_DO_NOT_CANCEL_COROS]] begin[:] call[name[do_not_cancel].add, parameter[name[t]]] <ast.AugAssign object at 0x7da1b12f2890> call[name[logger].info, parameter[constant[Cancelling pending tasks.]]] for taget[name[t]] in starred[name[tasks]] begin[:] call[name[logger].debug, parameter[constant[Cancelling task: %s], name[t]]] call[name[t].cancel, parameter[]] return[tuple[[<ast.Name object at 0x7da1b12f1f00>, <ast.Name object at 0x7da1b12f3df0>]]] <ast.Tuple object at 0x7da1b12f2f80> assign[=] call[name[sep], parameter[]] variable[group] assign[=] call[name[gather], parameter[<ast.Starred object at 0x7da1b12f3490>, <ast.Starred object at 0x7da1b12f3f70>]] call[name[logger].info, parameter[constant[Running pending tasks till complete]]] call[name[loop].run_until_complete, parameter[name[group]]] call[name[logger].info, parameter[constant[Waiting for executor shutdown.]]] call[name[executor].shutdown, parameter[]] if <ast.UnaryOp object at 0x7da1b12c0f40> begin[:] call[name[logger].info, parameter[constant[Closing the loop.]]] call[name[loop].close, parameter[]] call[name[logger].critical, parameter[constant[Leaving. Bye!]]]
keyword[def] identifier[run] ( identifier[coro] : literal[string] = keyword[None] ,*, identifier[loop] : identifier[Optional] [ identifier[AbstractEventLoop] ]= keyword[None] , identifier[shutdown_handler] : identifier[Optional] [ identifier[Callable] [[ identifier[AbstractEventLoop] ], keyword[None] ]]= keyword[None] , identifier[executor_workers] : identifier[int] = literal[int] , identifier[executor] : identifier[Optional] [ identifier[Executor] ]= keyword[None] , identifier[use_uvloop] : identifier[bool] = keyword[False] )-> keyword[None] : literal[string] identifier[logger] . identifier[debug] ( literal[string] ) keyword[assert] keyword[not] ( identifier[loop] keyword[and] identifier[use_uvloop] ),( literal[string] literal[string] ) keyword[if] identifier[use_uvloop] : keyword[import] identifier[uvloop] identifier[asyncio] . identifier[set_event_loop_policy] ( identifier[uvloop] . identifier[EventLoopPolicy] ()) identifier[loop_was_supplied] = identifier[bool] ( identifier[loop] ) keyword[if] keyword[not] identifier[loop_was_supplied] : identifier[loop] = identifier[get_event_loop] () keyword[if] identifier[coro] : keyword[async] keyword[def] identifier[new_coro] (): literal[string] keyword[try] : keyword[await] identifier[coro] keyword[except] identifier[asyncio] . identifier[CancelledError] : keyword[pass] identifier[loop] . identifier[create_task] ( identifier[new_coro] ()) identifier[shutdown_handler] = identifier[shutdown_handler] keyword[or] identifier[_shutdown_handler] keyword[if] identifier[WINDOWS] : identifier[loop] . identifier[create_task] ( identifier[windows_support_wakeup] ()) keyword[def] identifier[windows_handler] ( identifier[sig] , identifier[frame] ): identifier[signame] = identifier[signal] . identifier[Signals] ( identifier[sig] ). identifier[name] identifier[logger] . identifier[critical] ( literal[string] , identifier[signame] ) identifier[shutdown_handler] ( identifier[loop] ) identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGBREAK] , identifier[windows_handler] ) identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGINT] , identifier[windows_handler] ) keyword[else] : identifier[loop] . identifier[add_signal_handler] ( identifier[SIGINT] , identifier[shutdown_handler] , identifier[loop] ) identifier[loop] . identifier[add_signal_handler] ( identifier[SIGTERM] , identifier[shutdown_handler] , identifier[loop] ) keyword[if] keyword[not] identifier[executor] : identifier[logger] . identifier[debug] ( literal[string] ) identifier[executor] = identifier[ThreadPoolExecutor] ( identifier[max_workers] = identifier[executor_workers] ) identifier[loop] . identifier[set_default_executor] ( identifier[executor] ) keyword[try] : identifier[loop] . identifier[run_forever] () keyword[except] identifier[KeyboardInterrupt] : identifier[logger] . identifier[info] ( literal[string] ) keyword[if] identifier[WINDOWS] : identifier[shutdown_handler] () identifier[logger] . identifier[info] ( literal[string] ) keyword[def] identifier[sep] (): identifier[tasks] = identifier[all_tasks] ( identifier[loop] = identifier[loop] ) identifier[do_not_cancel] = identifier[set] () keyword[for] identifier[t] keyword[in] identifier[tasks] : keyword[if] identifier[t] . identifier[_coro] keyword[in] identifier[_DO_NOT_CANCEL_COROS] : identifier[do_not_cancel] . identifier[add] ( identifier[t] ) identifier[tasks] -= identifier[do_not_cancel] identifier[logger] . identifier[info] ( literal[string] ) keyword[for] identifier[t] keyword[in] identifier[tasks] : identifier[logger] . identifier[debug] ( literal[string] , identifier[t] ) identifier[t] . identifier[cancel] () keyword[return] identifier[tasks] , identifier[do_not_cancel] identifier[tasks] , identifier[do_not_cancel] = identifier[sep] () identifier[group] = identifier[gather] (* identifier[tasks] ,* identifier[do_not_cancel] , identifier[return_exceptions] = keyword[True] ) identifier[logger] . identifier[info] ( literal[string] ) identifier[loop] . identifier[run_until_complete] ( identifier[group] ) identifier[logger] . identifier[info] ( literal[string] ) identifier[executor] . identifier[shutdown] ( identifier[wait] = keyword[True] ) keyword[if] keyword[not] identifier[loop_was_supplied] : identifier[logger] . identifier[info] ( literal[string] ) identifier[loop] . identifier[close] () identifier[logger] . identifier[critical] ( literal[string] )
def run(coro: 'Optional[Coroutine]'=None, *, loop: Optional[AbstractEventLoop]=None, shutdown_handler: Optional[Callable[[AbstractEventLoop], None]]=None, executor_workers: int=10, executor: Optional[Executor]=None, use_uvloop: bool=False) -> None: """ Start up the event loop, and wait for a signal to shut down. :param coro: Optionally supply a coroutine. The loop will still run if missing. The loop will continue to run after the supplied coroutine finishes. The supplied coroutine is typically a "main" coroutine from which all other work is spawned. :param loop: Optionally supply your own loop. If missing, the default loop attached to the current thread context will be used, i.e., whatever ``asyncio.get_event_loop()`` returns. :param shutdown_handler: By default, SIGINT and SIGTERM will be handled and will stop the loop, thereby invoking the shutdown sequence. Alternatively you can supply your own shutdown handler function. It should conform to the type spec as shown in the function signature. :param executor_workers: The number of workers in the executor. (NOTE: ``run()`` creates a new executor instance internally, regardless of whether you supply your own loop.) :param executor: You can decide to use your own executor instance if you like. :param use_uvloop: The loop policy will be set to use uvloop. It is your responsibility to install uvloop. If missing, an ``ImportError`` will be raised. """ logger.debug('Entering run()') assert not (loop and use_uvloop), "'loop' and 'use_uvloop' parameters are mutually exclusive. (Just make your own uvloop and pass it in)." if use_uvloop: import uvloop asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) # depends on [control=['if'], data=[]] loop_was_supplied = bool(loop) if not loop_was_supplied: loop = get_event_loop() # depends on [control=['if'], data=[]] if coro: async def new_coro(): """During shutdown, run_until_complete() will exit if a CancelledError bubbles up from anything in the group. To counteract that, we'll try to handle any CancelledErrors that bubble up from the given coro. This isn't fool-proof: if the user doesn't provide a coro, and instead creates their own with loop.create_task, that task might bubble a CancelledError into the run_until_complete().""" try: await coro # depends on [control=['try'], data=[]] except asyncio.CancelledError: pass # depends on [control=['except'], data=[]] loop.create_task(new_coro()) # depends on [control=['if'], data=[]] shutdown_handler = shutdown_handler or _shutdown_handler if WINDOWS: # pragma: no cover # This is to allow CTRL-C to be detected in a timely fashion, # see: https://bugs.python.org/issue23057#msg246316 loop.create_task(windows_support_wakeup()) # This is to be able to handle SIGBREAK. def windows_handler(sig, frame): # Disable the handler so it won't be called again. signame = signal.Signals(sig).name logger.critical('Received signal: %s. Stopping the loop.', signame) shutdown_handler(loop) signal.signal(signal.SIGBREAK, windows_handler) signal.signal(signal.SIGINT, windows_handler) # depends on [control=['if'], data=[]] else: loop.add_signal_handler(SIGINT, shutdown_handler, loop) loop.add_signal_handler(SIGTERM, shutdown_handler, loop) # TODO: We probably don't want to create a different executor if the # TODO: loop was supplied. (User might have put stuff on that loop's # TODO: executor). if not executor: logger.debug('Creating default executor') executor = ThreadPoolExecutor(max_workers=executor_workers) # depends on [control=['if'], data=[]] loop.set_default_executor(executor) try: loop.run_forever() # depends on [control=['try'], data=[]] except KeyboardInterrupt: # pragma: no cover logger.info('Got KeyboardInterrupt') if WINDOWS: # Windows doesn't do any POSIX signal handling, and no # abstraction layer for signals is currently implemented in # asyncio. So we fall back to KeyboardInterrupt (triggered # by the user/environment sending CTRL-C, or signal.CTRL_C_EVENT shutdown_handler() # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] logger.info('Entering shutdown phase.') def sep(): tasks = all_tasks(loop=loop) do_not_cancel = set() for t in tasks: # TODO: we don't need access to the coro. We could simply # TODO: store the task itself in the weakset. if t._coro in _DO_NOT_CANCEL_COROS: do_not_cancel.add(t) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['t']] tasks -= do_not_cancel logger.info('Cancelling pending tasks.') for t in tasks: logger.debug('Cancelling task: %s', t) t.cancel() # depends on [control=['for'], data=['t']] return (tasks, do_not_cancel) (tasks, do_not_cancel) = sep() # Here's a protip: if you group a bunch of tasks, and some of them # get cancelled, and they DON'T HANDLE THE CANCELLATION, then the # raised CancelledError will bubble up to, and stop the # loop.run_until_complete() line: meaning, not all the tasks in # the gathered group will actually be complete. You need to # enable this with the ``return_exceptions`` flag. group = gather(*tasks, *do_not_cancel, return_exceptions=True) logger.info('Running pending tasks till complete') # TODO: obtain all the results, and log any results that are exceptions # other than CancelledError. Will be useful for troubleshooting. loop.run_until_complete(group) logger.info('Waiting for executor shutdown.') executor.shutdown(wait=True) # If loop was supplied, it's up to the caller to close! if not loop_was_supplied: logger.info('Closing the loop.') loop.close() # depends on [control=['if'], data=[]] logger.critical('Leaving. Bye!')
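A minimal usage sketch for the run() helper above, assuming it is importable; the module name aiorun is an assumption, so adjust the import to wherever run() actually lives:

import asyncio

from aiorun import run  # hypothetical import path for the run() defined above

async def main():
    # run() keeps the loop alive after main() returns; shut down with
    # Ctrl-C (SIGINT) or SIGTERM, which invokes the shutdown sequence.
    await asyncio.sleep(1.0)
    print('main() finished; the loop keeps running until a signal arrives')

if __name__ == '__main__':
    run(main())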
def get_template(self, name=None, params=None): """ Retrieve an index template by its name. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html>`_ :arg name: The name of the template :arg flat_settings: Return settings in flat format (default: false) :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg include_type_name: Specify whether requests and responses should include a type name (default: depends on Elasticsearch version). """ return self.transport.perform_request( "GET", _make_path("_template", name), params=params )
def function[get_template, parameter[self, name, params]]: constant[ Retrieve an index template by its name. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html>`_ :arg name: The name of the template :arg flat_settings: Return settings in flat format (default: false) :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg include_type_name: Specify whether requests and responses should include a type name (default: depends on Elasticsearch version). ] return[call[name[self].transport.perform_request, parameter[constant[GET], call[name[_make_path], parameter[constant[_template], name[name]]]]]]
keyword[def] identifier[get_template] ( identifier[self] , identifier[name] = keyword[None] , identifier[params] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[transport] . identifier[perform_request] ( literal[string] , identifier[_make_path] ( literal[string] , identifier[name] ), identifier[params] = identifier[params] )
def get_template(self, name=None, params=None): """ Retrieve an index template by its name. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html>`_ :arg name: The name of the template :arg flat_settings: Return settings in flat format (default: false) :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg include_type_name: Specify whether requests and responses should include a type name (default: depends on Elasticsearch version). """ return self.transport.perform_request('GET', _make_path('_template', name), params=params)
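A usage sketch for get_template() above, assuming the conventional elasticsearch-py layout in which this method is reached through the client's indices namespace; the endpoint and template name are placeholders:

from elasticsearch import Elasticsearch

es = Elasticsearch(['http://localhost:9200'])  # placeholder endpoint

# Fetch an index template by name; extra query options such as
# flat_settings are forwarded through `params`, matching the signature above.
template = es.indices.get_template(name='template_1', params={'flat_settings': 'true'})
print(template)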
def is_stopped(self, *args, **kwargs): """Return whether this container is stopped""" kwargs["waiting"] = False return self.wait_till_stopped(*args, **kwargs)
def function[is_stopped, parameter[self]]: constant[Return whether this container is stopped] call[name[kwargs]][constant[waiting]] assign[=] constant[False] return[call[name[self].wait_till_stopped, parameter[<ast.Starred object at 0x7da20c6c7df0>]]]
keyword[def] identifier[is_stopped] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[False] keyword[return] identifier[self] . identifier[wait_till_stopped] (* identifier[args] ,** identifier[kwargs] )
def is_stopped(self, *args, **kwargs): """Return whether this container is stopped""" kwargs['waiting'] = False return self.wait_till_stopped(*args, **kwargs)
def enter_eventloop(self): """enter eventloop""" self.log.info("entering eventloop") # restore default_int_handler signal(SIGINT, default_int_handler) while self.eventloop is not None: try: self.eventloop(self) except KeyboardInterrupt: # Ctrl-C shouldn't crash the kernel self.log.error("KeyboardInterrupt caught in kernel") continue else: # eventloop exited cleanly, this means we should stop (right?) self.eventloop = None break self.log.info("exiting eventloop") # if eventloop exits, IOLoop should stop ioloop.IOLoop.instance().stop()
def function[enter_eventloop, parameter[self]]: constant[enter eventloop] call[name[self].log.info, parameter[constant[entering eventloop]]] call[name[signal], parameter[name[SIGINT], name[default_int_handler]]] while compare[name[self].eventloop is_not constant[None]] begin[:] <ast.Try object at 0x7da1b26ae7a0> call[name[self].log.info, parameter[constant[exiting eventloop]]] call[call[name[ioloop].IOLoop.instance, parameter[]].stop, parameter[]]
keyword[def] identifier[enter_eventloop] ( identifier[self] ): literal[string] identifier[self] . identifier[log] . identifier[info] ( literal[string] ) identifier[signal] ( identifier[SIGINT] , identifier[default_int_handler] ) keyword[while] identifier[self] . identifier[eventloop] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[self] . identifier[eventloop] ( identifier[self] ) keyword[except] identifier[KeyboardInterrupt] : identifier[self] . identifier[log] . identifier[error] ( literal[string] ) keyword[continue] keyword[else] : identifier[self] . identifier[eventloop] = keyword[None] keyword[break] identifier[self] . identifier[log] . identifier[info] ( literal[string] ) identifier[ioloop] . identifier[IOLoop] . identifier[instance] (). identifier[stop] ()
def enter_eventloop(self): """enter eventloop""" self.log.info('entering eventloop') # restore default_int_handler signal(SIGINT, default_int_handler) while self.eventloop is not None: try: self.eventloop(self) # depends on [control=['try'], data=[]] except KeyboardInterrupt: # Ctrl-C shouldn't crash the kernel self.log.error('KeyboardInterrupt caught in kernel') continue # depends on [control=['except'], data=[]] else: # eventloop exited cleanly, this means we should stop (right?) self.eventloop = None break # depends on [control=['while'], data=[]] self.log.info('exiting eventloop') # if eventloop exits, IOLoop should stop ioloop.IOLoop.instance().stop()
def summary(args): """ %prog summary blastfile Provide summary on id% and cov%, for both query and reference. Often used in comparing genomes (based on NUCMER results). """ p = OptionParser(summary.__doc__) p.add_option("--strict", default=False, action="store_true", help="Strict 'gapless' mode. Exclude gaps from covered base.") p.add_option("--tabular", default=False, action="store_true", help="Print succinct tabular output") opts, args = p.parse_args(args) if len(args) != 1: sys.exit(not p.print_help()) blastfile, = args alignstats = get_stats(blastfile, strict=opts.strict) if opts.tabular: print(str(alignstats)) else: alignstats.print_stats()
def function[summary, parameter[args]]: constant[ %prog summary blastfile Provide summary on id% and cov%, for both query and reference. Often used in comparing genomes (based on NUCMER results). ] variable[p] assign[=] call[name[OptionParser], parameter[name[summary].__doc__]] call[name[p].add_option, parameter[constant[--strict]]] call[name[p].add_option, parameter[constant[--tabular]]] <ast.Tuple object at 0x7da2047ea1a0> assign[=] call[name[p].parse_args, parameter[name[args]]] if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[1]] begin[:] call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b08a1900>]] <ast.Tuple object at 0x7da1b08a27d0> assign[=] name[args] variable[alignstats] assign[=] call[name[get_stats], parameter[name[blastfile]]] if name[opts].tabular begin[:] call[name[print], parameter[call[name[str], parameter[name[alignstats]]]]]
keyword[def] identifier[summary] ( identifier[args] ): literal[string] identifier[p] = identifier[OptionParser] ( identifier[summary] . identifier[__doc__] ) identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] ) keyword[if] identifier[len] ( identifier[args] )!= literal[int] : identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ()) identifier[blastfile] ,= identifier[args] identifier[alignstats] = identifier[get_stats] ( identifier[blastfile] , identifier[strict] = identifier[opts] . identifier[strict] ) keyword[if] identifier[opts] . identifier[tabular] : identifier[print] ( identifier[str] ( identifier[alignstats] )) keyword[else] : identifier[alignstats] . identifier[print_stats] ()
def summary(args): """ %prog summary blastfile Provide summary on id% and cov%, for both query and reference. Often used in comparing genomes (based on NUCMER results). """ p = OptionParser(summary.__doc__) p.add_option('--strict', default=False, action='store_true', help="Strict 'gapless' mode. Exclude gaps from covered base.") p.add_option('--tabular', default=False, action='store_true', help='Print succinct tabular output') (opts, args) = p.parse_args(args) if len(args) != 1: sys.exit(not p.print_help()) # depends on [control=['if'], data=[]] (blastfile,) = args alignstats = get_stats(blastfile, strict=opts.strict) if opts.tabular: print(str(alignstats)) # depends on [control=['if'], data=[]] else: alignstats.print_stats()
def smoothline(document, coords): "smoothed polyline" element = document.createElement('path') path = [] points = [(coords[i], coords[i+1]) for i in range(0, len(coords), 2)] def pt(points): x0, y0 = points[0] x1, y1 = points[1] p0 = (2*x0-x1, 2*y0-y1) x0, y0 = points[-1] x1, y1 = points[-2] pn = (2*x0-x1, 2*y0-y1) p = [p0] + points[1:-1] + [pn] for i in range(1, len(points)-1): a = p[i-1] b = p[i] c = p[i+1] yield lerp(a, b, 0.5), b, lerp(b, c, 0.5) for i, (A, B, C) in enumerate(pt(points)): if i == 0: path.append("M%s,%s Q%s,%s %s,%s" % (A[0], A[1], B[0], B[1], C[0], C[1])) else: path.append("T%s,%s" % (C[0], C[1])) element.setAttribute('d', ' '.join(path)) return element
def function[smoothline, parameter[document, coords]]: constant[smoothed polyline] variable[element] assign[=] call[name[document].createElement, parameter[constant[path]]] variable[path] assign[=] list[[]] variable[points] assign[=] <ast.ListComp object at 0x7da1b0e2d0f0> def function[pt, parameter[points]]: <ast.Tuple object at 0x7da1b0e2c880> assign[=] call[name[points]][constant[0]] <ast.Tuple object at 0x7da1b0e2cfa0> assign[=] call[name[points]][constant[1]] variable[p0] assign[=] tuple[[<ast.BinOp object at 0x7da1b0e2c610>, <ast.BinOp object at 0x7da1b0e2ec50>]] <ast.Tuple object at 0x7da1b0e2e7a0> assign[=] call[name[points]][<ast.UnaryOp object at 0x7da1b0e2f040>] <ast.Tuple object at 0x7da1b0e2d0c0> assign[=] call[name[points]][<ast.UnaryOp object at 0x7da1b0ef6170>] variable[pn] assign[=] tuple[[<ast.BinOp object at 0x7da1b0ef5a80>, <ast.BinOp object at 0x7da1b0ef4f10>]] variable[p] assign[=] binary_operation[binary_operation[list[[<ast.Name object at 0x7da1b0ef62c0>]] + call[name[points]][<ast.Slice object at 0x7da1b0ef6f50>]] + list[[<ast.Name object at 0x7da1b0ef6020>]]] for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[call[name[len], parameter[name[points]]] - constant[1]]]]] begin[:] variable[a] assign[=] call[name[p]][binary_operation[name[i] - constant[1]]] variable[b] assign[=] call[name[p]][name[i]] variable[c] assign[=] call[name[p]][binary_operation[name[i] + constant[1]]] <ast.Yield object at 0x7da1b0e33a30> for taget[tuple[[<ast.Name object at 0x7da1b0e32b30>, <ast.Tuple object at 0x7da1b0e32260>]]] in starred[call[name[enumerate], parameter[call[name[pt], parameter[name[points]]]]]] begin[:] if compare[name[i] equal[==] constant[0]] begin[:] call[name[path].append, parameter[binary_operation[constant[M%s,%s Q%s,%s %s,%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b0e32f20>, <ast.Subscript object at 0x7da1b0ef5060>, <ast.Subscript object at 0x7da1b0ef4580>, <ast.Subscript object at 0x7da1b0ef4490>, <ast.Subscript object at 0x7da1b0ef5630>, <ast.Subscript object at 0x7da1b0ef7490>]]]]] call[name[element].setAttribute, parameter[constant[d], call[constant[ ].join, parameter[name[path]]]]] return[name[element]]
keyword[def] identifier[smoothline] ( identifier[document] , identifier[coords] ): literal[string] identifier[element] = identifier[document] . identifier[createElement] ( literal[string] ) identifier[path] =[] identifier[points] =[( identifier[coords] [ identifier[i] ], identifier[coords] [ identifier[i] + literal[int] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[coords] ), literal[int] )] keyword[def] identifier[pt] ( identifier[points] ): identifier[x0] , identifier[y0] = identifier[points] [ literal[int] ] identifier[x1] , identifier[y1] = identifier[points] [ literal[int] ] identifier[p0] =( literal[int] * identifier[x0] - identifier[x1] , literal[int] * identifier[y0] - identifier[y1] ) identifier[x0] , identifier[y0] = identifier[points] [- literal[int] ] identifier[x1] , identifier[y1] = identifier[points] [- literal[int] ] identifier[pn] =( literal[int] * identifier[x0] - identifier[x1] , literal[int] * identifier[y0] - identifier[y1] ) identifier[p] =[ identifier[p0] ]+ identifier[points] [ literal[int] :- literal[int] ]+[ identifier[pn] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[points] )- literal[int] ): identifier[a] = identifier[p] [ identifier[i] - literal[int] ] identifier[b] = identifier[p] [ identifier[i] ] identifier[c] = identifier[p] [ identifier[i] + literal[int] ] keyword[yield] identifier[lerp] ( identifier[a] , identifier[b] , literal[int] ), identifier[b] , identifier[lerp] ( identifier[b] , identifier[c] , literal[int] ) keyword[for] identifier[i] ,( identifier[A] , identifier[B] , identifier[C] ) keyword[in] identifier[enumerate] ( identifier[pt] ( identifier[points] )): keyword[if] identifier[i] == literal[int] : identifier[path] . identifier[append] ( literal[string] %( identifier[A] [ literal[int] ], identifier[A] [ literal[int] ], identifier[B] [ literal[int] ], identifier[B] [ literal[int] ], identifier[C] [ literal[int] ], identifier[C] [ literal[int] ])) keyword[else] : identifier[path] . identifier[append] ( literal[string] %( identifier[C] [ literal[int] ], identifier[C] [ literal[int] ])) identifier[element] . identifier[setAttribute] ( literal[string] , literal[string] . identifier[join] ( identifier[path] )) keyword[return] identifier[element]
def smoothline(document, coords): """smoothed polyline""" element = document.createElement('path') path = [] points = [(coords[i], coords[i + 1]) for i in range(0, len(coords), 2)] def pt(points): (x0, y0) = points[0] (x1, y1) = points[1] p0 = (2 * x0 - x1, 2 * y0 - y1) (x0, y0) = points[-1] (x1, y1) = points[-2] pn = (2 * x0 - x1, 2 * y0 - y1) p = [p0] + points[1:-1] + [pn] for i in range(1, len(points) - 1): a = p[i - 1] b = p[i] c = p[i + 1] yield (lerp(a, b, 0.5), b, lerp(b, c, 0.5)) # depends on [control=['for'], data=['i']] for (i, (A, B, C)) in enumerate(pt(points)): if i == 0: path.append('M%s,%s Q%s,%s %s,%s' % (A[0], A[1], B[0], B[1], C[0], C[1])) # depends on [control=['if'], data=[]] else: path.append('T%s,%s' % (C[0], C[1])) # depends on [control=['for'], data=[]] element.setAttribute('d', ' '.join(path)) return element
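A usage sketch for smoothline() above; the function relies on a module-level lerp() helper that is not part of the snippet, so a plausible linear-interpolation version is supplied here as an assumption:

from xml.dom.minidom import Document

def lerp(a, b, t):
    # Linear interpolation between 2-D points a and b at parameter t.
    return (a[0] + (b[0] - a[0]) * t, a[1] + (b[1] - a[1]) * t)

doc = Document()
# Coordinates are a flat list: x0, y0, x1, y1, ...
element = smoothline(doc, [0, 0, 50, 80, 100, 20, 150, 60])
print(element.toxml())  # e.g. <path d="M.. Q.. .. T.."/>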
def _from_binary_stdinfo(cls, binary_stream): """See base class.""" ''' TIMESTAMPS(32) Creation time - 8 File altered time - 8 MFT/Metadata altered time - 8 Accessed time - 8 Flags - 4 (FileInfoFlags) Maximum number of versions - 4 Version number - 4 Class id - 4 Owner id - 4 (NTFS 3+) Security id - 4 (NTFS 3+) Quota charged - 8 (NTFS 3+) Update Sequence Number (USN) - 8 (NTFS 3+) ''' if len(binary_stream) == cls._REPR.size: #check if it is v3 by size of the stream t_created, t_changed, t_mft_changed, t_accessed, flags, m_ver, ver, \ c_id, o_id, s_id, quota_charged, usn = cls._REPR.unpack(binary_stream) nw_obj = cls( ( Timestamps((convert_filetime(t_created), convert_filetime(t_changed), convert_filetime(t_mft_changed), convert_filetime(t_accessed)) ), FileInfoFlags(flags), m_ver, ver, c_id, o_id, s_id, quota_charged, usn)) else: #if the content is not using v3 extension, add the missing stuff for consistency t_created, t_changed, t_mft_changed, t_accessed, flags, m_ver, ver, \ c_id = cls._REPR_NO_NFTS_3_EXTENSION.unpack(binary_stream) nw_obj = cls( ( Timestamps((convert_filetime(t_created), convert_filetime(t_changed), convert_filetime(t_mft_changed), convert_filetime(t_accessed)) ), FileInfoFlags(flags), m_ver, ver, c_id, None, None, None, None)) _MOD_LOGGER.debug("Attempted to unpack STANDARD_INFORMATION from \"%s\"\nResult: %s", binary_stream.tobytes(), nw_obj) return nw_obj
def function[_from_binary_stdinfo, parameter[cls, binary_stream]]: constant[See base class.] constant[ TIMESTAMPS(32) Creation time - 8 File altered time - 8 MFT/Metadata altered time - 8 Accessed time - 8 Flags - 4 (FileInfoFlags) Maximum number of versions - 4 Version number - 4 Class id - 4 Owner id - 4 (NTFS 3+) Security id - 4 (NTFS 3+) Quota charged - 8 (NTFS 3+) Update Sequence Number (USN) - 8 (NTFS 3+) ] if compare[call[name[len], parameter[name[binary_stream]]] equal[==] name[cls]._REPR.size] begin[:] <ast.Tuple object at 0x7da1b1309360> assign[=] call[name[cls]._REPR.unpack, parameter[name[binary_stream]]] variable[nw_obj] assign[=] call[name[cls], parameter[tuple[[<ast.Call object at 0x7da1b1342350>, <ast.Call object at 0x7da1b13cea10>, <ast.Name object at 0x7da1b13ce920>, <ast.Name object at 0x7da1b13ce890>, <ast.Name object at 0x7da1b13cebf0>, <ast.Name object at 0x7da1b13ce380>, <ast.Name object at 0x7da1b13cdf30>, <ast.Name object at 0x7da1b13cdab0>, <ast.Name object at 0x7da1b13ce350>]]]] call[name[_MOD_LOGGER].debug, parameter[constant[Attempted to unpack STANDARD_INFORMATION from "%s" Result: %s], call[name[binary_stream].tobytes, parameter[]], name[nw_obj]]] return[name[nw_obj]]
keyword[def] identifier[_from_binary_stdinfo] ( identifier[cls] , identifier[binary_stream] ): literal[string] literal[string] keyword[if] identifier[len] ( identifier[binary_stream] )== identifier[cls] . identifier[_REPR] . identifier[size] : identifier[t_created] , identifier[t_changed] , identifier[t_mft_changed] , identifier[t_accessed] , identifier[flags] , identifier[m_ver] , identifier[ver] , identifier[c_id] , identifier[o_id] , identifier[s_id] , identifier[quota_charged] , identifier[usn] = identifier[cls] . identifier[_REPR] . identifier[unpack] ( identifier[binary_stream] ) identifier[nw_obj] = identifier[cls] ( ( identifier[Timestamps] (( identifier[convert_filetime] ( identifier[t_created] ), identifier[convert_filetime] ( identifier[t_changed] ), identifier[convert_filetime] ( identifier[t_mft_changed] ), identifier[convert_filetime] ( identifier[t_accessed] )) ), identifier[FileInfoFlags] ( identifier[flags] ), identifier[m_ver] , identifier[ver] , identifier[c_id] , identifier[o_id] , identifier[s_id] , identifier[quota_charged] , identifier[usn] )) keyword[else] : identifier[t_created] , identifier[t_changed] , identifier[t_mft_changed] , identifier[t_accessed] , identifier[flags] , identifier[m_ver] , identifier[ver] , identifier[c_id] = identifier[cls] . identifier[_REPR_NO_NFTS_3_EXTENSION] . identifier[unpack] ( identifier[binary_stream] ) identifier[nw_obj] = identifier[cls] ( ( identifier[Timestamps] (( identifier[convert_filetime] ( identifier[t_created] ), identifier[convert_filetime] ( identifier[t_changed] ), identifier[convert_filetime] ( identifier[t_mft_changed] ), identifier[convert_filetime] ( identifier[t_accessed] )) ), identifier[FileInfoFlags] ( identifier[flags] ), identifier[m_ver] , identifier[ver] , identifier[c_id] , keyword[None] , keyword[None] , keyword[None] , keyword[None] )) identifier[_MOD_LOGGER] . identifier[debug] ( literal[string] , identifier[binary_stream] . identifier[tobytes] (), identifier[nw_obj] ) keyword[return] identifier[nw_obj]
def _from_binary_stdinfo(cls, binary_stream): """See base class.""" '\n TIMESTAMPS(32)\n Creation time - 8\n File altered time - 8\n MFT/Metadata altered time - 8\n Accessed time - 8\n Flags - 4 (FileInfoFlags)\n Maximum number of versions - 4\n Version number - 4\n Class id - 4\n Owner id - 4 (NTFS 3+)\n Security id - 4 (NTFS 3+)\n Quota charged - 8 (NTFS 3+)\n Update Sequence Number (USN) - 8 (NTFS 3+)\n ' if len(binary_stream) == cls._REPR.size: #check if it is v3 by size of the stream (t_created, t_changed, t_mft_changed, t_accessed, flags, m_ver, ver, c_id, o_id, s_id, quota_charged, usn) = cls._REPR.unpack(binary_stream) nw_obj = cls((Timestamps((convert_filetime(t_created), convert_filetime(t_changed), convert_filetime(t_mft_changed), convert_filetime(t_accessed))), FileInfoFlags(flags), m_ver, ver, c_id, o_id, s_id, quota_charged, usn)) # depends on [control=['if'], data=[]] else: #if the content is not using v3 extension, add the missing stuff for consistency (t_created, t_changed, t_mft_changed, t_accessed, flags, m_ver, ver, c_id) = cls._REPR_NO_NFTS_3_EXTENSION.unpack(binary_stream) nw_obj = cls((Timestamps((convert_filetime(t_created), convert_filetime(t_changed), convert_filetime(t_mft_changed), convert_filetime(t_accessed))), FileInfoFlags(flags), m_ver, ver, c_id, None, None, None, None)) _MOD_LOGGER.debug('Attempted to unpack STANDARD_INFORMATION from "%s"\nResult: %s', binary_stream.tobytes(), nw_obj) return nw_obj
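The byte widths listed in the comment block of _from_binary_stdinfo() above pin down the two record sizes the function discriminates on; here is a hedged reconstruction of the struct layouts (the exact format strings are an assumption, not taken from the source):

import struct

# 4 FILETIME timestamps (8 bytes each) + 6 four-byte fields
# (flags, max versions, version, class id, owner id, security id)
# + the NTFS 3+ extension: quota charged (8) and USN (8) = 72 bytes.
_REPR = struct.Struct('<4Q6I2Q')

# Without the NTFS 3+ extension, the record ends after class id = 48 bytes.
_REPR_NO_NFTS_3_EXTENSION = struct.Struct('<4Q4I')

assert _REPR.size == 72
assert _REPR_NO_NFTS_3_EXTENSION.size == 48  # len(binary_stream) picks the branch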
def post(self, url, data): """Send an HTTP POST request to a URL and return the result. """ headers = { "Content-type": "application/x-www-form-urlencoded", "Accept": "text/json" } self.conn.request("POST", url, data, headers) return self._process_response()
def function[post, parameter[self, url, data]]: constant[Send an HTTP POST request to a URL and return the result. ] variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da204961450>, <ast.Constant object at 0x7da2049607f0>], [<ast.Constant object at 0x7da204962d40>, <ast.Constant object at 0x7da204960940>]] call[name[self].conn.request, parameter[constant[POST], name[url], name[data], name[headers]]] return[call[name[self]._process_response, parameter[]]]
keyword[def] identifier[post] ( identifier[self] , identifier[url] , identifier[data] ): literal[string] identifier[headers] ={ literal[string] : literal[string] , literal[string] : literal[string] } identifier[self] . identifier[conn] . identifier[request] ( literal[string] , identifier[url] , identifier[data] , identifier[headers] ) keyword[return] identifier[self] . identifier[_process_response] ()
def post(self, url, data): """Send an HTTP POST request to a URL and return the result. """ headers = {'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/json'} self.conn.request('POST', url, data, headers) return self._process_response()
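A self-contained sketch of the kind of client class the post() method above appears to belong to; the http.client connection in self.conn and the JSON-decoding _process_response() helper are assumptions inferred from the method body:

import http.client
import json
from urllib.parse import urlencode

class ApiClient:
    def __init__(self, host):
        # Assumed: self.conn is a persistent http.client connection.
        self.conn = http.client.HTTPSConnection(host)

    def _process_response(self):
        # Assumed helper: read the pending response and decode it as JSON.
        resp = self.conn.getresponse()
        return json.loads(resp.read().decode('utf-8'))

    def post(self, url, data):
        """Send an HTTP POST request to a URL and return the result."""
        headers = {"Content-type": "application/x-www-form-urlencoded",
                   "Accept": "text/json"}
        self.conn.request("POST", url, data, headers)
        return self._process_response()

# client = ApiClient('api.example.com')
# result = client.post('/v1/items', urlencode({'name': 'demo'}))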
def execute(helper, config, args): """ Rebuilds an environment """ env_config = parse_env_config(config, args.environment) helper.rebuild_environment(args.environment) # wait if not args.dont_wait: helper.wait_for_environments(args.environment, health='Green', status='Ready')
def function[execute, parameter[helper, config, args]]: constant[ Rebuilds an environment ] variable[env_config] assign[=] call[name[parse_env_config], parameter[name[config], name[args].environment]] call[name[helper].rebuild_environment, parameter[name[args].environment]] if <ast.UnaryOp object at 0x7da1affc04c0> begin[:] call[name[helper].wait_for_environments, parameter[name[args].environment]]
keyword[def] identifier[execute] ( identifier[helper] , identifier[config] , identifier[args] ): literal[string] identifier[env_config] = identifier[parse_env_config] ( identifier[config] , identifier[args] . identifier[environment] ) identifier[helper] . identifier[rebuild_environment] ( identifier[args] . identifier[environment] ) keyword[if] keyword[not] identifier[args] . identifier[dont_wait] : identifier[helper] . identifier[wait_for_environments] ( identifier[args] . identifier[environment] , identifier[health] = literal[string] , identifier[status] = literal[string] )
def execute(helper, config, args): """ Rebuilds an environment """ env_config = parse_env_config(config, args.environment) helper.rebuild_environment(args.environment) # wait if not args.dont_wait: helper.wait_for_environments(args.environment, health='Green', status='Ready') # depends on [control=['if'], data=[]]
def execute_callback(self, *args, **kwargs): """Executes a callback and returns the proper response. Refer to :meth:`sijax.Sijax.execute_callback` for more details. """ response = self._sijax.execute_callback(*args, **kwargs) return _make_response(response)
def function[execute_callback, parameter[self]]: constant[Executes a callback and returns the proper response. Refer to :meth:`sijax.Sijax.execute_callback` for more details. ] variable[response] assign[=] call[name[self]._sijax.execute_callback, parameter[<ast.Starred object at 0x7da1b10c6920>]] return[call[name[_make_response], parameter[name[response]]]]
keyword[def] identifier[execute_callback] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[response] = identifier[self] . identifier[_sijax] . identifier[execute_callback] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[_make_response] ( identifier[response] )
def execute_callback(self, *args, **kwargs): """Executes a callback and returns the proper response. Refer to :meth:`sijax.Sijax.execute_callback` for more details. """ response = self._sijax.execute_callback(*args, **kwargs) return _make_response(response)
def forecast(stl, fc_func, steps=10, seasonal=False, **fc_func_kwargs): """Forecast the given decomposition ``stl`` forward by ``steps`` steps using the forecasting function ``fc_func``, optionally including the calculated seasonality. This is an additive model, Y[t] = T[t] + S[t] + e[t] Args: stl (a modified statsmodels.tsa.seasonal.DecomposeResult): STL decomposition of observed time series created using the ``stldecompose.decompose()`` method. fc_func (function): Function which takes an array of observations and returns a single valued forecast for the next point. steps (int, optional): Number of forward steps to include in the forecast seasonal (bool, optional): Include seasonal component in forecast fc_func_kwargs: keyword arguments All remaining arguments are passed to the forecasting function ``fc_func`` Returns: forecast_frame (pd.Dataframe): A ``pandas.Dataframe`` containing forecast values and a DatetimeIndex matching the observed index. """ # container for forecast values forecast_array = np.array([]) # forecast trend # unpack precalculated trend array stl frame trend_array = stl.trend # iteratively forecast trend ("seasonally adjusted") component # note: this loop can be slow for step in range(steps): # make this prediction on all available data pred = fc_func(np.append(trend_array, forecast_array), **fc_func_kwargs) # add this prediction to current array forecast_array = np.append(forecast_array, pred) col_name = fc_func.__name__ # forecast start and index are determined by observed data observed_timedelta = stl.observed.index[-1] - stl.observed.index[-2] forecast_idx_start = stl.observed.index[-1] + observed_timedelta forecast_idx = pd.date_range(start=forecast_idx_start, periods=steps, freq=pd.tseries.frequencies.to_offset(observed_timedelta)) # (optionally) forecast seasonal & combine if seasonal: # track index and value of max correlation seasonal_ix = 0 max_correlation = -np.inf # loop over indexes=length of period avgs detrended_array = np.asanyarray(stl.observed - stl.trend).squeeze() for i, x in enumerate(stl.period_averages): # work slices backward from end of detrended observations if i == 0: # slicing w/ [x:-0] doesn't work detrended_slice = detrended_array[-len(stl.period_averages):] else: detrended_slice = detrended_array[-(len(stl.period_averages) + i):-i] # calculate corr b/w period_avgs and detrend_slice this_correlation = np.correlate(detrended_slice, stl.period_averages)[0] if this_correlation > max_correlation: # update ix and max correlation max_correlation = this_correlation seasonal_ix = i # roll seasonal signal to matching phase rolled_period_averages = np.roll(stl.period_averages, -seasonal_ix) # tile as many times as needed to reach "steps", then truncate tiled_averages = np.tile(rolled_period_averages, (steps // len(stl.period_averages) + 1))[:steps] # add seasonal values to previous forecast forecast_array += tiled_averages col_name += '+seasonal' # combine data array with index into named dataframe forecast_frame = pd.DataFrame(data=forecast_array, index=forecast_idx) forecast_frame.columns = [col_name] return forecast_frame
def function[forecast, parameter[stl, fc_func, steps, seasonal]]: constant[Forecast the given decomposition ``stl`` forward by ``steps`` steps using the forecasting function ``fc_func``, optionally including the calculated seasonality. This is an additive model, Y[t] = T[t] + S[t] + e[t] Args: stl (a modified statsmodels.tsa.seasonal.DecomposeResult): STL decomposition of observed time series created using the ``stldecompose.decompose()`` method. fc_func (function): Function which takes an array of observations and returns a single valued forecast for the next point. steps (int, optional): Number of forward steps to include in the forecast seasonal (bool, optional): Include seasonal component in forecast fc_func_kwargs: keyword arguments All remaining arguments are passed to the forecasting function ``fc_func`` Returns: forecast_frame (pd.Dataframe): A ``pandas.Dataframe`` containing forecast values and a DatetimeIndex matching the observed index. ] variable[forecast_array] assign[=] call[name[np].array, parameter[list[[]]]] variable[trend_array] assign[=] name[stl].trend for taget[name[step]] in starred[call[name[range], parameter[name[steps]]]] begin[:] variable[pred] assign[=] call[name[fc_func], parameter[call[name[np].append, parameter[name[trend_array], name[forecast_array]]]]] variable[forecast_array] assign[=] call[name[np].append, parameter[name[forecast_array], name[pred]]] variable[col_name] assign[=] name[fc_func].__name__ variable[observed_timedelta] assign[=] binary_operation[call[name[stl].observed.index][<ast.UnaryOp object at 0x7da1b26ad8d0>] - call[name[stl].observed.index][<ast.UnaryOp object at 0x7da1b26ac310>]] variable[forecast_idx_start] assign[=] binary_operation[call[name[stl].observed.index][<ast.UnaryOp object at 0x7da1b26aeaa0>] + name[observed_timedelta]] variable[forecast_idx] assign[=] call[name[pd].date_range, parameter[]] if name[seasonal] begin[:] variable[seasonal_ix] assign[=] constant[0] variable[max_correlation] assign[=] <ast.UnaryOp object at 0x7da1b26af250> variable[detrended_array] assign[=] call[call[name[np].asanyarray, parameter[binary_operation[name[stl].observed - name[stl].trend]]].squeeze, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b26ac250>, <ast.Name object at 0x7da1b26ac070>]]] in starred[call[name[enumerate], parameter[name[stl].period_averages]]] begin[:] if compare[name[i] equal[==] constant[0]] begin[:] variable[detrended_slice] assign[=] call[name[detrended_array]][<ast.Slice object at 0x7da1b26aca60>] variable[this_correlation] assign[=] call[call[name[np].correlate, parameter[name[detrended_slice], name[stl].period_averages]]][constant[0]] if compare[name[this_correlation] greater[>] name[max_correlation]] begin[:] variable[max_correlation] assign[=] name[this_correlation] variable[seasonal_ix] assign[=] name[i] variable[rolled_period_averages] assign[=] call[name[np].roll, parameter[name[stl].period_averages, <ast.UnaryOp object at 0x7da1b26add80>]] variable[tiled_averages] assign[=] call[call[name[np].tile, parameter[name[rolled_period_averages], binary_operation[binary_operation[name[steps] <ast.FloorDiv object at 0x7da2590d6bc0> call[name[len], parameter[name[stl].period_averages]]] + constant[1]]]]][<ast.Slice object at 0x7da1b26ae200>] <ast.AugAssign object at 0x7da1b26aff70> <ast.AugAssign object at 0x7da1b26ad300> variable[forecast_frame] assign[=] call[name[pd].DataFrame, parameter[]] name[forecast_frame].columns assign[=] list[[<ast.Name object at 0x7da1b26aee30>]] return[name[forecast_frame]]
keyword[def] identifier[forecast] ( identifier[stl] , identifier[fc_func] , identifier[steps] = literal[int] , identifier[seasonal] = keyword[False] ,** identifier[fc_func_kwargs] ): literal[string] identifier[forecast_array] = identifier[np] . identifier[array] ([]) identifier[trend_array] = identifier[stl] . identifier[trend] keyword[for] identifier[step] keyword[in] identifier[range] ( identifier[steps] ): identifier[pred] = identifier[fc_func] ( identifier[np] . identifier[append] ( identifier[trend_array] , identifier[forecast_array] ),** identifier[fc_func_kwargs] ) identifier[forecast_array] = identifier[np] . identifier[append] ( identifier[forecast_array] , identifier[pred] ) identifier[col_name] = identifier[fc_func] . identifier[__name__] identifier[observed_timedelta] = identifier[stl] . identifier[observed] . identifier[index] [- literal[int] ]- identifier[stl] . identifier[observed] . identifier[index] [- literal[int] ] identifier[forecast_idx_start] = identifier[stl] . identifier[observed] . identifier[index] [- literal[int] ]+ identifier[observed_timedelta] identifier[forecast_idx] = identifier[pd] . identifier[date_range] ( identifier[start] = identifier[forecast_idx_start] , identifier[periods] = identifier[steps] , identifier[freq] = identifier[pd] . identifier[tseries] . identifier[frequencies] . identifier[to_offset] ( identifier[observed_timedelta] )) keyword[if] identifier[seasonal] : identifier[seasonal_ix] = literal[int] identifier[max_correlation] =- identifier[np] . identifier[inf] identifier[detrended_array] = identifier[np] . identifier[asanyarray] ( identifier[stl] . identifier[observed] - identifier[stl] . identifier[trend] ). identifier[squeeze] () keyword[for] identifier[i] , identifier[x] keyword[in] identifier[enumerate] ( identifier[stl] . identifier[period_averages] ): keyword[if] identifier[i] == literal[int] : identifier[detrended_slice] = identifier[detrended_array] [- identifier[len] ( identifier[stl] . identifier[period_averages] ):] keyword[else] : identifier[detrended_slice] = identifier[detrended_array] [-( identifier[len] ( identifier[stl] . identifier[period_averages] )+ identifier[i] ):- identifier[i] ] identifier[this_correlation] = identifier[np] . identifier[correlate] ( identifier[detrended_slice] , identifier[stl] . identifier[period_averages] )[ literal[int] ] keyword[if] identifier[this_correlation] > identifier[max_correlation] : identifier[max_correlation] = identifier[this_correlation] identifier[seasonal_ix] = identifier[i] identifier[rolled_period_averages] = identifier[np] . identifier[roll] ( identifier[stl] . identifier[period_averages] ,- identifier[seasonal_ix] ) identifier[tiled_averages] = identifier[np] . identifier[tile] ( identifier[rolled_period_averages] , ( identifier[steps] // identifier[len] ( identifier[stl] . identifier[period_averages] )+ literal[int] ))[: identifier[steps] ] identifier[forecast_array] += identifier[tiled_averages] identifier[col_name] += literal[string] identifier[forecast_frame] = identifier[pd] . identifier[DataFrame] ( identifier[data] = identifier[forecast_array] , identifier[index] = identifier[forecast_idx] ) identifier[forecast_frame] . identifier[columns] =[ identifier[col_name] ] keyword[return] identifier[forecast_frame]
def forecast(stl, fc_func, steps=10, seasonal=False, **fc_func_kwargs): """Forecast the given decomposition ``stl`` forward by ``steps`` steps using the forecasting function ``fc_func``, optionally including the calculated seasonality. This is an additive model, Y[t] = T[t] + S[t] + e[t] Args: stl (a modified statsmodels.tsa.seasonal.DecomposeResult): STL decomposition of observed time series created using the ``stldecompose.decompose()`` method. fc_func (function): Function which takes an array of observations and returns a single valued forecast for the next point. steps (int, optional): Number of forward steps to include in the forecast seasonal (bool, optional): Include seasonal component in forecast fc_func_kwargs: keyword arguments All remaining arguments are passed to the forecasting function ``fc_func`` Returns: forecast_frame (pd.Dataframe): A ``pandas.Dataframe`` containing forecast values and a DatetimeIndex matching the observed index. """ # container for forecast values forecast_array = np.array([]) # forecast trend # unpack precalculated trend array stl frame trend_array = stl.trend # iteratively forecast trend ("seasonally adjusted") component # note: this loop can be slow for step in range(steps): # make this prediction on all available data pred = fc_func(np.append(trend_array, forecast_array), **fc_func_kwargs) # add this prediction to current array forecast_array = np.append(forecast_array, pred) # depends on [control=['for'], data=[]] col_name = fc_func.__name__ # forecast start and index are determined by observed data observed_timedelta = stl.observed.index[-1] - stl.observed.index[-2] forecast_idx_start = stl.observed.index[-1] + observed_timedelta forecast_idx = pd.date_range(start=forecast_idx_start, periods=steps, freq=pd.tseries.frequencies.to_offset(observed_timedelta)) # (optionally) forecast seasonal & combine if seasonal: # track index and value of max correlation seasonal_ix = 0 max_correlation = -np.inf # loop over indexes=length of period avgs detrended_array = np.asanyarray(stl.observed - stl.trend).squeeze() for (i, x) in enumerate(stl.period_averages): # work slices backward from end of detrended observations if i == 0: # slicing w/ [x:-0] doesn't work detrended_slice = detrended_array[-len(stl.period_averages):] # depends on [control=['if'], data=[]] else: detrended_slice = detrended_array[-(len(stl.period_averages) + i):-i] # calculate corr b/w period_avgs and detrend_slice this_correlation = np.correlate(detrended_slice, stl.period_averages)[0] if this_correlation > max_correlation: # update ix and max correlation max_correlation = this_correlation seasonal_ix = i # depends on [control=['if'], data=['this_correlation', 'max_correlation']] # depends on [control=['for'], data=[]] # roll seasonal signal to matching phase rolled_period_averages = np.roll(stl.period_averages, -seasonal_ix) # tile as many times as needed to reach "steps", then truncate tiled_averages = np.tile(rolled_period_averages, steps // len(stl.period_averages) + 1)[:steps] # add seasonal values to previous forecast forecast_array += tiled_averages col_name += '+seasonal' # depends on [control=['if'], data=[]] # combine data array with index into named dataframe forecast_frame = pd.DataFrame(data=forecast_array, index=forecast_idx) forecast_frame.columns = [col_name] return forecast_frame
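A usage sketch for forecast() above, assuming the stldecompose package layout referenced in its docstring (a decompose() helper producing the modified DecomposeResult, plus a drift forecasting function in stldecompose.forecast_funcs):

import numpy as np
import pandas as pd
from stldecompose import decompose, forecast
from stldecompose.forecast_funcs import drift

# Daily series with a weekly cycle plus a slow upward trend.
idx = pd.date_range('2020-01-01', periods=120, freq='D')
obs = pd.DataFrame({'y': np.sin(np.arange(120) * 2 * np.pi / 7) + 0.01 * np.arange(120)}, index=idx)

stl = decompose(obs, period=7)  # the modified statsmodels DecomposeResult
fcast = forecast(stl, fc_func=drift, steps=14, seasonal=True)
print(fcast.head())  # single column named 'drift+seasonal'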
def add_routes(fapp, routes, prefix=""): """Batch routes registering Register routes to a blueprint/flask_app previously collected with :func:`routes_collector`. :param fapp: bluprint or flask_app to whom attach new routes. :param routes: dict of routes collected by :func:`routes_collector` :param prefix: url prefix under which register all routes """ for r in routes: r['rule'] = prefix + r['rule'] fapp.add_url_rule(**r)
def function[add_routes, parameter[fapp, routes, prefix]]: constant[Batch routes registering Register routes to a blueprint/flask_app previously collected with :func:`routes_collector`. :param fapp: blueprint or flask_app to which the new routes are attached. :param routes: list of route dicts collected by :func:`routes_collector` :param prefix: url prefix under which all routes are registered ] for taget[name[r]] in starred[name[routes]] begin[:] call[name[r]][constant[rule]] assign[=] binary_operation[name[prefix] + call[name[r]][constant[rule]]] call[name[fapp].add_url_rule, parameter[]]
keyword[def] identifier[add_routes] ( identifier[fapp] , identifier[routes] , identifier[prefix] = literal[string] ): literal[string] keyword[for] identifier[r] keyword[in] identifier[routes] : identifier[r] [ literal[string] ]= identifier[prefix] + identifier[r] [ literal[string] ] identifier[fapp] . identifier[add_url_rule] (** identifier[r] )
def add_routes(fapp, routes, prefix=''): """Batch routes registering Register routes to a blueprint/flask_app previously collected with :func:`routes_collector`. :param fapp: blueprint or flask_app to which the new routes are attached. :param routes: list of route dicts collected by :func:`routes_collector` :param prefix: url prefix under which all routes are registered """ for r in routes: r['rule'] = prefix + r['rule'] fapp.add_url_rule(**r) # depends on [control=['for'], data=['r']]
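A usage sketch for add_routes() above; each route dict mirrors the keyword arguments of Flask's add_url_rule(), which is the shape :func:`routes_collector` (not shown in this section) is assumed to produce:

from flask import Flask

app = Flask(__name__)

def ping():
    return 'pong'

# One dict per route; its keys are passed straight to app.add_url_rule().
routes = [{'rule': '/ping', 'endpoint': 'ping', 'view_func': ping}]

add_routes(app, routes, prefix='/api')  # registers GET /api/ping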
def __setup(local_download_dir_warc, log_level): """ Setup :return: """ if not os.path.exists(local_download_dir_warc): os.makedirs(local_download_dir_warc) # make loggers quiet configure_logging({"LOG_LEVEL": "ERROR"}) logging.getLogger('requests').setLevel(logging.CRITICAL) logging.getLogger('readability').setLevel(logging.CRITICAL) logging.getLogger('PIL').setLevel(logging.CRITICAL) logging.getLogger('newspaper').setLevel(logging.CRITICAL) logging.getLogger('newsplease').setLevel(logging.CRITICAL) logging.getLogger('urllib3').setLevel(logging.CRITICAL) # set own logger logging.basicConfig(level=log_level) __logger = logging.getLogger(__name__) __logger.setLevel(log_level)
def function[__setup, parameter[local_download_dir_warc, log_level]]: constant[ Setup :return: ] if <ast.UnaryOp object at 0x7da18dc99e70> begin[:] call[name[os].makedirs, parameter[name[local_download_dir_warc]]] call[name[configure_logging], parameter[dictionary[[<ast.Constant object at 0x7da18dc9aa70>], [<ast.Constant object at 0x7da18dc9a440>]]]] call[call[name[logging].getLogger, parameter[constant[requests]]].setLevel, parameter[name[logging].CRITICAL]] call[call[name[logging].getLogger, parameter[constant[readability]]].setLevel, parameter[name[logging].CRITICAL]] call[call[name[logging].getLogger, parameter[constant[PIL]]].setLevel, parameter[name[logging].CRITICAL]] call[call[name[logging].getLogger, parameter[constant[newspaper]]].setLevel, parameter[name[logging].CRITICAL]] call[call[name[logging].getLogger, parameter[constant[newsplease]]].setLevel, parameter[name[logging].CRITICAL]] call[call[name[logging].getLogger, parameter[constant[urllib3]]].setLevel, parameter[name[logging].CRITICAL]] call[name[logging].basicConfig, parameter[]] variable[__logger] assign[=] call[name[logging].getLogger, parameter[name[__name__]]] call[name[__logger].setLevel, parameter[name[log_level]]]
keyword[def] identifier[__setup] ( identifier[local_download_dir_warc] , identifier[log_level] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[local_download_dir_warc] ): identifier[os] . identifier[makedirs] ( identifier[local_download_dir_warc] ) identifier[configure_logging] ({ literal[string] : literal[string] }) identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[CRITICAL] ) identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[CRITICAL] ) identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[CRITICAL] ) identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[CRITICAL] ) identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[CRITICAL] ) identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[CRITICAL] ) identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[log_level] ) identifier[__logger] = identifier[logging] . identifier[getLogger] ( identifier[__name__] ) identifier[__logger] . identifier[setLevel] ( identifier[log_level] )
def __setup(local_download_dir_warc, log_level):
    """
    Setup
    :return:
    """
    if not os.path.exists(local_download_dir_warc):
        os.makedirs(local_download_dir_warc) # depends on [control=['if'], data=[]]
    # make loggers quiet
    configure_logging({'LOG_LEVEL': 'ERROR'})
    logging.getLogger('requests').setLevel(logging.CRITICAL)
    logging.getLogger('readability').setLevel(logging.CRITICAL)
    logging.getLogger('PIL').setLevel(logging.CRITICAL)
    logging.getLogger('newspaper').setLevel(logging.CRITICAL)
    logging.getLogger('newsplease').setLevel(logging.CRITICAL)
    logging.getLogger('urllib3').setLevel(logging.CRITICAL)
    # set own logger
    logging.basicConfig(level=log_level)
    __logger = logging.getLogger(__name__)
    __logger.setLevel(log_level)
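# Illustrative invocation of the setup helper above; the directory path is a
# placeholder, and the call assumes the module-level names the snippet itself
# relies on (os, configure_logging) are importable. Name mangling does not
# apply at module level, so the double-underscore name is callable as written.
import logging

__setup('/tmp/cc_download_warc', logging.INFO)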
def closeView(self, view=None):
    """
    Closes the given view.

    :param      view | <int> || <XView> || None
    """
    if type(view) == int:
        view = self.widget(view)
    elif view == None:
        view = self.currentView()

    index = self.indexOf(view)
    if index == -1:
        return False

    # close the view
    count = self.count()
    if count == 1:
        self.closePanel()
    else:
        view.close()

    return True
def function[closeView, parameter[self, view]]: constant[ Closes the given view. :param view | <int> || <XView> || None ] if compare[call[name[type], parameter[name[view]]] equal[==] name[int]] begin[:] variable[view] assign[=] call[name[self].widget, parameter[name[view]]] variable[index] assign[=] call[name[self].indexOf, parameter[name[view]]] if compare[name[index] equal[==] <ast.UnaryOp object at 0x7da2044c1c30>] begin[:] return[constant[False]] variable[count] assign[=] call[name[self].count, parameter[]] if compare[name[count] equal[==] constant[1]] begin[:] call[name[self].closePanel, parameter[]] return[constant[True]]
keyword[def] identifier[closeView] ( identifier[self] , identifier[view] = keyword[None] ): literal[string] keyword[if] identifier[type] ( identifier[view] )== identifier[int] : identifier[view] = identifier[self] . identifier[widget] ( identifier[view] ) keyword[elif] identifier[view] == keyword[None] : identifier[view] = identifier[self] . identifier[currentView] () identifier[index] = identifier[self] . identifier[indexOf] ( identifier[view] ) keyword[if] identifier[index] ==- literal[int] : keyword[return] keyword[False] identifier[count] = identifier[self] . identifier[count] () keyword[if] identifier[count] == literal[int] : identifier[self] . identifier[closePanel] () keyword[else] : identifier[view] . identifier[close] () keyword[return] keyword[True]
def closeView(self, view=None):
    """
    Closes the given view.

    :param      view | <int> || <XView> || None
    """
    if type(view) == int:
        view = self.widget(view) # depends on [control=['if'], data=[]]
    elif view == None:
        view = self.currentView() # depends on [control=['if'], data=['view']]
    index = self.indexOf(view)
    if index == -1:
        return False # depends on [control=['if'], data=[]]
    # close the view
    count = self.count()
    if count == 1:
        self.closePanel() # depends on [control=['if'], data=[]]
    else:
        view.close()
    return True
def add_nodes(self, nodes): # noqa: D302 r""" Add nodes to tree. :param nodes: Node(s) to add with associated data. If there are several list items in the argument with the same node name the resulting node data is a list with items corresponding to the data of each entry in the argument with the same node name, in their order of appearance, in addition to any existing node data if the node is already present in the tree :type nodes: :ref:`NodesWithData` :raises: * RuntimeError (Argument \`nodes\` is not valid) * ValueError (Illegal node name: *[node_name]*) For example: .. =[=cog .. import docs.support.incfile .. docs.support.incfile.incfile('ptrie_example.py', cog.out) .. =]= .. code-block:: python # ptrie_example.py import ptrie def create_tree(): tobj = ptrie.Trie() tobj.add_nodes([ {'name':'root.branch1', 'data':5}, {'name':'root.branch1', 'data':7}, {'name':'root.branch2', 'data':[]}, {'name':'root.branch1.leaf1', 'data':[]}, {'name':'root.branch1.leaf1.subleaf1', 'data':333}, {'name':'root.branch1.leaf2', 'data':'Hello world!'}, {'name':'root.branch1.leaf2.subleaf2', 'data':[]}, ]) return tobj .. =[=end=]= .. code-block:: python >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> tobj.get_data('root.branch1') [5, 7] """ self._validate_nodes_with_data(nodes) nodes = nodes if isinstance(nodes, list) else [nodes] # Create root node (if needed) if not self.root_name: self._set_root_name(nodes[0]["name"].split(self._node_separator)[0].strip()) self._root_hierarchy_length = len( self.root_name.split(self._node_separator) ) self._create_node(name=self.root_name, parent="", children=[], data=[]) # Process new data for node_dict in nodes: name, data = node_dict["name"], node_dict["data"] if name not in self._db: # Validate node name (root of new node same as tree root) if not name.startswith(self.root_name + self._node_separator): raise ValueError("Illegal node name: {0}".format(name)) self._create_intermediate_nodes(name) self._db[name]["data"] += copy.deepcopy( data if isinstance(data, list) and data else ([] if isinstance(data, list) else [data]) )
def function[add_nodes, parameter[self, nodes]]: constant[ Add nodes to tree. :param nodes: Node(s) to add with associated data. If there are several list items in the argument with the same node name the resulting node data is a list with items corresponding to the data of each entry in the argument with the same node name, in their order of appearance, in addition to any existing node data if the node is already present in the tree :type nodes: :ref:`NodesWithData` :raises: * RuntimeError (Argument \`nodes\` is not valid) * ValueError (Illegal node name: *[node_name]*) For example: .. =[=cog .. import docs.support.incfile .. docs.support.incfile.incfile('ptrie_example.py', cog.out) .. =]= .. code-block:: python # ptrie_example.py import ptrie def create_tree(): tobj = ptrie.Trie() tobj.add_nodes([ {'name':'root.branch1', 'data':5}, {'name':'root.branch1', 'data':7}, {'name':'root.branch2', 'data':[]}, {'name':'root.branch1.leaf1', 'data':[]}, {'name':'root.branch1.leaf1.subleaf1', 'data':333}, {'name':'root.branch1.leaf2', 'data':'Hello world!'}, {'name':'root.branch1.leaf2.subleaf2', 'data':[]}, ]) return tobj .. =[=end=]= .. code-block:: python >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> tobj.get_data('root.branch1') [5, 7] ] call[name[self]._validate_nodes_with_data, parameter[name[nodes]]] variable[nodes] assign[=] <ast.IfExp object at 0x7da20c76cd30> if <ast.UnaryOp object at 0x7da20c76dde0> begin[:] call[name[self]._set_root_name, parameter[call[call[call[call[call[name[nodes]][constant[0]]][constant[name]].split, parameter[name[self]._node_separator]]][constant[0]].strip, parameter[]]]] name[self]._root_hierarchy_length assign[=] call[name[len], parameter[call[name[self].root_name.split, parameter[name[self]._node_separator]]]] call[name[self]._create_node, parameter[]] for taget[name[node_dict]] in starred[name[nodes]] begin[:] <ast.Tuple object at 0x7da1b101b2b0> assign[=] tuple[[<ast.Subscript object at 0x7da1b101bdc0>, <ast.Subscript object at 0x7da1b1019270>]] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[self]._db] begin[:] if <ast.UnaryOp object at 0x7da1b101a8c0> begin[:] <ast.Raise object at 0x7da1b101b280> call[name[self]._create_intermediate_nodes, parameter[name[name]]] <ast.AugAssign object at 0x7da1b1018610>
keyword[def] identifier[add_nodes] ( identifier[self] , identifier[nodes] ): literal[string] identifier[self] . identifier[_validate_nodes_with_data] ( identifier[nodes] ) identifier[nodes] = identifier[nodes] keyword[if] identifier[isinstance] ( identifier[nodes] , identifier[list] ) keyword[else] [ identifier[nodes] ] keyword[if] keyword[not] identifier[self] . identifier[root_name] : identifier[self] . identifier[_set_root_name] ( identifier[nodes] [ literal[int] ][ literal[string] ]. identifier[split] ( identifier[self] . identifier[_node_separator] )[ literal[int] ]. identifier[strip] ()) identifier[self] . identifier[_root_hierarchy_length] = identifier[len] ( identifier[self] . identifier[root_name] . identifier[split] ( identifier[self] . identifier[_node_separator] ) ) identifier[self] . identifier[_create_node] ( identifier[name] = identifier[self] . identifier[root_name] , identifier[parent] = literal[string] , identifier[children] =[], identifier[data] =[]) keyword[for] identifier[node_dict] keyword[in] identifier[nodes] : identifier[name] , identifier[data] = identifier[node_dict] [ literal[string] ], identifier[node_dict] [ literal[string] ] keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[_db] : keyword[if] keyword[not] identifier[name] . identifier[startswith] ( identifier[self] . identifier[root_name] + identifier[self] . identifier[_node_separator] ): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[name] )) identifier[self] . identifier[_create_intermediate_nodes] ( identifier[name] ) identifier[self] . identifier[_db] [ identifier[name] ][ literal[string] ]+= identifier[copy] . identifier[deepcopy] ( identifier[data] keyword[if] identifier[isinstance] ( identifier[data] , identifier[list] ) keyword[and] identifier[data] keyword[else] ([] keyword[if] identifier[isinstance] ( identifier[data] , identifier[list] ) keyword[else] [ identifier[data] ]) )
def add_nodes(self, nodes): # noqa: D302 "\n Add nodes to tree.\n\n :param nodes: Node(s) to add with associated data. If there are\n several list items in the argument with the same node\n name the resulting node data is a list with items\n corresponding to the data of each entry in the argument\n with the same node name, in their order of appearance,\n in addition to any existing node data if the node is\n already present in the tree\n :type nodes: :ref:`NodesWithData`\n\n :raises:\n * RuntimeError (Argument \\`nodes\\` is not valid)\n\n * ValueError (Illegal node name: *[node_name]*)\n\n For example:\n\n .. =[=cog\n .. import docs.support.incfile\n .. docs.support.incfile.incfile('ptrie_example.py', cog.out)\n .. =]=\n .. code-block:: python\n\n # ptrie_example.py\n import ptrie\n\n def create_tree():\n tobj = ptrie.Trie()\n tobj.add_nodes([\n {'name':'root.branch1', 'data':5},\n {'name':'root.branch1', 'data':7},\n {'name':'root.branch2', 'data':[]},\n {'name':'root.branch1.leaf1', 'data':[]},\n {'name':'root.branch1.leaf1.subleaf1', 'data':333},\n {'name':'root.branch1.leaf2', 'data':'Hello world!'},\n {'name':'root.branch1.leaf2.subleaf2', 'data':[]},\n ])\n return tobj\n\n .. =[=end=]=\n\n .. code-block:: python\n\n >>> from __future__ import print_function\n >>> import docs.support.ptrie_example\n >>> tobj = docs.support.ptrie_example.create_tree()\n >>> print(tobj)\n root\n ├branch1 (*)\n │├leaf1\n ││└subleaf1 (*)\n │└leaf2 (*)\n │ └subleaf2\n └branch2\n\n >>> tobj.get_data('root.branch1')\n [5, 7]\n " self._validate_nodes_with_data(nodes) nodes = nodes if isinstance(nodes, list) else [nodes] # Create root node (if needed) if not self.root_name: self._set_root_name(nodes[0]['name'].split(self._node_separator)[0].strip()) self._root_hierarchy_length = len(self.root_name.split(self._node_separator)) self._create_node(name=self.root_name, parent='', children=[], data=[]) # depends on [control=['if'], data=[]] # Process new data for node_dict in nodes: (name, data) = (node_dict['name'], node_dict['data']) if name not in self._db: # Validate node name (root of new node same as tree root) if not name.startswith(self.root_name + self._node_separator): raise ValueError('Illegal node name: {0}'.format(name)) # depends on [control=['if'], data=[]] self._create_intermediate_nodes(name) # depends on [control=['if'], data=['name']] self._db[name]['data'] += copy.deepcopy(data if isinstance(data, list) and data else [] if isinstance(data, list) else [data]) # depends on [control=['for'], data=['node_dict']]
def _render(self, contexts, partials): """render inverted section""" val = self._lookup(self.value, contexts) if val: return EMPTYSTRING return self._render_children(contexts, partials)
def function[_render, parameter[self, contexts, partials]]: constant[render inverted section] variable[val] assign[=] call[name[self]._lookup, parameter[name[self].value, name[contexts]]] if name[val] begin[:] return[name[EMPTYSTRING]] return[call[name[self]._render_children, parameter[name[contexts], name[partials]]]]
keyword[def] identifier[_render] ( identifier[self] , identifier[contexts] , identifier[partials] ): literal[string] identifier[val] = identifier[self] . identifier[_lookup] ( identifier[self] . identifier[value] , identifier[contexts] ) keyword[if] identifier[val] : keyword[return] identifier[EMPTYSTRING] keyword[return] identifier[self] . identifier[_render_children] ( identifier[contexts] , identifier[partials] )
def _render(self, contexts, partials): """render inverted section""" val = self._lookup(self.value, contexts) if val: return EMPTYSTRING # depends on [control=['if'], data=[]] return self._render_children(contexts, partials)
def set_nsxcontroller_ip(self, **kwargs):
    """ Set nsx-controller IP

    Args:
        name (str): Name of the NSX controller connection.
        ip_addr (str): IPv4 address of the NSX controller.

    Returns:
        Return value of `callback`.

    Raises:
        ValueError: if `ip_addr` is not a valid IPv4 address.
    """
    name = kwargs.pop('name')
    ip_addr = str(kwargs.pop('ip_addr', None))
    nsxipaddress = ip_interface(unicode(ip_addr))
    if nsxipaddress.version != 4:
        raise ValueError('NSX Controller ip must be IPV4')
    ip_args = dict(name=name, address=ip_addr)

    method_name = 'nsx_controller_connection_addr_address'
    method_class = self._brocade_tunnels

    nsxcontroller_attr = getattr(method_class, method_name)
    config = nsxcontroller_attr(**ip_args)

    output = self._callback(config)
    return output
def function[set_nsxcontroller_ip, parameter[self]]: constant[ Set nsx-controller IP Args: name (str): Name of the NSX controller connection. ip_addr (str): IPv4 address of the NSX controller. Returns: Return value of `callback`. Raises: ValueError: if `ip_addr` is not a valid IPv4 address. ] variable[name] assign[=] call[name[kwargs].pop, parameter[constant[name]]] variable[ip_addr] assign[=] call[name[str], parameter[call[name[kwargs].pop, parameter[constant[ip_addr], constant[None]]]]] variable[nsxipaddress] assign[=] call[name[ip_interface], parameter[call[name[unicode], parameter[name[ip_addr]]]]] if compare[name[nsxipaddress].version not_equal[!=] constant[4]] begin[:] <ast.Raise object at 0x7da18dc07970> variable[ip_args] assign[=] call[name[dict], parameter[]] variable[method_name] assign[=] constant[nsx_controller_connection_addr_address] variable[method_class] assign[=] name[self]._brocade_tunnels variable[nsxcontroller_attr] assign[=] call[name[getattr], parameter[name[method_class], name[method_name]]] variable[config] assign[=] call[name[nsxcontroller_attr], parameter[]] variable[output] assign[=] call[name[self]._callback, parameter[name[config]]] return[name[output]]
keyword[def] identifier[set_nsxcontroller_ip] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[name] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[ip_addr] = identifier[str] (( identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] ))) identifier[nsxipaddress] = identifier[ip_interface] ( identifier[unicode] ( identifier[ip_addr] )) keyword[if] identifier[nsxipaddress] . identifier[version] != literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[ip_args] = identifier[dict] ( identifier[name] = identifier[name] , identifier[address] = identifier[ip_addr] ) identifier[method_name] = literal[string] identifier[method_class] = identifier[self] . identifier[_brocade_tunnels] identifier[nsxcontroller_attr] = identifier[getattr] ( identifier[method_class] , identifier[method_name] ) identifier[config] = identifier[nsxcontroller_attr] (** identifier[ip_args] ) identifier[output] = identifier[self] . identifier[_callback] ( identifier[config] ) keyword[return] identifier[output]
def set_nsxcontroller_ip(self, **kwargs):
    """ Set nsx-controller IP

    Args:
        name (str): Name of the NSX controller connection.
        ip_addr (str): IPv4 address of the NSX controller.

    Returns:
        Return value of `callback`.

    Raises:
        ValueError: if `ip_addr` is not a valid IPv4 address.
    """
    name = kwargs.pop('name')
    ip_addr = str(kwargs.pop('ip_addr', None))
    nsxipaddress = ip_interface(unicode(ip_addr))
    if nsxipaddress.version != 4:
        raise ValueError('NSX Controller ip must be IPV4') # depends on [control=['if'], data=[]]
    ip_args = dict(name=name, address=ip_addr)
    method_name = 'nsx_controller_connection_addr_address'
    method_class = self._brocade_tunnels
    nsxcontroller_attr = getattr(method_class, method_name)
    config = nsxcontroller_attr(**ip_args)
    output = self._callback(config)
    return output
def remove_user_from_group(uid, gid): """ Removes a user from a group within DCOS Enterprise. :param uid: user id :type uid: str :param gid: group id :type gid: str """ acl_url = urljoin(_acl_url(), 'groups/{}/users/{}'.format(gid, uid)) try: r = http.delete(acl_url) assert r.status_code == 204 except dcos.errors.DCOSBadRequest: pass
def function[remove_user_from_group, parameter[uid, gid]]: constant[ Removes a user from a group within DCOS Enterprise. :param uid: user id :type uid: str :param gid: group id :type gid: str ] variable[acl_url] assign[=] call[name[urljoin], parameter[call[name[_acl_url], parameter[]], call[constant[groups/{}/users/{}].format, parameter[name[gid], name[uid]]]]] <ast.Try object at 0x7da2041da110>
keyword[def] identifier[remove_user_from_group] ( identifier[uid] , identifier[gid] ): literal[string] identifier[acl_url] = identifier[urljoin] ( identifier[_acl_url] (), literal[string] . identifier[format] ( identifier[gid] , identifier[uid] )) keyword[try] : identifier[r] = identifier[http] . identifier[delete] ( identifier[acl_url] ) keyword[assert] identifier[r] . identifier[status_code] == literal[int] keyword[except] identifier[dcos] . identifier[errors] . identifier[DCOSBadRequest] : keyword[pass]
def remove_user_from_group(uid, gid): """ Removes a user from a group within DCOS Enterprise. :param uid: user id :type uid: str :param gid: group id :type gid: str """ acl_url = urljoin(_acl_url(), 'groups/{}/users/{}'.format(gid, uid)) try: r = http.delete(acl_url) assert r.status_code == 204 # depends on [control=['try'], data=[]] except dcos.errors.DCOSBadRequest: pass # depends on [control=['except'], data=[]]
def restore_default_configuration(): """ Restores the sys.stdout and the sys.stderr buffer streams to their default values without regard to what step has currently overridden their values. This is useful during cleanup outside of the running execution block """ def restore(target, default_value): if target == default_value: return default_value if not isinstance(target, RedirectBuffer): return target try: target.active = False target.close() except Exception: pass return default_value sys.stdout = restore(sys.stdout, sys.__stdout__) sys.stderr = restore(sys.stderr, sys.__stderr__)
def function[restore_default_configuration, parameter[]]: constant[ Restores the sys.stdout and the sys.stderr buffer streams to their default values without regard to what step has currently overridden their values. This is useful during cleanup outside of the running execution block ] def function[restore, parameter[target, default_value]]: if compare[name[target] equal[==] name[default_value]] begin[:] return[name[default_value]] if <ast.UnaryOp object at 0x7da1b1b68eb0> begin[:] return[name[target]] <ast.Try object at 0x7da1b1b6a4a0> return[name[default_value]] name[sys].stdout assign[=] call[name[restore], parameter[name[sys].stdout, name[sys].__stdout__]] name[sys].stderr assign[=] call[name[restore], parameter[name[sys].stderr, name[sys].__stderr__]]
keyword[def] identifier[restore_default_configuration] (): literal[string] keyword[def] identifier[restore] ( identifier[target] , identifier[default_value] ): keyword[if] identifier[target] == identifier[default_value] : keyword[return] identifier[default_value] keyword[if] keyword[not] identifier[isinstance] ( identifier[target] , identifier[RedirectBuffer] ): keyword[return] identifier[target] keyword[try] : identifier[target] . identifier[active] = keyword[False] identifier[target] . identifier[close] () keyword[except] identifier[Exception] : keyword[pass] keyword[return] identifier[default_value] identifier[sys] . identifier[stdout] = identifier[restore] ( identifier[sys] . identifier[stdout] , identifier[sys] . identifier[__stdout__] ) identifier[sys] . identifier[stderr] = identifier[restore] ( identifier[sys] . identifier[stderr] , identifier[sys] . identifier[__stderr__] )
def restore_default_configuration(): """ Restores the sys.stdout and the sys.stderr buffer streams to their default values without regard to what step has currently overridden their values. This is useful during cleanup outside of the running execution block """ def restore(target, default_value): if target == default_value: return default_value # depends on [control=['if'], data=['default_value']] if not isinstance(target, RedirectBuffer): return target # depends on [control=['if'], data=[]] try: target.active = False target.close() # depends on [control=['try'], data=[]] except Exception: pass # depends on [control=['except'], data=[]] return default_value sys.stdout = restore(sys.stdout, sys.__stdout__) sys.stderr = restore(sys.stderr, sys.__stderr__)
def get_create_command(self): """Get the command to create the local repository.""" command = ['git', 'clone' if self.remote else 'init'] if self.bare: command.append('--bare') if self.remote: command.append(self.remote) command.append(self.local) return command
def function[get_create_command, parameter[self]]: constant[Get the command to create the local repository.] variable[command] assign[=] list[[<ast.Constant object at 0x7da1b0a36890>, <ast.IfExp object at 0x7da1b0a36980>]] if name[self].bare begin[:] call[name[command].append, parameter[constant[--bare]]] if name[self].remote begin[:] call[name[command].append, parameter[name[self].remote]] call[name[command].append, parameter[name[self].local]] return[name[command]]
keyword[def] identifier[get_create_command] ( identifier[self] ): literal[string] identifier[command] =[ literal[string] , literal[string] keyword[if] identifier[self] . identifier[remote] keyword[else] literal[string] ] keyword[if] identifier[self] . identifier[bare] : identifier[command] . identifier[append] ( literal[string] ) keyword[if] identifier[self] . identifier[remote] : identifier[command] . identifier[append] ( identifier[self] . identifier[remote] ) identifier[command] . identifier[append] ( identifier[self] . identifier[local] ) keyword[return] identifier[command]
def get_create_command(self): """Get the command to create the local repository.""" command = ['git', 'clone' if self.remote else 'init'] if self.bare: command.append('--bare') # depends on [control=['if'], data=[]] if self.remote: command.append(self.remote) # depends on [control=['if'], data=[]] command.append(self.local) return command
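# A hedged sketch exercising get_create_command() above; a SimpleNamespace
# stands in for `self`, modeling only the attributes the method reads
# (remote, local, bare).
import types

repo = types.SimpleNamespace(remote='https://example.com/repo.git',
                             local='/tmp/repo', bare=False)
print(get_create_command(repo))
# -> ['git', 'clone', 'https://example.com/repo.git', '/tmp/repo']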
def find_exe(name, filepath=None): """Find an executable. Args: name: Name of the program, eg 'python'. filepath: Path to executable, a search is performed if None. Returns: Path to the executable if found, otherwise an error is raised. """ if filepath: if not os.path.exists(filepath): open(filepath) # raise IOError elif not os.path.isfile(filepath): raise RezBindError("not a file: %s" % filepath) else: filepath = which(name) if not filepath: raise RezBindError("could not find executable: %s" % name) return filepath
def function[find_exe, parameter[name, filepath]]: constant[Find an executable. Args: name: Name of the program, eg 'python'. filepath: Path to executable, a search is performed if None. Returns: Path to the executable if found, otherwise an error is raised. ] if name[filepath] begin[:] if <ast.UnaryOp object at 0x7da1b17b8430> begin[:] call[name[open], parameter[name[filepath]]] return[name[filepath]]
keyword[def] identifier[find_exe] ( identifier[name] , identifier[filepath] = keyword[None] ): literal[string] keyword[if] identifier[filepath] : keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[filepath] ): identifier[open] ( identifier[filepath] ) keyword[elif] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[filepath] ): keyword[raise] identifier[RezBindError] ( literal[string] % identifier[filepath] ) keyword[else] : identifier[filepath] = identifier[which] ( identifier[name] ) keyword[if] keyword[not] identifier[filepath] : keyword[raise] identifier[RezBindError] ( literal[string] % identifier[name] ) keyword[return] identifier[filepath]
def find_exe(name, filepath=None): """Find an executable. Args: name: Name of the program, eg 'python'. filepath: Path to executable, a search is performed if None. Returns: Path to the executable if found, otherwise an error is raised. """ if filepath: if not os.path.exists(filepath): open(filepath) # raise IOError # depends on [control=['if'], data=[]] elif not os.path.isfile(filepath): raise RezBindError('not a file: %s' % filepath) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: filepath = which(name) if not filepath: raise RezBindError('could not find executable: %s' % name) # depends on [control=['if'], data=[]] return filepath
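# Illustrative calls to find_exe() above; both forms depend on the
# surrounding module's `which` helper and RezBindError being importable.
exe = find_exe('python')                      # search PATH for the name
exe = find_exe('python', '/usr/bin/python')   # validate an explicit path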
def get_meta_graph_def(saved_model_dir, tag_set): """Utility function to read a meta_graph_def from disk. From `saved_model_cli.py <https://github.com/tensorflow/tensorflow/blob/8e0e8d41a3a8f2d4a6100c2ea1dc9d6c6c4ad382/tensorflow/python/tools/saved_model_cli.py#L186>`_ Args: :saved_model_dir: path to saved_model. :tag_set: list of string tags identifying the TensorFlow graph within the saved_model. Returns: A TensorFlow meta_graph_def, or raises an Exception otherwise. """ saved_model = reader.read_saved_model(saved_model_dir) set_of_tags = set(tag_set.split(',')) for meta_graph_def in saved_model.meta_graphs: if set(meta_graph_def.meta_info_def.tags) == set_of_tags: return meta_graph_def raise RuntimeError("MetaGraphDef associated with tag-set {0} could not be found in SavedModel".format(tag_set))
def function[get_meta_graph_def, parameter[saved_model_dir, tag_set]]: constant[Utility function to read a meta_graph_def from disk. From `saved_model_cli.py <https://github.com/tensorflow/tensorflow/blob/8e0e8d41a3a8f2d4a6100c2ea1dc9d6c6c4ad382/tensorflow/python/tools/saved_model_cli.py#L186>`_ Args: :saved_model_dir: path to saved_model. :tag_set: list of string tags identifying the TensorFlow graph within the saved_model. Returns: A TensorFlow meta_graph_def, or raises an Exception otherwise. ] variable[saved_model] assign[=] call[name[reader].read_saved_model, parameter[name[saved_model_dir]]] variable[set_of_tags] assign[=] call[name[set], parameter[call[name[tag_set].split, parameter[constant[,]]]]] for taget[name[meta_graph_def]] in starred[name[saved_model].meta_graphs] begin[:] if compare[call[name[set], parameter[name[meta_graph_def].meta_info_def.tags]] equal[==] name[set_of_tags]] begin[:] return[name[meta_graph_def]] <ast.Raise object at 0x7da18f09f0a0>
keyword[def] identifier[get_meta_graph_def] ( identifier[saved_model_dir] , identifier[tag_set] ): literal[string] identifier[saved_model] = identifier[reader] . identifier[read_saved_model] ( identifier[saved_model_dir] ) identifier[set_of_tags] = identifier[set] ( identifier[tag_set] . identifier[split] ( literal[string] )) keyword[for] identifier[meta_graph_def] keyword[in] identifier[saved_model] . identifier[meta_graphs] : keyword[if] identifier[set] ( identifier[meta_graph_def] . identifier[meta_info_def] . identifier[tags] )== identifier[set_of_tags] : keyword[return] identifier[meta_graph_def] keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[tag_set] ))
def get_meta_graph_def(saved_model_dir, tag_set): """Utility function to read a meta_graph_def from disk. From `saved_model_cli.py <https://github.com/tensorflow/tensorflow/blob/8e0e8d41a3a8f2d4a6100c2ea1dc9d6c6c4ad382/tensorflow/python/tools/saved_model_cli.py#L186>`_ Args: :saved_model_dir: path to saved_model. :tag_set: list of string tags identifying the TensorFlow graph within the saved_model. Returns: A TensorFlow meta_graph_def, or raises an Exception otherwise. """ saved_model = reader.read_saved_model(saved_model_dir) set_of_tags = set(tag_set.split(',')) for meta_graph_def in saved_model.meta_graphs: if set(meta_graph_def.meta_info_def.tags) == set_of_tags: return meta_graph_def # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['meta_graph_def']] raise RuntimeError('MetaGraphDef associated with tag-set {0} could not be found in SavedModel'.format(tag_set))
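# Hedged usage sketch for get_meta_graph_def() above; the SavedModel
# directory is a placeholder and 'serve' is the conventional serving tag-set.
meta_graph = get_meta_graph_def('/tmp/exported_model', 'serve')
print(meta_graph.meta_info_def.tags)  # e.g. ['serve']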
def _index_item(self, uri, num, batch_num):
    """ queries the triplestore for an item and sends it to elasticsearch """

    data = RdfDataset(get_all_item_data(uri, self.namespace),
                      uri).base_class.es_json()
    self.batch_data[batch_num].append(data)
    self.count += 1
def function[_index_item, parameter[self, uri, num, batch_num]]: constant[ queries the triplestore for an item and sends it to elasticsearch ] variable[data] assign[=] call[call[name[RdfDataset], parameter[call[name[get_all_item_data], parameter[name[uri], name[self].namespace]], name[uri]]].base_class.es_json, parameter[]] call[call[name[self].batch_data][name[batch_num]].append, parameter[name[data]]] <ast.AugAssign object at 0x7da1b1461750>
keyword[def] identifier[_index_item] ( identifier[self] , identifier[uri] , identifier[num] , identifier[batch_num] ): literal[string] identifier[data] = identifier[RdfDataset] ( identifier[get_all_item_data] ( identifier[uri] , identifier[self] . identifier[namespace] ), identifier[uri] ). identifier[base_class] . identifier[es_json] () identifier[self] . identifier[batch_data] [ identifier[batch_num] ]. identifier[append] ( identifier[data] ) identifier[self] . identifier[count] += literal[int]
def _index_item(self, uri, num, batch_num):
    """ queries the triplestore for an item and sends it to elasticsearch """
    data = RdfDataset(get_all_item_data(uri, self.namespace), uri).base_class.es_json()
    self.batch_data[batch_num].append(data)
    self.count += 1
async def scalar(query, as_tuple=False): """Get single value from ``select()`` query, i.e. for aggregation. :return: result is the same as after sync ``query.scalar()`` call """ cursor = await _execute_query_async(query) try: row = await cursor.fetchone() finally: await cursor.release() if row and not as_tuple: return row[0] else: return row
<ast.AsyncFunctionDef object at 0x7da20cabcdf0>
keyword[async] keyword[def] identifier[scalar] ( identifier[query] , identifier[as_tuple] = keyword[False] ): literal[string] identifier[cursor] = keyword[await] identifier[_execute_query_async] ( identifier[query] ) keyword[try] : identifier[row] = keyword[await] identifier[cursor] . identifier[fetchone] () keyword[finally] : keyword[await] identifier[cursor] . identifier[release] () keyword[if] identifier[row] keyword[and] keyword[not] identifier[as_tuple] : keyword[return] identifier[row] [ literal[int] ] keyword[else] : keyword[return] identifier[row]
async def scalar(query, as_tuple=False): """Get single value from ``select()`` query, i.e. for aggregation. :return: result is the same as after sync ``query.scalar()`` call """ cursor = await _execute_query_async(query) try: row = await cursor.fetchone() # depends on [control=['try'], data=[]] finally: await cursor.release() if row and (not as_tuple): return row[0] # depends on [control=['if'], data=[]] else: return row
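# A hedged sketch of awaiting scalar() above from a coroutine; the peewee
# model `User` and the surrounding event-loop wiring are assumptions made
# purely for illustration.
from peewee import fn

async def count_users():
    # COUNT(*) collapses to a single value, which scalar() unwraps
    return await scalar(User.select(fn.COUNT(User.id)))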
def post(self, endpoint, data): """ Executes the HTTP POST request :param endpoint: string indicating the URL component to call :param data: the data to submit :return: the dumped JSON response content """ headers = { "Content-Type": "application/json", "Accept": "application/json", "x-standardize-only": "true" if self.standardize else "false", "x-include-invalid": "true" if self.invalid else "false", "x-accept-keypair": "true" if self.accept_keypair else "false", } if not self.logging: headers["x-suppress-logging"] = "true" params = {"auth-id": self.auth_id, "auth-token": self.auth_token} url = self.BASE_URL + endpoint response = self.session.post( url, json.dumps(stringify(data)), params=params, headers=headers, timeout=self.timeout, ) if response.status_code == 200: return response.json() raise ERROR_CODES.get(response.status_code, SmartyStreetsError)
def function[post, parameter[self, endpoint, data]]: constant[ Executes the HTTP POST request :param endpoint: string indicating the URL component to call :param data: the data to submit :return: the dumped JSON response content ] variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b25ecd00>, <ast.Constant object at 0x7da1b25ee4d0>, <ast.Constant object at 0x7da1b25ec310>, <ast.Constant object at 0x7da1b25ed330>, <ast.Constant object at 0x7da1b25ecb50>], [<ast.Constant object at 0x7da1b25ee830>, <ast.Constant object at 0x7da1b25ec1c0>, <ast.IfExp object at 0x7da1b25ec6a0>, <ast.IfExp object at 0x7da1b25ecf10>, <ast.IfExp object at 0x7da1b25ed030>]] if <ast.UnaryOp object at 0x7da1b25ee440> begin[:] call[name[headers]][constant[x-suppress-logging]] assign[=] constant[true] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b23806a0>, <ast.Constant object at 0x7da1b2380910>], [<ast.Attribute object at 0x7da1b2380a60>, <ast.Attribute object at 0x7da1b23803d0>]] variable[url] assign[=] binary_operation[name[self].BASE_URL + name[endpoint]] variable[response] assign[=] call[name[self].session.post, parameter[name[url], call[name[json].dumps, parameter[call[name[stringify], parameter[name[data]]]]]]] if compare[name[response].status_code equal[==] constant[200]] begin[:] return[call[name[response].json, parameter[]]] <ast.Raise object at 0x7da1b2449390>
keyword[def] identifier[post] ( identifier[self] , identifier[endpoint] , identifier[data] ): literal[string] identifier[headers] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] keyword[if] identifier[self] . identifier[standardize] keyword[else] literal[string] , literal[string] : literal[string] keyword[if] identifier[self] . identifier[invalid] keyword[else] literal[string] , literal[string] : literal[string] keyword[if] identifier[self] . identifier[accept_keypair] keyword[else] literal[string] , } keyword[if] keyword[not] identifier[self] . identifier[logging] : identifier[headers] [ literal[string] ]= literal[string] identifier[params] ={ literal[string] : identifier[self] . identifier[auth_id] , literal[string] : identifier[self] . identifier[auth_token] } identifier[url] = identifier[self] . identifier[BASE_URL] + identifier[endpoint] identifier[response] = identifier[self] . identifier[session] . identifier[post] ( identifier[url] , identifier[json] . identifier[dumps] ( identifier[stringify] ( identifier[data] )), identifier[params] = identifier[params] , identifier[headers] = identifier[headers] , identifier[timeout] = identifier[self] . identifier[timeout] , ) keyword[if] identifier[response] . identifier[status_code] == literal[int] : keyword[return] identifier[response] . identifier[json] () keyword[raise] identifier[ERROR_CODES] . identifier[get] ( identifier[response] . identifier[status_code] , identifier[SmartyStreetsError] )
def post(self, endpoint, data): """ Executes the HTTP POST request :param endpoint: string indicating the URL component to call :param data: the data to submit :return: the dumped JSON response content """ headers = {'Content-Type': 'application/json', 'Accept': 'application/json', 'x-standardize-only': 'true' if self.standardize else 'false', 'x-include-invalid': 'true' if self.invalid else 'false', 'x-accept-keypair': 'true' if self.accept_keypair else 'false'} if not self.logging: headers['x-suppress-logging'] = 'true' # depends on [control=['if'], data=[]] params = {'auth-id': self.auth_id, 'auth-token': self.auth_token} url = self.BASE_URL + endpoint response = self.session.post(url, json.dumps(stringify(data)), params=params, headers=headers, timeout=self.timeout) if response.status_code == 200: return response.json() # depends on [control=['if'], data=[]] raise ERROR_CODES.get(response.status_code, SmartyStreetsError)
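# Illustrative only: how the post() method above might be driven once the
# enclosing client is constructed. The class name, constructor arguments,
# and endpoint payload are assumptions, not the library's documented API.
client = Client(auth_id='my-id', auth_token='my-token')
payload = [{'street': '1 Main St', 'zipcode': '12345'}]
result = client.post('street-address', payload)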
def bought_value(self):
    """
    [deprecated]
    """
    user_system_log.warn(_(u"[abandon] {} is no longer valid.").format('stock_position.bought_value'))
    return self._quantity * self._avg_price
def function[bought_value, parameter[self]]: constant[ [deprecated] ] call[name[user_system_log].warn, parameter[call[call[name[_], parameter[constant[[abandon] {} is no longer valid.]]].format, parameter[constant[stock_position.bought_value]]]]] return[binary_operation[name[self]._quantity * name[self]._avg_price]]
keyword[def] identifier[bought_value] ( identifier[self] ): literal[string] identifier[user_system_log] . identifier[warn] ( identifier[_] ( literal[string] ). identifier[format] ( literal[string] )) keyword[return] identifier[self] . identifier[_quantity] * identifier[self] . identifier[_avg_price]
def bought_value(self):
    """
    [deprecated]
    """
    user_system_log.warn(_(u'[abandon] {} is no longer valid.').format('stock_position.bought_value'))
    return self._quantity * self._avg_price
def memoize(function):
    """A very simple memoize decorator to optimize pure-ish functions

    Don't use this unless you've examined the code and see the
    potential risks.
    """
    cache = {}

    @functools.wraps(function)
    def _memoize(*args):
        if args in cache:
            return cache[args]
        result = function(*args)
        cache[args] = result
        return result
    return _memoize
def function[memoize, parameter[function]]: constant[A very simple memoize decorator to optimize pure-ish functions Don't use this unless you've examined the code and see the potential risks. ] variable[cache] assign[=] dictionary[[], []] def function[_memoize, parameter[]]: if compare[name[args] in name[cache]] begin[:] return[call[name[cache]][name[args]]] variable[result] assign[=] call[name[function], parameter[<ast.Starred object at 0x7da207f9a110>]] call[name[cache]][name[args]] assign[=] name[result] return[name[result]] return[name[_memoize]]
keyword[def] identifier[memoize] ( identifier[function] ): literal[string] identifier[cache] ={} @ identifier[functools] . identifier[wraps] ( identifier[function] ) keyword[def] identifier[_memoize] (* identifier[args] ): keyword[if] identifier[args] keyword[in] identifier[cache] : keyword[return] identifier[cache] [ identifier[args] ] identifier[result] = identifier[function] (* identifier[args] ) identifier[cache] [ identifier[args] ]= identifier[result] keyword[return] identifier[result] keyword[return] identifier[_memoize]
def memoize(function):
    """A very simple memoize decorator to optimize pure-ish functions

    Don't use this unless you've examined the code and see the
    potential risks.
    """
    cache = {}

    @functools.wraps(function)
    def _memoize(*args):
        if args in cache:
            return cache[args] # depends on [control=['if'], data=['args', 'cache']]
        result = function(*args)
        cache[args] = result
        return result
    return _memoize
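# Quick check of the memoize decorator above with a toy recursive function;
# functools is imported here because the decorator body itself relies on it.
import functools

@memoize
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

print(fib(30))  # returns quickly because repeated calls hit the cache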
def frames(self):
    """Retrieve the next frame from the image directory and convert it to a ColorImage, a DepthImage, and an IrImage.

    Returns
    -------
    :obj:`tuple` of :obj:`ColorImage`, :obj:`DepthImage`, :obj:`IrImage`, :obj:`numpy.ndarray`
        The ColorImage, DepthImage, and IrImage of the current frame.

    Raises
    ------
    RuntimeError
        If the stream is not running or if all images in the directory have been used.
    """
    if not self._running:
        raise RuntimeError('Device pointing to %s not running. Cannot read frames' %(self._path_to_images))
    if self._im_index >= self._num_images:
        raise RuntimeError('Device is out of images')

    # read images
    color_filename = os.path.join(self._path_to_images, 'color_%d%s' %(self._im_index, self._color_ext))
    color_im = ColorImage.open(color_filename, frame=self._frame)
    depth_filename = os.path.join(self._path_to_images, 'depth_%d.npy' %(self._im_index))
    depth_im = DepthImage.open(depth_filename, frame=self._frame)
    self._im_index = (self._im_index + 1) % self._num_images
    return color_im, depth_im, None
def function[frames, parameter[self]]: constant[Retrieve the next frame from the image directory and convert it to a ColorImage, a DepthImage, and an IrImage. Returns ------- :obj:`tuple` of :obj:`ColorImage`, :obj:`DepthImage`, :obj:`IrImage`, :obj:`numpy.ndarray` The ColorImage, DepthImage, and IrImage of the current frame. Raises ------ RuntimeError If the stream is not running or if all images in the directory have been used. ] if <ast.UnaryOp object at 0x7da1b0577550> begin[:] <ast.Raise object at 0x7da1b0577b50> if compare[name[self]._im_index greater_or_equal[>=] name[self]._num_images] begin[:] <ast.Raise object at 0x7da1b05753f0> variable[color_filename] assign[=] call[name[os].path.join, parameter[name[self]._path_to_images, binary_operation[constant[color_%d%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0575000>, <ast.Attribute object at 0x7da1b05769e0>]]]]] variable[color_im] assign[=] call[name[ColorImage].open, parameter[name[color_filename]]] variable[depth_filename] assign[=] call[name[os].path.join, parameter[name[self]._path_to_images, binary_operation[constant[depth_%d.npy] <ast.Mod object at 0x7da2590d6920> name[self]._im_index]]] variable[depth_im] assign[=] call[name[DepthImage].open, parameter[name[depth_filename]]] name[self]._im_index assign[=] binary_operation[binary_operation[name[self]._im_index + constant[1]] <ast.Mod object at 0x7da2590d6920> name[self]._num_images] return[tuple[[<ast.Name object at 0x7da204960ee0>, <ast.Name object at 0x7da204963610>, <ast.Constant object at 0x7da204961ba0>]]]
keyword[def] identifier[frames] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_running] : keyword[raise] identifier[RuntimeError] ( literal[string] %( identifier[self] . identifier[_path_to_images] )) keyword[if] identifier[self] . identifier[_im_index] >= identifier[self] . identifier[_num_images] : keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[color_filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[_path_to_images] , literal[string] %( identifier[self] . identifier[_im_index] , identifier[self] . identifier[_color_ext] )) identifier[color_im] = identifier[ColorImage] . identifier[open] ( identifier[color_filename] , identifier[frame] = identifier[self] . identifier[_frame] ) identifier[depth_filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[_path_to_images] , literal[string] %( identifier[self] . identifier[_im_index] )) identifier[depth_im] = identifier[DepthImage] . identifier[open] ( identifier[depth_filename] , identifier[frame] = identifier[self] . identifier[_frame] ) identifier[self] . identifier[_im_index] =( identifier[self] . identifier[_im_index] + literal[int] )% identifier[self] . identifier[_num_images] keyword[return] identifier[color_im] , identifier[depth_im] , keyword[None]
def frames(self):
    """Retrieve the next frame from the image directory and convert it to a ColorImage, a DepthImage, and an IrImage.

    Returns
    -------
    :obj:`tuple` of :obj:`ColorImage`, :obj:`DepthImage`, :obj:`IrImage`, :obj:`numpy.ndarray`
        The ColorImage, DepthImage, and IrImage of the current frame.

    Raises
    ------
    RuntimeError
        If the stream is not running or if all images in the directory have been used.
    """
    if not self._running:
        raise RuntimeError('Device pointing to %s not running. Cannot read frames' % self._path_to_images) # depends on [control=['if'], data=[]]
    if self._im_index >= self._num_images:
        raise RuntimeError('Device is out of images') # depends on [control=['if'], data=[]]
    # read images
    color_filename = os.path.join(self._path_to_images, 'color_%d%s' % (self._im_index, self._color_ext))
    color_im = ColorImage.open(color_filename, frame=self._frame)
    depth_filename = os.path.join(self._path_to_images, 'depth_%d.npy' % self._im_index)
    depth_im = DepthImage.open(depth_filename, frame=self._frame)
    self._im_index = (self._im_index + 1) % self._num_images
    return (color_im, depth_im, None)
def dpcnn(embedding_matrix, embedding_size, trainable_embedding, maxlen, max_features,
          filter_nr, kernel_size, repeat_block, dense_size, repeat_dense, output_size, output_activation,
          max_pooling, mean_pooling, weighted_average_attention, concat_mode,
          dropout_embedding, conv_dropout, dense_dropout, dropout_mode,
          conv_kernel_reg_l2, conv_bias_reg_l2, dense_kernel_reg_l2, dense_bias_reg_l2,
          use_prelu, use_batch_norm, batch_norm_first):
    """
    Note:
    Implementation of http://ai.tencent.com/ailab/media/publications/ACL3-Brady.pdf
    post activation is used instead of pre-activation, could be worth exploring
    """

    input_text = Input(shape=(maxlen,))
    if embedding_matrix is not None:
        embedding = Embedding(max_features, embedding_size,
                              weights=[embedding_matrix], trainable=trainable_embedding)(input_text)
    else:
        embedding = Embedding(max_features, embedding_size)(input_text)

    embedding = dropout_block(dropout_embedding, dropout_mode)(embedding)

    x = convolutional_block(filter_nr, kernel_size, use_batch_norm, use_prelu, conv_dropout, dropout_mode,
                            conv_kernel_reg_l2, conv_bias_reg_l2, batch_norm_first)(embedding)
    x = convolutional_block(filter_nr, kernel_size, use_batch_norm, use_prelu, conv_dropout, dropout_mode,
                            conv_kernel_reg_l2, conv_bias_reg_l2, batch_norm_first)(x)

    if embedding_size == filter_nr:
        x = add([embedding, x])
    else:
        embedding_resized = shape_matching_layer(filter_nr, use_prelu, conv_kernel_reg_l2, conv_bias_reg_l2)(embedding)
        x = add([embedding_resized, x])

    for _ in range(repeat_block):
        x = dpcnn_block(filter_nr, kernel_size, use_batch_norm, use_prelu, conv_dropout, dropout_mode,
                        conv_kernel_reg_l2, conv_bias_reg_l2, batch_norm_first)(x)

    predictions = classification_block(dense_size=dense_size, repeat_dense=repeat_dense,
                                       output_size=output_size, output_activation=output_activation,
                                       max_pooling=max_pooling, mean_pooling=mean_pooling,
                                       weighted_average_attention=weighted_average_attention,
                                       concat_mode=concat_mode,
                                       dropout=dense_dropout,
                                       kernel_reg_l2=dense_kernel_reg_l2, bias_reg_l2=dense_bias_reg_l2,
                                       use_prelu=use_prelu, use_batch_norm=use_batch_norm,
                                       batch_norm_first=batch_norm_first)(x)
    model = Model(inputs=input_text, outputs=predictions)
    return model
def function[dpcnn, parameter[embedding_matrix, embedding_size, trainable_embedding, maxlen, max_features, filter_nr, kernel_size, repeat_block, dense_size, repeat_dense, output_size, output_activation, max_pooling, mean_pooling, weighted_average_attention, concat_mode, dropout_embedding, conv_dropout, dense_dropout, dropout_mode, conv_kernel_reg_l2, conv_bias_reg_l2, dense_kernel_reg_l2, dense_bias_reg_l2, use_prelu, use_batch_norm, batch_norm_first]]: constant[ Note: Implementation of http://ai.tencent.com/ailab/media/publications/ACL3-Brady.pdf post activation is used instead of pre-activation, could be worth exploring ] variable[input_text] assign[=] call[name[Input], parameter[]] if compare[name[embedding_matrix] is_not constant[None]] begin[:] variable[embedding] assign[=] call[call[name[Embedding], parameter[name[max_features], name[embedding_size]]], parameter[name[input_text]]] variable[embedding] assign[=] call[call[name[dropout_block], parameter[name[dropout_embedding], name[dropout_mode]]], parameter[name[embedding]]] variable[x] assign[=] call[call[name[convolutional_block], parameter[name[filter_nr], name[kernel_size], name[use_batch_norm], name[use_prelu], name[conv_dropout], name[dropout_mode], name[conv_kernel_reg_l2], name[conv_bias_reg_l2], name[batch_norm_first]]], parameter[name[embedding]]] variable[x] assign[=] call[call[name[convolutional_block], parameter[name[filter_nr], name[kernel_size], name[use_batch_norm], name[use_prelu], name[conv_dropout], name[dropout_mode], name[conv_kernel_reg_l2], name[conv_bias_reg_l2], name[batch_norm_first]]], parameter[name[x]]] if compare[name[embedding_size] equal[==] name[filter_nr]] begin[:] variable[x] assign[=] call[name[add], parameter[list[[<ast.Name object at 0x7da18f58ebc0>, <ast.Name object at 0x7da18f58f820>]]]] for taget[name[_]] in starred[call[name[range], parameter[name[repeat_block]]]] begin[:] variable[x] assign[=] call[call[name[dpcnn_block], parameter[name[filter_nr], name[kernel_size], name[use_batch_norm], name[use_prelu], name[conv_dropout], name[dropout_mode], name[conv_kernel_reg_l2], name[conv_bias_reg_l2], name[batch_norm_first]]], parameter[name[x]]] variable[predictions] assign[=] call[call[name[classification_block], parameter[]], parameter[name[x]]] variable[model] assign[=] call[name[Model], parameter[]] return[name[model]]
keyword[def] identifier[dpcnn] ( identifier[embedding_matrix] , identifier[embedding_size] , identifier[trainable_embedding] , identifier[maxlen] , identifier[max_features] , identifier[filter_nr] , identifier[kernel_size] , identifier[repeat_block] , identifier[dense_size] , identifier[repeat_dense] , identifier[output_size] , identifier[output_activation] , identifier[max_pooling] , identifier[mean_pooling] , identifier[weighted_average_attention] , identifier[concat_mode] , identifier[dropout_embedding] , identifier[conv_dropout] , identifier[dense_dropout] , identifier[dropout_mode] , identifier[conv_kernel_reg_l2] , identifier[conv_bias_reg_l2] , identifier[dense_kernel_reg_l2] , identifier[dense_bias_reg_l2] , identifier[use_prelu] , identifier[use_batch_norm] , identifier[batch_norm_first] ): literal[string] identifier[input_text] = identifier[Input] ( identifier[shape] =( identifier[maxlen] ,)) keyword[if] identifier[embedding_matrix] keyword[is] keyword[not] keyword[None] : identifier[embedding] = identifier[Embedding] ( identifier[max_features] , identifier[embedding_size] , identifier[weights] =[ identifier[embedding_matrix] ], identifier[trainable] = identifier[trainable_embedding] )( identifier[input_text] ) keyword[else] : identifier[embedding] = identifier[Embedding] ( identifier[max_features] , identifier[embedding_size] )( identifier[input_text] ) identifier[embedding] = identifier[dropout_block] ( identifier[dropout_embedding] , identifier[dropout_mode] )( identifier[embedding] ) identifier[x] = identifier[convolutional_block] ( identifier[filter_nr] , identifier[kernel_size] , identifier[use_batch_norm] , identifier[use_prelu] , identifier[conv_dropout] , identifier[dropout_mode] , identifier[conv_kernel_reg_l2] , identifier[conv_bias_reg_l2] , identifier[batch_norm_first] )( identifier[embedding] ) identifier[x] = identifier[convolutional_block] ( identifier[filter_nr] , identifier[kernel_size] , identifier[use_batch_norm] , identifier[use_prelu] , identifier[conv_dropout] , identifier[dropout_mode] , identifier[conv_kernel_reg_l2] , identifier[conv_bias_reg_l2] , identifier[batch_norm_first] )( identifier[x] ) keyword[if] identifier[embedding_size] == identifier[filter_nr] : identifier[x] = identifier[add] ([ identifier[embedding] , identifier[x] ]) keyword[else] : identifier[embedding_resized] = identifier[shape_matching_layer] ( identifier[filter_nr] , identifier[use_prelu] , identifier[conv_kernel_reg_l2] , identifier[conv_bias_reg_l2] )( identifier[embedding] ) identifier[x] = identifier[add] ([ identifier[embedding_resized] , identifier[x] ]) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[repeat_block] ): identifier[x] = identifier[dpcnn_block] ( identifier[filter_nr] , identifier[kernel_size] , identifier[use_batch_norm] , identifier[use_prelu] , identifier[conv_dropout] , identifier[dropout_mode] , identifier[conv_kernel_reg_l2] , identifier[conv_bias_reg_l2] , identifier[batch_norm_first] )( identifier[x] ) identifier[predictions] = identifier[classification_block] ( identifier[dense_size] = identifier[dense_size] , identifier[repeat_dense] = identifier[repeat_dense] , identifier[output_size] = identifier[output_size] , identifier[output_activation] = identifier[output_activation] , identifier[max_pooling] = identifier[max_pooling] , identifier[mean_pooling] = identifier[mean_pooling] , identifier[weighted_average_attention] = identifier[weighted_average_attention] , identifier[concat_mode] = identifier[concat_mode] , identifier[dropout] = identifier[dense_dropout] , identifier[kernel_reg_l2] = identifier[dense_kernel_reg_l2] , identifier[bias_reg_l2] = identifier[dense_bias_reg_l2] , identifier[use_prelu] = identifier[use_prelu] , identifier[use_batch_norm] = identifier[use_batch_norm] , identifier[batch_norm_first] = identifier[batch_norm_first] )( identifier[x] ) identifier[model] = identifier[Model] ( identifier[inputs] = identifier[input_text] , identifier[outputs] = identifier[predictions] ) keyword[return] identifier[model]
def dpcnn(embedding_matrix, embedding_size, trainable_embedding, maxlen, max_features, filter_nr, kernel_size, repeat_block, dense_size, repeat_dense, output_size, output_activation, max_pooling, mean_pooling, weighted_average_attention, concat_mode, dropout_embedding, conv_dropout, dense_dropout, dropout_mode, conv_kernel_reg_l2, conv_bias_reg_l2, dense_kernel_reg_l2, dense_bias_reg_l2, use_prelu, use_batch_norm, batch_norm_first):
    """
    Note:
    Implementation of http://ai.tencent.com/ailab/media/publications/ACL3-Brady.pdf
    post activation is used instead of pre-activation, could be worth exploring
    """
    input_text = Input(shape=(maxlen,))
    if embedding_matrix is not None:
        embedding = Embedding(max_features, embedding_size, weights=[embedding_matrix], trainable=trainable_embedding)(input_text) # depends on [control=['if'], data=['embedding_matrix']]
    else:
        embedding = Embedding(max_features, embedding_size)(input_text)
    embedding = dropout_block(dropout_embedding, dropout_mode)(embedding)
    x = convolutional_block(filter_nr, kernel_size, use_batch_norm, use_prelu, conv_dropout, dropout_mode, conv_kernel_reg_l2, conv_bias_reg_l2, batch_norm_first)(embedding)
    x = convolutional_block(filter_nr, kernel_size, use_batch_norm, use_prelu, conv_dropout, dropout_mode, conv_kernel_reg_l2, conv_bias_reg_l2, batch_norm_first)(x)
    if embedding_size == filter_nr:
        x = add([embedding, x]) # depends on [control=['if'], data=[]]
    else:
        embedding_resized = shape_matching_layer(filter_nr, use_prelu, conv_kernel_reg_l2, conv_bias_reg_l2)(embedding)
        x = add([embedding_resized, x])
    for _ in range(repeat_block):
        x = dpcnn_block(filter_nr, kernel_size, use_batch_norm, use_prelu, conv_dropout, dropout_mode, conv_kernel_reg_l2, conv_bias_reg_l2, batch_norm_first)(x) # depends on [control=['for'], data=[]]
    predictions = classification_block(dense_size=dense_size, repeat_dense=repeat_dense, output_size=output_size, output_activation=output_activation, max_pooling=max_pooling, mean_pooling=mean_pooling, weighted_average_attention=weighted_average_attention, concat_mode=concat_mode, dropout=dense_dropout, kernel_reg_l2=dense_kernel_reg_l2, bias_reg_l2=dense_bias_reg_l2, use_prelu=use_prelu, use_batch_norm=use_batch_norm, batch_norm_first=batch_norm_first)(x)
    model = Model(inputs=input_text, outputs=predictions)
    return model
def _is_reference(arg): ''' Return True if arg is a reference to a previously defined statement. ''' return isinstance(arg, dict) and len(arg) == 1 and isinstance(next(six.itervalues(arg)), six.string_types)
def function[_is_reference, parameter[arg]]: constant[ Return True if arg is a reference to a previously defined statement. ] return[<ast.BoolOp object at 0x7da1b1c36e30>]
keyword[def] identifier[_is_reference] ( identifier[arg] ): literal[string] keyword[return] identifier[isinstance] ( identifier[arg] , identifier[dict] ) keyword[and] identifier[len] ( identifier[arg] )== literal[int] keyword[and] identifier[isinstance] ( identifier[next] ( identifier[six] . identifier[itervalues] ( identifier[arg] )), identifier[six] . identifier[string_types] )
def _is_reference(arg): """ Return True if arg is a reference to a previously defined statement. """ return isinstance(arg, dict) and len(arg) == 1 and isinstance(next(six.itervalues(arg)), six.string_types)
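A quick check of the predicate, assuming six is importable in the enclosing module: only a one-key dict whose single value is a string counts as a reference.

assert _is_reference({'require': 'vim'}) is True
assert _is_reference({'require': 'vim', 'watch': 'httpd'}) is False  # more than one key
assert _is_reference({'require': ['vim']}) is False                  # value is not a string
assert _is_reference('require') is False                             # not a dict at all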
def gen_opt_str(ser_rec: pd.Series)->str: '''generate rst option string Parameters ---------- ser_rec : pd.Series record for specifications Returns ------- str rst string ''' name = ser_rec.name indent = r' ' str_opt = f'.. option:: {name}'+'\n\n' for spec in ser_rec.sort_index().index: str_opt += indent+f':{spec}:'+'\n' spec_content = ser_rec[spec] str_opt += indent+indent+f'{spec_content}'+'\n' return str_opt
def function[gen_opt_str, parameter[ser_rec]]: constant[generate rst option string Parameters ---------- ser_rec : pd.Series record for specifications Returns ------- str rst string ] variable[name] assign[=] name[ser_rec].name variable[indent] assign[=] constant[ ] variable[str_opt] assign[=] binary_operation[<ast.JoinedStr object at 0x7da1b0d0e5c0> + constant[ ]] for taget[name[spec]] in starred[call[name[ser_rec].sort_index, parameter[]].index] begin[:] <ast.AugAssign object at 0x7da1b0d0c790> variable[spec_content] assign[=] call[name[ser_rec]][name[spec]] <ast.AugAssign object at 0x7da18bc72fb0> return[name[str_opt]]
keyword[def] identifier[gen_opt_str] ( identifier[ser_rec] : identifier[pd] . identifier[Series] )-> identifier[str] : literal[string] identifier[name] = identifier[ser_rec] . identifier[name] identifier[indent] = literal[string] identifier[str_opt] = literal[string] + literal[string] keyword[for] identifier[spec] keyword[in] identifier[ser_rec] . identifier[sort_index] (). identifier[index] : identifier[str_opt] += identifier[indent] + literal[string] + literal[string] identifier[spec_content] = identifier[ser_rec] [ identifier[spec] ] identifier[str_opt] += identifier[indent] + identifier[indent] + literal[string] + literal[string] keyword[return] identifier[str_opt]
def gen_opt_str(ser_rec: pd.Series) -> str: """generate rst option string Parameters ---------- ser_rec : pd.Series record for specifications Returns ------- str rst string """ name = ser_rec.name indent = ' ' str_opt = f'.. option:: {name}' + '\n\n' for spec in ser_rec.sort_index().index: str_opt += indent + f':{spec}:' + '\n' spec_content = ser_rec[spec] str_opt += indent + indent + f'{spec_content}' + '\n' # depends on [control=['for'], data=['spec']] return str_opt
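A small runnable example of gen_opt_str; the option name and spec fields in the Series are illustrative only.

import pandas as pd

ser = pd.Series({'type': 'str', 'default': 'None'}, name='OutputDir')
print(gen_opt_str(ser))
# .. option:: OutputDir
#
#     :default:
#         None
#     :type:
#         str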
def open_for_io(self, writable, password): """Open the medium for I/O. in writable of type bool Set this to open the medium for both reading and writing. When not set the medium is opened readonly. in password of type str Password for accessing an encrypted medium. Must be empty if not encrypted. return medium_io of type :class:`IMediumIO` Medium I/O object. """ if not isinstance(writable, bool): raise TypeError("writable can only be an instance of type bool") if not isinstance(password, basestring): raise TypeError("password can only be an instance of type basestring") medium_io = self._call("openForIO", in_p=[writable, password]) medium_io = IMediumIO(medium_io) return medium_io
def function[open_for_io, parameter[self, writable, password]]: constant[Open the medium for I/O. in writable of type bool Set this to open the medium for both reading and writing. When not set the medium is opened readonly. in password of type str Password for accessing an encrypted medium. Must be empty if not encrypted. return medium_io of type :class:`IMediumIO` Medium I/O object. ] if <ast.UnaryOp object at 0x7da2041d8340> begin[:] <ast.Raise object at 0x7da2047e88b0> if <ast.UnaryOp object at 0x7da2047ead10> begin[:] <ast.Raise object at 0x7da2047e9c90> variable[medium_io] assign[=] call[name[self]._call, parameter[constant[openForIO]]] variable[medium_io] assign[=] call[name[IMediumIO], parameter[name[medium_io]]] return[name[medium_io]]
keyword[def] identifier[open_for_io] ( identifier[self] , identifier[writable] , identifier[password] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[writable] , identifier[bool] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[password] , identifier[basestring] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[medium_io] = identifier[self] . identifier[_call] ( literal[string] , identifier[in_p] =[ identifier[writable] , identifier[password] ]) identifier[medium_io] = identifier[IMediumIO] ( identifier[medium_io] ) keyword[return] identifier[medium_io]
def open_for_io(self, writable, password): """Open the medium for I/O. in writable of type bool Set this to open the medium for both reading and writing. When not set the medium is opened readonly. in password of type str Password for accessing an encrypted medium. Must be empty if not encrypted. return medium_io of type :class:`IMediumIO` Medium I/O object. """ if not isinstance(writable, bool): raise TypeError('writable can only be an instance of type bool') # depends on [control=['if'], data=[]] if not isinstance(password, basestring): raise TypeError('password can only be an instance of type basestring') # depends on [control=['if'], data=[]] medium_io = self._call('openForIO', in_p=[writable, password]) medium_io = IMediumIO(medium_io) return medium_io
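A hedged usage sketch against the VirtualBox Python bindings; how the IMedium is obtained (open_medium and its enum arguments) is an assumption about the pyvbox API, and 'disk.vdi' is a placeholder path.

import virtualbox
from virtualbox.library import DeviceType, AccessMode

vbox = virtualbox.VirtualBox()
medium = vbox.open_medium('disk.vdi', DeviceType.hard_disk,
                          AccessMode.read_only, False)        # assumed signature
medium_io = medium.open_for_io(writable=False, password='')   # read-only, not encrypted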
def confirm_value(self, widget): """Event handler called when the OK button is pressed. Propagates the file/folder navigator's current selection. """ self.hide() params = (self.fileFolderNavigator.get_selection_list(),) return params
def function[confirm_value, parameter[self, widget]]: constant[Event handler called when the OK button is pressed. Propagates the file/folder navigator's current selection. ] call[name[self].hide, parameter[]] variable[params] assign[=] tuple[[<ast.Call object at 0x7da20c993820>]] return[name[params]]
keyword[def] identifier[confirm_value] ( identifier[self] , identifier[widget] ): literal[string] identifier[self] . identifier[hide] () identifier[params] =( identifier[self] . identifier[fileFolderNavigator] . identifier[get_selection_list] (),) keyword[return] identifier[params]
def confirm_value(self, widget): """Event handler called when the OK button is pressed. Propagates the file/folder navigator's current selection. """ self.hide() params = (self.fileFolderNavigator.get_selection_list(),) return params
def StartingAgeEnum(ctx): """Starting Age Enumeration.""" return Enum( ctx, what=-2, unset=-1, dark=0, feudal=1, castle=2, imperial=3, postimperial=4, dmpostimperial=6 )
def function[StartingAgeEnum, parameter[ctx]]: constant[Starting Age Enumeration.] return[call[name[Enum], parameter[name[ctx]]]]
keyword[def] identifier[StartingAgeEnum] ( identifier[ctx] ): literal[string] keyword[return] identifier[Enum] ( identifier[ctx] , identifier[what] =- literal[int] , identifier[unset] =- literal[int] , identifier[dark] = literal[int] , identifier[feudal] = literal[int] , identifier[castle] = literal[int] , identifier[imperial] = literal[int] , identifier[postimperial] = literal[int] , identifier[dmpostimperial] = literal[int] )
def StartingAgeEnum(ctx): """Starting Age Enumeration.""" return Enum(ctx, what=-2, unset=-1, dark=0, feudal=1, castle=2, imperial=3, postimperial=4, dmpostimperial=6)
def render_string(self, template_name, **kwargs): """This method was rewritten to support multiple template engines (determined by the `TEMPLATE_ENGINE` setting, which can be `tornado` or `jinja2`). It only affects the template rendering process; the ui modules feature, which is mostly exposed in the `render` method, keeps working as usual. """ if 'tornado' == settings['TEMPLATE_ENGINE']: return super(BaseHandler, self).render_string(template_name, **kwargs) elif 'jinja2' == settings['TEMPLATE_ENGINE']: return jinja2_render(template_name, **kwargs) else: raise errors.SettingsError( '%s is not a supported TEMPLATE_ENGINE, should be `tornado` or `jinja2`' % settings['TEMPLATE_ENGINE'])
def function[render_string, parameter[self, template_name]]: constant[This method was rewritten to support multiple template engines (determined by the `TEMPLATE_ENGINE` setting, which can be `tornado` or `jinja2`). It only affects the template rendering process; the ui modules feature, which is mostly exposed in the `render` method, keeps working as usual. ] if compare[constant[tornado] equal[==] call[name[settings]][constant[TEMPLATE_ENGINE]]] begin[:] return[call[call[name[super], parameter[name[BaseHandler], name[self]]].render_string, parameter[name[template_name]]]]
keyword[def] identifier[render_string] ( identifier[self] , identifier[template_name] ,** identifier[kwargs] ): literal[string] keyword[if] literal[string] == identifier[settings] [ literal[string] ]: keyword[return] identifier[super] ( identifier[BaseHandler] , identifier[self] ). identifier[render_string] ( identifier[template_name] ,** identifier[kwargs] ) keyword[elif] literal[string] == identifier[settings] [ literal[string] ]: keyword[return] identifier[jinja2_render] ( identifier[template_name] ,** identifier[kwargs] ) keyword[else] : keyword[raise] identifier[errors] . identifier[SettingsError] ( literal[string] % identifier[settings] [ literal[string] ])
def render_string(self, template_name, **kwargs): """This method was rewritten to support multiple template engines (determined by the `TEMPLATE_ENGINE` setting, which can be `tornado` or `jinja2`). It only affects the template rendering process; the ui modules feature, which is mostly exposed in the `render` method, keeps working as usual. """ if 'tornado' == settings['TEMPLATE_ENGINE']: return super(BaseHandler, self).render_string(template_name, **kwargs) # depends on [control=['if'], data=[]] elif 'jinja2' == settings['TEMPLATE_ENGINE']: return jinja2_render(template_name, **kwargs) # depends on [control=['if'], data=[]] else: raise errors.SettingsError('%s is not a supported TEMPLATE_ENGINE, should be `tornado` or `jinja2`' % settings['TEMPLATE_ENGINE'])
def check(self, query): """ :param query: """ if query.get_type() != Keyword.DELETE: return Ok(True) return Err("Delete queries are forbidden.")
def function[check, parameter[self, query]]: constant[ :param query: ] if compare[call[name[query].get_type, parameter[]] not_equal[!=] name[Keyword].DELETE] begin[:] return[call[name[Ok], parameter[constant[True]]]] return[call[name[Err], parameter[constant[Delete queries are forbidden.]]]]
keyword[def] identifier[check] ( identifier[self] , identifier[query] ): literal[string] keyword[if] identifier[query] . identifier[get_type] ()!= identifier[Keyword] . identifier[DELETE] : keyword[return] identifier[Ok] ( keyword[True] ) keyword[return] identifier[Err] ( literal[string] )
def check(self, query): """ :param query: """ if query.get_type() != Keyword.DELETE: return Ok(True) # depends on [control=['if'], data=[]] return Err('Delete queries are forbidden.')
def add_step(self, value_map): """ Add the values in value_map to the end of the trace. """ if len(self.trace) == 0: raise PyrtlError('error, simulation trace needs at least 1 signal to track ' '(by default, unnamed signals are not traced -- try either passing ' 'a name to a WireVector or setting a "wirevector_subset" option)') for wire in self.trace: tracelist = self.trace[wire] wirevec = self._wires[wire] tracelist.append(value_map[wirevec])
def function[add_step, parameter[self, value_map]]: constant[ Add the values in value_map to the end of the trace. ] if compare[call[name[len], parameter[name[self].trace]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da20c796260> for taget[name[wire]] in starred[name[self].trace] begin[:] variable[tracelist] assign[=] call[name[self].trace][name[wire]] variable[wirevec] assign[=] call[name[self]._wires][name[wire]] call[name[tracelist].append, parameter[call[name[value_map]][name[wirevec]]]]
keyword[def] identifier[add_step] ( identifier[self] , identifier[value_map] ): literal[string] keyword[if] identifier[len] ( identifier[self] . identifier[trace] )== literal[int] : keyword[raise] identifier[PyrtlError] ( literal[string] literal[string] literal[string] ) keyword[for] identifier[wire] keyword[in] identifier[self] . identifier[trace] : identifier[tracelist] = identifier[self] . identifier[trace] [ identifier[wire] ] identifier[wirevec] = identifier[self] . identifier[_wires] [ identifier[wire] ] identifier[tracelist] . identifier[append] ( identifier[value_map] [ identifier[wirevec] ])
def add_step(self, value_map): """ Add the values in value_map to the end of the trace. """ if len(self.trace) == 0: raise PyrtlError('error, simulation trace needs at least 1 signal to track (by default, unnamed signals are not traced -- try either passing a name to a WireVector or setting a "wirevector_subset" option)') # depends on [control=['if'], data=[]] for wire in self.trace: tracelist = self.trace[wire] wirevec = self._wires[wire] tracelist.append(value_map[wirevec]) # depends on [control=['for'], data=['wire']]
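A hedged end-to-end sketch of how the trace fills up; normally pyrtl.Simulation calls add_step for you on every step, so the point here is just to show where the values land.

import pyrtl

pyrtl.reset_working_block()
a = pyrtl.Input(8, 'a')
b = pyrtl.Output(9, 'b')        # 9 bits to hold the carry of a + 1
b <<= a + 1
sim_trace = pyrtl.SimulationTrace()
sim = pyrtl.Simulation(tracer=sim_trace)
for value in (0, 1, 2):
    sim.step({'a': value})      # each step ends up calling add_step internally
print(sim_trace.trace['b'])     # [1, 2, 3]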
def get_catalogs(self): """Pass through to provider CatalogLookupSession.get_catalogs""" # Implemented from kitosid template for - # osid.resource.BinLookupSession.get_bins_template catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs() cat_list = [] for cat in catalogs: cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy)) return CatalogList(cat_list)
def function[get_catalogs, parameter[self]]: constant[Pass through to provider CatalogLookupSession.get_catalogs] variable[catalogs] assign[=] call[call[name[self]._get_provider_session, parameter[constant[catalog_lookup_session]]].get_catalogs, parameter[]] variable[cat_list] assign[=] list[[]] for taget[name[cat]] in starred[name[catalogs]] begin[:] call[name[cat_list].append, parameter[call[name[Catalog], parameter[name[self]._provider_manager, name[cat], name[self]._runtime, name[self]._proxy]]]] return[call[name[CatalogList], parameter[name[cat_list]]]]
keyword[def] identifier[get_catalogs] ( identifier[self] ): literal[string] identifier[catalogs] = identifier[self] . identifier[_get_provider_session] ( literal[string] ). identifier[get_catalogs] () identifier[cat_list] =[] keyword[for] identifier[cat] keyword[in] identifier[catalogs] : identifier[cat_list] . identifier[append] ( identifier[Catalog] ( identifier[self] . identifier[_provider_manager] , identifier[cat] , identifier[self] . identifier[_runtime] , identifier[self] . identifier[_proxy] )) keyword[return] identifier[CatalogList] ( identifier[cat_list] )
def get_catalogs(self): """Pass through to provider CatalogLookupSession.get_catalogs""" # Implemented from kitosid template for - # osid.resource.BinLookupSession.get_bins_template catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs() cat_list = [] for cat in catalogs: cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy)) # depends on [control=['for'], data=['cat']] return CatalogList(cat_list)
def registerViewType(self, cls, window=None): """ Registers the given widget class as a potential view class. If the \ optional window argument is supplied, then the registerToWindow method \ will be called for the class. :param cls | <subclass of XView> window | <QMainWindow> || <QDialog> || None """ if ( not cls in self._viewTypes ): self._viewTypes.append(cls) if ( window ): cls.registerToWindow(window)
def function[registerViewType, parameter[self, cls, window]]: constant[ Registers the given widget class as a potential view class. If the optional window argument is supplied, then the registerToWindow method will be called for the class. :param cls | <subclass of XView> window | <QMainWindow> || <QDialog> || None ] if <ast.UnaryOp object at 0x7da1b24c3fa0> begin[:] call[name[self]._viewTypes.append, parameter[name[cls]]] if name[window] begin[:] call[name[cls].registerToWindow, parameter[name[window]]]
keyword[def] identifier[registerViewType] ( identifier[self] , identifier[cls] , identifier[window] = keyword[None] ): literal[string] keyword[if] ( keyword[not] identifier[cls] keyword[in] identifier[self] . identifier[_viewTypes] ): identifier[self] . identifier[_viewTypes] . identifier[append] ( identifier[cls] ) keyword[if] ( identifier[window] ): identifier[cls] . identifier[registerToWindow] ( identifier[window] )
def registerViewType(self, cls, window=None): """ Registers the given widget class as a potential view class. If the optional window argument is supplied, then the registerToWindow method will be called for the class. :param cls | <subclass of XView> window | <QMainWindow> || <QDialog> || None """ if not cls in self._viewTypes: self._viewTypes.append(cls) if window: cls.registerToWindow(window) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def exec_all_endpoints(self, *args, **kwargs): """Execute each passed endpoint and collect the results. If a result is another `MultipleResults`, it will extend the results with those contained therein. If the result is `NoResult`, skip the addition.""" results = [] for handler in self.endpoints: if isinstance(handler, weakref.ref): handler = handler() if self.adapt_params: bind = self._adapt_call_params(handler, args, kwargs) res = handler(*bind.args, **bind.kwargs) else: res = handler(*args, **kwargs) if isinstance(res, MultipleResults): if res.done: results += res.results else: results += res._results elif res is not NoResult: results.append(res) return MultipleResults(results, concurrent=self.concurrent, owner=self)
def function[exec_all_endpoints, parameter[self]]: constant[Execute each passed endpoint and collect the results. If a result is another `MultipleResults`, it will extend the results with those contained therein. If the result is `NoResult`, skip the addition.] variable[results] assign[=] list[[]] for taget[name[handler]] in starred[name[self].endpoints] begin[:] if call[name[isinstance], parameter[name[handler], name[weakref].ref]] begin[:] variable[handler] assign[=] call[name[handler], parameter[]] if name[self].adapt_params begin[:] variable[bind] assign[=] call[name[self]._adapt_call_params, parameter[name[handler], name[args], name[kwargs]]] variable[res] assign[=] call[name[handler], parameter[<ast.Starred object at 0x7da1b1046890>]] if call[name[isinstance], parameter[name[res], name[MultipleResults]]] begin[:] if name[res].done begin[:] <ast.AugAssign object at 0x7da1b1044a60> return[call[name[MultipleResults], parameter[name[results]]]]
keyword[def] identifier[exec_all_endpoints] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[results] =[] keyword[for] identifier[handler] keyword[in] identifier[self] . identifier[endpoints] : keyword[if] identifier[isinstance] ( identifier[handler] , identifier[weakref] . identifier[ref] ): identifier[handler] = identifier[handler] () keyword[if] identifier[self] . identifier[adapt_params] : identifier[bind] = identifier[self] . identifier[_adapt_call_params] ( identifier[handler] , identifier[args] , identifier[kwargs] ) identifier[res] = identifier[handler] (* identifier[bind] . identifier[args] ,** identifier[bind] . identifier[kwargs] ) keyword[else] : identifier[res] = identifier[handler] (* identifier[args] ,** identifier[kwargs] ) keyword[if] identifier[isinstance] ( identifier[res] , identifier[MultipleResults] ): keyword[if] identifier[res] . identifier[done] : identifier[results] += identifier[res] . identifier[results] keyword[else] : identifier[results] += identifier[res] . identifier[_results] keyword[elif] identifier[res] keyword[is] keyword[not] identifier[NoResult] : identifier[results] . identifier[append] ( identifier[res] ) keyword[return] identifier[MultipleResults] ( identifier[results] , identifier[concurrent] = identifier[self] . identifier[concurrent] , identifier[owner] = identifier[self] )
def exec_all_endpoints(self, *args, **kwargs): """Execute each passed endpoint and collect the results. If a result is another `MultipleResults`, it will extend the results with those contained therein. If the result is `NoResult`, skip the addition.""" results = [] for handler in self.endpoints: if isinstance(handler, weakref.ref): handler = handler() # depends on [control=['if'], data=[]] if self.adapt_params: bind = self._adapt_call_params(handler, args, kwargs) res = handler(*bind.args, **bind.kwargs) # depends on [control=['if'], data=[]] else: res = handler(*args, **kwargs) if isinstance(res, MultipleResults): if res.done: results += res.results # depends on [control=['if'], data=[]] else: results += res._results # depends on [control=['if'], data=[]] elif res is not NoResult: results.append(res) # depends on [control=['if'], data=['res']] # depends on [control=['for'], data=['handler']] return MultipleResults(results, concurrent=self.concurrent, owner=self)
def _collapse_invariants(bases: List[type], namespace: MutableMapping[str, Any]) -> None: """Collect invariants from the bases and merge them with the invariants in the namespace.""" invariants = [] # type: List[Contract] # Add invariants of the bases for base in bases: if hasattr(base, "__invariants__"): invariants.extend(getattr(base, "__invariants__")) # Add invariants in the current namespace if '__invariants__' in namespace: invariants.extend(namespace['__invariants__']) # Change the final invariants in the namespace if invariants: namespace["__invariants__"] = invariants
def function[_collapse_invariants, parameter[bases, namespace]]: constant[Collect invariants from the bases and merge them with the invariants in the namespace.] variable[invariants] assign[=] list[[]] for taget[name[base]] in starred[name[bases]] begin[:] if call[name[hasattr], parameter[name[base], constant[__invariants__]]] begin[:] call[name[invariants].extend, parameter[call[name[getattr], parameter[name[base], constant[__invariants__]]]]] if compare[constant[__invariants__] in name[namespace]] begin[:] call[name[invariants].extend, parameter[call[name[namespace]][constant[__invariants__]]]] if name[invariants] begin[:] call[name[namespace]][constant[__invariants__]] assign[=] name[invariants]
keyword[def] identifier[_collapse_invariants] ( identifier[bases] : identifier[List] [ identifier[type] ], identifier[namespace] : identifier[MutableMapping] [ identifier[str] , identifier[Any] ])-> keyword[None] : literal[string] identifier[invariants] =[] keyword[for] identifier[base] keyword[in] identifier[bases] : keyword[if] identifier[hasattr] ( identifier[base] , literal[string] ): identifier[invariants] . identifier[extend] ( identifier[getattr] ( identifier[base] , literal[string] )) keyword[if] literal[string] keyword[in] identifier[namespace] : identifier[invariants] . identifier[extend] ( identifier[namespace] [ literal[string] ]) keyword[if] identifier[invariants] : identifier[namespace] [ literal[string] ]= identifier[invariants]
def _collapse_invariants(bases: List[type], namespace: MutableMapping[str, Any]) -> None: """Collect invariants from the bases and merge them with the invariants in the namespace.""" invariants = [] # type: List[Contract] # Add invariants of the bases for base in bases: if hasattr(base, '__invariants__'): invariants.extend(getattr(base, '__invariants__')) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['base']] # Add invariants in the current namespace if '__invariants__' in namespace: invariants.extend(namespace['__invariants__']) # depends on [control=['if'], data=['namespace']] # Change the final invariants in the namespace if invariants: namespace['__invariants__'] = invariants # depends on [control=['if'], data=[]]
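A minimal illustration of the merge order: invariants collected from the bases come first, then those already sitting in the namespace of the class being assembled. Plain strings stand in for the Contract objects the real code carries.

class Base:
    __invariants__ = ['base_invariant']   # stand-in for a list of Contract objects

namespace = {'__invariants__': ['own_invariant']}
_collapse_invariants([Base], namespace)
print(namespace['__invariants__'])        # ['base_invariant', 'own_invariant']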
def astype(self, dtype): """Return a copy of this space with new ``dtype``. Parameters ---------- dtype : Scalar data type of the returned space. Can be provided in any way the `numpy.dtype` constructor understands, e.g. as built-in type or as a string. Data types with non-trivial shapes are not allowed. Returns ------- newspace : `ProductSpace` Version of this space with given data type. """ if dtype is None: # Need to filter this out since Numpy interprets it as 'float' raise ValueError('`None` is not a valid data type') dtype = np.dtype(dtype) current_dtype = getattr(self, 'dtype', object) if dtype == current_dtype: return self else: return ProductSpace(*[space.astype(dtype) for space in self.spaces])
def function[astype, parameter[self, dtype]]: constant[Return a copy of this space with new ``dtype``. Parameters ---------- dtype : Scalar data type of the returned space. Can be provided in any way the `numpy.dtype` constructor understands, e.g. as built-in type or as a string. Data types with non-trivial shapes are not allowed. Returns ------- newspace : `ProductSpace` Version of this space with given data type. ] if compare[name[dtype] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1d0e6b0> variable[dtype] assign[=] call[name[np].dtype, parameter[name[dtype]]] variable[current_dtype] assign[=] call[name[getattr], parameter[name[self], constant[dtype], name[object]]] if compare[name[dtype] equal[==] name[current_dtype]] begin[:] return[name[self]]
keyword[def] identifier[astype] ( identifier[self] , identifier[dtype] ): literal[string] keyword[if] identifier[dtype] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[dtype] = identifier[np] . identifier[dtype] ( identifier[dtype] ) identifier[current_dtype] = identifier[getattr] ( identifier[self] , literal[string] , identifier[object] ) keyword[if] identifier[dtype] == identifier[current_dtype] : keyword[return] identifier[self] keyword[else] : keyword[return] identifier[ProductSpace] (*[ identifier[space] . identifier[astype] ( identifier[dtype] ) keyword[for] identifier[space] keyword[in] identifier[self] . identifier[spaces] ])
def astype(self, dtype): """Return a copy of this space with new ``dtype``. Parameters ---------- dtype : Scalar data type of the returned space. Can be provided in any way the `numpy.dtype` constructor understands, e.g. as built-in type or as a string. Data types with non-trivial shapes are not allowed. Returns ------- newspace : `ProductSpace` Version of this space with given data type. """ if dtype is None: # Need to filter this out since Numpy interprets it as 'float' raise ValueError('`None` is not a valid data type') # depends on [control=['if'], data=[]] dtype = np.dtype(dtype) current_dtype = getattr(self, 'dtype', object) if dtype == current_dtype: return self # depends on [control=['if'], data=[]] else: return ProductSpace(*[space.astype(dtype) for space in self.spaces])
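A hedged ODL-style example, assuming odl.rn spaces default to float64: retyping to the matching dtype can hand back the original object, while float32 rebuilds each component space.

import odl

pspace = odl.ProductSpace(odl.rn(3), odl.rn(2))
pspace64 = pspace.astype('float64')   # may be the very same object when dtypes already match
pspace32 = pspace.astype('float32')   # new ProductSpace with float32 component spaces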
def integer(_object): """ Validates a given input is of type int. Example usage:: data = {'a' : 21} schema = ('a', integer) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function. """ if is_callable(_object): _validator = _object @wraps(_validator) def decorated(value): ensure(isinstance(value, int), "not of type int") return _validator(value) return decorated ensure(isinstance(_object, int), "not of type int")
def function[integer, parameter[_object]]: constant[ Validates a given input is of type int. Example usage:: data = {'a' : 21} schema = ('a', integer) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function. ] if call[name[is_callable], parameter[name[_object]]] begin[:] variable[_validator] assign[=] name[_object] def function[decorated, parameter[value]]: call[name[ensure], parameter[call[name[isinstance], parameter[name[value], name[int]]], constant[not of type int]]] return[call[name[_validator], parameter[name[value]]]] return[name[decorated]] call[name[ensure], parameter[call[name[isinstance], parameter[name[_object], name[int]]], constant[not of type int]]]
keyword[def] identifier[integer] ( identifier[_object] ): literal[string] keyword[if] identifier[is_callable] ( identifier[_object] ): identifier[_validator] = identifier[_object] @ identifier[wraps] ( identifier[_validator] ) keyword[def] identifier[decorated] ( identifier[value] ): identifier[ensure] ( identifier[isinstance] ( identifier[value] , identifier[int] ), literal[string] ) keyword[return] identifier[_validator] ( identifier[value] ) keyword[return] identifier[decorated] identifier[ensure] ( identifier[isinstance] ( identifier[_object] , identifier[int] ), literal[string] )
def integer(_object): """ Validates a given input is of type int. Example usage:: data = {'a' : 21} schema = ('a', integer) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function. """ if is_callable(_object): _validator = _object @wraps(_validator) def decorated(value): ensure(isinstance(value, int), 'not of type int') return _validator(value) return decorated # depends on [control=['if'], data=[]] ensure(isinstance(_object, int), 'not of type int')
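Both call styles in one hedged snippet, assuming the module's ensure() raises when its condition is false (as in notario-style validators):

integer(21)            # plain call: passes silently for an int
# integer('21')        # would raise: "not of type int"

@integer
def positive(value):   # decorator form: the int check runs before this body
    ensure(value > 0, "not a positive number")

positive(7)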
def put_name(self, type_, id_, name): """ Write a cached name to disk. :param type_: str, "user" or "tag" :param id_: int, eg. 123456 :returns: None """ cachefile = self.filename(type_, id_) dirname = os.path.dirname(cachefile) try: os.makedirs(dirname) except OSError as e: if e.errno != errno.EEXIST: raise with open(cachefile, 'w') as f: f.write(name)
def function[put_name, parameter[self, type_, id_, name]]: constant[ Write a cached name to disk. :param type_: str, "user" or "tag" :param id_: int, eg. 123456 :returns: None ] variable[cachefile] assign[=] call[name[self].filename, parameter[name[type_], name[id_]]] variable[dirname] assign[=] call[name[os].path.dirname, parameter[name[cachefile]]] <ast.Try object at 0x7da1b209dd80> with call[name[open], parameter[name[cachefile], constant[w]]] begin[:] call[name[f].write, parameter[name[name]]]
keyword[def] identifier[put_name] ( identifier[self] , identifier[type_] , identifier[id_] , identifier[name] ): literal[string] identifier[cachefile] = identifier[self] . identifier[filename] ( identifier[type_] , identifier[id_] ) identifier[dirname] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[cachefile] ) keyword[try] : identifier[os] . identifier[makedirs] ( identifier[dirname] ) keyword[except] identifier[OSError] keyword[as] identifier[e] : keyword[if] identifier[e] . identifier[errno] != identifier[errno] . identifier[EEXIST] : keyword[raise] keyword[with] identifier[open] ( identifier[cachefile] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[name] )
def put_name(self, type_, id_, name): """ Write a cached name to disk. :param type_: str, "user" or "tag" :param id_: int, eg. 123456 :returns: None """ cachefile = self.filename(type_, id_) dirname = os.path.dirname(cachefile) try: os.makedirs(dirname) # depends on [control=['try'], data=[]] except OSError as e: if e.errno != errno.EEXIST: raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] with open(cachefile, 'w') as f: f.write(name) # depends on [control=['with'], data=['f']]
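The try/except around makedirs is the pre-Python-3.2 idiom for surviving a race where two processes create the cache directory at once; on modern Python, os.makedirs(dirname, exist_ok=True) is equivalent. A hedged call sketch, assuming cache is an instance of the owning class and self.filename() maps the pair to a path under a cache root:

cache.put_name('user', 123456, 'alice')
# writes the string "alice" to something like <cache_root>/user/123456,
# creating intermediate directories on first use (exact layout depends on filename())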
def plot_grid(grid_arcsec, array, units, kpc_per_arcsec, pointsize, zoom_offset_arcsec): """Plot a grid of points over the array of data on the figure. Parameters ---------- grid_arcsec : ndarray or data.array.grids.RegularGrid A grid of (y,x) coordinates in arc-seconds which may be plotted over the array. array : data.array.scaled_array.ScaledArray The 2D array of data which is plotted. units : str The units of the y / x axis of the plots, in arc-seconds ('arcsec') or kiloparsecs ('kpc'). kpc_per_arcsec : float or None The conversion factor between arc-seconds and kiloparsecs, required to plot the units in kpc. pointsize : int The size of the points plotted to show the grid. """ if grid_arcsec is not None: if zoom_offset_arcsec is not None: grid_arcsec -= zoom_offset_arcsec grid_units = convert_grid_units(grid_arcsec=grid_arcsec, array=array, units=units, kpc_per_arcsec=kpc_per_arcsec) plt.scatter(y=np.asarray(grid_units[:, 0]), x=np.asarray(grid_units[:, 1]), s=pointsize, c='k')
def function[plot_grid, parameter[grid_arcsec, array, units, kpc_per_arcsec, pointsize, zoom_offset_arcsec]]: constant[Plot a grid of points over the array of data on the figure. Parameters ---------- grid_arcsec : ndarray or data.array.grids.RegularGrid A grid of (y,x) coordinates in arc-seconds which may be plotted over the array. array : data.array.scaled_array.ScaledArray The 2D array of data which is plotted. units : str The units of the y / x axis of the plots, in arc-seconds ('arcsec') or kiloparsecs ('kpc'). kpc_per_arcsec : float or None The conversion factor between arc-seconds and kiloparsecs, required to plot the units in kpc. pointsize : int The size of the points plotted to show the grid. ] if compare[name[grid_arcsec] is_not constant[None]] begin[:] if compare[name[zoom_offset_arcsec] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da18f813a00> variable[grid_units] assign[=] call[name[convert_grid_units], parameter[]] call[name[plt].scatter, parameter[]]
keyword[def] identifier[plot_grid] ( identifier[grid_arcsec] , identifier[array] , identifier[units] , identifier[kpc_per_arcsec] , identifier[pointsize] , identifier[zoom_offset_arcsec] ): literal[string] keyword[if] identifier[grid_arcsec] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[zoom_offset_arcsec] keyword[is] keyword[not] keyword[None] : identifier[grid_arcsec] -= identifier[zoom_offset_arcsec] identifier[grid_units] = identifier[convert_grid_units] ( identifier[grid_arcsec] = identifier[grid_arcsec] , identifier[array] = identifier[array] , identifier[units] = identifier[units] , identifier[kpc_per_arcsec] = identifier[kpc_per_arcsec] ) identifier[plt] . identifier[scatter] ( identifier[y] = identifier[np] . identifier[asarray] ( identifier[grid_units] [:, literal[int] ]), identifier[x] = identifier[np] . identifier[asarray] ( identifier[grid_units] [:, literal[int] ]), identifier[s] = identifier[pointsize] , identifier[c] = literal[string] )
def plot_grid(grid_arcsec, array, units, kpc_per_arcsec, pointsize, zoom_offset_arcsec): """Plot a grid of points over the array of data on the figure. Parameters ---------- grid_arcsec : ndarray or data.array.grids.RegularGrid A grid of (y,x) coordinates in arc-seconds which may be plotted over the array. array : data.array.scaled_array.ScaledArray The 2D array of data which is plotted. units : str The units of the y / x axis of the plots, in arc-seconds ('arcsec') or kiloparsecs ('kpc'). kpc_per_arcsec : float or None The conversion factor between arc-seconds and kiloparsecs, required to plot the units in kpc. pointsize : int The size of the points plotted to show the grid. """ if grid_arcsec is not None: if zoom_offset_arcsec is not None: grid_arcsec -= zoom_offset_arcsec # depends on [control=['if'], data=['zoom_offset_arcsec']] grid_units = convert_grid_units(grid_arcsec=grid_arcsec, array=array, units=units, kpc_per_arcsec=kpc_per_arcsec) plt.scatter(y=np.asarray(grid_units[:, 0]), x=np.asarray(grid_units[:, 1]), s=pointsize, c='k') # depends on [control=['if'], data=['grid_arcsec']]
def convert_data_to_dtype(data, data_type, mot_float_type='float'): """Convert the given input data to the correct numpy type. Args: data (ndarray): The value to convert to the correct numpy type data_type (str): the data type we need to convert the data to mot_float_type (str): the data type of the current ``mot_float_type`` Returns: ndarray: the input data but then converted to the desired numpy data type """ scalar_dtype = ctype_to_dtype(data_type, mot_float_type) if isinstance(data, numbers.Number): data = scalar_dtype(data) if is_vector_ctype(data_type): shape = data.shape dtype = ctype_to_dtype(data_type, mot_float_type) ve = np.zeros(shape[:-1], dtype=dtype) if len(shape) == 1: for vector_ind in range(shape[0]): ve[0][vector_ind] = data[vector_ind] elif len(shape) == 2: for i in range(data.shape[0]): for vector_ind in range(data.shape[1]): ve[i][vector_ind] = data[i, vector_ind] elif len(shape) == 3: for i in range(data.shape[0]): for j in range(data.shape[1]): for vector_ind in range(data.shape[2]): ve[i, j][vector_ind] = data[i, j, vector_ind] return np.require(ve, requirements=['C', 'A', 'O']) return np.require(data, scalar_dtype, ['C', 'A', 'O'])
def function[convert_data_to_dtype, parameter[data, data_type, mot_float_type]]: constant[Convert the given input data to the correct numpy type. Args: data (ndarray): The value to convert to the correct numpy type data_type (str): the data type we need to convert the data to mot_float_type (str): the data type of the current ``mot_float_type`` Returns: ndarray: the input data but then converted to the desired numpy data type ] variable[scalar_dtype] assign[=] call[name[ctype_to_dtype], parameter[name[data_type], name[mot_float_type]]] if call[name[isinstance], parameter[name[data], name[numbers].Number]] begin[:] variable[data] assign[=] call[name[scalar_dtype], parameter[name[data]]] if call[name[is_vector_ctype], parameter[name[data_type]]] begin[:] variable[shape] assign[=] name[data].shape variable[dtype] assign[=] call[name[ctype_to_dtype], parameter[name[data_type], name[mot_float_type]]] variable[ve] assign[=] call[name[np].zeros, parameter[call[name[shape]][<ast.Slice object at 0x7da18f00fe80>]]] if compare[call[name[len], parameter[name[shape]]] equal[==] constant[1]] begin[:] for taget[name[vector_ind]] in starred[call[name[range], parameter[call[name[shape]][constant[0]]]]] begin[:] call[call[name[ve]][constant[0]]][name[vector_ind]] assign[=] call[name[data]][name[vector_ind]] return[call[name[np].require, parameter[name[ve]]]] return[call[name[np].require, parameter[name[data], name[scalar_dtype], list[[<ast.Constant object at 0x7da18bcc8df0>, <ast.Constant object at 0x7da18bcc8700>, <ast.Constant object at 0x7da18bcca7d0>]]]]]
keyword[def] identifier[convert_data_to_dtype] ( identifier[data] , identifier[data_type] , identifier[mot_float_type] = literal[string] ): literal[string] identifier[scalar_dtype] = identifier[ctype_to_dtype] ( identifier[data_type] , identifier[mot_float_type] ) keyword[if] identifier[isinstance] ( identifier[data] , identifier[numbers] . identifier[Number] ): identifier[data] = identifier[scalar_dtype] ( identifier[data] ) keyword[if] identifier[is_vector_ctype] ( identifier[data_type] ): identifier[shape] = identifier[data] . identifier[shape] identifier[dtype] = identifier[ctype_to_dtype] ( identifier[data_type] , identifier[mot_float_type] ) identifier[ve] = identifier[np] . identifier[zeros] ( identifier[shape] [:- literal[int] ], identifier[dtype] = identifier[dtype] ) keyword[if] identifier[len] ( identifier[shape] )== literal[int] : keyword[for] identifier[vector_ind] keyword[in] identifier[range] ( identifier[shape] [ literal[int] ]): identifier[ve] [ literal[int] ][ identifier[vector_ind] ]= identifier[data] [ identifier[vector_ind] ] keyword[elif] identifier[len] ( identifier[shape] )== literal[int] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[data] . identifier[shape] [ literal[int] ]): keyword[for] identifier[vector_ind] keyword[in] identifier[range] ( identifier[data] . identifier[shape] [ literal[int] ]): identifier[ve] [ identifier[i] ][ identifier[vector_ind] ]= identifier[data] [ identifier[i] , identifier[vector_ind] ] keyword[elif] identifier[len] ( identifier[shape] )== literal[int] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[data] . identifier[shape] [ literal[int] ]): keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[data] . identifier[shape] [ literal[int] ]): keyword[for] identifier[vector_ind] keyword[in] identifier[range] ( identifier[data] . identifier[shape] [ literal[int] ]): identifier[ve] [ identifier[i] , identifier[j] ][ identifier[vector_ind] ]= identifier[data] [ identifier[i] , identifier[j] , identifier[vector_ind] ] keyword[return] identifier[np] . identifier[require] ( identifier[ve] , identifier[requirements] =[ literal[string] , literal[string] , literal[string] ]) keyword[return] identifier[np] . identifier[require] ( identifier[data] , identifier[scalar_dtype] ,[ literal[string] , literal[string] , literal[string] ])
def convert_data_to_dtype(data, data_type, mot_float_type='float'): """Convert the given input data to the correct numpy type. Args: data (ndarray): The value to convert to the correct numpy type data_type (str): the data type we need to convert the data to mot_float_type (str): the data type of the current ``mot_float_type`` Returns: ndarray: the input data but then converted to the desired numpy data type """ scalar_dtype = ctype_to_dtype(data_type, mot_float_type) if isinstance(data, numbers.Number): data = scalar_dtype(data) # depends on [control=['if'], data=[]] if is_vector_ctype(data_type): shape = data.shape dtype = ctype_to_dtype(data_type, mot_float_type) ve = np.zeros(shape[:-1], dtype=dtype) if len(shape) == 1: for vector_ind in range(shape[0]): ve[0][vector_ind] = data[vector_ind] # depends on [control=['for'], data=['vector_ind']] # depends on [control=['if'], data=[]] elif len(shape) == 2: for i in range(data.shape[0]): for vector_ind in range(data.shape[1]): ve[i][vector_ind] = data[i, vector_ind] # depends on [control=['for'], data=['vector_ind']] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] elif len(shape) == 3: for i in range(data.shape[0]): for j in range(data.shape[1]): for vector_ind in range(data.shape[2]): ve[i, j][vector_ind] = data[i, j, vector_ind] # depends on [control=['for'], data=['vector_ind']] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] return np.require(ve, requirements=['C', 'A', 'O']) # depends on [control=['if'], data=[]] return np.require(data, scalar_dtype, ['C', 'A', 'O'])
def bytes2NativeString(x, encoding='utf-8'): """ Convert C{bytes} to a native C{str}. On Python 3 and higher, str and bytes are not equivalent. In this case, decode the bytes, and return a native string. On Python 2 and lower, str and bytes are equivalent. In this case, just return the native string. @param x: a string of type C{bytes} @param encoding: an optional codec, default: 'utf-8' @return: a string of type C{str} """ if isinstance(x, bytes) and str != bytes: return x.decode(encoding) return x
def function[bytes2NativeString, parameter[x, encoding]]: constant[ Convert C{bytes} to a native C{str}. On Python 3 and higher, str and bytes are not equivalent. In this case, decode the bytes, and return a native string. On Python 2 and lower, str and bytes are equivalent. In this case, just return the native string. @param x: a string of type C{bytes} @param encoding: an optional codec, default: 'utf-8' @return: a string of type C{str} ] if <ast.BoolOp object at 0x7da18c4cd6c0> begin[:] return[call[name[x].decode, parameter[name[encoding]]]] return[name[x]]
keyword[def] identifier[bytes2NativeString] ( identifier[x] , identifier[encoding] = literal[string] ): literal[string] keyword[if] identifier[isinstance] ( identifier[x] , identifier[bytes] ) keyword[and] identifier[str] != identifier[bytes] : keyword[return] identifier[x] . identifier[decode] ( identifier[encoding] ) keyword[return] identifier[x]
def bytes2NativeString(x, encoding='utf-8'): """ Convert C{bytes} to a native C{str}. On Python 3 and higher, str and bytes are not equivalent. In this case, decode the bytes, and return a native string. On Python 2 and lower, str and bytes are equivalent. In this case, just return the native string. @param x: a string of type C{bytes} @param encoding: an optional codec, default: 'utf-8' @return: a string of type C{str} """ if isinstance(x, bytes) and str != bytes: return x.decode(encoding) # depends on [control=['if'], data=[]] return x
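A quick Python 3 demonstration: bytes get decoded, anything already a str passes through untouched.

assert bytes2NativeString(b'caf\xc3\xa9') == 'café'
assert bytes2NativeString('already text') == 'already text'
assert bytes2NativeString(b'hi', encoding='ascii') == 'hi'   # alternate codec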
def Print(x, data, message, **kwargs): # pylint: disable=invalid-name """Call tf.Print. Args: x: a Tensor. data: a list of Tensor message: a string **kwargs: keyword arguments to tf.Print Returns: a Tensor which is identical in value to x """ return PrintOperation(x, data, message, **kwargs).outputs[0]
def function[Print, parameter[x, data, message]]: constant[Call tf.Print. Args: x: a Tensor. data: a list of Tensor message: a string **kwargs: keyword arguments to tf.Print Returns: a Tensor which is identical in value to x ] return[call[call[name[PrintOperation], parameter[name[x], name[data], name[message]]].outputs][constant[0]]]
keyword[def] identifier[Print] ( identifier[x] , identifier[data] , identifier[message] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[PrintOperation] ( identifier[x] , identifier[data] , identifier[message] ,** identifier[kwargs] ). identifier[outputs] [ literal[int] ]
def Print(x, data, message, **kwargs): # pylint: disable=invalid-name 'Call tf.Print.\n\n Args:\n x: a Tensor.\n data: a list of Tensor\n message: a string\n **kwargs: keyword arguments to tf.Print\n Returns:\n a Tensor which is identical in value to x\n ' return PrintOperation(x, data, message, **kwargs).outputs[0]
def get_activities(self, before=None, after=None, limit=None): """ Get activities for authenticated user sorted by newest first. http://strava.github.io/api/v3/activities/ :param before: Result will start with activities whose start date is before specified date. (UTC) :type before: datetime.datetime or str or None :param after: Result will start with activities whose start date is after specified value. (UTC) :type after: datetime.datetime or str or None :param limit: How many maximum activities to return. :type limit: int or None :return: An iterator of :class:`stravalib.model.Activity` objects. :rtype: :class:`BatchedResultsIterator` """ if before: before = self._utc_datetime_to_epoch(before) if after: after = self._utc_datetime_to_epoch(after) params = dict(before=before, after=after) result_fetcher = functools.partial(self.protocol.get, '/athlete/activities', **params) return BatchedResultsIterator(entity=model.Activity, bind_client=self, result_fetcher=result_fetcher, limit=limit)
def function[get_activities, parameter[self, before, after, limit]]: constant[ Get activities for authenticated user sorted by newest first. http://strava.github.io/api/v3/activities/ :param before: Result will start with activities whose start date is before specified date. (UTC) :type before: datetime.datetime or str or None :param after: Result will start with activities whose start date is after specified value. (UTC) :type after: datetime.datetime or str or None :param limit: How many maximum activities to return. :type limit: int or None :return: An iterator of :class:`stravalib.model.Activity` objects. :rtype: :class:`BatchedResultsIterator` ] if name[before] begin[:] variable[before] assign[=] call[name[self]._utc_datetime_to_epoch, parameter[name[before]]] if name[after] begin[:] variable[after] assign[=] call[name[self]._utc_datetime_to_epoch, parameter[name[after]]] variable[params] assign[=] call[name[dict], parameter[]] variable[result_fetcher] assign[=] call[name[functools].partial, parameter[name[self].protocol.get, constant[/athlete/activities]]] return[call[name[BatchedResultsIterator], parameter[]]]
keyword[def] identifier[get_activities] ( identifier[self] , identifier[before] = keyword[None] , identifier[after] = keyword[None] , identifier[limit] = keyword[None] ): literal[string] keyword[if] identifier[before] : identifier[before] = identifier[self] . identifier[_utc_datetime_to_epoch] ( identifier[before] ) keyword[if] identifier[after] : identifier[after] = identifier[self] . identifier[_utc_datetime_to_epoch] ( identifier[after] ) identifier[params] = identifier[dict] ( identifier[before] = identifier[before] , identifier[after] = identifier[after] ) identifier[result_fetcher] = identifier[functools] . identifier[partial] ( identifier[self] . identifier[protocol] . identifier[get] , literal[string] , ** identifier[params] ) keyword[return] identifier[BatchedResultsIterator] ( identifier[entity] = identifier[model] . identifier[Activity] , identifier[bind_client] = identifier[self] , identifier[result_fetcher] = identifier[result_fetcher] , identifier[limit] = identifier[limit] )
def get_activities(self, before=None, after=None, limit=None): """ Get activities for authenticated user sorted by newest first. http://strava.github.io/api/v3/activities/ :param before: Result will start with activities whose start date is before specified date. (UTC) :type before: datetime.datetime or str or None :param after: Result will start with activities whose start date is after specified value. (UTC) :type after: datetime.datetime or str or None :param limit: How many maximum activities to return. :type limit: int or None :return: An iterator of :class:`stravalib.model.Activity` objects. :rtype: :class:`BatchedResultsIterator` """ if before: before = self._utc_datetime_to_epoch(before) # depends on [control=['if'], data=[]] if after: after = self._utc_datetime_to_epoch(after) # depends on [control=['if'], data=[]] params = dict(before=before, after=after) result_fetcher = functools.partial(self.protocol.get, '/athlete/activities', **params) return BatchedResultsIterator(entity=model.Activity, bind_client=self, result_fetcher=result_fetcher, limit=limit)
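A hedged stravalib-style call; the access token is elided, and the after argument may be a datetime or an ISO string per the docstring above. The iterator fetches pages lazily, so the limit caps how many activities are actually pulled from the API.

from stravalib import Client

client = Client(access_token='...')   # token elided on purpose
for activity in client.get_activities(after='2023-01-01T00:00:00Z', limit=10):
    print(activity.id, activity.name)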
def get_nginx_configuration_spec(port_spec_dict, docker_bridge_ip): """This function will take in a port spec as specified by the port_spec compiler and will output an nginx web proxy config string. This string can then be written to a file and used when running nginx """ nginx_http_config, nginx_stream_config = "", "" for port_spec in port_spec_dict['nginx']: if port_spec['type'] == 'http': nginx_http_config += _nginx_http_spec(port_spec, docker_bridge_ip) elif port_spec['type'] == 'stream': nginx_stream_config += _nginx_stream_spec(port_spec, docker_bridge_ip) return {'http': nginx_http_config, 'stream': nginx_stream_config}
def function[get_nginx_configuration_spec, parameter[port_spec_dict, docker_bridge_ip]]: constant[This function will take in a port spec as specified by the port_spec compiler and will output an nginx web proxy config string. This string can then be written to a file and used when running nginx ] <ast.Tuple object at 0x7da207f981c0> assign[=] tuple[[<ast.Constant object at 0x7da207f9ad40>, <ast.Constant object at 0x7da207f9b490>]] for taget[name[port_spec]] in starred[call[name[port_spec_dict]][constant[nginx]]] begin[:] if compare[call[name[port_spec]][constant[type]] equal[==] constant[http]] begin[:] <ast.AugAssign object at 0x7da207f99d20> return[dictionary[[<ast.Constant object at 0x7da20e961600>, <ast.Constant object at 0x7da20e963df0>], [<ast.Name object at 0x7da20e962b90>, <ast.Name object at 0x7da20e960610>]]]
keyword[def] identifier[get_nginx_configuration_spec] ( identifier[port_spec_dict] , identifier[docker_bridge_ip] ): literal[string] identifier[nginx_http_config] , identifier[nginx_stream_config] = literal[string] , literal[string] keyword[for] identifier[port_spec] keyword[in] identifier[port_spec_dict] [ literal[string] ]: keyword[if] identifier[port_spec] [ literal[string] ]== literal[string] : identifier[nginx_http_config] += identifier[_nginx_http_spec] ( identifier[port_spec] , identifier[docker_bridge_ip] ) keyword[elif] identifier[port_spec] [ literal[string] ]== literal[string] : identifier[nginx_stream_config] += identifier[_nginx_stream_spec] ( identifier[port_spec] , identifier[docker_bridge_ip] ) keyword[return] { literal[string] : identifier[nginx_http_config] , literal[string] : identifier[nginx_stream_config] }
def get_nginx_configuration_spec(port_spec_dict, docker_bridge_ip): """This function will take in a port spec as specified by the port_spec compiler and will output an nginx web proxy config string. This string can then be written to a file and used when running nginx """ (nginx_http_config, nginx_stream_config) = ('', '') for port_spec in port_spec_dict['nginx']: if port_spec['type'] == 'http': nginx_http_config += _nginx_http_spec(port_spec, docker_bridge_ip) # depends on [control=['if'], data=[]] elif port_spec['type'] == 'stream': nginx_stream_config += _nginx_stream_spec(port_spec, docker_bridge_ip) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['port_spec']] return {'http': nginx_http_config, 'stream': nginx_stream_config}
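The expected input shape, inferred from the loop above: the compiled port spec carries an 'nginx' list whose entries are typed 'http' or 'stream'. The per-entry fields consumed by the two _nginx_*_spec helpers are not shown here, so the dicts below are deliberately minimal and the snippet is a schematic rather than an executable example.

port_spec_dict = {
    'nginx': [
        {'type': 'http'},     # plus whatever fields _nginx_http_spec reads
        {'type': 'stream'},   # plus whatever fields _nginx_stream_spec reads
    ],
}
configs = get_nginx_configuration_spec(port_spec_dict, '172.17.0.1')
# configs['http'] and configs['stream'] hold the two nginx config fragments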
async def get_connections(self, data=True): """Return connections for all the agents in the slave environments. This is a managing function for :meth:`~creamas.mp.MultiEnvironment.get_connections`. """ return await self.menv.get_connections(data=data, as_coro=True)
<ast.AsyncFunctionDef object at 0x7da2044c1150>
keyword[async] keyword[def] identifier[get_connections] ( identifier[self] , identifier[data] = keyword[True] ): literal[string] keyword[return] keyword[await] identifier[self] . identifier[menv] . identifier[get_connections] ( identifier[data] = identifier[data] , identifier[as_coro] = keyword[True] )
async def get_connections(self, data=True): """Return connections for all the agents in the slave environments. This is a managing function for :meth:`~creamas.mp.MultiEnvironment.get_connections`. """ return await self.menv.get_connections(data=data, as_coro=True)
def search_id(id, medium, credentials): """Grabs the [medium] with the given id from MyAnimeList as a [medium] object. :param id The id of the [medium]. :param medium Anime or manga (tokens.Medium.ANIME or tokens.Medium.MANGA). :return The [medium] object with id requested, or None if no such [medium] exists. :raise ValueError For bad arguments. """ helpers.check_creds(credentials, header) if id <= 0 or not float(id).is_integer(): raise ValueError(constants.INVALID_ID) scrape_query = helpers.get_scrape_url(id, medium) if scrape_query is None: raise ValueError(constants.INVALID_MEDIUM) search_resp = requests.get(scrape_query, headers=header) scrape_soup = BeautifulSoup(search_resp.text, 'html.parser') #inspect element on an anime page, you'll see where this scrape is #coming from. query = scrape_soup.find('span', {'itemprop':'name'}) if query is None: return helpers.reschedule(search_id, constants.DEFAULT_WAIT_SECS, id, medium) matches = search(query.text, medium, credentials) index = [match.id for match in matches].index(str(id)) if index != -1: return matches[index] else: return None
def function[search_id, parameter[id, medium, credentials]]: constant[Grabs the [medium] with the given id from MyAnimeList as a [medium] object. :param id The id of the [medium]. :param medium Anime or manga (tokens.Medium.ANIME or tokens.Medium.MANGA). :return The [medium] object with id requested, or None if no such [medium] exists. :raise ValueError For bad arguments. ] call[name[helpers].check_creds, parameter[name[credentials], name[header]]] if <ast.BoolOp object at 0x7da1b2380460> begin[:] <ast.Raise object at 0x7da1b2381960> variable[scrape_query] assign[=] call[name[helpers].get_scrape_url, parameter[name[id], name[medium]]] if compare[name[scrape_query] is constant[None]] begin[:] <ast.Raise object at 0x7da1b2381de0> variable[search_resp] assign[=] call[name[requests].get, parameter[name[scrape_query]]] variable[scrape_soup] assign[=] call[name[BeautifulSoup], parameter[name[search_resp].text, constant[html.parser]]] variable[query] assign[=] call[name[scrape_soup].find, parameter[constant[span], dictionary[[<ast.Constant object at 0x7da1b2555090>], [<ast.Constant object at 0x7da1b25546d0>]]]] if compare[name[query] is constant[None]] begin[:] return[call[name[helpers].reschedule, parameter[name[search_id], name[constants].DEFAULT_WAIT_SECS, name[id], name[medium]]]] variable[matches] assign[=] call[name[search], parameter[name[query].text, name[medium], name[credentials]]] variable[index] assign[=] call[<ast.ListComp object at 0x7da1b25550c0>.index, parameter[call[name[str], parameter[name[id]]]]] if compare[name[index] not_equal[!=] <ast.UnaryOp object at 0x7da1b2555c90>] begin[:] return[call[name[matches]][name[index]]]
keyword[def] identifier[search_id] ( identifier[id] , identifier[medium] , identifier[credentials] ): literal[string] identifier[helpers] . identifier[check_creds] ( identifier[credentials] , identifier[header] ) keyword[if] identifier[id] <= literal[int] keyword[or] keyword[not] identifier[float] ( identifier[id] ). identifier[is_integer] (): keyword[raise] identifier[ValueError] ( identifier[constants] . identifier[INVALID_ID] ) identifier[scrape_query] = identifier[helpers] . identifier[get_scrape_url] ( identifier[id] , identifier[medium] ) keyword[if] identifier[scrape_query] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( identifier[constants] . identifier[INVALID_MEDIUM] ) identifier[search_resp] = identifier[requests] . identifier[get] ( identifier[scrape_query] , identifier[headers] = identifier[header] ) identifier[scrape_soup] = identifier[BeautifulSoup] ( identifier[search_resp] . identifier[text] , literal[string] ) identifier[query] = identifier[scrape_soup] . identifier[find] ( literal[string] ,{ literal[string] : literal[string] }) keyword[if] identifier[query] keyword[is] keyword[None] : keyword[return] identifier[helpers] . identifier[reschedule] ( identifier[search_id] , identifier[constants] . identifier[DEFAULT_WAIT_SECS] , identifier[id] , identifier[medium] ) identifier[matches] = identifier[search] ( identifier[query] . identifier[text] , identifier[medium] , identifier[credentials] ) identifier[index] =[ identifier[match] . identifier[id] keyword[for] identifier[match] keyword[in] identifier[matches] ]. identifier[index] ( identifier[str] ( identifier[id] )) keyword[if] identifier[index] !=- literal[int] : keyword[return] identifier[matches] [ identifier[index] ] keyword[else] : keyword[return] keyword[None]
def search_id(id, medium, credentials): """Grabs the [medium] with the given id from MyAnimeList as a [medium] object. :param id The id of the [medium]. :param medium Anime or manga (tokens.Medium.ANIME or tokens.Medium.MANGA). :return The [medium] object with id requested, or None if no such [medium] exists. :raise ValueError For bad arguments. """ helpers.check_creds(credentials, header) if id <= 0 or not float(id).is_integer(): raise ValueError(constants.INVALID_ID) # depends on [control=['if'], data=[]] scrape_query = helpers.get_scrape_url(id, medium) if scrape_query is None: raise ValueError(constants.INVALID_MEDIUM) # depends on [control=['if'], data=[]] search_resp = requests.get(scrape_query, headers=header) scrape_soup = BeautifulSoup(search_resp.text, 'html.parser') #inspect element on an anime page, you'll see where this scrape is #coming from. query = scrape_soup.find('span', {'itemprop': 'name'}) if query is None: return helpers.reschedule(search_id, constants.DEFAULT_WAIT_SECS, id, medium) # depends on [control=['if'], data=[]] matches = search(query.text, medium, credentials) index = [match.id for match in matches].index(str(id)) if index != -1: return matches[index] # depends on [control=['if'], data=['index']] else: return None
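A hedged usage sketch for search_id: only the tokens.Medium constants are taken from the docstring; `credentials` is a placeholder for whatever helpers.check_creds expects.

# Look up the anime with id 1; returns a [medium] object or None.
anime = search_id(1, tokens.Medium.ANIME, credentials)
if anime is not None:
    print(anime.id)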
def cli(env, volume_id, capacity, tier, upgrade): """Order snapshot space for a file storage volume.""" file_manager = SoftLayer.FileStorageManager(env.client) if tier is not None: tier = float(tier) try: order = file_manager.order_snapshot_space( volume_id, capacity=capacity, tier=tier, upgrade=upgrade ) except ValueError as ex: raise exceptions.ArgumentError(str(ex)) if 'placedOrder' in order.keys(): click.echo("Order #{0} placed successfully!".format( order['placedOrder']['id'])) for item in order['placedOrder']['items']: click.echo(" > %s" % item['description']) if 'status' in order['placedOrder'].keys(): click.echo(" > Order status: %s" % order['placedOrder']['status']) else: click.echo("Order could not be placed! Please verify your options " + "and try again.")
def function[cli, parameter[env, volume_id, capacity, tier, upgrade]]: constant[Order snapshot space for a file storage volume.] variable[file_manager] assign[=] call[name[SoftLayer].FileStorageManager, parameter[name[env].client]] if compare[name[tier] is_not constant[None]] begin[:] variable[tier] assign[=] call[name[float], parameter[name[tier]]] <ast.Try object at 0x7da20c9912d0> if compare[constant[placedOrder] in call[name[order].keys, parameter[]]] begin[:] call[name[click].echo, parameter[call[constant[Order #{0} placed successfully!].format, parameter[call[call[name[order]][constant[placedOrder]]][constant[id]]]]]] for taget[name[item]] in starred[call[call[name[order]][constant[placedOrder]]][constant[items]]] begin[:] call[name[click].echo, parameter[binary_operation[constant[ > %s] <ast.Mod object at 0x7da2590d6920> call[name[item]][constant[description]]]]] if compare[constant[status] in call[call[name[order]][constant[placedOrder]].keys, parameter[]]] begin[:] call[name[click].echo, parameter[binary_operation[constant[ > Order status: %s] <ast.Mod object at 0x7da2590d6920> call[call[name[order]][constant[placedOrder]]][constant[status]]]]]
keyword[def] identifier[cli] ( identifier[env] , identifier[volume_id] , identifier[capacity] , identifier[tier] , identifier[upgrade] ): literal[string] identifier[file_manager] = identifier[SoftLayer] . identifier[FileStorageManager] ( identifier[env] . identifier[client] ) keyword[if] identifier[tier] keyword[is] keyword[not] keyword[None] : identifier[tier] = identifier[float] ( identifier[tier] ) keyword[try] : identifier[order] = identifier[file_manager] . identifier[order_snapshot_space] ( identifier[volume_id] , identifier[capacity] = identifier[capacity] , identifier[tier] = identifier[tier] , identifier[upgrade] = identifier[upgrade] ) keyword[except] identifier[ValueError] keyword[as] identifier[ex] : keyword[raise] identifier[exceptions] . identifier[ArgumentError] ( identifier[str] ( identifier[ex] )) keyword[if] literal[string] keyword[in] identifier[order] . identifier[keys] (): identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[order] [ literal[string] ][ literal[string] ])) keyword[for] identifier[item] keyword[in] identifier[order] [ literal[string] ][ literal[string] ]: identifier[click] . identifier[echo] ( literal[string] % identifier[item] [ literal[string] ]) keyword[if] literal[string] keyword[in] identifier[order] [ literal[string] ]. identifier[keys] (): identifier[click] . identifier[echo] ( literal[string] % identifier[order] [ literal[string] ][ literal[string] ]) keyword[else] : identifier[click] . identifier[echo] ( literal[string] + literal[string] )
def cli(env, volume_id, capacity, tier, upgrade): """Order snapshot space for a file storage volume.""" file_manager = SoftLayer.FileStorageManager(env.client) if tier is not None: tier = float(tier) # depends on [control=['if'], data=['tier']] try: order = file_manager.order_snapshot_space(volume_id, capacity=capacity, tier=tier, upgrade=upgrade) # depends on [control=['try'], data=[]] except ValueError as ex: raise exceptions.ArgumentError(str(ex)) # depends on [control=['except'], data=['ex']] if 'placedOrder' in order.keys(): click.echo('Order #{0} placed successfully!'.format(order['placedOrder']['id'])) for item in order['placedOrder']['items']: click.echo(' > %s' % item['description']) # depends on [control=['for'], data=['item']] if 'status' in order['placedOrder'].keys(): click.echo(' > Order status: %s' % order['placedOrder']['status']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: click.echo('Order could not be placed! Please verify your options ' + 'and try again.')
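To make the branch on 'placedOrder' concrete, here is a toy response in the shape the code expects; the shape is inferred from the code above, not from SoftLayer's API docs, and all field values are invented.

order = {
    'placedOrder': {
        'id': 12345,
        'items': [{'description': '10 GB Snapshot Space'}],
        'status': 'PENDING_APPROVAL',
    }
}
# Mirrors the success path of cli():
print("Order #{0} placed successfully!".format(order['placedOrder']['id']))
for item in order['placedOrder']['items']:
    print(" > %s" % item['description'])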
def run(self, positionals=None):
    '''run the entire helper procedure, including:

       - start: initialize the helper, collection preferences
       - record: record any relevant features for the environment / session
       - interact: interact with the user for additional information
       - submit: submit the completed request

       Each of the above functions for a helper can determine global collection
       preferences from the system helpme.cfg in the module root. After
       performing global actions, each function then calls a class specific
       function of the same name (e.g., start calls _start) that is implemented
       by the helper class to do custom operations for the helper.

    '''
    # Step 0: Each run session is given a fun name
    self.run_id = RobotNamer().generate()

    # Step 1: get config steps
    steps = self.config._sections[self.name]

    # Step 2: Start the helper (announce and run start, which is init code)
    self.start(positionals)

    # Step 3: Iterate through flow, check each step for known record/prompt,
    # and collect outputs appropriately
    for step, content in steps.items():
        self.collect(step, content)

    # Step 4: When data collected, pass data structures to submit
    self.submit()
def function[run, parameter[self, positionals]]: constant[run the entire helper procedure, including: - start: initialize the helper, collection preferences - record: record any relevant features for the environment / session - interact: interact with the user for additional informatoin - submit: submit the completed request Each of the above functions for a helper can determine global collection preferences from the system helpme.cfg in the module root. After performing global actions, each function then calls a class specific function of the same name (e.g., start calls _start) that is implemented by the helper class to do custom operations for the helper. ] name[self].run_id assign[=] call[call[name[RobotNamer], parameter[]].generate, parameter[]] variable[steps] assign[=] call[name[self].config._sections][name[self].name] call[name[self].start, parameter[name[positionals]]] for taget[tuple[[<ast.Name object at 0x7da20e956770>, <ast.Name object at 0x7da20e957e50>]]] in starred[call[name[steps].items, parameter[]]] begin[:] call[name[self].collect, parameter[name[step], name[content]]] call[name[self].submit, parameter[]]
keyword[def] identifier[run] ( identifier[self] , identifier[positionals] = keyword[None] ): literal[string] identifier[self] . identifier[run_id] = identifier[RobotNamer] (). identifier[generate] () identifier[steps] = identifier[self] . identifier[config] . identifier[_sections] [ identifier[self] . identifier[name] ] identifier[self] . identifier[start] ( identifier[positionals] ) keyword[for] identifier[step] , identifier[content] keyword[in] identifier[steps] . identifier[items] (): identifier[self] . identifier[collect] ( identifier[step] , identifier[content] ) identifier[self] . identifier[submit] ()
def run(self, positionals=None):
    """run the entire helper procedure, including:

       - start: initialize the helper, collection preferences
       - record: record any relevant features for the environment / session
       - interact: interact with the user for additional information
       - submit: submit the completed request

       Each of the above functions for a helper can determine global collection
       preferences from the system helpme.cfg in the module root. After
       performing global actions, each function then calls a class specific
       function of the same name (e.g., start calls _start) that is implemented
       by the helper class to do custom operations for the helper.
    """
    # Step 0: Each run session is given a fun name
    self.run_id = RobotNamer().generate()
    # Step 1: get config steps
    steps = self.config._sections[self.name]
    # Step 2: Start the helper (announce and run start, which is init code)
    self.start(positionals)
    # Step 3: Iterate through flow, check each step for known record/prompt,
    # and collect outputs appropriately
    for (step, content) in steps.items():
        self.collect(step, content) # depends on [control=['for'], data=[]]
    # Step 4: When data collected, pass data structures to submit
    self.submit()
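run() walks the ConfigParser section named after the helper; a self-contained sketch of that pattern, with a made-up section name and keys:

import configparser

cfg = configparser.ConfigParser()
cfg.read_string("[github]\nrecord_environment = true\nprompt_issue = true\n")
# _sections exposes each section as an ordered mapping of step -> content,
# which is exactly what run() iterates over.
for step, content in cfg._sections['github'].items():
    print(step, content)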
def get_measurements(region, core_info, data, extra_offset=0): """ Get the complete measurement info from likwid's region info. Args: region: The region we took a measurement in. core_info: The core information. data: The raw data. extra_offset (int): default = 0 Returns (list((region, metric, core, value))): A list of measurement tuples, a tuple contains the information about the region, the metric, the core and the actual value. """ measurements = [] clean_core_info = [x for x in core_info if x] cores = len(clean_core_info) for k in data: if k not in ["1", "Region Info", "Event", "Metric", "CPU clock"]: slot = data[k] for i in range(cores): core = core_info[i] idx = extra_offset + i if core and slot[idx]: measurements.append((region, k, core, slot[idx])) return measurements
def function[get_measurements, parameter[region, core_info, data, extra_offset]]: constant[ Get the complete measurement info from likwid's region info. Args: region: The region we took a measurement in. core_info: The core information. data: The raw data. extra_offset (int): default = 0 Returns (list((region, metric, core, value))): A list of measurement tuples, a tuple contains the information about the region, the metric, the core and the actual value. ] variable[measurements] assign[=] list[[]] variable[clean_core_info] assign[=] <ast.ListComp object at 0x7da1b1848f70> variable[cores] assign[=] call[name[len], parameter[name[clean_core_info]]] for taget[name[k]] in starred[name[data]] begin[:] if compare[name[k] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b1849f00>, <ast.Constant object at 0x7da1b18490f0>, <ast.Constant object at 0x7da1b1849480>, <ast.Constant object at 0x7da1b184a1a0>, <ast.Constant object at 0x7da1b18483a0>]]] begin[:] variable[slot] assign[=] call[name[data]][name[k]] for taget[name[i]] in starred[call[name[range], parameter[name[cores]]]] begin[:] variable[core] assign[=] call[name[core_info]][name[i]] variable[idx] assign[=] binary_operation[name[extra_offset] + name[i]] if <ast.BoolOp object at 0x7da1b184a440> begin[:] call[name[measurements].append, parameter[tuple[[<ast.Name object at 0x7da1b184be80>, <ast.Name object at 0x7da1b1848f10>, <ast.Name object at 0x7da1b184b850>, <ast.Subscript object at 0x7da1b1848550>]]]] return[name[measurements]]
keyword[def] identifier[get_measurements] ( identifier[region] , identifier[core_info] , identifier[data] , identifier[extra_offset] = literal[int] ): literal[string] identifier[measurements] =[] identifier[clean_core_info] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[core_info] keyword[if] identifier[x] ] identifier[cores] = identifier[len] ( identifier[clean_core_info] ) keyword[for] identifier[k] keyword[in] identifier[data] : keyword[if] identifier[k] keyword[not] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]: identifier[slot] = identifier[data] [ identifier[k] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[cores] ): identifier[core] = identifier[core_info] [ identifier[i] ] identifier[idx] = identifier[extra_offset] + identifier[i] keyword[if] identifier[core] keyword[and] identifier[slot] [ identifier[idx] ]: identifier[measurements] . identifier[append] (( identifier[region] , identifier[k] , identifier[core] , identifier[slot] [ identifier[idx] ])) keyword[return] identifier[measurements]
def get_measurements(region, core_info, data, extra_offset=0): """ Get the complete measurement info from likwid's region info. Args: region: The region we took a measurement in. core_info: The core information. data: The raw data. extra_offset (int): default = 0 Returns (list((region, metric, core, value))): A list of measurement tuples, a tuple contains the information about the region, the metric, the core and the actual value. """ measurements = [] clean_core_info = [x for x in core_info if x] cores = len(clean_core_info) for k in data: if k not in ['1', 'Region Info', 'Event', 'Metric', 'CPU clock']: slot = data[k] for i in range(cores): core = core_info[i] idx = extra_offset + i if core and slot[idx]: measurements.append((region, k, core, slot[idx])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=['k']] return measurements
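A toy run of get_measurements, with an input shape inferred from the parsing logic above; the metric name and values are made up.

core_info = ["Core 0", "Core 1"]
data = {
    "Event": ["..."],  # skipped by the filter on reserved keys
    "L3 bandwidth [MBytes/s]": [1203.4, 988.1],
}
print(get_measurements("main", core_info, data))
# -> [('main', 'L3 bandwidth [MBytes/s]', 'Core 0', 1203.4),
#     ('main', 'L3 bandwidth [MBytes/s]', 'Core 1', 988.1)]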
def lonlat2xyz(lon, lat): """ Convert lon / lat (radians) for the spherical triangulation into x,y,z on the unit sphere """ lons = np.array(lon) lats = np.array(lat) xs = np.cos(lats) * np.cos(lons) ys = np.cos(lats) * np.sin(lons) zs = np.sin(lats) return xs, ys, zs
def function[lonlat2xyz, parameter[lon, lat]]: constant[ Convert lon / lat (radians) for the spherical triangulation into x,y,z on the unit sphere ] variable[lons] assign[=] call[name[np].array, parameter[name[lon]]] variable[lats] assign[=] call[name[np].array, parameter[name[lat]]] variable[xs] assign[=] binary_operation[call[name[np].cos, parameter[name[lats]]] * call[name[np].cos, parameter[name[lons]]]] variable[ys] assign[=] binary_operation[call[name[np].cos, parameter[name[lats]]] * call[name[np].sin, parameter[name[lons]]]] variable[zs] assign[=] call[name[np].sin, parameter[name[lats]]] return[tuple[[<ast.Name object at 0x7da207f036d0>, <ast.Name object at 0x7da207f010f0>, <ast.Name object at 0x7da207f00400>]]]
keyword[def] identifier[lonlat2xyz] ( identifier[lon] , identifier[lat] ): literal[string] identifier[lons] = identifier[np] . identifier[array] ( identifier[lon] ) identifier[lats] = identifier[np] . identifier[array] ( identifier[lat] ) identifier[xs] = identifier[np] . identifier[cos] ( identifier[lats] )* identifier[np] . identifier[cos] ( identifier[lons] ) identifier[ys] = identifier[np] . identifier[cos] ( identifier[lats] )* identifier[np] . identifier[sin] ( identifier[lons] ) identifier[zs] = identifier[np] . identifier[sin] ( identifier[lats] ) keyword[return] identifier[xs] , identifier[ys] , identifier[zs]
def lonlat2xyz(lon, lat): """ Convert lon / lat (radians) for the spherical triangulation into x,y,z on the unit sphere """ lons = np.array(lon) lats = np.array(lat) xs = np.cos(lats) * np.cos(lons) ys = np.cos(lats) * np.sin(lons) zs = np.sin(lats) return (xs, ys, zs)
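A quick numerical check of lonlat2xyz: lon = 0, lat = 0 should land on (1, 0, 0), and every output point should have unit length.

import numpy as np

xs, ys, zs = lonlat2xyz(0.0, 0.0)
print(xs, ys, zs)             # 1.0 0.0 0.0
print(xs**2 + ys**2 + zs**2)  # 1.0, confirming the point sits on the unit sphere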
def TableArgsMeta(table_args):

    '''Declarative metaclass automatically adding (merging) __table_args__ to
    mapped classes.

    Example:

        Meta = TableArgsMeta({
            'mysql_engine': 'InnoDB',
            'mysql_default charset': 'utf8',
        })

        Base = declarative_base(name='Base', metaclass=Meta)

        class MyClass(Base):
            …

    is equivalent to

        Base = declarative_base(name='Base')

        class MyClass(Base):
            __table_args__ = {
                'mysql_engine': 'InnoDB',
                'mysql_default charset': 'utf8',
            }
            …

    '''

    class _TableArgsMeta(declarative.DeclarativeMeta):

        def __init__(cls, name, bases, dict_):
            if (  # Do not extend base class
                    '_decl_class_registry' not in cls.__dict__ and
                    # Missing __tablename__ or equal to None means single table
                    # inheritance — no table for it (columns go to table of
                    # base class)
                    cls.__dict__.get('__tablename__') and
                    # Abstract class — no table for it (columns go to table[s]
                    # of subclass[es])
                    not cls.__dict__.get('__abstract__', False)):
                ta = getattr(cls, '__table_args__', {})
                if isinstance(ta, dict):
                    ta = dict(table_args, **ta)
                    cls.__table_args__ = ta
                else:
                    assert isinstance(ta, tuple)
                    if ta and isinstance(ta[-1], dict):
                        tad = dict(table_args, **ta[-1])
                        ta = ta[:-1]
                    else:
                        tad = dict(table_args)
                    cls.__table_args__ = ta + (tad,)
            super(_TableArgsMeta, cls).__init__(name, bases, dict_)

    return _TableArgsMeta
def function[TableArgsMeta, parameter[table_args]]: constant[Declarative metaclass automatically adding (merging) __table_args__ to mapped classes. Example: Meta = TableArgsMeta({ 'mysql_engine': 'InnoDB', 'mysql_default charset': 'utf8', } Base = declarative_base(name='Base', metaclass=Meta) class MyClass(Base): … is equivalent to Base = declarative_base(name='Base') class MyClass(Base): __table_args__ = { 'mysql_engine': 'InnoDB', 'mysql_default charset': 'utf8', } … ] class class[_TableArgsMeta, parameter[]] begin[:] def function[__init__, parameter[cls, name, bases, dict_]]: if <ast.BoolOp object at 0x7da18dc04910> begin[:] variable[ta] assign[=] call[name[getattr], parameter[name[cls], constant[__table_args__], dictionary[[], []]]] if call[name[isinstance], parameter[name[ta], name[dict]]] begin[:] variable[ta] assign[=] call[name[dict], parameter[name[table_args]]] name[cls].__table_args__ assign[=] name[ta] call[call[name[super], parameter[name[_TableArgsMeta], name[cls]]].__init__, parameter[name[name], name[bases], name[dict_]]] return[name[_TableArgsMeta]]
keyword[def] identifier[TableArgsMeta] ( identifier[table_args] ): literal[string] keyword[class] identifier[_TableArgsMeta] ( identifier[declarative] . identifier[DeclarativeMeta] ): keyword[def] identifier[__init__] ( identifier[cls] , identifier[name] , identifier[bases] , identifier[dict_] ): keyword[if] ( literal[string] keyword[not] keyword[in] identifier[cls] . identifier[__dict__] keyword[and] identifier[cls] . identifier[__dict__] . identifier[get] ( literal[string] ) keyword[and] keyword[not] identifier[cls] . identifier[__dict__] . identifier[get] ( literal[string] , keyword[False] )): identifier[ta] = identifier[getattr] ( identifier[cls] , literal[string] ,{}) keyword[if] identifier[isinstance] ( identifier[ta] , identifier[dict] ): identifier[ta] = identifier[dict] ( identifier[table_args] ,** identifier[ta] ) identifier[cls] . identifier[__table_args__] = identifier[ta] keyword[else] : keyword[assert] identifier[isinstance] ( identifier[ta] , identifier[tuple] ) keyword[if] identifier[ta] keyword[and] identifier[isinstance] ( identifier[ta] [- literal[int] ], identifier[dict] ): identifier[tad] = identifier[dict] ( identifier[table_args] ,** identifier[ta] [- literal[int] ]) identifier[ta] = identifier[ta] [:- literal[int] ] keyword[else] : identifier[tad] = identifier[dict] ( identifier[table_args] ) identifier[cls] . identifier[__table_args__] = identifier[ta] +( identifier[tad] ,) identifier[super] ( identifier[_TableArgsMeta] , identifier[cls] ). identifier[__init__] ( identifier[name] , identifier[bases] , identifier[dict_] ) keyword[return] identifier[_TableArgsMeta]
def TableArgsMeta(table_args): """Declarative metaclass automatically adding (merging) __table_args__ to mapped classes. Example: Meta = TableArgsMeta({ 'mysql_engine': 'InnoDB', 'mysql_default charset': 'utf8', } Base = declarative_base(name='Base', metaclass=Meta) class MyClass(Base): … is equivalent to Base = declarative_base(name='Base') class MyClass(Base): __table_args__ = { 'mysql_engine': 'InnoDB', 'mysql_default charset': 'utf8', } … """ class _TableArgsMeta(declarative.DeclarativeMeta): def __init__(cls, name, bases, dict_): if '_decl_class_registry' not in cls.__dict__ and cls.__dict__.get('__tablename__') and (not cls.__dict__.get('__abstract__', False)): # Do not extend base class # Missing __tablename_ or equal to None means single table # inheritance — no table for it (columns go to table of # base class) # Abstract class — no table for it (columns go to table[s] # of subclass[es] ta = getattr(cls, '__table_args__', {}) if isinstance(ta, dict): ta = dict(table_args, **ta) cls.__table_args__ = ta # depends on [control=['if'], data=[]] else: assert isinstance(ta, tuple) if ta and isinstance(ta[-1], dict): tad = dict(table_args, **ta[-1]) ta = ta[:-1] # depends on [control=['if'], data=[]] else: tad = dict(table_args) cls.__table_args__ = ta + (tad,) # depends on [control=['if'], data=[]] super(_TableArgsMeta, cls).__init__(name, bases, dict_) return _TableArgsMeta
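Usage along the lines of the docstring's own example, with a classic (pre-2.0) SQLAlchemy declarative setup; the table and class names here are made up.

from sqlalchemy import Column, Integer
from sqlalchemy.ext import declarative

Meta = TableArgsMeta({'mysql_engine': 'InnoDB'})
Base = declarative.declarative_base(name='Base', metaclass=Meta)

class Widget(Base):
    __tablename__ = 'widget'
    id = Column(Integer, primary_key=True)

print(Widget.__table__.kwargs)  # {'mysql_engine': 'InnoDB'}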
def create(message: str, pubkey: Optional[str] = None, signing_keys: Optional[List[SigningKey]] = None,
           message_comment: Optional[str] = None, signatures_comment: Optional[str] = None) -> str:
    """
    Encrypt a message in ascii armor format, optionally signing it

    :param message: Utf-8 message
    :param pubkey: Public key of recipient for encryption
    :param signing_keys: Optional list of SigningKey instances
    :param message_comment: Optional message comment field
    :param signatures_comment: Optional signatures comment field
    :return: The complete ASCII Armor message block as a string
    """
    # if no public key and no signing key...
    if not pubkey and not signing_keys:
        # We can not create an Ascii Armor Message
        raise MISSING_PUBLIC_KEY_AND_SIGNING_KEY_EXCEPTION

    # keep only one newline at the end of the message
    message = message.rstrip("\n\r") + "\n"

    # create block with headers
    ascii_armor_block = """{begin_message_header}
""".format(begin_message_header=BEGIN_MESSAGE_HEADER)

    # if encrypted message...
    if pubkey:
        # add encrypted message fields
        ascii_armor_block += """{version_field}
""".format(version_field=AsciiArmor._get_version_field())

    # add message comment if specified
    if message_comment:
        ascii_armor_block += """{comment_field}
""".format(comment_field=AsciiArmor._get_comment_field(message_comment))

    # blank line separator
    ascii_armor_block += '\n'

    if pubkey:
        # add encrypted message
        pubkey_instance = PublicKey(pubkey)
        base64_encrypted_message = base64.b64encode(pubkey_instance.encrypt_seal(message))  # type: bytes
        ascii_armor_block += """{base64_encrypted_message}
""".format(base64_encrypted_message=base64_encrypted_message.decode('utf-8'))
    else:
        # remove trailing spaces
        message = AsciiArmor._remove_trailing_spaces(message)

        # add dash escaped message to ascii armor content
        ascii_armor_block += AsciiArmor._dash_escape_text(message)

    # if no signature...
    if signing_keys is None:
        # add message tail
        ascii_armor_block += END_MESSAGE_HEADER
    else:
        # add signature blocks and close block on last signature
        count = 1
        for signing_key in signing_keys:
            ascii_armor_block += AsciiArmor._get_signature_block(message, signing_key, count == len(signing_keys),
                                                                 signatures_comment)
            count += 1

    return ascii_armor_block
def function[create, parameter[message, pubkey, signing_keys, message_comment, signatures_comment]]: constant[ Encrypt a message in ascii armor format, optionally signing it :param message: Utf-8 message :param pubkey: Public key of recipient for encryption :param signing_keys: Optional list of SigningKey instances :param message_comment: Optional message comment field :param signatures_comment: Optional signatures comment field :return: ] if <ast.BoolOp object at 0x7da18dc98f70> begin[:] <ast.Raise object at 0x7da18dc99600> variable[message] assign[=] binary_operation[call[name[message].rstrip, parameter[constant[ ]]] + constant[ ]] variable[ascii_armor_block] assign[=] call[constant[{begin_message_header} ].format, parameter[]] if name[pubkey] begin[:] <ast.AugAssign object at 0x7da18dc987f0> if name[message_comment] begin[:] <ast.AugAssign object at 0x7da18dc987c0> <ast.AugAssign object at 0x7da18dc99960> if name[pubkey] begin[:] variable[pubkey_instance] assign[=] call[name[PublicKey], parameter[name[pubkey]]] variable[base64_encrypted_message] assign[=] call[name[base64].b64encode, parameter[call[name[pubkey_instance].encrypt_seal, parameter[name[message]]]]] <ast.AugAssign object at 0x7da18dc99b70> if compare[name[signing_keys] is constant[None]] begin[:] <ast.AugAssign object at 0x7da18dc99810> return[name[ascii_armor_block]]
keyword[def] identifier[create] ( identifier[message] : identifier[str] , identifier[pubkey] : identifier[Optional] [ identifier[str] ]= keyword[None] , identifier[signing_keys] : identifier[Optional] [ identifier[List] [ identifier[SigningKey] ]]= keyword[None] , identifier[message_comment] : identifier[Optional] [ identifier[str] ]= keyword[None] , identifier[signatures_comment] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> identifier[str] : literal[string] keyword[if] keyword[not] identifier[pubkey] keyword[and] keyword[not] identifier[signing_keys] : keyword[raise] identifier[MISSING_PUBLIC_KEY_AND_SIGNING_KEY_EXCEPTION] identifier[message] = identifier[message] . identifier[rstrip] ( literal[string] )+ literal[string] identifier[ascii_armor_block] = literal[string] . identifier[format] ( identifier[begin_message_header] = identifier[BEGIN_MESSAGE_HEADER] ) keyword[if] identifier[pubkey] : identifier[ascii_armor_block] += literal[string] . identifier[format] ( identifier[version_field] = identifier[AsciiArmor] . identifier[_get_version_field] ()) keyword[if] identifier[message_comment] : identifier[ascii_armor_block] += literal[string] . identifier[format] ( identifier[comment_field] = identifier[AsciiArmor] . identifier[_get_comment_field] ( identifier[message_comment] )) identifier[ascii_armor_block] += literal[string] keyword[if] identifier[pubkey] : identifier[pubkey_instance] = identifier[PublicKey] ( identifier[pubkey] ) identifier[base64_encrypted_message] = identifier[base64] . identifier[b64encode] ( identifier[pubkey_instance] . identifier[encrypt_seal] ( identifier[message] )) identifier[ascii_armor_block] += literal[string] . identifier[format] ( identifier[base64_encrypted_message] = identifier[base64_encrypted_message] . identifier[decode] ( literal[string] )) keyword[else] : identifier[message] = identifier[AsciiArmor] . identifier[_remove_trailing_spaces] ( identifier[message] ) identifier[ascii_armor_block] += identifier[AsciiArmor] . identifier[_dash_escape_text] ( identifier[message] ) keyword[if] identifier[signing_keys] keyword[is] keyword[None] : identifier[ascii_armor_block] += identifier[END_MESSAGE_HEADER] keyword[else] : identifier[count] = literal[int] keyword[for] identifier[signing_key] keyword[in] identifier[signing_keys] : identifier[ascii_armor_block] += identifier[AsciiArmor] . identifier[_get_signature_block] ( identifier[message] , identifier[signing_key] , identifier[count] == identifier[len] ( identifier[signing_keys] ), identifier[signatures_comment] ) identifier[count] += literal[int] keyword[return] identifier[ascii_armor_block]
def create(message: str, pubkey: Optional[str]=None, signing_keys: Optional[List[SigningKey]]=None, message_comment: Optional[str]=None, signatures_comment: Optional[str]=None) -> str: """ Encrypt a message in ascii armor format, optionally signing it :param message: Utf-8 message :param pubkey: Public key of recipient for encryption :param signing_keys: Optional list of SigningKey instances :param message_comment: Optional message comment field :param signatures_comment: Optional signatures comment field :return: """ # if no public key and no signing key... if not pubkey and (not signing_keys): # We can not create an Ascii Armor Message raise MISSING_PUBLIC_KEY_AND_SIGNING_KEY_EXCEPTION # depends on [control=['if'], data=[]] # keep only one newline at the end of the message message = message.rstrip('\n\r') + '\n' # create block with headers ascii_armor_block = '{begin_message_header}\n'.format(begin_message_header=BEGIN_MESSAGE_HEADER) # if encrypted message... if pubkey: # add encrypted message fields ascii_armor_block += '{version_field}\n'.format(version_field=AsciiArmor._get_version_field()) # depends on [control=['if'], data=[]] # add message comment if specified if message_comment: ascii_armor_block += '{comment_field}\n'.format(comment_field=AsciiArmor._get_comment_field(message_comment)) # depends on [control=['if'], data=[]] # blank line separator ascii_armor_block += '\n' if pubkey: # add encrypted message pubkey_instance = PublicKey(pubkey) base64_encrypted_message = base64.b64encode(pubkey_instance.encrypt_seal(message)) # type: bytes ascii_armor_block += '{base64_encrypted_message}\n'.format(base64_encrypted_message=base64_encrypted_message.decode('utf-8')) # depends on [control=['if'], data=[]] else: # remove trailing spaces message = AsciiArmor._remove_trailing_spaces(message) # add dash escaped message to ascii armor content ascii_armor_block += AsciiArmor._dash_escape_text(message) # if no signature... if signing_keys is None: # add message tail ascii_armor_block += END_MESSAGE_HEADER # depends on [control=['if'], data=[]] else: # add signature blocks and close block on last signature count = 1 for signing_key in signing_keys: ascii_armor_block += AsciiArmor._get_signature_block(message, signing_key, count == len(signing_keys), signatures_comment) count += 1 # depends on [control=['for'], data=['signing_key']] return ascii_armor_block
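An encrypt-only usage sketch; `recipient_pubkey` is a placeholder for a real public key in whatever encoding PublicKey() accepts, and BEGIN_MESSAGE_HEADER comes from this module.

# Encrypt a message for a single recipient, no signatures.
armored = create("hello world", pubkey=recipient_pubkey)
assert armored.startswith(BEGIN_MESSAGE_HEADER)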
def Nu_Swenson(Re, Pr, rho_w=None, rho_b=None): r'''Calculates internal convection Nusselt number for turbulent vertical upward flow in a pipe under supercritical conditions according to [1]_. .. math:: Nu_w = 0.00459 Re_w^{0.923} Pr_w^{0.613} \left(\frac{\rho_w}{\rho_b}\right)^{0.231} \bar{Cp} = \frac{H_w-H_b}{T_w-T_b} Parameters ---------- Re : float Reynolds number with wall fluid properties, [-] Pr : float Prandtl number with wall fluid properties and an average heat capacity between the wall and bulk temperatures [-] rho_w : float, optional Density at the wall temperature, [kg/m^3] rho_b : float, optional Density at the bulk temperature, [kg/m^3] Returns ------- Nu : float Nusselt number with wall fluid properties, [-] Notes ----- The range of examined parameters is as follows: P from 22.8 to 27.6 MPa; G from 542-2150 kg/m^2/s; Re from 7.5E4 to 3.16E6; T_b from 75 to 576 degrees Celsius and T_w from 93 to 649 degrees Celsius. Cp used in the calculation of Prandtl number should be the average value of those at the wall and the bulk temperatures. For deteriorated heat transfer, this was the most accurate correlation in [2]_ with a MAD of 18.4%. On the overall database in [3]_, it was the 9th most accurate correlation. If the extra density information is not provided, it will not be used. Examples -------- >>> Nu_Swenson(1E5, 1.2, 330, 290.) 217.92827034803668 References ---------- .. [1] Swenson, H. S., J. R. Carver, and C. R. Kakarala. "Heat Transfer to Supercritical Water in Smooth-Bore Tubes." Journal of Heat Transfer 87, no. 4 (November 1, 1965): 477-83. doi:10.1115/1.3689139. .. [2] Chen, Weiwei, Xiande Fang, Yu Xu, and Xianghui Su. "An Assessment of Correlations of Forced Convection Heat Transfer to Water at Supercritical Pressure." Annals of Nuclear Energy 76 (February 2015): 451-60. doi:10.1016/j.anucene.2014.10.027. .. [3] Yu, Jiyang, Baoshan Jia, Dan Wu, and Daling Wang. "Optimization of Heat Transfer Coefficient Correlation at Supercritical Pressure Using Genetic Algorithms." Heat and Mass Transfer 45, no. 6 (January 8, 2009): 757-66. doi:10.1007/s00231-008-0475-4. .. [4] Jäger, Wadim, Victor Hugo Sánchez Espinoza, and Antonio Hurtado. "Review and Proposal for Heat Transfer Predictions at Supercritical Water Conditions Using Existing Correlations and Experiments." Nuclear Engineering and Design, (W3MDM) University of Leeds International Symposium: What Where When? Multi-dimensional Advances for Industrial Process Monitoring, 241, no. 6 (June 2011): 2184-2203. doi:10.1016/j.nucengdes.2011.03.022. ''' Nu = 0.00459*Re**0.923*Pr**0.613 if rho_w and rho_b: Nu *= (rho_w/rho_b)**0.231 return Nu
def function[Nu_Swenson, parameter[Re, Pr, rho_w, rho_b]]: constant[Calculates internal convection Nusselt number for turbulent vertical upward flow in a pipe under supercritical conditions according to [1]_. .. math:: Nu_w = 0.00459 Re_w^{0.923} Pr_w^{0.613} \left(\frac{\rho_w}{\rho_b}\right)^{0.231} \bar{Cp} = \frac{H_w-H_b}{T_w-T_b} Parameters ---------- Re : float Reynolds number with wall fluid properties, [-] Pr : float Prandtl number with wall fluid properties and an average heat capacity between the wall and bulk temperatures [-] rho_w : float, optional Density at the wall temperature, [kg/m^3] rho_b : float, optional Density at the bulk temperature, [kg/m^3] Returns ------- Nu : float Nusselt number with wall fluid properties, [-] Notes ----- The range of examined parameters is as follows: P from 22.8 to 27.6 MPa; G from 542-2150 kg/m^2/s; Re from 7.5E4 to 3.16E6; T_b from 75 to 576 degrees Celsius and T_w from 93 to 649 degrees Celsius. Cp used in the calculation of Prandtl number should be the average value of those at the wall and the bulk temperatures. For deteriorated heat transfer, this was the most accurate correlation in [2]_ with a MAD of 18.4%. On the overall database in [3]_, it was the 9th most accurate correlation. If the extra density information is not provided, it will not be used. Examples -------- >>> Nu_Swenson(1E5, 1.2, 330, 290.) 217.92827034803668 References ---------- .. [1] Swenson, H. S., J. R. Carver, and C. R. Kakarala. "Heat Transfer to Supercritical Water in Smooth-Bore Tubes." Journal of Heat Transfer 87, no. 4 (November 1, 1965): 477-83. doi:10.1115/1.3689139. .. [2] Chen, Weiwei, Xiande Fang, Yu Xu, and Xianghui Su. "An Assessment of Correlations of Forced Convection Heat Transfer to Water at Supercritical Pressure." Annals of Nuclear Energy 76 (February 2015): 451-60. doi:10.1016/j.anucene.2014.10.027. .. [3] Yu, Jiyang, Baoshan Jia, Dan Wu, and Daling Wang. "Optimization of Heat Transfer Coefficient Correlation at Supercritical Pressure Using Genetic Algorithms." Heat and Mass Transfer 45, no. 6 (January 8, 2009): 757-66. doi:10.1007/s00231-008-0475-4. .. [4] Jäger, Wadim, Victor Hugo Sánchez Espinoza, and Antonio Hurtado. "Review and Proposal for Heat Transfer Predictions at Supercritical Water Conditions Using Existing Correlations and Experiments." Nuclear Engineering and Design, (W3MDM) University of Leeds International Symposium: What Where When? Multi-dimensional Advances for Industrial Process Monitoring, 241, no. 6 (June 2011): 2184-2203. doi:10.1016/j.nucengdes.2011.03.022. ] variable[Nu] assign[=] binary_operation[binary_operation[constant[0.00459] * binary_operation[name[Re] ** constant[0.923]]] * binary_operation[name[Pr] ** constant[0.613]]] if <ast.BoolOp object at 0x7da2041d9f60> begin[:] <ast.AugAssign object at 0x7da2041da7a0> return[name[Nu]]
keyword[def] identifier[Nu_Swenson] ( identifier[Re] , identifier[Pr] , identifier[rho_w] = keyword[None] , identifier[rho_b] = keyword[None] ): literal[string] identifier[Nu] = literal[int] * identifier[Re] ** literal[int] * identifier[Pr] ** literal[int] keyword[if] identifier[rho_w] keyword[and] identifier[rho_b] : identifier[Nu] *=( identifier[rho_w] / identifier[rho_b] )** literal[int] keyword[return] identifier[Nu]
def Nu_Swenson(Re, Pr, rho_w=None, rho_b=None): """Calculates internal convection Nusselt number for turbulent vertical upward flow in a pipe under supercritical conditions according to [1]_. .. math:: Nu_w = 0.00459 Re_w^{0.923} Pr_w^{0.613} \\left(\\frac{\\rho_w}{\\rho_b}\\right)^{0.231} \\bar{Cp} = \\frac{H_w-H_b}{T_w-T_b} Parameters ---------- Re : float Reynolds number with wall fluid properties, [-] Pr : float Prandtl number with wall fluid properties and an average heat capacity between the wall and bulk temperatures [-] rho_w : float, optional Density at the wall temperature, [kg/m^3] rho_b : float, optional Density at the bulk temperature, [kg/m^3] Returns ------- Nu : float Nusselt number with wall fluid properties, [-] Notes ----- The range of examined parameters is as follows: P from 22.8 to 27.6 MPa; G from 542-2150 kg/m^2/s; Re from 7.5E4 to 3.16E6; T_b from 75 to 576 degrees Celsius and T_w from 93 to 649 degrees Celsius. Cp used in the calculation of Prandtl number should be the average value of those at the wall and the bulk temperatures. For deteriorated heat transfer, this was the most accurate correlation in [2]_ with a MAD of 18.4%. On the overall database in [3]_, it was the 9th most accurate correlation. If the extra density information is not provided, it will not be used. Examples -------- >>> Nu_Swenson(1E5, 1.2, 330, 290.) 217.92827034803668 References ---------- .. [1] Swenson, H. S., J. R. Carver, and C. R. Kakarala. "Heat Transfer to Supercritical Water in Smooth-Bore Tubes." Journal of Heat Transfer 87, no. 4 (November 1, 1965): 477-83. doi:10.1115/1.3689139. .. [2] Chen, Weiwei, Xiande Fang, Yu Xu, and Xianghui Su. "An Assessment of Correlations of Forced Convection Heat Transfer to Water at Supercritical Pressure." Annals of Nuclear Energy 76 (February 2015): 451-60. doi:10.1016/j.anucene.2014.10.027. .. [3] Yu, Jiyang, Baoshan Jia, Dan Wu, and Daling Wang. "Optimization of Heat Transfer Coefficient Correlation at Supercritical Pressure Using Genetic Algorithms." Heat and Mass Transfer 45, no. 6 (January 8, 2009): 757-66. doi:10.1007/s00231-008-0475-4. .. [4] Jäger, Wadim, Victor Hugo Sánchez Espinoza, and Antonio Hurtado. "Review and Proposal for Heat Transfer Predictions at Supercritical Water Conditions Using Existing Correlations and Experiments." Nuclear Engineering and Design, (W3MDM) University of Leeds International Symposium: What Where When? Multi-dimensional Advances for Industrial Process Monitoring, 241, no. 6 (June 2011): 2184-2203. doi:10.1016/j.nucengdes.2011.03.022. """ Nu = 0.00459 * Re ** 0.923 * Pr ** 0.613 if rho_w and rho_b: Nu *= (rho_w / rho_b) ** 0.231 # depends on [control=['if'], data=[]] return Nu
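Reproducing the doctest value term by term makes the correlation easy to sanity-check; without the optional densities, the correction factor is simply omitted.

# Nu = 0.00459 * Re^0.923 * Pr^0.613 * (rho_w/rho_b)^0.231
Nu = 0.00459 * 1e5**0.923 * 1.2**0.613 * (330 / 290.)**0.231
print(Nu)                     # ~217.928, matching Nu_Swenson(1E5, 1.2, 330, 290.)
print(Nu_Swenson(1E5, 1.2))   # ~211.5, density correction dropped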
def resolve_image(input, resolvers=None, fmt='png', width=300, height=300, frame=False, crop=None, bgcolor=None, atomcolor=None, hcolor=None, bondcolor=None, framecolor=None, symbolfontsize=11, linewidth=2, hsymbol='special', csymbol='special', stereolabels=False, stereowedges=True, header=None, footer=None, **kwargs): """Resolve input to a 2D image depiction. :param string input: Chemical identifier to resolve :param list(string) resolvers: (Optional) Ordered list of resolvers to use :param string fmt: (Optional) gif or png image format (default png) :param int width: (Optional) Image width in pixels (default 300) :param int height: (Optional) Image height in pixels (default 300) :param bool frame: (Optional) Whether to show border frame (default False) :param int crop: (Optional) Crop image with specified padding :param int symbolfontsize: (Optional) Atom label font size (default 11) :param int linewidth: (Optional) Bond line width (default 2) :param string bgcolor: (Optional) Background color :param string atomcolor: (Optional) Atom label color :param string hcolor: (Optional) Hydrogen atom label color :param string bondcolor: (Optional) Bond color :param string framecolor: (Optional) Border frame color :param bool hsymbol: (Optional) Hydrogens: all, special or none (default special) :param bool csymbol: (Optional) Carbons: all, special or none (default special) :param bool stereolabels: (Optional) Whether to show stereochemistry labels (default False) :param bool stereowedges: (Optional) Whether to show wedge/dash bonds (default True) :param string header: (Optional) Header text above structure :param string footer: (Optional) Footer text below structure """ # Aggregate all arguments into kwargs args, _, _, values = inspect.getargvalues(inspect.currentframe()) for arg in args: if values[arg] is not None: kwargs[arg] = values[arg] # Turn off anti-aliasing for transparent background if kwargs.get('bgcolor') == 'transparent': kwargs['antialiasing'] = False # Renamed parameters if 'stereolabels' in kwargs: kwargs['showstereo'] = kwargs.pop('stereolabels') if 'fmt' in kwargs: kwargs['format'] = kwargs.pop('fmt') # Toggle stereo wedges if 'stereowedges' in kwargs: status = kwargs.pop('stereowedges') kwargs.update({'wedges': status, 'dashes': status}) # Constant values kwargs.update({'representation': 'image', 'xml': False}) url = construct_api_url(**kwargs) log.debug('Making image request: %s', url) response = urlopen(url) return response.read()
def function[resolve_image, parameter[input, resolvers, fmt, width, height, frame, crop, bgcolor, atomcolor, hcolor, bondcolor, framecolor, symbolfontsize, linewidth, hsymbol, csymbol, stereolabels, stereowedges, header, footer]]: constant[Resolve input to a 2D image depiction. :param string input: Chemical identifier to resolve :param list(string) resolvers: (Optional) Ordered list of resolvers to use :param string fmt: (Optional) gif or png image format (default png) :param int width: (Optional) Image width in pixels (default 300) :param int height: (Optional) Image height in pixels (default 300) :param bool frame: (Optional) Whether to show border frame (default False) :param int crop: (Optional) Crop image with specified padding :param int symbolfontsize: (Optional) Atom label font size (default 11) :param int linewidth: (Optional) Bond line width (default 2) :param string bgcolor: (Optional) Background color :param string atomcolor: (Optional) Atom label color :param string hcolor: (Optional) Hydrogen atom label color :param string bondcolor: (Optional) Bond color :param string framecolor: (Optional) Border frame color :param bool hsymbol: (Optional) Hydrogens: all, special or none (default special) :param bool csymbol: (Optional) Carbons: all, special or none (default special) :param bool stereolabels: (Optional) Whether to show stereochemistry labels (default False) :param bool stereowedges: (Optional) Whether to show wedge/dash bonds (default True) :param string header: (Optional) Header text above structure :param string footer: (Optional) Footer text below structure ] <ast.Tuple object at 0x7da18ede5660> assign[=] call[name[inspect].getargvalues, parameter[call[name[inspect].currentframe, parameter[]]]] for taget[name[arg]] in starred[name[args]] begin[:] if compare[call[name[values]][name[arg]] is_not constant[None]] begin[:] call[name[kwargs]][name[arg]] assign[=] call[name[values]][name[arg]] if compare[call[name[kwargs].get, parameter[constant[bgcolor]]] equal[==] constant[transparent]] begin[:] call[name[kwargs]][constant[antialiasing]] assign[=] constant[False] if compare[constant[stereolabels] in name[kwargs]] begin[:] call[name[kwargs]][constant[showstereo]] assign[=] call[name[kwargs].pop, parameter[constant[stereolabels]]] if compare[constant[fmt] in name[kwargs]] begin[:] call[name[kwargs]][constant[format]] assign[=] call[name[kwargs].pop, parameter[constant[fmt]]] if compare[constant[stereowedges] in name[kwargs]] begin[:] variable[status] assign[=] call[name[kwargs].pop, parameter[constant[stereowedges]]] call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da1b0397d60>, <ast.Constant object at 0x7da1b0395180>], [<ast.Name object at 0x7da1b03954b0>, <ast.Name object at 0x7da1b0396590>]]]] call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da1b0397160>, <ast.Constant object at 0x7da1b0396230>], [<ast.Constant object at 0x7da1b0397f70>, <ast.Constant object at 0x7da1b0395cf0>]]]] variable[url] assign[=] call[name[construct_api_url], parameter[]] call[name[log].debug, parameter[constant[Making image request: %s], name[url]]] variable[response] assign[=] call[name[urlopen], parameter[name[url]]] return[call[name[response].read, parameter[]]]
keyword[def] identifier[resolve_image] ( identifier[input] , identifier[resolvers] = keyword[None] , identifier[fmt] = literal[string] , identifier[width] = literal[int] , identifier[height] = literal[int] , identifier[frame] = keyword[False] , identifier[crop] = keyword[None] , identifier[bgcolor] = keyword[None] , identifier[atomcolor] = keyword[None] , identifier[hcolor] = keyword[None] , identifier[bondcolor] = keyword[None] , identifier[framecolor] = keyword[None] , identifier[symbolfontsize] = literal[int] , identifier[linewidth] = literal[int] , identifier[hsymbol] = literal[string] , identifier[csymbol] = literal[string] , identifier[stereolabels] = keyword[False] , identifier[stereowedges] = keyword[True] , identifier[header] = keyword[None] , identifier[footer] = keyword[None] , ** identifier[kwargs] ): literal[string] identifier[args] , identifier[_] , identifier[_] , identifier[values] = identifier[inspect] . identifier[getargvalues] ( identifier[inspect] . identifier[currentframe] ()) keyword[for] identifier[arg] keyword[in] identifier[args] : keyword[if] identifier[values] [ identifier[arg] ] keyword[is] keyword[not] keyword[None] : identifier[kwargs] [ identifier[arg] ]= identifier[values] [ identifier[arg] ] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] )== literal[string] : identifier[kwargs] [ literal[string] ]= keyword[False] keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[kwargs] [ literal[string] ]= identifier[kwargs] . identifier[pop] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[kwargs] [ literal[string] ]= identifier[kwargs] . identifier[pop] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[status] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[kwargs] . identifier[update] ({ literal[string] : identifier[status] , literal[string] : identifier[status] }) identifier[kwargs] . identifier[update] ({ literal[string] : literal[string] , literal[string] : keyword[False] }) identifier[url] = identifier[construct_api_url] (** identifier[kwargs] ) identifier[log] . identifier[debug] ( literal[string] , identifier[url] ) identifier[response] = identifier[urlopen] ( identifier[url] ) keyword[return] identifier[response] . identifier[read] ()
def resolve_image(input, resolvers=None, fmt='png', width=300, height=300, frame=False, crop=None, bgcolor=None, atomcolor=None, hcolor=None, bondcolor=None, framecolor=None, symbolfontsize=11, linewidth=2, hsymbol='special', csymbol='special', stereolabels=False, stereowedges=True, header=None, footer=None, **kwargs): """Resolve input to a 2D image depiction. :param string input: Chemical identifier to resolve :param list(string) resolvers: (Optional) Ordered list of resolvers to use :param string fmt: (Optional) gif or png image format (default png) :param int width: (Optional) Image width in pixels (default 300) :param int height: (Optional) Image height in pixels (default 300) :param bool frame: (Optional) Whether to show border frame (default False) :param int crop: (Optional) Crop image with specified padding :param int symbolfontsize: (Optional) Atom label font size (default 11) :param int linewidth: (Optional) Bond line width (default 2) :param string bgcolor: (Optional) Background color :param string atomcolor: (Optional) Atom label color :param string hcolor: (Optional) Hydrogen atom label color :param string bondcolor: (Optional) Bond color :param string framecolor: (Optional) Border frame color :param bool hsymbol: (Optional) Hydrogens: all, special or none (default special) :param bool csymbol: (Optional) Carbons: all, special or none (default special) :param bool stereolabels: (Optional) Whether to show stereochemistry labels (default False) :param bool stereowedges: (Optional) Whether to show wedge/dash bonds (default True) :param string header: (Optional) Header text above structure :param string footer: (Optional) Footer text below structure """ # Aggregate all arguments into kwargs (args, _, _, values) = inspect.getargvalues(inspect.currentframe()) for arg in args: if values[arg] is not None: kwargs[arg] = values[arg] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['arg']] # Turn off anti-aliasing for transparent background if kwargs.get('bgcolor') == 'transparent': kwargs['antialiasing'] = False # depends on [control=['if'], data=[]] # Renamed parameters if 'stereolabels' in kwargs: kwargs['showstereo'] = kwargs.pop('stereolabels') # depends on [control=['if'], data=['kwargs']] if 'fmt' in kwargs: kwargs['format'] = kwargs.pop('fmt') # depends on [control=['if'], data=['kwargs']] # Toggle stereo wedges if 'stereowedges' in kwargs: status = kwargs.pop('stereowedges') kwargs.update({'wedges': status, 'dashes': status}) # depends on [control=['if'], data=['kwargs']] # Constant values kwargs.update({'representation': 'image', 'xml': False}) url = construct_api_url(**kwargs) log.debug('Making image request: %s', url) response = urlopen(url) return response.read()
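A short usage sketch: fetch a PNG depiction of a named compound and write it to disk. This performs a live request to the resolver service behind construct_api_url, so it needs network access.

png = resolve_image('aspirin', width=400, height=400)
with open('aspirin.png', 'wb') as f:
    f.write(png)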
def get_travis_branch(): """Get current branch per Travis environment variables If travis is building a PR, then TRAVIS_PULL_REQUEST is truthy and the name of the branch corresponding to the PR is stored in the TRAVIS_PULL_REQUEST_BRANCH environment variable. Else, the name of the branch is stored in the TRAVIS_BRANCH environment variable. See also: <https://docs.travis-ci.com/user/environment-variables/#default-environment-variables> """ # noqa E501 try: travis_pull_request = get_travis_env_or_fail('TRAVIS_PULL_REQUEST') if truthy(travis_pull_request): travis_pull_request_branch = get_travis_env_or_fail( 'TRAVIS_PULL_REQUEST_BRANCH') return travis_pull_request_branch else: travis_branch = get_travis_env_or_fail('TRAVIS_BRANCH') return travis_branch except UnexpectedTravisEnvironmentError: return None
def function[get_travis_branch, parameter[]]: constant[Get current branch per Travis environment variables If travis is building a PR, then TRAVIS_PULL_REQUEST is truthy and the name of the branch corresponding to the PR is stored in the TRAVIS_PULL_REQUEST_BRANCH environment variable. Else, the name of the branch is stored in the TRAVIS_BRANCH environment variable. See also: <https://docs.travis-ci.com/user/environment-variables/#default-environment-variables> ] <ast.Try object at 0x7da20c7c89a0>
keyword[def] identifier[get_travis_branch] (): literal[string] keyword[try] : identifier[travis_pull_request] = identifier[get_travis_env_or_fail] ( literal[string] ) keyword[if] identifier[truthy] ( identifier[travis_pull_request] ): identifier[travis_pull_request_branch] = identifier[get_travis_env_or_fail] ( literal[string] ) keyword[return] identifier[travis_pull_request_branch] keyword[else] : identifier[travis_branch] = identifier[get_travis_env_or_fail] ( literal[string] ) keyword[return] identifier[travis_branch] keyword[except] identifier[UnexpectedTravisEnvironmentError] : keyword[return] keyword[None]
def get_travis_branch(): """Get current branch per Travis environment variables If travis is building a PR, then TRAVIS_PULL_REQUEST is truthy and the name of the branch corresponding to the PR is stored in the TRAVIS_PULL_REQUEST_BRANCH environment variable. Else, the name of the branch is stored in the TRAVIS_BRANCH environment variable. See also: <https://docs.travis-ci.com/user/environment-variables/#default-environment-variables> """ # noqa E501 try: travis_pull_request = get_travis_env_or_fail('TRAVIS_PULL_REQUEST') if truthy(travis_pull_request): travis_pull_request_branch = get_travis_env_or_fail('TRAVIS_PULL_REQUEST_BRANCH') return travis_pull_request_branch # depends on [control=['if'], data=[]] else: travis_branch = get_travis_env_or_fail('TRAVIS_BRANCH') return travis_branch # depends on [control=['try'], data=[]] except UnexpectedTravisEnvironmentError: return None # depends on [control=['except'], data=[]]
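A sketch of the branch-resolution behaviour under assumed environment variables; this presumes get_travis_env_or_fail reads these variables directly and that truthy() treats the string 'false' as falsy, per Travis's TRAVIS_PULL_REQUEST convention.

import os

# Simulate a non-PR Travis build.
os.environ['TRAVIS_PULL_REQUEST'] = 'false'
os.environ['TRAVIS_BRANCH'] = 'master'
print(get_travis_branch())  # -> 'master'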
def create_ssl_context(): """Create and return SSL Context.""" ssl_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE return ssl_context
def function[create_ssl_context, parameter[]]: constant[Create and return SSL Context.] variable[ssl_context] assign[=] call[name[ssl].create_default_context, parameter[name[ssl].Purpose.SERVER_AUTH]] name[ssl_context].check_hostname assign[=] constant[False] name[ssl_context].verify_mode assign[=] name[ssl].CERT_NONE return[name[ssl_context]]
keyword[def] identifier[create_ssl_context] (): literal[string] identifier[ssl_context] = identifier[ssl] . identifier[create_default_context] ( identifier[ssl] . identifier[Purpose] . identifier[SERVER_AUTH] ) identifier[ssl_context] . identifier[check_hostname] = keyword[False] identifier[ssl_context] . identifier[verify_mode] = identifier[ssl] . identifier[CERT_NONE] keyword[return] identifier[ssl_context]
def create_ssl_context(): """Create and return SSL Context.""" ssl_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE return ssl_context
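A quick inspection of the returned context: both verification knobs are deliberately off, so it should only be used where trusting unverified peers is an accepted trade-off (e.g., self-signed local devices).

import ssl

ctx = create_ssl_context()
print(ctx.check_hostname)                # False
print(ctx.verify_mode == ssl.CERT_NONE)  # True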
def init_cursor(self): """Position the cursor appropriately. The cursor is set to either the beginning of the oplog, or wherever it was last left off. Returns the cursor and True if the cursor is empty. """ timestamp = self.read_last_checkpoint() if timestamp is None or self.only_dump: if self.collection_dump: # dump collection and update checkpoint timestamp = self.dump_collection() if self.only_dump: LOG.info("Finished dump. Exiting.") timestamp = None self.running = False self.update_checkpoint(timestamp) if timestamp is None: return None, True else: # Collection dump disabled: # Return cursor to beginning of oplog but do not set the # checkpoint. The checkpoint will be set after an operation # has been applied. cursor = self.get_oplog_cursor() return cursor, self._cursor_empty(cursor) cursor = self.get_oplog_cursor(timestamp) cursor_empty = self._cursor_empty(cursor) if cursor_empty: # rollback, update checkpoint, and retry LOG.debug("OplogThread: Initiating rollback from " "get_oplog_cursor") self.update_checkpoint(self.rollback()) return self.init_cursor() first_oplog_entry = next(cursor) oldest_ts_long = util.bson_ts_to_long(self.get_oldest_oplog_timestamp()) checkpoint_ts_long = util.bson_ts_to_long(timestamp) if checkpoint_ts_long < oldest_ts_long: # We've fallen behind, the checkpoint has fallen off the oplog return None, True cursor_ts_long = util.bson_ts_to_long(first_oplog_entry["ts"]) if cursor_ts_long > checkpoint_ts_long: # The checkpoint is not present in this oplog and the oplog # did not rollover. This means that we connected to a new # primary which did not replicate the checkpoint and which has # new changes in its oplog for us to process. # rollback, update checkpoint, and retry LOG.debug( "OplogThread: Initiating rollback from " "get_oplog_cursor: new oplog entries found but " "checkpoint is not present" ) self.update_checkpoint(self.rollback()) return self.init_cursor() # first entry has been consumed return cursor, cursor_empty
def function[init_cursor, parameter[self]]: constant[Position the cursor appropriately. The cursor is set to either the beginning of the oplog, or wherever it was last left off. Returns the cursor and True if the cursor is empty. ] variable[timestamp] assign[=] call[name[self].read_last_checkpoint, parameter[]] if <ast.BoolOp object at 0x7da1b1edbee0> begin[:] if name[self].collection_dump begin[:] variable[timestamp] assign[=] call[name[self].dump_collection, parameter[]] if name[self].only_dump begin[:] call[name[LOG].info, parameter[constant[Finished dump. Exiting.]]] variable[timestamp] assign[=] constant[None] name[self].running assign[=] constant[False] call[name[self].update_checkpoint, parameter[name[timestamp]]] if compare[name[timestamp] is constant[None]] begin[:] return[tuple[[<ast.Constant object at 0x7da1b1edba60>, <ast.Constant object at 0x7da1b1edbac0>]]] variable[cursor] assign[=] call[name[self].get_oplog_cursor, parameter[name[timestamp]]] variable[cursor_empty] assign[=] call[name[self]._cursor_empty, parameter[name[cursor]]] if name[cursor_empty] begin[:] call[name[LOG].debug, parameter[constant[OplogThread: Initiating rollback from get_oplog_cursor]]] call[name[self].update_checkpoint, parameter[call[name[self].rollback, parameter[]]]] return[call[name[self].init_cursor, parameter[]]] variable[first_oplog_entry] assign[=] call[name[next], parameter[name[cursor]]] variable[oldest_ts_long] assign[=] call[name[util].bson_ts_to_long, parameter[call[name[self].get_oldest_oplog_timestamp, parameter[]]]] variable[checkpoint_ts_long] assign[=] call[name[util].bson_ts_to_long, parameter[name[timestamp]]] if compare[name[checkpoint_ts_long] less[<] name[oldest_ts_long]] begin[:] return[tuple[[<ast.Constant object at 0x7da1b1e6a110>, <ast.Constant object at 0x7da1b1e691e0>]]] variable[cursor_ts_long] assign[=] call[name[util].bson_ts_to_long, parameter[call[name[first_oplog_entry]][constant[ts]]]] if compare[name[cursor_ts_long] greater[>] name[checkpoint_ts_long]] begin[:] call[name[LOG].debug, parameter[constant[OplogThread: Initiating rollback from get_oplog_cursor: new oplog entries found but checkpoint is not present]]] call[name[self].update_checkpoint, parameter[call[name[self].rollback, parameter[]]]] return[call[name[self].init_cursor, parameter[]]] return[tuple[[<ast.Name object at 0x7da1b1e6ac20>, <ast.Name object at 0x7da1b1e6b640>]]]
keyword[def] identifier[init_cursor] ( identifier[self] ): literal[string] identifier[timestamp] = identifier[self] . identifier[read_last_checkpoint] () keyword[if] identifier[timestamp] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[only_dump] : keyword[if] identifier[self] . identifier[collection_dump] : identifier[timestamp] = identifier[self] . identifier[dump_collection] () keyword[if] identifier[self] . identifier[only_dump] : identifier[LOG] . identifier[info] ( literal[string] ) identifier[timestamp] = keyword[None] identifier[self] . identifier[running] = keyword[False] identifier[self] . identifier[update_checkpoint] ( identifier[timestamp] ) keyword[if] identifier[timestamp] keyword[is] keyword[None] : keyword[return] keyword[None] , keyword[True] keyword[else] : identifier[cursor] = identifier[self] . identifier[get_oplog_cursor] () keyword[return] identifier[cursor] , identifier[self] . identifier[_cursor_empty] ( identifier[cursor] ) identifier[cursor] = identifier[self] . identifier[get_oplog_cursor] ( identifier[timestamp] ) identifier[cursor_empty] = identifier[self] . identifier[_cursor_empty] ( identifier[cursor] ) keyword[if] identifier[cursor_empty] : identifier[LOG] . identifier[debug] ( literal[string] literal[string] ) identifier[self] . identifier[update_checkpoint] ( identifier[self] . identifier[rollback] ()) keyword[return] identifier[self] . identifier[init_cursor] () identifier[first_oplog_entry] = identifier[next] ( identifier[cursor] ) identifier[oldest_ts_long] = identifier[util] . identifier[bson_ts_to_long] ( identifier[self] . identifier[get_oldest_oplog_timestamp] ()) identifier[checkpoint_ts_long] = identifier[util] . identifier[bson_ts_to_long] ( identifier[timestamp] ) keyword[if] identifier[checkpoint_ts_long] < identifier[oldest_ts_long] : keyword[return] keyword[None] , keyword[True] identifier[cursor_ts_long] = identifier[util] . identifier[bson_ts_to_long] ( identifier[first_oplog_entry] [ literal[string] ]) keyword[if] identifier[cursor_ts_long] > identifier[checkpoint_ts_long] : identifier[LOG] . identifier[debug] ( literal[string] literal[string] literal[string] ) identifier[self] . identifier[update_checkpoint] ( identifier[self] . identifier[rollback] ()) keyword[return] identifier[self] . identifier[init_cursor] () keyword[return] identifier[cursor] , identifier[cursor_empty]
def init_cursor(self): """Position the cursor appropriately. The cursor is set to either the beginning of the oplog, or wherever it was last left off. Returns the cursor and True if the cursor is empty. """ timestamp = self.read_last_checkpoint() if timestamp is None or self.only_dump: if self.collection_dump: # dump collection and update checkpoint timestamp = self.dump_collection() if self.only_dump: LOG.info('Finished dump. Exiting.') timestamp = None self.running = False # depends on [control=['if'], data=[]] self.update_checkpoint(timestamp) if timestamp is None: return (None, True) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: # Collection dump disabled: # Return cursor to beginning of oplog but do not set the # checkpoint. The checkpoint will be set after an operation # has been applied. cursor = self.get_oplog_cursor() return (cursor, self._cursor_empty(cursor)) # depends on [control=['if'], data=[]] cursor = self.get_oplog_cursor(timestamp) cursor_empty = self._cursor_empty(cursor) if cursor_empty: # rollback, update checkpoint, and retry LOG.debug('OplogThread: Initiating rollback from get_oplog_cursor') self.update_checkpoint(self.rollback()) return self.init_cursor() # depends on [control=['if'], data=[]] first_oplog_entry = next(cursor) oldest_ts_long = util.bson_ts_to_long(self.get_oldest_oplog_timestamp()) checkpoint_ts_long = util.bson_ts_to_long(timestamp) if checkpoint_ts_long < oldest_ts_long: # We've fallen behind, the checkpoint has fallen off the oplog return (None, True) # depends on [control=['if'], data=[]] cursor_ts_long = util.bson_ts_to_long(first_oplog_entry['ts']) if cursor_ts_long > checkpoint_ts_long: # The checkpoint is not present in this oplog and the oplog # did not rollover. This means that we connected to a new # primary which did not replicate the checkpoint and which has # new changes in its oplog for us to process. # rollback, update checkpoint, and retry LOG.debug('OplogThread: Initiating rollback from get_oplog_cursor: new oplog entries found but checkpoint is not present') self.update_checkpoint(self.rollback()) return self.init_cursor() # depends on [control=['if'], data=[]] # first entry has been consumed return (cursor, cursor_empty)
def Build_ConfigPanel(self): """config panel for left-hand-side of frame: RGB Maps""" panel = self.config_panel sizer = wx.BoxSizer(wx.VERTICAL) lsty = wx.ALIGN_LEFT|wx.LEFT|wx.TOP|wx.EXPAND if self.config_mode == 'rgb': for icol, col in enumerate(RGB_COLORS): self.cmap_panels[icol] = ColorMapPanel(self.config_panel, self.panel, title='%s: ' % col.title(), color=icol, default=col, colormap_list=None) sizer.Add(self.cmap_panels[icol], 0, lsty, 2) sizer.Add(wx.StaticLine(self.config_panel, size=(100, 2), style=wx.LI_HORIZONTAL), 0, lsty, 2) self.interp_panel = InterpPanel(self.config_panel, callback=self.onInterp) self.contrast_panel = ContrastPanel(self.config_panel, callback=self.set_contrast_levels) sizer.Add(self.interp_panel, 0, lsty, 2) sizer.Add(self.contrast_panel, 0, lsty, 2) else: self.cmap_panels[0] = ColorMapPanel(self.config_panel, self.panel, default='gray', colormap_list=ColorMap_List) self.interp_panel = InterpPanel(self.config_panel, callback=self.onInterp) self.contrast_panel = ContrastPanel(self.config_panel, callback=self.set_contrast_levels) sizer.Add(self.cmap_panels[0], 0, lsty, 1) sizer.Add(wx.StaticLine(self.config_panel, size=(100, 2), style=wx.LI_HORIZONTAL), 0, lsty, 2) sizer.Add(self.interp_panel, 0, lsty, 2) sizer.Add(self.contrast_panel, 0, lsty, 2) cust = self.CustomConfig(self.config_panel, None, 0) if cust is not None: sizer.Add(cust, 0, lsty, 1) pack(self.config_panel, sizer)
def function[Build_ConfigPanel, parameter[self]]: constant[config panel for left-hand-side of frame: RGB Maps] variable[panel] assign[=] name[self].config_panel variable[sizer] assign[=] call[name[wx].BoxSizer, parameter[name[wx].VERTICAL]] variable[lsty] assign[=] binary_operation[binary_operation[binary_operation[name[wx].ALIGN_LEFT <ast.BitOr object at 0x7da2590d6aa0> name[wx].LEFT] <ast.BitOr object at 0x7da2590d6aa0> name[wx].TOP] <ast.BitOr object at 0x7da2590d6aa0> name[wx].EXPAND] if compare[name[self].config_mode equal[==] constant[rgb]] begin[:] for taget[tuple[[<ast.Name object at 0x7da204567580>, <ast.Name object at 0x7da2045653c0>]]] in starred[call[name[enumerate], parameter[name[RGB_COLORS]]]] begin[:] call[name[self].cmap_panels][name[icol]] assign[=] call[name[ColorMapPanel], parameter[name[self].config_panel, name[self].panel]] call[name[sizer].Add, parameter[call[name[self].cmap_panels][name[icol]], constant[0], name[lsty], constant[2]]] call[name[sizer].Add, parameter[call[name[wx].StaticLine, parameter[name[self].config_panel]], constant[0], name[lsty], constant[2]]] name[self].interp_panel assign[=] call[name[InterpPanel], parameter[name[self].config_panel]] name[self].contrast_panel assign[=] call[name[ContrastPanel], parameter[name[self].config_panel]] call[name[sizer].Add, parameter[name[self].interp_panel, constant[0], name[lsty], constant[2]]] call[name[sizer].Add, parameter[name[self].contrast_panel, constant[0], name[lsty], constant[2]]] variable[cust] assign[=] call[name[self].CustomConfig, parameter[name[self].config_panel, constant[None], constant[0]]] if compare[name[cust] is_not constant[None]] begin[:] call[name[sizer].Add, parameter[name[cust], constant[0], name[lsty], constant[1]]] call[name[pack], parameter[name[self].config_panel, name[sizer]]]
keyword[def] identifier[Build_ConfigPanel] ( identifier[self] ): literal[string] identifier[panel] = identifier[self] . identifier[config_panel] identifier[sizer] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[VERTICAL] ) identifier[lsty] = identifier[wx] . identifier[ALIGN_LEFT] | identifier[wx] . identifier[LEFT] | identifier[wx] . identifier[TOP] | identifier[wx] . identifier[EXPAND] keyword[if] identifier[self] . identifier[config_mode] == literal[string] : keyword[for] identifier[icol] , identifier[col] keyword[in] identifier[enumerate] ( identifier[RGB_COLORS] ): identifier[self] . identifier[cmap_panels] [ identifier[icol] ]= identifier[ColorMapPanel] ( identifier[self] . identifier[config_panel] , identifier[self] . identifier[panel] , identifier[title] = literal[string] % identifier[col] . identifier[title] (), identifier[color] = identifier[icol] , identifier[default] = identifier[col] , identifier[colormap_list] = keyword[None] ) identifier[sizer] . identifier[Add] ( identifier[self] . identifier[cmap_panels] [ identifier[icol] ], literal[int] , identifier[lsty] , literal[int] ) identifier[sizer] . identifier[Add] ( identifier[wx] . identifier[StaticLine] ( identifier[self] . identifier[config_panel] , identifier[size] =( literal[int] , literal[int] ), identifier[style] = identifier[wx] . identifier[LI_HORIZONTAL] ), literal[int] , identifier[lsty] , literal[int] ) identifier[self] . identifier[interp_panel] = identifier[InterpPanel] ( identifier[self] . identifier[config_panel] , identifier[callback] = identifier[self] . identifier[onInterp] ) identifier[self] . identifier[contrast_panel] = identifier[ContrastPanel] ( identifier[self] . identifier[config_panel] , identifier[callback] = identifier[self] . identifier[set_contrast_levels] ) identifier[sizer] . identifier[Add] ( identifier[self] . identifier[interp_panel] , literal[int] , identifier[lsty] , literal[int] ) identifier[sizer] . identifier[Add] ( identifier[self] . identifier[contrast_panel] , literal[int] , identifier[lsty] , literal[int] ) keyword[else] : identifier[self] . identifier[cmap_panels] [ literal[int] ]= identifier[ColorMapPanel] ( identifier[self] . identifier[config_panel] , identifier[self] . identifier[panel] , identifier[default] = literal[string] , identifier[colormap_list] = identifier[ColorMap_List] ) identifier[self] . identifier[interp_panel] = identifier[InterpPanel] ( identifier[self] . identifier[config_panel] , identifier[callback] = identifier[self] . identifier[onInterp] ) identifier[self] . identifier[contrast_panel] = identifier[ContrastPanel] ( identifier[self] . identifier[config_panel] , identifier[callback] = identifier[self] . identifier[set_contrast_levels] ) identifier[sizer] . identifier[Add] ( identifier[self] . identifier[cmap_panels] [ literal[int] ], literal[int] , identifier[lsty] , literal[int] ) identifier[sizer] . identifier[Add] ( identifier[wx] . identifier[StaticLine] ( identifier[self] . identifier[config_panel] , identifier[size] =( literal[int] , literal[int] ), identifier[style] = identifier[wx] . identifier[LI_HORIZONTAL] ), literal[int] , identifier[lsty] , literal[int] ) identifier[sizer] . identifier[Add] ( identifier[self] . identifier[interp_panel] , literal[int] , identifier[lsty] , literal[int] ) identifier[sizer] . identifier[Add] ( identifier[self] . identifier[contrast_panel] , literal[int] , identifier[lsty] , literal[int] ) identifier[cust] = identifier[self] . identifier[CustomConfig] ( identifier[self] . identifier[config_panel] , keyword[None] , literal[int] ) keyword[if] identifier[cust] keyword[is] keyword[not] keyword[None] : identifier[sizer] . identifier[Add] ( identifier[cust] , literal[int] , identifier[lsty] , literal[int] ) identifier[pack] ( identifier[self] . identifier[config_panel] , identifier[sizer] )
def Build_ConfigPanel(self): """config panel for left-hand-side of frame: RGB Maps""" panel = self.config_panel sizer = wx.BoxSizer(wx.VERTICAL) lsty = wx.ALIGN_LEFT | wx.LEFT | wx.TOP | wx.EXPAND if self.config_mode == 'rgb': for (icol, col) in enumerate(RGB_COLORS): self.cmap_panels[icol] = ColorMapPanel(self.config_panel, self.panel, title='%s: ' % col.title(), color=icol, default=col, colormap_list=None) sizer.Add(self.cmap_panels[icol], 0, lsty, 2) sizer.Add(wx.StaticLine(self.config_panel, size=(100, 2), style=wx.LI_HORIZONTAL), 0, lsty, 2) # depends on [control=['for'], data=[]] self.interp_panel = InterpPanel(self.config_panel, callback=self.onInterp) self.contrast_panel = ContrastPanel(self.config_panel, callback=self.set_contrast_levels) sizer.Add(self.interp_panel, 0, lsty, 2) sizer.Add(self.contrast_panel, 0, lsty, 2) # depends on [control=['if'], data=[]] else: self.cmap_panels[0] = ColorMapPanel(self.config_panel, self.panel, default='gray', colormap_list=ColorMap_List) self.interp_panel = InterpPanel(self.config_panel, callback=self.onInterp) self.contrast_panel = ContrastPanel(self.config_panel, callback=self.set_contrast_levels) sizer.Add(self.cmap_panels[0], 0, lsty, 1) sizer.Add(wx.StaticLine(self.config_panel, size=(100, 2), style=wx.LI_HORIZONTAL), 0, lsty, 2) sizer.Add(self.interp_panel, 0, lsty, 2) sizer.Add(self.contrast_panel, 0, lsty, 2) cust = self.CustomConfig(self.config_panel, None, 0) if cust is not None: sizer.Add(cust, 0, lsty, 1) # depends on [control=['if'], data=['cust']] pack(self.config_panel, sizer)
def gen_bash_vars(job_input_file, job_homedir=None, check_name_collision=True):
    """
    :param job_input_file: path to a JSON file describing the job inputs
    :param job_homedir: path to home directory, used for testing purposes
    :param check_name_collision: should we check for name collisions?
    :return: dictionary of shell variable definitions
    :rtype: dict of strings to strings

    Calculates a line for each shell variable to instantiate.
    If *check_name_collision* is true, then detect and warn about collisions
    with essential environment variables.
    """
    file_key_descs, rest_hash = analyze_bash_vars(job_input_file, job_homedir)

    def string_of_elem(elem):
        result = None
        if isinstance(elem, basestring):
            result = elem
        elif isinstance(elem, dxpy.DXFile):
            result = json.dumps(dxpy.dxlink(elem))
        else:
            result = json.dumps(elem)
        return pipes.quote(result)

    def string_of_value(val):
        if isinstance(val, list):
            string = " ".join([string_of_elem(vitem) for vitem in val])
            return "( {} )".format(string)
        else:
            return string_of_elem(val)

    var_defs_hash = {}

    def gen_text_line_and_name_collision(key, val):
        ''' In the absence of a name collision, create a line describing a
        bash variable.
        '''
        if check_name_collision:
            if key not in environ and key not in var_defs_hash:
                var_defs_hash[key] = val
            else:
                sys.stderr.write(dxpy.utils.printing.fill(
                    "Creating environment variable ({}) would cause a name collision".format(key)
                ) + "\n")
        else:
            var_defs_hash[key] = val

    # Processing non-file variables before the file variables. This prioritizes them,
    # so that in case of name collisions, the file-variables will be dropped.
    for key, desc in list(rest_hash.items()):
        gen_text_line_and_name_collision(key, string_of_value(desc))
    for file_key, desc in list(file_key_descs.items()):
        gen_text_line_and_name_collision(file_key, string_of_value(desc['handler']))
        gen_text_line_and_name_collision(file_key + "_name", string_of_value(desc['basename']))
        gen_text_line_and_name_collision(file_key + "_prefix", string_of_value(desc['prefix']))
        gen_text_line_and_name_collision(file_key + "_path", string_of_value(desc['path']))

    return var_defs_hash
def function[gen_bash_vars, parameter[job_input_file, job_homedir, check_name_collision]]: constant[ :param job_input_file: path to a JSON file describing the job inputs :param job_homedir: path to home directory, used for testing purposes :param check_name_collision: should we check for name collisions? :return: list of lines :rtype: list of strings Calculates a line for each shell variable to instantiate. If *check_name_collision* is true, then detect and warn about collisions with essential environment variables. ] <ast.Tuple object at 0x7da20c9918a0> assign[=] call[name[analyze_bash_vars], parameter[name[job_input_file], name[job_homedir]]] def function[string_of_elem, parameter[elem]]: variable[result] assign[=] constant[None] if call[name[isinstance], parameter[name[elem], name[basestring]]] begin[:] variable[result] assign[=] name[elem] return[call[name[pipes].quote, parameter[name[result]]]] def function[string_of_value, parameter[val]]: if call[name[isinstance], parameter[name[val], name[list]]] begin[:] variable[string] assign[=] call[constant[ ].join, parameter[<ast.ListComp object at 0x7da20c9925c0>]] return[call[constant[( {} )].format, parameter[name[string]]]] variable[var_defs_hash] assign[=] dictionary[[], []] def function[gen_text_line_and_name_collision, parameter[key, val]]: constant[ In the absence of a name collision, create a line describing a bash variable. ] if name[check_name_collision] begin[:] if <ast.BoolOp object at 0x7da20c992530> begin[:] call[name[var_defs_hash]][name[key]] assign[=] name[val] for taget[tuple[[<ast.Name object at 0x7da20c9907f0>, <ast.Name object at 0x7da20c992590>]]] in starred[call[name[list], parameter[call[name[rest_hash].items, parameter[]]]]] begin[:] call[name[gen_text_line_and_name_collision], parameter[name[key], call[name[string_of_value], parameter[name[desc]]]]] for taget[tuple[[<ast.Name object at 0x7da20c991000>, <ast.Name object at 0x7da20c992440>]]] in starred[call[name[list], parameter[call[name[file_key_descs].items, parameter[]]]]] begin[:] call[name[gen_text_line_and_name_collision], parameter[name[file_key], call[name[string_of_value], parameter[call[name[desc]][constant[handler]]]]]] call[name[gen_text_line_and_name_collision], parameter[binary_operation[name[file_key] + constant[_name]], call[name[string_of_value], parameter[call[name[desc]][constant[basename]]]]]] call[name[gen_text_line_and_name_collision], parameter[binary_operation[name[file_key] + constant[_prefix]], call[name[string_of_value], parameter[call[name[desc]][constant[prefix]]]]]] call[name[gen_text_line_and_name_collision], parameter[binary_operation[name[file_key] + constant[_path]], call[name[string_of_value], parameter[call[name[desc]][constant[path]]]]]] return[name[var_defs_hash]]
keyword[def] identifier[gen_bash_vars] ( identifier[job_input_file] , identifier[job_homedir] = keyword[None] , identifier[check_name_collision] = keyword[True] ): literal[string] identifier[file_key_descs] , identifier[rest_hash] = identifier[analyze_bash_vars] ( identifier[job_input_file] , identifier[job_homedir] ) keyword[def] identifier[string_of_elem] ( identifier[elem] ): identifier[result] = keyword[None] keyword[if] identifier[isinstance] ( identifier[elem] , identifier[basestring] ): identifier[result] = identifier[elem] keyword[elif] identifier[isinstance] ( identifier[elem] , identifier[dxpy] . identifier[DXFile] ): identifier[result] = identifier[json] . identifier[dumps] ( identifier[dxpy] . identifier[dxlink] ( identifier[elem] )) keyword[else] : identifier[result] = identifier[json] . identifier[dumps] ( identifier[elem] ) keyword[return] identifier[pipes] . identifier[quote] ( identifier[result] ) keyword[def] identifier[string_of_value] ( identifier[val] ): keyword[if] identifier[isinstance] ( identifier[val] , identifier[list] ): identifier[string] = literal[string] . identifier[join] ([ identifier[string_of_elem] ( identifier[vitem] ) keyword[for] identifier[vitem] keyword[in] identifier[val] ]) keyword[return] literal[string] . identifier[format] ( identifier[string] ) keyword[else] : keyword[return] identifier[string_of_elem] ( identifier[val] ) identifier[var_defs_hash] ={} keyword[def] identifier[gen_text_line_and_name_collision] ( identifier[key] , identifier[val] ): literal[string] keyword[if] identifier[check_name_collision] : keyword[if] identifier[key] keyword[not] keyword[in] identifier[environ] keyword[and] identifier[key] keyword[not] keyword[in] identifier[var_defs_hash] : identifier[var_defs_hash] [ identifier[key] ]= identifier[val] keyword[else] : identifier[sys] . identifier[stderr] . identifier[write] ( identifier[dxpy] . identifier[utils] . identifier[printing] . identifier[fill] ( literal[string] . identifier[format] ( identifier[key] ) )+ literal[string] ) keyword[else] : identifier[var_defs_hash] [ identifier[key] ]= identifier[val] keyword[for] identifier[key] , identifier[desc] keyword[in] identifier[list] ( identifier[rest_hash] . identifier[items] ()): identifier[gen_text_line_and_name_collision] ( identifier[key] , identifier[string_of_value] ( identifier[desc] )) keyword[for] identifier[file_key] , identifier[desc] keyword[in] identifier[list] ( identifier[file_key_descs] . identifier[items] ()): identifier[gen_text_line_and_name_collision] ( identifier[file_key] , identifier[string_of_value] ( identifier[desc] [ literal[string] ])) identifier[gen_text_line_and_name_collision] ( identifier[file_key] + literal[string] , identifier[string_of_value] ( identifier[desc] [ literal[string] ])) identifier[gen_text_line_and_name_collision] ( identifier[file_key] + literal[string] , identifier[string_of_value] ( identifier[desc] [ literal[string] ])) identifier[gen_text_line_and_name_collision] ( identifier[file_key] + literal[string] , identifier[string_of_value] ( identifier[desc] [ literal[string] ])) keyword[return] identifier[var_defs_hash]
def gen_bash_vars(job_input_file, job_homedir=None, check_name_collision=True):
    """
    :param job_input_file: path to a JSON file describing the job inputs
    :param job_homedir: path to home directory, used for testing purposes
    :param check_name_collision: should we check for name collisions?
    :return: dictionary of shell variable definitions
    :rtype: dict of strings to strings

    Calculates a line for each shell variable to instantiate.
    If *check_name_collision* is true, then detect and warn about collisions
    with essential environment variables.
    """
    (file_key_descs, rest_hash) = analyze_bash_vars(job_input_file, job_homedir)

    def string_of_elem(elem):
        result = None
        if isinstance(elem, basestring):
            result = elem # depends on [control=['if'], data=[]]
        elif isinstance(elem, dxpy.DXFile):
            result = json.dumps(dxpy.dxlink(elem)) # depends on [control=['if'], data=[]]
        else:
            result = json.dumps(elem)
        return pipes.quote(result)

    def string_of_value(val):
        if isinstance(val, list):
            string = ' '.join([string_of_elem(vitem) for vitem in val])
            return '( {} )'.format(string) # depends on [control=['if'], data=[]]
        else:
            return string_of_elem(val)
    var_defs_hash = {}

    def gen_text_line_and_name_collision(key, val):
        """ In the absence of a name collision, create a line describing a
        bash variable.
        """
        if check_name_collision:
            if key not in environ and key not in var_defs_hash:
                var_defs_hash[key] = val # depends on [control=['if'], data=[]]
            else:
                sys.stderr.write(dxpy.utils.printing.fill('Creating environment variable ({}) would cause a name collision'.format(key)) + '\n') # depends on [control=['if'], data=[]]
        else:
            var_defs_hash[key] = val
    # Processing non-file variables before the file variables. This prioritizes them,
    # so that in case of name collisions, the file-variables will be dropped.
    for (key, desc) in list(rest_hash.items()):
        gen_text_line_and_name_collision(key, string_of_value(desc)) # depends on [control=['for'], data=[]]
    for (file_key, desc) in list(file_key_descs.items()):
        gen_text_line_and_name_collision(file_key, string_of_value(desc['handler']))
        gen_text_line_and_name_collision(file_key + '_name', string_of_value(desc['basename']))
        gen_text_line_and_name_collision(file_key + '_prefix', string_of_value(desc['prefix']))
        gen_text_line_and_name_collision(file_key + '_path', string_of_value(desc['path'])) # depends on [control=['for'], data=[]]
    return var_defs_hash
def _download_file(url, local_filename): ''' Utility function that downloads a chunked response from the specified url to a local path. This method is suitable for larger downloads. ''' response = requests.get(url, stream=True) with open(local_filename, 'wb') as outfile: for chunk in response.iter_content(chunk_size=1024): if chunk: # filter out keep-alive new chunks outfile.write(chunk)
def function[_download_file, parameter[url, local_filename]]: constant[ Utility function that downloads a chunked response from the specified url to a local path. This method is suitable for larger downloads. ] variable[response] assign[=] call[name[requests].get, parameter[name[url]]] with call[name[open], parameter[name[local_filename], constant[wb]]] begin[:] for taget[name[chunk]] in starred[call[name[response].iter_content, parameter[]]] begin[:] if name[chunk] begin[:] call[name[outfile].write, parameter[name[chunk]]]
keyword[def] identifier[_download_file] ( identifier[url] , identifier[local_filename] ): literal[string] identifier[response] = identifier[requests] . identifier[get] ( identifier[url] , identifier[stream] = keyword[True] ) keyword[with] identifier[open] ( identifier[local_filename] , literal[string] ) keyword[as] identifier[outfile] : keyword[for] identifier[chunk] keyword[in] identifier[response] . identifier[iter_content] ( identifier[chunk_size] = literal[int] ): keyword[if] identifier[chunk] : identifier[outfile] . identifier[write] ( identifier[chunk] )
def _download_file(url, local_filename): """ Utility function that downloads a chunked response from the specified url to a local path. This method is suitable for larger downloads. """ response = requests.get(url, stream=True) with open(local_filename, 'wb') as outfile: for chunk in response.iter_content(chunk_size=1024): if chunk: # filter out keep-alive new chunks outfile.write(chunk) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['chunk']] # depends on [control=['with'], data=['outfile']]
def rename(self, new_name): """ Rename the container. On success, returns the new Container object. On failure, returns False. """ if _lxc.Container.rename(self, new_name): return Container(new_name) return False
def function[rename, parameter[self, new_name]]: constant[ Rename the container. On success, returns the new Container object. On failure, returns False. ] if call[name[_lxc].Container.rename, parameter[name[self], name[new_name]]] begin[:] return[call[name[Container], parameter[name[new_name]]]] return[constant[False]]
keyword[def] identifier[rename] ( identifier[self] , identifier[new_name] ): literal[string] keyword[if] identifier[_lxc] . identifier[Container] . identifier[rename] ( identifier[self] , identifier[new_name] ): keyword[return] identifier[Container] ( identifier[new_name] ) keyword[return] keyword[False]
def rename(self, new_name): """ Rename the container. On success, returns the new Container object. On failure, returns False. """ if _lxc.Container.rename(self, new_name): return Container(new_name) # depends on [control=['if'], data=[]] return False
def _recv_robust(self, sock, size):
    """
    Receive size from sock, and retry if the recv() call was interrupted.

    (this is only required for python2 compatibility)
    """
    while True:
        try:
            return sock.recv(size)
        except socket.error as e:
            if e.errno != errno.EINTR:
                raise
def function[_recv_robust, parameter[self, sock, size]]:
    constant[
        Receive size from sock, and retry if the recv() call was interrupted.

        (this is only required for python2 compatibility)
        ]
    while constant[True] begin[:]
        <ast.Try object at 0x7da1b22e9240>
keyword[def] identifier[_recv_robust] ( identifier[self] , identifier[sock] , identifier[size] ): literal[string] keyword[while] keyword[True] : keyword[try] : keyword[return] identifier[sock] . identifier[recv] ( identifier[size] ) keyword[except] identifier[socket] . identifier[error] keyword[as] identifier[e] : keyword[if] identifier[e] . identifier[errno] != identifier[errno] . identifier[EINTR] : keyword[raise]
def _recv_robust(self, sock, size):
    """
    Receive size from sock, and retry if the recv() call was interrupted.

    (this is only required for python2 compatibility)
    """
    while True:
        try:
            return sock.recv(size) # depends on [control=['try'], data=[]]
        except socket.error as e:
            if e.errno != errno.EINTR:
                raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]]
def headloss_fric(FlowRate, Diam, Length, Nu, PipeRough): """Return the major head loss (due to wall shear) in a pipe. This equation applies to both laminar and turbulent flows. """ #Checking input validity - inputs not checked here are checked by #functions this function calls. ut.check_range([Length, ">0", "Length"]) return (fric(FlowRate, Diam, Nu, PipeRough) * 8 / (gravity.magnitude * np.pi**2) * (Length * FlowRate**2) / Diam**5 )
def function[headloss_fric, parameter[FlowRate, Diam, Length, Nu, PipeRough]]: constant[Return the major head loss (due to wall shear) in a pipe. This equation applies to both laminar and turbulent flows. ] call[name[ut].check_range, parameter[list[[<ast.Name object at 0x7da1b06ce0b0>, <ast.Constant object at 0x7da1b06cc8b0>, <ast.Constant object at 0x7da1b06cc880>]]]] return[binary_operation[binary_operation[binary_operation[binary_operation[call[name[fric], parameter[name[FlowRate], name[Diam], name[Nu], name[PipeRough]]] * constant[8]] / binary_operation[name[gravity].magnitude * binary_operation[name[np].pi ** constant[2]]]] * binary_operation[name[Length] * binary_operation[name[FlowRate] ** constant[2]]]] / binary_operation[name[Diam] ** constant[5]]]]
keyword[def] identifier[headloss_fric] ( identifier[FlowRate] , identifier[Diam] , identifier[Length] , identifier[Nu] , identifier[PipeRough] ): literal[string] identifier[ut] . identifier[check_range] ([ identifier[Length] , literal[string] , literal[string] ]) keyword[return] ( identifier[fric] ( identifier[FlowRate] , identifier[Diam] , identifier[Nu] , identifier[PipeRough] ) * literal[int] /( identifier[gravity] . identifier[magnitude] * identifier[np] . identifier[pi] ** literal[int] ) *( identifier[Length] * identifier[FlowRate] ** literal[int] )/ identifier[Diam] ** literal[int] )
def headloss_fric(FlowRate, Diam, Length, Nu, PipeRough): """Return the major head loss (due to wall shear) in a pipe. This equation applies to both laminar and turbulent flows. """ #Checking input validity - inputs not checked here are checked by #functions this function calls. ut.check_range([Length, '>0', 'Length']) return fric(FlowRate, Diam, Nu, PipeRough) * 8 / (gravity.magnitude * np.pi ** 2) * (Length * FlowRate ** 2) / Diam ** 5
async def find(self, seq_set: SequenceSet, selected: SelectedMailbox,
               requirement: FetchRequirement = FetchRequirement.METADATA) \
        -> AsyncIterable[Tuple[int, MessageT]]:
    """Find the active message UID and message pairs in the mailbox that
    are contained in the given sequence set. Message sequence numbers
    are resolved by the selected mailbox session.

    Args:
        seq_set: The sequence set of the desired messages.
        selected: The selected mailbox session.
        requirement: The data required from each message.

    """
    for seq, cached_msg in selected.messages.get_all(seq_set):
        msg = await self.get(cached_msg.uid, cached_msg, requirement)
        if msg is not None:
            yield (seq, msg)
<ast.AsyncFunctionDef object at 0x7da18f58c7f0>
keyword[async] keyword[def] identifier[find] ( identifier[self] , identifier[seq_set] : identifier[SequenceSet] , identifier[selected] : identifier[SelectedMailbox] , identifier[requirement] : identifier[FetchRequirement] = identifier[FetchRequirement] . identifier[METADATA] )-> identifier[AsyncIterable] [ identifier[Tuple] [ identifier[int] , identifier[MessageT] ]]: literal[string] keyword[for] identifier[seq] , identifier[cached_msg] keyword[in] identifier[selected] . identifier[messages] . identifier[get_all] ( identifier[seq_set] ): identifier[msg] = keyword[await] identifier[self] . identifier[get] ( identifier[cached_msg] . identifier[uid] , identifier[cached_msg] , identifier[requirement] ) keyword[if] identifier[msg] keyword[is] keyword[not] keyword[None] : keyword[yield] ( identifier[seq] , identifier[msg] )
async def find(self, seq_set: SequenceSet, selected: SelectedMailbox, requirement: FetchRequirement=FetchRequirement.METADATA) -> AsyncIterable[Tuple[int, MessageT]]:
    """Find the active message UID and message pairs in the mailbox that
    are contained in the given sequence set. Message sequence numbers
    are resolved by the selected mailbox session.

    Args:
        seq_set: The sequence set of the desired messages.
        selected: The selected mailbox session.
        requirement: The data required from each message.

    """
    for (seq, cached_msg) in selected.messages.get_all(seq_set):
        msg = await self.get(cached_msg.uid, cached_msg, requirement)
        if msg is not None:
            yield (seq, msg) # depends on [control=['if'], data=['msg']] # depends on [control=['for'], data=[]]
def importer(name, extensions=None, sniff=None):
    '''
    @importer(name) is a decorator that declares that the following function is a
    file loading function that should be registered with the neuropythy load
    function. See also the forget_importer function.

    Any importer function must take, as its first argument, a filename; after that
    it may take any number of keyword arguments, but no other non-keyword
    arguments. These keyword arguments can be passed to the neuropythy load
    function.

    The following options are accepted:
      * extensions (default: None) may be a string or a collection of strings
        that indicate possible file extensions for files of this type.
      * sniff (default: None) may optionally be a function f(s) that yields True
        when the given string s is a filename for a file of this type. If no
        sniff is given, this type can still be detected by running the importer
        and catching any raised exception.
    '''
    name = name.lower()
    if name in importers:
        raise ValueError('An importer for type %s already exists; see forget_importer' % name)
    if extensions is None: extensions = ()
    elif pimms.is_str(extensions): extensions = (extensions,)
    else: extensions = tuple(extensions)
    def _importer(f):
        global importers
        importers = importers.set(name, (f, extensions, sniff))
        setattr(load, name, f)
        return f
    return _importer
def function[importer, parameter[name, extensions, sniff]]:
    constant[
    @importer(name) is a decorator that declares that the following function is a
    file loading function that should be registered with the neuropythy load
    function. See also the forget_importer function.

    Any importer function must take, as its first argument, a filename; after that
    it may take any number of keyword arguments, but no other non-keyword
    arguments. These keyword arguments can be passed to the neuropythy load
    function.

    The following options are accepted:
      * extensions (default: None) may be a string or a collection of strings
        that indicate possible file extensions for files of this type.
      * sniff (default: None) may optionally be a function f(s) that yields True
        when the given string s is a filename for a file of this type. If no
        sniff is given, this type can still be detected by running the importer
        and catching any raised exception.
    ]
    variable[name] assign[=] call[name[name].lower, parameter[]]
    if compare[name[name] in name[importers]] begin[:]
        <ast.Raise object at 0x7da204346ce0>
    if compare[name[extensions] is constant[None]] begin[:]
        variable[extensions] assign[=] tuple[[]]
    def function[_importer, parameter[f]]:
        <ast.Global object at 0x7da204961000>
        variable[importers] assign[=] call[name[importers].set, parameter[name[name], tuple[[<ast.Name object at 0x7da204961d50>, <ast.Name object at 0x7da204963820>, <ast.Name object at 0x7da204963fd0>]]]]
        call[name[setattr], parameter[name[load], name[name], name[f]]]
        return[name[f]]
    return[name[_importer]]
keyword[def] identifier[importer] ( identifier[name] , identifier[extensions] = keyword[None] , identifier[sniff] = keyword[None] ): literal[string] identifier[name] = identifier[name] . identifier[lower] () keyword[if] identifier[name] keyword[in] identifier[importers] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[name] ) keyword[if] identifier[extensions] keyword[is] keyword[None] : identifier[extensions] =() keyword[elif] identifier[pimms] . identifier[is_str] ( identifier[extensions] ): identifier[extensions] =( identifier[extensions] ,) keyword[else] : identifier[extensions] = identifier[tuple] ( identifier[extensions] ) keyword[def] identifier[_importer] ( identifier[f] ): keyword[global] identifier[importers] identifier[importers] = identifier[importers] . identifier[set] ( identifier[name] ,( identifier[f] , identifier[extensions] , identifier[sniff] )) identifier[setattr] ( identifier[load] , identifier[name] , identifier[f] ) keyword[return] identifier[f] keyword[return] identifier[_importer]
def importer(name, extensions=None, sniff=None):
    """
    @importer(name) is a decorator that declares that the following function is a
    file loading function that should be registered with the neuropythy load
    function. See also the forget_importer function.

    Any importer function must take, as its first argument, a filename; after that
    it may take any number of keyword arguments, but no other non-keyword
    arguments. These keyword arguments can be passed to the neuropythy load
    function.

    The following options are accepted:
      * extensions (default: None) may be a string or a collection of strings
        that indicate possible file extensions for files of this type.
      * sniff (default: None) may optionally be a function f(s) that yields True
        when the given string s is a filename for a file of this type. If no
        sniff is given, this type can still be detected by running the importer
        and catching any raised exception.
    """
    name = name.lower()
    if name in importers:
        raise ValueError('An importer for type %s already exists; see forget_importer' % name) # depends on [control=['if'], data=['name']]
    if extensions is None:
        extensions = () # depends on [control=['if'], data=['extensions']]
    elif pimms.is_str(extensions):
        extensions = (extensions,) # depends on [control=['if'], data=[]]
    else:
        extensions = tuple(extensions)

    def _importer(f):
        global importers
        importers = importers.set(name, (f, extensions, sniff))
        setattr(load, name, f)
        return f
    return _importer
def auth_user_process_url(self, url): 'Process tokens and errors from redirect_uri.' url = urlparse.urlparse(url) url_qs = dict(it.chain.from_iterable( urlparse.parse_qsl(v) for v in [url.query, url.fragment] )) if url_qs.get('error'): raise APIAuthError( '{} :: {}'.format(url_qs['error'], url_qs.get('error_description')) ) self.auth_code = url_qs['code'] return self.auth_code
def function[auth_user_process_url, parameter[self, url]]: constant[Process tokens and errors from redirect_uri.] variable[url] assign[=] call[name[urlparse].urlparse, parameter[name[url]]] variable[url_qs] assign[=] call[name[dict], parameter[call[name[it].chain.from_iterable, parameter[<ast.GeneratorExp object at 0x7da1b0210a60>]]]] if call[name[url_qs].get, parameter[constant[error]]] begin[:] <ast.Raise object at 0x7da1b0213df0> name[self].auth_code assign[=] call[name[url_qs]][constant[code]] return[name[self].auth_code]
keyword[def] identifier[auth_user_process_url] ( identifier[self] , identifier[url] ): literal[string] identifier[url] = identifier[urlparse] . identifier[urlparse] ( identifier[url] ) identifier[url_qs] = identifier[dict] ( identifier[it] . identifier[chain] . identifier[from_iterable] ( identifier[urlparse] . identifier[parse_qsl] ( identifier[v] ) keyword[for] identifier[v] keyword[in] [ identifier[url] . identifier[query] , identifier[url] . identifier[fragment] ])) keyword[if] identifier[url_qs] . identifier[get] ( literal[string] ): keyword[raise] identifier[APIAuthError] ( literal[string] . identifier[format] ( identifier[url_qs] [ literal[string] ], identifier[url_qs] . identifier[get] ( literal[string] ))) identifier[self] . identifier[auth_code] = identifier[url_qs] [ literal[string] ] keyword[return] identifier[self] . identifier[auth_code]
def auth_user_process_url(self, url): """Process tokens and errors from redirect_uri.""" url = urlparse.urlparse(url) url_qs = dict(it.chain.from_iterable((urlparse.parse_qsl(v) for v in [url.query, url.fragment]))) if url_qs.get('error'): raise APIAuthError('{} :: {}'.format(url_qs['error'], url_qs.get('error_description'))) # depends on [control=['if'], data=[]] self.auth_code = url_qs['code'] return self.auth_code
def get_datawrapper(self, file_format='BlockNeuronBuilder', data_wrapper=DataWrapper): '''returns a DataWrapper''' self._check_consistency() datablock, sections = self._make_datablock() return data_wrapper(datablock, file_format, sections)
def function[get_datawrapper, parameter[self, file_format, data_wrapper]]: constant[returns a DataWrapper] call[name[self]._check_consistency, parameter[]] <ast.Tuple object at 0x7da18bc726b0> assign[=] call[name[self]._make_datablock, parameter[]] return[call[name[data_wrapper], parameter[name[datablock], name[file_format], name[sections]]]]
keyword[def] identifier[get_datawrapper] ( identifier[self] , identifier[file_format] = literal[string] , identifier[data_wrapper] = identifier[DataWrapper] ): literal[string] identifier[self] . identifier[_check_consistency] () identifier[datablock] , identifier[sections] = identifier[self] . identifier[_make_datablock] () keyword[return] identifier[data_wrapper] ( identifier[datablock] , identifier[file_format] , identifier[sections] )
def get_datawrapper(self, file_format='BlockNeuronBuilder', data_wrapper=DataWrapper): """returns a DataWrapper""" self._check_consistency() (datablock, sections) = self._make_datablock() return data_wrapper(datablock, file_format, sections)
def _create_gcl_resource(self): """Create a configured Resource object. The logging.resource.Resource object enables GCL to filter and bucket incoming logs according to which resource (host) they're coming from. Returns: (obj): Instance of `google.cloud.logging.resource.Resource` """ return gcl_resource.Resource('gce_instance', { 'project_id': self.project_id, 'instance_id': self.instance_id, 'zone': self.zone })
def function[_create_gcl_resource, parameter[self]]: constant[Create a configured Resource object. The logging.resource.Resource object enables GCL to filter and bucket incoming logs according to which resource (host) they're coming from. Returns: (obj): Instance of `google.cloud.logging.resource.Resource` ] return[call[name[gcl_resource].Resource, parameter[constant[gce_instance], dictionary[[<ast.Constant object at 0x7da18c4cd600>, <ast.Constant object at 0x7da18c4ce860>, <ast.Constant object at 0x7da18c4cf520>], [<ast.Attribute object at 0x7da18c4cc3d0>, <ast.Attribute object at 0x7da18c4cd960>, <ast.Attribute object at 0x7da18c4cda50>]]]]]
keyword[def] identifier[_create_gcl_resource] ( identifier[self] ): literal[string] keyword[return] identifier[gcl_resource] . identifier[Resource] ( literal[string] ,{ literal[string] : identifier[self] . identifier[project_id] , literal[string] : identifier[self] . identifier[instance_id] , literal[string] : identifier[self] . identifier[zone] })
def _create_gcl_resource(self): """Create a configured Resource object. The logging.resource.Resource object enables GCL to filter and bucket incoming logs according to which resource (host) they're coming from. Returns: (obj): Instance of `google.cloud.logging.resource.Resource` """ return gcl_resource.Resource('gce_instance', {'project_id': self.project_id, 'instance_id': self.instance_id, 'zone': self.zone})
def parse(cls, credentials) -> typing.Optional["Credentials"]: """Parse/interpret some given credentials. These may take the form of: * An empty string. * An empty sequence. * A string, containing three parts (consumer key, token key, and token secret) separated by colons. * A sequence of three strings (consumer key, token key, and token secret). * None. """ if credentials is None: return None elif isinstance(credentials, cls): return credentials elif isinstance(credentials, str): if credentials == "": return None elif credentials.count(":") == 2: return cls(*credentials.split(":")) else: raise ValueError( "Malformed credentials. Expected 3 colon-separated " "parts, got %r." % (credentials, )) else: parts = list(credentials) if len(parts) == 0: return None elif len(parts) == 3: return cls(*parts) else: raise ValueError( "Malformed credentials. Expected 3 parts, " "got %r." % (credentials, ))
def function[parse, parameter[cls, credentials]]: constant[Parse/interpret some given credentials. These may take the form of: * An empty string. * An empty sequence. * A string, containing three parts (consumer key, token key, and token secret) separated by colons. * A sequence of three strings (consumer key, token key, and token secret). * None. ] if compare[name[credentials] is constant[None]] begin[:] return[constant[None]]
keyword[def] identifier[parse] ( identifier[cls] , identifier[credentials] )-> identifier[typing] . identifier[Optional] [ literal[string] ]: literal[string] keyword[if] identifier[credentials] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[elif] identifier[isinstance] ( identifier[credentials] , identifier[cls] ): keyword[return] identifier[credentials] keyword[elif] identifier[isinstance] ( identifier[credentials] , identifier[str] ): keyword[if] identifier[credentials] == literal[string] : keyword[return] keyword[None] keyword[elif] identifier[credentials] . identifier[count] ( literal[string] )== literal[int] : keyword[return] identifier[cls] (* identifier[credentials] . identifier[split] ( literal[string] )) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] %( identifier[credentials] ,)) keyword[else] : identifier[parts] = identifier[list] ( identifier[credentials] ) keyword[if] identifier[len] ( identifier[parts] )== literal[int] : keyword[return] keyword[None] keyword[elif] identifier[len] ( identifier[parts] )== literal[int] : keyword[return] identifier[cls] (* identifier[parts] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] %( identifier[credentials] ,))
def parse(cls, credentials) -> typing.Optional['Credentials']: """Parse/interpret some given credentials. These may take the form of: * An empty string. * An empty sequence. * A string, containing three parts (consumer key, token key, and token secret) separated by colons. * A sequence of three strings (consumer key, token key, and token secret). * None. """ if credentials is None: return None # depends on [control=['if'], data=[]] elif isinstance(credentials, cls): return credentials # depends on [control=['if'], data=[]] elif isinstance(credentials, str): if credentials == '': return None # depends on [control=['if'], data=[]] elif credentials.count(':') == 2: return cls(*credentials.split(':')) # depends on [control=['if'], data=[]] else: raise ValueError('Malformed credentials. Expected 3 colon-separated parts, got %r.' % (credentials,)) # depends on [control=['if'], data=[]] else: parts = list(credentials) if len(parts) == 0: return None # depends on [control=['if'], data=[]] elif len(parts) == 3: return cls(*parts) # depends on [control=['if'], data=[]] else: raise ValueError('Malformed credentials. Expected 3 parts, got %r.' % (credentials,))
def do_view(self): """ Authenticate user with given credentials. Connects user's queue and exchange """ self.current.output['login_process'] = True self.current.task_data['login_successful'] = False if self.current.is_auth: self._do_upgrade() else: try: auth_result = self.current.auth.authenticate( self.current.input['username'], self.current.input['password']) self.current.task_data['login_successful'] = auth_result if auth_result: self._do_upgrade() except ObjectDoesNotExist: self.current.log.exception("Wrong username or another error occurred") pass except: raise if self.current.output.get('cmd') != 'upgrade': self.current.output['status_code'] = 403 else: KeepAlive(self.current.user_id).reset()
def function[do_view, parameter[self]]: constant[ Authenticate user with given credentials. Connects user's queue and exchange ] call[name[self].current.output][constant[login_process]] assign[=] constant[True] call[name[self].current.task_data][constant[login_successful]] assign[=] constant[False] if name[self].current.is_auth begin[:] call[name[self]._do_upgrade, parameter[]]
keyword[def] identifier[do_view] ( identifier[self] ): literal[string] identifier[self] . identifier[current] . identifier[output] [ literal[string] ]= keyword[True] identifier[self] . identifier[current] . identifier[task_data] [ literal[string] ]= keyword[False] keyword[if] identifier[self] . identifier[current] . identifier[is_auth] : identifier[self] . identifier[_do_upgrade] () keyword[else] : keyword[try] : identifier[auth_result] = identifier[self] . identifier[current] . identifier[auth] . identifier[authenticate] ( identifier[self] . identifier[current] . identifier[input] [ literal[string] ], identifier[self] . identifier[current] . identifier[input] [ literal[string] ]) identifier[self] . identifier[current] . identifier[task_data] [ literal[string] ]= identifier[auth_result] keyword[if] identifier[auth_result] : identifier[self] . identifier[_do_upgrade] () keyword[except] identifier[ObjectDoesNotExist] : identifier[self] . identifier[current] . identifier[log] . identifier[exception] ( literal[string] ) keyword[pass] keyword[except] : keyword[raise] keyword[if] identifier[self] . identifier[current] . identifier[output] . identifier[get] ( literal[string] )!= literal[string] : identifier[self] . identifier[current] . identifier[output] [ literal[string] ]= literal[int] keyword[else] : identifier[KeepAlive] ( identifier[self] . identifier[current] . identifier[user_id] ). identifier[reset] ()
def do_view(self): """ Authenticate user with given credentials. Connects user's queue and exchange """ self.current.output['login_process'] = True self.current.task_data['login_successful'] = False if self.current.is_auth: self._do_upgrade() # depends on [control=['if'], data=[]] else: try: auth_result = self.current.auth.authenticate(self.current.input['username'], self.current.input['password']) self.current.task_data['login_successful'] = auth_result if auth_result: self._do_upgrade() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except ObjectDoesNotExist: self.current.log.exception('Wrong username or another error occurred') pass # depends on [control=['except'], data=[]] except: raise # depends on [control=['except'], data=[]] if self.current.output.get('cmd') != 'upgrade': self.current.output['status_code'] = 403 # depends on [control=['if'], data=[]] else: KeepAlive(self.current.user_id).reset()
def read(self, size=None):
    """Reads a byte string from the file-like object at the current offset.

    The function will read a byte string of the specified size or
    all of the remaining data if no size was specified.

    Args:
      size (Optional[int]): number of bytes to read, where None is all
          remaining data.

    Returns:
      bytes: data read.

    Raises:
      IOError: if the read failed.
      OSError: if the read failed.
    """
    if not self._is_open:
        raise IOError('Not opened.')

    if self._current_offset < 0:
        raise IOError('Invalid current offset value less than zero.')

    if self._current_offset > self._size:
        return b''

    if size is None or self._current_offset + size > self._size:
        size = self._size - self._current_offset

    self._tar_ext_file.seek(self._current_offset, os.SEEK_SET)

    data = self._tar_ext_file.read(size)

    # It is possible that the returned data size is not the same as the
    # requested data size. At this layer we don't care and this discrepancy
    # should be dealt with on a higher layer if necessary.
    self._current_offset += len(data)

    return data
def function[read, parameter[self, size]]: constant[Reads a byte string from the file-like object at the current offset. The function will read a byte string of the specified size or all of the remaining data if no size was specified. Args: size (Optional[int]): number of bytes to read, where None is all remaining data. Returns: bytes: data read. Raises: IOError: if the read failed. OSError: if the read failed. ] if <ast.UnaryOp object at 0x7da1b0655bd0> begin[:] <ast.Raise object at 0x7da1b06565f0> if compare[name[self]._current_offset less[<] constant[0]] begin[:] <ast.Raise object at 0x7da1b0655e40> if compare[name[self]._current_offset greater[>] name[self]._size] begin[:] return[constant[b'']] if <ast.BoolOp object at 0x7da1b0655d20> begin[:] variable[size] assign[=] binary_operation[name[self]._size - name[self]._current_offset] call[name[self]._tar_ext_file.seek, parameter[name[self]._current_offset, name[os].SEEK_SET]] variable[data] assign[=] call[name[self]._tar_ext_file.read, parameter[name[size]]] <ast.AugAssign object at 0x7da1b06553f0> return[name[data]]
keyword[def] identifier[read] ( identifier[self] , identifier[size] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_is_open] : keyword[raise] identifier[IOError] ( literal[string] ) keyword[if] identifier[self] . identifier[_current_offset] < literal[int] : keyword[raise] identifier[IOError] ( literal[string] ) keyword[if] identifier[self] . identifier[_current_offset] > identifier[self] . identifier[_size] : keyword[return] literal[string] keyword[if] identifier[size] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[_current_offset] + identifier[size] > identifier[self] . identifier[_size] : identifier[size] = identifier[self] . identifier[_size] - identifier[self] . identifier[_current_offset] identifier[self] . identifier[_tar_ext_file] . identifier[seek] ( identifier[self] . identifier[_current_offset] , identifier[os] . identifier[SEEK_SET] ) identifier[data] = identifier[self] . identifier[_tar_ext_file] . identifier[read] ( identifier[size] ) identifier[self] . identifier[_current_offset] += identifier[len] ( identifier[data] ) keyword[return] identifier[data]
def read(self, size=None):
    """Reads a byte string from the file-like object at the current offset.

    The function will read a byte string of the specified size or
    all of the remaining data if no size was specified.

    Args:
      size (Optional[int]): number of bytes to read, where None is all
          remaining data.

    Returns:
      bytes: data read.

    Raises:
      IOError: if the read failed.
      OSError: if the read failed.
    """
    if not self._is_open:
        raise IOError('Not opened.') # depends on [control=['if'], data=[]]
    if self._current_offset < 0:
        raise IOError('Invalid current offset value less than zero.') # depends on [control=['if'], data=[]]
    if self._current_offset > self._size:
        return b'' # depends on [control=['if'], data=[]]
    if size is None or self._current_offset + size > self._size:
        size = self._size - self._current_offset # depends on [control=['if'], data=[]]
    self._tar_ext_file.seek(self._current_offset, os.SEEK_SET)
    data = self._tar_ext_file.read(size)
    # It is possible that the returned data size is not the same as the
    # requested data size. At this layer we don't care and this discrepancy
    # should be dealt with on a higher layer if necessary.
    self._current_offset += len(data)
    return data
def show_zoning_enabled_configuration_output_enabled_configuration_enabled_zone_member_entry_entry_name(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") show_zoning_enabled_configuration = ET.Element("show_zoning_enabled_configuration") config = show_zoning_enabled_configuration output = ET.SubElement(show_zoning_enabled_configuration, "output") enabled_configuration = ET.SubElement(output, "enabled-configuration") enabled_zone = ET.SubElement(enabled_configuration, "enabled-zone") zone_name_key = ET.SubElement(enabled_zone, "zone-name") zone_name_key.text = kwargs.pop('zone_name') member_entry = ET.SubElement(enabled_zone, "member-entry") entry_name = ET.SubElement(member_entry, "entry-name") entry_name.text = kwargs.pop('entry_name') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[show_zoning_enabled_configuration_output_enabled_configuration_enabled_zone_member_entry_entry_name, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[show_zoning_enabled_configuration] assign[=] call[name[ET].Element, parameter[constant[show_zoning_enabled_configuration]]] variable[config] assign[=] name[show_zoning_enabled_configuration] variable[output] assign[=] call[name[ET].SubElement, parameter[name[show_zoning_enabled_configuration], constant[output]]] variable[enabled_configuration] assign[=] call[name[ET].SubElement, parameter[name[output], constant[enabled-configuration]]] variable[enabled_zone] assign[=] call[name[ET].SubElement, parameter[name[enabled_configuration], constant[enabled-zone]]] variable[zone_name_key] assign[=] call[name[ET].SubElement, parameter[name[enabled_zone], constant[zone-name]]] name[zone_name_key].text assign[=] call[name[kwargs].pop, parameter[constant[zone_name]]] variable[member_entry] assign[=] call[name[ET].SubElement, parameter[name[enabled_zone], constant[member-entry]]] variable[entry_name] assign[=] call[name[ET].SubElement, parameter[name[member_entry], constant[entry-name]]] name[entry_name].text assign[=] call[name[kwargs].pop, parameter[constant[entry_name]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[show_zoning_enabled_configuration_output_enabled_configuration_enabled_zone_member_entry_entry_name] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[show_zoning_enabled_configuration] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[config] = identifier[show_zoning_enabled_configuration] identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[show_zoning_enabled_configuration] , literal[string] ) identifier[enabled_configuration] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] ) identifier[enabled_zone] = identifier[ET] . identifier[SubElement] ( identifier[enabled_configuration] , literal[string] ) identifier[zone_name_key] = identifier[ET] . identifier[SubElement] ( identifier[enabled_zone] , literal[string] ) identifier[zone_name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[member_entry] = identifier[ET] . identifier[SubElement] ( identifier[enabled_zone] , literal[string] ) identifier[entry_name] = identifier[ET] . identifier[SubElement] ( identifier[member_entry] , literal[string] ) identifier[entry_name] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def show_zoning_enabled_configuration_output_enabled_configuration_enabled_zone_member_entry_entry_name(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') show_zoning_enabled_configuration = ET.Element('show_zoning_enabled_configuration') config = show_zoning_enabled_configuration output = ET.SubElement(show_zoning_enabled_configuration, 'output') enabled_configuration = ET.SubElement(output, 'enabled-configuration') enabled_zone = ET.SubElement(enabled_configuration, 'enabled-zone') zone_name_key = ET.SubElement(enabled_zone, 'zone-name') zone_name_key.text = kwargs.pop('zone_name') member_entry = ET.SubElement(enabled_zone, 'member-entry') entry_name = ET.SubElement(member_entry, 'entry-name') entry_name.text = kwargs.pop('entry_name') callback = kwargs.pop('callback', self._callback) return callback(config)
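The method above follows a fixed recipe: build a nested ElementTree request, then hand it to a callback. A runnable sketch of the same nesting, with the callback replaced by plain serialization; the tag names mirror the generated method, while build_zone_entry_query is an illustrative name.

import xml.etree.ElementTree as ET

def build_zone_entry_query(zone_name, entry_name):
    # mirror the element nesting built by the auto-generated method above
    show = ET.Element("show_zoning_enabled_configuration")
    output = ET.SubElement(show, "output")
    enabled_configuration = ET.SubElement(output, "enabled-configuration")
    enabled_zone = ET.SubElement(enabled_configuration, "enabled-zone")
    ET.SubElement(enabled_zone, "zone-name").text = zone_name
    member_entry = ET.SubElement(enabled_zone, "member-entry")
    ET.SubElement(member_entry, "entry-name").text = entry_name
    return show

print(ET.tostring(build_zone_entry_query("zone_a", "10:00:00:00:00:00:00:01")))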
def _autocorr_func3(mags, lag, maglen, magmed, magstd):
    '''
    This is yet another alternative to calculate the autocorrelation.

    Taken from: `Bayesian Methods for Hackers by Cameron Pilon
    <http://nbviewer.jupyter.org/github/CamDavidsonPilon/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers/blob/master/Chapter3_MCMC/Chapter3.ipynb#Autocorrelation>`_

    (This should be the fastest method to calculate ACFs.)

    Parameters
    ----------

    mags : np.array
        This is the magnitudes array. MUST NOT have any nans.

    lag : float
        The specific lag value to calculate the auto-correlation for. This MUST
        be less than total number of observations in `mags`.

    maglen : int
        The number of elements in the `mags` array.

    magmed : float
        The median of the `mags` array.

    magstd : float
        The standard deviation of the `mags` array.

    Returns
    -------

    np.array
        The full autocorrelation function, starting at lag 0. Index into this
        array to get the auto-correlation at a specific `lag` value.

    '''

    # NOTE: lag, maglen, magmed, and magstd are accepted for interface
    # compatibility with the other _autocorr_func variants but are unused here.

    # from http://tinyurl.com/afz57c4
    result = npcorrelate(mags, mags, mode='full')
    result = result / npmax(result)
    return result[int(result.size / 2):]
def function[_autocorr_func3, parameter[mags, lag, maglen, magmed, magstd]]: constant[ This is yet another alternative to calculate the autocorrelation. Taken from: `Bayesian Methods for Hackers by Cameron Pilon <http://nbviewer.jupyter.org/github/CamDavidsonPilon/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers/blob/master/Chapter3_MCMC/Chapter3.ipynb#Autocorrelation>`_ (This should be the fastest method to calculate ACFs.) Parameters ---------- mags : np.array This is the magnitudes array. MUST NOT have any nans. lag : float The specific lag value to calculate the auto-correlation for. This MUST be less than total number of observations in `mags`. maglen : int The number of elements in the `mags` array. magmed : float The median of the `mags` array. magstd : float The standard deviation of the `mags` array. Returns ------- float The auto-correlation at this specific `lag` value. ] variable[result] assign[=] call[name[npcorrelate], parameter[name[mags], name[mags]]] variable[result] assign[=] binary_operation[name[result] / call[name[npmax], parameter[name[result]]]] return[call[name[result]][<ast.Slice object at 0x7da2054a7c40>]]
keyword[def] identifier[_autocorr_func3] ( identifier[mags] , identifier[lag] , identifier[maglen] , identifier[magmed] , identifier[magstd] ): literal[string] identifier[result] = identifier[npcorrelate] ( identifier[mags] , identifier[mags] , identifier[mode] = literal[string] ) identifier[result] = identifier[result] / identifier[npmax] ( identifier[result] ) keyword[return] identifier[result] [ identifier[int] ( identifier[result] . identifier[size] / literal[int] ):]
def _autocorr_func3(mags, lag, maglen, magmed, magstd): """ This is yet another alternative to calculate the autocorrelation. Taken from: `Bayesian Methods for Hackers by Cameron Pilon <http://nbviewer.jupyter.org/github/CamDavidsonPilon/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers/blob/master/Chapter3_MCMC/Chapter3.ipynb#Autocorrelation>`_ (This should be the fastest method to calculate ACFs.) Parameters ---------- mags : np.array This is the magnitudes array. MUST NOT have any nans. lag : float The specific lag value to calculate the auto-correlation for. This MUST be less than total number of observations in `mags`. maglen : int The number of elements in the `mags` array. magmed : float The median of the `mags` array. magstd : float The standard deviation of the `mags` array. Returns ------- float The auto-correlation at this specific `lag` value. """ # from http://tinyurl.com/afz57c4 result = npcorrelate(mags, mags, mode='full') result = result / npmax(result) return result[int(result.size / 2):]
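`npcorrelate` and `npmax` are simply aliased numpy functions, so the whole computation can be sketched with plain numpy names. One hedge: the variant below subtracts the median first so the ACF measures fluctuations around the baseline, which the function above does not do (it correlates the raw magnitudes and leaves magmed/magstd unused).

import numpy as np

def autocorr(mags):
    # same normalized full cross-correlation trick, with the median
    # removed first (an assumption; the function above skips this step)
    x = mags - np.median(mags)
    result = np.correlate(x, x, mode='full')
    result = result / np.max(result)
    # keep the non-negative lags; the center of the 'full' output is lag 0
    return result[result.size // 2:]

rng = np.random.default_rng(42)
mags = np.sin(np.linspace(0.0, 20.0, 500)) + 0.1 * rng.normal(size=500)
acf = autocorr(mags)
print(acf[0], acf[10])  # lag 0 is 1.0 by construction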
def _compute_edges_cells(self): """This creates interior edge->cells relations. While it's not necessary for many applications, it sometimes does come in handy. """ if self.edges is None: self.create_edges() num_edges = len(self.edges["nodes"]) counts = numpy.zeros(num_edges, dtype=int) fastfunc.add.at( counts, self.cells["edges"], numpy.ones(self.cells["edges"].shape, dtype=int), ) # <https://stackoverflow.com/a/50395231/353337> edges_flat = self.cells["edges"].flat idx_sort = numpy.argsort(edges_flat) idx_start, count = grp_start_len(edges_flat[idx_sort]) res1 = idx_sort[idx_start[count == 1]][:, numpy.newaxis] idx = idx_start[count == 2] res2 = numpy.column_stack([idx_sort[idx], idx_sort[idx + 1]]) self._edges_cells = [ [], # no edges with zero adjacent cells res1 // 3, res2 // 3, ] # self._edges_local = [ # [], # no edges with zero adjacent cells # res1 % 3, # res2 % 3, # ] # For each edge, store the number of adjacent cells plus the index into # the respective edge array. self._edge_gid_to_edge_list = numpy.empty((num_edges, 2), dtype=int) self._edge_gid_to_edge_list[:, 0] = count c1 = count == 1 l1 = numpy.sum(c1) self._edge_gid_to_edge_list[c1, 1] = numpy.arange(l1) c2 = count == 2 l2 = numpy.sum(c2) self._edge_gid_to_edge_list[c2, 1] = numpy.arange(l2) assert l1 + l2 == len(count) return
def function[_compute_edges_cells, parameter[self]]: constant[This creates interior edge->cells relations. While it's not necessary for many applications, it sometimes does come in handy. ] if compare[name[self].edges is constant[None]] begin[:] call[name[self].create_edges, parameter[]] variable[num_edges] assign[=] call[name[len], parameter[call[name[self].edges][constant[nodes]]]] variable[counts] assign[=] call[name[numpy].zeros, parameter[name[num_edges]]] call[name[fastfunc].add.at, parameter[name[counts], call[name[self].cells][constant[edges]], call[name[numpy].ones, parameter[call[name[self].cells][constant[edges]].shape]]]] variable[edges_flat] assign[=] call[name[self].cells][constant[edges]].flat variable[idx_sort] assign[=] call[name[numpy].argsort, parameter[name[edges_flat]]] <ast.Tuple object at 0x7da20c76c3a0> assign[=] call[name[grp_start_len], parameter[call[name[edges_flat]][name[idx_sort]]]] variable[res1] assign[=] call[call[name[idx_sort]][call[name[idx_start]][compare[name[count] equal[==] constant[1]]]]][tuple[[<ast.Slice object at 0x7da20c76cd90>, <ast.Attribute object at 0x7da20c76d2a0>]]] variable[idx] assign[=] call[name[idx_start]][compare[name[count] equal[==] constant[2]]] variable[res2] assign[=] call[name[numpy].column_stack, parameter[list[[<ast.Subscript object at 0x7da20c76fa60>, <ast.Subscript object at 0x7da20c76fd90>]]]] name[self]._edges_cells assign[=] list[[<ast.List object at 0x7da20c76df30>, <ast.BinOp object at 0x7da20c76eef0>, <ast.BinOp object at 0x7da20c76e620>]] name[self]._edge_gid_to_edge_list assign[=] call[name[numpy].empty, parameter[tuple[[<ast.Name object at 0x7da20c76d690>, <ast.Constant object at 0x7da20c76f160>]]]] call[name[self]._edge_gid_to_edge_list][tuple[[<ast.Slice object at 0x7da20c76e710>, <ast.Constant object at 0x7da20c76ca90>]]] assign[=] name[count] variable[c1] assign[=] compare[name[count] equal[==] constant[1]] variable[l1] assign[=] call[name[numpy].sum, parameter[name[c1]]] call[name[self]._edge_gid_to_edge_list][tuple[[<ast.Name object at 0x7da207f01720>, <ast.Constant object at 0x7da207f03790>]]] assign[=] call[name[numpy].arange, parameter[name[l1]]] variable[c2] assign[=] compare[name[count] equal[==] constant[2]] variable[l2] assign[=] call[name[numpy].sum, parameter[name[c2]]] call[name[self]._edge_gid_to_edge_list][tuple[[<ast.Name object at 0x7da207f03e50>, <ast.Constant object at 0x7da207f03460>]]] assign[=] call[name[numpy].arange, parameter[name[l2]]] assert[compare[binary_operation[name[l1] + name[l2]] equal[==] call[name[len], parameter[name[count]]]]] return[None]
keyword[def] identifier[_compute_edges_cells] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[edges] keyword[is] keyword[None] : identifier[self] . identifier[create_edges] () identifier[num_edges] = identifier[len] ( identifier[self] . identifier[edges] [ literal[string] ]) identifier[counts] = identifier[numpy] . identifier[zeros] ( identifier[num_edges] , identifier[dtype] = identifier[int] ) identifier[fastfunc] . identifier[add] . identifier[at] ( identifier[counts] , identifier[self] . identifier[cells] [ literal[string] ], identifier[numpy] . identifier[ones] ( identifier[self] . identifier[cells] [ literal[string] ]. identifier[shape] , identifier[dtype] = identifier[int] ), ) identifier[edges_flat] = identifier[self] . identifier[cells] [ literal[string] ]. identifier[flat] identifier[idx_sort] = identifier[numpy] . identifier[argsort] ( identifier[edges_flat] ) identifier[idx_start] , identifier[count] = identifier[grp_start_len] ( identifier[edges_flat] [ identifier[idx_sort] ]) identifier[res1] = identifier[idx_sort] [ identifier[idx_start] [ identifier[count] == literal[int] ]][:, identifier[numpy] . identifier[newaxis] ] identifier[idx] = identifier[idx_start] [ identifier[count] == literal[int] ] identifier[res2] = identifier[numpy] . identifier[column_stack] ([ identifier[idx_sort] [ identifier[idx] ], identifier[idx_sort] [ identifier[idx] + literal[int] ]]) identifier[self] . identifier[_edges_cells] =[ [], identifier[res1] // literal[int] , identifier[res2] // literal[int] , ] identifier[self] . identifier[_edge_gid_to_edge_list] = identifier[numpy] . identifier[empty] (( identifier[num_edges] , literal[int] ), identifier[dtype] = identifier[int] ) identifier[self] . identifier[_edge_gid_to_edge_list] [:, literal[int] ]= identifier[count] identifier[c1] = identifier[count] == literal[int] identifier[l1] = identifier[numpy] . identifier[sum] ( identifier[c1] ) identifier[self] . identifier[_edge_gid_to_edge_list] [ identifier[c1] , literal[int] ]= identifier[numpy] . identifier[arange] ( identifier[l1] ) identifier[c2] = identifier[count] == literal[int] identifier[l2] = identifier[numpy] . identifier[sum] ( identifier[c2] ) identifier[self] . identifier[_edge_gid_to_edge_list] [ identifier[c2] , literal[int] ]= identifier[numpy] . identifier[arange] ( identifier[l2] ) keyword[assert] identifier[l1] + identifier[l2] == identifier[len] ( identifier[count] ) keyword[return]
def _compute_edges_cells(self): """This creates interior edge->cells relations. While it's not necessary for many applications, it sometimes does come in handy. """ if self.edges is None: self.create_edges() # depends on [control=['if'], data=[]] num_edges = len(self.edges['nodes']) counts = numpy.zeros(num_edges, dtype=int) fastfunc.add.at(counts, self.cells['edges'], numpy.ones(self.cells['edges'].shape, dtype=int)) # <https://stackoverflow.com/a/50395231/353337> edges_flat = self.cells['edges'].flat idx_sort = numpy.argsort(edges_flat) (idx_start, count) = grp_start_len(edges_flat[idx_sort]) res1 = idx_sort[idx_start[count == 1]][:, numpy.newaxis] idx = idx_start[count == 2] res2 = numpy.column_stack([idx_sort[idx], idx_sort[idx + 1]]) # no edges with zero adjacent cells self._edges_cells = [[], res1 // 3, res2 // 3] # self._edges_local = [ # [], # no edges with zero adjacent cells # res1 % 3, # res2 % 3, # ] # For each edge, store the number of adjacent cells plus the index into # the respective edge array. self._edge_gid_to_edge_list = numpy.empty((num_edges, 2), dtype=int) self._edge_gid_to_edge_list[:, 0] = count c1 = count == 1 l1 = numpy.sum(c1) self._edge_gid_to_edge_list[c1, 1] = numpy.arange(l1) c2 = count == 2 l2 = numpy.sum(c2) self._edge_gid_to_edge_list[c2, 1] = numpy.arange(l2) assert l1 + l2 == len(count) return
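`fastfunc` and `grp_start_len` are external helpers; assuming `grp_start_len` returns the run starts and run lengths of an already-sorted array, the core argsort-and-group trick can be reproduced with plain numpy on a two-triangle mesh:

import numpy as np

# cells_edges[i] lists the three edge ids of triangle i; the two
# triangles below share edge 2
cells_edges = np.array([[0, 1, 2],
                        [2, 3, 4]])

edges_flat = cells_edges.ravel()
idx_sort = np.argsort(edges_flat)
sorted_edges = edges_flat[idx_sort]

# run starts and lengths of the sorted edge ids (the role grp_start_len
# presumably plays above)
idx_start = np.flatnonzero(np.r_[True, sorted_edges[1:] != sorted_edges[:-1]])
count = np.diff(np.r_[idx_start, sorted_edges.size])

# flat index // 3 recovers the owning cell, as in the method above
boundary_cells = idx_sort[idx_start[count == 1]] // 3
idx = idx_start[count == 2]
interior_cells = np.column_stack([idx_sort[idx], idx_sort[idx + 1]]) // 3
print(boundary_cells)   # [0 0 1 1] -> edges 0, 1, 3, 4 each touch one cell
print(interior_cells)   # [[0 1]]   -> edge 2 is shared by cells 0 and 1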
def repr2(data, **kwargs):
    """
    Makes a pretty and easy-to-doctest string representation!

    This is an alternative to `repr` and `pprint.pformat` that attempts to be
    both more configurable and to generate output that is consistent between
    Python versions.

    Notes:
        This function has many keyword arguments that can be used to customize
        the final representation. For convenience some of the more frequently
        used kwargs have short aliases. See `Args` for more details.

    Args:
        data (object): an arbitrary python object
        **kwargs: see `the Kwargs` section

    Kwargs:
        si, stritems (bool): dict/list items use str instead of repr

        strkeys, sk (bool): dict keys use str instead of repr

        strvals, sv (bool): dict values use str instead of repr

        nl, newlines (int | bool): number of top level nestings to place a
            newline after. If true all items are followed by newlines
            regardless of nesting level. Defaults to 1 for lists and True
            for dicts.

        nobr, nobraces (bool, default=False): if True, text will not contain
            outer braces for containers

        cbr, compact_brace (bool, default=False): if True, braces are
            compactified (i.e. they will not have newlines placed directly
            after them, think java / K&R / 1TBS)

        trailsep, trailing_sep (bool): if True, a separator is placed after
            the last item in a sequence. By default this is True if there
            are any `nl > 0`.

        explicit (bool, default=False): changes dict representation from
            `{k1: v1, ...}` to `dict(k1=v1, ...)`.

        precision (int, default=None): if specified floats are formatted with
            this precision

        kvsep (str, default=': '): separator between keys and values

        itemsep (str, default=' '): separator between items

        sort (bool): if True, attempts to sort all unordered collections in
            the returned text. NOTE: currently if True this will sort lists;
            this may not be a correct thing to do, so the behavior of this
            arg is subject to change.

        suppress_small (bool): passed to `numpy.array2string` for ndarrays

        max_line_width (int): passed to `numpy.array2string` for ndarrays

        with_dtype (bool): only relevant to ndarrays. if True includes the
            dtype.

    Returns:
        str: outstr: output string

    Notes:
        There are also internal kwargs, which should not be used:

        _return_info (bool): return information about child context

        _root_info (depth): information about parent context

    CommandLine:
        python -m ubelt.util_format repr2:0
        python -m ubelt.util_format repr2:1

    Example:
        >>> from ubelt.util_format import *
        >>> import ubelt as ub
        >>> dict_ = {
        ...     'custom_types': [slice(0, 1, None), 1/3],
        ...     'nest_dict': {'k1': [1, 2, {3: {4, 5}}],
        ...                   'key2': [1, 2, {3: {4, 5}}],
        ...                   'key3': [1, 2, {3: {4, 5}}],
        ...                   },
        ...     'nest_dict2': {'k': [1, 2, {3: {4, 5}}]},
        ...     'nested_tuples': [tuple([1]), tuple([2, 3]), frozenset([4, 5, 6])],
        ...     'one_tup': tuple([1]),
        ...     'simple_dict': {'spam': 'eggs', 'ham': 'jam'},
        ...     'simple_list': [1, 2, 'red', 'blue'],
        ...     'odict': ub.odict([(1, '1'), (2, '2')]),
        ... }
        >>> result = repr2(dict_, nl=3, precision=2); print(result)
        >>> result = repr2(dict_, nl=2, precision=2); print(result)
        >>> result = repr2(dict_, nl=1, precision=2); print(result)
        >>> result = repr2(dict_, nl=1, precision=2, itemsep='', explicit=True); print(result)
        >>> result = repr2(dict_, nl=1, precision=2, nobr=1, itemsep='', explicit=True); print(result)
        >>> result = repr2(dict_, nl=3, precision=2, cbr=True); print(result)
        >>> result = repr2(dict_, nl=3, precision=2, si=True); print(result)
        >>> result = repr2(dict_, nl=3, sort=True); print(result)
        >>> result = repr2(dict_, nl=3, sort=False, trailing_sep=False); print(result)
        >>> result = repr2(dict_, nl=3, sort=False, trailing_sep=False, nobr=True); print(result)

    Example:
        >>> from ubelt.util_format import *
        >>> def _nest(d, w):
        ...     if d == 0:
        ...         return {}
        ...     else:
        ...         return {'n{}'.format(d): _nest(d - 1, w + 1), 'm{}'.format(d): _nest(d - 1, w + 1)}
        >>> dict_ = _nest(d=4, w=1)
        >>> result = repr2(dict_, nl=6, precision=2, cbr=1)
        >>> print('---')
        >>> print(result)
        >>> result = repr2(dict_, nl=-1, precision=2)
        >>> print('---')
        >>> print(result)
    """
    custom_extensions = kwargs.get('extensions', None)

    _return_info = kwargs.get('_return_info', False)
    kwargs['_root_info'] = _rectify_root_info(kwargs.get('_root_info', None))

    outstr = None
    _leaf_info = None

    if custom_extensions:
        func = custom_extensions.lookup(data)
        if func is not None:
            outstr = func(data, **kwargs)

    if outstr is None:
        if isinstance(data, dict):
            outstr, _leaf_info = _format_dict(data, **kwargs)
        elif isinstance(data, (list, tuple, set, frozenset)):
            outstr, _leaf_info = _format_list(data, **kwargs)

    if outstr is None:
        # check any globally registered functions for special formatters
        func = _FORMATTER_EXTENSIONS.lookup(data)
        if func is not None:
            outstr = func(data, **kwargs)
        else:
            outstr = _format_object(data, **kwargs)

    if _return_info:
        _leaf_info = _rectify_leaf_info(_leaf_info)
        return outstr, _leaf_info
    else:
        return outstr
def function[repr2, parameter[data]]: constant[ Makes a pretty and easy-to-doctest string representation! This is an alternative to repr, and `pprint.pformat` that attempts to be both more configurable and generate output that is consistent between python versions. Notes: This function has many keyword arguments that can be used to customize the final representation. For convinience some of the more frequently used kwargs have short aliases. See `Args` for more details. Args: data (object): an arbitrary python object **kwargs: see `the Kwargs` section Kwargs: si, stritems, (bool): dict/list items use str instead of repr strkeys, sk (bool): dict keys use str instead of repr strvals, sv (bool): dict values use str instead of repr nl, newlines (int | bool): number of top level nestings to place a newline after. If true all items are followed by newlines regardless of nesting level. Defaults to 1 for lists and True for dicts. nobr, nobraces (bool, default=False): if True, text will not contain outer braces for containers cbr, compact_brace (bool, default=False): if True, braces are compactified (i.e. they will not have newlines placed directly after them, think java / K&R / 1TBS) trailsep, trailing_sep (bool): if True, a separator is placed after the last item in a sequence. By default this is True if there are any `nl > 0`. explicit (bool, default=False): changes dict representation from `{k1: v1, ...}` to `dict(k1=v1, ...)`. precision (int, default=None): if specified floats are formatted with this precision kvsep (str, default=': '): separator between keys and values itemsep (str, default=' '): separator between items sort (bool): if True, attempts to sort all unordered collections in the returned text. NOTE: currently if True this will sort lists, this may not be a correct thing to do, as such the behavior of this arg is subject to change. suppress_small (bool): passed to `numpy.array2string` for ndarrays max_line_width (int): passed to `numpy.array2string` for ndarrays with_dtype (bool): only relevant to ndarrays. if True includes the dtype. Returns: str: outstr: output string Notes: There are also internal kwargs, which should not be used: _return_info (bool): return information about child context _root_info (depth): information about parent context CommandLine: python -m ubelt.util_format repr2:0 python -m ubelt.util_format repr2:1 Example: >>> from ubelt.util_format import * >>> import ubelt as ub >>> dict_ = { ... 'custom_types': [slice(0, 1, None), 1/3], ... 'nest_dict': {'k1': [1, 2, {3: {4, 5}}], ... 'key2': [1, 2, {3: {4, 5}}], ... 'key3': [1, 2, {3: {4, 5}}], ... }, ... 'nest_dict2': {'k': [1, 2, {3: {4, 5}}]}, ... 'nested_tuples': [tuple([1]), tuple([2, 3]), frozenset([4, 5, 6])], ... 'one_tup': tuple([1]), ... 'simple_dict': {'spam': 'eggs', 'ham': 'jam'}, ... 'simple_list': [1, 2, 'red', 'blue'], ... 'odict': ub.odict([(1, '1'), (2, '2')]), ... 
} >>> result = repr2(dict_, nl=3, precision=2); print(result) >>> result = repr2(dict_, nl=2, precision=2); print(result) >>> result = repr2(dict_, nl=1, precision=2); print(result) >>> result = repr2(dict_, nl=1, precision=2, itemsep='', explicit=True); print(result) >>> result = repr2(dict_, nl=1, precision=2, nobr=1, itemsep='', explicit=True); print(result) >>> result = repr2(dict_, nl=3, precision=2, cbr=True); print(result) >>> result = repr2(dict_, nl=3, precision=2, si=True); print(result) >>> result = repr2(dict_, nl=3, sort=True); print(result) >>> result = repr2(dict_, nl=3, sort=False, trailing_sep=False); print(result) >>> result = repr2(dict_, nl=3, sort=False, trailing_sep=False, nobr=True); print(result) Example: >>> from ubelt.util_format import * >>> def _nest(d, w): ... if d == 0: ... return {} ... else: ... return {'n{}'.format(d): _nest(d - 1, w + 1), 'm{}'.format(d): _nest(d - 1, w + 1)} >>> dict_ = _nest(d=4, w=1) >>> result = repr2(dict_, nl=6, precision=2, cbr=1) >>> print('---') >>> print(result) >>> result = repr2(dict_, nl=-1, precision=2) >>> print('---') >>> print(result) ] variable[custom_extensions] assign[=] call[name[kwargs].get, parameter[constant[extensions], constant[None]]] variable[_return_info] assign[=] call[name[kwargs].get, parameter[constant[_return_info], constant[False]]] call[name[kwargs]][constant[_root_info]] assign[=] call[name[_rectify_root_info], parameter[call[name[kwargs].get, parameter[constant[_root_info], constant[None]]]]] variable[outstr] assign[=] constant[None] variable[_leaf_info] assign[=] constant[None] if name[custom_extensions] begin[:] variable[func] assign[=] call[name[custom_extensions].lookup, parameter[name[data]]] if compare[name[func] is_not constant[None]] begin[:] variable[outstr] assign[=] call[name[func], parameter[name[data]]] if compare[name[outstr] is constant[None]] begin[:] if call[name[isinstance], parameter[name[data], name[dict]]] begin[:] <ast.Tuple object at 0x7da207f01450> assign[=] call[name[_format_dict], parameter[name[data]]] if compare[name[outstr] is constant[None]] begin[:] variable[func] assign[=] call[name[_FORMATTER_EXTENSIONS].lookup, parameter[name[data]]] if compare[name[func] is_not constant[None]] begin[:] variable[outstr] assign[=] call[name[func], parameter[name[data]]] if name[_return_info] begin[:] variable[_leaf_info] assign[=] call[name[_rectify_leaf_info], parameter[name[_leaf_info]]] return[tuple[[<ast.Name object at 0x7da207f00160>, <ast.Name object at 0x7da207f03370>]]]
keyword[def] identifier[repr2] ( identifier[data] ,** identifier[kwargs] ): literal[string] identifier[custom_extensions] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) identifier[_return_info] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ) identifier[kwargs] [ literal[string] ]= identifier[_rectify_root_info] ( identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )) identifier[outstr] = keyword[None] identifier[_leaf_info] = keyword[None] keyword[if] identifier[custom_extensions] : identifier[func] = identifier[custom_extensions] . identifier[lookup] ( identifier[data] ) keyword[if] identifier[func] keyword[is] keyword[not] keyword[None] : identifier[outstr] = identifier[func] ( identifier[data] ,** identifier[kwargs] ) keyword[if] identifier[outstr] keyword[is] keyword[None] : keyword[if] identifier[isinstance] ( identifier[data] , identifier[dict] ): identifier[outstr] , identifier[_leaf_info] = identifier[_format_dict] ( identifier[data] ,** identifier[kwargs] ) keyword[elif] identifier[isinstance] ( identifier[data] ,( identifier[list] , identifier[tuple] , identifier[set] , identifier[frozenset] )): identifier[outstr] , identifier[_leaf_info] = identifier[_format_list] ( identifier[data] ,** identifier[kwargs] ) keyword[if] identifier[outstr] keyword[is] keyword[None] : identifier[func] = identifier[_FORMATTER_EXTENSIONS] . identifier[lookup] ( identifier[data] ) keyword[if] identifier[func] keyword[is] keyword[not] keyword[None] : identifier[outstr] = identifier[func] ( identifier[data] ,** identifier[kwargs] ) keyword[else] : identifier[outstr] = identifier[_format_object] ( identifier[data] ,** identifier[kwargs] ) keyword[if] identifier[_return_info] : identifier[_leaf_info] = identifier[_rectify_leaf_info] ( identifier[_leaf_info] ) keyword[return] identifier[outstr] , identifier[_leaf_info] keyword[else] : keyword[return] identifier[outstr]
def repr2(data, **kwargs): """ Makes a pretty and easy-to-doctest string representation! This is an alternative to repr, and `pprint.pformat` that attempts to be both more configurable and generate output that is consistent between python versions. Notes: This function has many keyword arguments that can be used to customize the final representation. For convinience some of the more frequently used kwargs have short aliases. See `Args` for more details. Args: data (object): an arbitrary python object **kwargs: see `the Kwargs` section Kwargs: si, stritems, (bool): dict/list items use str instead of repr strkeys, sk (bool): dict keys use str instead of repr strvals, sv (bool): dict values use str instead of repr nl, newlines (int | bool): number of top level nestings to place a newline after. If true all items are followed by newlines regardless of nesting level. Defaults to 1 for lists and True for dicts. nobr, nobraces (bool, default=False): if True, text will not contain outer braces for containers cbr, compact_brace (bool, default=False): if True, braces are compactified (i.e. they will not have newlines placed directly after them, think java / K&R / 1TBS) trailsep, trailing_sep (bool): if True, a separator is placed after the last item in a sequence. By default this is True if there are any `nl > 0`. explicit (bool, default=False): changes dict representation from `{k1: v1, ...}` to `dict(k1=v1, ...)`. precision (int, default=None): if specified floats are formatted with this precision kvsep (str, default=': '): separator between keys and values itemsep (str, default=' '): separator between items sort (bool): if True, attempts to sort all unordered collections in the returned text. NOTE: currently if True this will sort lists, this may not be a correct thing to do, as such the behavior of this arg is subject to change. suppress_small (bool): passed to `numpy.array2string` for ndarrays max_line_width (int): passed to `numpy.array2string` for ndarrays with_dtype (bool): only relevant to ndarrays. if True includes the dtype. Returns: str: outstr: output string Notes: There are also internal kwargs, which should not be used: _return_info (bool): return information about child context _root_info (depth): information about parent context CommandLine: python -m ubelt.util_format repr2:0 python -m ubelt.util_format repr2:1 Example: >>> from ubelt.util_format import * >>> import ubelt as ub >>> dict_ = { ... 'custom_types': [slice(0, 1, None), 1/3], ... 'nest_dict': {'k1': [1, 2, {3: {4, 5}}], ... 'key2': [1, 2, {3: {4, 5}}], ... 'key3': [1, 2, {3: {4, 5}}], ... }, ... 'nest_dict2': {'k': [1, 2, {3: {4, 5}}]}, ... 'nested_tuples': [tuple([1]), tuple([2, 3]), frozenset([4, 5, 6])], ... 'one_tup': tuple([1]), ... 'simple_dict': {'spam': 'eggs', 'ham': 'jam'}, ... 'simple_list': [1, 2, 'red', 'blue'], ... 'odict': ub.odict([(1, '1'), (2, '2')]), ... 
} >>> result = repr2(dict_, nl=3, precision=2); print(result) >>> result = repr2(dict_, nl=2, precision=2); print(result) >>> result = repr2(dict_, nl=1, precision=2); print(result) >>> result = repr2(dict_, nl=1, precision=2, itemsep='', explicit=True); print(result) >>> result = repr2(dict_, nl=1, precision=2, nobr=1, itemsep='', explicit=True); print(result) >>> result = repr2(dict_, nl=3, precision=2, cbr=True); print(result) >>> result = repr2(dict_, nl=3, precision=2, si=True); print(result) >>> result = repr2(dict_, nl=3, sort=True); print(result) >>> result = repr2(dict_, nl=3, sort=False, trailing_sep=False); print(result) >>> result = repr2(dict_, nl=3, sort=False, trailing_sep=False, nobr=True); print(result) Example: >>> from ubelt.util_format import * >>> def _nest(d, w): ... if d == 0: ... return {} ... else: ... return {'n{}'.format(d): _nest(d - 1, w + 1), 'm{}'.format(d): _nest(d - 1, w + 1)} >>> dict_ = _nest(d=4, w=1) >>> result = repr2(dict_, nl=6, precision=2, cbr=1) >>> print('---') >>> print(result) >>> result = repr2(dict_, nl=-1, precision=2) >>> print('---') >>> print(result) """ custom_extensions = kwargs.get('extensions', None) _return_info = kwargs.get('_return_info', False) kwargs['_root_info'] = _rectify_root_info(kwargs.get('_root_info', None)) outstr = None _leaf_info = None if custom_extensions: func = custom_extensions.lookup(data) if func is not None: outstr = func(data, **kwargs) # depends on [control=['if'], data=['func']] # depends on [control=['if'], data=[]] if outstr is None: if isinstance(data, dict): (outstr, _leaf_info) = _format_dict(data, **kwargs) # depends on [control=['if'], data=[]] elif isinstance(data, (list, tuple, set, frozenset)): (outstr, _leaf_info) = _format_list(data, **kwargs) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['outstr']] if outstr is None: # check any globally registered functions for special formatters func = _FORMATTER_EXTENSIONS.lookup(data) if func is not None: outstr = func(data, **kwargs) # depends on [control=['if'], data=['func']] else: outstr = _format_object(data, **kwargs) # depends on [control=['if'], data=['outstr']] if _return_info: _leaf_info = _rectify_leaf_info(_leaf_info) return (outstr, _leaf_info) # depends on [control=['if'], data=[]] else: return outstr
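`repr2` is part of ubelt's public API, so exercising it only needs the package installed:

import ubelt as ub

data = {'scores': [0.123456, 0.654321], 'meta': {'n': 2}}
# one nesting level of newlines, floats rounded to two decimal places
print(ub.repr2(data, nl=1, precision=2, sort=True))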
def gt(self, key, value, includeMissing=False): '''Return entries where the key's value is greater (>). Example of use: >>> test = [ ... {"name": "Jim", "age": 18, "income": 93000, "wigs": 68 }, ... {"name": "Larry", "age": 18, "wigs": [3, 2, 9]}, ... {"name": "Joe", "age": 20, "income": 15000, "wigs": [1, 2, 3]}, ... {"name": "Bill", "age": 19, "income": 29000 }, ... ] >>> print PLOD(test).gt("age", 19).returnString() [ {age: 20, income: 15000, name: 'Joe', wigs: [1, 2, 3]} ] .. versionadded:: 0.1.1 :param key: The dictionary key (or cascading list of keys) that should be the basis of comparison. :param value: The value to compare with. :param includeMissing: Defaults to False. If True, then entries missing the key are also included. :returns: self ''' (self.table, self.index_track) = internal.select(self.table, self.index_track, key, self.GREATER, value, includeMissing) return self
def function[gt, parameter[self, key, value, includeMissing]]: constant[Return entries where the key's value is greater (>). Example of use: >>> test = [ ... {"name": "Jim", "age": 18, "income": 93000, "wigs": 68 }, ... {"name": "Larry", "age": 18, "wigs": [3, 2, 9]}, ... {"name": "Joe", "age": 20, "income": 15000, "wigs": [1, 2, 3]}, ... {"name": "Bill", "age": 19, "income": 29000 }, ... ] >>> print PLOD(test).gt("age", 19).returnString() [ {age: 20, income: 15000, name: 'Joe', wigs: [1, 2, 3]} ] .. versionadded:: 0.1.1 :param key: The dictionary key (or cascading list of keys) that should be the basis of comparison. :param value: The value to compare with. :param includeMissing: Defaults to False. If True, then entries missing the key are also included. :returns: self ] <ast.Tuple object at 0x7da18f00dcf0> assign[=] call[name[internal].select, parameter[name[self].table, name[self].index_track, name[key], name[self].GREATER, name[value], name[includeMissing]]] return[name[self]]
keyword[def] identifier[gt] ( identifier[self] , identifier[key] , identifier[value] , identifier[includeMissing] = keyword[False] ): literal[string] ( identifier[self] . identifier[table] , identifier[self] . identifier[index_track] )= identifier[internal] . identifier[select] ( identifier[self] . identifier[table] , identifier[self] . identifier[index_track] , identifier[key] , identifier[self] . identifier[GREATER] , identifier[value] , identifier[includeMissing] ) keyword[return] identifier[self]
def gt(self, key, value, includeMissing=False): """Return entries where the key's value is greater (>). Example of use: >>> test = [ ... {"name": "Jim", "age": 18, "income": 93000, "wigs": 68 }, ... {"name": "Larry", "age": 18, "wigs": [3, 2, 9]}, ... {"name": "Joe", "age": 20, "income": 15000, "wigs": [1, 2, 3]}, ... {"name": "Bill", "age": 19, "income": 29000 }, ... ] >>> print PLOD(test).gt("age", 19).returnString() [ {age: 20, income: 15000, name: 'Joe', wigs: [1, 2, 3]} ] .. versionadded:: 0.1.1 :param key: The dictionary key (or cascading list of keys) that should be the basis of comparison. :param value: The value to compare with. :param includeMissing: Defaults to False. If True, then entries missing the key are also included. :returns: self """ (self.table, self.index_track) = internal.select(self.table, self.index_track, key, self.GREATER, value, includeMissing) return self
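`internal.select` does the real work and is not shown here; for a plain (non-cascading) key, its greater-than behavior reduces to the standalone filter below, including the includeMissing escape hatch. This is a sketch of the semantics, not the PLOD implementation.

def select_gt(rows, key, value, include_missing=False):
    # keep rows whose key compares greater than value; optionally also
    # keep rows that lack the key entirely
    kept = []
    for row in rows:
        if key not in row:
            if include_missing:
                kept.append(row)
        elif row[key] > value:
            kept.append(row)
    return kept

people = [{"name": "Joe", "age": 20}, {"name": "Bill", "age": 19}, {"name": "Ann"}]
print(select_gt(people, "age", 19))                        # Joe only
print(select_gt(people, "age", 19, include_missing=True))  # Joe and Ann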
def _get_relative_reference(self, cursor, ref_key):
    """Returns relative reference code for key.

    Parameters
    ----------

    cursor: 3-tuple of Integer
    \tCurrent cursor position
    ref_key: 3-tuple of Integer
    \tAbsolute reference key

    """

    magics = ["X", "Y", "Z"]

    # mapper takes magic, key, ref_key to build string
    def get_rel_key_ele(cursor_ele, ref_key_ele):
        """Returns relative key suffix for given key and reference key"""

        # cursor is current cursor position
        # ref_key is absolute target position

        diff_key_ele = ref_key_ele - cursor_ele

        if diff_key_ele == 0:
            return u""

        elif diff_key_ele < 0:
            return u"-" + str(abs(diff_key_ele))

        elif diff_key_ele > 0:
            return u"+" + str(diff_key_ele)

        else:
            msg = _("{key} seems to be no Integer")
            msg = msg.format(key=diff_key_ele)
            raise ValueError(msg)

    key_strings = []

    for magic, cursor_ele, ref_key_ele in zip(magics, cursor, ref_key):
        key_strings.append(magic + get_rel_key_ele(cursor_ele, ref_key_ele))

    key_string = u", ".join(key_strings)

    return u"S[" + key_string + u"]"
def function[_get_relative_reference, parameter[self, cursor, ref_key]]: constant[Returns absolute reference code for key. Parameters ---------- cursor: 3-tuple of Integer Current cursor position ref_key: 3-tuple of Integer Absolute reference key ] variable[magics] assign[=] list[[<ast.Constant object at 0x7da1b16bf640>, <ast.Constant object at 0x7da1b16bf430>, <ast.Constant object at 0x7da1b16bd450>]] def function[get_rel_key_ele, parameter[cursor_ele, ref_key_ele]]: constant[Returns relative key suffix for given key and reference key] variable[diff_key_ele] assign[=] binary_operation[name[ref_key_ele] - name[cursor_ele]] if compare[name[diff_key_ele] equal[==] constant[0]] begin[:] return[constant[]] variable[key_strings] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b16bffa0>, <ast.Name object at 0x7da1b16bf100>, <ast.Name object at 0x7da1b16bf6a0>]]] in starred[call[name[zip], parameter[name[magics], name[cursor], name[ref_key]]]] begin[:] call[name[key_strings].append, parameter[binary_operation[name[magic] + call[name[get_rel_key_ele], parameter[name[cursor_ele], name[ref_key_ele]]]]]] variable[key_string] assign[=] call[constant[, ].join, parameter[name[key_strings]]] return[binary_operation[binary_operation[constant[S[] + name[key_string]] + constant[]]]]
keyword[def] identifier[_get_relative_reference] ( identifier[self] , identifier[cursor] , identifier[ref_key] ): literal[string] identifier[magics] =[ literal[string] , literal[string] , literal[string] ] keyword[def] identifier[get_rel_key_ele] ( identifier[cursor_ele] , identifier[ref_key_ele] ): literal[string] identifier[diff_key_ele] = identifier[ref_key_ele] - identifier[cursor_ele] keyword[if] identifier[diff_key_ele] == literal[int] : keyword[return] literal[string] keyword[elif] identifier[diff_key_ele] < literal[int] : keyword[return] literal[string] + identifier[str] ( identifier[abs] ( identifier[diff_key_ele] )) keyword[elif] identifier[diff_key_ele] > literal[int] : keyword[return] literal[string] + identifier[str] ( identifier[diff_key_ele] ) keyword[else] : identifier[msg] = identifier[_] ( literal[string] ) identifier[msg] = identifier[msg] . identifier[format] ( identifier[key] = identifier[diff_key_ele] ) keyword[raise] identifier[ValueError] ( identifier[msg] ) identifier[key_strings] =[] keyword[for] identifier[magic] , identifier[cursor_ele] , identifier[ref_key_ele] keyword[in] identifier[zip] ( identifier[magics] , identifier[cursor] , identifier[ref_key] ): identifier[key_strings] . identifier[append] ( identifier[magic] + identifier[get_rel_key_ele] ( identifier[cursor_ele] , identifier[ref_key_ele] )) identifier[key_string] = literal[string] . identifier[join] ( identifier[key_strings] ) keyword[return] literal[string] + identifier[key_string] + literal[string]
def _get_relative_reference(self, cursor, ref_key): """Returns absolute reference code for key. Parameters ---------- cursor: 3-tuple of Integer Current cursor position ref_key: 3-tuple of Integer Absolute reference key """ magics = ['X', 'Y', 'Z'] # mapper takes magic, key, ref_key to build string def get_rel_key_ele(cursor_ele, ref_key_ele): """Returns relative key suffix for given key and reference key""" # cursor is current cursor position # ref_key is absolute target position diff_key_ele = ref_key_ele - cursor_ele if diff_key_ele == 0: return u'' # depends on [control=['if'], data=[]] elif diff_key_ele < 0: return u'-' + str(abs(diff_key_ele)) # depends on [control=['if'], data=['diff_key_ele']] elif diff_key_ele > 0: return u'+' + str(diff_key_ele) # depends on [control=['if'], data=['diff_key_ele']] else: msg = _('{key} seems to be no Integer') msg = msg.format(key=diff_key_ele) raise ValueError(msg) key_strings = [] for (magic, cursor_ele, ref_key_ele) in zip(magics, cursor, ref_key): key_strings.append(magic + get_rel_key_ele(cursor_ele, ref_key_ele)) # depends on [control=['for'], data=[]] key_string = u', '.join(key_strings) return u'S[' + key_string + u']'
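Stripped of the gettext error path, the key grammar produced above is compact enough to restate as a standalone function; relative_ref is an illustrative name.

def relative_ref(cursor, ref_key):
    # build e.g. "S[X+3, Y-2, Z]" from the cursor and an absolute target key
    parts = []
    for magic, cur, ref in zip("XYZ", cursor, ref_key):
        diff = ref - cur
        # "{:+d}" yields "+3" / "-2", matching the suffixes built above
        parts.append(magic + ("" if diff == 0 else "{:+d}".format(diff)))
    return "S[" + ", ".join(parts) + "]"

print(relative_ref((2, 3, 0), (5, 1, 0)))  # S[X+3, Y-2, Z]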
def from_rfc3339_nanos(value): """Convert a nanosecond-precision timestamp to a native datetime. .. note:: Python datetimes do not support nanosecond precision; this function therefore truncates such values to microseconds. Args: value (str): The RFC3339 string to convert. Returns: datetime.datetime: The datetime object equivalent to the timestamp in UTC. Raises: ValueError: If the timestamp does not match the RFC 3339 regular expression. """ with_nanos = _RFC3339_NANOS.match(value) if with_nanos is None: raise ValueError( "Timestamp: {!r}, does not match pattern: {!r}".format( value, _RFC3339_NANOS.pattern ) ) bare_seconds = datetime.datetime.strptime( with_nanos.group("no_fraction"), _RFC3339_NO_FRACTION ) fraction = with_nanos.group("nanos") if fraction is None: micros = 0 else: scale = 9 - len(fraction) nanos = int(fraction) * (10 ** scale) micros = nanos // 1000 return bare_seconds.replace(microsecond=micros, tzinfo=pytz.utc)
def function[from_rfc3339_nanos, parameter[value]]: constant[Convert a nanosecond-precision timestamp to a native datetime. .. note:: Python datetimes do not support nanosecond precision; this function therefore truncates such values to microseconds. Args: value (str): The RFC3339 string to convert. Returns: datetime.datetime: The datetime object equivalent to the timestamp in UTC. Raises: ValueError: If the timestamp does not match the RFC 3339 regular expression. ] variable[with_nanos] assign[=] call[name[_RFC3339_NANOS].match, parameter[name[value]]] if compare[name[with_nanos] is constant[None]] begin[:] <ast.Raise object at 0x7da20c6e5e10> variable[bare_seconds] assign[=] call[name[datetime].datetime.strptime, parameter[call[name[with_nanos].group, parameter[constant[no_fraction]]], name[_RFC3339_NO_FRACTION]]] variable[fraction] assign[=] call[name[with_nanos].group, parameter[constant[nanos]]] if compare[name[fraction] is constant[None]] begin[:] variable[micros] assign[=] constant[0] return[call[name[bare_seconds].replace, parameter[]]]
keyword[def] identifier[from_rfc3339_nanos] ( identifier[value] ): literal[string] identifier[with_nanos] = identifier[_RFC3339_NANOS] . identifier[match] ( identifier[value] ) keyword[if] identifier[with_nanos] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[value] , identifier[_RFC3339_NANOS] . identifier[pattern] ) ) identifier[bare_seconds] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[with_nanos] . identifier[group] ( literal[string] ), identifier[_RFC3339_NO_FRACTION] ) identifier[fraction] = identifier[with_nanos] . identifier[group] ( literal[string] ) keyword[if] identifier[fraction] keyword[is] keyword[None] : identifier[micros] = literal[int] keyword[else] : identifier[scale] = literal[int] - identifier[len] ( identifier[fraction] ) identifier[nanos] = identifier[int] ( identifier[fraction] )*( literal[int] ** identifier[scale] ) identifier[micros] = identifier[nanos] // literal[int] keyword[return] identifier[bare_seconds] . identifier[replace] ( identifier[microsecond] = identifier[micros] , identifier[tzinfo] = identifier[pytz] . identifier[utc] )
def from_rfc3339_nanos(value): """Convert a nanosecond-precision timestamp to a native datetime. .. note:: Python datetimes do not support nanosecond precision; this function therefore truncates such values to microseconds. Args: value (str): The RFC3339 string to convert. Returns: datetime.datetime: The datetime object equivalent to the timestamp in UTC. Raises: ValueError: If the timestamp does not match the RFC 3339 regular expression. """ with_nanos = _RFC3339_NANOS.match(value) if with_nanos is None: raise ValueError('Timestamp: {!r}, does not match pattern: {!r}'.format(value, _RFC3339_NANOS.pattern)) # depends on [control=['if'], data=[]] bare_seconds = datetime.datetime.strptime(with_nanos.group('no_fraction'), _RFC3339_NO_FRACTION) fraction = with_nanos.group('nanos') if fraction is None: micros = 0 # depends on [control=['if'], data=[]] else: scale = 9 - len(fraction) nanos = int(fraction) * 10 ** scale micros = nanos // 1000 return bare_seconds.replace(microsecond=micros, tzinfo=pytz.utc)
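The scale arithmetic is the interesting part: pad the fractional digits out to nine (nanoseconds), then integer-divide down to the microseconds a datetime can hold. A self-contained sketch with a simplified pattern; the real _RFC3339_NANOS is defined elsewhere and is assumed to differ (the one below only accepts a trailing `Z`).

import datetime
import re

_SIMPLE_RFC3339 = re.compile(
    r"(?P<no_fraction>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})"
    r"(?:\.(?P<nanos>\d{1,9}))?Z$")

def parse_rfc3339(value):
    match = _SIMPLE_RFC3339.match(value)
    if match is None:
        raise ValueError("not an RFC 3339 timestamp: {!r}".format(value))
    base = datetime.datetime.strptime(
        match.group("no_fraction"), "%Y-%m-%dT%H:%M:%S")
    # right-pad the fraction to nine digits (nanoseconds), then truncate
    # to the microsecond precision datetime supports
    nanos = int((match.group("nanos") or "").ljust(9, "0"))
    return base.replace(microsecond=nanos // 1000,
                        tzinfo=datetime.timezone.utc)

print(parse_rfc3339("2023-01-02T03:04:05.123456789Z"))
# -> 2023-01-02 03:04:05.123456+00:00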
def read_cache(cachefile, coltype=LIGOTimeGPS, sort=None, segment=None):
    """Read a LAL- or FFL-format cache file as a list of file paths

    Parameters
    ----------
    cachefile : `str`, `file`
        Input file or file path to read.

    coltype : `LIGOTimeGPS`, `int`, optional
        Type for GPS times.

    sort : `callable`, optional
        A callable key function by which to sort the output list of file paths

    segment : `gwpy.segments.Segment`, optional
        A GPS `[start, stop)` interval; if given, only files overlapping this
        interval will be returned.

    Returns
    -------
    paths : `list` of `str`
        A list of file paths as read from the cache file.
    """
    # open file
    if not isinstance(cachefile, FILE_LIKE):
        with open(file_path(cachefile), 'r') as fobj:
            return read_cache(fobj, coltype=coltype, sort=sort,
                              segment=segment)

    # read file
    cache = [x.path for x in _iter_cache(cachefile, gpstype=coltype)]

    # sieve and sort
    if segment:
        cache = sieve(cache, segment=segment)
    if sort:
        cache.sort(key=sort)

    # return simple paths
    return cache
def function[read_cache, parameter[cachefile, coltype, sort, segment]]: constant[Read a LAL- or FFL-format cache file as a list of file paths Parameters ---------- cachefile : `str`, `file` Input file or file path to read. coltype : `LIGOTimeGPS`, `int`, optional Type for GPS times. sort : `callable`, optional A callable key function by which to sort the output list of file paths segment : `gwpy.segments.Segment`, optional A GPS `[start, stop)` interval, if given only files overlapping this interval will be returned. Returns ------- paths : `list` of `str` A list of file paths as read from the cache file. ] if <ast.UnaryOp object at 0x7da2043470a0> begin[:] with call[name[open], parameter[call[name[file_path], parameter[name[cachefile]]], constant[r]]] begin[:] return[call[name[read_cache], parameter[name[fobj]]]] variable[cache] assign[=] <ast.ListComp object at 0x7da207f9a1a0> if name[segment] begin[:] variable[cache] assign[=] call[name[sieve], parameter[name[cache]]] if name[sort] begin[:] call[name[cache].sort, parameter[]] return[name[cache]]
keyword[def] identifier[read_cache] ( identifier[cachefile] , identifier[coltype] = identifier[LIGOTimeGPS] , identifier[sort] = keyword[None] , identifier[segment] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[cachefile] , identifier[FILE_LIKE] ): keyword[with] identifier[open] ( identifier[file_path] ( identifier[cachefile] ), literal[string] ) keyword[as] identifier[fobj] : keyword[return] identifier[read_cache] ( identifier[fobj] , identifier[coltype] = identifier[coltype] , identifier[sort] = identifier[sort] , identifier[segment] = identifier[segment] ) identifier[cache] =[ identifier[x] . identifier[path] keyword[for] identifier[x] keyword[in] identifier[_iter_cache] ( identifier[cachefile] , identifier[gpstype] = identifier[coltype] )] keyword[if] identifier[segment] : identifier[cache] = identifier[sieve] ( identifier[cache] , identifier[segment] = identifier[segment] ) keyword[if] identifier[sort] : identifier[cache] . identifier[sort] ( identifier[key] = identifier[sort] ) keyword[return] identifier[cache]
def read_cache(cachefile, coltype=LIGOTimeGPS, sort=None, segment=None): """Read a LAL- or FFL-format cache file as a list of file paths Parameters ---------- cachefile : `str`, `file` Input file or file path to read. coltype : `LIGOTimeGPS`, `int`, optional Type for GPS times. sort : `callable`, optional A callable key function by which to sort the output list of file paths segment : `gwpy.segments.Segment`, optional A GPS `[start, stop)` interval, if given only files overlapping this interval will be returned. Returns ------- paths : `list` of `str` A list of file paths as read from the cache file. """ # open file if not isinstance(cachefile, FILE_LIKE): with open(file_path(cachefile), 'r') as fobj: return read_cache(fobj, coltype=coltype, sort=sort, segment=segment) # depends on [control=['with'], data=['fobj']] # depends on [control=['if'], data=[]] # read file cache = [x.path for x in _iter_cache(cachefile, gpstype=coltype)] # sieve and sort if segment: cache = sieve(cache, segment=segment) # depends on [control=['if'], data=[]] if sort: cache.sort(key=sort) # depends on [control=['if'], data=[]] # read simple paths return cache
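`_iter_cache` and `sieve` are gwpy internals; the path extraction itself is simple if one assumes the standard LAL cache layout of five whitespace-separated columns with the path last. The sketch below skips the FFL variant and the segment sieving the real reader also handles.

from io import StringIO

def read_lal_paths(fobj):
    # LAL cache line: observatory frametype gps-start duration path
    return [line.split()[4] for line in fobj if line.strip()]

cache = StringIO(
    "H H1_TEST 1000000000 8 /data/H-H1_TEST-1000000000-8.gwf\n"
    "H H1_TEST 1000000008 8 /data/H-H1_TEST-1000000008-8.gwf\n")
print(read_lal_paths(cache))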
def _memory_usage(func, gallery_conf): """Get memory usage of a function call.""" if gallery_conf['show_memory']: from memory_profiler import memory_usage assert callable(func) mem, out = memory_usage(func, max_usage=True, retval=True, multiprocess=True) mem = mem[0] else: out = func() mem = 0 return out, mem
def function[_memory_usage, parameter[func, gallery_conf]]: constant[Get memory usage of a function call.] if call[name[gallery_conf]][constant[show_memory]] begin[:] from relative_module[memory_profiler] import module[memory_usage] assert[call[name[callable], parameter[name[func]]]] <ast.Tuple object at 0x7da204621300> assign[=] call[name[memory_usage], parameter[name[func]]] variable[mem] assign[=] call[name[mem]][constant[0]] return[tuple[[<ast.Name object at 0x7da1b26afa30>, <ast.Name object at 0x7da1b26af580>]]]
keyword[def] identifier[_memory_usage] ( identifier[func] , identifier[gallery_conf] ): literal[string] keyword[if] identifier[gallery_conf] [ literal[string] ]: keyword[from] identifier[memory_profiler] keyword[import] identifier[memory_usage] keyword[assert] identifier[callable] ( identifier[func] ) identifier[mem] , identifier[out] = identifier[memory_usage] ( identifier[func] , identifier[max_usage] = keyword[True] , identifier[retval] = keyword[True] , identifier[multiprocess] = keyword[True] ) identifier[mem] = identifier[mem] [ literal[int] ] keyword[else] : identifier[out] = identifier[func] () identifier[mem] = literal[int] keyword[return] identifier[out] , identifier[mem]
def _memory_usage(func, gallery_conf): """Get memory usage of a function call.""" if gallery_conf['show_memory']: from memory_profiler import memory_usage assert callable(func) (mem, out) = memory_usage(func, max_usage=True, retval=True, multiprocess=True) mem = mem[0] # depends on [control=['if'], data=[]] else: out = func() mem = 0 return (out, mem)
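A hedged standalone variant that degrades gracefully when memory_profiler is absent. One version wrinkle worth noting: with max_usage=True, older memory_profiler releases return a one-element list (hence the `mem[0]` above), while newer ones return a plain float.

def run_with_mem(func, show_memory=True):
    # sketch of the helper above with an ImportError fallback
    if show_memory:
        try:
            from memory_profiler import memory_usage
        except ImportError:
            return func(), 0.0
        mem, out = memory_usage((func, (), {}), max_usage=True, retval=True)
        # older memory_profiler returns a one-element list for max_usage,
        # newer versions return a plain float
        peak = mem[0] if isinstance(mem, list) else mem
        return out, peak
    return func(), 0.0

print(run_with_mem(lambda: sum(range(10 ** 6))))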
def get_deadletter_receiver(
        self, transfer_deadletter=False, prefetch=0,
        mode=ReceiveSettleMode.PeekLock, idle_timeout=0, **kwargs):
    """Get a Receiver for the deadletter endpoint of the entity.

    A Receiver represents a single open connection with which multiple
    receive operations can be made.

    :param transfer_deadletter: Whether to connect to the transfer deadletter queue, or the
     standard deadletter queue. Default is False, using the standard deadletter endpoint.
    :type transfer_deadletter: bool
    :param prefetch: The maximum number of messages to cache with each request to the service.
     The default value is 0, meaning messages will be received from the service and processed
     one at a time. Increasing this value will improve message throughput performance but
     increase the chance that messages will expire while they are cached if they're not
     processed fast enough.
    :type prefetch: int
    :param mode: The mode with which messages will be retrieved from the entity. The two options
     are PeekLock and ReceiveAndDelete. Messages received with PeekLock must be settled within a
     given lock period before they will be removed from the queue. Messages received with
     ReceiveAndDelete will be immediately removed from the queue, and cannot be subsequently
     rejected or re-received if the client fails to process the message. The default mode is
     PeekLock.
    :type mode: ~azure.servicebus.common.constants.ReceiveSettleMode
    :param idle_timeout: The timeout in seconds between received messages after which the
     receiver will automatically shut down. The default value is 0, meaning no timeout.
    :type idle_timeout: int
    :returns: A Receiver instance with an unopened Connection.
    :rtype: ~azure.servicebus.aio.async_receive_handler.Receiver

    Example:
        .. literalinclude:: ../examples/async_examples/test_examples_async.py
            :start-after: [START receiver_deadletter_messages]
            :end-before: [END receiver_deadletter_messages]
            :language: python
            :dedent: 4
            :caption: Receive dead-lettered messages.

    """
    if int(prefetch) < 0 or int(prefetch) > 50000:
        raise ValueError("Prefetch must be an integer between 0 and 50000 inclusive.")

    prefetch += 1
    handler_id = str(uuid.uuid4())
    if transfer_deadletter:
        entity_uri = self.mgmt_client.format_transfer_dead_letter_queue_name(self.entity_uri)
    else:
        entity_uri = self.mgmt_client.format_dead_letter_queue_name(self.entity_uri)
    return Receiver(
        handler_id, entity_uri, self.auth_config, loop=self.loop,
        debug=self.debug, timeout=int(idle_timeout * 1000),
        prefetch=prefetch, mode=mode, **kwargs)
def function[get_deadletter_receiver, parameter[self, transfer_deadletter, prefetch, mode, idle_timeout]]: constant[Get a Receiver for the deadletter endpoint of the entity. A Receiver represents a single open connection with which multiple receive operations can be made. :param transfer_deadletter: Whether to connect to the transfer deadletter queue, or the standard deadletter queue. Default is False, using the standard deadletter endpoint. :type transfer_deadletter: bool :param prefetch: The maximum number of messages to cache with each request to the service. The default value is 0, meaning messages will be received from the service and processed one at a time. Increasing this value will improve message throughput performance but increase the change that messages will expire while they are cached if they're not processed fast enough. :type prefetch: int :param mode: The mode with which messages will be retrieved from the entity. The two options are PeekLock and ReceiveAndDelete. Messages received with PeekLock must be settled within a given lock period before they will be removed from the queue. Messages received with ReceiveAndDelete will be immediately removed from the queue, and cannot be subsequently rejected or re-received if the client fails to process the message. The default mode is PeekLock. :type mode: ~azure.servicebus.common.constants.ReceiveSettleMode :param idle_timeout: The timeout in seconds between received messages after which the receiver will automatically shutdown. The default value is 0, meaning no timeout. :type idle_timeout: int :returns: A Receiver instance with an unopened Connection. :rtype: ~azure.servicebus.aio.async_receive_handler.Receiver Example: .. literalinclude:: ../examples/async_examples/test_examples_async.py :start-after: [START receiver_deadletter_messages] :end-before: [END receiver_deadletter_messages] :language: python :dedent: 4 :caption: Receive dead-lettered messages. ] if <ast.BoolOp object at 0x7da20cabf4c0> begin[:] <ast.Raise object at 0x7da204620550> <ast.AugAssign object at 0x7da204623880> variable[handler_id] assign[=] call[name[str], parameter[call[name[uuid].uuid4, parameter[]]]] if name[transfer_deadletter] begin[:] variable[entity_uri] assign[=] call[name[self].mgmt_client.format_transfer_dead_letter_queue_name, parameter[name[self].entity_uri]] return[call[name[Receiver], parameter[name[handler_id], name[entity_uri], name[self].auth_config]]]
keyword[def] identifier[get_deadletter_receiver] ( identifier[self] , identifier[transfer_deadletter] = keyword[False] , identifier[prefetch] = literal[int] , identifier[mode] = identifier[ReceiveSettleMode] . identifier[PeekLock] , identifier[idle_timeout] = literal[int] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[int] ( identifier[prefetch] )< literal[int] keyword[or] identifier[int] ( identifier[prefetch] )> literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[prefetch] += literal[int] identifier[handler_id] = identifier[str] ( identifier[uuid] . identifier[uuid4] ()) keyword[if] identifier[transfer_deadletter] : identifier[entity_uri] = identifier[self] . identifier[mgmt_client] . identifier[format_transfer_dead_letter_queue_name] ( identifier[self] . identifier[entity_uri] ) keyword[else] : identifier[entity_uri] = identifier[self] . identifier[mgmt_client] . identifier[format_dead_letter_queue_name] ( identifier[self] . identifier[entity_uri] ) keyword[return] identifier[Receiver] ( identifier[handler_id] , identifier[entity_uri] , identifier[self] . identifier[auth_config] , identifier[loop] = identifier[self] . identifier[loop] , identifier[debug] = identifier[self] . identifier[debug] , identifier[timeout] = identifier[int] ( identifier[idle_timeout] * literal[int] ), identifier[prefetch] = identifier[prefetch] , identifier[mode] = identifier[mode] , ** identifier[kwargs] )
def get_deadletter_receiver(self, transfer_deadletter=False, prefetch=0, mode=ReceiveSettleMode.PeekLock, idle_timeout=0, **kwargs):
    """Get a Receiver for the deadletter endpoint of the entity.

    A Receiver represents a single open connection with which multiple
    receive operations can be made.

    :param transfer_deadletter: Whether to connect to the transfer deadletter queue, or the
     standard deadletter queue. Default is False, using the standard deadletter endpoint.
    :type transfer_deadletter: bool
    :param prefetch: The maximum number of messages to cache with each request to the service.
     The default value is 0, meaning messages will be received from the service and processed
     one at a time. Increasing this value will improve message throughput performance but
     increase the chance that messages will expire while they are cached if they're not
     processed fast enough.
    :type prefetch: int
    :param mode: The mode with which messages will be retrieved from the entity. The two options
     are PeekLock and ReceiveAndDelete. Messages received with PeekLock must be settled within a
     given lock period before they will be removed from the queue. Messages received with
     ReceiveAndDelete will be immediately removed from the queue, and cannot be subsequently
     rejected or re-received if the client fails to process the message. The default mode is
     PeekLock.
    :type mode: ~azure.servicebus.common.constants.ReceiveSettleMode
    :param idle_timeout: The timeout in seconds between received messages after which the
     receiver will automatically shut down. The default value is 0, meaning no timeout.
    :type idle_timeout: int
    :returns: A Receiver instance with an unopened Connection.
    :rtype: ~azure.servicebus.aio.async_receive_handler.Receiver

    Example:
        .. literalinclude:: ../examples/async_examples/test_examples_async.py
            :start-after: [START receiver_deadletter_messages]
            :end-before: [END receiver_deadletter_messages]
            :language: python
            :dedent: 4
            :caption: Receive dead-lettered messages.

    """
    if int(prefetch) < 0 or int(prefetch) > 50000:
        raise ValueError('Prefetch must be an integer between 0 and 50000 inclusive.') # depends on [control=['if'], data=[]]
    prefetch += 1
    handler_id = str(uuid.uuid4())
    if transfer_deadletter:
        entity_uri = self.mgmt_client.format_transfer_dead_letter_queue_name(self.entity_uri) # depends on [control=['if'], data=[]]
    else:
        entity_uri = self.mgmt_client.format_dead_letter_queue_name(self.entity_uri)
    return Receiver(handler_id, entity_uri, self.auth_config, loop=self.loop, debug=self.debug, timeout=int(idle_timeout * 1000), prefetch=prefetch, mode=mode, **kwargs)
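The following is a minimal usage sketch for the function above. The queue_client object, the async context-manager and iterator behavior of the returned Receiver, and the message.complete() settlement call are assumptions modeled on the pre-7.x azure-servicebus async API; this row does not confirm them.

import asyncio

async def drain_deadletter(queue_client):
    # queue_client is hypothetical: any object exposing get_deadletter_receiver(),
    # e.g. an old-style (0.50.x) azure.servicebus.aio QueueClient.
    receiver = queue_client.get_deadletter_receiver(
        transfer_deadletter=False,  # read the standard DLQ, not the transfer DLQ
        prefetch=10,                # cache up to 10 messages per service request
        idle_timeout=5)             # shut down after 5s with no new messages
    async with receiver:            # assumes Receiver is an async context manager
        async for message in receiver:   # assumes async iteration yields messages
            print(message)
            await message.complete()     # settle the PeekLock'd message

# asyncio.run(drain_deadletter(queue_client))  # client construction not shown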
def randkey(bits, keyspace=string.ascii_letters + string.digits + '#/.', rng=None):
    """ Returns a cryptographically secure random key of desired @bits of
        entropy within @keyspace using :class:random.SystemRandom

        @bits: (#int) minimum bits of entropy
        @keyspace: (#str) or iterable of allowed output chars
        @rng: the random number generator to use. Defaults to
            :class:random.SystemRandom. Must have a |choice| method

        -> (#str) random key
        ..
            from vital.security import randkey

            randkey(24)
            # -> '9qaX'
            randkey(48)
            # -> 'iPJ5YWs9'
            randkey(64)
            # -> 'C..VJ.KLdxg'
            randkey(64, keyspace="abc", rng=random)
            # -> 'aabcccbabcaacaccccabcaabbabcacabacbbbaaab'
        ..
    """
    return "".join(char for char in iter_random_chars(bits, keyspace, rng))
def function[randkey, parameter[bits, keyspace, rng]]: constant[ Returns a cryptographically secure random key of desired @bits of entropy within @keyspace using :class:random.SystemRandom @bits: (#int) minimum bits of entropy @keyspace: (#str) or iterable allowed output chars @rng: the random number generator to use. Defaults to :class:random.SystemRandom. Must have a |choice| method -> (#str) random key .. from vital.security import randkey randkey(24) # -> '9qaX' randkey(48) # -> 'iPJ5YWs9' randkey(64) # - > 'C..VJ.KLdxg' randkey(64, keyspace="abc", rng=random) # -> 'aabcccbabcaacaccccabcaabbabcacabacbbbaaab' .. ] return[call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b10a6200>]]]
keyword[def] identifier[randkey] ( identifier[bits] , identifier[keyspace] = identifier[string] . identifier[ascii_letters] + identifier[string] . identifier[digits] + literal[string] , identifier[rng] = keyword[None] ): literal[string] keyword[return] literal[string] . identifier[join] ( identifier[char] keyword[for] identifier[char] keyword[in] identifier[iter_random_chars] ( identifier[bits] , identifier[keyspace] , identifier[rng] ))
def randkey(bits, keyspace=string.ascii_letters + string.digits + '#/.', rng=None):
    """ Returns a cryptographically secure random key of desired @bits of
        entropy within @keyspace using :class:random.SystemRandom

        @bits: (#int) minimum bits of entropy
        @keyspace: (#str) or iterable of allowed output chars
        @rng: the random number generator to use. Defaults to
            :class:random.SystemRandom. Must have a |choice| method

        -> (#str) random key
        ..
            from vital.security import randkey

            randkey(24)
            # -> '9qaX'
            randkey(48)
            # -> 'iPJ5YWs9'
            randkey(64)
            # -> 'C..VJ.KLdxg'
            randkey(64, keyspace="abc", rng=random)
            # -> 'aabcccbabcaacaccccabcaabbabcacabacbbbaaab'
        ..
    """
    return ''.join((char for char in iter_random_chars(bits, keyspace, rng)))
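randkey() delegates to iter_random_chars, which this row does not include. Below is a hypothetical reconstruction from the docstring's contract alone: each draw from a keyspace of size n contributes log2(n) bits of entropy, so ceil(bits / log2(n)) draws reach the requested minimum. The lengths it produces match every docstring example (the default 65-char keyspace gives 4, 8, and 11 characters for 24, 48, and 64 bits; the 3-char "abc" keyspace gives 41 characters for 64 bits).

import math
import random
import string

def iter_random_chars(bits, keyspace, rng=None):
    # Hypothetical reconstruction; the real vital.security helper may differ.
    rng = rng or random.SystemRandom()        # cryptographically secure default
    bits_per_char = math.log2(len(keyspace))  # entropy contributed by one draw
    for _ in range(math.ceil(bits / bits_per_char)):
        yield rng.choice(keyspace)

# 64 bits over the default 65-char keyspace -> ceil(64 / log2(65)) = 11 chars,
# the same length as the 'C..VJ.KLdxg' example above.
print("".join(iter_random_chars(64, string.ascii_letters + string.digits + '#/.')))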
def prepare_method_call(self, method, args):
    """
    Wraps a method so that method() will call ``method(*args)`` or ``method(**args)``,
    depending on the type of args

    :param method: a callable object (method)
    :param args: dict or list with the parameters for the function
    :return: a 'patched' callable
    """
    if self._method_requires_handler_ref(method):
        if isinstance(args, list):
            args = [self] + args
        elif isinstance(args, dict):
            args["handler"] = self

    if isinstance(args, list):
        to_call = partial(method, *args)
    elif isinstance(args, dict):
        to_call = partial(method, **args)
    else:
        raise TypeError(
            "args must be list or dict but got {} instead".format(type(args).__name__))
    return to_call
def function[prepare_method_call, parameter[self, method, args]]: constant[ Wraps a method so that method() will call ``method(*args)`` or ``method(**args)``, depending of args type :param method: a callable object (method) :param args: dict or list with the parameters for the function :return: a 'patched' callable ] if call[name[self]._method_requires_handler_ref, parameter[name[method]]] begin[:] if call[name[isinstance], parameter[name[args], name[list]]] begin[:] variable[args] assign[=] binary_operation[list[[<ast.Name object at 0x7da1b0a4f580>]] + name[args]] if call[name[isinstance], parameter[name[args], name[list]]] begin[:] variable[to_call] assign[=] call[name[partial], parameter[name[method], <ast.Starred object at 0x7da1b0a4e710>]] return[name[to_call]]
keyword[def] identifier[prepare_method_call] ( identifier[self] , identifier[method] , identifier[args] ): literal[string] keyword[if] identifier[self] . identifier[_method_requires_handler_ref] ( identifier[method] ): keyword[if] identifier[isinstance] ( identifier[args] , identifier[list] ): identifier[args] =[ identifier[self] ]+ identifier[args] keyword[elif] identifier[isinstance] ( identifier[args] , identifier[dict] ): identifier[args] [ literal[string] ]= identifier[self] keyword[if] identifier[isinstance] ( identifier[args] , identifier[list] ): identifier[to_call] = identifier[partial] ( identifier[method] ,* identifier[args] ) keyword[elif] identifier[isinstance] ( identifier[args] , identifier[dict] ): identifier[to_call] = identifier[partial] ( identifier[method] ,** identifier[args] ) keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[args] ). identifier[__name__] )) keyword[return] identifier[to_call]
def prepare_method_call(self, method, args):
    """
    Wraps a method so that method() will call ``method(*args)`` or ``method(**args)``,
    depending on the type of args

    :param method: a callable object (method)
    :param args: dict or list with the parameters for the function
    :return: a 'patched' callable
    """
    if self._method_requires_handler_ref(method):
        if isinstance(args, list):
            args = [self] + args # depends on [control=['if'], data=[]]
        elif isinstance(args, dict):
            args['handler'] = self # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    if isinstance(args, list):
        to_call = partial(method, *args) # depends on [control=['if'], data=[]]
    elif isinstance(args, dict):
        to_call = partial(method, **args) # depends on [control=['if'], data=[]]
    else:
        raise TypeError('args must be list or dict but got {} instead'.format(type(args).__name__))
    return to_call
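A toy demonstration of the list/dict dispatch above. ToyHandler and greet are invented for illustration; _method_requires_handler_ref is stubbed to return False so a plain function can be wrapped, and prepare_method_call is called as an unbound function with an explicit self, assuming it sits at module scope next to this snippet.

from functools import partial  # required by prepare_method_call above

class ToyHandler:
    # Stub: the real class presumably inspects the method's signature here.
    def _method_requires_handler_ref(self, method):
        return False

def greet(name, punctuation="!"):
    return "Hello, " + name + punctuation

handler = ToyHandler()
# A list of args maps to positional parameters...
print(prepare_method_call(handler, greet, ["Ada"])())  # -> Hello, Ada!
# ...a dict maps to keyword parameters...
print(prepare_method_call(handler, greet, {"name": "Bo", "punctuation": "?"})())  # -> Hello, Bo?
# ...and anything else raises:
# prepare_method_call(handler, greet, "oops")
# TypeError: args must be list or dict but got str instead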
def WriteOutput(title, locations, limit, f): """Write html to f for up to limit trips between locations. Args: title: String used in html title locations: list of (lat, lng) tuples limit: maximum number of queries in the html f: a file object """ output_prefix = """ <html> <head> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>%(title)s</title> </head> <body> Random queries for %(title)s<p> This list of random queries should speed up important manual testing. Here are some things to check when looking at the results of a query. <ul> <li> Check the agency attribution under the trip results: <ul> <li> has correct name and spelling of the agency <li> opens a page with general information about the service </ul> <li> For each alternate trip check that each of these is reasonable: <ul> <li> the total time of the trip <li> the time for each leg. Bad data frequently results in a leg going a long way in a few minutes. <li> the icons and mode names (Tram, Bus, etc) are correct for each leg <li> the route names and headsigns are correctly formatted and not redundant. For a good example see <a href="https://developers.google.com/transit/gtfs/examples/display-to-users"> the screenshots in the Google Transit Feed Specification</a>. <li> the shape line on the map looks correct. Make sure the polyline does not zig-zag, loop, skip stops or jump far away unless the trip does the same thing. <li> the route is active on the day the trip planner returns </ul> </ul> If you find a problem be sure to save the URL. This file is generated randomly. <ol> """ % locals() output_suffix = """ </ol> </body> </html> """ % locals() f.write(transitfeed.EncodeUnicode(output_prefix)) for source, destination in zip(locations[0:limit], locations[1:limit + 1]): f.write(transitfeed.EncodeUnicode("<li>%s\n" % LatLngsToGoogleLink(source, destination))) f.write(transitfeed.EncodeUnicode(output_suffix))
def function[WriteOutput, parameter[title, locations, limit, f]]: constant[Write html to f for up to limit trips between locations. Args: title: String used in html title locations: list of (lat, lng) tuples limit: maximum number of queries in the html f: a file object ] variable[output_prefix] assign[=] binary_operation[constant[ <html> <head> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>%(title)s</title> </head> <body> Random queries for %(title)s<p> This list of random queries should speed up important manual testing. Here are some things to check when looking at the results of a query. <ul> <li> Check the agency attribution under the trip results: <ul> <li> has correct name and spelling of the agency <li> opens a page with general information about the service </ul> <li> For each alternate trip check that each of these is reasonable: <ul> <li> the total time of the trip <li> the time for each leg. Bad data frequently results in a leg going a long way in a few minutes. <li> the icons and mode names (Tram, Bus, etc) are correct for each leg <li> the route names and headsigns are correctly formatted and not redundant. For a good example see <a href="https://developers.google.com/transit/gtfs/examples/display-to-users"> the screenshots in the Google Transit Feed Specification</a>. <li> the shape line on the map looks correct. Make sure the polyline does not zig-zag, loop, skip stops or jump far away unless the trip does the same thing. <li> the route is active on the day the trip planner returns </ul> </ul> If you find a problem be sure to save the URL. This file is generated randomly. <ol> ] <ast.Mod object at 0x7da2590d6920> call[name[locals], parameter[]]] variable[output_suffix] assign[=] binary_operation[constant[ </ol> </body> </html> ] <ast.Mod object at 0x7da2590d6920> call[name[locals], parameter[]]] call[name[f].write, parameter[call[name[transitfeed].EncodeUnicode, parameter[name[output_prefix]]]]] for taget[tuple[[<ast.Name object at 0x7da18bccab30>, <ast.Name object at 0x7da18bccace0>]]] in starred[call[name[zip], parameter[call[name[locations]][<ast.Slice object at 0x7da18bccada0>], call[name[locations]][<ast.Slice object at 0x7da18bcc83a0>]]]] begin[:] call[name[f].write, parameter[call[name[transitfeed].EncodeUnicode, parameter[binary_operation[constant[<li>%s ] <ast.Mod object at 0x7da2590d6920> call[name[LatLngsToGoogleLink], parameter[name[source], name[destination]]]]]]]] call[name[f].write, parameter[call[name[transitfeed].EncodeUnicode, parameter[name[output_suffix]]]]]
keyword[def] identifier[WriteOutput] ( identifier[title] , identifier[locations] , identifier[limit] , identifier[f] ): literal[string] identifier[output_prefix] = literal[string] % identifier[locals] () identifier[output_suffix] = literal[string] % identifier[locals] () identifier[f] . identifier[write] ( identifier[transitfeed] . identifier[EncodeUnicode] ( identifier[output_prefix] )) keyword[for] identifier[source] , identifier[destination] keyword[in] identifier[zip] ( identifier[locations] [ literal[int] : identifier[limit] ], identifier[locations] [ literal[int] : identifier[limit] + literal[int] ]): identifier[f] . identifier[write] ( identifier[transitfeed] . identifier[EncodeUnicode] ( literal[string] % identifier[LatLngsToGoogleLink] ( identifier[source] , identifier[destination] ))) identifier[f] . identifier[write] ( identifier[transitfeed] . identifier[EncodeUnicode] ( identifier[output_suffix] ))
def WriteOutput(title, locations, limit, f): """Write html to f for up to limit trips between locations. Args: title: String used in html title locations: list of (lat, lng) tuples limit: maximum number of queries in the html f: a file object """ output_prefix = '\n<html>\n<head>\n<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">\n<title>%(title)s</title>\n</head>\n<body>\nRandom queries for %(title)s<p>\nThis list of random queries should speed up important manual testing. Here are\nsome things to check when looking at the results of a query.\n<ul>\n <li> Check the agency attribution under the trip results:\n <ul>\n <li> has correct name and spelling of the agency\n <li> opens a page with general information about the service\n </ul>\n <li> For each alternate trip check that each of these is reasonable:\n <ul>\n <li> the total time of the trip\n <li> the time for each leg. Bad data frequently results in a leg going a long\n way in a few minutes.\n <li> the icons and mode names (Tram, Bus, etc) are correct for each leg\n <li> the route names and headsigns are correctly formatted and not\n redundant.\n For a good example see <a\n href="https://developers.google.com/transit/gtfs/examples/display-to-users">\n the screenshots in the Google Transit Feed Specification</a>.\n <li> the shape line on the map looks correct. Make sure the polyline does\n not zig-zag, loop, skip stops or jump far away unless the trip does the\n same thing.\n <li> the route is active on the day the trip planner returns\n </ul>\n</ul>\nIf you find a problem be sure to save the URL. This file is generated randomly.\n<ol>\n' % locals() output_suffix = '\n</ol>\n</body>\n</html>\n' % locals() f.write(transitfeed.EncodeUnicode(output_prefix)) for (source, destination) in zip(locations[0:limit], locations[1:limit + 1]): f.write(transitfeed.EncodeUnicode('<li>%s\n' % LatLngsToGoogleLink(source, destination))) # depends on [control=['for'], data=[]] f.write(transitfeed.EncodeUnicode(output_suffix))
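A small driver sketch for WriteOutput. The title, coordinates, and output path are made up, and transitfeed plus LatLngsToGoogleLink are assumed importable from the surrounding module. Note the pairing: zip(locations[0:limit], locations[1:limit + 1]) links each point to the next, so limit + 1 locations yield limit queries.

# Hypothetical driver; assumes WriteOutput, transitfeed and
# LatLngsToGoogleLink are available in the current module.
locations = [(37.774, -122.419),  # made-up points around San Francisco Bay
             (37.806, -122.273),
             (37.871, -122.268)]

with open("random_queries.html", "w") as f:
    WriteOutput("Example Transit Agency", locations, 2, f)  # emits links 0->1 and 1->2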