Dataset columns (all string-typed; the two numbers are the min and max string lengths reported by the viewer):

  code             75  - 104k
  code_sememe      47  - 309k
  token_type       215 - 214k
  code_dependency  75  - 155k
def _pyqt4():
    """Initialise PyQt4"""

    import sip

    # Validation of environment variable. Prevents an error if
    # the variable is invalid since it's just a hint.
    try:
        hint = int(QT_SIP_API_HINT)
    except TypeError:
        hint = None  # Variable was None, i.e. not set.
    except ValueError:
        raise ImportError("QT_SIP_API_HINT=%s must be a 1 or 2"
                          % QT_SIP_API_HINT)

    for api in ("QString",
                "QVariant",
                "QDate",
                "QDateTime",
                "QTextStream",
                "QTime",
                "QUrl"):
        try:
            sip.setapi(api, hint or 2)
        except AttributeError:
            raise ImportError("PyQt4 < 4.6 isn't supported by Qt.py")
        except ValueError:
            actual = sip.getapi(api)
            if not hint:
                raise ImportError("API version already set to %d" % actual)
            else:
                # Having provided a hint indicates a soft constraint, one
                # that doesn't throw an exception.
                sys.stderr.write(
                    "Warning: API '%s' has already been set to %d.\n"
                    % (api, actual)
                )

    import PyQt4 as module
    extras = ["uic"]

    try:
        import sip
        extras.append(sip.__name__)
    except ImportError:
        sip = None

    _setup(module, extras)

    if hasattr(Qt, "_sip"):
        Qt.QtCompat.wrapInstance = _wrapinstance
        Qt.QtCompat.getCppPointer = _getcpppointer
        Qt.QtCompat.delete = sip.delete

    if hasattr(Qt, "_uic"):
        Qt.QtCompat.loadUi = _loadUi

    if hasattr(Qt, "_QtGui"):
        setattr(Qt, "QtWidgets", _new_module("QtWidgets"))
        setattr(Qt, "_QtWidgets", Qt._QtGui)

        if hasattr(Qt._QtGui, "QX11Info"):
            setattr(Qt, "QtX11Extras", _new_module("QtX11Extras"))
            Qt.QtX11Extras.QX11Info = Qt._QtGui.QX11Info

        Qt.QtCompat.setSectionResizeMode = \
            Qt._QtGui.QHeaderView.setResizeMode

    if hasattr(Qt, "_QtCore"):
        Qt.__binding_version__ = Qt._QtCore.PYQT_VERSION_STR
        Qt.__qt_version__ = Qt._QtCore.QT_VERSION_STR

    _reassign_misplaced_members("PyQt4")

    # QFileDialog QtCompat decorator
    def _standardizeQFileDialog(some_function):
        """Decorator that makes the PyQt4 return value conform to other bindings"""
        def wrapper(*args, **kwargs):
            ret = some_function(*args, **kwargs)

            # PyQt4 only returns the selected filename; force it to the
            # standard return of the selected filename and an empty string
            # for the selected filter.
            return ret, ''

        wrapper.__doc__ = some_function.__doc__
        wrapper.__name__ = some_function.__name__

        return wrapper

    decorators = {
        "QFileDialog": {
            "getOpenFileName": _standardizeQFileDialog,
            "getOpenFileNames": _standardizeQFileDialog,
            "getSaveFileName": _standardizeQFileDialog,
        }
    }
    _build_compatibility_members('PyQt4', decorators)
def function[_pyqt4, parameter[]]: constant[Initialise PyQt4] import module[sip] <ast.Try object at 0x7da1b18e7fd0> for taget[name[api]] in starred[tuple[[<ast.Constant object at 0x7da1b18e5c90>, <ast.Constant object at 0x7da1b18e5900>, <ast.Constant object at 0x7da1b18e5930>, <ast.Constant object at 0x7da1b18e74f0>, <ast.Constant object at 0x7da1b18e6b90>, <ast.Constant object at 0x7da1b18e7e50>, <ast.Constant object at 0x7da1b18e5b10>]]] begin[:] <ast.Try object at 0x7da1b18e6ad0> import module[PyQt4] as alias[module] variable[extras] assign[=] list[[<ast.Constant object at 0x7da1b18e6350>]] <ast.Try object at 0x7da1b18e4340> call[name[_setup], parameter[name[module], name[extras]]] if call[name[hasattr], parameter[name[Qt], constant[_sip]]] begin[:] name[Qt].QtCompat.wrapInstance assign[=] name[_wrapinstance] name[Qt].QtCompat.getCppPointer assign[=] name[_getcpppointer] name[Qt].QtCompat.delete assign[=] name[sip].delete if call[name[hasattr], parameter[name[Qt], constant[_uic]]] begin[:] name[Qt].QtCompat.loadUi assign[=] name[_loadUi] if call[name[hasattr], parameter[name[Qt], constant[_QtGui]]] begin[:] call[name[setattr], parameter[name[Qt], constant[QtWidgets], call[name[_new_module], parameter[constant[QtWidgets]]]]] call[name[setattr], parameter[name[Qt], constant[_QtWidgets], name[Qt]._QtGui]] if call[name[hasattr], parameter[name[Qt]._QtGui, constant[QX11Info]]] begin[:] call[name[setattr], parameter[name[Qt], constant[QtX11Extras], call[name[_new_module], parameter[constant[QtX11Extras]]]]] name[Qt].QtX11Extras.QX11Info assign[=] name[Qt]._QtGui.QX11Info name[Qt].QtCompat.setSectionResizeMode assign[=] name[Qt]._QtGui.QHeaderView.setResizeMode if call[name[hasattr], parameter[name[Qt], constant[_QtCore]]] begin[:] name[Qt].__binding_version__ assign[=] name[Qt]._QtCore.PYQT_VERSION_STR name[Qt].__qt_version__ assign[=] name[Qt]._QtCore.QT_VERSION_STR call[name[_reassign_misplaced_members], parameter[constant[PyQt4]]] def function[_standardizeQFileDialog, parameter[some_function]]: constant[Decorator that makes PyQt4 return conform to other bindings] def function[wrapper, parameter[]]: variable[ret] assign[=] call[name[some_function], parameter[<ast.Starred object at 0x7da1b18e4f70>]] return[tuple[[<ast.Name object at 0x7da1b18e74c0>, <ast.Constant object at 0x7da1b18e60e0>]]] name[wrapper].__doc__ assign[=] name[some_function].__doc__ name[wrapper].__name__ assign[=] name[some_function].__name__ return[name[wrapper]] variable[decorators] assign[=] dictionary[[<ast.Constant object at 0x7da1b18e46a0>], [<ast.Dict object at 0x7da1b18e44c0>]] call[name[_build_compatibility_members], parameter[constant[PyQt4], name[decorators]]]
keyword[def] identifier[_pyqt4] (): literal[string] keyword[import] identifier[sip] keyword[try] : identifier[hint] = identifier[int] ( identifier[QT_SIP_API_HINT] ) keyword[except] identifier[TypeError] : identifier[hint] = keyword[None] keyword[except] identifier[ValueError] : keyword[raise] identifier[ImportError] ( literal[string] ) keyword[for] identifier[api] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ): keyword[try] : identifier[sip] . identifier[setapi] ( identifier[api] , identifier[hint] keyword[or] literal[int] ) keyword[except] identifier[AttributeError] : keyword[raise] identifier[ImportError] ( literal[string] ) keyword[except] identifier[ValueError] : identifier[actual] = identifier[sip] . identifier[getapi] ( identifier[api] ) keyword[if] keyword[not] identifier[hint] : keyword[raise] identifier[ImportError] ( literal[string] % identifier[actual] ) keyword[else] : identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] %( identifier[api] , identifier[actual] ) ) keyword[import] identifier[PyQt4] keyword[as] identifier[module] identifier[extras] =[ literal[string] ] keyword[try] : keyword[import] identifier[sip] identifier[extras] . identifier[append] ( identifier[sip] . identifier[__name__] ) keyword[except] identifier[ImportError] : identifier[sip] = keyword[None] identifier[_setup] ( identifier[module] , identifier[extras] ) keyword[if] identifier[hasattr] ( identifier[Qt] , literal[string] ): identifier[Qt] . identifier[QtCompat] . identifier[wrapInstance] = identifier[_wrapinstance] identifier[Qt] . identifier[QtCompat] . identifier[getCppPointer] = identifier[_getcpppointer] identifier[Qt] . identifier[QtCompat] . identifier[delete] = identifier[sip] . identifier[delete] keyword[if] identifier[hasattr] ( identifier[Qt] , literal[string] ): identifier[Qt] . identifier[QtCompat] . identifier[loadUi] = identifier[_loadUi] keyword[if] identifier[hasattr] ( identifier[Qt] , literal[string] ): identifier[setattr] ( identifier[Qt] , literal[string] , identifier[_new_module] ( literal[string] )) identifier[setattr] ( identifier[Qt] , literal[string] , identifier[Qt] . identifier[_QtGui] ) keyword[if] identifier[hasattr] ( identifier[Qt] . identifier[_QtGui] , literal[string] ): identifier[setattr] ( identifier[Qt] , literal[string] , identifier[_new_module] ( literal[string] )) identifier[Qt] . identifier[QtX11Extras] . identifier[QX11Info] = identifier[Qt] . identifier[_QtGui] . identifier[QX11Info] identifier[Qt] . identifier[QtCompat] . identifier[setSectionResizeMode] = identifier[Qt] . identifier[_QtGui] . identifier[QHeaderView] . identifier[setResizeMode] keyword[if] identifier[hasattr] ( identifier[Qt] , literal[string] ): identifier[Qt] . identifier[__binding_version__] = identifier[Qt] . identifier[_QtCore] . identifier[PYQT_VERSION_STR] identifier[Qt] . identifier[__qt_version__] = identifier[Qt] . identifier[_QtCore] . identifier[QT_VERSION_STR] identifier[_reassign_misplaced_members] ( literal[string] ) keyword[def] identifier[_standardizeQFileDialog] ( identifier[some_function] ): literal[string] keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ): identifier[ret] =( identifier[some_function] (* identifier[args] ,** identifier[kwargs] )) keyword[return] identifier[ret] , literal[string] identifier[wrapper] . identifier[__doc__] = identifier[some_function] . identifier[__doc__] identifier[wrapper] . 
identifier[__name__] = identifier[some_function] . identifier[__name__] keyword[return] identifier[wrapper] identifier[decorators] ={ literal[string] :{ literal[string] : identifier[_standardizeQFileDialog] , literal[string] : identifier[_standardizeQFileDialog] , literal[string] : identifier[_standardizeQFileDialog] , } } identifier[_build_compatibility_members] ( literal[string] , identifier[decorators] )
def _pyqt4(): """Initialise PyQt4""" import sip # Validation of envivornment variable. Prevents an error if # the variable is invalid since it's just a hint. try: hint = int(QT_SIP_API_HINT) # depends on [control=['try'], data=[]] except TypeError: hint = None # Variable was None, i.e. not set. # depends on [control=['except'], data=[]] except ValueError: raise ImportError('QT_SIP_API_HINT=%s must be a 1 or 2') # depends on [control=['except'], data=[]] for api in ('QString', 'QVariant', 'QDate', 'QDateTime', 'QTextStream', 'QTime', 'QUrl'): try: sip.setapi(api, hint or 2) # depends on [control=['try'], data=[]] except AttributeError: raise ImportError("PyQt4 < 4.6 isn't supported by Qt.py") # depends on [control=['except'], data=[]] except ValueError: actual = sip.getapi(api) if not hint: raise ImportError('API version already set to %d' % actual) # depends on [control=['if'], data=[]] else: # Having provided a hint indicates a soft constraint, one # that doesn't throw an exception. sys.stderr.write("Warning: API '%s' has already been set to %d.\n" % (api, actual)) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['api']] import PyQt4 as module extras = ['uic'] try: import sip extras.append(sip.__name__) # depends on [control=['try'], data=[]] except ImportError: sip = None # depends on [control=['except'], data=[]] _setup(module, extras) if hasattr(Qt, '_sip'): Qt.QtCompat.wrapInstance = _wrapinstance Qt.QtCompat.getCppPointer = _getcpppointer Qt.QtCompat.delete = sip.delete # depends on [control=['if'], data=[]] if hasattr(Qt, '_uic'): Qt.QtCompat.loadUi = _loadUi # depends on [control=['if'], data=[]] if hasattr(Qt, '_QtGui'): setattr(Qt, 'QtWidgets', _new_module('QtWidgets')) setattr(Qt, '_QtWidgets', Qt._QtGui) if hasattr(Qt._QtGui, 'QX11Info'): setattr(Qt, 'QtX11Extras', _new_module('QtX11Extras')) Qt.QtX11Extras.QX11Info = Qt._QtGui.QX11Info # depends on [control=['if'], data=[]] Qt.QtCompat.setSectionResizeMode = Qt._QtGui.QHeaderView.setResizeMode # depends on [control=['if'], data=[]] if hasattr(Qt, '_QtCore'): Qt.__binding_version__ = Qt._QtCore.PYQT_VERSION_STR Qt.__qt_version__ = Qt._QtCore.QT_VERSION_STR # depends on [control=['if'], data=[]] _reassign_misplaced_members('PyQt4') # QFileDialog QtCompat decorator def _standardizeQFileDialog(some_function): """Decorator that makes PyQt4 return conform to other bindings""" def wrapper(*args, **kwargs): ret = some_function(*args, **kwargs) # PyQt4 only returns the selected filename, force it to a # standard return of the selected filename, and a empty string # for the selected filter return (ret, '') wrapper.__doc__ = some_function.__doc__ wrapper.__name__ = some_function.__name__ return wrapper decorators = {'QFileDialog': {'getOpenFileName': _standardizeQFileDialog, 'getOpenFileNames': _standardizeQFileDialog, 'getSaveFileName': _standardizeQFileDialog}} _build_compatibility_members('PyQt4', decorators)
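As a side note on the record above: the shim pattern in _standardizeQFileDialog is easy to verify in isolation. Below is a minimal, self-contained sketch (fake_getOpenFileName is a hypothetical stand-in, not part of the record) showing how padding the return value makes a PyQt4-style single return match the (filename, selected_filter) pair of the newer bindings.

def _standardizeQFileDialog(some_function):
    def wrapper(*args, **kwargs):
        # Pad the single PyQt4-style return value with an empty filter string.
        return some_function(*args, **kwargs), ''
    wrapper.__doc__ = some_function.__doc__
    wrapper.__name__ = some_function.__name__
    return wrapper

def fake_getOpenFileName():       # hypothetical stand-in for PyQt4's static method
    return '/tmp/example.txt'     # PyQt4 returns only the selected filename

shimmed = _standardizeQFileDialog(fake_getOpenFileName)
assert shimmed() == ('/tmp/example.txt', '')  # Qt5-style (filename, filter) pair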
def turtle_to_texture(turtle_program, turn_amount=DEFAULT_TURN,
                      initial_angle=DEFAULT_INITIAL_ANGLE,
                      resolution=1):
    """Makes a texture from a turtle program.

    Args:
        turtle_program (str): a string representing the turtle program;
            see the docstring of `branching_turtle_generator` for more details
        turn_amount (float): amount to turn in degrees
        initial_angle (float): initial orientation of the turtle
        resolution (int): if provided, interpolation amount for visible lines

    Returns:
        texture: A texture.
    """
    generator = branching_turtle_generator(
        turtle_program, turn_amount, initial_angle, resolution)

    return texture_from_generator(generator)
def function[turtle_to_texture, parameter[turtle_program, turn_amount, initial_angle, resolution]]: constant[Makes a texture from a turtle program. Args: turtle_program (str): a string representing the turtle program; see the docstring of `branching_turtle_generator` for more details turn_amount (float): amount to turn in degrees initial_angle (float): initial orientation of the turtle resolution (int): if provided, interpolation amount for visible lines Returns: texture: A texture. ] variable[generator] assign[=] call[name[branching_turtle_generator], parameter[name[turtle_program], name[turn_amount], name[initial_angle], name[resolution]]] return[call[name[texture_from_generator], parameter[name[generator]]]]
keyword[def] identifier[turtle_to_texture] ( identifier[turtle_program] , identifier[turn_amount] = identifier[DEFAULT_TURN] , identifier[initial_angle] = identifier[DEFAULT_INITIAL_ANGLE] , identifier[resolution] = literal[int] ): literal[string] identifier[generator] = identifier[branching_turtle_generator] ( identifier[turtle_program] , identifier[turn_amount] , identifier[initial_angle] , identifier[resolution] ) keyword[return] identifier[texture_from_generator] ( identifier[generator] )
def turtle_to_texture(turtle_program, turn_amount=DEFAULT_TURN, initial_angle=DEFAULT_INITIAL_ANGLE, resolution=1): """Makes a texture from a turtle program. Args: turtle_program (str): a string representing the turtle program; see the docstring of `branching_turtle_generator` for more details turn_amount (float): amount to turn in degrees initial_angle (float): initial orientation of the turtle resolution (int): if provided, interpolation amount for visible lines Returns: texture: A texture. """ generator = branching_turtle_generator(turtle_program, turn_amount, initial_angle, resolution) return texture_from_generator(generator)
def write(self, gpio, level):
    """
    Sets the gpio level.

    gpio:= 0-53.
    level:= 0, 1.

    If PWM or servo pulses are active on the gpio they are switched off.

    ...
    pi.set_mode(17, pigpio.OUTPUT)

    pi.write(17,0)
    print(pi.read(17))
    0

    pi.write(17,1)
    print(pi.read(17))
    1
    ...
    """
    res = yield from self._pigpio_aio_command(_PI_CMD_WRITE, gpio, level)
    return _u2i(res)
def function[write, parameter[self, gpio, level]]: constant[ Sets the gpio level. gpio:= 0-53. level:= 0, 1. If PWM or servo pulses are active on the gpio they are switched off. ... pi.set_mode(17, pigpio.OUTPUT) pi.write(17,0) print(pi.read(17)) 0 pi.write(17,1) print(pi.read(17)) 1 ... ] variable[res] assign[=] <ast.YieldFrom object at 0x7da1b24ae9e0> return[call[name[_u2i], parameter[name[res]]]]
keyword[def] identifier[write] ( identifier[self] , identifier[gpio] , identifier[level] ): literal[string] identifier[res] = keyword[yield] keyword[from] identifier[self] . identifier[_pigpio_aio_command] ( identifier[_PI_CMD_WRITE] , identifier[gpio] , identifier[level] ) keyword[return] identifier[_u2i] ( identifier[res] )
def write(self, gpio, level): """ Sets the gpio level. gpio:= 0-53. level:= 0, 1. If PWM or servo pulses are active on the gpio they are switched off. ... pi.set_mode(17, pigpio.OUTPUT) pi.write(17,0) print(pi.read(17)) 0 pi.write(17,1) print(pi.read(17)) 1 ... """ res = (yield from self._pigpio_aio_command(_PI_CMD_WRITE, gpio, level)) return _u2i(res)
def get_og_image_url(self):
    """
    :return: URL of the image to use in OG shares
    """
    li = self.get_list_image()
    if li:
        from easy_thumbnails.files import get_thumbnailer
        thumb_url = get_thumbnailer(li)['og_image'].url
        # TODO: looks like this may fail if SITE_DOMAIN = "acmi.lvh.me"
        return urljoin(settings.SITE_DOMAIN, thumb_url)
def function[get_og_image_url, parameter[self]]: constant[ :return: URL of the image to use in OG shares ] variable[li] assign[=] call[name[self].get_list_image, parameter[]] if name[li] begin[:] from relative_module[easy_thumbnails.files] import module[get_thumbnailer] variable[thumb_url] assign[=] call[call[name[get_thumbnailer], parameter[name[li]]]][constant[og_image]].url return[call[name[urljoin], parameter[name[settings].SITE_DOMAIN, name[thumb_url]]]]
keyword[def] identifier[get_og_image_url] ( identifier[self] ): literal[string] identifier[li] = identifier[self] . identifier[get_list_image] () keyword[if] identifier[li] : keyword[from] identifier[easy_thumbnails] . identifier[files] keyword[import] identifier[get_thumbnailer] identifier[thumb_url] = identifier[get_thumbnailer] ( identifier[li] )[ literal[string] ]. identifier[url] keyword[return] identifier[urljoin] ( identifier[settings] . identifier[SITE_DOMAIN] , identifier[thumb_url] )
def get_og_image_url(self): """ :return: URL of the image to use in OG shares """ li = self.get_list_image() if li: from easy_thumbnails.files import get_thumbnailer thumb_url = get_thumbnailer(li)['og_image'].url # TODO: looks like this may fail if SITE_DOMAIN = "acmi.lvh.me" return urljoin(settings.SITE_DOMAIN, thumb_url) # depends on [control=['if'], data=[]]
async def consume_queue(self, subscriber: AbstractSubscriber) -> None:
    """
    Subscribe to the queue consuming.

    :param subscriber:
    :return:
    """
    queue_name = subscriber.name
    topics = subscriber.requested_topics

    if queue_name in self._known_queues:
        raise exceptions.ConsumerError("Queue '%s' already being consumed" % queue_name)

    await self._declare_queue(queue_name)

    # TODO: There is a lot of room for improvement here. Figure out routing done the right way
    for key in topics:
        self._routing.setdefault(key, set())

        if subscriber in self._routing[key]:
            logger.warning("Subscriber '%s' already receiving routing_key '%s'", subscriber, key)
            break

        await self._bind_key_to_queue(key, queue_name)
        self._routing[key].add(subscriber)

    logger.info("Consuming queue '%s'", queue_name)
    await asyncio.wait_for(
        self._channel.basic_consume(callback=self._on_message, queue_name=queue_name),
        timeout=10
    )
    self._add_to_known_queue(queue_name)
<ast.AsyncFunctionDef object at 0x7da1b15d4160>
keyword[async] keyword[def] identifier[consume_queue] ( identifier[self] , identifier[subscriber] : identifier[AbstractSubscriber] )-> keyword[None] : literal[string] identifier[queue_name] = identifier[subscriber] . identifier[name] identifier[topics] = identifier[subscriber] . identifier[requested_topics] keyword[if] identifier[queue_name] keyword[in] identifier[self] . identifier[_known_queues] : keyword[raise] identifier[exceptions] . identifier[ConsumerError] ( literal[string] % identifier[queue_name] ) keyword[await] identifier[self] . identifier[_declare_queue] ( identifier[queue_name] ) keyword[for] identifier[key] keyword[in] identifier[topics] : identifier[self] . identifier[_routing] . identifier[setdefault] ( identifier[key] , identifier[set] ()) keyword[if] identifier[subscriber] keyword[in] identifier[self] . identifier[_routing] [ identifier[key] ]: identifier[logger] . identifier[warning] ( literal[string] , identifier[subscriber] , identifier[key] ) keyword[break] keyword[await] identifier[self] . identifier[_bind_key_to_queue] ( identifier[key] , identifier[queue_name] ) identifier[self] . identifier[_routing] [ identifier[key] ]. identifier[add] ( identifier[subscriber] ) identifier[logger] . identifier[info] ( literal[string] , identifier[queue_name] ) keyword[await] identifier[asyncio] . identifier[wait_for] ( identifier[self] . identifier[_channel] . identifier[basic_consume] ( identifier[callback] = identifier[self] . identifier[_on_message] , identifier[queue_name] = identifier[queue_name] ), identifier[timeout] = literal[int] ) identifier[self] . identifier[_add_to_known_queue] ( identifier[queue_name] )
async def consume_queue(self, subscriber: AbstractSubscriber) -> None: """ Subscribe to the queue consuming. :param subscriber: :return: """ queue_name = subscriber.name topics = subscriber.requested_topics if queue_name in self._known_queues: raise exceptions.ConsumerError("Queue '%s' already being consumed" % queue_name) # depends on [control=['if'], data=['queue_name']] await self._declare_queue(queue_name) # TODO: There is a lot of room to improvement here. Figure out routing done the right way for key in topics: self._routing.setdefault(key, set()) if subscriber in self._routing[key]: logger.warning("Subscriber '%s' already receiving routing_key '%s'", subscriber, key) break # depends on [control=['if'], data=['subscriber']] await self._bind_key_to_queue(key, queue_name) self._routing[key].add(subscriber) # depends on [control=['for'], data=['key']] logger.info("Consuming queue '%s'", queue_name) await asyncio.wait_for(self._channel.basic_consume(callback=self._on_message, queue_name=queue_name), timeout=10) self._add_to_known_queue(queue_name)
def boolbox(msg="Shall I continue?" , title=" " , choices=("Yes","No") , image=None ): """ Display a boolean msgbox. The default is the first choice. The returned value is calculated this way:: if the first choice is chosen, or if the dialog is cancelled: returns 1 else: returns 0 """ reply = buttonbox(msg=msg, choices=choices, title=title, image=image) if reply == choices[0]: return 1 else: return 0
def function[boolbox, parameter[msg, title, choices, image]]: constant[ Display a boolean msgbox. The default is the first choice. The returned value is calculated this way:: if the first choice is chosen, or if the dialog is cancelled: returns 1 else: returns 0 ] variable[reply] assign[=] call[name[buttonbox], parameter[]] if compare[name[reply] equal[==] call[name[choices]][constant[0]]] begin[:] return[constant[1]]
keyword[def] identifier[boolbox] ( identifier[msg] = literal[string] , identifier[title] = literal[string] , identifier[choices] =( literal[string] , literal[string] ) , identifier[image] = keyword[None] ): literal[string] identifier[reply] = identifier[buttonbox] ( identifier[msg] = identifier[msg] , identifier[choices] = identifier[choices] , identifier[title] = identifier[title] , identifier[image] = identifier[image] ) keyword[if] identifier[reply] == identifier[choices] [ literal[int] ]: keyword[return] literal[int] keyword[else] : keyword[return] literal[int]
def boolbox(msg='Shall I continue?', title=' ', choices=('Yes', 'No'), image=None): """ Display a boolean msgbox. The default is the first choice. The returned value is calculated this way:: if the first choice is chosen, or if the dialog is cancelled: returns 1 else: returns 0 """ reply = buttonbox(msg=msg, choices=choices, title=title, image=image) if reply == choices[0]: return 1 # depends on [control=['if'], data=[]] else: return 0
def shareable(self, boolean):
    """Pass-through helper for the flag function."""
    if boolean:
        r = self.flag({"flag": "make_shareable"})
    else:
        r = self.flag({"flag": "make_not_shareable"})
    return r
def function[shareable, parameter[self, boolean]]: constant[Pass through helper function for flag function.] if name[boolean] begin[:] variable[r] assign[=] call[name[self].flag, parameter[dictionary[[<ast.Constant object at 0x7da18dc99fc0>], [<ast.Constant object at 0x7da18dc98b20>]]]] return[name[r]]
keyword[def] identifier[shareable] ( identifier[self] , identifier[boolean] ): literal[string] keyword[if] ( identifier[boolean] ): identifier[r] = identifier[self] . identifier[flag] ({ literal[string] : literal[string] }) keyword[else] : identifier[r] = identifier[self] . identifier[flag] ({ literal[string] : literal[string] }) keyword[return] identifier[r]
def shareable(self, boolean): """Pass through helper function for flag function.""" if boolean: r = self.flag({'flag': 'make_shareable'}) # depends on [control=['if'], data=[]] else: r = self.flag({'flag': 'make_not_shareable'}) return r
def urldefrag(url):
    """Removes any existing fragment from URL.

    Returns a tuple of the defragmented URL and the fragment.  If
    the URL contained no fragments, the second element is the
    empty string.
    """
    url, _coerce_result = _coerce_args(url)
    if '#' in url:
        s, n, p, a, q, frag = urlparse(url)
        defrag = urlunparse((s, n, p, a, q, ''))
    else:
        frag = ''
        defrag = url
    return _coerce_result(DefragResult(defrag, frag))
def function[urldefrag, parameter[url]]: constant[Removes any existing fragment from URL. Returns a tuple of the defragmented URL and the fragment. If the URL contained no fragments, the second element is the empty string. ] <ast.Tuple object at 0x7da18bcc9000> assign[=] call[name[_coerce_args], parameter[name[url]]] if compare[constant[#] in name[url]] begin[:] <ast.Tuple object at 0x7da18bcc8a60> assign[=] call[name[urlparse], parameter[name[url]]] variable[defrag] assign[=] call[name[urlunparse], parameter[tuple[[<ast.Name object at 0x7da1b2344bb0>, <ast.Name object at 0x7da1b2346bf0>, <ast.Name object at 0x7da1b2346c20>, <ast.Name object at 0x7da1b23472b0>, <ast.Name object at 0x7da1b2345a80>, <ast.Constant object at 0x7da1b2346620>]]]] return[call[name[_coerce_result], parameter[call[name[DefragResult], parameter[name[defrag], name[frag]]]]]]
keyword[def] identifier[urldefrag] ( identifier[url] ): literal[string] identifier[url] , identifier[_coerce_result] = identifier[_coerce_args] ( identifier[url] ) keyword[if] literal[string] keyword[in] identifier[url] : identifier[s] , identifier[n] , identifier[p] , identifier[a] , identifier[q] , identifier[frag] = identifier[urlparse] ( identifier[url] ) identifier[defrag] = identifier[urlunparse] (( identifier[s] , identifier[n] , identifier[p] , identifier[a] , identifier[q] , literal[string] )) keyword[else] : identifier[frag] = literal[string] identifier[defrag] = identifier[url] keyword[return] identifier[_coerce_result] ( identifier[DefragResult] ( identifier[defrag] , identifier[frag] ))
def urldefrag(url): """Removes any existing fragment from URL. Returns a tuple of the defragmented URL and the fragment. If the URL contained no fragments, the second element is the empty string. """ (url, _coerce_result) = _coerce_args(url) if '#' in url: (s, n, p, a, q, frag) = urlparse(url) defrag = urlunparse((s, n, p, a, q, '')) # depends on [control=['if'], data=['url']] else: frag = '' defrag = url return _coerce_result(DefragResult(defrag, frag))
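The code field above mirrors the CPython standard library's implementation, so its behaviour can be checked directly against urllib.parse.urldefrag:

from urllib.parse import urldefrag

url, fragment = urldefrag("https://example.com/page#section-2")
print(url)       # https://example.com/page
print(fragment)  # section-2

# No fragment: the second element is the empty string, as the docstring says.
print(urldefrag("https://example.com/page"))  # DefragResult(url='https://example.com/page', fragment='')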
def get_repositories_by_composition(self, *args, **kwargs):
    """Pass through to provider CompositionRepositorySession.get_repositories_by_composition"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceBinSession.get_bins_by_resource
    catalogs = self._get_provider_session('composition_repository_session').get_repositories_by_composition(*args, **kwargs)
    cat_list = []
    for cat in catalogs:
        cat_list.append(Repository(self._provider_manager, cat, self._runtime, self._proxy))
    return RepositoryList(cat_list)
def function[get_repositories_by_composition, parameter[self]]: constant[Pass through to provider CompositionRepositorySession.get_repositories_by_composition] variable[catalogs] assign[=] call[call[name[self]._get_provider_session, parameter[constant[composition_repository_session]]].get_repositories_by_composition, parameter[<ast.Starred object at 0x7da20c796aa0>]] variable[cat_list] assign[=] list[[]] for taget[name[cat]] in starred[name[catalogs]] begin[:] call[name[cat_list].append, parameter[call[name[Repository], parameter[name[self]._provider_manager, name[cat], name[self]._runtime, name[self]._proxy]]]] return[call[name[RepositoryList], parameter[name[cat_list]]]]
keyword[def] identifier[get_repositories_by_composition] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[catalogs] = identifier[self] . identifier[_get_provider_session] ( literal[string] ). identifier[get_repositories_by_composition] (* identifier[args] ,** identifier[kwargs] ) identifier[cat_list] =[] keyword[for] identifier[cat] keyword[in] identifier[catalogs] : identifier[cat_list] . identifier[append] ( identifier[Repository] ( identifier[self] . identifier[_provider_manager] , identifier[cat] , identifier[self] . identifier[_runtime] , identifier[self] . identifier[_proxy] )) keyword[return] identifier[RepositoryList] ( identifier[cat_list] )
def get_repositories_by_composition(self, *args, **kwargs): """Pass through to provider CompositionRepositorySession.get_repositories_by_composition""" # Implemented from kitosid template for - # osid.resource.ResourceBinSession.get_bins_by_resource catalogs = self._get_provider_session('composition_repository_session').get_repositories_by_composition(*args, **kwargs) cat_list = [] for cat in catalogs: cat_list.append(Repository(self._provider_manager, cat, self._runtime, self._proxy)) # depends on [control=['for'], data=['cat']] return RepositoryList(cat_list)
def primary_key(self):
    """Returns either the primary key value, or a tuple containing
    the primary key values in the case of a composite primary key.
    """
    pkname = self.primary_key_name
    if pkname is None:
        return None
    elif isinstance(pkname, str):
        return getattr(self, pkname)
    else:
        return tuple(getattr(self, pkn) for pkn in pkname)
def function[primary_key, parameter[self]]: constant[Returns either the primary key value, or a tuple containing the primary key values in the case of a composite primary key. ] variable[pkname] assign[=] name[self].primary_key_name if compare[name[pkname] is constant[None]] begin[:] return[constant[None]]
keyword[def] identifier[primary_key] ( identifier[self] ): literal[string] identifier[pkname] = identifier[self] . identifier[primary_key_name] keyword[if] identifier[pkname] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[elif] identifier[isinstance] ( identifier[pkname] , identifier[str] ): keyword[return] identifier[getattr] ( identifier[self] , identifier[pkname] ) keyword[else] : keyword[return] identifier[tuple] (( identifier[getattr] ( identifier[self] , identifier[pkn] ) keyword[for] identifier[pkn] keyword[in] identifier[pkname] ))
def primary_key(self): """Returns either the primary key value, or a tuple containing the primary key values in the case of a composite primary key. """ pkname = self.primary_key_name if pkname is None: return None # depends on [control=['if'], data=[]] elif isinstance(pkname, str): return getattr(self, pkname) # depends on [control=['if'], data=[]] else: return tuple((getattr(self, pkn) for pkn in pkname))
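The property above dispatches on whether primary_key_name is a single attribute name or a sequence of names. A minimal runnable sketch (the Record class here is a toy stand-in, not the original model base) illustrates both cases:

class Record:
    def __init__(self, primary_key_name, **attrs):
        self.primary_key_name = primary_key_name
        for name, value in attrs.items():
            setattr(self, name, value)

    @property
    def primary_key(self):
        pkname = self.primary_key_name
        if pkname is None:
            return None
        elif isinstance(pkname, str):
            return getattr(self, pkname)
        else:
            return tuple(getattr(self, pkn) for pkn in pkname)

print(Record("id", id=7).primary_key)                                         # 7
print(Record(("tenant_id", "user_id"), tenant_id=1, user_id=42).primary_key)  # (1, 42)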
def update_edges(self, elev_fn, dem_proc):
    """
    After finishing a calculation, this will update the neighbors and
    the todo for that tile
    """
    interp = self.build_interpolator(dem_proc)
    self.update_edge_todo(elev_fn, dem_proc)
    self.set_neighbor_data(elev_fn, dem_proc, interp)
def function[update_edges, parameter[self, elev_fn, dem_proc]]: constant[ After finishing a calculation, this will update the neighbors and the todo for that tile ] variable[interp] assign[=] call[name[self].build_interpolator, parameter[name[dem_proc]]] call[name[self].update_edge_todo, parameter[name[elev_fn], name[dem_proc]]] call[name[self].set_neighbor_data, parameter[name[elev_fn], name[dem_proc], name[interp]]]
keyword[def] identifier[update_edges] ( identifier[self] , identifier[elev_fn] , identifier[dem_proc] ): literal[string] identifier[interp] = identifier[self] . identifier[build_interpolator] ( identifier[dem_proc] ) identifier[self] . identifier[update_edge_todo] ( identifier[elev_fn] , identifier[dem_proc] ) identifier[self] . identifier[set_neighbor_data] ( identifier[elev_fn] , identifier[dem_proc] , identifier[interp] )
def update_edges(self, elev_fn, dem_proc): """ After finishing a calculation, this will update the neighbors and the todo for that tile """ interp = self.build_interpolator(dem_proc) self.update_edge_todo(elev_fn, dem_proc) self.set_neighbor_data(elev_fn, dem_proc, interp)
def compute_start_timeperiod(self, process_name, timeperiod):
    """ computes lowest *inclusive* timeperiod boundary for job to process
        for process with time_grouping == 1, it returns given timeperiod with no change
        for process with time_grouping != 1, it computes first timeperiod, not processed by the previous job run
        For instance: with time_grouping = 3, QUALIFIER_HOURLY, and timeperiod = 2016042018,
        the start_timeperiod will be = 2016042016 (computed as 2016042018 - 3 + 1)
    """
    time_grouping = context.process_context[process_name].time_grouping
    if time_grouping == 1:
        return timeperiod

    # step1: translate given timeperiod to the time grouped one
    process_hierarchy = self.timetable.get_tree(process_name).process_hierarchy
    timeperiod_dict = process_hierarchy[process_name].timeperiod_dict
    translated_timeperiod = timeperiod_dict._translate_timeperiod(timeperiod)

    # step 2: compute previous grouped period
    # NOTICE: simple `time_helper.increment_timeperiod(time_qualifier, timeperiod)` is insufficient
    # as it does not address edge cases, such as the last day of the month or the last hour of the day
    # For instance: with time_grouping=3, QUALIFIER_DAILY, and 2016123100
    # the `increment_timeperiod` will yield 2016122800 instead of 2016123100
    time_qualifier = context.process_context[process_name].time_qualifier

    for i in range(1, time_grouping + 1):
        prev_timeperiod = time_helper.increment_timeperiod(time_qualifier, translated_timeperiod, delta=-i)
        if prev_timeperiod == timeperiod_dict._translate_timeperiod(prev_timeperiod):
            # prev_timeperiod is currently at the last grouped timeperiod
            break

    # step 3: compute first exclusive timeperiod after the *prev_timeperiod*,
    # which becomes first inclusive timeperiod for this job run
    over_the_edge_timeperiod = time_helper.increment_timeperiod(time_qualifier, prev_timeperiod, delta=-1)
    if prev_timeperiod != timeperiod_dict._translate_timeperiod(over_the_edge_timeperiod):
        # over_the_edge_timeperiod fell into previous day or month or year
        # *prev_timeperiod* points to the first month, first day of the month or 00 hour
        start_timeperiod = prev_timeperiod
    else:
        start_timeperiod = self.compute_end_timeperiod(process_name, prev_timeperiod)

    return start_timeperiod
def function[compute_start_timeperiod, parameter[self, process_name, timeperiod]]: constant[ computes lowest *inclusive* timeperiod boundary for job to process for process with time_grouping == 1, it returns given timeperiod with no change for process with time_grouping != 1, it computes first timeperiod, not processed by the previous job run For instance: with time_grouping = 3, QUALIFIER_HOURLY, and timeperiod = 2016042018, the start_timeperiod will be = 2016042016 (computed as 2016042018 - 3 + 1) ] variable[time_grouping] assign[=] call[name[context].process_context][name[process_name]].time_grouping if compare[name[time_grouping] equal[==] constant[1]] begin[:] return[name[timeperiod]] variable[process_hierarchy] assign[=] call[name[self].timetable.get_tree, parameter[name[process_name]]].process_hierarchy variable[timeperiod_dict] assign[=] call[name[process_hierarchy]][name[process_name]].timeperiod_dict variable[translated_timeperiod] assign[=] call[name[timeperiod_dict]._translate_timeperiod, parameter[name[timeperiod]]] variable[time_qualifier] assign[=] call[name[context].process_context][name[process_name]].time_qualifier for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[name[time_grouping] + constant[1]]]]] begin[:] variable[prev_timeperiod] assign[=] call[name[time_helper].increment_timeperiod, parameter[name[time_qualifier], name[translated_timeperiod]]] if compare[name[prev_timeperiod] equal[==] call[name[timeperiod_dict]._translate_timeperiod, parameter[name[prev_timeperiod]]]] begin[:] break variable[over_the_edge_timeperiod] assign[=] call[name[time_helper].increment_timeperiod, parameter[name[time_qualifier], name[prev_timeperiod]]] if compare[name[prev_timeperiod] not_equal[!=] call[name[timeperiod_dict]._translate_timeperiod, parameter[name[over_the_edge_timeperiod]]]] begin[:] variable[start_timeperiod] assign[=] name[prev_timeperiod] return[name[start_timeperiod]]
keyword[def] identifier[compute_start_timeperiod] ( identifier[self] , identifier[process_name] , identifier[timeperiod] ): literal[string] identifier[time_grouping] = identifier[context] . identifier[process_context] [ identifier[process_name] ]. identifier[time_grouping] keyword[if] identifier[time_grouping] == literal[int] : keyword[return] identifier[timeperiod] identifier[process_hierarchy] = identifier[self] . identifier[timetable] . identifier[get_tree] ( identifier[process_name] ). identifier[process_hierarchy] identifier[timeperiod_dict] = identifier[process_hierarchy] [ identifier[process_name] ]. identifier[timeperiod_dict] identifier[translated_timeperiod] = identifier[timeperiod_dict] . identifier[_translate_timeperiod] ( identifier[timeperiod] ) identifier[time_qualifier] = identifier[context] . identifier[process_context] [ identifier[process_name] ]. identifier[time_qualifier] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[time_grouping] + literal[int] ): identifier[prev_timeperiod] = identifier[time_helper] . identifier[increment_timeperiod] ( identifier[time_qualifier] , identifier[translated_timeperiod] , identifier[delta] =- identifier[i] ) keyword[if] identifier[prev_timeperiod] == identifier[timeperiod_dict] . identifier[_translate_timeperiod] ( identifier[prev_timeperiod] ): keyword[break] identifier[over_the_edge_timeperiod] = identifier[time_helper] . identifier[increment_timeperiod] ( identifier[time_qualifier] , identifier[prev_timeperiod] , identifier[delta] =- literal[int] ) keyword[if] identifier[prev_timeperiod] != identifier[timeperiod_dict] . identifier[_translate_timeperiod] ( identifier[over_the_edge_timeperiod] ): identifier[start_timeperiod] = identifier[prev_timeperiod] keyword[else] : identifier[start_timeperiod] = identifier[self] . identifier[compute_end_timeperiod] ( identifier[process_name] , identifier[prev_timeperiod] ) keyword[return] identifier[start_timeperiod]
def compute_start_timeperiod(self, process_name, timeperiod): """ computes lowest *inclusive* timeperiod boundary for job to process for process with time_grouping == 1, it returns given timeperiod with no change for process with time_grouping != 1, it computes first timeperiod, not processed by the previous job run For instance: with time_grouping = 3, QUALIFIER_HOURLY, and timeperiod = 2016042018, the start_timeperiod will be = 2016042016 (computed as 2016042018 - 3 + 1) """ time_grouping = context.process_context[process_name].time_grouping if time_grouping == 1: return timeperiod # depends on [control=['if'], data=[]] # step1: translate given timeperiod to the time grouped one process_hierarchy = self.timetable.get_tree(process_name).process_hierarchy timeperiod_dict = process_hierarchy[process_name].timeperiod_dict translated_timeperiod = timeperiod_dict._translate_timeperiod(timeperiod) # step 2: compute previous grouped period # NOTICE: simple `time_helper.increment_timeperiod(time_qualifier, timeperiod)` is insufficient # as it does not address edge cases, such as the last day of the month or the last hour of the day # For instance: with time_grouping=3, QUALIFIER_DAILY, and 2016123100 # the `increment_timeperiod` will yield 2016122800 instead of 2016123100 time_qualifier = context.process_context[process_name].time_qualifier for i in range(1, time_grouping + 1): prev_timeperiod = time_helper.increment_timeperiod(time_qualifier, translated_timeperiod, delta=-i) if prev_timeperiod == timeperiod_dict._translate_timeperiod(prev_timeperiod): # prev_timeperiod is currently at the last grouped timeperiod break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # step 3: compute first exclusive timeperiod after the *prev_timeperiod*, # which becomes first inclusive timeperiod for this job run over_the_edge_timeperiod = time_helper.increment_timeperiod(time_qualifier, prev_timeperiod, delta=-1) if prev_timeperiod != timeperiod_dict._translate_timeperiod(over_the_edge_timeperiod): # over_the_edge_timeperiod fell into previous day or month or year # *prev_timeperiod* points to the first month, first day of the month or 00 hour start_timeperiod = prev_timeperiod # depends on [control=['if'], data=['prev_timeperiod']] else: start_timeperiod = self.compute_end_timeperiod(process_name, prev_timeperiod) return start_timeperiod
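The docstring's worked example is worth spelling out: with time_grouping = 3 and hourly periods, the inclusive start boundary sits time_grouping - 1 periods before the grouped end. A trivial arithmetic check (plain integers standing in for the YYYYMMDDHH timeperiods; the real code goes through time_helper so that day and month edges are handled):

end_timeperiod, time_grouping = 2016042018, 3
start_timeperiod = end_timeperiod - time_grouping + 1
print(start_timeperiod)  # 2016042016, matching the docstring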
def BuildFilterFindSpecs(
    self, artifact_definitions_path, custom_artifacts_path,
    knowledge_base_object, artifact_filter_names=None, filter_file_path=None):
  """Builds find specifications from artifacts or filter file if available.

  Args:
    artifact_definitions_path (str): path to artifact definitions file.
    custom_artifacts_path (str): path to custom artifact definitions file.
    knowledge_base_object (KnowledgeBase): knowledge base.
    artifact_filter_names (Optional[list[str]]): names of artifact
        definitions that are used for filtering file system and Windows
        Registry key paths.
    filter_file_path (Optional[str]): path of filter file.

  Returns:
    list[dfvfs.FindSpec]: find specifications for the file source type.

  Raises:
    InvalidFilter: if no valid FindSpecs are built.
  """
  environment_variables = knowledge_base_object.GetEnvironmentVariables()
  find_specs = None
  if artifact_filter_names:
    logger.debug(
        'building find specification based on artifacts: {0:s}'.format(
            ', '.join(artifact_filter_names)))

    artifacts_registry_object = BaseEngine.BuildArtifactsRegistry(
        artifact_definitions_path, custom_artifacts_path)
    self._artifacts_filter_helper = (
        artifact_filters.ArtifactDefinitionsFilterHelper(
            artifacts_registry_object, knowledge_base_object))
    self._artifacts_filter_helper.BuildFindSpecs(
        artifact_filter_names, environment_variables=environment_variables)

    # If the user selected Windows Registry artifacts we have to ensure
    # the Windows Registry files are parsed.
    if self._artifacts_filter_helper.registry_find_specs:
      self._artifacts_filter_helper.BuildFindSpecs(
          self._WINDOWS_REGISTRY_FILES_ARTIFACT_NAMES,
          environment_variables=environment_variables)

    find_specs = self._artifacts_filter_helper.file_system_find_specs

    if not find_specs:
      raise errors.InvalidFilter(
          'No valid file system find specifications were built from '
          'artifacts.')

  elif filter_file_path:
    logger.debug(
        'building find specification based on filter file: {0:s}'.format(
            filter_file_path))

    filter_file_object = filter_file.FilterFile(filter_file_path)
    find_specs = filter_file_object.BuildFindSpecs(
        environment_variables=environment_variables)

    if not find_specs:
      raise errors.InvalidFilter(
          'No valid file system find specifications were built from filter '
          'file.')

  return find_specs
def function[BuildFilterFindSpecs, parameter[self, artifact_definitions_path, custom_artifacts_path, knowledge_base_object, artifact_filter_names, filter_file_path]]: constant[Builds find specifications from artifacts or filter file if available. Args: artifact_definitions_path (str): path to artifact definitions file. custom_artifacts_path (str): path to custom artifact definitions file. knowledge_base_object (KnowledgeBase): knowledge base. artifact_filter_names (Optional[list[str]]): names of artifact definitions that are used for filtering file system and Windows Registry key paths. filter_file_path (Optional[str]): path of filter file. Returns: list[dfvfs.FindSpec]: find specifications for the file source type. Raises: InvalidFilter: if no valid FindSpecs are built. ] variable[environment_variables] assign[=] call[name[knowledge_base_object].GetEnvironmentVariables, parameter[]] variable[find_specs] assign[=] constant[None] if name[artifact_filter_names] begin[:] call[name[logger].debug, parameter[call[constant[building find specification based on artifacts: {0:s}].format, parameter[call[constant[, ].join, parameter[name[artifact_filter_names]]]]]]] variable[artifacts_registry_object] assign[=] call[name[BaseEngine].BuildArtifactsRegistry, parameter[name[artifact_definitions_path], name[custom_artifacts_path]]] name[self]._artifacts_filter_helper assign[=] call[name[artifact_filters].ArtifactDefinitionsFilterHelper, parameter[name[artifacts_registry_object], name[knowledge_base_object]]] call[name[self]._artifacts_filter_helper.BuildFindSpecs, parameter[name[artifact_filter_names]]] if name[self]._artifacts_filter_helper.registry_find_specs begin[:] call[name[self]._artifacts_filter_helper.BuildFindSpecs, parameter[name[self]._WINDOWS_REGISTRY_FILES_ARTIFACT_NAMES]] variable[find_specs] assign[=] name[self]._artifacts_filter_helper.file_system_find_specs if <ast.UnaryOp object at 0x7da204344a30> begin[:] <ast.Raise object at 0x7da204346920> return[name[find_specs]]
keyword[def] identifier[BuildFilterFindSpecs] ( identifier[self] , identifier[artifact_definitions_path] , identifier[custom_artifacts_path] , identifier[knowledge_base_object] , identifier[artifact_filter_names] = keyword[None] , identifier[filter_file_path] = keyword[None] ): literal[string] identifier[environment_variables] = identifier[knowledge_base_object] . identifier[GetEnvironmentVariables] () identifier[find_specs] = keyword[None] keyword[if] identifier[artifact_filter_names] : identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[artifact_filter_names] ))) identifier[artifacts_registry_object] = identifier[BaseEngine] . identifier[BuildArtifactsRegistry] ( identifier[artifact_definitions_path] , identifier[custom_artifacts_path] ) identifier[self] . identifier[_artifacts_filter_helper] =( identifier[artifact_filters] . identifier[ArtifactDefinitionsFilterHelper] ( identifier[artifacts_registry_object] , identifier[knowledge_base_object] )) identifier[self] . identifier[_artifacts_filter_helper] . identifier[BuildFindSpecs] ( identifier[artifact_filter_names] , identifier[environment_variables] = identifier[environment_variables] ) keyword[if] identifier[self] . identifier[_artifacts_filter_helper] . identifier[registry_find_specs] : identifier[self] . identifier[_artifacts_filter_helper] . identifier[BuildFindSpecs] ( identifier[self] . identifier[_WINDOWS_REGISTRY_FILES_ARTIFACT_NAMES] , identifier[environment_variables] = identifier[environment_variables] ) identifier[find_specs] = identifier[self] . identifier[_artifacts_filter_helper] . identifier[file_system_find_specs] keyword[if] keyword[not] identifier[find_specs] : keyword[raise] identifier[errors] . identifier[InvalidFilter] ( literal[string] literal[string] ) keyword[elif] identifier[filter_file_path] : identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[filter_file_path] )) identifier[filter_file_object] = identifier[filter_file] . identifier[FilterFile] ( identifier[filter_file_path] ) identifier[find_specs] = identifier[filter_file_object] . identifier[BuildFindSpecs] ( identifier[environment_variables] = identifier[environment_variables] ) keyword[if] keyword[not] identifier[find_specs] : keyword[raise] identifier[errors] . identifier[InvalidFilter] ( literal[string] literal[string] ) keyword[return] identifier[find_specs]
def BuildFilterFindSpecs(self, artifact_definitions_path, custom_artifacts_path, knowledge_base_object, artifact_filter_names=None, filter_file_path=None): """Builds find specifications from artifacts or filter file if available. Args: artifact_definitions_path (str): path to artifact definitions file. custom_artifacts_path (str): path to custom artifact definitions file. knowledge_base_object (KnowledgeBase): knowledge base. artifact_filter_names (Optional[list[str]]): names of artifact definitions that are used for filtering file system and Windows Registry key paths. filter_file_path (Optional[str]): path of filter file. Returns: list[dfvfs.FindSpec]: find specifications for the file source type. Raises: InvalidFilter: if no valid FindSpecs are built. """ environment_variables = knowledge_base_object.GetEnvironmentVariables() find_specs = None if artifact_filter_names: logger.debug('building find specification based on artifacts: {0:s}'.format(', '.join(artifact_filter_names))) artifacts_registry_object = BaseEngine.BuildArtifactsRegistry(artifact_definitions_path, custom_artifacts_path) self._artifacts_filter_helper = artifact_filters.ArtifactDefinitionsFilterHelper(artifacts_registry_object, knowledge_base_object) self._artifacts_filter_helper.BuildFindSpecs(artifact_filter_names, environment_variables=environment_variables) # If the user selected Windows Registry artifacts we have to ensure # the Windows Registry files are parsed. if self._artifacts_filter_helper.registry_find_specs: self._artifacts_filter_helper.BuildFindSpecs(self._WINDOWS_REGISTRY_FILES_ARTIFACT_NAMES, environment_variables=environment_variables) # depends on [control=['if'], data=[]] find_specs = self._artifacts_filter_helper.file_system_find_specs if not find_specs: raise errors.InvalidFilter('No valid file system find specifications were built from artifacts.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif filter_file_path: logger.debug('building find specification based on filter file: {0:s}'.format(filter_file_path)) filter_file_object = filter_file.FilterFile(filter_file_path) find_specs = filter_file_object.BuildFindSpecs(environment_variables=environment_variables) if not find_specs: raise errors.InvalidFilter('No valid file system find specifications were built from filter file.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return find_specs
def convert_to_coarse(data, chan_per_coarse):
    '''
    Converts a data array with length n_chans to an array of length
    n_coarse_chans by averaging over the coarse channels
    '''
    # Find number of coarse channels and reshape the array.
    # Integer division so the shape is an int under Python 3 as well.
    num_coarse = data.size // chan_per_coarse
    data_shaped = np.array(np.reshape(data, (num_coarse, chan_per_coarse)))
    # Return the average over each coarse channel, skipping the first two
    # and the last fine channel of every coarse channel.
    return np.mean(data_shaped[:, 2:-1], axis=1)
def function[convert_to_coarse, parameter[data, chan_per_coarse]]: constant[ Converts a data array with length n_chans to an array of length n_coarse_chans by averaging over the coarse channels ] variable[num_coarse] assign[=] binary_operation[name[data].size / name[chan_per_coarse]] variable[data_shaped] assign[=] call[name[np].array, parameter[call[name[np].reshape, parameter[name[data], tuple[[<ast.Name object at 0x7da20c7c9720>, <ast.Name object at 0x7da20c7c9c30>]]]]]] return[call[name[np].mean, parameter[call[name[data_shaped]][tuple[[<ast.Slice object at 0x7da20c7ca290>, <ast.Slice object at 0x7da20c7cb1c0>]]]]]]
keyword[def] identifier[convert_to_coarse] ( identifier[data] , identifier[chan_per_coarse] ): literal[string] identifier[num_coarse] = identifier[data] . identifier[size] / identifier[chan_per_coarse] identifier[data_shaped] = identifier[np] . identifier[array] ( identifier[np] . identifier[reshape] ( identifier[data] ,( identifier[num_coarse] , identifier[chan_per_coarse] ))) keyword[return] identifier[np] . identifier[mean] ( identifier[data_shaped] [:, literal[int] :- literal[int] ], identifier[axis] = literal[int] )
def convert_to_coarse(data, chan_per_coarse): """ Converts a data array with length n_chans to an array of length n_coarse_chans by averaging over the coarse channels """ #find number of coarse channels and reshape array num_coarse = data.size / chan_per_coarse data_shaped = np.array(np.reshape(data, (num_coarse, chan_per_coarse))) #Return the average over each coarse channel return np.mean(data_shaped[:, 2:-1], axis=1)
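A small runnable check of the averaging above (the function is restated so the snippet is self-contained; note the 2:-1 slice drops the first two and the last fine channel of every coarse channel before averaging):

import numpy as np

def convert_to_coarse(data, chan_per_coarse):
    num_coarse = data.size // chan_per_coarse
    data_shaped = np.reshape(data, (num_coarse, chan_per_coarse))
    return np.mean(data_shaped[:, 2:-1], axis=1)

spectrum = np.arange(12.0)             # 12 fine channels
print(convert_to_coarse(spectrum, 4))  # [ 2.  6. 10.], one value per coarse channel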
def plot(self, numPoints=100):
    """
    Specific plotting method for boxes. Only supports 3-dimensional objects.
    """
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')

    # generate the sampling grid along each box dimension
    x = np.linspace(-self.dimensions[0] / 2., self.dimensions[0] / 2., numPoints)
    y = np.linspace(-self.dimensions[1] / 2., self.dimensions[1] / 2., numPoints)
    z = np.linspace(-self.dimensions[2] / 2., self.dimensions[2] / 2., numPoints)

    # plot the six faces of the box
    Xc, Yc = np.meshgrid(x, y)
    ax.plot_surface(Xc, Yc, -self.dimensions[2] / 2, alpha=0.2, rstride=20, cstride=10)
    ax.plot_surface(Xc, Yc, self.dimensions[2] / 2, alpha=0.2, rstride=20, cstride=10)

    Yc, Zc = np.meshgrid(y, z)
    ax.plot_surface(-self.dimensions[0] / 2, Yc, Zc, alpha=0.2, rstride=20, cstride=10)
    ax.plot_surface(self.dimensions[0] / 2, Yc, Zc, alpha=0.2, rstride=20, cstride=10)

    Xc, Zc = np.meshgrid(x, z)
    ax.plot_surface(Xc, -self.dimensions[1] / 2, Zc, alpha=0.2, rstride=20, cstride=10)
    ax.plot_surface(Xc, self.dimensions[1] / 2, Zc, alpha=0.2, rstride=20, cstride=10)

    ax.set_xlabel("X")
    ax.set_ylabel("Y")
    ax.set_zlabel("Z")
    plt.title("{}".format(self))
    return fig, ax
def function[plot, parameter[self, numPoints]]: constant[ Specific plotting method for boxes. Only supports 3-dimensional objects. ] variable[fig] assign[=] call[name[plt].figure, parameter[]] variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[111]]] variable[x] assign[=] call[name[np].linspace, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b0847a90> / constant[2.0]], binary_operation[call[name[self].dimensions][constant[0]] / constant[2.0]], name[numPoints]]] variable[y] assign[=] call[name[np].linspace, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b0847700> / constant[2.0]], binary_operation[call[name[self].dimensions][constant[1]] / constant[2.0]], name[numPoints]]] variable[z] assign[=] call[name[np].linspace, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b0846aa0> / constant[2.0]], binary_operation[call[name[self].dimensions][constant[2]] / constant[2.0]], name[numPoints]]] <ast.Tuple object at 0x7da1b08440a0> assign[=] call[name[np].meshgrid, parameter[name[x], name[y]]] call[name[ax].plot_surface, parameter[name[Xc], name[Yc], binary_operation[<ast.UnaryOp object at 0x7da1b0844370> / constant[2]]]] call[name[ax].plot_surface, parameter[name[Xc], name[Yc], binary_operation[call[name[self].dimensions][constant[2]] / constant[2]]]] <ast.Tuple object at 0x7da1b0845db0> assign[=] call[name[np].meshgrid, parameter[name[y], name[z]]] call[name[ax].plot_surface, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b0846110> / constant[2]], name[Yc], name[Zc]]] call[name[ax].plot_surface, parameter[binary_operation[call[name[self].dimensions][constant[0]] / constant[2]], name[Yc], name[Zc]]] <ast.Tuple object at 0x7da1b0844c40> assign[=] call[name[np].meshgrid, parameter[name[x], name[z]]] call[name[ax].plot_surface, parameter[name[Xc], binary_operation[<ast.UnaryOp object at 0x7da1b0846680> / constant[2]], name[Zc]]] call[name[ax].plot_surface, parameter[name[Xc], binary_operation[call[name[self].dimensions][constant[1]] / constant[2]], name[Zc]]] call[name[ax].set_xlabel, parameter[constant[X]]] call[name[ax].set_ylabel, parameter[constant[Y]]] call[name[ax].set_zlabel, parameter[constant[Z]]] call[name[plt].title, parameter[call[constant[{}].format, parameter[name[self]]]]] return[tuple[[<ast.Name object at 0x7da1b08308e0>, <ast.Name object at 0x7da1b08327a0>]]]
keyword[def] identifier[plot] ( identifier[self] , identifier[numPoints] = literal[int] ): literal[string] identifier[fig] = identifier[plt] . identifier[figure] () identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] , identifier[projection] = literal[string] ) identifier[x] = identifier[np] . identifier[linspace] (- identifier[self] . identifier[dimensions] [ literal[int] ]/ literal[int] , identifier[self] . identifier[dimensions] [ literal[int] ]/ literal[int] , identifier[numPoints] ) identifier[y] = identifier[np] . identifier[linspace] (- identifier[self] . identifier[dimensions] [ literal[int] ]/ literal[int] , identifier[self] . identifier[dimensions] [ literal[int] ]/ literal[int] , identifier[numPoints] ) identifier[z] = identifier[np] . identifier[linspace] (- identifier[self] . identifier[dimensions] [ literal[int] ]/ literal[int] , identifier[self] . identifier[dimensions] [ literal[int] ]/ literal[int] , identifier[numPoints] ) identifier[Xc] , identifier[Yc] = identifier[np] . identifier[meshgrid] ( identifier[x] , identifier[y] ) identifier[ax] . identifier[plot_surface] ( identifier[Xc] , identifier[Yc] ,- identifier[self] . identifier[dimensions] [ literal[int] ]/ literal[int] , identifier[alpha] = literal[int] , identifier[rstride] = literal[int] , identifier[cstride] = literal[int] ) identifier[ax] . identifier[plot_surface] ( identifier[Xc] , identifier[Yc] , identifier[self] . identifier[dimensions] [ literal[int] ]/ literal[int] , identifier[alpha] = literal[int] , identifier[rstride] = literal[int] , identifier[cstride] = literal[int] ) identifier[Yc] , identifier[Zc] = identifier[np] . identifier[meshgrid] ( identifier[y] , identifier[z] ) identifier[ax] . identifier[plot_surface] (- identifier[self] . identifier[dimensions] [ literal[int] ]/ literal[int] , identifier[Yc] , identifier[Zc] , identifier[alpha] = literal[int] , identifier[rstride] = literal[int] , identifier[cstride] = literal[int] ) identifier[ax] . identifier[plot_surface] ( identifier[self] . identifier[dimensions] [ literal[int] ]/ literal[int] , identifier[Yc] , identifier[Zc] , identifier[alpha] = literal[int] , identifier[rstride] = literal[int] , identifier[cstride] = literal[int] ) identifier[Xc] , identifier[Zc] = identifier[np] . identifier[meshgrid] ( identifier[x] , identifier[z] ) identifier[ax] . identifier[plot_surface] ( identifier[Xc] ,- identifier[self] . identifier[dimensions] [ literal[int] ]/ literal[int] , identifier[Zc] , identifier[alpha] = literal[int] , identifier[rstride] = literal[int] , identifier[cstride] = literal[int] ) identifier[ax] . identifier[plot_surface] ( identifier[Xc] , identifier[self] . identifier[dimensions] [ literal[int] ]/ literal[int] , identifier[Zc] , identifier[alpha] = literal[int] , identifier[rstride] = literal[int] , identifier[cstride] = literal[int] ) identifier[ax] . identifier[set_xlabel] ( literal[string] ) identifier[ax] . identifier[set_ylabel] ( literal[string] ) identifier[ax] . identifier[set_zlabel] ( literal[string] ) identifier[plt] . identifier[title] ( literal[string] . identifier[format] ( identifier[self] )) keyword[return] identifier[fig] , identifier[ax]
def plot(self, numPoints=100):
    """
    Specific plotting method for boxes. Only supports 3-dimensional objects.
    """
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    # generate face coordinates for the box
    x = np.linspace(-self.dimensions[0] / 2.0, self.dimensions[0] / 2.0, numPoints)
    y = np.linspace(-self.dimensions[1] / 2.0, self.dimensions[1] / 2.0, numPoints)
    z = np.linspace(-self.dimensions[2] / 2.0, self.dimensions[2] / 2.0, numPoints)
    # plot
    (Xc, Yc) = np.meshgrid(x, y)
    ax.plot_surface(Xc, Yc, -self.dimensions[2] / 2, alpha=0.2, rstride=20, cstride=10)
    ax.plot_surface(Xc, Yc, self.dimensions[2] / 2, alpha=0.2, rstride=20, cstride=10)
    (Yc, Zc) = np.meshgrid(y, z)
    ax.plot_surface(-self.dimensions[0] / 2, Yc, Zc, alpha=0.2, rstride=20, cstride=10)
    ax.plot_surface(self.dimensions[0] / 2, Yc, Zc, alpha=0.2, rstride=20, cstride=10)
    (Xc, Zc) = np.meshgrid(x, z)
    ax.plot_surface(Xc, -self.dimensions[1] / 2, Zc, alpha=0.2, rstride=20, cstride=10)
    ax.plot_surface(Xc, self.dimensions[1] / 2, Zc, alpha=0.2, rstride=20, cstride=10)
    ax.set_xlabel('X')
    ax.set_ylabel('Y')
    ax.set_zlabel('Z')
    plt.title('{}'.format(self))
    return (fig, ax)
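For reference, a minimal standalone sketch of the same six-face meshgrid technique. The `dims` tuple and 20-point grids are illustrative assumptions, and the scalar Z used above is expanded with `np.full_like`, which newer matplotlib versions require:

import numpy as np
import matplotlib.pyplot as plt

dims = (2.0, 1.0, 0.5)  # hypothetical (x, y, z) box extents
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')

x = np.linspace(-dims[0] / 2, dims[0] / 2, 20)
y = np.linspace(-dims[1] / 2, dims[1] / 2, 20)

# Top and bottom faces: constant z over an (x, y) grid.
Xc, Yc = np.meshgrid(x, y)
ax.plot_surface(Xc, Yc, np.full_like(Xc, -dims[2] / 2), alpha=0.2)
ax.plot_surface(Xc, Yc, np.full_like(Xc, dims[2] / 2), alpha=0.2)
plt.show()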
def has_length(self, value, q, strict=False): """if value has a length of q""" value = stringify(value) if value is not None: if len(value) == q: return self.shout('Value %r not matching length %r', strict, value, q)
def function[has_length, parameter[self, value, q, strict]]: constant[if value has a length of q] variable[value] assign[=] call[name[stringify], parameter[name[value]]] if compare[name[value] is_not constant[None]] begin[:] if compare[call[name[len], parameter[name[value]]] equal[==] name[q]] begin[:] return[None] call[name[self].shout, parameter[constant[Value %r not matching length %r], name[strict], name[value], name[q]]]
keyword[def] identifier[has_length] ( identifier[self] , identifier[value] , identifier[q] , identifier[strict] = keyword[False] ): literal[string] identifier[value] = identifier[stringify] ( identifier[value] ) keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[len] ( identifier[value] )== identifier[q] : keyword[return] identifier[self] . identifier[shout] ( literal[string] , identifier[strict] , identifier[value] , identifier[q] )
def has_length(self, value, q, strict=False): """if value has a length of q""" value = stringify(value) if value is not None: if len(value) == q: return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['value']] self.shout('Value %r not matching length %r', strict, value, q)
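The validator pattern above (normalize, return silently on success, otherwise report) can be exercised standalone; `stringify` and `shout` below are stand-ins, since their real implementations are not shown here:

def stringify(value):
    # Stand-in for the real helper: coerce to str, preserve None.
    return None if value is None else str(value)

def has_length(value, q):
    value = stringify(value)
    if value is not None and len(value) == q:
        return  # success: the original returns silently here
    print('Value %r not matching length %r' % (value, q))  # stand-in for shout()

has_length('abc', 3)   # passes silently
has_length('abcd', 3)  # prints the mismatch message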
def _insert_and_merge_core(self, pos, direction):
    """
    The core part of method _insert_and_merge.

    :param int pos: The starting position.
    :param str direction: If we are traversing forwards or backwards in the list. It determines where the "sort" of
                          the overlapping memory block comes from. If everything works as expected, "sort" of the
                          overlapping block is always equal to the segment occupied most recently.
    :return: A tuple of (merged (bool), new position to begin searching (int), change in total bytes (int))
    :rtype: tuple
    """

    bytes_changed = 0

    if direction == "forward":
        if pos == len(self._list) - 1:
            return False, pos, 0
        previous_segment = self._list[pos]
        previous_segment_pos = pos
        segment = self._list[pos + 1]
        segment_pos = pos + 1
    else:  # if direction == "backward":
        if pos == 0:
            return False, pos, 0
        segment = self._list[pos]
        segment_pos = pos
        previous_segment = self._list[pos - 1]
        previous_segment_pos = pos - 1

    merged = False
    new_pos = pos

    if segment.start <= previous_segment.end:
        # we should always have new_start+new_size >= segment.start
        if segment.sort == previous_segment.sort:
            # They are of the same sort - we should merge them!
            new_end = max(previous_segment.end, segment.start + segment.size)
            new_start = min(previous_segment.start, segment.start)
            new_size = new_end - new_start
            self._list[segment_pos] = Segment(new_start, new_end, segment.sort)
            self._list.pop(previous_segment_pos)
            bytes_changed = -(segment.size + previous_segment.size - new_size)

            merged = True
            new_pos = previous_segment_pos
        else:
            # Different sorts. It's a bit trickier.
            if segment.start == previous_segment.end:
                # They are adjacent. Just don't merge.
                pass
            else:
                # They are overlapping. We will create one, two, or three different blocks based on how they are
                # overlapping
                new_segments = [ ]
                if segment.start < previous_segment.start:
                    new_segments.append(Segment(segment.start, previous_segment.start, segment.sort))

                    sort = previous_segment.sort if direction == "forward" else segment.sort
                    new_segments.append(Segment(previous_segment.start, previous_segment.end, sort))

                    if segment.end < previous_segment.end:
                        new_segments.append(Segment(segment.end, previous_segment.end, previous_segment.sort))
                    elif segment.end > previous_segment.end:
                        new_segments.append(Segment(previous_segment.end, segment.end, segment.sort))
                else:  # segment.start >= previous_segment.start
                    if segment.start > previous_segment.start:
                        new_segments.append(Segment(previous_segment.start, segment.start, previous_segment.sort))
                    sort = previous_segment.sort if direction == "forward" else segment.sort
                    if segment.end > previous_segment.end:
                        new_segments.append(Segment(segment.start, previous_segment.end, sort))
                        new_segments.append(Segment(previous_segment.end, segment.end, segment.sort))
                    elif segment.end < previous_segment.end:
                        new_segments.append(Segment(segment.start, segment.end, sort))
                        new_segments.append(Segment(segment.end, previous_segment.end, previous_segment.sort))
                    else:
                        new_segments.append(Segment(segment.start, segment.end, sort))

                # merge segments in new_segments array if they are of the same sort
                i = 0
                while len(new_segments) > 1 and i < len(new_segments) - 1:
                    s0 = new_segments[i]
                    s1 = new_segments[i + 1]
                    if s0.sort == s1.sort:
                        new_segments = new_segments[ : i] + [ Segment(s0.start, s1.end, s0.sort) ] + new_segments[i + 2 : ]
                    else:
                        i += 1

                # Put new segments into self._list
                old_size = sum([ seg.size for seg in self._list[previous_segment_pos : segment_pos + 1] ])
                new_size = sum([ seg.size for seg in new_segments ])
                bytes_changed = new_size - old_size

                self._list = self._list[ : previous_segment_pos] + new_segments + self._list[ segment_pos + 1 : ]

                merged = True

                if direction == "forward":
                    new_pos = previous_segment_pos + len(new_segments)
                else:
                    new_pos = previous_segment_pos

    return merged, new_pos, bytes_changed
def function[_insert_and_merge_core, parameter[self, pos, direction]]: constant[ The core part of method _insert_and_merge. :param int pos: The starting position. :param str direction: If we are traversing forwards or backwards in the list. It determines where the "sort" of the overlapping memory block comes from. If everything works as expected, "sort" of the overlapping block is always equal to the segment occupied most recently. :return: A tuple of (merged (bool), new position to begin searching (int), change in total bytes (int) :rtype: tuple ] variable[bytes_changed] assign[=] constant[0] if compare[name[direction] equal[==] constant[forward]] begin[:] if compare[name[pos] equal[==] binary_operation[call[name[len], parameter[name[self]._list]] - constant[1]]] begin[:] return[tuple[[<ast.Constant object at 0x7da18eb55300>, <ast.Name object at 0x7da18eb56b30>, <ast.Constant object at 0x7da18eb55960>]]] variable[previous_segment] assign[=] call[name[self]._list][name[pos]] variable[previous_segment_pos] assign[=] name[pos] variable[segment] assign[=] call[name[self]._list][binary_operation[name[pos] + constant[1]]] variable[segment_pos] assign[=] binary_operation[name[pos] + constant[1]] variable[merged] assign[=] constant[False] variable[new_pos] assign[=] name[pos] if compare[name[segment].start less_or_equal[<=] name[previous_segment].end] begin[:] if compare[name[segment].sort equal[==] name[previous_segment].sort] begin[:] variable[new_end] assign[=] call[name[max], parameter[name[previous_segment].end, binary_operation[name[segment].start + name[segment].size]]] variable[new_start] assign[=] call[name[min], parameter[name[previous_segment].start, name[segment].start]] variable[new_size] assign[=] binary_operation[name[new_end] - name[new_start]] call[name[self]._list][name[segment_pos]] assign[=] call[name[Segment], parameter[name[new_start], name[new_end], name[segment].sort]] call[name[self]._list.pop, parameter[name[previous_segment_pos]]] variable[bytes_changed] assign[=] <ast.UnaryOp object at 0x7da20e957fa0> variable[merged] assign[=] constant[True] variable[new_pos] assign[=] name[previous_segment_pos] return[tuple[[<ast.Name object at 0x7da18c4ceec0>, <ast.Name object at 0x7da18c4ce020>, <ast.Name object at 0x7da18c4cec20>]]]
keyword[def] identifier[_insert_and_merge_core] ( identifier[self] , identifier[pos] , identifier[direction] ): literal[string] identifier[bytes_changed] = literal[int] keyword[if] identifier[direction] == literal[string] : keyword[if] identifier[pos] == identifier[len] ( identifier[self] . identifier[_list] )- literal[int] : keyword[return] keyword[False] , identifier[pos] , literal[int] identifier[previous_segment] = identifier[self] . identifier[_list] [ identifier[pos] ] identifier[previous_segment_pos] = identifier[pos] identifier[segment] = identifier[self] . identifier[_list] [ identifier[pos] + literal[int] ] identifier[segment_pos] = identifier[pos] + literal[int] keyword[else] : keyword[if] identifier[pos] == literal[int] : keyword[return] keyword[False] , identifier[pos] , literal[int] identifier[segment] = identifier[self] . identifier[_list] [ identifier[pos] ] identifier[segment_pos] = identifier[pos] identifier[previous_segment] = identifier[self] . identifier[_list] [ identifier[pos] - literal[int] ] identifier[previous_segment_pos] = identifier[pos] - literal[int] identifier[merged] = keyword[False] identifier[new_pos] = identifier[pos] keyword[if] identifier[segment] . identifier[start] <= identifier[previous_segment] . identifier[end] : keyword[if] identifier[segment] . identifier[sort] == identifier[previous_segment] . identifier[sort] : identifier[new_end] = identifier[max] ( identifier[previous_segment] . identifier[end] , identifier[segment] . identifier[start] + identifier[segment] . identifier[size] ) identifier[new_start] = identifier[min] ( identifier[previous_segment] . identifier[start] , identifier[segment] . identifier[start] ) identifier[new_size] = identifier[new_end] - identifier[new_start] identifier[self] . identifier[_list] [ identifier[segment_pos] ]= identifier[Segment] ( identifier[new_start] , identifier[new_end] , identifier[segment] . identifier[sort] ) identifier[self] . identifier[_list] . identifier[pop] ( identifier[previous_segment_pos] ) identifier[bytes_changed] =-( identifier[segment] . identifier[size] + identifier[previous_segment] . identifier[size] - identifier[new_size] ) identifier[merged] = keyword[True] identifier[new_pos] = identifier[previous_segment_pos] keyword[else] : keyword[if] identifier[segment] . identifier[start] == identifier[previous_segment] . identifier[end] : keyword[pass] keyword[else] : identifier[new_segments] =[] keyword[if] identifier[segment] . identifier[start] < identifier[previous_segment] . identifier[start] : identifier[new_segments] . identifier[append] ( identifier[Segment] ( identifier[segment] . identifier[start] , identifier[previous_segment] . identifier[start] , identifier[segment] . identifier[sort] )) identifier[sort] = identifier[previous_segment] . identifier[sort] keyword[if] identifier[direction] == literal[string] keyword[else] identifier[segment] . identifier[sort] identifier[new_segments] . identifier[append] ( identifier[Segment] ( identifier[previous_segment] . identifier[start] , identifier[previous_segment] . identifier[end] , identifier[sort] )) keyword[if] identifier[segment] . identifier[end] < identifier[previous_segment] . identifier[end] : identifier[new_segments] . identifier[append] ( identifier[Segment] ( identifier[segment] . identifier[end] , identifier[previous_segment] . identifier[end] , identifier[previous_segment] . identifier[sort] )) keyword[elif] identifier[segment] . identifier[end] > identifier[previous_segment] . identifier[end] : identifier[new_segments] . 
identifier[append] ( identifier[Segment] ( identifier[previous_segment] . identifier[end] , identifier[segment] . identifier[end] , identifier[segment] . identifier[sort] )) keyword[else] : keyword[if] identifier[segment] . identifier[start] > identifier[previous_segment] . identifier[start] : identifier[new_segments] . identifier[append] ( identifier[Segment] ( identifier[previous_segment] . identifier[start] , identifier[segment] . identifier[start] , identifier[previous_segment] . identifier[sort] )) identifier[sort] = identifier[previous_segment] . identifier[sort] keyword[if] identifier[direction] == literal[string] keyword[else] identifier[segment] . identifier[sort] keyword[if] identifier[segment] . identifier[end] > identifier[previous_segment] . identifier[end] : identifier[new_segments] . identifier[append] ( identifier[Segment] ( identifier[segment] . identifier[start] , identifier[previous_segment] . identifier[end] , identifier[sort] )) identifier[new_segments] . identifier[append] ( identifier[Segment] ( identifier[previous_segment] . identifier[end] , identifier[segment] . identifier[end] , identifier[segment] . identifier[sort] )) keyword[elif] identifier[segment] . identifier[end] < identifier[previous_segment] . identifier[end] : identifier[new_segments] . identifier[append] ( identifier[Segment] ( identifier[segment] . identifier[start] , identifier[segment] . identifier[end] , identifier[sort] )) identifier[new_segments] . identifier[append] ( identifier[Segment] ( identifier[segment] . identifier[end] , identifier[previous_segment] . identifier[end] , identifier[previous_segment] . identifier[sort] )) keyword[else] : identifier[new_segments] . identifier[append] ( identifier[Segment] ( identifier[segment] . identifier[start] , identifier[segment] . identifier[end] , identifier[sort] )) identifier[i] = literal[int] keyword[while] identifier[len] ( identifier[new_segments] )> literal[int] keyword[and] identifier[i] < identifier[len] ( identifier[new_segments] )- literal[int] : identifier[s0] = identifier[new_segments] [ identifier[i] ] identifier[s1] = identifier[new_segments] [ identifier[i] + literal[int] ] keyword[if] identifier[s0] . identifier[sort] == identifier[s1] . identifier[sort] : identifier[new_segments] = identifier[new_segments] [: identifier[i] ]+[ identifier[Segment] ( identifier[s0] . identifier[start] , identifier[s1] . identifier[end] , identifier[s0] . identifier[sort] )]+ identifier[new_segments] [ identifier[i] + literal[int] :] keyword[else] : identifier[i] += literal[int] identifier[old_size] = identifier[sum] ([ identifier[seg] . identifier[size] keyword[for] identifier[seg] keyword[in] identifier[self] . identifier[_list] [ identifier[previous_segment_pos] : identifier[segment_pos] + literal[int] ]]) identifier[new_size] = identifier[sum] ([ identifier[seg] . identifier[size] keyword[for] identifier[seg] keyword[in] identifier[new_segments] ]) identifier[bytes_changed] = identifier[new_size] - identifier[old_size] identifier[self] . identifier[_list] = identifier[self] . identifier[_list] [: identifier[previous_segment_pos] ]+ identifier[new_segments] + identifier[self] . 
identifier[_list] [ identifier[segment_pos] + literal[int] :] identifier[merged] = keyword[True] keyword[if] identifier[direction] == literal[string] : identifier[new_pos] = identifier[previous_segment_pos] + identifier[len] ( identifier[new_segments] ) keyword[else] : identifier[new_pos] = identifier[previous_segment_pos] keyword[return] identifier[merged] , identifier[new_pos] , identifier[bytes_changed]
def _insert_and_merge_core(self, pos, direction):
    """
    The core part of method _insert_and_merge.

    :param int pos: The starting position.
    :param str direction: If we are traversing forwards or backwards in the list. It determines where the "sort" of
                          the overlapping memory block comes from. If everything works as expected, "sort" of the
                          overlapping block is always equal to the segment occupied most recently.
    :return: A tuple of (merged (bool), new position to begin searching (int), change in total bytes (int))
    :rtype: tuple
    """
    bytes_changed = 0
    if direction == 'forward':
        if pos == len(self._list) - 1:
            return (False, pos, 0) # depends on [control=['if'], data=['pos']]
        previous_segment = self._list[pos]
        previous_segment_pos = pos
        segment = self._list[pos + 1]
        segment_pos = pos + 1 # depends on [control=['if'], data=[]]
    else:
        # if direction == "backward":
        if pos == 0:
            return (False, pos, 0) # depends on [control=['if'], data=['pos']]
        segment = self._list[pos]
        segment_pos = pos
        previous_segment = self._list[pos - 1]
        previous_segment_pos = pos - 1
    merged = False
    new_pos = pos
    if segment.start <= previous_segment.end:
        # we should always have new_start+new_size >= segment.start
        if segment.sort == previous_segment.sort:
            # They are of the same sort - we should merge them!
            new_end = max(previous_segment.end, segment.start + segment.size)
            new_start = min(previous_segment.start, segment.start)
            new_size = new_end - new_start
            self._list[segment_pos] = Segment(new_start, new_end, segment.sort)
            self._list.pop(previous_segment_pos)
            bytes_changed = -(segment.size + previous_segment.size - new_size)
            merged = True
            new_pos = previous_segment_pos # depends on [control=['if'], data=[]]
        # Different sorts. It's a bit trickier.
        elif segment.start == previous_segment.end:
            # They are adjacent. Just don't merge.
            pass # depends on [control=['if'], data=[]]
        else:
            # They are overlapping. We will create one, two, or three different blocks based on how they are
            # overlapping
            new_segments = []
            if segment.start < previous_segment.start:
                new_segments.append(Segment(segment.start, previous_segment.start, segment.sort))
                sort = previous_segment.sort if direction == 'forward' else segment.sort
                new_segments.append(Segment(previous_segment.start, previous_segment.end, sort))
                if segment.end < previous_segment.end:
                    new_segments.append(Segment(segment.end, previous_segment.end, previous_segment.sort)) # depends on [control=['if'], data=[]]
                elif segment.end > previous_segment.end:
                    new_segments.append(Segment(previous_segment.end, segment.end, segment.sort)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
            else:
                # segment.start >= previous_segment.start
                if segment.start > previous_segment.start:
                    new_segments.append(Segment(previous_segment.start, segment.start, previous_segment.sort)) # depends on [control=['if'], data=[]]
                sort = previous_segment.sort if direction == 'forward' else segment.sort
                if segment.end > previous_segment.end:
                    new_segments.append(Segment(segment.start, previous_segment.end, sort))
                    new_segments.append(Segment(previous_segment.end, segment.end, segment.sort)) # depends on [control=['if'], data=[]]
                elif segment.end < previous_segment.end:
                    new_segments.append(Segment(segment.start, segment.end, sort))
                    new_segments.append(Segment(segment.end, previous_segment.end, previous_segment.sort)) # depends on [control=['if'], data=[]]
                else:
                    new_segments.append(Segment(segment.start, segment.end, sort))
            # merge segments in new_segments array if they are of the same sort
            i = 0
            while len(new_segments) > 1 and i < len(new_segments) - 1:
                s0 = new_segments[i]
                s1 = new_segments[i + 1]
                if s0.sort == s1.sort:
                    new_segments = new_segments[:i] + [Segment(s0.start, s1.end, s0.sort)] + new_segments[i + 2:] # depends on [control=['if'], data=[]]
                else:
                    i += 1 # depends on [control=['while'], data=[]]
            # Put new segments into self._list
            old_size = sum([seg.size for seg in self._list[previous_segment_pos:segment_pos + 1]])
            new_size = sum([seg.size for seg in new_segments])
            bytes_changed = new_size - old_size
            self._list = self._list[:previous_segment_pos] + new_segments + self._list[segment_pos + 1:]
            merged = True
            if direction == 'forward':
                new_pos = previous_segment_pos + len(new_segments) # depends on [control=['if'], data=[]]
            else:
                new_pos = previous_segment_pos # depends on [control=['if'], data=[]]
    return (merged, new_pos, bytes_changed)
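The same-sort branch of the merge reduces to taking the union of two overlapping segments; a minimal illustration with a namedtuple stand-in for `Segment` (the real class also carries a derived `size`):

from collections import namedtuple

Segment = namedtuple('Segment', 'start end sort')  # minimal stand-in

def merge_same_sort(prev, seg):
    # Union of two overlapping segments of the same sort.
    assert prev.sort == seg.sort and seg.start <= prev.end
    return Segment(min(prev.start, seg.start), max(prev.end, seg.end), prev.sort)

print(merge_same_sort(Segment(0, 10, 'code'), Segment(8, 16, 'code')))
# Segment(start=0, end=16, sort='code')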
def _set_destination(self, v, load=False): """ Setter method for destination, mapped from YANG variable /openflow_global/openflow/mirror/destination (container) If this variable is read-only (config: false) in the source YANG file, then _set_destination is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_destination() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=destination.destination, is_container='container', presence=False, yang_name="destination", rest_name="destination", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Mirror destination interface', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-openflow', defining_module='brocade-openflow', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """destination must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=destination.destination, is_container='container', presence=False, yang_name="destination", rest_name="destination", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Mirror destination interface', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-openflow', defining_module='brocade-openflow', yang_type='container', is_config=True)""", }) self.__destination = t if hasattr(self, '_set'): self._set()
def function[_set_destination, parameter[self, v, load]]: constant[ Setter method for destination, mapped from YANG variable /openflow_global/openflow/mirror/destination (container) If this variable is read-only (config: false) in the source YANG file, then _set_destination is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_destination() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da18f811c00> name[self].__destination assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_destination] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[destination] . identifier[destination] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__destination] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_destination(self, v, load=False): """ Setter method for destination, mapped from YANG variable /openflow_global/openflow/mirror/destination (container) If this variable is read-only (config: false) in the source YANG file, then _set_destination is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_destination() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=destination.destination, is_container='container', presence=False, yang_name='destination', rest_name='destination', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Mirror destination interface', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-openflow', defining_module='brocade-openflow', yang_type='container', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'destination must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=destination.destination, is_container=\'container\', presence=False, yang_name="destination", rest_name="destination", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Mirror destination interface\', u\'cli-incomplete-no\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-openflow\', defining_module=\'brocade-openflow\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__destination = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
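Stripped of the generated YANG machinery, the setter follows a validate / re-raise-with-context / assign-to-mangled-attribute pattern; a generic sketch of that pattern (all names below are illustrative, not pyangbind's API):

class Node(object):
    def _set_destination(self, v):
        try:
            t = dict(v)  # stand-in for the YANGDynClass(...) construction
        except (TypeError, ValueError):
            # Re-raise with a descriptive payload, as the generated code does.
            raise ValueError({'error-string': 'destination must be of a type compatible with container'})
        self.__destination = t  # name-mangled private attribute, as above

node = Node()
node._set_destination([('interface', 'eth0')])  # validates, then assigns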
def save_offsets(
    cls,
    consumer_offsets_metadata,
    topics_dict,
    json_file,
    groupid,
):
    """Build offsets for given topic-partitions in required format from current
    offsets metadata and write to given json-file.

    :param consumer_offsets_metadata: Fetched consumer offsets from kafka.
    :param topics_dict: Dictionary of topic-partitions.
    :param json_file: Filename to store consumer-offsets.
    :param groupid: Current consumer-group.
    """
    # Build consumer-offset data in desired format
    current_consumer_offsets = defaultdict(dict)
    for topic, topic_offsets in six.iteritems(consumer_offsets_metadata):
        for partition_offset in topic_offsets:
            current_consumer_offsets[topic][partition_offset.partition] = \
                partition_offset.current
    consumer_offsets_data = {'groupid': groupid, 'offsets': current_consumer_offsets}

    cls.write_offsets_to_file(json_file, consumer_offsets_data)
def function[save_offsets, parameter[cls, consumer_offsets_metadata, topics_dict, json_file, groupid]]: constant[Built offsets for given topic-partitions in required format from current offsets metadata and write to given json-file. :param consumer_offsets_metadata: Fetched consumer offsets from kafka. :param topics_dict: Dictionary of topic-partitions. :param json_file: Filename to store consumer-offsets. :param groupid: Current consumer-group. ] variable[current_consumer_offsets] assign[=] call[name[defaultdict], parameter[name[dict]]] for taget[tuple[[<ast.Name object at 0x7da1b079af80>, <ast.Name object at 0x7da1b0799f30>]]] in starred[call[name[six].iteritems, parameter[name[consumer_offsets_metadata]]]] begin[:] for taget[name[partition_offset]] in starred[name[topic_offsets]] begin[:] call[call[name[current_consumer_offsets]][name[topic]]][name[partition_offset].partition] assign[=] name[partition_offset].current variable[consumer_offsets_data] assign[=] dictionary[[<ast.Constant object at 0x7da1b079af50>, <ast.Constant object at 0x7da1b079a560>], [<ast.Name object at 0x7da1b07995a0>, <ast.Name object at 0x7da1b079a410>]] call[name[cls].write_offsets_to_file, parameter[name[json_file], name[consumer_offsets_data]]]
keyword[def] identifier[save_offsets] ( identifier[cls] , identifier[consumer_offsets_metadata] , identifier[topics_dict] , identifier[json_file] , identifier[groupid] , ): literal[string] identifier[current_consumer_offsets] = identifier[defaultdict] ( identifier[dict] ) keyword[for] identifier[topic] , identifier[topic_offsets] keyword[in] identifier[six] . identifier[iteritems] ( identifier[consumer_offsets_metadata] ): keyword[for] identifier[partition_offset] keyword[in] identifier[topic_offsets] : identifier[current_consumer_offsets] [ identifier[topic] ][ identifier[partition_offset] . identifier[partition] ]= identifier[partition_offset] . identifier[current] identifier[consumer_offsets_data] ={ literal[string] : identifier[groupid] , literal[string] : identifier[current_consumer_offsets] } identifier[cls] . identifier[write_offsets_to_file] ( identifier[json_file] , identifier[consumer_offsets_data] )
def save_offsets(cls, consumer_offsets_metadata, topics_dict, json_file, groupid):
    """Build offsets for given topic-partitions in required format from current
    offsets metadata and write to given json-file.

    :param consumer_offsets_metadata: Fetched consumer offsets from kafka.
    :param topics_dict: Dictionary of topic-partitions.
    :param json_file: Filename to store consumer-offsets.
    :param groupid: Current consumer-group.
    """
    # Build consumer-offset data in desired format
    current_consumer_offsets = defaultdict(dict)
    for (topic, topic_offsets) in six.iteritems(consumer_offsets_metadata):
        for partition_offset in topic_offsets:
            current_consumer_offsets[topic][partition_offset.partition] = partition_offset.current # depends on [control=['for'], data=['partition_offset']] # depends on [control=['for'], data=[]]
    consumer_offsets_data = {'groupid': groupid, 'offsets': current_consumer_offsets}
    cls.write_offsets_to_file(json_file, consumer_offsets_data)
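The core trick is `defaultdict(dict)`: each topic key can be populated without an existence check. A self-contained run, with a hypothetical `PartitionOffset` record standing in for the Kafka metadata:

from collections import defaultdict, namedtuple

PartitionOffset = namedtuple('PartitionOffset', 'partition current')  # hypothetical
metadata = {'topic-a': [PartitionOffset(0, 10), PartitionOffset(1, 42)]}

current_consumer_offsets = defaultdict(dict)
for topic, topic_offsets in metadata.items():
    for po in topic_offsets:
        current_consumer_offsets[topic][po.partition] = po.current

print({'groupid': 'demo-group', 'offsets': dict(current_consumer_offsets)})
# {'groupid': 'demo-group', 'offsets': {'topic-a': {0: 10, 1: 42}}}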
def list(self, root=False, **kwargs): """Return a list of groups. =====API DOCS===== Retrieve a list of groups. :param root: Flag that if set, only root groups of a specific inventory will be listed. :type root: bool :param parent: Primary key or name of the group whose child groups will be listed. :type parent: str :param all_pages: Flag that if set, collect all pages of content from the API when returning results. :type all_pages: bool :param page: The page to show. Ignored if all_pages is set. :type page: int :param query: Contains 2-tuples used as query parameters to filter resulting resource objects. :type query: list :param `**kwargs`: Keyword arguments list of available fields used for searching resource objects. :returns: A JSON object containing details of all resource objects returned by Tower backend. :rtype: dict :raises tower_cli.exceptions.UsageError: When ``root`` flag is on and ``inventory`` is not present in ``**kwargs``. =====API DOCS===== """ # Option to list children of a parent group if kwargs.get('parent', None): self.set_child_endpoint(parent=kwargs['parent'], inventory=kwargs.get('inventory', None)) kwargs.pop('parent') # Sanity check: If we got `--root` and no inventory, that's an error. if root and not kwargs.get('inventory', None): raise exc.UsageError('The --root option requires specifying an inventory also.') # If we are tasked with getting root groups, do that. if root: inventory_id = kwargs['inventory'] r = client.get('/inventories/%d/root_groups/' % inventory_id) return r.json() # Return the superclass implementation. return super(Resource, self).list(**kwargs)
def function[list, parameter[self, root]]: constant[Return a list of groups. =====API DOCS===== Retrieve a list of groups. :param root: Flag that if set, only root groups of a specific inventory will be listed. :type root: bool :param parent: Primary key or name of the group whose child groups will be listed. :type parent: str :param all_pages: Flag that if set, collect all pages of content from the API when returning results. :type all_pages: bool :param page: The page to show. Ignored if all_pages is set. :type page: int :param query: Contains 2-tuples used as query parameters to filter resulting resource objects. :type query: list :param `**kwargs`: Keyword arguments list of available fields used for searching resource objects. :returns: A JSON object containing details of all resource objects returned by Tower backend. :rtype: dict :raises tower_cli.exceptions.UsageError: When ``root`` flag is on and ``inventory`` is not present in ``**kwargs``. =====API DOCS===== ] if call[name[kwargs].get, parameter[constant[parent], constant[None]]] begin[:] call[name[self].set_child_endpoint, parameter[]] call[name[kwargs].pop, parameter[constant[parent]]] if <ast.BoolOp object at 0x7da1b000d780> begin[:] <ast.Raise object at 0x7da18f812b90> if name[root] begin[:] variable[inventory_id] assign[=] call[name[kwargs]][constant[inventory]] variable[r] assign[=] call[name[client].get, parameter[binary_operation[constant[/inventories/%d/root_groups/] <ast.Mod object at 0x7da2590d6920> name[inventory_id]]]] return[call[name[r].json, parameter[]]] return[call[call[name[super], parameter[name[Resource], name[self]]].list, parameter[]]]
keyword[def] identifier[list] ( identifier[self] , identifier[root] = keyword[False] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ): identifier[self] . identifier[set_child_endpoint] ( identifier[parent] = identifier[kwargs] [ literal[string] ], identifier[inventory] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )) identifier[kwargs] . identifier[pop] ( literal[string] ) keyword[if] identifier[root] keyword[and] keyword[not] identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ): keyword[raise] identifier[exc] . identifier[UsageError] ( literal[string] ) keyword[if] identifier[root] : identifier[inventory_id] = identifier[kwargs] [ literal[string] ] identifier[r] = identifier[client] . identifier[get] ( literal[string] % identifier[inventory_id] ) keyword[return] identifier[r] . identifier[json] () keyword[return] identifier[super] ( identifier[Resource] , identifier[self] ). identifier[list] (** identifier[kwargs] )
def list(self, root=False, **kwargs): """Return a list of groups. =====API DOCS===== Retrieve a list of groups. :param root: Flag that if set, only root groups of a specific inventory will be listed. :type root: bool :param parent: Primary key or name of the group whose child groups will be listed. :type parent: str :param all_pages: Flag that if set, collect all pages of content from the API when returning results. :type all_pages: bool :param page: The page to show. Ignored if all_pages is set. :type page: int :param query: Contains 2-tuples used as query parameters to filter resulting resource objects. :type query: list :param `**kwargs`: Keyword arguments list of available fields used for searching resource objects. :returns: A JSON object containing details of all resource objects returned by Tower backend. :rtype: dict :raises tower_cli.exceptions.UsageError: When ``root`` flag is on and ``inventory`` is not present in ``**kwargs``. =====API DOCS===== """ # Option to list children of a parent group if kwargs.get('parent', None): self.set_child_endpoint(parent=kwargs['parent'], inventory=kwargs.get('inventory', None)) kwargs.pop('parent') # depends on [control=['if'], data=[]] # Sanity check: If we got `--root` and no inventory, that's an error. if root and (not kwargs.get('inventory', None)): raise exc.UsageError('The --root option requires specifying an inventory also.') # depends on [control=['if'], data=[]] # If we are tasked with getting root groups, do that. if root: inventory_id = kwargs['inventory'] r = client.get('/inventories/%d/root_groups/' % inventory_id) return r.json() # depends on [control=['if'], data=[]] # Return the superclass implementation. return super(Resource, self).list(**kwargs)
def color(self, color): """ Set group color. Color is set on a best-effort basis. :param color: RGB color tuple. """ if color == RGB_WHITE: self.white() return self._color = color self.hue = hue_of_color(color)
def function[color, parameter[self, color]]: constant[ Set group color. Color is set on a best-effort basis. :param color: RGB color tuple. ] if compare[name[color] equal[==] name[RGB_WHITE]] begin[:] call[name[self].white, parameter[]] return[None] name[self]._color assign[=] name[color] name[self].hue assign[=] call[name[hue_of_color], parameter[name[color]]]
keyword[def] identifier[color] ( identifier[self] , identifier[color] ): literal[string] keyword[if] identifier[color] == identifier[RGB_WHITE] : identifier[self] . identifier[white] () keyword[return] identifier[self] . identifier[_color] = identifier[color] identifier[self] . identifier[hue] = identifier[hue_of_color] ( identifier[color] )
def color(self, color): """ Set group color. Color is set on a best-effort basis. :param color: RGB color tuple. """ if color == RGB_WHITE: self.white() return # depends on [control=['if'], data=[]] self._color = color self.hue = hue_of_color(color)
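`hue_of_color` is not shown; a plausible stdlib implementation (an assumption, built on `colorsys`) makes the white shortcut and hue assignment concrete:

import colorsys

RGB_WHITE = (255, 255, 255)

def hue_of_color(color):
    # Assumed helper: hue in [0, 1) from an 8-bit RGB tuple.
    r, g, b = (c / 255.0 for c in color)
    return colorsys.rgb_to_hsv(r, g, b)[0]

print(hue_of_color((255, 0, 0)))  # 0.0 (red)
print(hue_of_color((0, 255, 0)))  # ~0.333 (green)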
def calc_log_size(request, calc_id): """ Get the current number of lines in the log """ try: response_data = logs.dbcmd('get_log_size', calc_id) except dbapi.NotFound: return HttpResponseNotFound() return HttpResponse(content=json.dumps(response_data), content_type=JSON)
def function[calc_log_size, parameter[request, calc_id]]: constant[ Get the current number of lines in the log ] <ast.Try object at 0x7da1b138e320> return[call[name[HttpResponse], parameter[]]]
keyword[def] identifier[calc_log_size] ( identifier[request] , identifier[calc_id] ): literal[string] keyword[try] : identifier[response_data] = identifier[logs] . identifier[dbcmd] ( literal[string] , identifier[calc_id] ) keyword[except] identifier[dbapi] . identifier[NotFound] : keyword[return] identifier[HttpResponseNotFound] () keyword[return] identifier[HttpResponse] ( identifier[content] = identifier[json] . identifier[dumps] ( identifier[response_data] ), identifier[content_type] = identifier[JSON] )
def calc_log_size(request, calc_id): """ Get the current number of lines in the log """ try: response_data = logs.dbcmd('get_log_size', calc_id) # depends on [control=['try'], data=[]] except dbapi.NotFound: return HttpResponseNotFound() # depends on [control=['except'], data=[]] return HttpResponse(content=json.dumps(response_data), content_type=JSON)
def dispatch(self, method_frame): ''' Dispatch a method for this protocol. ''' method = self.dispatch_map.get(method_frame.method_id) if method: callback = self.channel.clear_synchronous_cb(method) callback(method_frame) else: raise self.InvalidMethod( "no method is registered with id: %d" % method_frame.method_id)
def function[dispatch, parameter[self, method_frame]]: constant[ Dispatch a method for this protocol. ] variable[method] assign[=] call[name[self].dispatch_map.get, parameter[name[method_frame].method_id]] if name[method] begin[:] variable[callback] assign[=] call[name[self].channel.clear_synchronous_cb, parameter[name[method]]] call[name[callback], parameter[name[method_frame]]]
keyword[def] identifier[dispatch] ( identifier[self] , identifier[method_frame] ): literal[string] identifier[method] = identifier[self] . identifier[dispatch_map] . identifier[get] ( identifier[method_frame] . identifier[method_id] ) keyword[if] identifier[method] : identifier[callback] = identifier[self] . identifier[channel] . identifier[clear_synchronous_cb] ( identifier[method] ) identifier[callback] ( identifier[method_frame] ) keyword[else] : keyword[raise] identifier[self] . identifier[InvalidMethod] ( literal[string] % identifier[method_frame] . identifier[method_id] )
def dispatch(self, method_frame): """ Dispatch a method for this protocol. """ method = self.dispatch_map.get(method_frame.method_id) if method: callback = self.channel.clear_synchronous_cb(method) callback(method_frame) # depends on [control=['if'], data=[]] else: raise self.InvalidMethod('no method is registered with id: %d' % method_frame.method_id)
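The dispatch-map idea is easy to demo outside the AMQP context; the frame and handler below are stand-ins:

class InvalidMethod(Exception):
    pass

dispatch_map = {11: lambda frame: print('handling open-ok for', frame)}

def dispatch(method_id, frame='<frame>'):
    method = dispatch_map.get(method_id)
    if method is None:
        raise InvalidMethod('no method is registered with id: %d' % method_id)
    method(frame)

dispatch(11)    # handling open-ok for <frame>
# dispatch(99)  # would raise InvalidMethod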
def gen_paula_etree(paula_id): """ creates an element tree representation of an empty PAULA XML file. """ E = ElementMaker(nsmap=NSMAP) tree = E('paula', version='1.1') tree.append(E('header', paula_id=paula_id)) return E, tree
def function[gen_paula_etree, parameter[paula_id]]: constant[ creates an element tree representation of an empty PAULA XML file. ] variable[E] assign[=] call[name[ElementMaker], parameter[]] variable[tree] assign[=] call[name[E], parameter[constant[paula]]] call[name[tree].append, parameter[call[name[E], parameter[constant[header]]]]] return[tuple[[<ast.Name object at 0x7da20c795240>, <ast.Name object at 0x7da20c794460>]]]
keyword[def] identifier[gen_paula_etree] ( identifier[paula_id] ): literal[string] identifier[E] = identifier[ElementMaker] ( identifier[nsmap] = identifier[NSMAP] ) identifier[tree] = identifier[E] ( literal[string] , identifier[version] = literal[string] ) identifier[tree] . identifier[append] ( identifier[E] ( literal[string] , identifier[paula_id] = identifier[paula_id] )) keyword[return] identifier[E] , identifier[tree]
def gen_paula_etree(paula_id): """ creates an element tree representation of an empty PAULA XML file. """ E = ElementMaker(nsmap=NSMAP) tree = E('paula', version='1.1') tree.append(E('header', paula_id=paula_id)) return (E, tree)
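With lxml installed this pattern runs as-is; the `NSMAP` value below is an assumption, included only to make the ElementMaker call shape concrete:

from lxml import etree
from lxml.builder import ElementMaker

NSMAP = {'xlink': 'http://www.w3.org/1999/xlink'}  # assumed namespace map

E = ElementMaker(nsmap=NSMAP)
tree = E('paula', version='1.1')
tree.append(E('header', paula_id='demo.tok'))
print(etree.tostring(tree, pretty_print=True).decode())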
def get_lr(self, step, nowarn=False):
    """
    :param step: which of t_total steps we're on
    :param nowarn: set to True to suppress warning regarding training beyond specified 't_total' steps
    :return: learning rate multiplier for current update
    """
    if self.t_total < 0:
        return 1.
    progress = float(step) / self.t_total
    ret = self.get_lr_(progress)
    # warning for exceeding t_total (only active with warmup_linear)
    if not nowarn and self.warn_t_total and progress > 1. and progress > self.warned_for_t_total_at_progress:
        logger.warning(
            "Training beyond specified 't_total'. Learning rate multiplier set to {}. Please set 't_total' of {} correctly."
            .format(ret, self.__class__.__name__))
        self.warned_for_t_total_at_progress = progress
    # end warning
    return ret
def function[get_lr, parameter[self, step, nowarn]]: constant[ :param step: which of t_total steps we're on :param nowarn: set to True to suppress warning regarding training beyond specified 't_total' steps :return: learning rate multiplier for current update ] if compare[name[self].t_total less[<] constant[0]] begin[:] return[constant[1.0]] variable[progress] assign[=] binary_operation[call[name[float], parameter[name[step]]] / name[self].t_total] variable[ret] assign[=] call[name[self].get_lr_, parameter[name[progress]]] if <ast.BoolOp object at 0x7da1b2344d30> begin[:] call[name[logger].warning, parameter[call[constant[Training beyond specified 't_total'. Learning rate multiplier set to {}. Please set 't_total' of {} correctly.].format, parameter[name[ret], name[self].__class__.__name__]]]] name[self].warned_for_t_total_at_progress assign[=] name[progress] return[name[ret]]
keyword[def] identifier[get_lr] ( identifier[self] , identifier[step] , identifier[nowarn] = keyword[False] ): literal[string] keyword[if] identifier[self] . identifier[t_total] < literal[int] : keyword[return] literal[int] identifier[progress] = identifier[float] ( identifier[step] )/ identifier[self] . identifier[t_total] identifier[ret] = identifier[self] . identifier[get_lr_] ( identifier[progress] ) keyword[if] keyword[not] identifier[nowarn] keyword[and] identifier[self] . identifier[warn_t_total] keyword[and] identifier[progress] > literal[int] keyword[and] identifier[progress] > identifier[self] . identifier[warned_for_t_total_at_progress] : identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[ret] , identifier[self] . identifier[__class__] . identifier[__name__] )) identifier[self] . identifier[warned_for_t_total_at_progress] = identifier[progress] keyword[return] identifier[ret]
def get_lr(self, step, nowarn=False):
    """
    :param step: which of t_total steps we're on
    :param nowarn: set to True to suppress warning regarding training beyond specified 't_total' steps
    :return: learning rate multiplier for current update
    """
    if self.t_total < 0:
        return 1.0 # depends on [control=['if'], data=[]]
    progress = float(step) / self.t_total
    ret = self.get_lr_(progress)
    # warning for exceeding t_total (only active with warmup_linear)
    if not nowarn and self.warn_t_total and (progress > 1.0) and (progress > self.warned_for_t_total_at_progress):
        logger.warning("Training beyond specified 't_total'. Learning rate multiplier set to {}. Please set 't_total' of {} correctly.".format(ret, self.__class__.__name__))
        self.warned_for_t_total_at_progress = progress # depends on [control=['if'], data=[]]
    # end warning
    return ret
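A `get_lr_` consistent with this contract maps progress in [0, 1] to a multiplier; a warmup-linear sketch (the 0.1 warmup fraction is illustrative):

def warmup_linear(progress, warmup=0.1):
    # Rise linearly to 1.0 over the warmup fraction, then decay linearly to 0.0.
    if progress < warmup:
        return progress / warmup
    return max((progress - 1.0) / (warmup - 1.0), 0.0)

t_total = 1000
for step in (0, 50, 100, 550, 1000):
    print(step, round(warmup_linear(step / t_total), 3))
# 0 0.0, 50 0.5, 100 1.0, 550 0.5, 1000 0.0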
def requires_conversion(cls, fileset, file_format):
    """Checks whether the fileset matches the requested file format"""
    if file_format is None:
        return False
    try:
        fileset_format = fileset.format
    except AttributeError:
        return False  # Field input
    else:
        return (file_format != fileset_format)
def function[requires_conversion, parameter[cls, fileset, file_format]]: constant[Checks whether the fileset matches the requested file format] if compare[name[file_format] is constant[None]] begin[:] return[constant[False]] <ast.Try object at 0x7da1b18bb040>
keyword[def] identifier[requires_conversion] ( identifier[cls] , identifier[fileset] , identifier[file_format] ): literal[string] keyword[if] identifier[file_format] keyword[is] keyword[None] : keyword[return] keyword[False] keyword[try] : identifier[filset_format] = identifier[fileset] . identifier[format] keyword[except] identifier[AttributeError] : keyword[return] keyword[False] keyword[else] : keyword[return] ( identifier[file_format] != identifier[filset_format] )
def requires_conversion(cls, fileset, file_format):
    """Checks whether the fileset matches the requested file format"""
    if file_format is None:
        return False # depends on [control=['if'], data=[]]
    try:
        fileset_format = fileset.format # depends on [control=['try'], data=[]]
    except AttributeError:
        return False  # Field input # depends on [control=['except'], data=[]]
    else:
        return file_format != fileset_format
def get_pos_hint_y(poshints, sizehinty): """Return ``poshints['y']`` if available, or its computed equivalent otherwise. """ if 'y' in poshints: return poshints['y'] elif sizehinty is not None: if 'center_y' in poshints: return ( poshints['center_y'] - sizehinty / 2 ) elif 'top' in poshints: return ( poshints['top'] - sizehinty )
def function[get_pos_hint_y, parameter[poshints, sizehinty]]: constant[Return ``poshints['y']`` if available, or its computed equivalent otherwise. ] if compare[constant[y] in name[poshints]] begin[:] return[call[name[poshints]][constant[y]]]
keyword[def] identifier[get_pos_hint_y] ( identifier[poshints] , identifier[sizehinty] ): literal[string] keyword[if] literal[string] keyword[in] identifier[poshints] : keyword[return] identifier[poshints] [ literal[string] ] keyword[elif] identifier[sizehinty] keyword[is] keyword[not] keyword[None] : keyword[if] literal[string] keyword[in] identifier[poshints] : keyword[return] ( identifier[poshints] [ literal[string] ]- identifier[sizehinty] / literal[int] ) keyword[elif] literal[string] keyword[in] identifier[poshints] : keyword[return] ( identifier[poshints] [ literal[string] ]- identifier[sizehinty] )
def get_pos_hint_y(poshints, sizehinty): """Return ``poshints['y']`` if available, or its computed equivalent otherwise. """ if 'y' in poshints: return poshints['y'] # depends on [control=['if'], data=['poshints']] elif sizehinty is not None: if 'center_y' in poshints: return poshints['center_y'] - sizehinty / 2 # depends on [control=['if'], data=['poshints']] elif 'top' in poshints: return poshints['top'] - sizehinty # depends on [control=['if'], data=['poshints']] # depends on [control=['if'], data=['sizehinty']]
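The hint arithmetic is plain algebra: `center_y` and `top` convert to the equivalent `y` by subtracting half or all of the height hint. Assuming the function above is in scope:

print(get_pos_hint_y({'y': 0.25}, None))       # 0.25 (explicit y wins)
print(get_pos_hint_y({'center_y': 0.5}, 0.2))  # 0.4
print(get_pos_hint_y({'top': 1.0}, 0.2))       # 0.8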
def url_builder(self, endpoint, *, root=None, params=None, url_params=None):
    """Create a URL for the specified endpoint.

    Arguments:
      endpoint (:py:class:`str`): The API endpoint to access.
      root: (:py:class:`str`, optional): The root URL for the service API.
      params: (:py:class:`dict`, optional): The values to format into the
        created URL (defaults to ``None``).
      url_params: (:py:class:`dict`, optional): Parameters to add to the end
        of the URL (defaults to ``None``).

    Returns:
      :py:class:`str`: The resulting URL.

    """
    if root is None:
        root = self.ROOT
    scheme, netloc, path, _, _ = urlsplit(root)
    return urlunsplit((
        scheme,
        netloc,
        urljoin(path, endpoint),
        urlencode(url_params or {}),
        '',
    )).format(**params or {})
def function[url_builder, parameter[self, endpoint]]: constant[Create a URL for the specified endpoint. Arguments: endpoint (:py:class:`str`): The API endpoint to access. root: (:py:class:`str`, optional): The root URL for the service API. params: (:py:class:`dict`, optional): The values for format into the created URL (defaults to ``None``). url_params: (:py:class:`dict`, optional): Parameters to add to the end of the URL (defaults to ``None``). Returns: :py:class:`str`: The resulting URL. ] if compare[name[root] is constant[None]] begin[:] variable[root] assign[=] name[self].ROOT <ast.Tuple object at 0x7da1b008f6a0> assign[=] call[name[urlsplit], parameter[name[root]]] return[call[call[name[urlunsplit], parameter[tuple[[<ast.Name object at 0x7da1afe0c0d0>, <ast.Name object at 0x7da1afe0e740>, <ast.Call object at 0x7da1afe0d9c0>, <ast.Call object at 0x7da1afe0f6d0>, <ast.Constant object at 0x7da1afe0d960>]]]].format, parameter[]]]
keyword[def] identifier[url_builder] ( identifier[self] , identifier[endpoint] ,*, identifier[root] = keyword[None] , identifier[params] = keyword[None] , identifier[url_params] = keyword[None] ): literal[string] keyword[if] identifier[root] keyword[is] keyword[None] : identifier[root] = identifier[self] . identifier[ROOT] identifier[scheme] , identifier[netloc] , identifier[path] , identifier[_] , identifier[_] = identifier[urlsplit] ( identifier[root] ) keyword[return] identifier[urlunsplit] (( identifier[scheme] , identifier[netloc] , identifier[urljoin] ( identifier[path] , identifier[endpoint] ), identifier[urlencode] ( identifier[url_params] keyword[or] {}), literal[string] , )). identifier[format] (** identifier[params] keyword[or] {})
def url_builder(self, endpoint, *, root=None, params=None, url_params=None):
    """Create a URL for the specified endpoint.

    Arguments:
      endpoint (:py:class:`str`): The API endpoint to access.
      root: (:py:class:`str`, optional): The root URL for the service API.
      params: (:py:class:`dict`, optional): The values to format into the
        created URL (defaults to ``None``).
      url_params: (:py:class:`dict`, optional): Parameters to add to the end
        of the URL (defaults to ``None``).

    Returns:
      :py:class:`str`: The resulting URL.

    """
    if root is None:
        root = self.ROOT # depends on [control=['if'], data=['root']]
    (scheme, netloc, path, _, _) = urlsplit(root)
    return urlunsplit((scheme, netloc, urljoin(path, endpoint), urlencode(url_params or {}), '')).format(**params or {})
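The split/join/unsplit dance is pure stdlib; a worked call with hypothetical values:

from urllib.parse import urlencode, urljoin, urlsplit, urlunsplit

root = 'https://api.example.com/v1/'  # hypothetical service root
scheme, netloc, path, _, _ = urlsplit(root)
url = urlunsplit((
    scheme,
    netloc,
    urljoin(path, 'users/{id}'),
    urlencode({'page': 2}),
    '',
)).format(id=42)
print(url)  # https://api.example.com/v1/users/42?page=2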
def combine_xml_points(l, units, handle_units): """Combine multiple Point tags into an array.""" ret = {} for item in l: for key, value in item.items(): ret.setdefault(key, []).append(value) for key, value in ret.items(): if key != 'date': ret[key] = handle_units(value, units.get(key, None)) return ret
def function[combine_xml_points, parameter[l, units, handle_units]]: constant[Combine multiple Point tags into an array.] variable[ret] assign[=] dictionary[[], []] for taget[name[item]] in starred[name[l]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b1061000>, <ast.Name object at 0x7da1b1060df0>]]] in starred[call[name[item].items, parameter[]]] begin[:] call[call[name[ret].setdefault, parameter[name[key], list[[]]]].append, parameter[name[value]]] for taget[tuple[[<ast.Name object at 0x7da1b1061450>, <ast.Name object at 0x7da1b10600a0>]]] in starred[call[name[ret].items, parameter[]]] begin[:] if compare[name[key] not_equal[!=] constant[date]] begin[:] call[name[ret]][name[key]] assign[=] call[name[handle_units], parameter[name[value], call[name[units].get, parameter[name[key], constant[None]]]]] return[name[ret]]
keyword[def] identifier[combine_xml_points] ( identifier[l] , identifier[units] , identifier[handle_units] ): literal[string] identifier[ret] ={} keyword[for] identifier[item] keyword[in] identifier[l] : keyword[for] identifier[key] , identifier[value] keyword[in] identifier[item] . identifier[items] (): identifier[ret] . identifier[setdefault] ( identifier[key] ,[]). identifier[append] ( identifier[value] ) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[ret] . identifier[items] (): keyword[if] identifier[key] != literal[string] : identifier[ret] [ identifier[key] ]= identifier[handle_units] ( identifier[value] , identifier[units] . identifier[get] ( identifier[key] , keyword[None] )) keyword[return] identifier[ret]
def combine_xml_points(l, units, handle_units): """Combine multiple Point tags into an array.""" ret = {} for item in l: for (key, value) in item.items(): ret.setdefault(key, []).append(value) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['item']] for (key, value) in ret.items(): if key != 'date': ret[key] = handle_units(value, units.get(key, None)) # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=[]] return ret
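The accumulate-then-convert pattern in isolation; `handle_units` is stubbed as a pass-through since its real behavior is not shown:

def handle_units(values, unit):
    return values  # stub: the real handle_units attaches units

points = [{'date': '2019-01-01', 'temp': 3.5},
          {'date': '2019-01-02', 'temp': 4.1}]
units = {'temp': 'degC'}

ret = {}
for item in points:
    for key, value in item.items():
        ret.setdefault(key, []).append(value)
for key, value in ret.items():
    if key != 'date':
        ret[key] = handle_units(value, units.get(key, None))
print(ret)  # {'date': ['2019-01-01', '2019-01-02'], 'temp': [3.5, 4.1]}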
def _bounds_dist(self, p): """Get the lower and upper bound distances. Negative is bad.""" prob = self.problem lb_dist = (p - prob.variable_bounds[0, ]).min() ub_dist = (prob.variable_bounds[1, ] - p).min() if prob.bounds.shape[0] > 0: const = prob.inequalities.dot(p) const_lb_dist = (const - prob.bounds[0, ]).min() const_ub_dist = (prob.bounds[1, ] - const).min() lb_dist = min(lb_dist, const_lb_dist) ub_dist = min(ub_dist, const_ub_dist) return np.array([lb_dist, ub_dist])
def function[_bounds_dist, parameter[self, p]]: constant[Get the lower and upper bound distances. Negative is bad.] variable[prob] assign[=] name[self].problem variable[lb_dist] assign[=] call[binary_operation[name[p] - call[name[prob].variable_bounds][tuple[[<ast.Constant object at 0x7da1b0122c20>]]]].min, parameter[]] variable[ub_dist] assign[=] call[binary_operation[call[name[prob].variable_bounds][tuple[[<ast.Constant object at 0x7da1b0060be0>]]] - name[p]].min, parameter[]] if compare[call[name[prob].bounds.shape][constant[0]] greater[>] constant[0]] begin[:] variable[const] assign[=] call[name[prob].inequalities.dot, parameter[name[p]]] variable[const_lb_dist] assign[=] call[binary_operation[name[const] - call[name[prob].bounds][tuple[[<ast.Constant object at 0x7da1b0060100>]]]].min, parameter[]] variable[const_ub_dist] assign[=] call[binary_operation[call[name[prob].bounds][tuple[[<ast.Constant object at 0x7da1b00609d0>]]] - name[const]].min, parameter[]] variable[lb_dist] assign[=] call[name[min], parameter[name[lb_dist], name[const_lb_dist]]] variable[ub_dist] assign[=] call[name[min], parameter[name[ub_dist], name[const_ub_dist]]] return[call[name[np].array, parameter[list[[<ast.Name object at 0x7da1b0076c20>, <ast.Name object at 0x7da1b0076350>]]]]]
keyword[def] identifier[_bounds_dist] ( identifier[self] , identifier[p] ): literal[string] identifier[prob] = identifier[self] . identifier[problem] identifier[lb_dist] =( identifier[p] - identifier[prob] . identifier[variable_bounds] [ literal[int] ,]). identifier[min] () identifier[ub_dist] =( identifier[prob] . identifier[variable_bounds] [ literal[int] ,]- identifier[p] ). identifier[min] () keyword[if] identifier[prob] . identifier[bounds] . identifier[shape] [ literal[int] ]> literal[int] : identifier[const] = identifier[prob] . identifier[inequalities] . identifier[dot] ( identifier[p] ) identifier[const_lb_dist] =( identifier[const] - identifier[prob] . identifier[bounds] [ literal[int] ,]). identifier[min] () identifier[const_ub_dist] =( identifier[prob] . identifier[bounds] [ literal[int] ,]- identifier[const] ). identifier[min] () identifier[lb_dist] = identifier[min] ( identifier[lb_dist] , identifier[const_lb_dist] ) identifier[ub_dist] = identifier[min] ( identifier[ub_dist] , identifier[const_ub_dist] ) keyword[return] identifier[np] . identifier[array] ([ identifier[lb_dist] , identifier[ub_dist] ])
def _bounds_dist(self, p): """Get the lower and upper bound distances. Negative is bad.""" prob = self.problem lb_dist = (p - prob.variable_bounds[0,]).min() ub_dist = (prob.variable_bounds[1,] - p).min() if prob.bounds.shape[0] > 0: const = prob.inequalities.dot(p) const_lb_dist = (const - prob.bounds[0,]).min() const_ub_dist = (prob.bounds[1,] - const).min() lb_dist = min(lb_dist, const_lb_dist) ub_dist = min(ub_dist, const_ub_dist) # depends on [control=['if'], data=[]] return np.array([lb_dist, ub_dist])
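A numeric check of the bound-distance idea (positive means inside the bounds, negative means violated), on a hypothetical two-variable problem:

import numpy as np

variable_bounds = np.array([[0.0, 0.0],   # lower bounds
                            [1.0, 2.0]])  # upper bounds
p = np.array([0.25, 1.5])

lb_dist = (p - variable_bounds[0, ]).min()  # 0.25: nearest lower-bound margin
ub_dist = (variable_bounds[1, ] - p).min()  # 0.5:  nearest upper-bound margin
print(np.array([lb_dist, ub_dist]))         # [0.25 0.5 ]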
def namedTempFileReader(self) -> NamedTempFileReader:
    """ Named Temporary File Reader

    This provides an object compatible with NamedTemporaryFile, used for
    reading this file's contents. The file will still be deleted after the
    object falls out of scope.

    This solves the problem on Windows where a NamedTemporaryFile cannot
    be read while it's being written to
    """

    # Get the weak ref
    directory = self._directory()
    assert isinstance(directory, Directory), (
        "Expected Directory, received %s" % directory)

    # Return the object
    return NamedTempFileReader(directory, self)
def function[namedTempFileReader, parameter[self]]:
    constant[ Named Temporary File Reader

        This provides an object compatible with NamedTemporaryFile, used for
        reading this file's contents.

        This will still delete after the object falls out of scope.

        This solves the problem on Windows where a NamedTemporaryFile cannot
        be read while it's being written to

        ]
    variable[directory] assign[=] call[name[self]._directory, parameter[]]
    assert[call[name[isinstance], parameter[name[directory], name[Directory]]]]
    return[call[name[NamedTempFileReader], parameter[name[directory], name[self]]]]
keyword[def] identifier[namedTempFileReader] ( identifier[self] )-> identifier[NamedTempFileReader] : literal[string] identifier[directory] = identifier[self] . identifier[_directory] () keyword[assert] identifier[isinstance] ( identifier[directory] , identifier[Directory] ),( literal[string] % identifier[directory] ) keyword[return] identifier[NamedTempFileReader] ( identifier[directory] , identifier[self] )
def namedTempFileReader(self) -> NamedTempFileReader:
    """ Named Temporary File Reader

    This provides an object compatible with NamedTemporaryFile, used for
    reading this file's contents.

    This will still delete after the object falls out of scope.

    This solves the problem on Windows where a NamedTemporaryFile cannot
    be read while it's being written to

    """
    # Get the weak ref
    directory = self._directory()
    assert isinstance(directory, Directory), 'Expected Directory, received %s' % directory
    # Return the object
    return NamedTempFileReader(directory, self)
def read_block(self, block):
        """Read complete PEB data from file.

        Argument:
        Obj:block -- Block for which data is desired.
        """

        self.seek(block.file_offset)
        return self._fhandle.read(block.size)
def function[read_block, parameter[self, block]]:
    constant[Read complete PEB data from file.

        Argument:
        Obj:block -- Block for which data is desired.
        ]
    call[name[self].seek, parameter[name[block].file_offset]]
    return[call[name[self]._fhandle.read, parameter[name[block].size]]]
keyword[def] identifier[read_block] ( identifier[self] , identifier[block] ): literal[string] identifier[self] . identifier[seek] ( identifier[block] . identifier[file_offset] ) keyword[return] identifier[self] . identifier[_fhandle] . identifier[read] ( identifier[block] . identifier[size] )
def read_block(self, block):
    """Read complete PEB data from file.

        Argument:
        Obj:block -- Block for which data is desired.
        """
    self.seek(block.file_offset)
    return self._fhandle.read(block.size)
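A quick, self-contained sketch of the access pattern above, assuming read_block is in scope as a plain function; the Block namedtuple and the PebReader harness are illustrative stand-ins, not names from the original module:

import io
from collections import namedtuple

Block = namedtuple('Block', ['file_offset', 'size'])

class PebReader:
    # Minimal harness exposing the seek/_fhandle interface read_block expects.
    def __init__(self, data):
        self._fhandle = io.BytesIO(data)

    def seek(self, offset):
        self._fhandle.seek(offset)

    read_block = read_block  # reuse the function above as a method

reader = PebReader(b'0123456789abcdef')
print(reader.read_block(Block(file_offset=4, size=4)))  # b'4567'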
def get_model_loss(ctx, model, pretrained, dataset_name, dtype, ckpt_dir=None, start_step=None): """Get model for pre-training.""" # model model, vocabulary = nlp.model.get_model(model, dataset_name=dataset_name, pretrained=pretrained, ctx=ctx) if not pretrained: model.initialize(init=mx.init.Normal(0.02), ctx=ctx) model.cast(dtype) if ckpt_dir and start_step: param_path = os.path.join(ckpt_dir, '%07d.params'%start_step) model.load_parameters(param_path, ctx=ctx) logging.info('Loading step %d checkpoints from %s.', start_step, param_path) model.hybridize(static_alloc=True) # losses nsp_loss = mx.gluon.loss.SoftmaxCELoss() mlm_loss = mx.gluon.loss.SoftmaxCELoss() nsp_loss.hybridize(static_alloc=True) mlm_loss.hybridize(static_alloc=True) return model, nsp_loss, mlm_loss, vocabulary
def function[get_model_loss, parameter[ctx, model, pretrained, dataset_name, dtype, ckpt_dir, start_step]]: constant[Get model for pre-training.] <ast.Tuple object at 0x7da1b26ae320> assign[=] call[name[nlp].model.get_model, parameter[name[model]]] if <ast.UnaryOp object at 0x7da1b26afe50> begin[:] call[name[model].initialize, parameter[]] call[name[model].cast, parameter[name[dtype]]] if <ast.BoolOp object at 0x7da1b26ac850> begin[:] variable[param_path] assign[=] call[name[os].path.join, parameter[name[ckpt_dir], binary_operation[constant[%07d.params] <ast.Mod object at 0x7da2590d6920> name[start_step]]]] call[name[model].load_parameters, parameter[name[param_path]]] call[name[logging].info, parameter[constant[Loading step %d checkpoints from %s.], name[start_step], name[param_path]]] call[name[model].hybridize, parameter[]] variable[nsp_loss] assign[=] call[name[mx].gluon.loss.SoftmaxCELoss, parameter[]] variable[mlm_loss] assign[=] call[name[mx].gluon.loss.SoftmaxCELoss, parameter[]] call[name[nsp_loss].hybridize, parameter[]] call[name[mlm_loss].hybridize, parameter[]] return[tuple[[<ast.Name object at 0x7da1b26ad030>, <ast.Name object at 0x7da1b26afb80>, <ast.Name object at 0x7da1b26ada20>, <ast.Name object at 0x7da1b26acfd0>]]]
keyword[def] identifier[get_model_loss] ( identifier[ctx] , identifier[model] , identifier[pretrained] , identifier[dataset_name] , identifier[dtype] , identifier[ckpt_dir] = keyword[None] , identifier[start_step] = keyword[None] ): literal[string] identifier[model] , identifier[vocabulary] = identifier[nlp] . identifier[model] . identifier[get_model] ( identifier[model] , identifier[dataset_name] = identifier[dataset_name] , identifier[pretrained] = identifier[pretrained] , identifier[ctx] = identifier[ctx] ) keyword[if] keyword[not] identifier[pretrained] : identifier[model] . identifier[initialize] ( identifier[init] = identifier[mx] . identifier[init] . identifier[Normal] ( literal[int] ), identifier[ctx] = identifier[ctx] ) identifier[model] . identifier[cast] ( identifier[dtype] ) keyword[if] identifier[ckpt_dir] keyword[and] identifier[start_step] : identifier[param_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[ckpt_dir] , literal[string] % identifier[start_step] ) identifier[model] . identifier[load_parameters] ( identifier[param_path] , identifier[ctx] = identifier[ctx] ) identifier[logging] . identifier[info] ( literal[string] , identifier[start_step] , identifier[param_path] ) identifier[model] . identifier[hybridize] ( identifier[static_alloc] = keyword[True] ) identifier[nsp_loss] = identifier[mx] . identifier[gluon] . identifier[loss] . identifier[SoftmaxCELoss] () identifier[mlm_loss] = identifier[mx] . identifier[gluon] . identifier[loss] . identifier[SoftmaxCELoss] () identifier[nsp_loss] . identifier[hybridize] ( identifier[static_alloc] = keyword[True] ) identifier[mlm_loss] . identifier[hybridize] ( identifier[static_alloc] = keyword[True] ) keyword[return] identifier[model] , identifier[nsp_loss] , identifier[mlm_loss] , identifier[vocabulary]
def get_model_loss(ctx, model, pretrained, dataset_name, dtype, ckpt_dir=None, start_step=None): """Get model for pre-training.""" # model (model, vocabulary) = nlp.model.get_model(model, dataset_name=dataset_name, pretrained=pretrained, ctx=ctx) if not pretrained: model.initialize(init=mx.init.Normal(0.02), ctx=ctx) # depends on [control=['if'], data=[]] model.cast(dtype) if ckpt_dir and start_step: param_path = os.path.join(ckpt_dir, '%07d.params' % start_step) model.load_parameters(param_path, ctx=ctx) logging.info('Loading step %d checkpoints from %s.', start_step, param_path) # depends on [control=['if'], data=[]] model.hybridize(static_alloc=True) # losses nsp_loss = mx.gluon.loss.SoftmaxCELoss() mlm_loss = mx.gluon.loss.SoftmaxCELoss() nsp_loss.hybridize(static_alloc=True) mlm_loss.hybridize(static_alloc=True) return (model, nsp_loss, mlm_loss, vocabulary)
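A hedged usage sketch, assuming mxnet and gluonnlp are installed and imported as mx and nlp (as the function body expects); 'bert_12_768_12' and 'book_corpus_wiki_en_uncased' are standard gluonnlp identifiers:

import mxnet as mx

ctx = [mx.cpu()]
model, nsp_loss, mlm_loss, vocab = get_model_loss(
    ctx, 'bert_12_768_12', pretrained=True,
    dataset_name='book_corpus_wiki_en_uncased', dtype='float32')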
def parse_header(recipe, header="from", remove_header=True):
    '''Take a recipe and return the complete header line. If
       remove_header is True, only return the value.

       Parameters
       ==========
       recipe: the recipe file
       header: the header key to find and parse
       remove_header: if True, remove the key

    '''
    parsed_header = None
    fromline = [x for x in recipe.split('\n') if "%s:" %header in x.lower()]

    # Case 1: We did not find the fromline

    if len(fromline) == 0:
        return ""

    # Case 2: We found it!

    if len(fromline) > 0:
        fromline = fromline[0]
        parsed_header = fromline.strip()

    # Does the user want to clean it up?

    if remove_header is True:
        parsed_header = fromline.split(':', 1)[-1].strip()

    return parsed_header
def function[parse_header, parameter[recipe, header, remove_header]]:
    constant[Take a recipe and return the complete header line. If
       remove_header is True, only return the value.

       Parameters
       ==========
       recipe: the recipe file
       header: the header key to find and parse
       remove_header: if True, remove the key

    ]
    variable[parsed_header] assign[=] constant[None]
    variable[fromline] assign[=] <ast.ListComp object at 0x7da1b03f9db0>
    if compare[call[name[len], parameter[name[fromline]]] equal[==] constant[0]] begin[:]
    return[constant[]]
    if compare[call[name[len], parameter[name[fromline]]] greater[>] constant[0]] begin[:]
    variable[fromline] assign[=] call[name[fromline]][constant[0]]
    variable[parsed_header] assign[=] call[name[fromline].strip, parameter[]]
    if compare[name[remove_header] is constant[True]] begin[:]
    variable[parsed_header] assign[=] call[call[call[name[fromline].split, parameter[constant[:], constant[1]]]][<ast.UnaryOp object at 0x7da1b03f8280>].strip, parameter[]]
    return[name[parsed_header]]
keyword[def] identifier[parse_header] ( identifier[recipe] , identifier[header] = literal[string] , identifier[remove_header] = keyword[True] ): literal[string] identifier[parsed_header] = keyword[None] identifier[fromline] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[recipe] . identifier[split] ( literal[string] ) keyword[if] literal[string] % identifier[header] keyword[in] identifier[x] . identifier[lower] ()] keyword[if] identifier[len] ( identifier[fromline] )== literal[int] : keyword[return] literal[string] keyword[if] identifier[len] ( identifier[fromline] )> literal[int] : identifier[fromline] = identifier[fromline] [ literal[int] ] identifier[parsed_header] = identifier[fromline] . identifier[strip] () keyword[if] identifier[remove_header] keyword[is] keyword[True] : identifier[parsed_header] = identifier[fromline] . identifier[split] ( literal[string] , literal[int] )[- literal[int] ]. identifier[strip] () keyword[return] identifier[parsed_header]
def parse_header(recipe, header='from', remove_header=True):
    """Take a recipe and return the complete header line. If
       remove_header is True, only return the value.

       Parameters
       ==========
       recipe: the recipe file
       header: the header key to find and parse
       remove_header: if True, remove the key

    """
    parsed_header = None
    fromline = [x for x in recipe.split('\n') if '%s:' % header in x.lower()]
    # Case 1: We did not find the fromline
    if len(fromline) == 0:
        return '' # depends on [control=['if'], data=[]]
    # Case 2: We found it!
    if len(fromline) > 0:
        fromline = fromline[0]
        parsed_header = fromline.strip() # depends on [control=['if'], data=[]]
    # Does the user want to clean it up?
    if remove_header is True:
        parsed_header = fromline.split(':', 1)[-1].strip() # depends on [control=['if'], data=[]]
    return parsed_header
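Since parse_header is self-contained, a usage example runs as-is once the function above is in scope; the recipe text is a made-up container-recipe snippet:

recipe = "Bootstrap: docker\nFrom: ubuntu:18.04\n%post\n    apt-get update\n"
print(parse_header(recipe, header="from"))                            # ubuntu:18.04
print(parse_header(recipe, header="bootstrap", remove_header=False))  # Bootstrap: docker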
def _load_txt(file, devices, channels, header, **kwargs):
    """
    Function used for reading .txt files generated by OpenSignals.

    ----------
    Parameters
    ----------
    file : file, str, or pathlib.Path
        File, filename, or generator to read.  If the filename extension is
        ``.gz`` or ``.bz2``, the file is first decompressed. Note that
        generators should return byte strings for Python 3k.

    devices : list ["mac_address_1" <str>, "mac_address_2" <str>...]
        List of devices selected by the user.

    channels : list [[mac_address_1_channel_1 <int>, mac_address_1_channel_2 <int>...],
                     [mac_address_2_channel_1 <int>...]...]
        The channels from which the data will be loaded.

    header : dict
        File header with relevant metadata for identifying which columns may
        be read.

    **kwargs : list of variable keyword arguments. The valid keywords are those
               used by numpy.loadtxt function.

    Returns
    -------
    out_dict : dict
        Data read from the text file.

    """

    # %%%%%%%%%%%%%%%%%%%%%%%%%%% Exclusion of invalid keywords %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    kwargs_txt = _filter_keywords(numpy.loadtxt, kwargs)

    # %%%%%%%%%%%%%%%%%%%%%%%%%% Columns of the selected channels %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    out_dict = {}
    for dev_nbr, device in enumerate(devices):
        out_dict[device] = {}
        columns = []
        for chn in channels[dev_nbr]:
            columns.append(header[device]["column labels"][chn])
            # header[device]["column labels"] contains the column of the .txt file where the data
            # of channel "chn" is located.
            out_dict[device]["CH" + str(chn)] = numpy.loadtxt(fname=file,
                                                              usecols=header[device]["column labels"][chn],
                                                              **kwargs_txt)

    return out_dict
def function[_load_txt, parameter[file, devices, channels, header]]:
    constant[
    Function used for reading .txt files generated by OpenSignals.

    ----------
    Parameters
    ----------
    file : file, str, or pathlib.Path
        File, filename, or generator to read.  If the filename extension is
        ``.gz`` or ``.bz2``, the file is first decompressed. Note that
        generators should return byte strings for Python 3k.

    devices : list ["mac_address_1" <str>, "mac_address_2" <str>...]
        List of devices selected by the user.

    channels : list [[mac_address_1_channel_1 <int>, mac_address_1_channel_2 <int>...],
                     [mac_address_2_channel_1 <int>...]...]
        The channels from which the data will be loaded.

    header : dict
        File header with relevant metadata for identifying which columns may
        be read.

    **kwargs : list of variable keyword arguments. The valid keywords are those
               used by numpy.loadtxt function.

    Returns
    -------
    out_dict : dict
        Data read from the text file.

    ]
    variable[kwargs_txt] assign[=] call[name[_filter_keywords], parameter[name[numpy].loadtxt, name[kwargs]]]
    variable[out_dict] assign[=] dictionary[[], []]
    for taget[tuple[[<ast.Name object at 0x7da207f002e0>, <ast.Name object at 0x7da207f02bc0>]]] in starred[call[name[enumerate], parameter[name[devices]]]] begin[:]
    call[name[out_dict]][name[device]] assign[=] dictionary[[], []]
    variable[columns] assign[=] list[[]]
    for taget[name[chn]] in starred[call[name[channels]][name[dev_nbr]]] begin[:]
    call[name[columns].append, parameter[call[call[call[name[header]][name[device]]][constant[column labels]]][name[chn]]]]
    call[call[name[out_dict]][name[device]]][binary_operation[constant[CH] + call[name[str], parameter[name[chn]]]]] assign[=] call[name[numpy].loadtxt, parameter[]]
    return[name[out_dict]]
keyword[def] identifier[_load_txt] ( identifier[file] , identifier[devices] , identifier[channels] , identifier[header] ,** identifier[kwargs] ): literal[string] identifier[kwargs_txt] = identifier[_filter_keywords] ( identifier[numpy] . identifier[loadtxt] , identifier[kwargs] ) identifier[out_dict] ={} keyword[for] identifier[dev_nbr] , identifier[device] keyword[in] identifier[enumerate] ( identifier[devices] ): identifier[out_dict] [ identifier[device] ]={} identifier[columns] =[] keyword[for] identifier[chn] keyword[in] identifier[channels] [ identifier[dev_nbr] ]: identifier[columns] . identifier[append] ( identifier[header] [ identifier[device] ][ literal[string] ][ identifier[chn] ]) identifier[out_dict] [ identifier[device] ][ literal[string] + identifier[str] ( identifier[chn] )]= identifier[numpy] . identifier[loadtxt] ( identifier[fname] = identifier[file] , identifier[usecols] = identifier[header] [ identifier[device] ][ literal[string] ][ identifier[chn] ], ** identifier[kwargs_txt] ) keyword[return] identifier[out_dict]
def _load_txt(file, devices, channels, header, **kwargs):
    """
    Function used for reading .txt files generated by OpenSignals.

    ----------
    Parameters
    ----------
    file : file, str, or pathlib.Path
        File, filename, or generator to read.  If the filename extension is
        ``.gz`` or ``.bz2``, the file is first decompressed. Note that
        generators should return byte strings for Python 3k.

    devices : list ["mac_address_1" <str>, "mac_address_2" <str>...]
        List of devices selected by the user.

    channels : list [[mac_address_1_channel_1 <int>, mac_address_1_channel_2 <int>...],
                     [mac_address_2_channel_1 <int>...]...]
        The channels from which the data will be loaded.

    header : dict
        File header with relevant metadata for identifying which columns may
        be read.

    **kwargs : list of variable keyword arguments. The valid keywords are those
               used by numpy.loadtxt function.

    Returns
    -------
    out_dict : dict
        Data read from the text file.

    """
    # %%%%%%%%%%%%%%%%%%%%%%%%%%% Exclusion of invalid keywords %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    kwargs_txt = _filter_keywords(numpy.loadtxt, kwargs)
    # %%%%%%%%%%%%%%%%%%%%%%%%%% Columns of the selected channels %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    out_dict = {}
    for (dev_nbr, device) in enumerate(devices):
        out_dict[device] = {}
        columns = []
        for chn in channels[dev_nbr]:
            columns.append(header[device]['column labels'][chn])
            # header[device]["column labels"] contains the column of the .txt file where the data
            # of channel "chn" is located.
            out_dict[device]['CH' + str(chn)] = numpy.loadtxt(fname=file, usecols=header[device]['column labels'][chn], **kwargs_txt) # depends on [control=['for'], data=['chn']] # depends on [control=['for'], data=[]]
    return out_dict
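A runnable sketch of the call, assuming _load_txt above is pasted into the same file; the mac address, the channel-to-column mapping, and the _filter_keywords stub are illustrative assumptions (the real module ships its own _filter_keywords helper):

import os
import tempfile
import numpy

def _filter_keywords(func, kwargs):  # stand-in for the module's helper
    return kwargs

data = numpy.column_stack([numpy.arange(5), numpy.arange(5) * 10])
tmp = tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False)
numpy.savetxt(tmp.name, data)
tmp.close()

mac = "00:07:80:3B:46:61"
header = {mac: {"column labels": {1: 0, 2: 1}}}  # channel -> column index
out = _load_txt(tmp.name, [mac], [[1, 2]], header)
print(out[mac]["CH2"])  # [ 0. 10. 20. 30. 40.]
os.remove(tmp.name)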
def ua_string(praw_info): """Return the user-agent string. The user-agent string contains PRAW version and platform version info. """ if os.environ.get('SERVER_SOFTWARE') is not None: # Google App Engine information # https://developers.google.com/appengine/docs/python/ info = os.environ.get('SERVER_SOFTWARE') else: # Standard platform information info = platform.platform(True).encode('ascii', 'ignore') return '{0} PRAW/{1} Python/{2} {3}'.format( praw_info, __version__, sys.version.split()[0], info)
def function[ua_string, parameter[praw_info]]: constant[Return the user-agent string. The user-agent string contains PRAW version and platform version info. ] if compare[call[name[os].environ.get, parameter[constant[SERVER_SOFTWARE]]] is_not constant[None]] begin[:] variable[info] assign[=] call[name[os].environ.get, parameter[constant[SERVER_SOFTWARE]]] return[call[constant[{0} PRAW/{1} Python/{2} {3}].format, parameter[name[praw_info], name[__version__], call[call[name[sys].version.split, parameter[]]][constant[0]], name[info]]]]
keyword[def] identifier[ua_string] ( identifier[praw_info] ): literal[string] keyword[if] identifier[os] . identifier[environ] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] : identifier[info] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] ) keyword[else] : identifier[info] = identifier[platform] . identifier[platform] ( keyword[True] ). identifier[encode] ( literal[string] , literal[string] ) keyword[return] literal[string] . identifier[format] ( identifier[praw_info] , identifier[__version__] , identifier[sys] . identifier[version] . identifier[split] ()[ literal[int] ], identifier[info] )
def ua_string(praw_info): """Return the user-agent string. The user-agent string contains PRAW version and platform version info. """ if os.environ.get('SERVER_SOFTWARE') is not None: # Google App Engine information # https://developers.google.com/appengine/docs/python/ info = os.environ.get('SERVER_SOFTWARE') # depends on [control=['if'], data=[]] else: # Standard platform information info = platform.platform(True).encode('ascii', 'ignore') return '{0} PRAW/{1} Python/{2} {3}'.format(praw_info, __version__, sys.version.split()[0], info)
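One caveat worth noting: under Python 3, platform.platform(True).encode('ascii', 'ignore') yields bytes, so the formatted string embeds a b'...' literal. A hedged, Python-3-friendly variant of the same logic (a sketch, not the library's actual code) decodes it back:

import os
import platform
import sys

def ua_string_py3(praw_info, version='0.0.0'):
    info = os.environ.get('SERVER_SOFTWARE')
    if info is None:
        # Round-trip through ASCII to strip non-ASCII platform details.
        info = platform.platform(True).encode('ascii', 'ignore').decode('ascii')
    return '{0} PRAW/{1} Python/{2} {3}'.format(
        praw_info, version, sys.version.split()[0], info)

print(ua_string_py3('my-app'))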
def _post(self, uri, data, headers=None): """ Simple POST request for a given uri path. """ if not headers: headers = self._get_headers() logging.debug("URI=" + str(uri)) logging.debug("HEADERS=" + str(headers)) logging.debug("BODY=" + str(data)) response = self.session.post(uri, headers=headers, data=json.dumps(data)) logging.debug("STATUS=" + str(response.status_code)) if response.status_code in [200, 201]: return response.json() else: logging.error(b"ERROR=" + response.content) response.raise_for_status()
def function[_post, parameter[self, uri, data, headers]]: constant[ Simple POST request for a given uri path. ] if <ast.UnaryOp object at 0x7da204344be0> begin[:] variable[headers] assign[=] call[name[self]._get_headers, parameter[]] call[name[logging].debug, parameter[binary_operation[constant[URI=] + call[name[str], parameter[name[uri]]]]]] call[name[logging].debug, parameter[binary_operation[constant[HEADERS=] + call[name[str], parameter[name[headers]]]]]] call[name[logging].debug, parameter[binary_operation[constant[BODY=] + call[name[str], parameter[name[data]]]]]] variable[response] assign[=] call[name[self].session.post, parameter[name[uri]]] call[name[logging].debug, parameter[binary_operation[constant[STATUS=] + call[name[str], parameter[name[response].status_code]]]]] if compare[name[response].status_code in list[[<ast.Constant object at 0x7da18f8124a0>, <ast.Constant object at 0x7da18f8135b0>]]] begin[:] return[call[name[response].json, parameter[]]]
keyword[def] identifier[_post] ( identifier[self] , identifier[uri] , identifier[data] , identifier[headers] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[headers] : identifier[headers] = identifier[self] . identifier[_get_headers] () identifier[logging] . identifier[debug] ( literal[string] + identifier[str] ( identifier[uri] )) identifier[logging] . identifier[debug] ( literal[string] + identifier[str] ( identifier[headers] )) identifier[logging] . identifier[debug] ( literal[string] + identifier[str] ( identifier[data] )) identifier[response] = identifier[self] . identifier[session] . identifier[post] ( identifier[uri] , identifier[headers] = identifier[headers] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[data] )) identifier[logging] . identifier[debug] ( literal[string] + identifier[str] ( identifier[response] . identifier[status_code] )) keyword[if] identifier[response] . identifier[status_code] keyword[in] [ literal[int] , literal[int] ]: keyword[return] identifier[response] . identifier[json] () keyword[else] : identifier[logging] . identifier[error] ( literal[string] + identifier[response] . identifier[content] ) identifier[response] . identifier[raise_for_status] ()
def _post(self, uri, data, headers=None): """ Simple POST request for a given uri path. """ if not headers: headers = self._get_headers() # depends on [control=['if'], data=[]] logging.debug('URI=' + str(uri)) logging.debug('HEADERS=' + str(headers)) logging.debug('BODY=' + str(data)) response = self.session.post(uri, headers=headers, data=json.dumps(data)) logging.debug('STATUS=' + str(response.status_code)) if response.status_code in [200, 201]: return response.json() # depends on [control=['if'], data=[]] else: logging.error(b'ERROR=' + response.content) response.raise_for_status()
def _configure_key_pair(config):
    """Configure SSH access, using an existing key pair if possible.

    Creates a project-wide ssh key that can be used to access all the
    instances unless explicitly prohibited by instance config.

    The ssh-keys created by ray are of format:
      [USERNAME]:ssh-rsa [KEY_VALUE] [USERNAME]

    where:
      [USERNAME] is the user for the SSH key, specified in the config.
      [KEY_VALUE] is the public SSH key value.
    """
    if "ssh_private_key" in config["auth"]:
        return config

    ssh_user = config["auth"]["ssh_user"]

    project = compute.projects().get(
        project=config["provider"]["project_id"]).execute()

    # Key pairs associated with project meta data. The key pairs are general,
    # and not just ssh keys.
    ssh_keys_str = next(
        (item for item in project["commonInstanceMetadata"].get("items", [])
         if item["key"] == "ssh-keys"), {}).get("value", "")

    ssh_keys = ssh_keys_str.split("\n") if ssh_keys_str else []

    # Try a few times to get or create a good key pair.
    key_found = False
    for i in range(10):
        key_name = key_pair_name(i, config["provider"]["region"],
                                 config["provider"]["project_id"], ssh_user)
        public_key_path, private_key_path = key_pair_paths(key_name)

        for ssh_key in ssh_keys:
            key_parts = ssh_key.split(" ")
            if len(key_parts) != 3:
                continue

            if key_parts[2] == ssh_user and os.path.exists(private_key_path):
                # Found a key
                key_found = True
                break

        # Create a key since it doesn't exist locally or in GCP
        if not key_found and not os.path.exists(private_key_path):
            logger.info("_configure_key_pair: "
                        "Creating new key pair {}".format(key_name))
            public_key, private_key = generate_rsa_key_pair()

            _create_project_ssh_key_pair(project, public_key, ssh_user)

            with open(private_key_path, "w") as f:
                f.write(private_key)
            os.chmod(private_key_path, 0o600)

            with open(public_key_path, "w") as f:
                f.write(public_key)

            key_found = True
            break

        if key_found:
            break

    assert key_found, "SSH keypair for user {} not found for {}".format(
        ssh_user, private_key_path)
    assert os.path.exists(private_key_path), (
        "Private key file {} not found for user {}"
        "".format(private_key_path, ssh_user))

    logger.info("_configure_key_pair: "
                "Private key not specified in config, using "
                "{}".format(private_key_path))

    config["auth"]["ssh_private_key"] = private_key_path

    return config
def function[_configure_key_pair, parameter[config]]:
    constant[Configure SSH access, using an existing key pair if possible.

    Creates a project-wide ssh key that can be used to access all the
    instances unless explicitly prohibited by instance config.

    The ssh-keys created by ray are of format:
      [USERNAME]:ssh-rsa [KEY_VALUE] [USERNAME]

    where:
      [USERNAME] is the user for the SSH key, specified in the config.
      [KEY_VALUE] is the public SSH key value.
    ]
    if compare[constant[ssh_private_key] in call[name[config]][constant[auth]]] begin[:]
    return[name[config]]
    variable[ssh_user] assign[=] call[call[name[config]][constant[auth]]][constant[ssh_user]]
    variable[project] assign[=] call[call[call[name[compute].projects, parameter[]].get, parameter[]].execute, parameter[]]
    variable[ssh_keys_str] assign[=] call[call[name[next], parameter[<ast.GeneratorExp object at 0x7da18eb54cd0>, dictionary[[], []]]].get, parameter[constant[value], constant[]]]
    variable[ssh_keys] assign[=] <ast.IfExp object at 0x7da18eb54700>
    variable[key_found] assign[=] constant[False]
    for taget[name[i]] in starred[call[name[range], parameter[constant[10]]]] begin[:]
    variable[key_name] assign[=] call[name[key_pair_name], parameter[name[i], call[call[name[config]][constant[provider]]][constant[region]], call[call[name[config]][constant[provider]]][constant[project_id]], name[ssh_user]]]
    <ast.Tuple object at 0x7da18eb54400> assign[=] call[name[key_pair_paths], parameter[name[key_name]]]
    for taget[name[ssh_key]] in starred[name[ssh_keys]] begin[:]
    variable[key_parts] assign[=] call[name[ssh_key].split, parameter[constant[ ]]]
    if compare[call[name[len], parameter[name[key_parts]]] not_equal[!=] constant[3]] begin[:]
    continue
    if <ast.BoolOp object at 0x7da18f09db70> begin[:]
    variable[key_found] assign[=] constant[True]
    break
    if <ast.BoolOp object at 0x7da18f09f430> begin[:]
    call[name[logger].info, parameter[call[constant[_configure_key_pair: Creating new key pair {}].format, parameter[name[key_name]]]]]
    <ast.Tuple object at 0x7da18f09f2b0> assign[=] call[name[generate_rsa_key_pair], parameter[]]
    call[name[_create_project_ssh_key_pair], parameter[name[project], name[public_key], name[ssh_user]]]
    with call[name[open], parameter[name[private_key_path], constant[w]]] begin[:]
    call[name[f].write, parameter[name[private_key]]]
    call[name[os].chmod, parameter[name[private_key_path], constant[384]]]
    with call[name[open], parameter[name[public_key_path], constant[w]]] begin[:]
    call[name[f].write, parameter[name[public_key]]]
    variable[key_found] assign[=] constant[True]
    break
    if name[key_found] begin[:]
    break
    assert[name[key_found]]
    assert[call[name[os].path.exists, parameter[name[private_key_path]]]]
    call[name[logger].info, parameter[call[constant[_configure_key_pair: Private key not specified in config, using {}].format, parameter[name[private_key_path]]]]]
    call[call[name[config]][constant[auth]]][constant[ssh_private_key]] assign[=] name[private_key_path]
    return[name[config]]
keyword[def] identifier[_configure_key_pair] ( identifier[config] ): literal[string] keyword[if] literal[string] keyword[in] identifier[config] [ literal[string] ]: keyword[return] identifier[config] identifier[ssh_user] = identifier[config] [ literal[string] ][ literal[string] ] identifier[project] = identifier[compute] . identifier[projects] (). identifier[get] ( identifier[project] = identifier[config] [ literal[string] ][ literal[string] ]). identifier[execute] () identifier[ssh_keys_str] = identifier[next] ( ( identifier[item] keyword[for] identifier[item] keyword[in] identifier[project] [ literal[string] ]. identifier[get] ( literal[string] ,[]) keyword[if] identifier[item] [ literal[string] ]== literal[string] ),{}). identifier[get] ( literal[string] , literal[string] ) identifier[ssh_keys] = identifier[ssh_keys_str] . identifier[split] ( literal[string] ) keyword[if] identifier[ssh_keys_str] keyword[else] [] identifier[key_found] = keyword[False] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): identifier[key_name] = identifier[key_pair_name] ( identifier[i] , identifier[config] [ literal[string] ][ literal[string] ], identifier[config] [ literal[string] ][ literal[string] ], identifier[ssh_user] ) identifier[public_key_path] , identifier[private_key_path] = identifier[key_pair_paths] ( identifier[key_name] ) keyword[for] identifier[ssh_key] keyword[in] identifier[ssh_keys] : identifier[key_parts] = identifier[ssh_key] . identifier[split] ( literal[string] ) keyword[if] identifier[len] ( identifier[key_parts] )!= literal[int] : keyword[continue] keyword[if] identifier[key_parts] [ literal[int] ]== identifier[ssh_user] keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[private_key_path] ): identifier[key_found] = keyword[True] keyword[break] keyword[if] keyword[not] identifier[key_found] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[private_key_path] ): identifier[logger] . identifier[info] ( literal[string] literal[string] . identifier[format] ( identifier[key_name] )) identifier[public_key] , identifier[private_key] = identifier[generate_rsa_key_pair] () identifier[_create_project_ssh_key_pair] ( identifier[project] , identifier[public_key] , identifier[ssh_user] ) keyword[with] identifier[open] ( identifier[private_key_path] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[private_key] ) identifier[os] . identifier[chmod] ( identifier[private_key_path] , literal[int] ) keyword[with] identifier[open] ( identifier[public_key_path] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[public_key] ) identifier[key_found] = keyword[True] keyword[break] keyword[if] identifier[key_found] : keyword[break] keyword[assert] identifier[key_found] , literal[string] . identifier[format] ( identifier[ssh_user] , identifier[private_key_path] ) keyword[assert] identifier[os] . identifier[path] . identifier[exists] ( identifier[private_key_path] ),( literal[string] literal[string] . identifier[format] ( identifier[private_key_path] , identifier[ssh_user] )) identifier[logger] . identifier[info] ( literal[string] literal[string] literal[string] . identifier[format] ( identifier[private_key_path] )) identifier[config] [ literal[string] ][ literal[string] ]= identifier[private_key_path] keyword[return] identifier[config]
def _configure_key_pair(config):
    """Configure SSH access, using an existing key pair if possible.

    Creates a project-wide ssh key that can be used to access all the
    instances unless explicitly prohibited by instance config.

    The ssh-keys created by ray are of format:
      [USERNAME]:ssh-rsa [KEY_VALUE] [USERNAME]

    where:
      [USERNAME] is the user for the SSH key, specified in the config.
      [KEY_VALUE] is the public SSH key value.
    """
    if 'ssh_private_key' in config['auth']:
        return config # depends on [control=['if'], data=[]]
    ssh_user = config['auth']['ssh_user']
    project = compute.projects().get(project=config['provider']['project_id']).execute()
    # Key pairs associated with project meta data. The key pairs are general,
    # and not just ssh keys.
    ssh_keys_str = next((item for item in project['commonInstanceMetadata'].get('items', []) if item['key'] == 'ssh-keys'), {}).get('value', '')
    ssh_keys = ssh_keys_str.split('\n') if ssh_keys_str else []
    # Try a few times to get or create a good key pair.
    key_found = False
    for i in range(10):
        key_name = key_pair_name(i, config['provider']['region'], config['provider']['project_id'], ssh_user)
        (public_key_path, private_key_path) = key_pair_paths(key_name)
        for ssh_key in ssh_keys:
            key_parts = ssh_key.split(' ')
            if len(key_parts) != 3:
                continue # depends on [control=['if'], data=[]]
            if key_parts[2] == ssh_user and os.path.exists(private_key_path):
                # Found a key
                key_found = True
                break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ssh_key']]
        # Create a key since it doesn't exist locally or in GCP
        if not key_found and (not os.path.exists(private_key_path)):
            logger.info('_configure_key_pair: Creating new key pair {}'.format(key_name))
            (public_key, private_key) = generate_rsa_key_pair()
            _create_project_ssh_key_pair(project, public_key, ssh_user)
            with open(private_key_path, 'w') as f:
                f.write(private_key) # depends on [control=['with'], data=['f']]
            os.chmod(private_key_path, 384)
            with open(public_key_path, 'w') as f:
                f.write(public_key) # depends on [control=['with'], data=['f']]
            key_found = True
            break # depends on [control=['if'], data=[]]
        if key_found:
            break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
    assert key_found, 'SSH keypair for user {} not found for {}'.format(ssh_user, private_key_path)
    assert os.path.exists(private_key_path), 'Private key file {} not found for user {}'.format(private_key_path, ssh_user)
    logger.info('_configure_key_pair: Private key not specified in config, using {}'.format(private_key_path))
    config['auth']['ssh_private_key'] = private_key_path
    return config
def avl_new_top(t1, t2, top, direction=0): """ if direction == 0: (t1, t2) is (left, right) if direction == 1: (t1, t2) is (right, left) """ top.parent = None assert top.parent is None, str(top.parent.value) top.set_child(direction, t1) top.set_child(1 - direction, t2) top.balance = max(height(t1), height(t2)) + 1 return top
def function[avl_new_top, parameter[t1, t2, top, direction]]: constant[ if direction == 0: (t1, t2) is (left, right) if direction == 1: (t1, t2) is (right, left) ] name[top].parent assign[=] constant[None] assert[compare[name[top].parent is constant[None]]] call[name[top].set_child, parameter[name[direction], name[t1]]] call[name[top].set_child, parameter[binary_operation[constant[1] - name[direction]], name[t2]]] name[top].balance assign[=] binary_operation[call[name[max], parameter[call[name[height], parameter[name[t1]]], call[name[height], parameter[name[t2]]]]] + constant[1]] return[name[top]]
keyword[def] identifier[avl_new_top] ( identifier[t1] , identifier[t2] , identifier[top] , identifier[direction] = literal[int] ): literal[string] identifier[top] . identifier[parent] = keyword[None] keyword[assert] identifier[top] . identifier[parent] keyword[is] keyword[None] , identifier[str] ( identifier[top] . identifier[parent] . identifier[value] ) identifier[top] . identifier[set_child] ( identifier[direction] , identifier[t1] ) identifier[top] . identifier[set_child] ( literal[int] - identifier[direction] , identifier[t2] ) identifier[top] . identifier[balance] = identifier[max] ( identifier[height] ( identifier[t1] ), identifier[height] ( identifier[t2] ))+ literal[int] keyword[return] identifier[top]
def avl_new_top(t1, t2, top, direction=0): """ if direction == 0: (t1, t2) is (left, right) if direction == 1: (t1, t2) is (right, left) """ top.parent = None assert top.parent is None, str(top.parent.value) top.set_child(direction, t1) top.set_child(1 - direction, t2) top.balance = max(height(t1), height(t2)) + 1 return top
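A self-contained sketch exercising avl_new_top; the Node class and the height helper below are illustrative stand-ins for the module's own definitions:

class Node:
    def __init__(self, value):
        self.value = value
        self.parent = None
        self.balance = 1              # cached subtree height, as used above
        self.children = [None, None]  # [left, right]

    def set_child(self, direction, child):
        self.children[direction] = child
        if child is not None:
            child.parent = self

def height(node):
    # Stand-in: empty subtrees have height 0.
    return node.balance if node is not None else 0

top = avl_new_top(Node(1), Node(3), Node(2))
print(top.value, top.children[0].value, top.children[1].value, top.balance)  # 2 1 3 2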
def use_isolated_book_view(self): """Pass through to provider CommentLookupSession.use_isolated_book_view""" self._book_view = ISOLATED # self._get_provider_session('comment_lookup_session') # To make sure the session is tracked for session in self._get_provider_sessions(): try: session.use_isolated_book_view() except AttributeError: pass
def function[use_isolated_book_view, parameter[self]]: constant[Pass through to provider CommentLookupSession.use_isolated_book_view] name[self]._book_view assign[=] name[ISOLATED] for taget[name[session]] in starred[call[name[self]._get_provider_sessions, parameter[]]] begin[:] <ast.Try object at 0x7da2041da7d0>
keyword[def] identifier[use_isolated_book_view] ( identifier[self] ): literal[string] identifier[self] . identifier[_book_view] = identifier[ISOLATED] keyword[for] identifier[session] keyword[in] identifier[self] . identifier[_get_provider_sessions] (): keyword[try] : identifier[session] . identifier[use_isolated_book_view] () keyword[except] identifier[AttributeError] : keyword[pass]
def use_isolated_book_view(self): """Pass through to provider CommentLookupSession.use_isolated_book_view""" self._book_view = ISOLATED # self._get_provider_session('comment_lookup_session') # To make sure the session is tracked for session in self._get_provider_sessions(): try: session.use_isolated_book_view() # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['session']]
def sign_execute_cancellation(cancellation_params, key_pair): """ Function to sign the parameters required to execute a cancellation request on the Switcheo Exchange. Execution of this function is as follows:: sign_execute_cancellation(cancellation_params=signable_params, key_pair=key_pair) The expected return result for this function is as follows:: { 'signature': '6a40d6c011b7517f8fd3f2d0de32dd486adfd1d424d06d56c80eb....' } :param cancellation_params: Parameters the Switcheo Exchange returns from the create cancellation. :type cancellation_params: dict :param key_pair: The KeyPair for the wallet being used to sign deposit message. :type key_pair: KeyPair :return: Dictionary of signed message to send to the Switcheo API. """ signature = sign_transaction(transaction=cancellation_params['transaction'], private_key_hex=private_key_to_hex(key_pair=key_pair)) return {'signature': signature}
def function[sign_execute_cancellation, parameter[cancellation_params, key_pair]]: constant[ Function to sign the parameters required to execute a cancellation request on the Switcheo Exchange. Execution of this function is as follows:: sign_execute_cancellation(cancellation_params=signable_params, key_pair=key_pair) The expected return result for this function is as follows:: { 'signature': '6a40d6c011b7517f8fd3f2d0de32dd486adfd1d424d06d56c80eb....' } :param cancellation_params: Parameters the Switcheo Exchange returns from the create cancellation. :type cancellation_params: dict :param key_pair: The KeyPair for the wallet being used to sign deposit message. :type key_pair: KeyPair :return: Dictionary of signed message to send to the Switcheo API. ] variable[signature] assign[=] call[name[sign_transaction], parameter[]] return[dictionary[[<ast.Constant object at 0x7da20e9b38b0>], [<ast.Name object at 0x7da20e9b32e0>]]]
keyword[def] identifier[sign_execute_cancellation] ( identifier[cancellation_params] , identifier[key_pair] ): literal[string] identifier[signature] = identifier[sign_transaction] ( identifier[transaction] = identifier[cancellation_params] [ literal[string] ], identifier[private_key_hex] = identifier[private_key_to_hex] ( identifier[key_pair] = identifier[key_pair] )) keyword[return] { literal[string] : identifier[signature] }
def sign_execute_cancellation(cancellation_params, key_pair): """ Function to sign the parameters required to execute a cancellation request on the Switcheo Exchange. Execution of this function is as follows:: sign_execute_cancellation(cancellation_params=signable_params, key_pair=key_pair) The expected return result for this function is as follows:: { 'signature': '6a40d6c011b7517f8fd3f2d0de32dd486adfd1d424d06d56c80eb....' } :param cancellation_params: Parameters the Switcheo Exchange returns from the create cancellation. :type cancellation_params: dict :param key_pair: The KeyPair for the wallet being used to sign deposit message. :type key_pair: KeyPair :return: Dictionary of signed message to send to the Switcheo API. """ signature = sign_transaction(transaction=cancellation_params['transaction'], private_key_hex=private_key_to_hex(key_pair=key_pair)) return {'signature': signature}
def call_mr_transform(data, opt='', path='./',
                      remove_files=True):  # pragma: no cover
    r"""Call mr_transform

    This method calls the iSAP module mr_transform

    Parameters
    ----------
    data : np.ndarray
        Input data, 2D array
    opt : list or str, optional
        Options to be passed to mr_transform
    path : str, optional
        Path for output files (default is './')
    remove_files : bool, optional
        Option to remove output files (default is 'True')

    Returns
    -------
    np.ndarray results of mr_transform

    Raises
    ------
    ValueError
        If the input data is not a 2D numpy array

    Examples
    --------
    >>> from modopt.signal.wavelet import *
    >>> a = np.arange(9).reshape(3, 3).astype(float)
    >>> call_mr_transform(a)
    array([[[-1.5       , -1.125     , -0.75      ],
            [-0.375     ,  0.        ,  0.375     ],
            [ 0.75      ,  1.125     ,  1.5       ]],

           [[-1.5625    , -1.171875  , -0.78125   ],
            [-0.390625  ,  0.        ,  0.390625  ],
            [ 0.78125   ,  1.171875  ,  1.5625    ]],

           [[-0.5859375 , -0.43945312, -0.29296875],
            [-0.14648438,  0.        ,  0.14648438],
            [ 0.29296875,  0.43945312,  0.5859375 ]],

           [[ 3.6484375 ,  3.73632812,  3.82421875],
            [ 3.91210938,  4.        ,  4.08789062],
            [ 4.17578125,  4.26367188,  4.3515625 ]]], dtype=float32)

    """

    if not import_astropy:
        raise ImportError('Astropy package not found.')

    if (not isinstance(data, np.ndarray)) or (data.ndim != 2):
        raise ValueError('Input data must be a 2D numpy array.')

    executable = 'mr_transform'

    # Make sure mr_transform is installed.
    is_executable(executable)

    # Create a unique string using the current date and time.
    unique_string = datetime.now().strftime('%Y.%m.%d_%H.%M.%S')

    # Set the output file names.
    file_name = path + 'mr_temp_' + unique_string
    file_fits = file_name + '.fits'
    file_mr = file_name + '.mr'

    # Write the input data to a fits file.
    fits.writeto(file_fits, data)

    if isinstance(opt, str):
        opt = opt.split()

    # Call mr_transform.
    try:
        check_call([executable] + opt + [file_fits, file_mr])

    except Exception:
        warn('{} failed to run with the options provided.'.format(executable))
        remove(file_fits)

    else:
        # Retrieve wavelet transformed data.
        result = fits.getdata(file_mr)

        # Remove the temporary files.
        if remove_files:
            remove(file_fits)
            remove(file_mr)

        # Return the mr_transform results.
        return result
def function[call_mr_transform, parameter[data, opt, path, remove_files]]: constant[Call mr_transform This method calls the iSAP module mr_transform Parameters ---------- data : np.ndarray Input data, 2D array opt : list or str, optional Options to be passed to mr_transform path : str, optional Path for output files (default is './') remove_files : bool, optional Option to remove output files (default is 'True') Returns ------- np.ndarray results of mr_transform Raises ------ ValueError If the input data is not a 2D numpy array Examples -------- >>> from modopt.signal.wavelet import * >>> a = np.arange(9).reshape(3, 3).astype(float) >>> call_mr_transform(a) array([[[-1.5 , -1.125 , -0.75 ], [-0.375 , 0. , 0.375 ], [ 0.75 , 1.125 , 1.5 ]], [[-1.5625 , -1.171875 , -0.78125 ], [-0.390625 , 0. , 0.390625 ], [ 0.78125 , 1.171875 , 1.5625 ]], [[-0.5859375 , -0.43945312, -0.29296875], [-0.14648438, 0. , 0.14648438], [ 0.29296875, 0.43945312, 0.5859375 ]], [[ 3.6484375 , 3.73632812, 3.82421875], [ 3.91210938, 4. , 4.08789062], [ 4.17578125, 4.26367188, 4.3515625 ]]], dtype=float32) ] if <ast.UnaryOp object at 0x7da20e9567a0> begin[:] <ast.Raise object at 0x7da20e955ae0> if <ast.BoolOp object at 0x7da20e957220> begin[:] <ast.Raise object at 0x7da1b0e14460> variable[executable] assign[=] constant[mr_transform] call[name[is_executable], parameter[name[executable]]] variable[unique_string] assign[=] call[call[name[datetime].now, parameter[]].strftime, parameter[constant[%Y.%m.%d_%H.%M.%S]]] variable[file_name] assign[=] binary_operation[binary_operation[name[path] + constant[mr_temp_]] + name[unique_string]] variable[file_fits] assign[=] binary_operation[name[file_name] + constant[.fits]] variable[file_mr] assign[=] binary_operation[name[file_name] + constant[.mr]] call[name[fits].writeto, parameter[name[file_fits], name[data]]] if call[name[isinstance], parameter[name[opt], name[str]]] begin[:] variable[opt] assign[=] call[name[opt].split, parameter[]] <ast.Try object at 0x7da1b0dbd210>
keyword[def] identifier[call_mr_transform] ( identifier[data] , identifier[opt] = literal[string] , identifier[path] = literal[string] , identifier[remove_files] = keyword[True] ): literal[string] keyword[if] keyword[not] identifier[import_astropy] : keyword[raise] identifier[ImportError] ( literal[string] ) keyword[if] ( keyword[not] identifier[isinstance] ( identifier[data] , identifier[np] . identifier[ndarray] )) keyword[or] ( identifier[data] . identifier[ndim] != literal[int] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[executable] = literal[string] identifier[is_executable] ( identifier[executable] ) identifier[unique_string] = identifier[datetime] . identifier[now] (). identifier[strftime] ( literal[string] ) identifier[file_name] = identifier[path] + literal[string] + identifier[unique_string] identifier[file_fits] = identifier[file_name] + literal[string] identifier[file_mr] = identifier[file_name] + literal[string] identifier[fits] . identifier[writeto] ( identifier[file_fits] , identifier[data] ) keyword[if] identifier[isinstance] ( identifier[opt] , identifier[str] ): identifier[opt] = identifier[opt] . identifier[split] () keyword[try] : identifier[check_call] ([ identifier[executable] ]+ identifier[opt] +[ identifier[file_fits] , identifier[file_mr] ]) keyword[except] identifier[Exception] : identifier[warn] ( literal[string] . identifier[format] ( identifier[executable] )) identifier[remove] ( identifier[file_fits] ) keyword[else] : identifier[result] = identifier[fits] . identifier[getdata] ( identifier[file_mr] ) keyword[if] identifier[remove_files] : identifier[remove] ( identifier[file_fits] ) identifier[remove] ( identifier[file_mr] ) keyword[return] identifier[result]
def call_mr_transform(data, opt='', path='./', remove_files=True):  # pragma: no cover
    "Call mr_transform\n\n    This method calls the iSAP module mr_transform\n\n    Parameters\n    ----------\n    data : np.ndarray\n        Input data, 2D array\n    opt : list or str, optional\n        Options to be passed to mr_transform\n    path : str, optional\n        Path for output files (default is './')\n    remove_files : bool, optional\n        Option to remove output files (default is 'True')\n\n    Returns\n    -------\n    np.ndarray results of mr_transform\n\n    Raises\n    ------\n    ValueError\n        If the input data is not a 2D numpy array\n\n    Examples\n    --------\n    >>> from modopt.signal.wavelet import *\n    >>> a = np.arange(9).reshape(3, 3).astype(float)\n    >>> call_mr_transform(a)\n    array([[[-1.5       , -1.125     , -0.75      ],\n [-0.375     ,  0.        ,  0.375     ],\n [ 0.75      ,  1.125     ,  1.5       ]],\n\n [[-1.5625    , -1.171875  , -0.78125   ],\n [-0.390625  ,  0.        ,  0.390625  ],\n [ 0.78125   ,  1.171875  ,  1.5625    ]],\n\n [[-0.5859375 , -0.43945312, -0.29296875],\n [-0.14648438,  0.        ,  0.14648438],\n [ 0.29296875,  0.43945312,  0.5859375 ]],\n\n [[ 3.6484375 ,  3.73632812,  3.82421875],\n [ 3.91210938,  4.        ,  4.08789062],\n [ 4.17578125,  4.26367188,  4.3515625 ]]], dtype=float32)\n\n    "
    if not import_astropy:
        raise ImportError('Astropy package not found.') # depends on [control=['if'], data=[]]
    if not isinstance(data, np.ndarray) or data.ndim != 2:
        raise ValueError('Input data must be a 2D numpy array.') # depends on [control=['if'], data=[]]
    executable = 'mr_transform'
    # Make sure mr_transform is installed.
    is_executable(executable)
    # Create a unique string using the current date and time.
    unique_string = datetime.now().strftime('%Y.%m.%d_%H.%M.%S')
    # Set the output file names.
    file_name = path + 'mr_temp_' + unique_string
    file_fits = file_name + '.fits'
    file_mr = file_name + '.mr'
    # Write the input data to a fits file.
    fits.writeto(file_fits, data)
    if isinstance(opt, str):
        opt = opt.split() # depends on [control=['if'], data=[]]
    # Call mr_transform.
    try:
        check_call([executable] + opt + [file_fits, file_mr]) # depends on [control=['try'], data=[]]
    except Exception:
        warn('{} failed to run with the options provided.'.format(executable))
        remove(file_fits) # depends on [control=['except'], data=[]]
    else:
        # Retrieve wavelet transformed data.
        result = fits.getdata(file_mr)
        # Remove the temporary files.
        if remove_files:
            remove(file_fits)
            remove(file_mr) # depends on [control=['if'], data=[]]
        # Return the mr_transform results.
        return result
def expect_prompt(self, consume=True):
        """
        Monitors the data received from the remote host and waits for a
        prompt in the response. The default prompt works with many
        devices running Unix, IOS, IOS-XR, or Junos, among others. If
        that fails, a custom prompt may also be defined using the
        set_prompt() method.
        This method also stores the received data in the response
        attribute (self.response).

        :type consume: boolean (Default: True)
        :param consume: Whether to consume the prompt from the buffer or not.
        :rtype: int, re.MatchObject
        :return: The index of the prompt regular expression that matched,
          and the match object.
        """
        if consume:
            result = self.expect(self.get_prompt())
        else:
            self._dbg(1, "DO NOT CONSUME PROMPT!")
            result = self.waitfor(self.get_prompt())

        # We skip the first line because it contains the echo of the command
        # sent.
        self._dbg(5, "Checking %s for errors" % repr(self.response))
        for line in self.response.split('\n')[1:]:
            for prompt in self.get_error_prompt():
                if not prompt.search(line):
                    continue
                args = repr(prompt.pattern), repr(line)
                self._dbg(5, "error prompt (%s) matches %s" % args)
                raise InvalidCommandException('Device said:\n' + self.response)

        return result
def function[expect_prompt, parameter[self, consume]]:
    constant[
        Monitors the data received from the remote host and waits for a
        prompt in the response. The default prompt works with many
        devices running Unix, IOS, IOS-XR, or Junos, among others. If
        that fails, a custom prompt may also be defined using the
        set_prompt() method.
        This method also stores the received data in the response
        attribute (self.response).

        :type consume: boolean (Default: True)
        :param consume: Whether to consume the prompt from the buffer or not.
        :rtype: int, re.MatchObject
        :return: The index of the prompt regular expression that matched,
          and the match object.
        ]
    if name[consume] begin[:]
    variable[result] assign[=] call[name[self].expect, parameter[call[name[self].get_prompt, parameter[]]]]
    call[name[self]._dbg, parameter[constant[5], binary_operation[constant[Checking %s for errors] <ast.Mod object at 0x7da2590d6920> call[name[repr], parameter[name[self].response]]]]]
    for taget[name[line]] in starred[call[call[name[self].response.split, parameter[constant[
]]]][<ast.Slice object at 0x7da1b0544ca0>]] begin[:]
    for taget[name[prompt]] in starred[call[name[self].get_error_prompt, parameter[]]] begin[:]
    if <ast.UnaryOp object at 0x7da1b0547580> begin[:]
    continue
    variable[args] assign[=] tuple[[<ast.Call object at 0x7da1b0544d90>, <ast.Call object at 0x7da1b06a0bb0>]]
    call[name[self]._dbg, parameter[constant[5], binary_operation[constant[error prompt (%s) matches %s] <ast.Mod object at 0x7da2590d6920> name[args]]]]
    <ast.Raise object at 0x7da1b06a1cc0>
    return[name[result]]
keyword[def] identifier[expect_prompt] ( identifier[self] , identifier[consume] = keyword[True] ): literal[string] keyword[if] identifier[consume] : identifier[result] = identifier[self] . identifier[expect] ( identifier[self] . identifier[get_prompt] ()) keyword[else] : identifier[self] . identifier[_dbg] ( literal[int] , literal[string] ) identifier[result] = identifier[self] . identifier[waitfor] ( identifier[self] . identifier[get_prompt] ()) identifier[self] . identifier[_dbg] ( literal[int] , literal[string] % identifier[repr] ( identifier[self] . identifier[response] )) keyword[for] identifier[line] keyword[in] identifier[self] . identifier[response] . identifier[split] ( literal[string] )[ literal[int] :]: keyword[for] identifier[prompt] keyword[in] identifier[self] . identifier[get_error_prompt] (): keyword[if] keyword[not] identifier[prompt] . identifier[search] ( identifier[line] ): keyword[continue] identifier[args] = identifier[repr] ( identifier[prompt] . identifier[pattern] ), identifier[repr] ( identifier[line] ) identifier[self] . identifier[_dbg] ( literal[int] , literal[string] % identifier[args] ) keyword[raise] identifier[InvalidCommandException] ( literal[string] + identifier[self] . identifier[response] ) keyword[return] identifier[result]
def expect_prompt(self, consume=True):
    """
        Monitors the data received from the remote host and waits for a
        prompt in the response. The default prompt works with many
        devices running Unix, IOS, IOS-XR, or Junos, among others. If
        that fails, a custom prompt may also be defined using the
        set_prompt() method.
        This method also stores the received data in the response
        attribute (self.response).

        :type consume: boolean (Default: True)
        :param consume: Whether to consume the prompt from the buffer or not.
        :rtype: int, re.MatchObject
        :return: The index of the prompt regular expression that matched,
          and the match object.
        """
    if consume:
        result = self.expect(self.get_prompt()) # depends on [control=['if'], data=[]]
    else:
        self._dbg(1, 'DO NOT CONSUME PROMPT!')
        result = self.waitfor(self.get_prompt())
    # We skip the first line because it contains the echo of the command
    # sent.
    self._dbg(5, 'Checking %s for errors' % repr(self.response))
    for line in self.response.split('\n')[1:]:
        for prompt in self.get_error_prompt():
            if not prompt.search(line):
                continue # depends on [control=['if'], data=[]]
            args = (repr(prompt.pattern), repr(line))
            self._dbg(5, 'error prompt (%s) matches %s' % args)
            raise InvalidCommandException('Device said:\n' + self.response) # depends on [control=['for'], data=['prompt']] # depends on [control=['for'], data=['line']]
    return result
def get_distance(node1, node2): """ Reports the distance in the machine topology between two nodes. The factors are a multiple of 10. It returns 0 when the distance cannot be determined. A node has distance 10 to itself. Reporting the distance requires a Linux kernel version of 2.6.10 or newer. @param node1: node idx @type node1: C{int} @param node2: node idx @type node2: C{int} @rtype: C{int} """ if node1 < 0 or node1 > get_max_node(): raise ValueError(node1) if node2 < 0 or node2 > get_max_node(): raise ValueError(node2) return libnuma.numa_distance(node1, node2)
def function[get_distance, parameter[node1, node2]]: constant[ Reports the distance in the machine topology between two nodes. The factors are a multiple of 10. It returns 0 when the distance cannot be determined. A node has distance 10 to itself. Reporting the distance requires a Linux kernel version of 2.6.10 or newer. @param node1: node idx @type node1: C{int} @param node2: node idx @type node2: C{int} @rtype: C{int} ] if <ast.BoolOp object at 0x7da18f09ff70> begin[:] <ast.Raise object at 0x7da18f09ec80> if <ast.BoolOp object at 0x7da18f09dff0> begin[:] <ast.Raise object at 0x7da18f09e9e0> return[call[name[libnuma].numa_distance, parameter[name[node1], name[node2]]]]
keyword[def] identifier[get_distance] ( identifier[node1] , identifier[node2] ): literal[string] keyword[if] identifier[node1] < literal[int] keyword[or] identifier[node1] > identifier[get_max_node] (): keyword[raise] identifier[ValueError] ( identifier[node1] ) keyword[if] identifier[node2] < literal[int] keyword[or] identifier[node2] > identifier[get_max_node] (): keyword[raise] identifier[ValueError] ( identifier[node2] ) keyword[return] identifier[libnuma] . identifier[numa_distance] ( identifier[node1] , identifier[node2] )
def get_distance(node1, node2): """ Reports the distance in the machine topology between two nodes. The factors are a multiple of 10. It returns 0 when the distance cannot be determined. A node has distance 10 to itself. Reporting the distance requires a Linux kernel version of 2.6.10 or newer. @param node1: node idx @type node1: C{int} @param node2: node idx @type node2: C{int} @rtype: C{int} """ if node1 < 0 or node1 > get_max_node(): raise ValueError(node1) # depends on [control=['if'], data=[]] if node2 < 0 or node2 > get_max_node(): raise ValueError(node2) # depends on [control=['if'], data=[]] return libnuma.numa_distance(node1, node2)
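On a NUMA-capable Linux host with the libnuma bindings loaded, usage is a plain call; the cross-node value depends on the machine's topology:

print(get_distance(0, 0))    # 10 -- a node is at distance 10 from itself
# print(get_distance(0, 1))  # e.g. 20 or 21 on a typical two-socket box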
def aaa_config_aaa_authentication_login_first(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") aaa_config = ET.SubElement(config, "aaa-config", xmlns="urn:brocade.com:mgmt:brocade-aaa") aaa = ET.SubElement(aaa_config, "aaa") authentication = ET.SubElement(aaa, "authentication") login = ET.SubElement(authentication, "login") first = ET.SubElement(login, "first") first.text = kwargs.pop('first') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[aaa_config_aaa_authentication_login_first, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[aaa_config] assign[=] call[name[ET].SubElement, parameter[name[config], constant[aaa-config]]] variable[aaa] assign[=] call[name[ET].SubElement, parameter[name[aaa_config], constant[aaa]]] variable[authentication] assign[=] call[name[ET].SubElement, parameter[name[aaa], constant[authentication]]] variable[login] assign[=] call[name[ET].SubElement, parameter[name[authentication], constant[login]]] variable[first] assign[=] call[name[ET].SubElement, parameter[name[login], constant[first]]] name[first].text assign[=] call[name[kwargs].pop, parameter[constant[first]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[aaa_config_aaa_authentication_login_first] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[aaa_config] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[aaa] = identifier[ET] . identifier[SubElement] ( identifier[aaa_config] , literal[string] ) identifier[authentication] = identifier[ET] . identifier[SubElement] ( identifier[aaa] , literal[string] ) identifier[login] = identifier[ET] . identifier[SubElement] ( identifier[authentication] , literal[string] ) identifier[first] = identifier[ET] . identifier[SubElement] ( identifier[login] , literal[string] ) identifier[first] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def aaa_config_aaa_authentication_login_first(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') aaa_config = ET.SubElement(config, 'aaa-config', xmlns='urn:brocade.com:mgmt:brocade-aaa') aaa = ET.SubElement(aaa_config, 'aaa') authentication = ET.SubElement(aaa, 'authentication') login = ET.SubElement(authentication, 'login') first = ET.SubElement(login, 'first') first.text = kwargs.pop('first') callback = kwargs.pop('callback', self._callback) return callback(config)
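Self-contained sketch of the XML document the builder above assembles; only the standard-library ElementTree is assumed, and the 'radius' value is a made-up placeholder.

import xml.etree.ElementTree as ET

config = ET.Element("config")
aaa_config = ET.SubElement(config, "aaa-config",
                           xmlns="urn:brocade.com:mgmt:brocade-aaa")
authentication = ET.SubElement(ET.SubElement(aaa_config, "aaa"), "authentication")
login = ET.SubElement(authentication, "login")
ET.SubElement(login, "first").text = "radius"  # placeholder value
print(ET.tostring(config).decode())
# <config><aaa-config xmlns="urn:brocade.com:mgmt:brocade-aaa"><aaa>
#   <authentication><login><first>radius</first></login></authentication>
# </aaa></aaa-config></config>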
def syscall_noreturn(self, func):
        '''
        Call a syscall method. A syscall method is executed outside of any routines, directly
        in the scheduler loop, which gives it chances to directly operate the event loop.
        See :py:meth:`vlcp.event.core.Scheduler.syscall`.
        '''
        matcher = self.scheduler.syscall(func)
        while not matcher:
            yield
            matcher = self.scheduler.syscall(func)
        ev, _ = yield (matcher,)
        return ev
def function[syscall_noreturn, parameter[self, func]]:
    constant[
        Call a syscall method. A syscall method is executed outside of any routines, directly
        in the scheduler loop, which gives it chances to directly operate the event loop.
        See :py:meth:`vlcp.event.core.Scheduler.syscall`.
        ]
    variable[matcher] assign[=] call[name[self].scheduler.syscall, parameter[name[func]]]
    while <ast.UnaryOp object at 0x7da20c6aa5f0> begin[:]
    <ast.Yield object at 0x7da20c6a88b0>
    variable[matcher] assign[=] call[name[self].scheduler.syscall, parameter[name[func]]]
    <ast.Tuple object at 0x7da20c6a9a80> assign[=] <ast.Yield object at 0x7da20c6aa3b0>
    return[name[ev]]
keyword[def] identifier[syscall_noreturn] ( identifier[self] , identifier[func] ): literal[string] identifier[matcher] = identifier[self] . identifier[scheduler] . identifier[syscall] ( identifier[func] ) keyword[while] keyword[not] identifier[matcher] : keyword[yield] identifier[matcher] = identifier[self] . identifier[scheduler] . identifier[syscall] ( identifier[func] ) identifier[ev] , identifier[_] = keyword[yield] ( identifier[matcher] ,) keyword[return] identifier[ev]
def syscall_noreturn(self, func): """ Call a syscall method. A syscall method is executed outside of any routines, directly in the scheduler loop, which gives it chances to directly operate the event loop. See :py:meth:`vlcp.event.core.Scheduler.syscall`. """ matcher = self.scheduler.syscall(func) while not matcher: yield matcher = self.scheduler.syscall(func) # depends on [control=['while'], data=[]] (ev, _) = (yield (matcher,)) return ev
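Toy driver (not the real vlcp scheduler) showing the yield/resume protocol the routine above relies on: the generator yields a tuple of matchers, is resumed via send() with an (event, matcher) pair, and its return value surfaces through StopIteration.

def toy_routine():
    ev, _ = yield ("matcher-a",)   # suspend, handing matchers to the loop
    return ev

r = toy_routine()
matchers = next(r)                 # -> ('matcher-a',)
try:
    r.send(("some-event", matchers[0]))
except StopIteration as stop:
    print(stop.value)              # -> 'some-event'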
def unregister_model(self, model): """ Unregisters the given model. """ if model not in self._model_registry: raise NotRegistered('The model %s is not registered' % model) del self._model_registry[model]
def function[unregister_model, parameter[self, model]]: constant[ Unregisters the given model. ] if compare[name[model] <ast.NotIn object at 0x7da2590d7190> name[self]._model_registry] begin[:] <ast.Raise object at 0x7da20e957550> <ast.Delete object at 0x7da20e955c60>
keyword[def] identifier[unregister_model] ( identifier[self] , identifier[model] ): literal[string] keyword[if] identifier[model] keyword[not] keyword[in] identifier[self] . identifier[_model_registry] : keyword[raise] identifier[NotRegistered] ( literal[string] % identifier[model] ) keyword[del] identifier[self] . identifier[_model_registry] [ identifier[model] ]
def unregister_model(self, model): """ Unregisters the given model. """ if model not in self._model_registry: raise NotRegistered('The model %s is not registered' % model) # depends on [control=['if'], data=['model']] del self._model_registry[model]
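Minimal self-contained sketch of the registry contract above; NotRegistered and Registry here are illustrative stand-ins, not the project's real classes.

class NotRegistered(Exception):
    pass

class Registry:
    def __init__(self):
        self._model_registry = {}

    def unregister_model(self, model):
        if model not in self._model_registry:
            raise NotRegistered('The model %s is not registered' % model)
        del self._model_registry[model]

reg = Registry()
reg._model_registry['Article'] = object()   # pretend it was registered
reg.unregister_model('Article')             # fine the first time
# reg.unregister_model('Article')           # would raise NotRegistered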
def get_month_start_end_day(): """ Get the month start date and end date """ t = date.today() n = mdays[t.month] return (date(t.year, t.month, 1), date(t.year, t.month, n))
def function[get_month_start_end_day, parameter[]]: constant[ Get the month start date and end date ] variable[t] assign[=] call[name[date].today, parameter[]] variable[n] assign[=] call[name[mdays]][name[t].month] return[tuple[[<ast.Call object at 0x7da1b0ab9180>, <ast.Call object at 0x7da1b0ab8f10>]]]
keyword[def] identifier[get_month_start_end_day] (): literal[string] identifier[t] = identifier[date] . identifier[today] () identifier[n] = identifier[mdays] [ identifier[t] . identifier[month] ] keyword[return] ( identifier[date] ( identifier[t] . identifier[year] , identifier[t] . identifier[month] , literal[int] ), identifier[date] ( identifier[t] . identifier[year] , identifier[t] . identifier[month] , identifier[n] ))
def get_month_start_end_day(): """ Get the month start date and end date """ t = date.today() n = mdays[t.month] return (date(t.year, t.month, 1), date(t.year, t.month, n))
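Usage sketch for the function above; note that `mdays` from the calendar module is not leap-year aware, so February is always reported as 28 days.

from calendar import mdays
from datetime import date

start, end = get_month_start_end_day()
print(start, end)   # e.g. 2024-03-01 2024-03-31 when run in March 2024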
def _instance_from_process(self, process): """Default converter from psutil.Process to process instance classes for subclassing.""" return ProcessManager(name=process.name(), pid=process.pid, process_name=process.name(), metadata_base_dir=self._metadata_base_dir)
def function[_instance_from_process, parameter[self, process]]: constant[Default converter from psutil.Process to process instance classes for subclassing.] return[call[name[ProcessManager], parameter[]]]
keyword[def] identifier[_instance_from_process] ( identifier[self] , identifier[process] ): literal[string] keyword[return] identifier[ProcessManager] ( identifier[name] = identifier[process] . identifier[name] (), identifier[pid] = identifier[process] . identifier[pid] , identifier[process_name] = identifier[process] . identifier[name] (), identifier[metadata_base_dir] = identifier[self] . identifier[_metadata_base_dir] )
def _instance_from_process(self, process): """Default converter from psutil.Process to process instance classes for subclassing.""" return ProcessManager(name=process.name(), pid=process.pid, process_name=process.name(), metadata_base_dir=self._metadata_base_dir)
def main(self): """ Generates an output string by replacing the keywords in the format string with the corresponding values from a submission dictionary. """ self.manage_submissions() out_string = self.options['format'] # Pop submissions until one has len(title) <= max-chars length = float('inf') while length > self.options['max_chars']: self.selected_submission = self.submissions.pop() length = len(self.selected_submission['title']) for k, v in self.selected_submission.items(): out_string = out_string.replace(k, self.h.unescape(str(v))) return self.output(out_string, out_string)
def function[main, parameter[self]]: constant[ Generates an output string by replacing the keywords in the format string with the corresponding values from a submission dictionary. ] call[name[self].manage_submissions, parameter[]] variable[out_string] assign[=] call[name[self].options][constant[format]] variable[length] assign[=] call[name[float], parameter[constant[inf]]] while compare[name[length] greater[>] call[name[self].options][constant[max_chars]]] begin[:] name[self].selected_submission assign[=] call[name[self].submissions.pop, parameter[]] variable[length] assign[=] call[name[len], parameter[call[name[self].selected_submission][constant[title]]]] for taget[tuple[[<ast.Name object at 0x7da18f58dc00>, <ast.Name object at 0x7da18f58f640>]]] in starred[call[name[self].selected_submission.items, parameter[]]] begin[:] variable[out_string] assign[=] call[name[out_string].replace, parameter[name[k], call[name[self].h.unescape, parameter[call[name[str], parameter[name[v]]]]]]] return[call[name[self].output, parameter[name[out_string], name[out_string]]]]
keyword[def] identifier[main] ( identifier[self] ): literal[string] identifier[self] . identifier[manage_submissions] () identifier[out_string] = identifier[self] . identifier[options] [ literal[string] ] identifier[length] = identifier[float] ( literal[string] ) keyword[while] identifier[length] > identifier[self] . identifier[options] [ literal[string] ]: identifier[self] . identifier[selected_submission] = identifier[self] . identifier[submissions] . identifier[pop] () identifier[length] = identifier[len] ( identifier[self] . identifier[selected_submission] [ literal[string] ]) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[selected_submission] . identifier[items] (): identifier[out_string] = identifier[out_string] . identifier[replace] ( identifier[k] , identifier[self] . identifier[h] . identifier[unescape] ( identifier[str] ( identifier[v] ))) keyword[return] identifier[self] . identifier[output] ( identifier[out_string] , identifier[out_string] )
def main(self): """ Generates an output string by replacing the keywords in the format string with the corresponding values from a submission dictionary. """ self.manage_submissions() out_string = self.options['format'] # Pop submissions until one has len(title) <= max-chars length = float('inf') while length > self.options['max_chars']: self.selected_submission = self.submissions.pop() length = len(self.selected_submission['title']) # depends on [control=['while'], data=['length']] for (k, v) in self.selected_submission.items(): out_string = out_string.replace(k, self.h.unescape(str(v))) # depends on [control=['for'], data=[]] return self.output(out_string, out_string)
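Isolated illustration of the substitution loop above (without the HTML-unescaping step): each key found in the format string is replaced by the submission's value.

submission = {'title': 'TIL about str.replace', 'score': 42}
out_string = 'title (score)'        # stand-in for self.options['format']
for k, v in submission.items():
    out_string = out_string.replace(k, str(v))
print(out_string)                   # -> 'TIL about str.replace (42)'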
def save_tabs_when_changed(func): """Decorator for save-tabs-when-changed """ def wrapper(*args, **kwargs): func(*args, **kwargs) log.debug("mom, I've been called: %s %s", func.__name__, func) # Find me the Guake! clsname = args[0].__class__.__name__ g = None if clsname == 'Guake': g = args[0] elif getattr(args[0], 'get_guake', None): g = args[0].get_guake() elif getattr(args[0], 'get_notebook', None): g = args[0].get_notebook().guake elif getattr(args[0], 'guake', None): g = args[0].guake elif getattr(args[0], 'notebook', None): g = args[0].notebook.guake # Tada! if g and g.settings.general.get_boolean('save-tabs-when-changed'): g.save_tabs() return wrapper
def function[save_tabs_when_changed, parameter[func]]: constant[Decorator for save-tabs-when-changed ] def function[wrapper, parameter[]]: call[name[func], parameter[<ast.Starred object at 0x7da20c76e590>]] call[name[log].debug, parameter[constant[mom, I've been called: %s %s], name[func].__name__, name[func]]] variable[clsname] assign[=] call[name[args]][constant[0]].__class__.__name__ variable[g] assign[=] constant[None] if compare[name[clsname] equal[==] constant[Guake]] begin[:] variable[g] assign[=] call[name[args]][constant[0]] if <ast.BoolOp object at 0x7da20c6aa170> begin[:] call[name[g].save_tabs, parameter[]] return[name[wrapper]]
keyword[def] identifier[save_tabs_when_changed] ( identifier[func] ): literal[string] keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ): identifier[func] (* identifier[args] ,** identifier[kwargs] ) identifier[log] . identifier[debug] ( literal[string] , identifier[func] . identifier[__name__] , identifier[func] ) identifier[clsname] = identifier[args] [ literal[int] ]. identifier[__class__] . identifier[__name__] identifier[g] = keyword[None] keyword[if] identifier[clsname] == literal[string] : identifier[g] = identifier[args] [ literal[int] ] keyword[elif] identifier[getattr] ( identifier[args] [ literal[int] ], literal[string] , keyword[None] ): identifier[g] = identifier[args] [ literal[int] ]. identifier[get_guake] () keyword[elif] identifier[getattr] ( identifier[args] [ literal[int] ], literal[string] , keyword[None] ): identifier[g] = identifier[args] [ literal[int] ]. identifier[get_notebook] (). identifier[guake] keyword[elif] identifier[getattr] ( identifier[args] [ literal[int] ], literal[string] , keyword[None] ): identifier[g] = identifier[args] [ literal[int] ]. identifier[guake] keyword[elif] identifier[getattr] ( identifier[args] [ literal[int] ], literal[string] , keyword[None] ): identifier[g] = identifier[args] [ literal[int] ]. identifier[notebook] . identifier[guake] keyword[if] identifier[g] keyword[and] identifier[g] . identifier[settings] . identifier[general] . identifier[get_boolean] ( literal[string] ): identifier[g] . identifier[save_tabs] () keyword[return] identifier[wrapper]
def save_tabs_when_changed(func): """Decorator for save-tabs-when-changed """ def wrapper(*args, **kwargs): func(*args, **kwargs) log.debug("mom, I've been called: %s %s", func.__name__, func) # Find me the Guake! clsname = args[0].__class__.__name__ g = None if clsname == 'Guake': g = args[0] # depends on [control=['if'], data=[]] elif getattr(args[0], 'get_guake', None): g = args[0].get_guake() # depends on [control=['if'], data=[]] elif getattr(args[0], 'get_notebook', None): g = args[0].get_notebook().guake # depends on [control=['if'], data=[]] elif getattr(args[0], 'guake', None): g = args[0].guake # depends on [control=['if'], data=[]] elif getattr(args[0], 'notebook', None): g = args[0].notebook.guake # depends on [control=['if'], data=[]] # Tada! if g and g.settings.general.get_boolean('save-tabs-when-changed'): g.save_tabs() # depends on [control=['if'], data=[]] return wrapper
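Self-contained sketch exercising the decorator above with minimal stand-ins for Guake's objects; the Fake* classes and the module-level `log` are assumptions for illustration only.

import logging
log = logging.getLogger(__name__)

class FakeGeneral:
    def get_boolean(self, key):
        return True               # pretend save-tabs-when-changed is enabled

class FakeSettings:
    general = FakeGeneral()

class FakeGuake:
    settings = FakeSettings()
    def save_tabs(self):
        print("tabs saved")

class Tab:
    guake = FakeGuake()           # found via getattr(args[0], 'guake', None)

    @save_tabs_when_changed
    def rename(self):
        print("tab renamed")

Tab().rename()                    # prints "tab renamed" then "tabs saved"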
def pulled_up(self, function): """ Returns a bool indicating whether a physical pull-up is attached to the pin supporting the specified *function*. Either :exc:`PinNoPins` or :exc:`PinMultiplePins` may be raised if the function is not associated with a single pin. :param str function: The pin function you wish to determine pull-up for. Usually this is something like "GPIO9" for Broadcom GPIO pin 9. """ try: header, number = self.physical_pin(function) except PinNoPins: return False else: return self.headers[header].pins[number].pull_up
def function[pulled_up, parameter[self, function]]: constant[ Returns a bool indicating whether a physical pull-up is attached to the pin supporting the specified *function*. Either :exc:`PinNoPins` or :exc:`PinMultiplePins` may be raised if the function is not associated with a single pin. :param str function: The pin function you wish to determine pull-up for. Usually this is something like "GPIO9" for Broadcom GPIO pin 9. ] <ast.Try object at 0x7da18f09cbe0>
keyword[def] identifier[pulled_up] ( identifier[self] , identifier[function] ): literal[string] keyword[try] : identifier[header] , identifier[number] = identifier[self] . identifier[physical_pin] ( identifier[function] ) keyword[except] identifier[PinNoPins] : keyword[return] keyword[False] keyword[else] : keyword[return] identifier[self] . identifier[headers] [ identifier[header] ]. identifier[pins] [ identifier[number] ]. identifier[pull_up]
def pulled_up(self, function): """ Returns a bool indicating whether a physical pull-up is attached to the pin supporting the specified *function*. Either :exc:`PinNoPins` or :exc:`PinMultiplePins` may be raised if the function is not associated with a single pin. :param str function: The pin function you wish to determine pull-up for. Usually this is something like "GPIO9" for Broadcom GPIO pin 9. """ try: (header, number) = self.physical_pin(function) # depends on [control=['try'], data=[]] except PinNoPins: return False # depends on [control=['except'], data=[]] else: return self.headers[header].pins[number].pull_up
def execute(self, input_data): ''' Execute the VTQuery worker ''' md5 = input_data['meta']['md5'] response = requests.get('http://www.virustotal.com/vtapi/v2/file/report', params={'apikey':self.apikey,'resource':md5, 'allinfo':1}) # Make sure we got a json blob back try: vt_output = response.json() except ValueError: return {'vt_error': 'VirusTotal Query Error, no valid response... past per min quota?'} # Just pull some of the fields output = {field:vt_output[field] for field in vt_output.keys() if field not in self.exclude} # Check for not-found not_found = False if output else True # Add in file_type output['file_type'] = input_data['meta']['file_type'] # Toss back a not found if not_found: output['not_found'] = True return output # Organize the scans fields scan_results = collections.Counter() for scan in vt_output['scans'].values(): if 'result' in scan: if scan['result']: scan_results[scan['result']] += 1 output['scan_results'] = scan_results.most_common(5) return output
def function[execute, parameter[self, input_data]]: constant[ Execute the VTQuery worker ] variable[md5] assign[=] call[call[name[input_data]][constant[meta]]][constant[md5]] variable[response] assign[=] call[name[requests].get, parameter[constant[http://www.virustotal.com/vtapi/v2/file/report]]] <ast.Try object at 0x7da18bccb910> variable[output] assign[=] <ast.DictComp object at 0x7da18bcc9ea0> variable[not_found] assign[=] <ast.IfExp object at 0x7da18bccb400> call[name[output]][constant[file_type]] assign[=] call[call[name[input_data]][constant[meta]]][constant[file_type]] if name[not_found] begin[:] call[name[output]][constant[not_found]] assign[=] constant[True] return[name[output]] variable[scan_results] assign[=] call[name[collections].Counter, parameter[]] for taget[name[scan]] in starred[call[call[name[vt_output]][constant[scans]].values, parameter[]]] begin[:] if compare[constant[result] in name[scan]] begin[:] if call[name[scan]][constant[result]] begin[:] <ast.AugAssign object at 0x7da18bccabf0> call[name[output]][constant[scan_results]] assign[=] call[name[scan_results].most_common, parameter[constant[5]]] return[name[output]]
keyword[def] identifier[execute] ( identifier[self] , identifier[input_data] ): literal[string] identifier[md5] = identifier[input_data] [ literal[string] ][ literal[string] ] identifier[response] = identifier[requests] . identifier[get] ( literal[string] , identifier[params] ={ literal[string] : identifier[self] . identifier[apikey] , literal[string] : identifier[md5] , literal[string] : literal[int] }) keyword[try] : identifier[vt_output] = identifier[response] . identifier[json] () keyword[except] identifier[ValueError] : keyword[return] { literal[string] : literal[string] } identifier[output] ={ identifier[field] : identifier[vt_output] [ identifier[field] ] keyword[for] identifier[field] keyword[in] identifier[vt_output] . identifier[keys] () keyword[if] identifier[field] keyword[not] keyword[in] identifier[self] . identifier[exclude] } identifier[not_found] = keyword[False] keyword[if] identifier[output] keyword[else] keyword[True] identifier[output] [ literal[string] ]= identifier[input_data] [ literal[string] ][ literal[string] ] keyword[if] identifier[not_found] : identifier[output] [ literal[string] ]= keyword[True] keyword[return] identifier[output] identifier[scan_results] = identifier[collections] . identifier[Counter] () keyword[for] identifier[scan] keyword[in] identifier[vt_output] [ literal[string] ]. identifier[values] (): keyword[if] literal[string] keyword[in] identifier[scan] : keyword[if] identifier[scan] [ literal[string] ]: identifier[scan_results] [ identifier[scan] [ literal[string] ]]+= literal[int] identifier[output] [ literal[string] ]= identifier[scan_results] . identifier[most_common] ( literal[int] ) keyword[return] identifier[output]
def execute(self, input_data): """ Execute the VTQuery worker """ md5 = input_data['meta']['md5'] response = requests.get('http://www.virustotal.com/vtapi/v2/file/report', params={'apikey': self.apikey, 'resource': md5, 'allinfo': 1}) # Make sure we got a json blob back try: vt_output = response.json() # depends on [control=['try'], data=[]] except ValueError: return {'vt_error': 'VirusTotal Query Error, no valid response... past per min quota?'} # depends on [control=['except'], data=[]] # Just pull some of the fields output = {field: vt_output[field] for field in vt_output.keys() if field not in self.exclude} # Check for not-found not_found = False if output else True # Add in file_type output['file_type'] = input_data['meta']['file_type'] # Toss back a not found if not_found: output['not_found'] = True return output # depends on [control=['if'], data=[]] # Organize the scans fields scan_results = collections.Counter() for scan in vt_output['scans'].values(): if 'result' in scan: if scan['result']: scan_results[scan['result']] += 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['scan']] # depends on [control=['for'], data=['scan']] output['scan_results'] = scan_results.most_common(5) return output
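Isolated demo of the verdict-tallying step above: identical scanner results are counted and the five most common are kept.

import collections

scans = {'EngineA': {'result': 'Trojan.Gen'},
         'EngineB': {'result': 'Trojan.Gen'},
         'EngineC': {'result': None},
         'EngineD': {'result': 'Adware.X'}}
scan_results = collections.Counter()
for scan in scans.values():
    if 'result' in scan and scan['result']:
        scan_results[scan['result']] += 1
print(scan_results.most_common(5))  # -> [('Trojan.Gen', 2), ('Adware.X', 1)]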
def __get_total_response_time(meta_datas_expanded): """ calculate total response time of all meta_datas """ try: response_time = 0 for meta_data in meta_datas_expanded: response_time += meta_data["stat"]["response_time_ms"] return "{:.2f}".format(response_time) except TypeError: # failure exists return "N/A"
def function[__get_total_response_time, parameter[meta_datas_expanded]]: constant[ calculate total response time of all meta_datas ] <ast.Try object at 0x7da18ede4d60>
keyword[def] identifier[__get_total_response_time] ( identifier[meta_datas_expanded] ): literal[string] keyword[try] : identifier[response_time] = literal[int] keyword[for] identifier[meta_data] keyword[in] identifier[meta_datas_expanded] : identifier[response_time] += identifier[meta_data] [ literal[string] ][ literal[string] ] keyword[return] literal[string] . identifier[format] ( identifier[response_time] ) keyword[except] identifier[TypeError] : keyword[return] literal[string]
def __get_total_response_time(meta_datas_expanded): """ calculate total response time of all meta_datas """ try: response_time = 0 for meta_data in meta_datas_expanded: response_time += meta_data['stat']['response_time_ms'] # depends on [control=['for'], data=['meta_data']] return '{:.2f}'.format(response_time) # depends on [control=['try'], data=[]] except TypeError: # failure exists return 'N/A' # depends on [control=['except'], data=[]]
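Usage sketch with made-up timings; a None measurement trips the TypeError fallback.

ok = [{'stat': {'response_time_ms': 12.5}},
      {'stat': {'response_time_ms': 7.25}}]
bad = [{'stat': {'response_time_ms': None}}]
print(__get_total_response_time(ok))    # -> '19.75'
print(__get_total_response_time(bad))   # -> 'N/A'  (0 + None -> TypeError)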
def unregister_presence_callback(self, type_, from_): """ Unregister a callback previously registered with :meth:`register_presence_callback`. :param type_: Presence type to listen for. :type type_: :class:`~.PresenceType` :param from_: Sender JID to listen for, or :data:`None` for a wildcard match. :type from_: :class:`~aioxmpp.JID` or :data:`None`. :raises KeyError: if no callback is currently registered for the given ``(type_, from_)`` pair :raises ValueError: if `type_` is not a valid :class:`~.PresenceType` (and cannot be cast to a :class:`~.PresenceType`) The match is made on the exact pair; it is not possible to unregister arbitrary listeners by passing :data:`None` to the `from_` arguments (i.e. the wildcarding only applies for receiving stanzas, not for unregistering callbacks; unregistering a wildcard match with `from_` set to :data:`None` is of course possible). .. versionchanged:: 0.7 The `type_` argument is now supposed to be a :class:`~.PresenceType` member. .. deprecated:: 0.7 Passing a :class:`str` as `type_` argument is deprecated and will raise a :class:`TypeError` as of the 1.0 release. See the Changelog for :ref:`api-changelog-0.7` for further details on how to upgrade your code efficiently. .. deprecated:: 0.9 This method has been deprecated. It is recommended to use :class:`aioxmpp.PresenceClient` instead. """ type_ = self._coerce_enum(type_, structs.PresenceType) warnings.warn( "unregister_presence_callback is deprecated; use " "aioxmpp.dispatcher.SimplePresenceDispatcher or " "aioxmpp.PresenceClient instead", DeprecationWarning, stacklevel=2 ) self._xxx_presence_dispatcher.unregister_callback( type_, from_, )
def function[unregister_presence_callback, parameter[self, type_, from_]]: constant[ Unregister a callback previously registered with :meth:`register_presence_callback`. :param type_: Presence type to listen for. :type type_: :class:`~.PresenceType` :param from_: Sender JID to listen for, or :data:`None` for a wildcard match. :type from_: :class:`~aioxmpp.JID` or :data:`None`. :raises KeyError: if no callback is currently registered for the given ``(type_, from_)`` pair :raises ValueError: if `type_` is not a valid :class:`~.PresenceType` (and cannot be cast to a :class:`~.PresenceType`) The match is made on the exact pair; it is not possible to unregister arbitrary listeners by passing :data:`None` to the `from_` arguments (i.e. the wildcarding only applies for receiving stanzas, not for unregistering callbacks; unregistering a wildcard match with `from_` set to :data:`None` is of course possible). .. versionchanged:: 0.7 The `type_` argument is now supposed to be a :class:`~.PresenceType` member. .. deprecated:: 0.7 Passing a :class:`str` as `type_` argument is deprecated and will raise a :class:`TypeError` as of the 1.0 release. See the Changelog for :ref:`api-changelog-0.7` for further details on how to upgrade your code efficiently. .. deprecated:: 0.9 This method has been deprecated. It is recommended to use :class:`aioxmpp.PresenceClient` instead. ] variable[type_] assign[=] call[name[self]._coerce_enum, parameter[name[type_], name[structs].PresenceType]] call[name[warnings].warn, parameter[constant[unregister_presence_callback is deprecated; use aioxmpp.dispatcher.SimplePresenceDispatcher or aioxmpp.PresenceClient instead], name[DeprecationWarning]]] call[name[self]._xxx_presence_dispatcher.unregister_callback, parameter[name[type_], name[from_]]]
keyword[def] identifier[unregister_presence_callback] ( identifier[self] , identifier[type_] , identifier[from_] ): literal[string] identifier[type_] = identifier[self] . identifier[_coerce_enum] ( identifier[type_] , identifier[structs] . identifier[PresenceType] ) identifier[warnings] . identifier[warn] ( literal[string] literal[string] literal[string] , identifier[DeprecationWarning] , identifier[stacklevel] = literal[int] ) identifier[self] . identifier[_xxx_presence_dispatcher] . identifier[unregister_callback] ( identifier[type_] , identifier[from_] , )
def unregister_presence_callback(self, type_, from_): """ Unregister a callback previously registered with :meth:`register_presence_callback`. :param type_: Presence type to listen for. :type type_: :class:`~.PresenceType` :param from_: Sender JID to listen for, or :data:`None` for a wildcard match. :type from_: :class:`~aioxmpp.JID` or :data:`None`. :raises KeyError: if no callback is currently registered for the given ``(type_, from_)`` pair :raises ValueError: if `type_` is not a valid :class:`~.PresenceType` (and cannot be cast to a :class:`~.PresenceType`) The match is made on the exact pair; it is not possible to unregister arbitrary listeners by passing :data:`None` to the `from_` arguments (i.e. the wildcarding only applies for receiving stanzas, not for unregistering callbacks; unregistering a wildcard match with `from_` set to :data:`None` is of course possible). .. versionchanged:: 0.7 The `type_` argument is now supposed to be a :class:`~.PresenceType` member. .. deprecated:: 0.7 Passing a :class:`str` as `type_` argument is deprecated and will raise a :class:`TypeError` as of the 1.0 release. See the Changelog for :ref:`api-changelog-0.7` for further details on how to upgrade your code efficiently. .. deprecated:: 0.9 This method has been deprecated. It is recommended to use :class:`aioxmpp.PresenceClient` instead. """ type_ = self._coerce_enum(type_, structs.PresenceType) warnings.warn('unregister_presence_callback is deprecated; use aioxmpp.dispatcher.SimplePresenceDispatcher or aioxmpp.PresenceClient instead', DeprecationWarning, stacklevel=2) self._xxx_presence_dispatcher.unregister_callback(type_, from_)
def _process_pubs(self, limit): """ Flybase publications. :param limit: :return: """ if self.test_mode: graph = self.testgraph else: graph = self.graph model = Model(graph) line_counter = 0 raw = '/'.join((self.rawdir, 'pub')) LOG.info("building labels for pubs") with open(raw, 'r') as f: f.readline() # read the header row; skip filereader = csv.reader(f, delimiter='\t', quotechar='\"') for line in filereader: (pub_id, title, volumetitle, volume, series_name, issue, pyear, pages, miniref, type_id, is_obsolete, publisher, pubplace, uniquename) = line # 2 12153979 1 2 FBst0000002 w[*]; betaTub60D[2] Kr[If-1]/CyO 10670 # if self.test_mode is True: # if int(object_key) not in self.test_keys.get('genotype'): # continue pub_num = pub_id pub_id = 'FlyBase:'+uniquename.strip() self.idhash['publication'][pub_num] = pub_id # TODO figure out the type of pub by type_id if not re.match(r'(FBrf|multi)', uniquename): continue line_counter += 1 reference = Reference(graph, pub_id) if title != '': reference.setTitle(title) if pyear != '': reference.setYear(str(pyear)) if miniref != '': reference.setShortCitation(miniref) if not self.test_mode and limit is not None and line_counter > limit: pass else: if self.test_mode and int(pub_num) not in self.test_keys['pub']: continue if is_obsolete == 't': model.addDeprecatedIndividual(pub_id) else: reference.addRefToGraph() return
def function[_process_pubs, parameter[self, limit]]: constant[ Flybase publications. :param limit: :return: ] if name[self].test_mode begin[:] variable[graph] assign[=] name[self].testgraph variable[model] assign[=] call[name[Model], parameter[name[graph]]] variable[line_counter] assign[=] constant[0] variable[raw] assign[=] call[constant[/].join, parameter[tuple[[<ast.Attribute object at 0x7da2041d8280>, <ast.Constant object at 0x7da2041d9090>]]]] call[name[LOG].info, parameter[constant[building labels for pubs]]] with call[name[open], parameter[name[raw], constant[r]]] begin[:] call[name[f].readline, parameter[]] variable[filereader] assign[=] call[name[csv].reader, parameter[name[f]]] for taget[name[line]] in starred[name[filereader]] begin[:] <ast.Tuple object at 0x7da2041dbdf0> assign[=] name[line] variable[pub_num] assign[=] name[pub_id] variable[pub_id] assign[=] binary_operation[constant[FlyBase:] + call[name[uniquename].strip, parameter[]]] call[call[name[self].idhash][constant[publication]]][name[pub_num]] assign[=] name[pub_id] if <ast.UnaryOp object at 0x7da2041da3b0> begin[:] continue <ast.AugAssign object at 0x7da2041d8b50> variable[reference] assign[=] call[name[Reference], parameter[name[graph], name[pub_id]]] if compare[name[title] not_equal[!=] constant[]] begin[:] call[name[reference].setTitle, parameter[name[title]]] if compare[name[pyear] not_equal[!=] constant[]] begin[:] call[name[reference].setYear, parameter[call[name[str], parameter[name[pyear]]]]] if compare[name[miniref] not_equal[!=] constant[]] begin[:] call[name[reference].setShortCitation, parameter[name[miniref]]] if <ast.BoolOp object at 0x7da2041db310> begin[:] pass return[None]
keyword[def] identifier[_process_pubs] ( identifier[self] , identifier[limit] ): literal[string] keyword[if] identifier[self] . identifier[test_mode] : identifier[graph] = identifier[self] . identifier[testgraph] keyword[else] : identifier[graph] = identifier[self] . identifier[graph] identifier[model] = identifier[Model] ( identifier[graph] ) identifier[line_counter] = literal[int] identifier[raw] = literal[string] . identifier[join] (( identifier[self] . identifier[rawdir] , literal[string] )) identifier[LOG] . identifier[info] ( literal[string] ) keyword[with] identifier[open] ( identifier[raw] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[readline] () identifier[filereader] = identifier[csv] . identifier[reader] ( identifier[f] , identifier[delimiter] = literal[string] , identifier[quotechar] = literal[string] ) keyword[for] identifier[line] keyword[in] identifier[filereader] : ( identifier[pub_id] , identifier[title] , identifier[volumetitle] , identifier[volume] , identifier[series_name] , identifier[issue] , identifier[pyear] , identifier[pages] , identifier[miniref] , identifier[type_id] , identifier[is_obsolete] , identifier[publisher] , identifier[pubplace] , identifier[uniquename] )= identifier[line] identifier[pub_num] = identifier[pub_id] identifier[pub_id] = literal[string] + identifier[uniquename] . identifier[strip] () identifier[self] . identifier[idhash] [ literal[string] ][ identifier[pub_num] ]= identifier[pub_id] keyword[if] keyword[not] identifier[re] . identifier[match] ( literal[string] , identifier[uniquename] ): keyword[continue] identifier[line_counter] += literal[int] identifier[reference] = identifier[Reference] ( identifier[graph] , identifier[pub_id] ) keyword[if] identifier[title] != literal[string] : identifier[reference] . identifier[setTitle] ( identifier[title] ) keyword[if] identifier[pyear] != literal[string] : identifier[reference] . identifier[setYear] ( identifier[str] ( identifier[pyear] )) keyword[if] identifier[miniref] != literal[string] : identifier[reference] . identifier[setShortCitation] ( identifier[miniref] ) keyword[if] keyword[not] identifier[self] . identifier[test_mode] keyword[and] identifier[limit] keyword[is] keyword[not] keyword[None] keyword[and] identifier[line_counter] > identifier[limit] : keyword[pass] keyword[else] : keyword[if] identifier[self] . identifier[test_mode] keyword[and] identifier[int] ( identifier[pub_num] ) keyword[not] keyword[in] identifier[self] . identifier[test_keys] [ literal[string] ]: keyword[continue] keyword[if] identifier[is_obsolete] == literal[string] : identifier[model] . identifier[addDeprecatedIndividual] ( identifier[pub_id] ) keyword[else] : identifier[reference] . identifier[addRefToGraph] () keyword[return]
def _process_pubs(self, limit): """ Flybase publications. :param limit: :return: """ if self.test_mode: graph = self.testgraph # depends on [control=['if'], data=[]] else: graph = self.graph model = Model(graph) line_counter = 0 raw = '/'.join((self.rawdir, 'pub')) LOG.info('building labels for pubs') with open(raw, 'r') as f: f.readline() # read the header row; skip filereader = csv.reader(f, delimiter='\t', quotechar='"') for line in filereader: (pub_id, title, volumetitle, volume, series_name, issue, pyear, pages, miniref, type_id, is_obsolete, publisher, pubplace, uniquename) = line # 2 12153979 1 2 FBst0000002 w[*]; betaTub60D[2] Kr[If-1]/CyO 10670 # if self.test_mode is True: # if int(object_key) not in self.test_keys.get('genotype'): # continue pub_num = pub_id pub_id = 'FlyBase:' + uniquename.strip() self.idhash['publication'][pub_num] = pub_id # TODO figure out the type of pub by type_id if not re.match('(FBrf|multi)', uniquename): continue # depends on [control=['if'], data=[]] line_counter += 1 reference = Reference(graph, pub_id) if title != '': reference.setTitle(title) # depends on [control=['if'], data=['title']] if pyear != '': reference.setYear(str(pyear)) # depends on [control=['if'], data=['pyear']] if miniref != '': reference.setShortCitation(miniref) # depends on [control=['if'], data=['miniref']] if not self.test_mode and limit is not None and (line_counter > limit): pass # depends on [control=['if'], data=[]] else: if self.test_mode and int(pub_num) not in self.test_keys['pub']: continue # depends on [control=['if'], data=[]] if is_obsolete == 't': model.addDeprecatedIndividual(pub_id) # depends on [control=['if'], data=[]] else: reference.addRefToGraph() # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['f']] return
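Minimal illustration of the tab-delimited parsing used above, with a fabricated two-line file standing in for the FlyBase pub dump.

import csv
import io

raw = "pub_id\ttitle\tminiref\n12\tA fly paper\tBloggs, 1999\n"
f = io.StringIO(raw)
f.readline()   # read the header row; skip, as the loader does
for line in csv.reader(f, delimiter='\t', quotechar='"'):
    print(line)   # -> ['12', 'A fly paper', 'Bloggs, 1999']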
def send_invoice(token, chat_id, title, description, invoice_payload, provider_token, currency, prices, start_parameter, photo_url=None, photo_size=None, photo_width=None, photo_height=None, need_name=None, need_phone_number=None, need_email=None, need_shipping_address=None, is_flexible=None, disable_notification=None, reply_to_message_id=None, reply_markup=None, provider_data=None): """ Use this method to send invoices. On success, the sent Message is returned. :param token: Bot's token (you don't need to fill this) :param chat_id: Unique identifier for the target private chat :param title: Product name :param description: Product description :param invoice_payload: Bot-defined invoice payload, 1-128 bytes. This will not be displayed to the user, use for your internal processes. :param provider_token: Payments provider token, obtained via @Botfather :param currency: Three-letter ISO 4217 currency code, see https://core.telegram.org/bots/payments#supported-currencies :param prices: Price breakdown, a list of components (e.g. product price, tax, discount, delivery cost, delivery tax, bonus, etc.) :param start_parameter: Unique deep-linking parameter that can be used to generate this invoice when used as a start parameter :param photo_url: URL of the product photo for the invoice. Can be a photo of the goods or a marketing image for a service. People like it better when they see what they are paying for. :param photo_size: Photo size :param photo_width: Photo width :param photo_height: Photo height :param need_name: Pass True, if you require the user's full name to complete the order :param need_phone_number: Pass True, if you require the user's phone number to complete the order :param need_email: Pass True, if you require the user's email to complete the order :param need_shipping_address: Pass True, if you require the user's shipping address to complete the order :param is_flexible: Pass True, if the final price depends on the shipping method :param disable_notification: Sends the message silently. Users will receive a notification with no sound. :param reply_to_message_id: If the message is a reply, ID of the original message :param reply_markup: A JSON-serialized object for an inline keyboard. If empty, one 'Pay total price' button will be shown. If not empty, the first button must be a Pay button :param provider_data: :return: """ method_url = r'sendInvoice' payload = {'chat_id': chat_id, 'title': title, 'description': description, 'payload': invoice_payload, 'provider_token': provider_token, 'start_parameter': start_parameter, 'currency': currency, 'prices': _convert_list_json_serializable(prices)} if photo_url: payload['photo_url'] = photo_url if photo_size: payload['photo_size'] = photo_size if photo_width: payload['photo_width'] = photo_width if photo_height: payload['photo_height'] = photo_height if need_name: payload['need_name'] = need_name if need_phone_number: payload['need_phone_number'] = need_phone_number if need_email: payload['need_email'] = need_email if need_shipping_address: payload['need_shipping_address'] = need_shipping_address if is_flexible: payload['is_flexible'] = is_flexible if disable_notification: payload['disable_notification'] = disable_notification if reply_to_message_id: payload['reply_to_message_id'] = reply_to_message_id if reply_markup: payload['reply_markup'] = _convert_markup(reply_markup) if provider_data: payload['provider_data'] = provider_data return _make_request(token, method_url, params=payload)
def function[send_invoice, parameter[token, chat_id, title, description, invoice_payload, provider_token, currency, prices, start_parameter, photo_url, photo_size, photo_width, photo_height, need_name, need_phone_number, need_email, need_shipping_address, is_flexible, disable_notification, reply_to_message_id, reply_markup, provider_data]]: constant[ Use this method to send invoices. On success, the sent Message is returned. :param token: Bot's token (you don't need to fill this) :param chat_id: Unique identifier for the target private chat :param title: Product name :param description: Product description :param invoice_payload: Bot-defined invoice payload, 1-128 bytes. This will not be displayed to the user, use for your internal processes. :param provider_token: Payments provider token, obtained via @Botfather :param currency: Three-letter ISO 4217 currency code, see https://core.telegram.org/bots/payments#supported-currencies :param prices: Price breakdown, a list of components (e.g. product price, tax, discount, delivery cost, delivery tax, bonus, etc.) :param start_parameter: Unique deep-linking parameter that can be used to generate this invoice when used as a start parameter :param photo_url: URL of the product photo for the invoice. Can be a photo of the goods or a marketing image for a service. People like it better when they see what they are paying for. :param photo_size: Photo size :param photo_width: Photo width :param photo_height: Photo height :param need_name: Pass True, if you require the user's full name to complete the order :param need_phone_number: Pass True, if you require the user's phone number to complete the order :param need_email: Pass True, if you require the user's email to complete the order :param need_shipping_address: Pass True, if you require the user's shipping address to complete the order :param is_flexible: Pass True, if the final price depends on the shipping method :param disable_notification: Sends the message silently. Users will receive a notification with no sound. :param reply_to_message_id: If the message is a reply, ID of the original message :param reply_markup: A JSON-serialized object for an inline keyboard. If empty, one 'Pay total price' button will be shown. If not empty, the first button must be a Pay button :param provider_data: :return: ] variable[method_url] assign[=] constant[sendInvoice] variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c359c0>, <ast.Constant object at 0x7da1b1c358d0>, <ast.Constant object at 0x7da1b1c35720>, <ast.Constant object at 0x7da1b1c34f40>, <ast.Constant object at 0x7da1b1c352a0>, <ast.Constant object at 0x7da1b1c351e0>, <ast.Constant object at 0x7da1b1c364a0>, <ast.Constant object at 0x7da1b1c36980>], [<ast.Name object at 0x7da1b1c362f0>, <ast.Name object at 0x7da1b1c35f90>, <ast.Name object at 0x7da1b1c35ff0>, <ast.Name object at 0x7da1b1c36440>, <ast.Name object at 0x7da1b1c36ec0>, <ast.Name object at 0x7da1b1c364d0>, <ast.Name object at 0x7da1b1c34dc0>, <ast.Call object at 0x7da1b1c36d40>]] if name[photo_url] begin[:] call[name[payload]][constant[photo_url]] assign[=] name[photo_url] if name[photo_size] begin[:] call[name[payload]][constant[photo_size]] assign[=] name[photo_size] if name[photo_width] begin[:] call[name[payload]][constant[photo_width]] assign[=] name[photo_width] if name[photo_height] begin[:] call[name[payload]][constant[photo_height]] assign[=] name[photo_height] if name[need_name] begin[:] call[name[payload]][constant[need_name]] assign[=] name[need_name] if name[need_phone_number] begin[:] call[name[payload]][constant[need_phone_number]] assign[=] name[need_phone_number] if name[need_email] begin[:] call[name[payload]][constant[need_email]] assign[=] name[need_email] if name[need_shipping_address] begin[:] call[name[payload]][constant[need_shipping_address]] assign[=] name[need_shipping_address] if name[is_flexible] begin[:] call[name[payload]][constant[is_flexible]] assign[=] name[is_flexible] if name[disable_notification] begin[:] call[name[payload]][constant[disable_notification]] assign[=] name[disable_notification] if name[reply_to_message_id] begin[:] call[name[payload]][constant[reply_to_message_id]] assign[=] name[reply_to_message_id] if name[reply_markup] begin[:] call[name[payload]][constant[reply_markup]] assign[=] call[name[_convert_markup], parameter[name[reply_markup]]] if name[provider_data] begin[:] call[name[payload]][constant[provider_data]] assign[=] name[provider_data] return[call[name[_make_request], parameter[name[token], name[method_url]]]]
keyword[def] identifier[send_invoice] ( identifier[token] , identifier[chat_id] , identifier[title] , identifier[description] , identifier[invoice_payload] , identifier[provider_token] , identifier[currency] , identifier[prices] , identifier[start_parameter] , identifier[photo_url] = keyword[None] , identifier[photo_size] = keyword[None] , identifier[photo_width] = keyword[None] , identifier[photo_height] = keyword[None] , identifier[need_name] = keyword[None] , identifier[need_phone_number] = keyword[None] , identifier[need_email] = keyword[None] , identifier[need_shipping_address] = keyword[None] , identifier[is_flexible] = keyword[None] , identifier[disable_notification] = keyword[None] , identifier[reply_to_message_id] = keyword[None] , identifier[reply_markup] = keyword[None] , identifier[provider_data] = keyword[None] ): literal[string] identifier[method_url] = literal[string] identifier[payload] ={ literal[string] : identifier[chat_id] , literal[string] : identifier[title] , literal[string] : identifier[description] , literal[string] : identifier[invoice_payload] , literal[string] : identifier[provider_token] , literal[string] : identifier[start_parameter] , literal[string] : identifier[currency] , literal[string] : identifier[_convert_list_json_serializable] ( identifier[prices] )} keyword[if] identifier[photo_url] : identifier[payload] [ literal[string] ]= identifier[photo_url] keyword[if] identifier[photo_size] : identifier[payload] [ literal[string] ]= identifier[photo_size] keyword[if] identifier[photo_width] : identifier[payload] [ literal[string] ]= identifier[photo_width] keyword[if] identifier[photo_height] : identifier[payload] [ literal[string] ]= identifier[photo_height] keyword[if] identifier[need_name] : identifier[payload] [ literal[string] ]= identifier[need_name] keyword[if] identifier[need_phone_number] : identifier[payload] [ literal[string] ]= identifier[need_phone_number] keyword[if] identifier[need_email] : identifier[payload] [ literal[string] ]= identifier[need_email] keyword[if] identifier[need_shipping_address] : identifier[payload] [ literal[string] ]= identifier[need_shipping_address] keyword[if] identifier[is_flexible] : identifier[payload] [ literal[string] ]= identifier[is_flexible] keyword[if] identifier[disable_notification] : identifier[payload] [ literal[string] ]= identifier[disable_notification] keyword[if] identifier[reply_to_message_id] : identifier[payload] [ literal[string] ]= identifier[reply_to_message_id] keyword[if] identifier[reply_markup] : identifier[payload] [ literal[string] ]= identifier[_convert_markup] ( identifier[reply_markup] ) keyword[if] identifier[provider_data] : identifier[payload] [ literal[string] ]= identifier[provider_data] keyword[return] identifier[_make_request] ( identifier[token] , identifier[method_url] , identifier[params] = identifier[payload] )
def send_invoice(token, chat_id, title, description, invoice_payload, provider_token, currency, prices, start_parameter, photo_url=None, photo_size=None, photo_width=None, photo_height=None, need_name=None, need_phone_number=None, need_email=None, need_shipping_address=None, is_flexible=None, disable_notification=None, reply_to_message_id=None, reply_markup=None, provider_data=None): """ Use this method to send invoices. On success, the sent Message is returned. :param token: Bot's token (you don't need to fill this) :param chat_id: Unique identifier for the target private chat :param title: Product name :param description: Product description :param invoice_payload: Bot-defined invoice payload, 1-128 bytes. This will not be displayed to the user, use for your internal processes. :param provider_token: Payments provider token, obtained via @Botfather :param currency: Three-letter ISO 4217 currency code, see https://core.telegram.org/bots/payments#supported-currencies :param prices: Price breakdown, a list of components (e.g. product price, tax, discount, delivery cost, delivery tax, bonus, etc.) :param start_parameter: Unique deep-linking parameter that can be used to generate this invoice when used as a start parameter :param photo_url: URL of the product photo for the invoice. Can be a photo of the goods or a marketing image for a service. People like it better when they see what they are paying for. :param photo_size: Photo size :param photo_width: Photo width :param photo_height: Photo height :param need_name: Pass True, if you require the user's full name to complete the order :param need_phone_number: Pass True, if you require the user's phone number to complete the order :param need_email: Pass True, if you require the user's email to complete the order :param need_shipping_address: Pass True, if you require the user's shipping address to complete the order :param is_flexible: Pass True, if the final price depends on the shipping method :param disable_notification: Sends the message silently. Users will receive a notification with no sound. :param reply_to_message_id: If the message is a reply, ID of the original message :param reply_markup: A JSON-serialized object for an inline keyboard. If empty, one 'Pay total price' button will be shown. If not empty, the first button must be a Pay button :param provider_data: :return: """ method_url = 'sendInvoice' payload = {'chat_id': chat_id, 'title': title, 'description': description, 'payload': invoice_payload, 'provider_token': provider_token, 'start_parameter': start_parameter, 'currency': currency, 'prices': _convert_list_json_serializable(prices)} if photo_url: payload['photo_url'] = photo_url # depends on [control=['if'], data=[]] if photo_size: payload['photo_size'] = photo_size # depends on [control=['if'], data=[]] if photo_width: payload['photo_width'] = photo_width # depends on [control=['if'], data=[]] if photo_height: payload['photo_height'] = photo_height # depends on [control=['if'], data=[]] if need_name: payload['need_name'] = need_name # depends on [control=['if'], data=[]] if need_phone_number: payload['need_phone_number'] = need_phone_number # depends on [control=['if'], data=[]] if need_email: payload['need_email'] = need_email # depends on [control=['if'], data=[]] if need_shipping_address: payload['need_shipping_address'] = need_shipping_address # depends on [control=['if'], data=[]] if is_flexible: payload['is_flexible'] = is_flexible # depends on [control=['if'], data=[]] if disable_notification: payload['disable_notification'] = disable_notification # depends on [control=['if'], data=[]] if reply_to_message_id: payload['reply_to_message_id'] = reply_to_message_id # depends on [control=['if'], data=[]] if reply_markup: payload['reply_markup'] = _convert_markup(reply_markup) # depends on [control=['if'], data=[]] if provider_data: payload['provider_data'] = provider_data # depends on [control=['if'], data=[]] return _make_request(token, method_url, params=payload)
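Illustrative call shape only (tokens and IDs are placeholders): per the Telegram Bot API, each price component carries a label and an amount in the currency's smallest units, so 10.99 USD is expressed as 1099. The real library passes LabeledPrice objects through _convert_list_json_serializable; the plain dicts below are a simplification for reading.

prices = [{'label': 'T-shirt', 'amount': 1099},   # 10.99 USD
          {'label': 'Shipping', 'amount': 350}]   # 3.50 USD
# send_invoice(token, chat_id=12345, title='T-shirt',
#              description='Plain cotton tee', invoice_payload='order-42',
#              provider_token='PROVIDER_TOKEN', currency='USD',
#              prices=prices, start_parameter='order-42')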
def order_derived_variables(regime): """ Finds ordering of derived_variables. @param regime: Dynamics Regime containing derived variables. @type regime: lems.model.dynamics.regime @return: Returns ordered list of derived variables. @rtype: list(string) @raise SimBuildError: Raised when a proper ordering of derived variables could not be found. """ ordering = [] dvs = [] dvsnoexp = [] maxcount = 5 for dv in regime.derived_variables: if dv.expression_tree == None: dvsnoexp.append(dv.name) else: dvs.append(dv.name) for dv in regime.conditional_derived_variables: if len(dv.cases) == 0: dvsnoexp.append(dv.name) else: dvs.append(dv.name) count = maxcount while count > 0 and dvs != []: count = count - 1 for dv1 in dvs: if dv1 in regime.derived_variables: dv = regime.derived_variables[dv1] else: dv = regime.conditional_derived_variables[dv1] found = False if isinstance(dv, DerivedVariable): exp_tree = dv.expression_tree for dv2 in dvs: if dv1 != dv2 and is_var_in_exp_tree(dv2, exp_tree): found = True else: for case in dv.cases: for dv2 in dvs: if dv1 != dv2 and (is_var_in_exp_tree(dv2, case.condition_expression_tree) or is_var_in_exp_tree(dv2, case.value_expression_tree)): found = True if not found: ordering.append(dv1) del dvs[dvs.index(dv1)] count = maxcount break if count == 0: raise SimBuildError(("Unable to find ordering for derived " "variables in regime '{0}'").format(regime.name)) #return ordering + dvsnoexp return dvsnoexp + ordering
def function[order_derived_variables, parameter[regime]]: constant[ Finds ordering of derived_variables. @param regime: Dynamics Regime containing derived variables. @type regime: lems.model.dynamics.regime @return: Returns ordered list of derived variables. @rtype: list(string) @raise SimBuildError: Raised when a proper ordering of derived variables could not be found. ] variable[ordering] assign[=] list[[]] variable[dvs] assign[=] list[[]] variable[dvsnoexp] assign[=] list[[]] variable[maxcount] assign[=] constant[5] for taget[name[dv]] in starred[name[regime].derived_variables] begin[:] if compare[name[dv].expression_tree equal[==] constant[None]] begin[:] call[name[dvsnoexp].append, parameter[name[dv].name]] for taget[name[dv]] in starred[name[regime].conditional_derived_variables] begin[:] if compare[call[name[len], parameter[name[dv].cases]] equal[==] constant[0]] begin[:] call[name[dvsnoexp].append, parameter[name[dv].name]] variable[count] assign[=] name[maxcount] while <ast.BoolOp object at 0x7da1b253ab00> begin[:] variable[count] assign[=] binary_operation[name[count] - constant[1]] for taget[name[dv1]] in starred[name[dvs]] begin[:] if compare[name[dv1] in name[regime].derived_variables] begin[:] variable[dv] assign[=] call[name[regime].derived_variables][name[dv1]] variable[found] assign[=] constant[False] if call[name[isinstance], parameter[name[dv], name[DerivedVariable]]] begin[:] variable[exp_tree] assign[=] name[dv].expression_tree for taget[name[dv2]] in starred[name[dvs]] begin[:] if <ast.BoolOp object at 0x7da1b2491900> begin[:] variable[found] assign[=] constant[True] if <ast.UnaryOp object at 0x7da1b24e2e00> begin[:] call[name[ordering].append, parameter[name[dv1]]] <ast.Delete object at 0x7da1b24e28f0> variable[count] assign[=] name[maxcount] break if compare[name[count] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b24e25c0> return[binary_operation[name[dvsnoexp] + name[ordering]]]
keyword[def] identifier[order_derived_variables] ( identifier[regime] ): literal[string] identifier[ordering] =[] identifier[dvs] =[] identifier[dvsnoexp] =[] identifier[maxcount] = literal[int] keyword[for] identifier[dv] keyword[in] identifier[regime] . identifier[derived_variables] : keyword[if] identifier[dv] . identifier[expression_tree] == keyword[None] : identifier[dvsnoexp] . identifier[append] ( identifier[dv] . identifier[name] ) keyword[else] : identifier[dvs] . identifier[append] ( identifier[dv] . identifier[name] ) keyword[for] identifier[dv] keyword[in] identifier[regime] . identifier[conditional_derived_variables] : keyword[if] identifier[len] ( identifier[dv] . identifier[cases] )== literal[int] : identifier[dvsnoexp] . identifier[append] ( identifier[dv] . identifier[name] ) keyword[else] : identifier[dvs] . identifier[append] ( identifier[dv] . identifier[name] ) identifier[count] = identifier[maxcount] keyword[while] identifier[count] > literal[int] keyword[and] identifier[dvs] !=[]: identifier[count] = identifier[count] - literal[int] keyword[for] identifier[dv1] keyword[in] identifier[dvs] : keyword[if] identifier[dv1] keyword[in] identifier[regime] . identifier[derived_variables] : identifier[dv] = identifier[regime] . identifier[derived_variables] [ identifier[dv1] ] keyword[else] : identifier[dv] = identifier[regime] . identifier[conditional_derived_variables] [ identifier[dv1] ] identifier[found] = keyword[False] keyword[if] identifier[isinstance] ( identifier[dv] , identifier[DerivedVariable] ): identifier[exp_tree] = identifier[dv] . identifier[expression_tree] keyword[for] identifier[dv2] keyword[in] identifier[dvs] : keyword[if] identifier[dv1] != identifier[dv2] keyword[and] identifier[is_var_in_exp_tree] ( identifier[dv2] , identifier[exp_tree] ): identifier[found] = keyword[True] keyword[else] : keyword[for] identifier[case] keyword[in] identifier[dv] . identifier[cases] : keyword[for] identifier[dv2] keyword[in] identifier[dvs] : keyword[if] identifier[dv1] != identifier[dv2] keyword[and] ( identifier[is_var_in_exp_tree] ( identifier[dv2] , identifier[case] . identifier[condition_expression_tree] ) keyword[or] identifier[is_var_in_exp_tree] ( identifier[dv2] , identifier[case] . identifier[value_expression_tree] )): identifier[found] = keyword[True] keyword[if] keyword[not] identifier[found] : identifier[ordering] . identifier[append] ( identifier[dv1] ) keyword[del] identifier[dvs] [ identifier[dvs] . identifier[index] ( identifier[dv1] )] identifier[count] = identifier[maxcount] keyword[break] keyword[if] identifier[count] == literal[int] : keyword[raise] identifier[SimBuildError] (( literal[string] literal[string] ). identifier[format] ( identifier[regime] . identifier[name] )) keyword[return] identifier[dvsnoexp] + identifier[ordering]
def order_derived_variables(regime): """ Finds ordering of derived_variables. @param regime: Dynamics Regime containing derived variables. @type regime: lems.model.dynamics.regime @return: Returns ordered list of derived variables. @rtype: list(string) @raise SimBuildError: Raised when a proper ordering of derived variables could not be found. """ ordering = [] dvs = [] dvsnoexp = [] maxcount = 5 for dv in regime.derived_variables: if dv.expression_tree == None: dvsnoexp.append(dv.name) # depends on [control=['if'], data=[]] else: dvs.append(dv.name) # depends on [control=['for'], data=['dv']] for dv in regime.conditional_derived_variables: if len(dv.cases) == 0: dvsnoexp.append(dv.name) # depends on [control=['if'], data=[]] else: dvs.append(dv.name) # depends on [control=['for'], data=['dv']] count = maxcount while count > 0 and dvs != []: count = count - 1 for dv1 in dvs: if dv1 in regime.derived_variables: dv = regime.derived_variables[dv1] # depends on [control=['if'], data=['dv1']] else: dv = regime.conditional_derived_variables[dv1] found = False if isinstance(dv, DerivedVariable): exp_tree = dv.expression_tree for dv2 in dvs: if dv1 != dv2 and is_var_in_exp_tree(dv2, exp_tree): found = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dv2']] # depends on [control=['if'], data=[]] else: for case in dv.cases: for dv2 in dvs: if dv1 != dv2 and (is_var_in_exp_tree(dv2, case.condition_expression_tree) or is_var_in_exp_tree(dv2, case.value_expression_tree)): found = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dv2']] # depends on [control=['for'], data=['case']] if not found: ordering.append(dv1) del dvs[dvs.index(dv1)] count = maxcount break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dv1']] # depends on [control=['while'], data=[]] if count == 0: raise SimBuildError("Unable to find ordering for derived variables in regime '{0}'".format(regime.name)) # depends on [control=['if'], data=[]] #return ordering + dvsnoexp return dvsnoexp + ordering
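A minimal self-contained sketch of the same ordering idea, using plain dicts of expression strings in place of LEMS regime objects; every name here is illustrative, and a crude substring test stands in for is_var_in_exp_tree:

def order_exprs(exprs):
    # repeatedly peel off any variable whose expression mentions no other
    # still-pending variable; whatever remains unpeelable is a cycle
    ordering, pending = [], dict(exprs)
    while pending:
        for name, expr in list(pending.items()):
            if not any(other in expr for other in pending if other != name):
                ordering.append(name)
                del pending[name]
                break
        else:
            raise ValueError('cyclic dependency among derived variables')
    return ordering

print(order_exprs({'a': 'b + c', 'b': 'c * 2', 'c': '1'}))  # ['c', 'b', 'a']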
def set_wrappable_term(self, v, term):
    """Set a term in the Root section, such as Root.Description, possibly
    splitting long values across multiple terms. """

    import textwrap

    for t in self['Root'].find(term):
        self.remove_term(t)

    for l in textwrap.wrap(v, 80):
        self['Root'].new_term(term, l)
def function[set_wrappable_term, parameter[self, v, term]]: constant[Set the Root.Description, possibly splitting long descriptions across multiple terms. ] import module[textwrap] for taget[name[t]] in starred[call[call[name[self]][constant[Root]].find, parameter[name[term]]]] begin[:] call[name[self].remove_term, parameter[name[t]]] for taget[name[l]] in starred[call[name[textwrap].wrap, parameter[name[v], constant[80]]]] begin[:] call[call[name[self]][constant[Root]].new_term, parameter[name[term], name[l]]]
keyword[def] identifier[set_wrappable_term] ( identifier[self] , identifier[v] , identifier[term] ): literal[string] keyword[import] identifier[textwrap] keyword[for] identifier[t] keyword[in] identifier[self] [ literal[string] ]. identifier[find] ( identifier[term] ): identifier[self] . identifier[remove_term] ( identifier[t] ) keyword[for] identifier[l] keyword[in] identifier[textwrap] . identifier[wrap] ( identifier[v] , literal[int] ): identifier[self] [ literal[string] ]. identifier[new_term] ( identifier[term] , identifier[l] )
def set_wrappable_term(self, v, term):
    """Set a term in the Root section, such as Root.Description, possibly
    splitting long values across multiple terms. """
    import textwrap
    for t in self['Root'].find(term):
        self.remove_term(t) # depends on [control=['for'], data=['t']]
    for l in textwrap.wrap(v, 80):
        self['Root'].new_term(term, l) # depends on [control=['for'], data=['l']]
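The wrapping behaviour in isolation, independent of the metatab document object:

import textwrap

value = ('A long description that needs to be stored as several '
         'consecutive terms because it exceeds one line. ') * 3
lines = textwrap.wrap(value, 80)
assert all(len(line) <= 80 for line in lines)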
def writeXML(self, n):
    """
    Writes an XML string to the data stream.

    @type   n: L{ET<xml.ET>}
    @param  n: The XML Document to be encoded to the AMF3 data stream.
    """
    self.stream.write(TYPE_XMLSTRING)

    ref = self.context.getObjectReference(n)

    if ref != -1:
        self._writeInteger(ref << 1)

        return

    self.context.addObject(n)

    self.serialiseString(xml.tostring(n).encode('utf-8'))
def function[writeXML, parameter[self, n]]: constant[ Writes a XML string to the data stream. @type n: L{ET<xml.ET>} @param n: The XML Document to be encoded to the AMF3 data stream. ] call[name[self].stream.write, parameter[name[TYPE_XMLSTRING]]] variable[ref] assign[=] call[name[self].context.getObjectReference, parameter[name[n]]] if compare[name[ref] not_equal[!=] <ast.UnaryOp object at 0x7da1b15c70d0>] begin[:] call[name[self]._writeInteger, parameter[binary_operation[name[ref] <ast.LShift object at 0x7da2590d69e0> constant[1]]]] return[None] call[name[self].context.addObject, parameter[name[n]]] call[name[self].serialiseString, parameter[call[call[name[xml].tostring, parameter[name[n]]].encode, parameter[constant[utf-8]]]]]
keyword[def] identifier[writeXML] ( identifier[self] , identifier[n] ): literal[string] identifier[self] . identifier[stream] . identifier[write] ( identifier[TYPE_XMLSTRING] ) identifier[ref] = identifier[self] . identifier[context] . identifier[getObjectReference] ( identifier[n] ) keyword[if] identifier[ref] !=- literal[int] : identifier[self] . identifier[_writeInteger] ( identifier[ref] << literal[int] ) keyword[return] identifier[self] . identifier[context] . identifier[addObject] ( identifier[n] ) identifier[self] . identifier[serialiseString] ( identifier[xml] . identifier[tostring] ( identifier[n] ). identifier[encode] ( literal[string] ))
def writeXML(self, n):
    """
    Writes an XML string to the data stream.

    @type   n: L{ET<xml.ET>}
    @param  n: The XML Document to be encoded to the AMF3 data stream.
    """
    self.stream.write(TYPE_XMLSTRING)
    ref = self.context.getObjectReference(n)
    if ref != -1:
        self._writeInteger(ref << 1)
        return # depends on [control=['if'], data=['ref']]
    self.context.addObject(n)
    self.serialiseString(xml.tostring(n).encode('utf-8'))
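A small sketch of the AMF3 by-reference scheme on its own; the RefTable class below is a stand-in for the context's getObjectReference/addObject pair, not part of the library:

class RefTable(object):
    def __init__(self):
        self._refs = {}

    def reference(self, obj):
        # the stored index, or -1 on first encounter
        return self._refs.get(id(obj), -1)

    def add(self, obj):
        self._refs[id(obj)] = len(self._refs)

table = RefTable()
doc = object()
assert table.reference(doc) == -1   # first time: serialise in full
table.add(doc)
assert table.reference(doc) == 0    # afterwards: emit `0 << 1` as a reference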
def encodeABI(cls, fn_name, args=None, kwargs=None, data=None):
    """
    Encodes the arguments using the Ethereum ABI for the contract function
    that matches the given name and arguments.

    :param data: defaults to function selector
    """
    fn_abi, fn_selector, fn_arguments = get_function_info(
        fn_name, contract_abi=cls.abi, args=args, kwargs=kwargs,
    )

    if data is None:
        data = fn_selector

    return encode_abi(cls.web3, fn_abi, fn_arguments, data)
def function[encodeABI, parameter[cls, fn_name, args, kwargs, data]]: constant[ Encodes the arguments using the Ethereum ABI for the contract function that matches the given name and arguments.. :param data: defaults to function selector ] <ast.Tuple object at 0x7da18f813eb0> assign[=] call[name[get_function_info], parameter[name[fn_name]]] if compare[name[data] is constant[None]] begin[:] variable[data] assign[=] name[fn_selector] return[call[name[encode_abi], parameter[name[cls].web3, name[fn_abi], name[fn_arguments], name[data]]]]
keyword[def] identifier[encodeABI] ( identifier[cls] , identifier[fn_name] , identifier[args] = keyword[None] , identifier[kwargs] = keyword[None] , identifier[data] = keyword[None] ): literal[string] identifier[fn_abi] , identifier[fn_selector] , identifier[fn_arguments] = identifier[get_function_info] ( identifier[fn_name] , identifier[contract_abi] = identifier[cls] . identifier[abi] , identifier[args] = identifier[args] , identifier[kwargs] = identifier[kwargs] , ) keyword[if] identifier[data] keyword[is] keyword[None] : identifier[data] = identifier[fn_selector] keyword[return] identifier[encode_abi] ( identifier[cls] . identifier[web3] , identifier[fn_abi] , identifier[fn_arguments] , identifier[data] )
def encodeABI(cls, fn_name, args=None, kwargs=None, data=None):
    """
    Encodes the arguments using the Ethereum ABI for the contract function
    that matches the given name and arguments.

    :param data: defaults to function selector
    """
    (fn_abi, fn_selector, fn_arguments) = get_function_info(fn_name, contract_abi=cls.abi, args=args, kwargs=kwargs)
    if data is None:
        data = fn_selector # depends on [control=['if'], data=['data']]
    return encode_abi(cls.web3, fn_abi, fn_arguments, data)
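A hypothetical call against a generated contract class; MyToken, recipient, and the ABI behind them are assumptions, not part of the source above:

calldata = MyToken.encodeABI(fn_name='transfer', args=[recipient, 1000])
# roughly '0xa9059cbb' + ABI-encoded arguments: the 4-byte function
# selector is used as the `data` prefix when none is supplied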
def set_buf_size(fd): """Set up os pipe buffer size, if applicable""" if OS_PIPE_SZ and hasattr(fcntl, 'F_SETPIPE_SZ'): fcntl.fcntl(fd, fcntl.F_SETPIPE_SZ, OS_PIPE_SZ)
def function[set_buf_size, parameter[fd]]: constant[Set up os pipe buffer size, if applicable] if <ast.BoolOp object at 0x7da20c6c4040> begin[:] call[name[fcntl].fcntl, parameter[name[fd], name[fcntl].F_SETPIPE_SZ, name[OS_PIPE_SZ]]]
keyword[def] identifier[set_buf_size] ( identifier[fd] ): literal[string] keyword[if] identifier[OS_PIPE_SZ] keyword[and] identifier[hasattr] ( identifier[fcntl] , literal[string] ): identifier[fcntl] . identifier[fcntl] ( identifier[fd] , identifier[fcntl] . identifier[F_SETPIPE_SZ] , identifier[OS_PIPE_SZ] )
def set_buf_size(fd): """Set up os pipe buffer size, if applicable""" if OS_PIPE_SZ and hasattr(fcntl, 'F_SETPIPE_SZ'): fcntl.fcntl(fd, fcntl.F_SETPIPE_SZ, OS_PIPE_SZ) # depends on [control=['if'], data=[]]
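A runnable sketch on Linux, guarded the same way (fcntl.F_SETPIPE_SZ does not exist on macOS/BSD); the buffer size here is an assumed value, since the real module defines its own OS_PIPE_SZ:

import fcntl
import os

OS_PIPE_SZ = 1 << 20  # assumed size; the real module supplies its own

r, w = os.pipe()
if hasattr(fcntl, 'F_SETPIPE_SZ'):
    fcntl.fcntl(w, fcntl.F_SETPIPE_SZ, OS_PIPE_SZ)  # Linux >= 2.6.35 only
os.close(r)
os.close(w)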
def _format_id(self, payload):
    """Echoes only the id"""
    if 'id' in payload:
        return str(payload['id'])
    if 'results' in payload:
        return ' '.join([six.text_type(item['id'])
                         for item in payload['results']])
    raise MultipleRelatedError('Could not serialize output with id format.')
def function[_format_id, parameter[self, payload]]: constant[Echos only the id] if compare[constant[id] in name[payload]] begin[:] return[call[name[str], parameter[call[name[payload]][constant[id]]]]] if compare[constant[results] in name[payload]] begin[:] return[call[constant[ ].join, parameter[<ast.ListComp object at 0x7da20c794130>]]] <ast.Raise object at 0x7da1b0137c40>
keyword[def] identifier[_format_id] ( identifier[self] , identifier[payload] ): literal[string] keyword[if] literal[string] keyword[in] identifier[payload] : keyword[return] identifier[str] ( identifier[payload] [ literal[string] ]) keyword[if] literal[string] keyword[in] identifier[payload] : keyword[return] literal[string] . identifier[join] ([ identifier[six] . identifier[text_type] ( identifier[item] [ literal[string] ]) keyword[for] identifier[item] keyword[in] identifier[payload] [ literal[string] ]]) keyword[raise] identifier[MultipleRelatedError] ( literal[string] )
def _format_id(self, payload):
    """Echoes only the id"""
    if 'id' in payload:
        return str(payload['id']) # depends on [control=['if'], data=['payload']]
    if 'results' in payload:
        return ' '.join([six.text_type(item['id']) for item in payload['results']]) # depends on [control=['if'], data=['payload']]
    raise MultipleRelatedError('Could not serialize output with id format.')
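Expected behaviour on the two payload shapes, with plain str standing in for six.text_type:

payload_one = {'id': 42}
payload_many = {'results': [{'id': 1}, {'id': 2}, {'id': 3}]}

assert str(payload_one['id']) == '42'
assert ' '.join(str(item['id']) for item in payload_many['results']) == '1 2 3'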
def _num_values(self, vdr_dict): ''' Returns the number of values in a record, using a given VDR dictionary. Multiplies the dimension sizes of each dimension, if it is varying. ''' values = 1 for x in range(0, vdr_dict['num_dims']): if (vdr_dict['dim_vary'][x] != 0): values = values * vdr_dict['dim_sizes'][x] return values
def function[_num_values, parameter[self, vdr_dict]]: constant[ Returns the number of values in a record, using a given VDR dictionary. Multiplies the dimension sizes of each dimension, if it is varying. ] variable[values] assign[=] constant[1] for taget[name[x]] in starred[call[name[range], parameter[constant[0], call[name[vdr_dict]][constant[num_dims]]]]] begin[:] if compare[call[call[name[vdr_dict]][constant[dim_vary]]][name[x]] not_equal[!=] constant[0]] begin[:] variable[values] assign[=] binary_operation[name[values] * call[call[name[vdr_dict]][constant[dim_sizes]]][name[x]]] return[name[values]]
keyword[def] identifier[_num_values] ( identifier[self] , identifier[vdr_dict] ): literal[string] identifier[values] = literal[int] keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , identifier[vdr_dict] [ literal[string] ]): keyword[if] ( identifier[vdr_dict] [ literal[string] ][ identifier[x] ]!= literal[int] ): identifier[values] = identifier[values] * identifier[vdr_dict] [ literal[string] ][ identifier[x] ] keyword[return] identifier[values]
def _num_values(self, vdr_dict): """ Returns the number of values in a record, using a given VDR dictionary. Multiplies the dimension sizes of each dimension, if it is varying. """ values = 1 for x in range(0, vdr_dict['num_dims']): if vdr_dict['dim_vary'][x] != 0: values = values * vdr_dict['dim_sizes'][x] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] return values
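A worked example of the size computation, assuming the dict layout implied by the code above (any nonzero dim_vary entry counts as varying):

vdr = {'num_dims': 3, 'dim_vary': [-1, 0, -1], 'dim_sizes': [10, 5, 3]}

values = 1
for x in range(0, vdr['num_dims']):
    if vdr['dim_vary'][x] != 0:
        values = values * vdr['dim_sizes'][x]
print(values)  # 30, i.e. 10 * 3; the non-varying middle dimension is skipped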
def render_placeholder(request, placeholder, parent_object=None, template_name=None, cachable=None, limit_parent_language=True, fallback_language=None): """ Render a :class:`~fluent_contents.models.Placeholder` object. Returns a :class:`~fluent_contents.models.ContentItemOutput` object which contains the HTML output and :class:`~django.forms.Media` object. This function also caches the complete output of the placeholder when all individual items are cacheable. :param request: The current request object. :type request: :class:`~django.http.HttpRequest` :param placeholder: The placeholder object. :type placeholder: :class:`~fluent_contents.models.Placeholder` :param parent_object: Optional, the parent object of the placeholder (already implied by the placeholder) :param template_name: Optional template name used to concatenate the placeholder output. :type template_name: str | None :param cachable: Whether the output is cachable, otherwise the full output will not be cached. Default: False when using a template, True otherwise. :type cachable: bool | None :param limit_parent_language: Whether the items should be limited to the parent language. :type limit_parent_language: bool :param fallback_language: The fallback language to use if there are no items in the current language. Passing ``True`` uses the default :ref:`FLUENT_CONTENTS_DEFAULT_LANGUAGE_CODE`. :type fallback_language: bool/str :rtype: :class:`~fluent_contents.models.ContentItemOutput` """ output = PlaceholderRenderingPipe(request).render_placeholder( placeholder=placeholder, parent_object=parent_object, template_name=template_name, cachable=cachable, limit_parent_language=limit_parent_language, fallback_language=fallback_language ) # Wrap the result after it's stored in the cache. if markers.is_edit_mode(request): output.html = markers.wrap_placeholder_output(output.html, placeholder) return output
def function[render_placeholder, parameter[request, placeholder, parent_object, template_name, cachable, limit_parent_language, fallback_language]]: constant[ Render a :class:`~fluent_contents.models.Placeholder` object. Returns a :class:`~fluent_contents.models.ContentItemOutput` object which contains the HTML output and :class:`~django.forms.Media` object. This function also caches the complete output of the placeholder when all individual items are cacheable. :param request: The current request object. :type request: :class:`~django.http.HttpRequest` :param placeholder: The placeholder object. :type placeholder: :class:`~fluent_contents.models.Placeholder` :param parent_object: Optional, the parent object of the placeholder (already implied by the placeholder) :param template_name: Optional template name used to concatenate the placeholder output. :type template_name: str | None :param cachable: Whether the output is cachable, otherwise the full output will not be cached. Default: False when using a template, True otherwise. :type cachable: bool | None :param limit_parent_language: Whether the items should be limited to the parent language. :type limit_parent_language: bool :param fallback_language: The fallback language to use if there are no items in the current language. Passing ``True`` uses the default :ref:`FLUENT_CONTENTS_DEFAULT_LANGUAGE_CODE`. :type fallback_language: bool/str :rtype: :class:`~fluent_contents.models.ContentItemOutput` ] variable[output] assign[=] call[call[name[PlaceholderRenderingPipe], parameter[name[request]]].render_placeholder, parameter[]] if call[name[markers].is_edit_mode, parameter[name[request]]] begin[:] name[output].html assign[=] call[name[markers].wrap_placeholder_output, parameter[name[output].html, name[placeholder]]] return[name[output]]
keyword[def] identifier[render_placeholder] ( identifier[request] , identifier[placeholder] , identifier[parent_object] = keyword[None] , identifier[template_name] = keyword[None] , identifier[cachable] = keyword[None] , identifier[limit_parent_language] = keyword[True] , identifier[fallback_language] = keyword[None] ): literal[string] identifier[output] = identifier[PlaceholderRenderingPipe] ( identifier[request] ). identifier[render_placeholder] ( identifier[placeholder] = identifier[placeholder] , identifier[parent_object] = identifier[parent_object] , identifier[template_name] = identifier[template_name] , identifier[cachable] = identifier[cachable] , identifier[limit_parent_language] = identifier[limit_parent_language] , identifier[fallback_language] = identifier[fallback_language] ) keyword[if] identifier[markers] . identifier[is_edit_mode] ( identifier[request] ): identifier[output] . identifier[html] = identifier[markers] . identifier[wrap_placeholder_output] ( identifier[output] . identifier[html] , identifier[placeholder] ) keyword[return] identifier[output]
def render_placeholder(request, placeholder, parent_object=None, template_name=None, cachable=None, limit_parent_language=True, fallback_language=None): """ Render a :class:`~fluent_contents.models.Placeholder` object. Returns a :class:`~fluent_contents.models.ContentItemOutput` object which contains the HTML output and :class:`~django.forms.Media` object. This function also caches the complete output of the placeholder when all individual items are cacheable. :param request: The current request object. :type request: :class:`~django.http.HttpRequest` :param placeholder: The placeholder object. :type placeholder: :class:`~fluent_contents.models.Placeholder` :param parent_object: Optional, the parent object of the placeholder (already implied by the placeholder) :param template_name: Optional template name used to concatenate the placeholder output. :type template_name: str | None :param cachable: Whether the output is cachable, otherwise the full output will not be cached. Default: False when using a template, True otherwise. :type cachable: bool | None :param limit_parent_language: Whether the items should be limited to the parent language. :type limit_parent_language: bool :param fallback_language: The fallback language to use if there are no items in the current language. Passing ``True`` uses the default :ref:`FLUENT_CONTENTS_DEFAULT_LANGUAGE_CODE`. :type fallback_language: bool/str :rtype: :class:`~fluent_contents.models.ContentItemOutput` """ output = PlaceholderRenderingPipe(request).render_placeholder(placeholder=placeholder, parent_object=parent_object, template_name=template_name, cachable=cachable, limit_parent_language=limit_parent_language, fallback_language=fallback_language) # Wrap the result after it's stored in the cache. if markers.is_edit_mode(request): output.html = markers.wrap_placeholder_output(output.html, placeholder) # depends on [control=['if'], data=[]] return output
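A hypothetical use inside a Django view; the page model, its content placeholder field, and the output.media attribute are assumptions based on the docstring, not verified API:

from django.shortcuts import render

def page_detail(request, page):
    output = render_placeholder(request, page.content, parent_object=page,
                                fallback_language=True)
    return render(request, 'page.html',
                  {'body_html': output.html, 'media': output.media})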
def _issubclass_Union_rec(subclass, superclass, bound_Generic, bound_typevars, bound_typevars_readonly, follow_fwd_refs, _recursion_check): """Helper for _issubclass_Union. """ # this function is partly based on code from typing module 3.5.2.2 super_args = get_Union_params(superclass) if super_args is None: return is_Union(subclass) elif is_Union(subclass): sub_args = get_Union_params(subclass) if sub_args is None: return False return all(_issubclass(c, superclass, bound_Generic, bound_typevars, bound_typevars_readonly, follow_fwd_refs, _recursion_check) \ for c in (sub_args)) elif isinstance(subclass, TypeVar): if subclass in super_args: return True if subclass.__constraints__: return _issubclass(Union[subclass.__constraints__], superclass, bound_Generic, bound_typevars, bound_typevars_readonly, follow_fwd_refs, _recursion_check) return False else: return any(_issubclass(subclass, t, bound_Generic, bound_typevars, bound_typevars_readonly, follow_fwd_refs, _recursion_check) \ for t in super_args)
def function[_issubclass_Union_rec, parameter[subclass, superclass, bound_Generic, bound_typevars, bound_typevars_readonly, follow_fwd_refs, _recursion_check]]: constant[Helper for _issubclass_Union. ] variable[super_args] assign[=] call[name[get_Union_params], parameter[name[superclass]]] if compare[name[super_args] is constant[None]] begin[:] return[call[name[is_Union], parameter[name[subclass]]]]
keyword[def] identifier[_issubclass_Union_rec] ( identifier[subclass] , identifier[superclass] , identifier[bound_Generic] , identifier[bound_typevars] , identifier[bound_typevars_readonly] , identifier[follow_fwd_refs] , identifier[_recursion_check] ): literal[string] identifier[super_args] = identifier[get_Union_params] ( identifier[superclass] ) keyword[if] identifier[super_args] keyword[is] keyword[None] : keyword[return] identifier[is_Union] ( identifier[subclass] ) keyword[elif] identifier[is_Union] ( identifier[subclass] ): identifier[sub_args] = identifier[get_Union_params] ( identifier[subclass] ) keyword[if] identifier[sub_args] keyword[is] keyword[None] : keyword[return] keyword[False] keyword[return] identifier[all] ( identifier[_issubclass] ( identifier[c] , identifier[superclass] , identifier[bound_Generic] , identifier[bound_typevars] , identifier[bound_typevars_readonly] , identifier[follow_fwd_refs] , identifier[_recursion_check] ) keyword[for] identifier[c] keyword[in] ( identifier[sub_args] )) keyword[elif] identifier[isinstance] ( identifier[subclass] , identifier[TypeVar] ): keyword[if] identifier[subclass] keyword[in] identifier[super_args] : keyword[return] keyword[True] keyword[if] identifier[subclass] . identifier[__constraints__] : keyword[return] identifier[_issubclass] ( identifier[Union] [ identifier[subclass] . identifier[__constraints__] ], identifier[superclass] , identifier[bound_Generic] , identifier[bound_typevars] , identifier[bound_typevars_readonly] , identifier[follow_fwd_refs] , identifier[_recursion_check] ) keyword[return] keyword[False] keyword[else] : keyword[return] identifier[any] ( identifier[_issubclass] ( identifier[subclass] , identifier[t] , identifier[bound_Generic] , identifier[bound_typevars] , identifier[bound_typevars_readonly] , identifier[follow_fwd_refs] , identifier[_recursion_check] ) keyword[for] identifier[t] keyword[in] identifier[super_args] )
def _issubclass_Union_rec(subclass, superclass, bound_Generic, bound_typevars, bound_typevars_readonly, follow_fwd_refs, _recursion_check): """Helper for _issubclass_Union. """ # this function is partly based on code from typing module 3.5.2.2 super_args = get_Union_params(superclass) if super_args is None: return is_Union(subclass) # depends on [control=['if'], data=[]] elif is_Union(subclass): sub_args = get_Union_params(subclass) if sub_args is None: return False # depends on [control=['if'], data=[]] return all((_issubclass(c, superclass, bound_Generic, bound_typevars, bound_typevars_readonly, follow_fwd_refs, _recursion_check) for c in sub_args)) # depends on [control=['if'], data=[]] elif isinstance(subclass, TypeVar): if subclass in super_args: return True # depends on [control=['if'], data=[]] if subclass.__constraints__: return _issubclass(Union[subclass.__constraints__], superclass, bound_Generic, bound_typevars, bound_typevars_readonly, follow_fwd_refs, _recursion_check) # depends on [control=['if'], data=[]] return False # depends on [control=['if'], data=[]] else: return any((_issubclass(subclass, t, bound_Generic, bound_typevars, bound_typevars_readonly, follow_fwd_refs, _recursion_check) for t in super_args))
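A simplified, self-contained version of the Union rule on modern typing (Python 3.8+), without the TypeVar and forward-reference handling of the original:

from typing import Union, get_args, get_origin

def union_issubclass(subclass, superclass):
    # every member of a sub-Union must fit some member of the super-Union;
    # a plain class just has to fit one member
    super_args = get_args(superclass)
    if get_origin(subclass) is Union:
        return all(any(issubclass(c, t) for t in super_args)
                   for c in get_args(subclass))
    return any(issubclass(subclass, t) for t in super_args)

assert union_issubclass(Union[int, bool], Union[int, str])      # bool <= int
assert not union_issubclass(Union[int, float], Union[int, str])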
def shutdown(name, wait=False, reboot=False): ''' graceful shutdown sent to the container :param wait: should we wait for the shutdown to complete? :param reboot: reboot a container, ignores wait ''' if not exists(name): raise ContainerNotExists("The container (%s) does not exist!" % name) cmd = ['lxc-shutdown', '-n', name] if wait: cmd += ['-w'] if reboot: cmd += ['-r'] subprocess.check_call(cmd)
def function[shutdown, parameter[name, wait, reboot]]: constant[ graceful shutdown sent to the container :param wait: should we wait for the shutdown to complete? :param reboot: reboot a container, ignores wait ] if <ast.UnaryOp object at 0x7da204346200> begin[:] <ast.Raise object at 0x7da204347eb0> variable[cmd] assign[=] list[[<ast.Constant object at 0x7da204345870>, <ast.Constant object at 0x7da204344700>, <ast.Name object at 0x7da204346260>]] if name[wait] begin[:] <ast.AugAssign object at 0x7da204347ca0> if name[reboot] begin[:] <ast.AugAssign object at 0x7da2043470a0> call[name[subprocess].check_call, parameter[name[cmd]]]
keyword[def] identifier[shutdown] ( identifier[name] , identifier[wait] = keyword[False] , identifier[reboot] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[exists] ( identifier[name] ): keyword[raise] identifier[ContainerNotExists] ( literal[string] % identifier[name] ) identifier[cmd] =[ literal[string] , literal[string] , identifier[name] ] keyword[if] identifier[wait] : identifier[cmd] +=[ literal[string] ] keyword[if] identifier[reboot] : identifier[cmd] +=[ literal[string] ] identifier[subprocess] . identifier[check_call] ( identifier[cmd] )
def shutdown(name, wait=False, reboot=False): """ graceful shutdown sent to the container :param wait: should we wait for the shutdown to complete? :param reboot: reboot a container, ignores wait """ if not exists(name): raise ContainerNotExists('The container (%s) does not exist!' % name) # depends on [control=['if'], data=[]] cmd = ['lxc-shutdown', '-n', name] if wait: cmd += ['-w'] # depends on [control=['if'], data=[]] if reboot: cmd += ['-r'] # depends on [control=['if'], data=[]] subprocess.check_call(cmd)
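For reference, the command lines this assembles for a hypothetical container named web01 (per the docstring, -r makes the wait flag moot):

cmd = ['lxc-shutdown', '-n', 'web01']   # shutdown('web01')
print(' '.join(cmd + ['-w']))           # lxc-shutdown -n web01 -w   (wait=True)
print(' '.join(cmd + ['-r']))           # lxc-shutdown -n web01 -r   (reboot=True)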
def networks(self): """ Return a new raw REST interface to network resources :rtype: :py:class:`ns1.rest.ipam.Networks` """ import ns1.rest.ipam return ns1.rest.ipam.Networks(self.config)
def function[networks, parameter[self]]: constant[ Return a new raw REST interface to network resources :rtype: :py:class:`ns1.rest.ipam.Networks` ] import module[ns1.rest.ipam] return[call[name[ns1].rest.ipam.Networks, parameter[name[self].config]]]
keyword[def] identifier[networks] ( identifier[self] ): literal[string] keyword[import] identifier[ns1] . identifier[rest] . identifier[ipam] keyword[return] identifier[ns1] . identifier[rest] . identifier[ipam] . identifier[Networks] ( identifier[self] . identifier[config] )
def networks(self): """ Return a new raw REST interface to network resources :rtype: :py:class:`ns1.rest.ipam.Networks` """ import ns1.rest.ipam return ns1.rest.ipam.Networks(self.config)
def computeDelaunayTriangulation(points): """ Takes a list of point objects (which must have x and y fields). Returns a list of 3-tuples: the indices of the points that form a Delaunay triangle. """ siteList = SiteList(points) context = Context() context.triangulate = True voronoi(siteList,context) return context.triangles
def function[computeDelaunayTriangulation, parameter[points]]: constant[ Takes a list of point objects (which must have x and y fields). Returns a list of 3-tuples: the indices of the points that form a Delaunay triangle. ] variable[siteList] assign[=] call[name[SiteList], parameter[name[points]]] variable[context] assign[=] call[name[Context], parameter[]] name[context].triangulate assign[=] constant[True] call[name[voronoi], parameter[name[siteList], name[context]]] return[name[context].triangles]
keyword[def] identifier[computeDelaunayTriangulation] ( identifier[points] ): literal[string] identifier[siteList] = identifier[SiteList] ( identifier[points] ) identifier[context] = identifier[Context] () identifier[context] . identifier[triangulate] = keyword[True] identifier[voronoi] ( identifier[siteList] , identifier[context] ) keyword[return] identifier[context] . identifier[triangles]
def computeDelaunayTriangulation(points): """ Takes a list of point objects (which must have x and y fields). Returns a list of 3-tuples: the indices of the points that form a Delaunay triangle. """ siteList = SiteList(points) context = Context() context.triangulate = True voronoi(siteList, context) return context.triangles
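A hypothetical call with a minimal point type; SiteList and Context come from the surrounding Voronoi module, so this only runs alongside it:

class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

points = [Point(0, 0), Point(1, 0), Point(0, 1), Point(1, 1)]
triangles = computeDelaunayTriangulation(points)
# e.g. [(0, 1, 2), (1, 3, 2)]: two triangles of point indices covering the square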
def iterativeFetch(query, batchSize=default_batch_size):
    """
    Returns rows of an SQL fetch query on demand
    """
    while True:
        rows = query.fetchmany(batchSize)
        if not rows:
            break
        rowDicts = sqliteRowsToDicts(rows)
        for rowDict in rowDicts:
            yield rowDict
def function[iterativeFetch, parameter[query, batchSize]]: constant[ Returns rows of a sql fetch query on demand ] while constant[True] begin[:] variable[rows] assign[=] call[name[query].fetchmany, parameter[name[batchSize]]] if <ast.UnaryOp object at 0x7da18ede4700> begin[:] break variable[rowDicts] assign[=] call[name[sqliteRowsToDicts], parameter[name[rows]]] for taget[name[rowDict]] in starred[name[rowDicts]] begin[:] <ast.Yield object at 0x7da18ede5930>
keyword[def] identifier[iterativeFetch] ( identifier[query] , identifier[batchSize] = identifier[default_batch_size] ): literal[string] keyword[while] keyword[True] : identifier[rows] = identifier[query] . identifier[fetchmany] ( identifier[batchSize] ) keyword[if] keyword[not] identifier[rows] : keyword[break] identifier[rowDicts] = identifier[sqliteRowsToDicts] ( identifier[rows] ) keyword[for] identifier[rowDict] keyword[in] identifier[rowDicts] : keyword[yield] identifier[rowDict]
def iterativeFetch(query, batchSize=default_batch_size):
    """
    Returns rows of an SQL fetch query on demand
    """
    while True:
        rows = query.fetchmany(batchSize)
        if not rows:
            break # depends on [control=['if'], data=[]]
        rowDicts = sqliteRowsToDicts(rows)
        for rowDict in rowDicts:
            yield rowDict # depends on [control=['for'], data=['rowDict']] # depends on [control=['while'], data=[]]
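A sketch against an in-memory sqlite3 table; sqliteRowsToDicts is a helper from the same module and is assumed available:

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE reads (id INTEGER, name TEXT)')
conn.executemany('INSERT INTO reads VALUES (?, ?)',
                 [(i, 'r%d' % i) for i in range(1000)])

query = conn.execute('SELECT * FROM reads')
for rowDict in iterativeFetch(query, batchSize=100):
    pass  # rows arrive lazily, fetched 100 at a time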
def are_checksums_equal(checksum_a_pyxb, checksum_b_pyxb): """Determine if checksums are equal. Args: checksum_a_pyxb, checksum_b_pyxb: PyXB Checksum objects to compare. Returns: bool - **True**: The checksums contain the same hexadecimal values calculated with the same algorithm. Identical checksums guarantee (for all practical purposes) that the checksums were calculated from the same sequence of bytes. - **False**: The checksums were calculated with the same algorithm but the hexadecimal values are different. Raises: ValueError The checksums were calculated with different algorithms, hence cannot be compared. """ if checksum_a_pyxb.algorithm != checksum_b_pyxb.algorithm: raise ValueError( 'Cannot compare checksums calculated with different algorithms. ' 'a="{}" b="{}"'.format(checksum_a_pyxb.algorithm, checksum_b_pyxb.algorithm) ) return checksum_a_pyxb.value().lower() == checksum_b_pyxb.value().lower()
def function[are_checksums_equal, parameter[checksum_a_pyxb, checksum_b_pyxb]]: constant[Determine if checksums are equal. Args: checksum_a_pyxb, checksum_b_pyxb: PyXB Checksum objects to compare. Returns: bool - **True**: The checksums contain the same hexadecimal values calculated with the same algorithm. Identical checksums guarantee (for all practical purposes) that the checksums were calculated from the same sequence of bytes. - **False**: The checksums were calculated with the same algorithm but the hexadecimal values are different. Raises: ValueError The checksums were calculated with different algorithms, hence cannot be compared. ] if compare[name[checksum_a_pyxb].algorithm not_equal[!=] name[checksum_b_pyxb].algorithm] begin[:] <ast.Raise object at 0x7da1b1adeb60> return[compare[call[call[name[checksum_a_pyxb].value, parameter[]].lower, parameter[]] equal[==] call[call[name[checksum_b_pyxb].value, parameter[]].lower, parameter[]]]]
keyword[def] identifier[are_checksums_equal] ( identifier[checksum_a_pyxb] , identifier[checksum_b_pyxb] ): literal[string] keyword[if] identifier[checksum_a_pyxb] . identifier[algorithm] != identifier[checksum_b_pyxb] . identifier[algorithm] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[checksum_a_pyxb] . identifier[algorithm] , identifier[checksum_b_pyxb] . identifier[algorithm] ) ) keyword[return] identifier[checksum_a_pyxb] . identifier[value] (). identifier[lower] ()== identifier[checksum_b_pyxb] . identifier[value] (). identifier[lower] ()
def are_checksums_equal(checksum_a_pyxb, checksum_b_pyxb): """Determine if checksums are equal. Args: checksum_a_pyxb, checksum_b_pyxb: PyXB Checksum objects to compare. Returns: bool - **True**: The checksums contain the same hexadecimal values calculated with the same algorithm. Identical checksums guarantee (for all practical purposes) that the checksums were calculated from the same sequence of bytes. - **False**: The checksums were calculated with the same algorithm but the hexadecimal values are different. Raises: ValueError The checksums were calculated with different algorithms, hence cannot be compared. """ if checksum_a_pyxb.algorithm != checksum_b_pyxb.algorithm: raise ValueError('Cannot compare checksums calculated with different algorithms. a="{}" b="{}"'.format(checksum_a_pyxb.algorithm, checksum_b_pyxb.algorithm)) # depends on [control=['if'], data=[]] return checksum_a_pyxb.value().lower() == checksum_b_pyxb.value().lower()
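For illustration, any object with an algorithm attribute and a callable value() can stand in for the PyXB checksum objects:

from types import SimpleNamespace

a = SimpleNamespace(algorithm='MD5', value=lambda: 'ABC123')
b = SimpleNamespace(algorithm='MD5', value=lambda: 'abc123')
c = SimpleNamespace(algorithm='SHA-1', value=lambda: 'abc123')

assert are_checksums_equal(a, b)   # same algorithm, case-insensitive match
try:
    are_checksums_equal(a, c)
except ValueError:
    pass  # algorithms differ, so the comparison is refused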
def filter_ribo_counts(counts, orf_start=None, orf_stop=None): """Filter read counts and return only upstream of orf_start or downstream of orf_stop. Keyword arguments: counts -- Ribo-Seq read counts obtained from get_ribo_counts. orf_start -- Start position of the longest ORF. orf_stop -- Stop position of the longest ORF. """ filtered_counts = dict.copy(counts) for position in counts: if orf_start and orf_stop: # if only upstream and downstream reads are required, check if # current position is upstream or downstream of the ORF start/stop # if not, remove from counts if (position > orf_start and position < orf_stop): filtered_counts.pop(position) elif orf_start: # check if current position is upstream of ORF start. if not, remove if position >= orf_start: filtered_counts.pop(position) elif orf_stop: # check if current position is downstream of ORF stop. If not, # remove if position <= orf_stop: filtered_counts.pop(position) # calculate total reads for this transcript total_reads = sum(sum(item.values()) for item in filtered_counts.values()) return filtered_counts, total_reads
def function[filter_ribo_counts, parameter[counts, orf_start, orf_stop]]: constant[Filter read counts and return only upstream of orf_start or downstream of orf_stop. Keyword arguments: counts -- Ribo-Seq read counts obtained from get_ribo_counts. orf_start -- Start position of the longest ORF. orf_stop -- Stop position of the longest ORF. ] variable[filtered_counts] assign[=] call[name[dict].copy, parameter[name[counts]]] for taget[name[position]] in starred[name[counts]] begin[:] if <ast.BoolOp object at 0x7da18c4ccd00> begin[:] if <ast.BoolOp object at 0x7da18c4ccb50> begin[:] call[name[filtered_counts].pop, parameter[name[position]]] variable[total_reads] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da18c4ceda0>]] return[tuple[[<ast.Name object at 0x7da18c4ccbb0>, <ast.Name object at 0x7da18c4cc6a0>]]]
keyword[def] identifier[filter_ribo_counts] ( identifier[counts] , identifier[orf_start] = keyword[None] , identifier[orf_stop] = keyword[None] ): literal[string] identifier[filtered_counts] = identifier[dict] . identifier[copy] ( identifier[counts] ) keyword[for] identifier[position] keyword[in] identifier[counts] : keyword[if] identifier[orf_start] keyword[and] identifier[orf_stop] : keyword[if] ( identifier[position] > identifier[orf_start] keyword[and] identifier[position] < identifier[orf_stop] ): identifier[filtered_counts] . identifier[pop] ( identifier[position] ) keyword[elif] identifier[orf_start] : keyword[if] identifier[position] >= identifier[orf_start] : identifier[filtered_counts] . identifier[pop] ( identifier[position] ) keyword[elif] identifier[orf_stop] : keyword[if] identifier[position] <= identifier[orf_stop] : identifier[filtered_counts] . identifier[pop] ( identifier[position] ) identifier[total_reads] = identifier[sum] ( identifier[sum] ( identifier[item] . identifier[values] ()) keyword[for] identifier[item] keyword[in] identifier[filtered_counts] . identifier[values] ()) keyword[return] identifier[filtered_counts] , identifier[total_reads]
def filter_ribo_counts(counts, orf_start=None, orf_stop=None): """Filter read counts and return only upstream of orf_start or downstream of orf_stop. Keyword arguments: counts -- Ribo-Seq read counts obtained from get_ribo_counts. orf_start -- Start position of the longest ORF. orf_stop -- Stop position of the longest ORF. """ filtered_counts = dict.copy(counts) for position in counts: if orf_start and orf_stop: # if only upstream and downstream reads are required, check if # current position is upstream or downstream of the ORF start/stop # if not, remove from counts if position > orf_start and position < orf_stop: filtered_counts.pop(position) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif orf_start: # check if current position is upstream of ORF start. if not, remove if position >= orf_start: filtered_counts.pop(position) # depends on [control=['if'], data=['position']] # depends on [control=['if'], data=[]] elif orf_stop: # check if current position is downstream of ORF stop. If not, # remove if position <= orf_stop: filtered_counts.pop(position) # depends on [control=['if'], data=['position']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['position']] # calculate total reads for this transcript total_reads = sum((sum(item.values()) for item in filtered_counts.values())) return (filtered_counts, total_reads)
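A worked example using the nested dict layout implied by the code (position -> {read length: count}), with an ORF spanning positions 50-130:

counts = {10: {28: 4}, 60: {29: 2}, 150: {30: 5}}

filtered, total = filter_ribo_counts(counts, orf_start=50, orf_stop=130)
# position 60 falls strictly inside the ORF and is dropped:
# filtered == {10: {28: 4}, 150: {30: 5}}, total == 9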
def clean(self): """ Prevents cycles in the tree. """ super(CTENode, self).clean() if self.parent and self.pk in getattr(self.parent, self._cte_node_path): raise ValidationError(_("A node cannot be made a descendant of itself."))
def function[clean, parameter[self]]: constant[ Prevents cycles in the tree. ] call[call[name[super], parameter[name[CTENode], name[self]]].clean, parameter[]] if <ast.BoolOp object at 0x7da2047eb4f0> begin[:] <ast.Raise object at 0x7da2047e8b20>
keyword[def] identifier[clean] ( identifier[self] ): literal[string] identifier[super] ( identifier[CTENode] , identifier[self] ). identifier[clean] () keyword[if] identifier[self] . identifier[parent] keyword[and] identifier[self] . identifier[pk] keyword[in] identifier[getattr] ( identifier[self] . identifier[parent] , identifier[self] . identifier[_cte_node_path] ): keyword[raise] identifier[ValidationError] ( identifier[_] ( literal[string] ))
def clean(self): """ Prevents cycles in the tree. """ super(CTENode, self).clean() if self.parent and self.pk in getattr(self.parent, self._cte_node_path): raise ValidationError(_('A node cannot be made a descendant of itself.')) # depends on [control=['if'], data=[]]
def plot_projections(x, relative_to=None, autolim=True, axes=None,
                     subplots_kwargs=dict(), labels=None, plot_function=None,
                     **kwargs):
    """
    Given an N-dimensional quantity, ``x``, make a figure containing 2D
    projections of all combinations of the axes.

    Parameters
    ----------
    x : array_like
        Array of values. ``axis=0`` is assumed to be the dimensionality,
        ``axis=1`` is the time axis. See :ref:`shape-conventions` for more
        information.
    relative_to : array_like (optional)
        Plot the values relative to this value or values.
    autolim : bool (optional)
        Automatically set the plot limits to be something sensible.
    axes : array_like (optional)
        Array of matplotlib Axes objects.
    subplots_kwargs : dict (optional)
        Dictionary of kwargs passed to :func:`~matplotlib.pyplot.subplots`.
    labels : iterable (optional)
        List or iterable of axis labels as strings. They should correspond to
        the dimensions of the input orbit.
    plot_function : callable (optional)
        The ``matplotlib`` plot function to use. By default, this is
        :func:`~matplotlib.pyplot.scatter`, but can also be, e.g.,
        :func:`~matplotlib.pyplot.plot`.
    **kwargs
        All other keyword arguments are passed to the ``plot_function``.
        You can pass in any of the usual style kwargs like ``color=...``,
        ``marker=...``, etc.

    Returns
    -------
    fig : `~matplotlib.Figure`

    """

    # don't propagate changes back...
    x = np.array(x, copy=True)
    ndim = x.shape[0]

    # get axes object from arguments
    if axes is None:
        axes = _get_axes(dim=ndim, subplots_kwargs=subplots_kwargs)

    # if the quantities are relative
    if relative_to is not None:
        x -= relative_to

    # name of the plotting function
    plot_fn_name = plot_function.__name__

    # automatically determine limits
    if autolim:
        lims = []
        for i in range(ndim):
            max_, min_ = np.max(x[i]), np.min(x[i])
            delta = max_ - min_

            if delta == 0.:
                delta = 1.

            lims.append([min_ - delta*0.02, max_ + delta*0.02])

    k = 0
    for i in range(ndim):
        for j in range(ndim):
            if i >= j:
                continue  # skip diagonal, upper triangle

            plot_func = getattr(axes[k], plot_fn_name)
            plot_func(x[i], x[j], **kwargs)

            if labels is not None:
                axes[k].set_xlabel(labels[i])
                axes[k].set_ylabel(labels[j])

            if autolim:
                axes[k].set_xlim(lims[i])
                axes[k].set_ylim(lims[j])

            k += 1

    axes[0].figure.tight_layout()
    return axes[0].figure
def function[plot_projections, parameter[x, relative_to, autolim, axes, subplots_kwargs, labels, plot_function]]: constant[ Given N-dimensional quantity, ``x``, make a figure containing 2D projections of all combinations of the axes. Parameters ---------- x : array_like Array of values. ``axis=0`` is assumed to be the dimensionality, ``axis=1`` is the time axis. See :ref:`shape-conventions` for more information. relative_to : bool (optional) Plot the values relative to this value or values. autolim : bool (optional) Automatically set the plot limits to be something sensible. axes : array_like (optional) Array of matplotlib Axes objects. subplots_kwargs : dict (optional) Dictionary of kwargs passed to :func:`~matplotlib.pyplot.subplots`. labels : iterable (optional) List or iterable of axis labels as strings. They should correspond to the dimensions of the input orbit. plot_function : callable (optional) The ``matplotlib`` plot function to use. By default, this is :func:`~matplotlib.pyplot.scatter`, but can also be, e.g., :func:`~matplotlib.pyplot.plot`. **kwargs All other keyword arguments are passed to the ``plot_function``. You can pass in any of the usual style kwargs like ``color=...``, ``marker=...``, etc. Returns ------- fig : `~matplotlib.Figure` ] variable[x] assign[=] call[name[np].array, parameter[name[x]]] variable[ndim] assign[=] call[name[x].shape][constant[0]] if compare[name[axes] is constant[None]] begin[:] variable[axes] assign[=] call[name[_get_axes], parameter[]] if compare[name[relative_to] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da1b0c0f1f0> variable[plot_fn_name] assign[=] name[plot_function].__name__ if name[autolim] begin[:] variable[lims] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[name[ndim]]]] begin[:] <ast.Tuple object at 0x7da1b0c0ec50> assign[=] tuple[[<ast.Call object at 0x7da1b0c0eb90>, <ast.Call object at 0x7da1b0c0ea70>]] variable[delta] assign[=] binary_operation[name[max_] - name[min_]] if compare[name[delta] equal[==] constant[0.0]] begin[:] variable[delta] assign[=] constant[1.0] call[name[lims].append, parameter[list[[<ast.BinOp object at 0x7da1b0c0f430>, <ast.BinOp object at 0x7da1b0c0faf0>]]]] variable[k] assign[=] constant[0] for taget[name[i]] in starred[call[name[range], parameter[name[ndim]]]] begin[:] for taget[name[j]] in starred[call[name[range], parameter[name[ndim]]]] begin[:] if compare[name[i] greater_or_equal[>=] name[j]] begin[:] continue variable[plot_func] assign[=] call[name[getattr], parameter[call[name[axes]][name[k]], name[plot_fn_name]]] call[name[plot_func], parameter[call[name[x]][name[i]], call[name[x]][name[j]]]] if compare[name[labels] is_not constant[None]] begin[:] call[call[name[axes]][name[k]].set_xlabel, parameter[call[name[labels]][name[i]]]] call[call[name[axes]][name[k]].set_ylabel, parameter[call[name[labels]][name[j]]]] if name[autolim] begin[:] call[call[name[axes]][name[k]].set_xlim, parameter[call[name[lims]][name[i]]]] call[call[name[axes]][name[k]].set_ylim, parameter[call[name[lims]][name[j]]]] <ast.AugAssign object at 0x7da1b0d10b80> call[call[name[axes]][constant[0]].figure.tight_layout, parameter[]] return[call[name[axes]][constant[0]].figure]
keyword[def] identifier[plot_projections] ( identifier[x] , identifier[relative_to] = keyword[None] , identifier[autolim] = keyword[True] , identifier[axes] = keyword[None] , identifier[subplots_kwargs] = identifier[dict] (), identifier[labels] = keyword[None] , identifier[plot_function] = keyword[None] , ** identifier[kwargs] ): literal[string] identifier[x] = identifier[np] . identifier[array] ( identifier[x] , identifier[copy] = keyword[True] ) identifier[ndim] = identifier[x] . identifier[shape] [ literal[int] ] keyword[if] identifier[axes] keyword[is] keyword[None] : identifier[axes] = identifier[_get_axes] ( identifier[dim] = identifier[ndim] , identifier[subplots_kwargs] = identifier[subplots_kwargs] ) keyword[if] identifier[relative_to] keyword[is] keyword[not] keyword[None] : identifier[x] -= identifier[relative_to] identifier[plot_fn_name] = identifier[plot_function] . identifier[__name__] keyword[if] identifier[autolim] : identifier[lims] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[ndim] ): identifier[max_] , identifier[min_] = identifier[np] . identifier[max] ( identifier[x] [ identifier[i] ]), identifier[np] . identifier[min] ( identifier[x] [ identifier[i] ]) identifier[delta] = identifier[max_] - identifier[min_] keyword[if] identifier[delta] == literal[int] : identifier[delta] = literal[int] identifier[lims] . identifier[append] ([ identifier[min_] - identifier[delta] * literal[int] , identifier[max_] + identifier[delta] * literal[int] ]) identifier[k] = literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[ndim] ): keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[ndim] ): keyword[if] identifier[i] >= identifier[j] : keyword[continue] identifier[plot_func] = identifier[getattr] ( identifier[axes] [ identifier[k] ], identifier[plot_fn_name] ) identifier[plot_func] ( identifier[x] [ identifier[i] ], identifier[x] [ identifier[j] ],** identifier[kwargs] ) keyword[if] identifier[labels] keyword[is] keyword[not] keyword[None] : identifier[axes] [ identifier[k] ]. identifier[set_xlabel] ( identifier[labels] [ identifier[i] ]) identifier[axes] [ identifier[k] ]. identifier[set_ylabel] ( identifier[labels] [ identifier[j] ]) keyword[if] identifier[autolim] : identifier[axes] [ identifier[k] ]. identifier[set_xlim] ( identifier[lims] [ identifier[i] ]) identifier[axes] [ identifier[k] ]. identifier[set_ylim] ( identifier[lims] [ identifier[j] ]) identifier[k] += literal[int] identifier[axes] [ literal[int] ]. identifier[figure] . identifier[tight_layout] () keyword[return] identifier[axes] [ literal[int] ]. identifier[figure]
def plot_projections(x, relative_to=None, autolim=True, axes=None, subplots_kwargs=dict(), labels=None, plot_function=None, **kwargs): """ Given an N-dimensional quantity, ``x``, make a figure containing 2D projections of all combinations of the axes. Parameters ---------- x : array_like Array of values. ``axis=0`` is assumed to be the dimensionality, ``axis=1`` is the time axis. See :ref:`shape-conventions` for more information. relative_to : array_like (optional) Plot the values relative to this value or values. autolim : bool (optional) Automatically set the plot limits to be something sensible. axes : array_like (optional) Array of matplotlib Axes objects. subplots_kwargs : dict (optional) Dictionary of kwargs passed to :func:`~matplotlib.pyplot.subplots`. labels : iterable (optional) List or iterable of axis labels as strings. They should correspond to the dimensions of the input orbit. plot_function : callable (optional) The ``matplotlib`` plot function to use. By default, this is :func:`~matplotlib.pyplot.scatter`, but can also be, e.g., :func:`~matplotlib.pyplot.plot`. **kwargs All other keyword arguments are passed to the ``plot_function``. You can pass in any of the usual style kwargs like ``color=...``, ``marker=...``, etc. Returns ------- fig : `~matplotlib.Figure` """ # don't propagate changes back... x = np.array(x, copy=True) ndim = x.shape[0] # get axes object from arguments if axes is None: axes = _get_axes(dim=ndim, subplots_kwargs=subplots_kwargs) # depends on [control=['if'], data=['axes']] # if the quantities are relative if relative_to is not None: x -= relative_to # depends on [control=['if'], data=['relative_to']] # name of the plotting function plot_fn_name = plot_function.__name__ # automatically determine limits if autolim: lims = [] for i in range(ndim): (max_, min_) = (np.max(x[i]), np.min(x[i])) delta = max_ - min_ if delta == 0.0: delta = 1.0 # depends on [control=['if'], data=['delta']] lims.append([min_ - delta * 0.02, max_ + delta * 0.02]) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] k = 0 for i in range(ndim): for j in range(ndim): if i >= j: continue # skip diagonal, upper triangle # depends on [control=['if'], data=[]] plot_func = getattr(axes[k], plot_fn_name) plot_func(x[i], x[j], **kwargs) if labels is not None: axes[k].set_xlabel(labels[i]) axes[k].set_ylabel(labels[j]) # depends on [control=['if'], data=['labels']] if autolim: axes[k].set_xlim(lims[i]) axes[k].set_ylim(lims[j]) # depends on [control=['if'], data=[]] k += 1 # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] axes[0].figure.tight_layout() return axes[0].figure
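An example call; note that in this standalone snippet plot_function must be supplied explicitly (the scatter default mentioned in the docstring is wired up by calling code elsewhere, and _get_axes is module-internal):

import numpy as np
import matplotlib.pyplot as plt

x = np.random.randn(3, 200)  # 3 dimensions, 200 samples -> 3 pairwise panels
fig = plot_projections(x, labels=['x', 'y', 'z'],
                       plot_function=plt.scatter, s=4, alpha=0.5)
fig.savefig('projections.png')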
def cql_encode_datetime(self, val): """ Converts a :class:`datetime.datetime` object to a (string) integer timestamp with millisecond precision. """ timestamp = calendar.timegm(val.utctimetuple()) return str(long(timestamp * 1e3 + getattr(val, 'microsecond', 0) / 1e3))
def function[cql_encode_datetime, parameter[self, val]]: constant[ Converts a :class:`datetime.datetime` object to a (string) integer timestamp with millisecond precision. ] variable[timestamp] assign[=] call[name[calendar].timegm, parameter[call[name[val].utctimetuple, parameter[]]]] return[call[name[str], parameter[call[name[long], parameter[binary_operation[binary_operation[name[timestamp] * constant[1000.0]] + binary_operation[call[name[getattr], parameter[name[val], constant[microsecond], constant[0]]] / constant[1000.0]]]]]]]]
keyword[def] identifier[cql_encode_datetime] ( identifier[self] , identifier[val] ): literal[string] identifier[timestamp] = identifier[calendar] . identifier[timegm] ( identifier[val] . identifier[utctimetuple] ()) keyword[return] identifier[str] ( identifier[long] ( identifier[timestamp] * literal[int] + identifier[getattr] ( identifier[val] , literal[string] , literal[int] )/ literal[int] ))
def cql_encode_datetime(self, val): """ Converts a :class:`datetime.datetime` object to a (string) integer timestamp with millisecond precision. """ timestamp = calendar.timegm(val.utctimetuple()) return str(long(timestamp * 1000.0 + getattr(val, 'microsecond', 0) / 1000.0))
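A worked example of the arithmetic, with int standing in for the Python 2 long:

import calendar
from datetime import datetime

val = datetime(2020, 1, 2, 3, 4, 5, 678000)
timestamp = calendar.timegm(val.utctimetuple())       # 1577934245
print(int(timestamp * 1e3 + val.microsecond / 1e3))   # 1577934245678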
def _create_pane(self, window=None, command=None, start_directory=None): """ Create a new :class:`pymux.arrangement.Pane` instance. (Don't put it in a window yet.) :param window: If a window is given, take the CWD of the current process of that window as the start path for this pane. :param command: If given, run this command instead of `self.default_shell`. :param start_directory: If given, use this as the CWD. """ assert window is None or isinstance(window, Window) assert command is None or isinstance(command, six.text_type) assert start_directory is None or isinstance(start_directory, six.text_type) def done_callback(): " When the process finishes. " if not self.remain_on_exit: # Remove pane from layout. self.arrangement.remove_pane(pane) # No panes left? -> Quit. if not self.arrangement.has_panes: self.stop() # Make sure the right pane is focused for each client. for client_state in self._client_states.values(): client_state.sync_focus() self.invalidate() def bell(): " Sound bell on all clients. " if self.enable_bell: for c in self.apps: c.output.bell() # Start directory. if start_directory: path = start_directory elif window and window.active_process: # When the path of the active process is known, # start the new process at the same location. path = window.active_process.get_cwd() else: path = None def before_exec(): " Called in the process fork (in the child process). " # Go to this directory. try: os.chdir(path or self.original_cwd) except OSError: pass # No such file or directory. # Set terminal variable. (We emulate xterm.) os.environ['TERM'] = self.default_terminal # Make sure to set the PYMUX environment variable. if self.socket_name: os.environ['PYMUX'] = '%s,%i' % ( self.socket_name, pane.pane_id) if command: command = command.split() else: command = [self.default_shell] # Create new pane and terminal. terminal = Terminal(done_callback=done_callback, bell_func=bell, before_exec_func=before_exec) pane = Pane(terminal) # Keep track of panes. This is a WeakKeyDictionary, we only add, but # don't remove. self.panes_by_id[pane.pane_id] = pane logger.info('Created process %r.', command) return pane
def function[_create_pane, parameter[self, window, command, start_directory]]: constant[ Create a new :class:`pymux.arrangement.Pane` instance. (Don't put it in a window yet.) :param window: If a window is given, take the CWD of the current process of that window as the start path for this pane. :param command: If given, run this command instead of `self.default_shell`. :param start_directory: If given, use this as the CWD. ] assert[<ast.BoolOp object at 0x7da20c76d810>] assert[<ast.BoolOp object at 0x7da20c76ee60>] assert[<ast.BoolOp object at 0x7da20c76ff70>] def function[done_callback, parameter[]]: constant[ When the process finishes. ] if <ast.UnaryOp object at 0x7da20c76c8e0> begin[:] call[name[self].arrangement.remove_pane, parameter[name[pane]]] if <ast.UnaryOp object at 0x7da20c76ed70> begin[:] call[name[self].stop, parameter[]] for taget[name[client_state]] in starred[call[name[self]._client_states.values, parameter[]]] begin[:] call[name[client_state].sync_focus, parameter[]] call[name[self].invalidate, parameter[]] def function[bell, parameter[]]: constant[ Sound bell on all clients. ] if name[self].enable_bell begin[:] for taget[name[c]] in starred[name[self].apps] begin[:] call[name[c].output.bell, parameter[]] if name[start_directory] begin[:] variable[path] assign[=] name[start_directory] def function[before_exec, parameter[]]: constant[ Called in the process fork (in the child process). ] <ast.Try object at 0x7da20c76c130> call[name[os].environ][constant[TERM]] assign[=] name[self].default_terminal if name[self].socket_name begin[:] call[name[os].environ][constant[PYMUX]] assign[=] binary_operation[constant[%s,%i] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c76ece0>, <ast.Attribute object at 0x7da20c76fdc0>]]] if name[command] begin[:] variable[command] assign[=] call[name[command].split, parameter[]] variable[terminal] assign[=] call[name[Terminal], parameter[]] variable[pane] assign[=] call[name[Pane], parameter[name[terminal]]] call[name[self].panes_by_id][name[pane].pane_id] assign[=] name[pane] call[name[logger].info, parameter[constant[Created process %r.], name[command]]] return[name[pane]]
keyword[def] identifier[_create_pane] ( identifier[self] , identifier[window] = keyword[None] , identifier[command] = keyword[None] , identifier[start_directory] = keyword[None] ): literal[string] keyword[assert] identifier[window] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[window] , identifier[Window] ) keyword[assert] identifier[command] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[command] , identifier[six] . identifier[text_type] ) keyword[assert] identifier[start_directory] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[start_directory] , identifier[six] . identifier[text_type] ) keyword[def] identifier[done_callback] (): literal[string] keyword[if] keyword[not] identifier[self] . identifier[remain_on_exit] : identifier[self] . identifier[arrangement] . identifier[remove_pane] ( identifier[pane] ) keyword[if] keyword[not] identifier[self] . identifier[arrangement] . identifier[has_panes] : identifier[self] . identifier[stop] () keyword[for] identifier[client_state] keyword[in] identifier[self] . identifier[_client_states] . identifier[values] (): identifier[client_state] . identifier[sync_focus] () identifier[self] . identifier[invalidate] () keyword[def] identifier[bell] (): literal[string] keyword[if] identifier[self] . identifier[enable_bell] : keyword[for] identifier[c] keyword[in] identifier[self] . identifier[apps] : identifier[c] . identifier[output] . identifier[bell] () keyword[if] identifier[start_directory] : identifier[path] = identifier[start_directory] keyword[elif] identifier[window] keyword[and] identifier[window] . identifier[active_process] : identifier[path] = identifier[window] . identifier[active_process] . identifier[get_cwd] () keyword[else] : identifier[path] = keyword[None] keyword[def] identifier[before_exec] (): literal[string] keyword[try] : identifier[os] . identifier[chdir] ( identifier[path] keyword[or] identifier[self] . identifier[original_cwd] ) keyword[except] identifier[OSError] : keyword[pass] identifier[os] . identifier[environ] [ literal[string] ]= identifier[self] . identifier[default_terminal] keyword[if] identifier[self] . identifier[socket_name] : identifier[os] . identifier[environ] [ literal[string] ]= literal[string] %( identifier[self] . identifier[socket_name] , identifier[pane] . identifier[pane_id] ) keyword[if] identifier[command] : identifier[command] = identifier[command] . identifier[split] () keyword[else] : identifier[command] =[ identifier[self] . identifier[default_shell] ] identifier[terminal] = identifier[Terminal] ( identifier[done_callback] = identifier[done_callback] , identifier[bell_func] = identifier[bell] , identifier[before_exec_func] = identifier[before_exec] ) identifier[pane] = identifier[Pane] ( identifier[terminal] ) identifier[self] . identifier[panes_by_id] [ identifier[pane] . identifier[pane_id] ]= identifier[pane] identifier[logger] . identifier[info] ( literal[string] , identifier[command] ) keyword[return] identifier[pane]
def _create_pane(self, window=None, command=None, start_directory=None): """ Create a new :class:`pymux.arrangement.Pane` instance. (Don't put it in a window yet.) :param window: If a window is given, take the CWD of the current process of that window as the start path for this pane. :param command: If given, run this command instead of `self.default_shell`. :param start_directory: If given, use this as the CWD. """ assert window is None or isinstance(window, Window) assert command is None or isinstance(command, six.text_type) assert start_directory is None or isinstance(start_directory, six.text_type) def done_callback(): """ When the process finishes. """ if not self.remain_on_exit: # Remove pane from layout. self.arrangement.remove_pane(pane) # No panes left? -> Quit. if not self.arrangement.has_panes: self.stop() # depends on [control=['if'], data=[]] # Make sure the right pane is focused for each client. for client_state in self._client_states.values(): client_state.sync_focus() # depends on [control=['for'], data=['client_state']] # depends on [control=['if'], data=[]] self.invalidate() def bell(): """ Sound bell on all clients. """ if self.enable_bell: for c in self.apps: c.output.bell() # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=[]] # Start directory. if start_directory: path = start_directory # depends on [control=['if'], data=[]] elif window and window.active_process: # When the path of the active process is known, # start the new process at the same location. path = window.active_process.get_cwd() # depends on [control=['if'], data=[]] else: path = None def before_exec(): """ Called in the process fork (in the child process). """ # Go to this directory. try: os.chdir(path or self.original_cwd) # depends on [control=['try'], data=[]] except OSError: pass # No such file or directory. # depends on [control=['except'], data=[]] # Set terminal variable. (We emulate xterm.) os.environ['TERM'] = self.default_terminal # Make sure to set the PYMUX environment variable. if self.socket_name: os.environ['PYMUX'] = '%s,%i' % (self.socket_name, pane.pane_id) # depends on [control=['if'], data=[]] if command: command = command.split() # depends on [control=['if'], data=[]] else: command = [self.default_shell] # Create new pane and terminal. terminal = Terminal(done_callback=done_callback, bell_func=bell, before_exec_func=before_exec) pane = Pane(terminal) # Keep track of panes. This is a WeakKeyDictionary, we only add, but # don't remove. self.panes_by_id[pane.pane_id] = pane logger.info('Created process %r.', command) return pane
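Inside a pane's child process, the PYMUX variable set by before_exec above can be read back. A small sketch; the parsing is implied by the '%s,%i' format used in the code, not taken from pymux itself:

import os

socket_name, pane_id = os.environ['PYMUX'].rsplit(',', 1)
pane_id = int(pane_id)  # the trailing %i component is the pane id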
def description_of(lines, name='stdin'): """ Return a string describing the probable encoding of a file or list of strings. :param lines: The lines to get the encoding of. :type lines: Iterable of bytes :param name: Name of file or collection of lines :type name: str """ u = UniversalDetector() for line in lines: line = bytearray(line) u.feed(line) # shortcut out of the loop to save reading further - particularly useful if we read a BOM. if u.done: break u.close() result = u.result if PY2: name = name.decode(sys.getfilesystemencoding(), 'ignore') if result['encoding']: return '{0}: {1} with confidence {2}'.format(name, result['encoding'], result['confidence']) else: return '{0}: no result'.format(name)
def function[description_of, parameter[lines, name]]: constant[ Return a string describing the probable encoding of a file or list of strings. :param lines: The lines to get the encoding of. :type lines: Iterable of bytes :param name: Name of file or collection of lines :type name: str ] variable[u] assign[=] call[name[UniversalDetector], parameter[]] for taget[name[line]] in starred[name[lines]] begin[:] variable[line] assign[=] call[name[bytearray], parameter[name[line]]] call[name[u].feed, parameter[name[line]]] if name[u].done begin[:] break call[name[u].close, parameter[]] variable[result] assign[=] name[u].result if name[PY2] begin[:] variable[name] assign[=] call[name[name].decode, parameter[call[name[sys].getfilesystemencoding, parameter[]], constant[ignore]]] if call[name[result]][constant[encoding]] begin[:] return[call[constant[{0}: {1} with confidence {2}].format, parameter[name[name], call[name[result]][constant[encoding]], call[name[result]][constant[confidence]]]]]
keyword[def] identifier[description_of] ( identifier[lines] , identifier[name] = literal[string] ): literal[string] identifier[u] = identifier[UniversalDetector] () keyword[for] identifier[line] keyword[in] identifier[lines] : identifier[line] = identifier[bytearray] ( identifier[line] ) identifier[u] . identifier[feed] ( identifier[line] ) keyword[if] identifier[u] . identifier[done] : keyword[break] identifier[u] . identifier[close] () identifier[result] = identifier[u] . identifier[result] keyword[if] identifier[PY2] : identifier[name] = identifier[name] . identifier[decode] ( identifier[sys] . identifier[getfilesystemencoding] (), literal[string] ) keyword[if] identifier[result] [ literal[string] ]: keyword[return] literal[string] . identifier[format] ( identifier[name] , identifier[result] [ literal[string] ], identifier[result] [ literal[string] ]) keyword[else] : keyword[return] literal[string] . identifier[format] ( identifier[name] )
def description_of(lines, name='stdin'): """ Return a string describing the probable encoding of a file or list of strings. :param lines: The lines to get the encoding of. :type lines: Iterable of bytes :param name: Name of file or collection of lines :type name: str """ u = UniversalDetector() for line in lines: line = bytearray(line) u.feed(line) # shortcut out of the loop to save reading further - particularly useful if we read a BOM. if u.done: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] u.close() result = u.result if PY2: name = name.decode(sys.getfilesystemencoding(), 'ignore') # depends on [control=['if'], data=[]] if result['encoding']: return '{0}: {1} with confidence {2}'.format(name, result['encoding'], result['confidence']) # depends on [control=['if'], data=[]] else: return '{0}: no result'.format(name)
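A minimal usage sketch for description_of above, assuming the chardet package (home of UniversalDetector and this helper) is installed; the sample bytes and the 'greeting' name are illustrative:

data = [b'\xef\xbb\xbfhello world\n']  # a UTF-8 BOM lets the detector finish after one line
print(description_of(data, name='greeting'))
# prints something like: greeting: UTF-8-SIG with confidence 1.0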
def net_graph(block=None, split_state=False): """ Return a graph representation of the current block. Graph has the following form: { node1: { nodeA: edge1A, nodeB: edge1B}, node2: { nodeB: edge2B, nodeC: edge2C}, ... } aka: edge = graph[source][dest] Each node can be either a logic net or a WireVector (e.g. an Input, and Output, a Const or even an undriven WireVector (which acts as a source or sink in the network) Each edge is a WireVector or derived type (Input, Output, Register, etc.) Note that inputs, consts, and outputs will be both "node" and "edge". WireVectors that are not connected to any nets are not returned as part of the graph. """ # FIXME: make it not try to add unused wires (issue #204) block = working_block(block) from .wire import Register # self.sanity_check() graph = {} # add all of the nodes for net in block.logic: graph[net] = {} wire_src_dict, wire_dst_dict = block.net_connections() dest_set = set(wire_src_dict.keys()) arg_set = set(wire_dst_dict.keys()) dangle_set = dest_set.symmetric_difference(arg_set) for w in dangle_set: graph[w] = {} if split_state: for w in block.wirevector_subset(Register): graph[w] = {} # add all of the edges for w in (dest_set & arg_set): try: _from = wire_src_dict[w] except Exception: _from = w if split_state and isinstance(w, Register): _from = w try: _to_list = wire_dst_dict[w] except Exception: _to_list = [w] for _to in _to_list: graph[_from][_to] = w return graph
def function[net_graph, parameter[block, split_state]]: constant[ Return a graph representation of the current block. Graph has the following form: { node1: { nodeA: edge1A, nodeB: edge1B}, node2: { nodeB: edge2B, nodeC: edge2C}, ... } aka: edge = graph[source][dest] Each node can be either a logic net or a WireVector (e.g. an Input, and Output, a Const or even an undriven WireVector (which acts as a source or sink in the network) Each edge is a WireVector or derived type (Input, Output, Register, etc.) Note that inputs, consts, and outputs will be both "node" and "edge". WireVectors that are not connected to any nets are not returned as part of the graph. ] variable[block] assign[=] call[name[working_block], parameter[name[block]]] from relative_module[wire] import module[Register] variable[graph] assign[=] dictionary[[], []] for taget[name[net]] in starred[name[block].logic] begin[:] call[name[graph]][name[net]] assign[=] dictionary[[], []] <ast.Tuple object at 0x7da20c6aa470> assign[=] call[name[block].net_connections, parameter[]] variable[dest_set] assign[=] call[name[set], parameter[call[name[wire_src_dict].keys, parameter[]]]] variable[arg_set] assign[=] call[name[set], parameter[call[name[wire_dst_dict].keys, parameter[]]]] variable[dangle_set] assign[=] call[name[dest_set].symmetric_difference, parameter[name[arg_set]]] for taget[name[w]] in starred[name[dangle_set]] begin[:] call[name[graph]][name[w]] assign[=] dictionary[[], []] if name[split_state] begin[:] for taget[name[w]] in starred[call[name[block].wirevector_subset, parameter[name[Register]]]] begin[:] call[name[graph]][name[w]] assign[=] dictionary[[], []] for taget[name[w]] in starred[binary_operation[name[dest_set] <ast.BitAnd object at 0x7da2590d6b60> name[arg_set]]] begin[:] <ast.Try object at 0x7da20c6e4c40> if <ast.BoolOp object at 0x7da20c6e7640> begin[:] variable[_from] assign[=] name[w] <ast.Try object at 0x7da20c6e4b50> for taget[name[_to]] in starred[name[_to_list]] begin[:] call[call[name[graph]][name[_from]]][name[_to]] assign[=] name[w] return[name[graph]]
keyword[def] identifier[net_graph] ( identifier[block] = keyword[None] , identifier[split_state] = keyword[False] ): literal[string] identifier[block] = identifier[working_block] ( identifier[block] ) keyword[from] . identifier[wire] keyword[import] identifier[Register] identifier[graph] ={} keyword[for] identifier[net] keyword[in] identifier[block] . identifier[logic] : identifier[graph] [ identifier[net] ]={} identifier[wire_src_dict] , identifier[wire_dst_dict] = identifier[block] . identifier[net_connections] () identifier[dest_set] = identifier[set] ( identifier[wire_src_dict] . identifier[keys] ()) identifier[arg_set] = identifier[set] ( identifier[wire_dst_dict] . identifier[keys] ()) identifier[dangle_set] = identifier[dest_set] . identifier[symmetric_difference] ( identifier[arg_set] ) keyword[for] identifier[w] keyword[in] identifier[dangle_set] : identifier[graph] [ identifier[w] ]={} keyword[if] identifier[split_state] : keyword[for] identifier[w] keyword[in] identifier[block] . identifier[wirevector_subset] ( identifier[Register] ): identifier[graph] [ identifier[w] ]={} keyword[for] identifier[w] keyword[in] ( identifier[dest_set] & identifier[arg_set] ): keyword[try] : identifier[_from] = identifier[wire_src_dict] [ identifier[w] ] keyword[except] identifier[Exception] : identifier[_from] = identifier[w] keyword[if] identifier[split_state] keyword[and] identifier[isinstance] ( identifier[w] , identifier[Register] ): identifier[_from] = identifier[w] keyword[try] : identifier[_to_list] = identifier[wire_dst_dict] [ identifier[w] ] keyword[except] identifier[Exception] : identifier[_to_list] =[ identifier[w] ] keyword[for] identifier[_to] keyword[in] identifier[_to_list] : identifier[graph] [ identifier[_from] ][ identifier[_to] ]= identifier[w] keyword[return] identifier[graph]
def net_graph(block=None, split_state=False): """ Return a graph representation of the current block. Graph has the following form: { node1: { nodeA: edge1A, nodeB: edge1B}, node2: { nodeB: edge2B, nodeC: edge2C}, ... } aka: edge = graph[source][dest] Each node can be either a logic net or a WireVector (e.g. an Input, and Output, a Const or even an undriven WireVector (which acts as a source or sink in the network) Each edge is a WireVector or derived type (Input, Output, Register, etc.) Note that inputs, consts, and outputs will be both "node" and "edge". WireVectors that are not connected to any nets are not returned as part of the graph. """ # FIXME: make it not try to add unused wires (issue #204) block = working_block(block) from .wire import Register # self.sanity_check() graph = {} # add all of the nodes for net in block.logic: graph[net] = {} # depends on [control=['for'], data=['net']] (wire_src_dict, wire_dst_dict) = block.net_connections() dest_set = set(wire_src_dict.keys()) arg_set = set(wire_dst_dict.keys()) dangle_set = dest_set.symmetric_difference(arg_set) for w in dangle_set: graph[w] = {} # depends on [control=['for'], data=['w']] if split_state: for w in block.wirevector_subset(Register): graph[w] = {} # depends on [control=['for'], data=['w']] # depends on [control=['if'], data=[]] # add all of the edges for w in dest_set & arg_set: try: _from = wire_src_dict[w] # depends on [control=['try'], data=[]] except Exception: _from = w # depends on [control=['except'], data=[]] if split_state and isinstance(w, Register): _from = w # depends on [control=['if'], data=[]] try: _to_list = wire_dst_dict[w] # depends on [control=['try'], data=[]] except Exception: _to_list = [w] # depends on [control=['except'], data=[]] for _to in _to_list: graph[_from][_to] = w # depends on [control=['for'], data=['_to']] # depends on [control=['for'], data=['w']] return graph
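A usage sketch for net_graph, assuming the PyRTL package is installed and the function above operates on the implicit working block; the wire names and widths are made up:

import pyrtl

pyrtl.reset_working_block()
a, b = pyrtl.Input(4, 'a'), pyrtl.Input(4, 'b')
s = pyrtl.Output(5, 's')
s <<= a + b  # one adder net plus the output connection

graph = net_graph()  # block=None resolves to the working block
for src, dests in graph.items():
    for dst, wire in dests.items():
        print(src, '->', dst, 'via', wire)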
def worksheet(self, title):
    """Returns a worksheet with specified `title`.

    :param title: A title of a worksheet. If there are multiple
                  worksheets with the same title, the first one
                  will be returned.
    :type title: str

    :returns: an instance of :class:`gspread.models.Worksheet`.

    Example. Getting worksheet named 'Annual bonuses'

    >>> sht = client.open('Sample one')
    >>> worksheet = sht.worksheet('Annual bonuses')

    """
    sheet_data = self.fetch_sheet_metadata()

    try:
        item = finditem(
            lambda x: x['properties']['title'] == title,
            sheet_data['sheets']
        )
        return Worksheet(self, item['properties'])
    except (StopIteration, KeyError):
        raise WorksheetNotFound(title)
def function[worksheet, parameter[self, title]]: constant[Returns a worksheet with specified `title`. :param title: A title of a worksheet. If there're multiple worksheets with the same title, first one will be returned. :type title: int :returns: an instance of :class:`gsperad.models.Worksheet`. Example. Getting worksheet named 'Annual bonuses' >>> sht = client.open('Sample one') >>> worksheet = sht.worksheet('Annual bonuses') ] variable[sheet_data] assign[=] call[name[self].fetch_sheet_metadata, parameter[]] <ast.Try object at 0x7da20e9571c0>
keyword[def] identifier[worksheet] ( identifier[self] , identifier[title] ): literal[string] identifier[sheet_data] = identifier[self] . identifier[fetch_sheet_metadata] () keyword[try] : identifier[item] = identifier[finditem] ( keyword[lambda] identifier[x] : identifier[x] [ literal[string] ][ literal[string] ]== identifier[title] , identifier[sheet_data] [ literal[string] ] ) keyword[return] identifier[Worksheet] ( identifier[self] , identifier[item] [ literal[string] ]) keyword[except] ( identifier[StopIteration] , identifier[KeyError] ): keyword[raise] identifier[WorksheetNotFound] ( identifier[title] )
def worksheet(self, title):
    """Returns a worksheet with specified `title`.

    :param title: A title of a worksheet. If there are multiple
                  worksheets with the same title, the first one
                  will be returned.
    :type title: str

    :returns: an instance of :class:`gspread.models.Worksheet`.

    Example. Getting worksheet named 'Annual bonuses'

    >>> sht = client.open('Sample one')
    >>> worksheet = sht.worksheet('Annual bonuses')

    """
    sheet_data = self.fetch_sheet_metadata()
    try:
        item = finditem(lambda x: x['properties']['title'] == title, sheet_data['sheets'])
        return Worksheet(self, item['properties']) # depends on [control=['try'], data=[]]
    except (StopIteration, KeyError):
        raise WorksheetNotFound(title) # depends on [control=['except'], data=[]]
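worksheet() leans on a small finditem helper; a minimal equivalent is sketched below (gspread ships its own version, whose exact signature is assumed here). It clarifies why a missing title surfaces as StopIteration, which worksheet() converts into WorksheetNotFound:

def finditem(func, seq):
    """Return the first item in seq for which func(item) is true."""
    return next(item for item in seq if func(item))  # raises StopIteration if nothing matches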
async def _on_message(self, update): """Receive a message from a service.""" name = update['service'] message_obj = update['payload'] if name not in self.services: return with self._state_lock: self.services[name].post_message(message_obj['level'], message_obj['message'])
<ast.AsyncFunctionDef object at 0x7da18fe903a0>
keyword[async] keyword[def] identifier[_on_message] ( identifier[self] , identifier[update] ): literal[string] identifier[name] = identifier[update] [ literal[string] ] identifier[message_obj] = identifier[update] [ literal[string] ] keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[services] : keyword[return] keyword[with] identifier[self] . identifier[_state_lock] : identifier[self] . identifier[services] [ identifier[name] ]. identifier[post_message] ( identifier[message_obj] [ literal[string] ], identifier[message_obj] [ literal[string] ])
async def _on_message(self, update): """Receive a message from a service.""" name = update['service'] message_obj = update['payload'] if name not in self.services: return # depends on [control=['if'], data=[]] with self._state_lock: self.services[name].post_message(message_obj['level'], message_obj['message']) # depends on [control=['with'], data=[]]
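A sketch of the update dict _on_message expects; the service name and payload values are hypothetical, and the call itself must happen inside a coroutine:

update = {
    'service': 'camera_service',  # hypothetical; must be a key in self.services
    'payload': {'level': 2, 'message': 'stream started'},
}
# From within a coroutine, given 'monitor' as a hypothetical instance of the owning class:
#     await monitor._on_message(update)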
def communicate(self): """Read each request and respond appropriately.""" request_seen = False try: while True: # (re)set req to None so that if something goes wrong in # the RequestHandlerClass constructor, the error doesn't # get written to the previous request. req = None req = self.RequestHandlerClass(self.server, self) # This order of operations should guarantee correct pipelining. req.parse_request() if self.server.stats['Enabled']: self.requests_seen += 1 if not req.ready: # Something went wrong in the parsing (and the server has # probably already made a simple_response). Return and # let the conn close. return request_seen = True req.respond() if req.close_connection: return except socket.error as ex: errnum = ex.args[0] # sadly SSL sockets return a different (longer) time out string timeout_errs = 'timed out', 'The read operation timed out' if errnum in timeout_errs: # Don't error if we're between requests; only error # if 1) no request has been started at all, or 2) we're # in the middle of a request. # See https://github.com/cherrypy/cherrypy/issues/853 if (not request_seen) or (req and req.started_request): self._conditional_error(req, '408 Request Timeout') elif errnum not in errors.socket_errors_to_ignore: self.server.error_log( 'socket.error %s' % repr(errnum), level=logging.WARNING, traceback=True, ) self._conditional_error(req, '500 Internal Server Error') except (KeyboardInterrupt, SystemExit): raise except errors.FatalSSLAlert: pass except errors.NoSSLError: self._handle_no_ssl(req) except Exception as ex: self.server.error_log( repr(ex), level=logging.ERROR, traceback=True, ) self._conditional_error(req, '500 Internal Server Error')
def function[communicate, parameter[self]]: constant[Read each request and respond appropriately.] variable[request_seen] assign[=] constant[False] <ast.Try object at 0x7da20c6e6e60>
keyword[def] identifier[communicate] ( identifier[self] ): literal[string] identifier[request_seen] = keyword[False] keyword[try] : keyword[while] keyword[True] : identifier[req] = keyword[None] identifier[req] = identifier[self] . identifier[RequestHandlerClass] ( identifier[self] . identifier[server] , identifier[self] ) identifier[req] . identifier[parse_request] () keyword[if] identifier[self] . identifier[server] . identifier[stats] [ literal[string] ]: identifier[self] . identifier[requests_seen] += literal[int] keyword[if] keyword[not] identifier[req] . identifier[ready] : keyword[return] identifier[request_seen] = keyword[True] identifier[req] . identifier[respond] () keyword[if] identifier[req] . identifier[close_connection] : keyword[return] keyword[except] identifier[socket] . identifier[error] keyword[as] identifier[ex] : identifier[errnum] = identifier[ex] . identifier[args] [ literal[int] ] identifier[timeout_errs] = literal[string] , literal[string] keyword[if] identifier[errnum] keyword[in] identifier[timeout_errs] : keyword[if] ( keyword[not] identifier[request_seen] ) keyword[or] ( identifier[req] keyword[and] identifier[req] . identifier[started_request] ): identifier[self] . identifier[_conditional_error] ( identifier[req] , literal[string] ) keyword[elif] identifier[errnum] keyword[not] keyword[in] identifier[errors] . identifier[socket_errors_to_ignore] : identifier[self] . identifier[server] . identifier[error_log] ( literal[string] % identifier[repr] ( identifier[errnum] ), identifier[level] = identifier[logging] . identifier[WARNING] , identifier[traceback] = keyword[True] , ) identifier[self] . identifier[_conditional_error] ( identifier[req] , literal[string] ) keyword[except] ( identifier[KeyboardInterrupt] , identifier[SystemExit] ): keyword[raise] keyword[except] identifier[errors] . identifier[FatalSSLAlert] : keyword[pass] keyword[except] identifier[errors] . identifier[NoSSLError] : identifier[self] . identifier[_handle_no_ssl] ( identifier[req] ) keyword[except] identifier[Exception] keyword[as] identifier[ex] : identifier[self] . identifier[server] . identifier[error_log] ( identifier[repr] ( identifier[ex] ), identifier[level] = identifier[logging] . identifier[ERROR] , identifier[traceback] = keyword[True] , ) identifier[self] . identifier[_conditional_error] ( identifier[req] , literal[string] )
def communicate(self): """Read each request and respond appropriately.""" request_seen = False try: while True: # (re)set req to None so that if something goes wrong in # the RequestHandlerClass constructor, the error doesn't # get written to the previous request. req = None req = self.RequestHandlerClass(self.server, self) # This order of operations should guarantee correct pipelining. req.parse_request() if self.server.stats['Enabled']: self.requests_seen += 1 # depends on [control=['if'], data=[]] if not req.ready: # Something went wrong in the parsing (and the server has # probably already made a simple_response). Return and # let the conn close. return # depends on [control=['if'], data=[]] request_seen = True req.respond() if req.close_connection: return # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]] except socket.error as ex: errnum = ex.args[0] # sadly SSL sockets return a different (longer) time out string timeout_errs = ('timed out', 'The read operation timed out') if errnum in timeout_errs: # Don't error if we're between requests; only error # if 1) no request has been started at all, or 2) we're # in the middle of a request. # See https://github.com/cherrypy/cherrypy/issues/853 if not request_seen or (req and req.started_request): self._conditional_error(req, '408 Request Timeout') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif errnum not in errors.socket_errors_to_ignore: self.server.error_log('socket.error %s' % repr(errnum), level=logging.WARNING, traceback=True) self._conditional_error(req, '500 Internal Server Error') # depends on [control=['if'], data=['errnum']] # depends on [control=['except'], data=['ex']] except (KeyboardInterrupt, SystemExit): raise # depends on [control=['except'], data=[]] except errors.FatalSSLAlert: pass # depends on [control=['except'], data=[]] except errors.NoSSLError: self._handle_no_ssl(req) # depends on [control=['except'], data=[]] except Exception as ex: self.server.error_log(repr(ex), level=logging.ERROR, traceback=True) self._conditional_error(req, '500 Internal Server Error') # depends on [control=['except'], data=['ex']]
def remove_core_element(self, model):
    """Remove the core element of the given scoped variable model

    :param ScopedVariableModel model: Scoped variable model whose core element should be removed
    :return:
    """
    assert model.scoped_variable.parent is self.model.state
    gui_helper_state_machine.delete_core_element_of_model(model)
def function[remove_core_element, parameter[self, model]]: constant[Remove respective core element of handed scoped variable model :param ScopedVariableModel model: Scoped variable model which core element should be removed :return: ] assert[compare[name[model].scoped_variable.parent is name[self].model.state]] call[name[gui_helper_state_machine].delete_core_element_of_model, parameter[name[model]]]
keyword[def] identifier[remove_core_element] ( identifier[self] , identifier[model] ): literal[string] keyword[assert] identifier[model] . identifier[scoped_variable] . identifier[parent] keyword[is] identifier[self] . identifier[model] . identifier[state] identifier[gui_helper_state_machine] . identifier[delete_core_element_of_model] ( identifier[model] )
def remove_core_element(self, model):
    """Remove the core element of the given scoped variable model

    :param ScopedVariableModel model: Scoped variable model whose core element should be removed
    :return:
    """
    assert model.scoped_variable.parent is self.model.state
    gui_helper_state_machine.delete_core_element_of_model(model)
def set_attr(self): """ set the data for this column """ setattr(self.attrs, self.kind_attr, self.values) setattr(self.attrs, self.meta_attr, self.meta) if self.dtype is not None: setattr(self.attrs, self.dtype_attr, self.dtype)
def function[set_attr, parameter[self]]: constant[ set the data for this column ] call[name[setattr], parameter[name[self].attrs, name[self].kind_attr, name[self].values]] call[name[setattr], parameter[name[self].attrs, name[self].meta_attr, name[self].meta]] if compare[name[self].dtype is_not constant[None]] begin[:] call[name[setattr], parameter[name[self].attrs, name[self].dtype_attr, name[self].dtype]]
keyword[def] identifier[set_attr] ( identifier[self] ): literal[string] identifier[setattr] ( identifier[self] . identifier[attrs] , identifier[self] . identifier[kind_attr] , identifier[self] . identifier[values] ) identifier[setattr] ( identifier[self] . identifier[attrs] , identifier[self] . identifier[meta_attr] , identifier[self] . identifier[meta] ) keyword[if] identifier[self] . identifier[dtype] keyword[is] keyword[not] keyword[None] : identifier[setattr] ( identifier[self] . identifier[attrs] , identifier[self] . identifier[dtype_attr] , identifier[self] . identifier[dtype] )
def set_attr(self): """ set the data for this column """ setattr(self.attrs, self.kind_attr, self.values) setattr(self.attrs, self.meta_attr, self.meta) if self.dtype is not None: setattr(self.attrs, self.dtype_attr, self.dtype) # depends on [control=['if'], data=[]]
def check_garner(text):
    """Suggest the preferred forms.

    source:     Garner's Modern American Usage
    source_url: http://bit.ly/1T4alrY
    """
    err = "redundancy.garner"
    msg = "Redundancy. Use '{}' instead of '{}'."

    redundancies = [
        ["adequate", ["adequate enough"]],
        ["admitted", ["self-admitted"]],
        ["affidavit", ["sworn affidavit"]],
        ["agreement", ["mutual agreement"]],
        ["alumnus", ["former alumnus"]],
        ["antithetical", ["directly antithetical"]],
        ["approximately", ["approximately about"]],
        ["associate", ["associate together(?: in groups)?"]],
        ["bivouac", ["temporary bivouac", "bivouac camp"]],
        ["blend", ["blend together"]],
        ["but", ["but nevertheless"]],
        ["charged with...", ["accused of a charge"]],
        ["circumstances of", ["circumstances surrounding"]],
        ["circumstances", ["surrounding circumstances"]],
        ["close", ["close proximity"]],
        ["collaborate", ["collaborate together"]],
        ["collaborator", ["fellow collaborator"]],
        ["collaborators", ["fellow collaborators"]],
        ["collocated", ["collocated together"]],
        ["colleagues", ["fellow colleagues"]],
        ["combine", ["combine together"]],
        ["complacent", ["self-complacent"]],
        ["confessed", ["self-confessed"]],
        ["connect", ["connect together"]],
        ["consensus", ["(?:general )?consensus of opinion"]],
        ["consolidate", ["consolidate together"]],
        ["continues to", ["still continues to"]],
        ["contradictory", ["mutually contradictory"]],
        ["cooperation", ["mutual cooperation"]],
        ["couple", ["couple together"]],
        ["crisis", ["serious crisis"]],
        ["eliminate", ["entirely eliminate"]],
        ["especially", ["most especially"]],
        ["fact", ["actual fact"]],
        ["facts", ["true facts"]],
        ["forecast", ["future forecast"]],
        ["founding fathers", ["founding forefathers"]],
        ["free", ["free and gratis"]],
        ["free", ["free gratis"]],
        ["full", ["completely full"]],
        ["fundamentals", ["basic fundamentals"]],
        ["gift", ["free gift"]],
        ["innovation", ["new innovation"]],
        ["interact", ["interact with each other"]],
        ["large", ["large-size"]],
        ["meld", ["meld together"]],
        ["merge", ["merge together"]],
        ["mingle", ["mingle together"]],
        ["mix", ["mix together"]],
        ["mutual feelings", ["mutual feelings for eachother"]],
        ["mutual respect", ["mutual respect for each other"]],
        ["native citizen", ["native-born citizen"]],
        ["necessity", ["absolute necessity"]],
        ["obvious", ["blatantly obvious"]],
        ["pause", ["pause for a moment"]],
        ["planning", ["advance planning"]],
        ["plans", ["future plans"]],
        ["pooled", ["pooled together"]],
        ["potable water", ["potable drinking water"]],
        ["potable water", ["potable drinking water"]],
        ["recruit", ["new recruit"]],
        ["reelected", ["reelected for another term"]],
        ["refer", ["refer back"]],
        ["regress", ["regress back"]],
        ["repay them", ["repay them back"]],
        ["repay", ["repay back"]],
        ["repeat", ["repeat again"]],
        ["repeat", ["repeat back"]],
        ["repeat", ["repeat the same"]],
        ["repeated", ["repeated the same"]],
        ["reprieve", ["temporary reprieve"]],
        ["respite", ["brief respite"]],
        ["retirement", ["retiral", "retiracy"]],
        ["retreat", ["retreat back"]],
        ["return", ["return back"]],
        ["scrutinize", ["closely scrutinize"]],
        ["software", ["software program"]],
        ["surrounded", ["surrounded on all sides"]],
        ["the nation", ["the whole entire nation"]],
        ["throughout the", ["throughout the entire"]],
        ["timpani", ["timpani drum"]],
        ["twins", ["pair of twins"]],
        ["vacancy", ["unfilled vacancy"]],
        ["various", ["various different"]],
        ["veteran", ["former veteran"]],
        ["visible", ["visible to the eye"]],
        ["vocation", ["professional vocation"]],
        ["while", ["while at the same time"]],
    ]

    return preferred_forms_check(text, redundancies, err, msg)
def function[check_garner, parameter[text]]: constant[Suggest the preferred forms. source: Garner's Modern American Usage source_url: http://bit.ly/1T4alrY ] variable[err] assign[=] constant[redundancy.garner] variable[msg] assign[=] constant[Redundancy. Use '{}' instead of '{}'.] variable[redundancies] assign[=] list[[<ast.List object at 0x7da1b0868e20>, <ast.List object at 0x7da1b086be20>, <ast.List object at 0x7da1b086be50>, <ast.List object at 0x7da1b086a740>, <ast.List object at 0x7da1b086ad40>, <ast.List object at 0x7da1b08697b0>, <ast.List object at 0x7da1b0862140>, <ast.List object at 0x7da1b08ca8c0>, <ast.List object at 0x7da1b08c8b80>, <ast.List object at 0x7da1b08c9720>, <ast.List object at 0x7da1b08cbd30>, <ast.List object at 0x7da1b08c9ea0>, <ast.List object at 0x7da1b086b820>, <ast.List object at 0x7da1b0869720>, <ast.List object at 0x7da1b0869d80>, <ast.List object at 0x7da1b086a1d0>, <ast.List object at 0x7da1b0868b20>, <ast.List object at 0x7da1b086b100>, <ast.List object at 0x7da1b08681c0>, <ast.List object at 0x7da1b086bc10>, <ast.List object at 0x7da1b086a020>, <ast.List object at 0x7da1b0868250>, <ast.List object at 0x7da1b086ab90>, <ast.List object at 0x7da1b0868850>, <ast.List object at 0x7da1b086a620>, <ast.List object at 0x7da1b0869c30>, <ast.List object at 0x7da1b086b310>, <ast.List object at 0x7da1b0868910>, <ast.List object at 0x7da1b0868b80>, <ast.List object at 0x7da1b086af20>, <ast.List object at 0x7da1b0869900>, <ast.List object at 0x7da1b086ba00>, <ast.List object at 0x7da1b0868f40>, <ast.List object at 0x7da1b0869f90>, <ast.List object at 0x7da1b0868460>, <ast.List object at 0x7da1b08690c0>, <ast.List object at 0x7da1b086b190>, <ast.List object at 0x7da1b08687c0>, <ast.List object at 0x7da1b086bc70>, <ast.List object at 0x7da1b0869090>, <ast.List object at 0x7da1b0641e70>, <ast.List object at 0x7da1b0642920>, <ast.List object at 0x7da1b0640c10>, <ast.List object at 0x7da1b06408b0>, <ast.List object at 0x7da1b0642170>, <ast.List object at 0x7da1b0643b20>, <ast.List object at 0x7da1b0643f70>, <ast.List object at 0x7da1b0641f60>, <ast.List object at 0x7da1b0640e20>, <ast.List object at 0x7da1b0640ee0>, <ast.List object at 0x7da1b0642770>, <ast.List object at 0x7da1b06427d0>, <ast.List object at 0x7da1b06413f0>, <ast.List object at 0x7da1b0641b10>, <ast.List object at 0x7da1b0642a70>, <ast.List object at 0x7da1b0640400>, <ast.List object at 0x7da1b06402e0>, <ast.List object at 0x7da1b0643430>, <ast.List object at 0x7da1b06401c0>, <ast.List object at 0x7da1b06425f0>, <ast.List object at 0x7da1b0643190>, <ast.List object at 0x7da1b0642050>, <ast.List object at 0x7da1b0642aa0>, <ast.List object at 0x7da1b0642260>, <ast.List object at 0x7da1b0640cd0>, <ast.List object at 0x7da1b0643b80>, <ast.List object at 0x7da1b06434f0>, <ast.List object at 0x7da1b06403d0>, <ast.List object at 0x7da1b0642410>, <ast.List object at 0x7da1b0643280>, <ast.List object at 0x7da1b0642ce0>, <ast.List object at 0x7da1b0640040>, <ast.List object at 0x7da1b0640070>, <ast.List object at 0x7da1b06434c0>, <ast.List object at 0x7da1b06414e0>, <ast.List object at 0x7da1b0640220>, <ast.List object at 0x7da1b0642d70>, <ast.List object at 0x7da1b0642470>, <ast.List object at 0x7da1b0641720>, <ast.List object at 0x7da1b0642350>, <ast.List object at 0x7da1b0641510>, <ast.List object at 0x7da1b0641a50>, <ast.List object at 0x7da1b06416f0>, <ast.List object at 0x7da1b0641840>, <ast.List object at 0x7da1b0640310>, <ast.List object at 0x7da1b0640bb0>, <ast.List object at 0x7da1b0642dd0>, <ast.List object at 0x7da1b0640850>]] return[call[name[preferred_forms_check], parameter[name[text], name[redundancies], name[err], name[msg]]]]
keyword[def] identifier[check_garner] ( identifier[text] ): literal[string] identifier[err] = literal[string] identifier[msg] = literal[string] identifier[redundancies] =[ [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] , literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] , literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], [ literal[string] ,[ literal[string] ]], ] keyword[return] identifier[preferred_forms_check] ( identifier[text] , identifier[redundancies] , identifier[err] , identifier[msg] )
def check_garner(text):
    """Suggest the preferred forms.

    source:     Garner's Modern American Usage
    source_url: http://bit.ly/1T4alrY
    """
    err = 'redundancy.garner'
    msg = "Redundancy. Use '{}' instead of '{}'."
    redundancies = [['adequate', ['adequate enough']], ['admitted', ['self-admitted']], ['affidavit', ['sworn affidavit']], ['agreement', ['mutual agreement']], ['alumnus', ['former alumnus']], ['antithetical', ['directly antithetical']], ['approximately', ['approximately about']], ['associate', ['associate together(?: in groups)?']], ['bivouac', ['temporary bivouac', 'bivouac camp']], ['blend', ['blend together']], ['but', ['but nevertheless']], ['charged with...', ['accused of a charge']], ['circumstances of', ['circumstances surrounding']], ['circumstances', ['surrounding circumstances']], ['close', ['close proximity']], ['collaborate', ['collaborate together']], ['collaborator', ['fellow collaborator']], ['collaborators', ['fellow collaborators']], ['collocated', ['collocated together']], ['colleagues', ['fellow colleagues']], ['combine', ['combine together']], ['complacent', ['self-complacent']], ['confessed', ['self-confessed']], ['connect', ['connect together']], ['consensus', ['(?:general )?consensus of opinion']], ['consolidate', ['consolidate together']], ['continues to', ['still continues to']], ['contradictory', ['mutually contradictory']], ['cooperation', ['mutual cooperation']], ['couple', ['couple together']], ['crisis', ['serious crisis']], ['eliminate', ['entirely eliminate']], ['especially', ['most especially']], ['fact', ['actual fact']], ['facts', ['true facts']], ['forecast', ['future forecast']], ['founding fathers', ['founding forefathers']], ['free', ['free and gratis']], ['free', ['free gratis']], ['full', ['completely full']], ['fundamentals', ['basic fundamentals']], ['gift', ['free gift']], ['innovation', ['new innovation']], ['interact', ['interact with each other']], ['large', ['large-size']], ['meld', ['meld together']], ['merge', ['merge together']], ['mingle', ['mingle together']], ['mix', ['mix together']], ['mutual feelings', ['mutual feelings for eachother']], ['mutual respect', ['mutual respect for each other']], ['native citizen', ['native-born citizen']], ['necessity', ['absolute necessity']], ['obvious', ['blatantly obvious']], ['pause', ['pause for a moment']], ['planning', ['advance planning']], ['plans', ['future plans']], ['pooled', ['pooled together']], ['potable water', ['potable drinking water']], ['potable water', ['potable drinking water']], ['recruit', ['new recruit']], ['reelected', ['reelected for another term']], ['refer', ['refer back']], ['regress', ['regress back']], ['repay them', ['repay them back']], ['repay', ['repay back']], ['repeat', ['repeat again']], ['repeat', ['repeat back']], ['repeat', ['repeat the same']], ['repeated', ['repeated the same']], ['reprieve', ['temporary reprieve']], ['respite', ['brief respite']], ['retirement', ['retiral', 'retiracy']], ['retreat', ['retreat back']], ['return', ['return back']], ['scrutinize', ['closely scrutinize']], ['software', ['software program']], ['surrounded', ['surrounded on all sides']], ['the nation', ['the whole entire nation']], ['throughout the', ['throughout the entire']], ['timpani', ['timpani drum']], ['twins', ['pair of twins']], ['vacancy', ['unfilled vacancy']], ['various', ['various different']], ['veteran', ['former veteran']], ['visible', ['visible to the eye']], ['vocation', ['professional vocation']], ['while', ['while at the same time']]]
    return preferred_forms_check(text, redundancies, err, msg)
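check_garner delegates the matching to preferred_forms_check (in proselint this helper lives in proselint.tools). The stand-in below is a sketch with an assumed signature, not the library's implementation; it shows the mechanics: each discouraged phrase is treated as a regex, and every hit yields an error tuple built from msg:

import re

def preferred_forms_check(text, forms, err, msg, ignore_case=True):
    """Return (start, end, err, message) tuples for each discouraged phrase."""
    flags = re.IGNORECASE if ignore_case else 0
    results = []
    for preferred, discouraged in forms:
        for pattern in discouraged:  # entries like "(?:general )?consensus of opinion" are regexes
            for match in re.finditer(r'\b%s\b' % pattern, text, flags):
                results.append((match.start(), match.end(), err,
                                msg.format(preferred, match.group(0))))
    return results

print(check_garner("We will repeat again the basic fundamentals."))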
def ScanForWindowsVolume(self, source_path):
    """Scans for a Windows volume.

    Args:
      source_path (str): source path.

    Returns:
      bool: True if a Windows volume was found.

    Raises:
      ScannerError: if the source path does not exist, or if the source path
          is not a file or directory, or if the format of or within
          the source file is not supported.
    """
    windows_path_specs = self.GetBasePathSpecs(source_path)
    if (not windows_path_specs or
        self._source_type == definitions.SOURCE_TYPE_FILE):
      return False

    file_system_path_spec = windows_path_specs[0]
    self._file_system = resolver.Resolver.OpenFileSystem(file_system_path_spec)

    if file_system_path_spec.type_indicator == definitions.TYPE_INDICATOR_OS:
      mount_point = file_system_path_spec
    else:
      mount_point = file_system_path_spec.parent

    self._path_resolver = windows_path_resolver.WindowsPathResolver(
        self._file_system, mount_point)

    # The source is a directory or single volume storage media image.
    if not self._windows_directory:
      self._ScanFileSystemForWindowsDirectory(self._path_resolver)

    if not self._windows_directory:
      return False

    self._path_resolver.SetEnvironmentVariable(
        'SystemRoot', self._windows_directory)
    self._path_resolver.SetEnvironmentVariable(
        'WinDir', self._windows_directory)

    return True
def function[ScanForWindowsVolume, parameter[self, source_path]]: constant[Scans for a Windows volume. Args: source_path (str): source path. Returns: bool: True if a Windows volume was found. Raises: ScannerError: if the source path does not exists, or if the source path is not a file or directory, or if the format of or within the source file is not supported. ] variable[windows_path_specs] assign[=] call[name[self].GetBasePathSpecs, parameter[name[source_path]]] if <ast.BoolOp object at 0x7da1b064f2b0> begin[:] return[constant[False]] variable[file_system_path_spec] assign[=] call[name[windows_path_specs]][constant[0]] name[self]._file_system assign[=] call[name[resolver].Resolver.OpenFileSystem, parameter[name[file_system_path_spec]]] if compare[name[file_system_path_spec].type_indicator equal[==] name[definitions].TYPE_INDICATOR_OS] begin[:] variable[mount_point] assign[=] name[file_system_path_spec] name[self]._path_resolver assign[=] call[name[windows_path_resolver].WindowsPathResolver, parameter[name[self]._file_system, name[mount_point]]] if <ast.UnaryOp object at 0x7da1b064d1e0> begin[:] call[name[self]._ScanFileSystemForWindowsDirectory, parameter[name[self]._path_resolver]] if <ast.UnaryOp object at 0x7da1b064c6a0> begin[:] return[constant[False]] call[name[self]._path_resolver.SetEnvironmentVariable, parameter[constant[SystemRoot], name[self]._windows_directory]] call[name[self]._path_resolver.SetEnvironmentVariable, parameter[constant[WinDir], name[self]._windows_directory]] return[constant[True]]
keyword[def] identifier[ScanForWindowsVolume] ( identifier[self] , identifier[source_path] ): literal[string] identifier[windows_path_specs] = identifier[self] . identifier[GetBasePathSpecs] ( identifier[source_path] ) keyword[if] ( keyword[not] identifier[windows_path_specs] keyword[or] identifier[self] . identifier[_source_type] == identifier[definitions] . identifier[SOURCE_TYPE_FILE] ): keyword[return] keyword[False] identifier[file_system_path_spec] = identifier[windows_path_specs] [ literal[int] ] identifier[self] . identifier[_file_system] = identifier[resolver] . identifier[Resolver] . identifier[OpenFileSystem] ( identifier[file_system_path_spec] ) keyword[if] identifier[file_system_path_spec] . identifier[type_indicator] == identifier[definitions] . identifier[TYPE_INDICATOR_OS] : identifier[mount_point] = identifier[file_system_path_spec] keyword[else] : identifier[mount_point] = identifier[file_system_path_spec] . identifier[parent] identifier[self] . identifier[_path_resolver] = identifier[windows_path_resolver] . identifier[WindowsPathResolver] ( identifier[self] . identifier[_file_system] , identifier[mount_point] ) keyword[if] keyword[not] identifier[self] . identifier[_windows_directory] : identifier[self] . identifier[_ScanFileSystemForWindowsDirectory] ( identifier[self] . identifier[_path_resolver] ) keyword[if] keyword[not] identifier[self] . identifier[_windows_directory] : keyword[return] keyword[False] identifier[self] . identifier[_path_resolver] . identifier[SetEnvironmentVariable] ( literal[string] , identifier[self] . identifier[_windows_directory] ) identifier[self] . identifier[_path_resolver] . identifier[SetEnvironmentVariable] ( literal[string] , identifier[self] . identifier[_windows_directory] ) keyword[return] keyword[True]
def ScanForWindowsVolume(self, source_path):
    """Scans for a Windows volume.

    Args:
      source_path (str): source path.

    Returns:
      bool: True if a Windows volume was found.

    Raises:
      ScannerError: if the source path does not exist, or if the source path
          is not a file or directory, or if the format of or within
          the source file is not supported.
    """
    windows_path_specs = self.GetBasePathSpecs(source_path)
    if not windows_path_specs or self._source_type == definitions.SOURCE_TYPE_FILE:
        return False # depends on [control=['if'], data=[]]
    file_system_path_spec = windows_path_specs[0]
    self._file_system = resolver.Resolver.OpenFileSystem(file_system_path_spec)
    if file_system_path_spec.type_indicator == definitions.TYPE_INDICATOR_OS:
        mount_point = file_system_path_spec # depends on [control=['if'], data=[]]
    else:
        mount_point = file_system_path_spec.parent
    self._path_resolver = windows_path_resolver.WindowsPathResolver(self._file_system, mount_point)
    # The source is a directory or single volume storage media image.
    if not self._windows_directory:
        self._ScanFileSystemForWindowsDirectory(self._path_resolver) # depends on [control=['if'], data=[]]
    if not self._windows_directory:
        return False # depends on [control=['if'], data=[]]
    self._path_resolver.SetEnvironmentVariable('SystemRoot', self._windows_directory)
    self._path_resolver.SetEnvironmentVariable('WinDir', self._windows_directory)
    return True
def _warn_about_problematic_credentials(credentials): """Determines if the credentials are problematic. Credentials from the Cloud SDK that are associated with Cloud SDK's project are problematic because they may not have APIs enabled and have limited quota. If this is the case, warn about it. """ from google.auth import _cloud_sdk if credentials.client_id == _cloud_sdk.CLOUD_SDK_CLIENT_ID: warnings.warn(_CLOUD_SDK_CREDENTIALS_WARNING)
def function[_warn_about_problematic_credentials, parameter[credentials]]: constant[Determines if the credentials are problematic. Credentials from the Cloud SDK that are associated with Cloud SDK's project are problematic because they may not have APIs enabled and have limited quota. If this is the case, warn about it. ] from relative_module[google.auth] import module[_cloud_sdk] if compare[name[credentials].client_id equal[==] name[_cloud_sdk].CLOUD_SDK_CLIENT_ID] begin[:] call[name[warnings].warn, parameter[name[_CLOUD_SDK_CREDENTIALS_WARNING]]]
keyword[def] identifier[_warn_about_problematic_credentials] ( identifier[credentials] ): literal[string] keyword[from] identifier[google] . identifier[auth] keyword[import] identifier[_cloud_sdk] keyword[if] identifier[credentials] . identifier[client_id] == identifier[_cloud_sdk] . identifier[CLOUD_SDK_CLIENT_ID] : identifier[warnings] . identifier[warn] ( identifier[_CLOUD_SDK_CREDENTIALS_WARNING] )
def _warn_about_problematic_credentials(credentials): """Determines if the credentials are problematic. Credentials from the Cloud SDK that are associated with Cloud SDK's project are problematic because they may not have APIs enabled and have limited quota. If this is the case, warn about it. """ from google.auth import _cloud_sdk if credentials.client_id == _cloud_sdk.CLOUD_SDK_CLIENT_ID: warnings.warn(_CLOUD_SDK_CREDENTIALS_WARNING) # depends on [control=['if'], data=[]]
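Because the function goes through the standard warnings machinery, callers can silence it with a filter. A sketch; the message fragment used for matching is an assumption about the warning's wording:

import warnings

# Filter on the message text rather than the module of origin.
warnings.filterwarnings('ignore', message='.*Cloud SDK.*', category=UserWarning)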
def pairdists(alignment, subs_model, alpha=None, ncat=4, tolerance=1e-6, verbose=False):
    """ Load an alignment, calculate all pairwise distances and variances.
    The model parameter must be a Substitution model type from phylo_utils. """

    # Check
    if not isinstance(subs_model, phylo_utils.models.Model):
        raise ValueError("Can't handle this model: {}".format(subs_model))

    if alpha is None:
        alpha = 1.0
        ncat = 1

    # Set up markov model
    tm = TransitionMatrix(subs_model)

    gamma_rates = discrete_gamma(alpha, ncat)
    partials = alignment_to_partials(alignment)
    seqnames = alignment.get_names()
    nseq = len(seqnames)
    distances = np.zeros((nseq, nseq))
    variances = np.zeros((nseq, nseq))

    # Check the model has the appropriate size
    if not subs_model.size == partials[seqnames[0]].shape[1]:
        raise ValueError("Model {} expects {} states, but the alignment has {}".format(subs_model.name,
                                                                                       subs_model.size,
                                                                                       partials[seqnames[0]].shape[1]))

    nodes = [phylo_utils.likelihood.LnlModel(tm) for seq in range(nseq)]
    for node, header in zip(nodes, seqnames):
        node.set_partials(partials[header])  # retrieve partial likelihoods from partials dictionary

    for i, j in itertools.combinations(range(nseq), 2):
        brlen, var = brent_optimise(nodes[i], nodes[j], verbose=verbose)
        distances[i, j] = distances[j, i] = brlen
        variances[i, j] = variances[j, i] = var
    dm = DistanceMatrix.from_array(distances, names=seqnames)
    vm = DistanceMatrix.from_array(variances, names=seqnames)
    return dm, vm
def function[pairdists, parameter[alignment, subs_model, alpha, ncat, tolerance, verbose]]: constant[ Load an alignment, calculate all pairwise distances and variances model parameter must be a Substitution model type from phylo_utils ] if <ast.UnaryOp object at 0x7da18bc721a0> begin[:] <ast.Raise object at 0x7da18bc70820> if compare[name[alpha] is constant[None]] begin[:] variable[alpha] assign[=] constant[1.0] variable[ncat] assign[=] constant[1] variable[tm] assign[=] call[name[TransitionMatrix], parameter[name[subs_model]]] variable[gamma_rates] assign[=] call[name[discrete_gamma], parameter[name[alpha], name[ncat]]] variable[partials] assign[=] call[name[alignment_to_partials], parameter[name[alignment]]] variable[seqnames] assign[=] call[name[alignment].get_names, parameter[]] variable[nseq] assign[=] call[name[len], parameter[name[seqnames]]] variable[distances] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da18bc71b70>, <ast.Name object at 0x7da18bc71060>]]]] variable[variances] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da18bc71cc0>, <ast.Name object at 0x7da18bc72650>]]]] if <ast.UnaryOp object at 0x7da18bc70fd0> begin[:] <ast.Raise object at 0x7da18bc70a30> variable[nodes] assign[=] <ast.ListComp object at 0x7da18bcca590> for taget[tuple[[<ast.Name object at 0x7da2054a6620>, <ast.Name object at 0x7da2054a78b0>]]] in starred[call[name[zip], parameter[name[nodes], name[seqnames]]]] begin[:] call[name[node].set_partials, parameter[call[name[partials]][name[header]]]] for taget[tuple[[<ast.Name object at 0x7da2054a5900>, <ast.Name object at 0x7da2054a6800>]]] in starred[call[name[itertools].combinations, parameter[call[name[range], parameter[name[nseq]]], constant[2]]]] begin[:] <ast.Tuple object at 0x7da2054a6d10> assign[=] call[name[brent_optimise], parameter[call[name[nodes]][name[i]], call[name[nodes]][name[j]]]] call[name[distances]][tuple[[<ast.Name object at 0x7da18bc700d0>, <ast.Name object at 0x7da18bc705e0>]]] assign[=] name[brlen] call[name[variances]][tuple[[<ast.Name object at 0x7da18bc71240>, <ast.Name object at 0x7da18bc71840>]]] assign[=] name[var] variable[dm] assign[=] call[name[DistanceMatrix].from_array, parameter[name[distances]]] variable[vm] assign[=] call[name[DistanceMatrix].from_array, parameter[name[variances]]] return[tuple[[<ast.Name object at 0x7da18bc728f0>, <ast.Name object at 0x7da18bc71d20>]]]
keyword[def] identifier[pairdists] ( identifier[alignment] , identifier[subs_model] , identifier[alpha] = keyword[None] , identifier[ncat] = literal[int] , identifier[tolerance] = literal[int] , identifier[verbose] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[subs_model] , identifier[phylo_utils] . identifier[models] . identifier[Model] ): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[model] )) keyword[if] identifier[alpha] keyword[is] keyword[None] : identifier[alpha] = literal[int] identifier[ncat] = literal[int] identifier[tm] = identifier[TransitionMatrix] ( identifier[subs_model] ) identifier[gamma_rates] = identifier[discrete_gamma] ( identifier[alpha] , identifier[ncat] ) identifier[partials] = identifier[alignment_to_partials] ( identifier[alignment] ) identifier[seqnames] = identifier[alignment] . identifier[get_names] () identifier[nseq] = identifier[len] ( identifier[seqnames] ) identifier[distances] = identifier[np] . identifier[zeros] (( identifier[nseq] , identifier[nseq] )) identifier[variances] = identifier[np] . identifier[zeros] (( identifier[nseq] , identifier[nseq] )) keyword[if] keyword[not] identifier[subs_model] . identifier[size] == identifier[partials] [ identifier[seqnames] [ literal[int] ]]. identifier[shape] [ literal[int] ]: keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[model] . identifier[name] , identifier[model] . identifier[size] , identifier[partials] [ identifier[seqnames] [ literal[int] ]]. identifier[shape] [ literal[int] ])) identifier[nodes] =[ identifier[phylo_utils] . identifier[likelihood] . identifier[LnlModel] ( identifier[tm] ) keyword[for] identifier[seq] keyword[in] identifier[range] ( identifier[nseq] )] keyword[for] identifier[node] , identifier[header] keyword[in] identifier[zip] ( identifier[nodes] , identifier[seqnames] ): identifier[node] . identifier[set_partials] ( identifier[partials] [ identifier[header] ]) keyword[for] identifier[i] , identifier[j] keyword[in] identifier[itertools] . identifier[combinations] ( identifier[range] ( identifier[nseq] ), literal[int] ): identifier[brlen] , identifier[var] = identifier[brent_optimise] ( identifier[nodes] [ identifier[i] ], identifier[nodes] [ identifier[j] ], identifier[verbose] = identifier[verbose] ) identifier[distances] [ identifier[i] , identifier[j] ]= identifier[distances] [ identifier[j] , identifier[i] ]= identifier[brlen] identifier[variances] [ identifier[i] , identifier[j] ]= identifier[variances] [ identifier[j] , identifier[i] ]= identifier[var] identifier[dm] = identifier[DistanceMatrix] . identifier[from_array] ( identifier[distances] , identifier[names] = identifier[seqnames] ) identifier[vm] = identifier[DistanceMatrix] . identifier[from_array] ( identifier[variances] , identifier[names] = identifier[seqnames] ) keyword[return] identifier[dm] , identifier[vm]
def pairdists(alignment, subs_model, alpha=None, ncat=4, tolerance=1e-06, verbose=False):
    """ Load an alignment, calculate all pairwise distances and variances.
    The model parameter must be a Substitution model type from phylo_utils. """
    # Check
    if not isinstance(subs_model, phylo_utils.models.Model):
        raise ValueError("Can't handle this model: {}".format(subs_model)) # depends on [control=['if'], data=[]]
    if alpha is None:
        alpha = 1.0
        ncat = 1 # depends on [control=['if'], data=['alpha']]
    # Set up markov model
    tm = TransitionMatrix(subs_model)
    gamma_rates = discrete_gamma(alpha, ncat)
    partials = alignment_to_partials(alignment)
    seqnames = alignment.get_names()
    nseq = len(seqnames)
    distances = np.zeros((nseq, nseq))
    variances = np.zeros((nseq, nseq))
    # Check the model has the appropriate size
    if not subs_model.size == partials[seqnames[0]].shape[1]:
        raise ValueError('Model {} expects {} states, but the alignment has {}'.format(subs_model.name, subs_model.size, partials[seqnames[0]].shape[1])) # depends on [control=['if'], data=[]]
    nodes = [phylo_utils.likelihood.LnlModel(tm) for seq in range(nseq)]
    for (node, header) in zip(nodes, seqnames):
        node.set_partials(partials[header])  # retrieve partial likelihoods from partials dictionary # depends on [control=['for'], data=[]]
    for (i, j) in itertools.combinations(range(nseq), 2):
        (brlen, var) = brent_optimise(nodes[i], nodes[j], verbose=verbose)
        distances[i, j] = distances[j, i] = brlen
        variances[i, j] = variances[j, i] = var # depends on [control=['for'], data=[]]
    dm = DistanceMatrix.from_array(distances, names=seqnames)
    vm = DistanceMatrix.from_array(variances, names=seqnames)
    return (dm, vm)
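pairdists calls discrete_gamma(alpha, ncat) for its gamma rate categories. Below is a sketch of one standard mean-rate discretization in the style of Yang (1994); phylo_utils' actual implementation may differ. It uses the identity that the mean of a unit-mean Gamma(alpha) variable over a quantile band equals the Gamma(alpha + 1) CDF mass of that band:

import numpy as np
from scipy.stats import gamma as gamma_dist

def discrete_gamma(alpha, ncat):
    scale = 1.0 / alpha  # unit-mean parameterisation
    cuts = gamma_dist.ppf(np.arange(1, ncat) / ncat, alpha, scale=scale)
    lower = np.concatenate([[0.0], cuts])
    upper = np.concatenate([cuts, [np.inf]])
    mass = (gamma_dist.cdf(upper, alpha + 1, scale=scale)
            - gamma_dist.cdf(lower, alpha + 1, scale=scale))
    return ncat * mass  # mean rate of each equal-probability category

print(discrete_gamma(0.5, 4))  # four rates whose average is ~1.0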
def _exec_loop_moving_window(self, a_all, bd_all, mask, bd_idx):
    """Solves the kriging system by looping over all specified points.
    Uses only a certain number of closest points. Not very memory
    intensive, but the loop is done in pure Python.
    """
    import scipy.linalg.lapack
    npt = bd_all.shape[0]
    n = bd_idx.shape[1]
    kvalues = np.zeros(npt)
    sigmasq = np.zeros(npt)

    # Solve one small kriging system per unmasked point, using only the
    # n nearest neighbours selected in bd_idx.
    for i in np.nonzero(~mask)[0]:
        b_selector = bd_idx[i]
        bd = bd_all[i]
        # Slice the local (n+1) x (n+1) kriging matrix out of the full one;
        # the appended last index is the Lagrange-multiplier row/column.
        a_selector = np.concatenate((b_selector, np.array([a_all.shape[0] - 1])))
        a = a_all[a_selector[:, None], a_selector]
        # Flag neighbours that coincide with the query point (distance ~ 0).
        if np.any(np.absolute(bd) <= self.eps):
            zero_value = True
            zero_index = np.where(np.absolute(bd) <= self.eps)
        else:
            zero_value = False
            zero_index = None
        # Right-hand side: negated variogram of the point-neighbour distances,
        # exact zeros for coincident points, and 1 for the unbiasedness row.
        b = np.zeros((n+1, 1))
        b[:n, 0] = - self.variogram_function(self.variogram_model_parameters, bd)
        if zero_value:
            b[zero_index[0], 0] = 0.0
        b[n, 0] = 1.0
        x = scipy.linalg.solve(a, b)
        # Estimate = weighted data values; variance from weights and RHS.
        kvalues[i] = x[:n, 0].dot(self.VALUES[b_selector])
        sigmasq[i] = - x[:, 0].dot(b[:, 0])
    return kvalues, sigmasq
def function[_exec_loop_moving_window, parameter[self, a_all, bd_all, mask, bd_idx]]: constant[Solves the kriging system by looping over all specified points. Uses only a certain number of closest points. Not very memory intensive, but the loop is done in pure Python. ] import module[scipy.linalg.lapack] variable[npt] assign[=] call[name[bd_all].shape][constant[0]] variable[n] assign[=] call[name[bd_idx].shape][constant[1]] variable[kvalues] assign[=] call[name[np].zeros, parameter[name[npt]]] variable[sigmasq] assign[=] call[name[np].zeros, parameter[name[npt]]] for taget[name[i]] in starred[call[call[name[np].nonzero, parameter[<ast.UnaryOp object at 0x7da20c6ab370>]]][constant[0]]] begin[:] variable[b_selector] assign[=] call[name[bd_idx]][name[i]] variable[bd] assign[=] call[name[bd_all]][name[i]] variable[a_selector] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.Name object at 0x7da20c6a94b0>, <ast.Call object at 0x7da20c6aa410>]]]] variable[a] assign[=] call[name[a_all]][tuple[[<ast.Subscript object at 0x7da20c6abdf0>, <ast.Name object at 0x7da20c6aba00>]]] if call[name[np].any, parameter[compare[call[name[np].absolute, parameter[name[bd]]] less_or_equal[<=] name[self].eps]]] begin[:] variable[zero_value] assign[=] constant[True] variable[zero_index] assign[=] call[name[np].where, parameter[compare[call[name[np].absolute, parameter[name[bd]]] less_or_equal[<=] name[self].eps]]] variable[b] assign[=] call[name[np].zeros, parameter[tuple[[<ast.BinOp object at 0x7da207f01c30>, <ast.Constant object at 0x7da207f012d0>]]]] call[name[b]][tuple[[<ast.Slice object at 0x7da207f00040>, <ast.Constant object at 0x7da207f01c90>]]] assign[=] <ast.UnaryOp object at 0x7da207f029e0> if name[zero_value] begin[:] call[name[b]][tuple[[<ast.Subscript object at 0x7da207f020e0>, <ast.Constant object at 0x7da207f01540>]]] assign[=] constant[0.0] call[name[b]][tuple[[<ast.Name object at 0x7da207f02200>, <ast.Constant object at 0x7da207f00070>]]] assign[=] constant[1.0] variable[x] assign[=] call[name[scipy].linalg.solve, parameter[name[a], name[b]]] call[name[kvalues]][name[i]] assign[=] call[call[name[x]][tuple[[<ast.Slice object at 0x7da207f03280>, <ast.Constant object at 0x7da207f027a0>]]].dot, parameter[call[name[self].VALUES][name[b_selector]]]] call[name[sigmasq]][name[i]] assign[=] <ast.UnaryOp object at 0x7da207f01870> return[tuple[[<ast.Name object at 0x7da207f007c0>, <ast.Name object at 0x7da207f01840>]]]
keyword[def] identifier[_exec_loop_moving_window] ( identifier[self] , identifier[a_all] , identifier[bd_all] , identifier[mask] , identifier[bd_idx] ): literal[string] keyword[import] identifier[scipy] . identifier[linalg] . identifier[lapack] identifier[npt] = identifier[bd_all] . identifier[shape] [ literal[int] ] identifier[n] = identifier[bd_idx] . identifier[shape] [ literal[int] ] identifier[kvalues] = identifier[np] . identifier[zeros] ( identifier[npt] ) identifier[sigmasq] = identifier[np] . identifier[zeros] ( identifier[npt] ) keyword[for] identifier[i] keyword[in] identifier[np] . identifier[nonzero] (~ identifier[mask] )[ literal[int] ]: identifier[b_selector] = identifier[bd_idx] [ identifier[i] ] identifier[bd] = identifier[bd_all] [ identifier[i] ] identifier[a_selector] = identifier[np] . identifier[concatenate] (( identifier[b_selector] , identifier[np] . identifier[array] ([ identifier[a_all] . identifier[shape] [ literal[int] ]- literal[int] ]))) identifier[a] = identifier[a_all] [ identifier[a_selector] [:, keyword[None] ], identifier[a_selector] ] keyword[if] identifier[np] . identifier[any] ( identifier[np] . identifier[absolute] ( identifier[bd] )<= identifier[self] . identifier[eps] ): identifier[zero_value] = keyword[True] identifier[zero_index] = identifier[np] . identifier[where] ( identifier[np] . identifier[absolute] ( identifier[bd] )<= identifier[self] . identifier[eps] ) keyword[else] : identifier[zero_value] = keyword[False] identifier[zero_index] = keyword[None] identifier[b] = identifier[np] . identifier[zeros] (( identifier[n] + literal[int] , literal[int] )) identifier[b] [: identifier[n] , literal[int] ]=- identifier[self] . identifier[variogram_function] ( identifier[self] . identifier[variogram_model_parameters] , identifier[bd] ) keyword[if] identifier[zero_value] : identifier[b] [ identifier[zero_index] [ literal[int] ], literal[int] ]= literal[int] identifier[b] [ identifier[n] , literal[int] ]= literal[int] identifier[x] = identifier[scipy] . identifier[linalg] . identifier[solve] ( identifier[a] , identifier[b] ) identifier[kvalues] [ identifier[i] ]= identifier[x] [: identifier[n] , literal[int] ]. identifier[dot] ( identifier[self] . identifier[VALUES] [ identifier[b_selector] ]) identifier[sigmasq] [ identifier[i] ]=- identifier[x] [:, literal[int] ]. identifier[dot] ( identifier[b] [:, literal[int] ]) keyword[return] identifier[kvalues] , identifier[sigmasq]
def _exec_loop_moving_window(self, a_all, bd_all, mask, bd_idx): """Solves the kriging system by looping over all specified points. Uses only a certain number of closest points. Not very memory intensive, but the loop is done in pure Python. """ import scipy.linalg.lapack npt = bd_all.shape[0] n = bd_idx.shape[1] kvalues = np.zeros(npt) sigmasq = np.zeros(npt) for i in np.nonzero(~mask)[0]: b_selector = bd_idx[i] bd = bd_all[i] a_selector = np.concatenate((b_selector, np.array([a_all.shape[0] - 1]))) a = a_all[a_selector[:, None], a_selector] if np.any(np.absolute(bd) <= self.eps): zero_value = True zero_index = np.where(np.absolute(bd) <= self.eps) # depends on [control=['if'], data=[]] else: zero_value = False zero_index = None b = np.zeros((n + 1, 1)) b[:n, 0] = -self.variogram_function(self.variogram_model_parameters, bd) if zero_value: b[zero_index[0], 0] = 0.0 # depends on [control=['if'], data=[]] b[n, 0] = 1.0 x = scipy.linalg.solve(a, b) kvalues[i] = x[:n, 0].dot(self.VALUES[b_selector]) sigmasq[i] = -x[:, 0].dot(b[:, 0]) # depends on [control=['for'], data=['i']] return (kvalues, sigmasq)
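Each pass through the loop above solves one small ordinary-kriging system. The sketch below works a single such system by hand for a point with two neighbours, a linear variogram gamma(h) = h, and the Lagrange-multiplier border in the last row and column; the distances are made up for illustration and this is not the PyKrige API.

import numpy as np
import scipy.linalg

gamma = lambda h: h  # assumed variogram model
d = np.array([[0.0, 1.0],
              [1.0, 0.0]])            # neighbour-to-neighbour distances
bd = np.array([0.5, 0.7])             # query-point-to-neighbour distances
n = 2
a = np.zeros((n + 1, n + 1))
a[:n, :n] = -gamma(d)                 # negated variogram block
a[n, :n] = a[:n, n] = 1.0             # unbiasedness (weights sum to 1)
b = np.zeros((n + 1, 1))
b[:n, 0] = -gamma(bd)
b[n, 0] = 1.0
x = scipy.linalg.solve(a, b)
print(x[:n, 0], x[:n, 0].sum())       # kriging weights [0.6, 0.4], sum 1.0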
def establish(self, call_id, timeout, limit=None, retry=None, max_retries=None):
    """Waits until the call is accepted by workers and starts to collect the
    results.
    """
    rejected = 0
    retried = 0
    results = []
    result_queue = self.result_queues[call_id]
    try:
        with Timeout(timeout, False):
            while True:
                result = result_queue.get()
                if result is None:
                    rejected += 1
                    if retry is not None:
                        if retried == max_retries:
                            break
                        retry()
                        retried += 1
                    continue
                results.append(result)
                if len(results) == limit:
                    break
    finally:
        del result_queue
        self.remove_result_queue(call_id)
    if not results:
        if rejected:
            raise Rejected('%d workers rejected' % rejected
                           if rejected != 1 else
                           'A worker rejected')
        else:
            raise WorkerNotFound('failed to find worker')
    return results
def function[establish, parameter[self, call_id, timeout, limit, retry, max_retries]]: constant[Waits until the call is accepted by workers and starts to collect the results. ] variable[rejected] assign[=] constant[0] variable[retried] assign[=] constant[0] variable[results] assign[=] list[[]] variable[result_queue] assign[=] call[name[self].result_queues][name[call_id]] <ast.Try object at 0x7da1b0291ae0> if <ast.UnaryOp object at 0x7da1b0292e30> begin[:] if name[rejected] begin[:] <ast.Raise object at 0x7da1b02935e0> return[name[results]]
keyword[def] identifier[establish] ( identifier[self] , identifier[call_id] , identifier[timeout] , identifier[limit] = keyword[None] , identifier[retry] = keyword[None] , identifier[max_retries] = keyword[None] ): literal[string] identifier[rejected] = literal[int] identifier[retried] = literal[int] identifier[results] =[] identifier[result_queue] = identifier[self] . identifier[result_queues] [ identifier[call_id] ] keyword[try] : keyword[with] identifier[Timeout] ( identifier[timeout] , keyword[False] ): keyword[while] keyword[True] : identifier[result] = identifier[result_queue] . identifier[get] () keyword[if] identifier[result] keyword[is] keyword[None] : identifier[rejected] += literal[int] keyword[if] identifier[retry] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[retried] == identifier[max_retries] : keyword[break] identifier[retry] () identifier[retried] += literal[int] keyword[continue] identifier[results] . identifier[append] ( identifier[result] ) keyword[if] identifier[len] ( identifier[results] )== identifier[limit] : keyword[break] keyword[finally] : keyword[del] identifier[result_queue] identifier[self] . identifier[remove_result_queue] ( identifier[call_id] ) keyword[if] keyword[not] identifier[results] : keyword[if] identifier[rejected] : keyword[raise] identifier[Rejected] ( literal[string] % identifier[rejected] keyword[if] identifier[rejected] != literal[int] keyword[else] literal[string] ) keyword[else] : keyword[raise] identifier[WorkerNotFound] ( literal[string] ) keyword[return] identifier[results]
def establish(self, call_id, timeout, limit=None, retry=None, max_retries=None):
    """Waits until the call is accepted by workers and starts to collect the
    results.
    """
    rejected = 0
    retried = 0
    results = []
    result_queue = self.result_queues[call_id]
    try:
        with Timeout(timeout, False):
            while True:
                result = result_queue.get()
                if result is None:
                    rejected += 1
                    if retry is not None:
                        if retried == max_retries:
                            break # depends on [control=['if'], data=[]]
                        retry()
                        retried += 1 # depends on [control=['if'], data=['retry']]
                    continue # depends on [control=['if'], data=[]]
                results.append(result)
                if len(results) == limit:
                    break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
    finally:
        del result_queue
        self.remove_result_queue(call_id)
    if not results:
        if rejected:
            raise Rejected('%d workers rejected' % rejected if rejected != 1 else 'A worker rejected') # depends on [control=['if'], data=[]]
        else:
            raise WorkerNotFound('failed to find worker') # depends on [control=['if'], data=[]]
    return results
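The same collect-until-limit loop, reduced to the standard library: queue.Queue with a per-get timeout stands in for the gevent-style Timeout context manager used above, and None keeps its role as the rejection sentinel. A self-contained sketch under those assumptions:

import queue

def collect(result_queue, limit, timeout=1.0):
    results, rejected = [], 0
    try:
        while len(results) < limit:
            result = result_queue.get(timeout=timeout)
            if result is None:      # a worker rejected the call
                rejected += 1
                continue
            results.append(result)
    except queue.Empty:
        pass                        # gave up waiting for more results
    return results, rejected

q = queue.Queue()
for item in (None, 'a', 'b'):
    q.put(item)
print(collect(q, limit=2))          # (['a', 'b'], 1)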
def urlunparse(components): """Put a parsed URL back together again. This may result in a slightly different, but equivalent URL, if the URL that was parsed originally had redundant delimiters, e.g. a ? with an empty query (the draft states that these are equivalent).""" scheme, netloc, url, params, query, fragment, _coerce_result = ( _coerce_args(*components)) if params: url = "%s;%s" % (url, params) return _coerce_result(urlunsplit((scheme, netloc, url, query, fragment)))
def function[urlunparse, parameter[components]]: constant[Put a parsed URL back together again. This may result in a slightly different, but equivalent URL, if the URL that was parsed originally had redundant delimiters, e.g. a ? with an empty query (the draft states that these are equivalent).] <ast.Tuple object at 0x7da20c6aa7a0> assign[=] call[name[_coerce_args], parameter[<ast.Starred object at 0x7da18fe910c0>]] if name[params] begin[:] variable[url] assign[=] binary_operation[constant[%s;%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b23467d0>, <ast.Name object at 0x7da1b2346fe0>]]] return[call[name[_coerce_result], parameter[call[name[urlunsplit], parameter[tuple[[<ast.Name object at 0x7da1b23440d0>, <ast.Name object at 0x7da1b2347e20>, <ast.Name object at 0x7da1b2346bc0>, <ast.Name object at 0x7da1b2347c40>, <ast.Name object at 0x7da1b2347cd0>]]]]]]]
keyword[def] identifier[urlunparse] ( identifier[components] ): literal[string] identifier[scheme] , identifier[netloc] , identifier[url] , identifier[params] , identifier[query] , identifier[fragment] , identifier[_coerce_result] =( identifier[_coerce_args] (* identifier[components] )) keyword[if] identifier[params] : identifier[url] = literal[string] %( identifier[url] , identifier[params] ) keyword[return] identifier[_coerce_result] ( identifier[urlunsplit] (( identifier[scheme] , identifier[netloc] , identifier[url] , identifier[query] , identifier[fragment] )))
def urlunparse(components): """Put a parsed URL back together again. This may result in a slightly different, but equivalent URL, if the URL that was parsed originally had redundant delimiters, e.g. a ? with an empty query (the draft states that these are equivalent).""" (scheme, netloc, url, params, query, fragment, _coerce_result) = _coerce_args(*components) if params: url = '%s;%s' % (url, params) # depends on [control=['if'], data=[]] return _coerce_result(urlunsplit((scheme, netloc, url, query, fragment)))
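A quick round trip with the standard library's own urlparse/urlunparse shows the equivalence the docstring mentions: a '?' with an empty query does not survive re-assembly, but the result is an equivalent URL.

from urllib.parse import urlparse, urlunparse

parts = urlparse('http://example.com/path;params?')
print(urlunparse(parts))  # http://example.com/path;params (the empty '?' is dropped)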
def _send_event(self, title, text, tags, event_type, aggregation_key, severity='info'): """Emit an event to the Datadog Event Stream.""" event_dict = { 'timestamp': int(time()), 'source_type_name': self.SOURCE_TYPE_NAME, 'msg_title': title, 'event_type': event_type, 'alert_type': severity, 'msg_text': text, 'tags': tags, 'aggregation_key': aggregation_key, } self.event(event_dict)
def function[_send_event, parameter[self, title, text, tags, event_type, aggregation_key, severity]]: constant[Emit an event to the Datadog Event Stream.] variable[event_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b21bb250>, <ast.Constant object at 0x7da1b21bad70>, <ast.Constant object at 0x7da1b21b8b80>, <ast.Constant object at 0x7da1b21bab60>, <ast.Constant object at 0x7da1b21b88b0>, <ast.Constant object at 0x7da1b21bbbb0>, <ast.Constant object at 0x7da1b21b8f40>, <ast.Constant object at 0x7da1b21bb6a0>], [<ast.Call object at 0x7da1b21badd0>, <ast.Attribute object at 0x7da1b21baa70>, <ast.Name object at 0x7da1b21b9db0>, <ast.Name object at 0x7da1b21ba7a0>, <ast.Name object at 0x7da1b21b9c90>, <ast.Name object at 0x7da1b21b8070>, <ast.Name object at 0x7da1b21bbeb0>, <ast.Name object at 0x7da1b21ba860>]] call[name[self].event, parameter[name[event_dict]]]
keyword[def] identifier[_send_event] ( identifier[self] , identifier[title] , identifier[text] , identifier[tags] , identifier[event_type] , identifier[aggregation_key] , identifier[severity] = literal[string] ): literal[string] identifier[event_dict] ={ literal[string] : identifier[int] ( identifier[time] ()), literal[string] : identifier[self] . identifier[SOURCE_TYPE_NAME] , literal[string] : identifier[title] , literal[string] : identifier[event_type] , literal[string] : identifier[severity] , literal[string] : identifier[text] , literal[string] : identifier[tags] , literal[string] : identifier[aggregation_key] , } identifier[self] . identifier[event] ( identifier[event_dict] )
def _send_event(self, title, text, tags, event_type, aggregation_key, severity='info'): """Emit an event to the Datadog Event Stream.""" event_dict = {'timestamp': int(time()), 'source_type_name': self.SOURCE_TYPE_NAME, 'msg_title': title, 'event_type': event_type, 'alert_type': severity, 'msg_text': text, 'tags': tags, 'aggregation_key': aggregation_key} self.event(event_dict)
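For reference, the payload this builds looks like the dict below; SOURCE_TYPE_NAME is a class-level constant on the check, and every concrete value here is a hypothetical example, not output captured from Datadog.

from time import time

payload = {
    'timestamp': int(time()),
    'source_type_name': 'my_integration',  # hypothetical SOURCE_TYPE_NAME
    'msg_title': 'Leader election completed',
    'event_type': 'leader.election',
    'alert_type': 'info',
    'msg_text': 'node-3 became leader',
    'tags': ['env:prod', 'cluster:main'],
    'aggregation_key': 'cluster:main',
}
print(payload)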
def location(self, x=None, y=None): ''' Temporarily move the cursor, perform work, and return to the previous location. :: with screen.location(40, 20): print('Hello, world!') ''' stream = self._stream stream.write(self.save_pos) # cursor position if x is not None and y is not None: stream.write(self.mv(y, x)) elif x is not None: stream.write(self.mv_x(x)) elif y is not None: stream.write(self.mv_y(y)) stream.flush() try: yield self finally: stream.write(self.rest_pos) stream.flush()
def function[location, parameter[self, x, y]]: constant[ Temporarily move the cursor, perform work, and return to the previous location. :: with screen.location(40, 20): print('Hello, world!') ] variable[stream] assign[=] name[self]._stream call[name[stream].write, parameter[name[self].save_pos]] if <ast.BoolOp object at 0x7da1b26a3f40> begin[:] call[name[stream].write, parameter[call[name[self].mv, parameter[name[y], name[x]]]]] call[name[stream].flush, parameter[]] <ast.Try object at 0x7da1b26a14b0>
keyword[def] identifier[location] ( identifier[self] , identifier[x] = keyword[None] , identifier[y] = keyword[None] ): literal[string] identifier[stream] = identifier[self] . identifier[_stream] identifier[stream] . identifier[write] ( identifier[self] . identifier[save_pos] ) keyword[if] identifier[x] keyword[is] keyword[not] keyword[None] keyword[and] identifier[y] keyword[is] keyword[not] keyword[None] : identifier[stream] . identifier[write] ( identifier[self] . identifier[mv] ( identifier[y] , identifier[x] )) keyword[elif] identifier[x] keyword[is] keyword[not] keyword[None] : identifier[stream] . identifier[write] ( identifier[self] . identifier[mv_x] ( identifier[x] )) keyword[elif] identifier[y] keyword[is] keyword[not] keyword[None] : identifier[stream] . identifier[write] ( identifier[self] . identifier[mv_y] ( identifier[y] )) identifier[stream] . identifier[flush] () keyword[try] : keyword[yield] identifier[self] keyword[finally] : identifier[stream] . identifier[write] ( identifier[self] . identifier[rest_pos] ) identifier[stream] . identifier[flush] ()
def location(self, x=None, y=None): """ Temporarily move the cursor, perform work, and return to the previous location. :: with screen.location(40, 20): print('Hello, world!') """ stream = self._stream stream.write(self.save_pos) # cursor position if x is not None and y is not None: stream.write(self.mv(y, x)) # depends on [control=['if'], data=[]] elif x is not None: stream.write(self.mv_x(x)) # depends on [control=['if'], data=['x']] elif y is not None: stream.write(self.mv_y(y)) # depends on [control=['if'], data=['y']] stream.flush() try: yield self # depends on [control=['try'], data=[]] finally: stream.write(self.rest_pos) stream.flush()
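The save/move/restore dance can be reproduced with raw ANSI escape sequences; the real method reads these strings from its terminal-capability attributes (save_pos, mv, rest_pos) instead. A standalone sketch, assuming a VT100-compatible terminal:

import sys
from contextlib import contextmanager

@contextmanager
def location(x=None, y=None, stream=sys.stdout):
    stream.write('\x1b7')                             # save cursor (DECSC)
    if x is not None and y is not None:
        stream.write('\x1b[%d;%dH' % (y + 1, x + 1))  # move: CUP is 1-based
    stream.flush()
    try:
        yield
    finally:
        stream.write('\x1b8')                         # restore cursor (DECRC)
        stream.flush()

with location(40, 20):
    print('Hello, world!')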
def cookiestring(self, value):
    """Cookie string setter"""
    c = Cookie.SimpleCookie(value)
    sc = [(i.key, i.value) for i in c.values()]
    self.cookies = dict(sc)
def function[cookiestring, parameter[self, value]]: constant[Cookie string setter] variable[c] assign[=] call[name[Cookie].SimpleCookie, parameter[name[value]]] variable[sc] assign[=] <ast.ListComp object at 0x7da1b2405330> name[self].cookies assign[=] call[name[dict], parameter[name[sc]]]
keyword[def] identifier[cookiestring] ( identifier[self] , identifier[value] ): literal[string] identifier[c] = identifier[Cookie] . identifier[SimpleCookie] ( identifier[value] ) identifier[sc] =[( identifier[i] . identifier[key] , identifier[i] . identifier[value] ) keyword[for] identifier[i] keyword[in] identifier[c] . identifier[values] ()] identifier[self] . identifier[cookies] = identifier[dict] ( identifier[sc] )
def cookiestring(self, value): """Cookie string setter""" c = Cookie.SimpleCookie(value) sc = [(i.key, i.value) for i in c.values()] self.cookies = dict(sc)
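The same parse in Python 3 terms, where the Python 2 Cookie module lives at http.cookies; the setter above is just flattening a raw Cookie header value into a plain dict.

from http.cookies import SimpleCookie

c = SimpleCookie('session=abc123; theme=dark')
cookies = {morsel.key: morsel.value for morsel in c.values()}
print(cookies)  # {'session': 'abc123', 'theme': 'dark'}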
def add_streamer(self, streamer): """Add a streamer to this sensor graph. Args: streamer (DataStreamer): The streamer we want to add """ if self._max_streamers is not None and len(self.streamers) >= self._max_streamers: raise ResourceUsageError("Maximum number of streamers exceeded", max_streamers=self._max_streamers) streamer.link_to_storage(self.sensor_log) streamer.index = len(self.streamers) self.streamers.append(streamer)
def function[add_streamer, parameter[self, streamer]]: constant[Add a streamer to this sensor graph. Args: streamer (DataStreamer): The streamer we want to add ] if <ast.BoolOp object at 0x7da18fe91090> begin[:] <ast.Raise object at 0x7da18fe92ad0> call[name[streamer].link_to_storage, parameter[name[self].sensor_log]] name[streamer].index assign[=] call[name[len], parameter[name[self].streamers]] call[name[self].streamers.append, parameter[name[streamer]]]
keyword[def] identifier[add_streamer] ( identifier[self] , identifier[streamer] ): literal[string] keyword[if] identifier[self] . identifier[_max_streamers] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[self] . identifier[streamers] )>= identifier[self] . identifier[_max_streamers] : keyword[raise] identifier[ResourceUsageError] ( literal[string] , identifier[max_streamers] = identifier[self] . identifier[_max_streamers] ) identifier[streamer] . identifier[link_to_storage] ( identifier[self] . identifier[sensor_log] ) identifier[streamer] . identifier[index] = identifier[len] ( identifier[self] . identifier[streamers] ) identifier[self] . identifier[streamers] . identifier[append] ( identifier[streamer] )
def add_streamer(self, streamer): """Add a streamer to this sensor graph. Args: streamer (DataStreamer): The streamer we want to add """ if self._max_streamers is not None and len(self.streamers) >= self._max_streamers: raise ResourceUsageError('Maximum number of streamers exceeded', max_streamers=self._max_streamers) # depends on [control=['if'], data=[]] streamer.link_to_storage(self.sensor_log) streamer.index = len(self.streamers) self.streamers.append(streamer)
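The registration pattern above, boiled down: refuse new entries once a cap is hit, and hand each accepted entry its position as an index. The class names below are illustrative stand-ins, not the IOTile API.

class Item:
    pass

class Registry:
    MAX_ITEMS = 2  # stand-in for self._max_streamers

    def __init__(self):
        self.items = []

    def add(self, item):
        # Enforce the cap before mutating any state.
        if self.MAX_ITEMS is not None and len(self.items) >= self.MAX_ITEMS:
            raise RuntimeError('maximum number of items exceeded')
        item.index = len(self.items)  # index assigned at registration time
        self.items.append(item)

reg = Registry()
a, b = Item(), Item()
reg.add(a)
reg.add(b)
print(a.index, b.index)   # 0 1
try:
    reg.add(Item())
except RuntimeError as exc:
    print(exc)            # maximum number of items exceeded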
def generate_if_then_else(self): """ Implementation of if-then-else. .. code-block:: python { 'if': { 'exclusiveMaximum': 0, }, 'then': { 'minimum': -10, }, 'else': { 'multipleOf': 2, }, } Valid values are any between -10 and 0 or any multiplication of two. """ with self.l('try:'): self.generate_func_code_block( self._definition['if'], self._variable, self._variable_name, clear_variables=True ) with self.l('except JsonSchemaException:'): if 'else' in self._definition: self.generate_func_code_block( self._definition['else'], self._variable, self._variable_name, clear_variables=True ) else: self.l('pass') if 'then' in self._definition: with self.l('else:'): self.generate_func_code_block( self._definition['then'], self._variable, self._variable_name, clear_variables=True )
def function[generate_if_then_else, parameter[self]]: constant[ Implementation of if-then-else. .. code-block:: python { 'if': { 'exclusiveMaximum': 0, }, 'then': { 'minimum': -10, }, 'else': { 'multipleOf': 2, }, } Valid values are any between -10 and 0 or any multiplication of two. ] with call[name[self].l, parameter[constant[try:]]] begin[:] call[name[self].generate_func_code_block, parameter[call[name[self]._definition][constant[if]], name[self]._variable, name[self]._variable_name]] with call[name[self].l, parameter[constant[except JsonSchemaException:]]] begin[:] if compare[constant[else] in name[self]._definition] begin[:] call[name[self].generate_func_code_block, parameter[call[name[self]._definition][constant[else]], name[self]._variable, name[self]._variable_name]] if compare[constant[then] in name[self]._definition] begin[:] with call[name[self].l, parameter[constant[else:]]] begin[:] call[name[self].generate_func_code_block, parameter[call[name[self]._definition][constant[then]], name[self]._variable, name[self]._variable_name]]
keyword[def] identifier[generate_if_then_else] ( identifier[self] ): literal[string] keyword[with] identifier[self] . identifier[l] ( literal[string] ): identifier[self] . identifier[generate_func_code_block] ( identifier[self] . identifier[_definition] [ literal[string] ], identifier[self] . identifier[_variable] , identifier[self] . identifier[_variable_name] , identifier[clear_variables] = keyword[True] ) keyword[with] identifier[self] . identifier[l] ( literal[string] ): keyword[if] literal[string] keyword[in] identifier[self] . identifier[_definition] : identifier[self] . identifier[generate_func_code_block] ( identifier[self] . identifier[_definition] [ literal[string] ], identifier[self] . identifier[_variable] , identifier[self] . identifier[_variable_name] , identifier[clear_variables] = keyword[True] ) keyword[else] : identifier[self] . identifier[l] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[self] . identifier[_definition] : keyword[with] identifier[self] . identifier[l] ( literal[string] ): identifier[self] . identifier[generate_func_code_block] ( identifier[self] . identifier[_definition] [ literal[string] ], identifier[self] . identifier[_variable] , identifier[self] . identifier[_variable_name] , identifier[clear_variables] = keyword[True] )
def generate_if_then_else(self): """ Implementation of if-then-else. .. code-block:: python { 'if': { 'exclusiveMaximum': 0, }, 'then': { 'minimum': -10, }, 'else': { 'multipleOf': 2, }, } Valid values are any between -10 and 0 or any multiplication of two. """ with self.l('try:'): self.generate_func_code_block(self._definition['if'], self._variable, self._variable_name, clear_variables=True) # depends on [control=['with'], data=[]] with self.l('except JsonSchemaException:'): if 'else' in self._definition: self.generate_func_code_block(self._definition['else'], self._variable, self._variable_name, clear_variables=True) # depends on [control=['if'], data=[]] else: self.l('pass') # depends on [control=['with'], data=[]] if 'then' in self._definition: with self.l('else:'): self.generate_func_code_block(self._definition['then'], self._variable, self._variable_name, clear_variables=True) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]]
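This generator appears to come from fastjsonschema, so the docstring's schema can be exercised through that library's public compile() entry point; the behaviour below follows directly from the try/except structure above.

import fastjsonschema

validate = fastjsonschema.compile({
    '$schema': 'http://json-schema.org/draft-07/schema#',
    'if': {'exclusiveMaximum': 0},
    'then': {'minimum': -10},
    'else': {'multipleOf': 2},
})
validate(-5)     # 'if' holds, so 'then' applies: -5 >= -10, valid
validate(4)      # 'if' fails, so 'else' applies: a multiple of 2, valid
try:
    validate(3)  # 'if' fails and 3 is not a multiple of 2
except fastjsonschema.JsonSchemaException as exc:
    print(exc)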
def list(self, platformIdentifier, configuration, libOverrides = {}): """ Returns the list of supported UE4-bundled third-party libraries """ modules = self._getThirdPartyLibs(platformIdentifier, configuration) return sorted([m['Name'] for m in modules] + [key for key in libOverrides])
def function[list, parameter[self, platformIdentifier, configuration, libOverrides]]: constant[ Returns the list of supported UE4-bundled third-party libraries ] variable[modules] assign[=] call[name[self]._getThirdPartyLibs, parameter[name[platformIdentifier], name[configuration]]] return[call[name[sorted], parameter[binary_operation[<ast.ListComp object at 0x7da18f00dd50> + <ast.ListComp object at 0x7da18f00d750>]]]]
keyword[def] identifier[list] ( identifier[self] , identifier[platformIdentifier] , identifier[configuration] , identifier[libOverrides] ={}): literal[string] identifier[modules] = identifier[self] . identifier[_getThirdPartyLibs] ( identifier[platformIdentifier] , identifier[configuration] ) keyword[return] identifier[sorted] ([ identifier[m] [ literal[string] ] keyword[for] identifier[m] keyword[in] identifier[modules] ]+[ identifier[key] keyword[for] identifier[key] keyword[in] identifier[libOverrides] ])
def list(self, platformIdentifier, configuration, libOverrides={}): """ Returns the list of supported UE4-bundled third-party libraries """ modules = self._getThirdPartyLibs(platformIdentifier, configuration) return sorted([m['Name'] for m in modules] + [key for key in libOverrides])
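What the merge returns, assuming modules is a list of {'Name': ...} dicts as the comprehension implies; the library names are made up.

modules = [{'Name': 'zlib'}, {'Name': 'libpng'}]
libOverrides = {'OpenSSL': object()}
print(sorted([m['Name'] for m in modules] + [key for key in libOverrides]))
# ['OpenSSL', 'libpng', 'zlib']  (capital letters sort before lowercase)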
def validate_comma_separated_emails(value): """ Validate every email address in a comma separated list of emails. """ if not isinstance(value, (tuple, list)): raise ValidationError('Email list must be a list/tuple.') for email in value: try: validate_email_with_name(email) except ValidationError: raise ValidationError('Invalid email: %s' % email, code='invalid')
def function[validate_comma_separated_emails, parameter[value]]: constant[ Validate every email address in a comma separated list of emails. ] if <ast.UnaryOp object at 0x7da204347340> begin[:] <ast.Raise object at 0x7da204347b50> for taget[name[email]] in starred[name[value]] begin[:] <ast.Try object at 0x7da204346cb0>
keyword[def] identifier[validate_comma_separated_emails] ( identifier[value] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[value] ,( identifier[tuple] , identifier[list] )): keyword[raise] identifier[ValidationError] ( literal[string] ) keyword[for] identifier[email] keyword[in] identifier[value] : keyword[try] : identifier[validate_email_with_name] ( identifier[email] ) keyword[except] identifier[ValidationError] : keyword[raise] identifier[ValidationError] ( literal[string] % identifier[email] , identifier[code] = literal[string] )
def validate_comma_separated_emails(value): """ Validate every email address in a comma separated list of emails. """ if not isinstance(value, (tuple, list)): raise ValidationError('Email list must be a list/tuple.') # depends on [control=['if'], data=[]] for email in value: try: validate_email_with_name(email) # depends on [control=['try'], data=[]] except ValidationError: raise ValidationError('Invalid email: %s' % email, code='invalid') # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['email']]
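The validate-each-and-rewrap pattern, self-contained: a per-item validator runs across the sequence and failures are re-raised with the offending value attached. ValidationError and the crude email check below are stand-ins for the Django pieces the real function imports.

class ValidationError(Exception):
    pass

def validate_email(value):
    if '@' not in value:  # deliberately crude stand-in check
        raise ValidationError('not an email')

def validate_emails(values):
    if not isinstance(values, (tuple, list)):
        raise ValidationError('Email list must be a list/tuple.')
    for email in values:
        try:
            validate_email(email)
        except ValidationError:
            raise ValidationError('Invalid email: %s' % email)

validate_emails(['a@example.com', 'b@example.com'])  # passes silently
try:
    validate_emails(['not-an-email'])
except ValidationError as exc:
    print(exc)  # Invalid email: not-an-email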
def get_source_kernel(gta, name, kernel=None):
    """Get the PDF for the given source."""
    sm = []
    zs = 0
    for c in gta.components:
        z = c.model_counts_map(name).data.astype('float')
        if kernel is not None:
            # Collapse the spatial axes and redistribute the counts with the
            # supplied spatial kernel, one energy plane at a time.
            shape = (z.shape[0],) + kernel.shape
            z = np.apply_over_axes(np.sum, z, axes=[1, 2]) * np.ones(
                shape) * kernel[np.newaxis, :, :]
            zs += np.sum(z)
        else:
            zs += np.sum(z)
        sm.append(z)

    # First pass: scale so the maps sum to one over all components (a PDF).
    sm2 = 0
    for i, m in enumerate(sm):
        sm[i] /= zs
        sm2 += np.sum(sm[i] ** 2)

    # Second pass: divide by the summed squares of the normalised maps.
    for i, m in enumerate(sm):
        sm[i] /= sm2

    return sm
def function[get_source_kernel, parameter[gta, name, kernel]]: constant[Get the PDF for the given source.] variable[sm] assign[=] list[[]] variable[zs] assign[=] constant[0] for taget[name[c]] in starred[name[gta].components] begin[:] variable[z] assign[=] call[call[name[c].model_counts_map, parameter[name[name]]].data.astype, parameter[constant[float]]] if compare[name[kernel] is_not constant[None]] begin[:] variable[shape] assign[=] binary_operation[tuple[[<ast.Subscript object at 0x7da2045661d0>]] + name[kernel].shape] variable[z] assign[=] binary_operation[binary_operation[call[name[np].apply_over_axes, parameter[name[np].sum, name[z]]] * call[name[np].ones, parameter[name[shape]]]] * call[name[kernel]][tuple[[<ast.Attribute object at 0x7da2045646d0>, <ast.Slice object at 0x7da2045675b0>, <ast.Slice object at 0x7da204565420>]]]] <ast.AugAssign object at 0x7da204567970> call[name[sm].append, parameter[name[z]]] variable[sm2] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da204565c00>, <ast.Name object at 0x7da204567b50>]]] in starred[call[name[enumerate], parameter[name[sm]]]] begin[:] <ast.AugAssign object at 0x7da204566620> <ast.AugAssign object at 0x7da204565fc0> for taget[tuple[[<ast.Name object at 0x7da204565300>, <ast.Name object at 0x7da204565e10>]]] in starred[call[name[enumerate], parameter[name[sm]]]] begin[:] <ast.AugAssign object at 0x7da204564b80> return[name[sm]]
keyword[def] identifier[get_source_kernel] ( identifier[gta] , identifier[name] , identifier[kernel] = keyword[None] ): literal[string] identifier[sm] =[] identifier[zs] = literal[int] keyword[for] identifier[c] keyword[in] identifier[gta] . identifier[components] : identifier[z] = identifier[c] . identifier[model_counts_map] ( identifier[name] ). identifier[data] . identifier[astype] ( literal[string] ) keyword[if] identifier[kernel] keyword[is] keyword[not] keyword[None] : identifier[shape] =( identifier[z] . identifier[shape] [ literal[int] ],)+ identifier[kernel] . identifier[shape] identifier[z] = identifier[np] . identifier[apply_over_axes] ( identifier[np] . identifier[sum] , identifier[z] , identifier[axes] =[ literal[int] , literal[int] ])* identifier[np] . identifier[ones] ( identifier[shape] )* identifier[kernel] [ identifier[np] . identifier[newaxis] ,:,:] identifier[zs] += identifier[np] . identifier[sum] ( identifier[z] ) keyword[else] : identifier[zs] += identifier[np] . identifier[sum] ( identifier[z] ) identifier[sm] . identifier[append] ( identifier[z] ) identifier[sm2] = literal[int] keyword[for] identifier[i] , identifier[m] keyword[in] identifier[enumerate] ( identifier[sm] ): identifier[sm] [ identifier[i] ]/= identifier[zs] identifier[sm2] += identifier[np] . identifier[sum] ( identifier[sm] [ identifier[i] ]** literal[int] ) keyword[for] identifier[i] , identifier[m] keyword[in] identifier[enumerate] ( identifier[sm] ): identifier[sm] [ identifier[i] ]/= identifier[sm2] keyword[return] identifier[sm]
def get_source_kernel(gta, name, kernel=None): """Get the PDF for the given source.""" sm = [] zs = 0 for c in gta.components: z = c.model_counts_map(name).data.astype('float') if kernel is not None: shape = (z.shape[0],) + kernel.shape z = np.apply_over_axes(np.sum, z, axes=[1, 2]) * np.ones(shape) * kernel[np.newaxis, :, :] zs += np.sum(z) # depends on [control=['if'], data=['kernel']] else: zs += np.sum(z) sm.append(z) # depends on [control=['for'], data=['c']] sm2 = 0 for (i, m) in enumerate(sm): sm[i] /= zs sm2 += np.sum(sm[i] ** 2) # depends on [control=['for'], data=[]] for (i, m) in enumerate(sm): sm[i] /= sm2 # depends on [control=['for'], data=[]] return sm
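The two normalisation passes at the end are easy to misread, so here they are in isolation with toy numbers: after dividing by the grand total (zs) and then by the summed squares (sm2), the overlap between the per-component PDF and the returned kernel sums to exactly one, which looks to be the invariant the function is after.

import numpy as np

maps = [np.array([[1.0, 2.0]]), np.array([[3.0, 4.0]])]  # toy component maps
zs = sum(np.sum(m) for m in maps)
pdf = [m / zs for m in maps]                   # pass 1: grand total becomes 1
sm2 = sum(np.sum(m ** 2) for m in pdf)
kernel = [m / sm2 for m in pdf]                # pass 2: divide by summed squares
print(sum(np.sum(f * k) for f, k in zip(pdf, kernel)))  # 1.0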
def _expand_nbest_translation(translation: Translation) -> List[Translation]:
    """
    Expand nbest translations in a single Translation object to one Translation
    object per nbest translation.

    :param translation: A Translation object.
    :return: A list of Translation objects.
    """
    nbest_list = []  # type: List[Translation]
    for target_ids, attention_matrix, score in zip(translation.nbest_translations.target_ids_list,
                                                   translation.nbest_translations.attention_matrices,
                                                   translation.nbest_translations.scores):
        nbest_list.append(Translation(target_ids, attention_matrix, score,
                                      translation.beam_histories,
                                      estimated_reference_length=translation.estimated_reference_length))

    return nbest_list
def function[_expand_nbest_translation, parameter[translation]]: constant[ Expand nbest translations in a single Translation object to one Translation object per nbest translation. :param translation: A Translation object. :return: A list of Translation objects. ] variable[nbest_list] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b1d22320>, <ast.Name object at 0x7da1b1d203d0>, <ast.Name object at 0x7da1b1d217e0>]]] in starred[call[name[zip], parameter[name[translation].nbest_translations.target_ids_list, name[translation].nbest_translations.attention_matrices, name[translation].nbest_translations.scores]]] begin[:] call[name[nbest_list].append, parameter[call[name[Translation], parameter[name[target_ids], name[attention_matrix], name[score], name[translation].beam_histories]]]] return[name[nbest_list]]
keyword[def] identifier[_expand_nbest_translation] ( identifier[translation] : identifier[Translation] )-> identifier[List] [ identifier[Translation] ]: literal[string] identifier[nbest_list] =[] keyword[for] identifier[target_ids] , identifier[attention_matrix] , identifier[score] keyword[in] identifier[zip] ( identifier[translation] . identifier[nbest_translations] . identifier[target_ids_list] , identifier[translation] . identifier[nbest_translations] . identifier[attention_matrices] , identifier[translation] . identifier[nbest_translations] . identifier[scores] ): identifier[nbest_list] . identifier[append] ( identifier[Translation] ( identifier[target_ids] , identifier[attention_matrix] , identifier[score] , identifier[translation] . identifier[beam_histories] , identifier[estimated_reference_length] = identifier[translation] . identifier[estimated_reference_length] )) keyword[return] identifier[nbest_list]
def _expand_nbest_translation(translation: Translation) -> List[Translation]:
    """
    Expand nbest translations in a single Translation object to one Translation
    object per nbest translation.

    :param translation: A Translation object.
    :return: A list of Translation objects.
    """
    nbest_list = []  # type: List[Translation]
    for (target_ids, attention_matrix, score) in zip(translation.nbest_translations.target_ids_list, translation.nbest_translations.attention_matrices, translation.nbest_translations.scores):
        nbest_list.append(Translation(target_ids, attention_matrix, score, translation.beam_histories, estimated_reference_length=translation.estimated_reference_length)) # depends on [control=['for'], data=[]]
    return nbest_list
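The shape of the expansion, with a namedtuple standing in for the real nbest container: one object carrying k parallel lists becomes k single-best records. The field names mirror the attributes used above; everything else is illustrative.

from collections import namedtuple

Nbest = namedtuple('Nbest', 'target_ids_list attention_matrices scores')
nbest = Nbest([[1, 2], [1, 3]], ['att0', 'att1'], [-0.1, -0.4])
expanded = [dict(target_ids=t, attention_matrix=a, score=s)
            for t, a, s in zip(nbest.target_ids_list,
                               nbest.attention_matrices,
                               nbest.scores)]
print(len(expanded), expanded[0]['score'])  # 2 -0.1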
def do_handle_log(self, workunit, level, *msg_elements): """Implementation of Reporter callback.""" if not self.is_under_main_root(workunit): return # If the element is a (msg, detail) pair, we ignore the detail. There's no # useful way to display it on the console. elements = [e if isinstance(e, six.string_types) else e[0] for e in msg_elements] msg = '\n' + ''.join(elements) if self.use_color_for_workunit(workunit, self.settings.color): msg = self._COLOR_BY_LEVEL.get(level, lambda x: x)(msg) self.emit(self._prefix(workunit, msg)) self.flush()
def function[do_handle_log, parameter[self, workunit, level]]: constant[Implementation of Reporter callback.] if <ast.UnaryOp object at 0x7da1b2293e50> begin[:] return[None] variable[elements] assign[=] <ast.ListComp object at 0x7da1b2290460> variable[msg] assign[=] binary_operation[constant[ ] + call[constant[].join, parameter[name[elements]]]] if call[name[self].use_color_for_workunit, parameter[name[workunit], name[self].settings.color]] begin[:] variable[msg] assign[=] call[call[name[self]._COLOR_BY_LEVEL.get, parameter[name[level], <ast.Lambda object at 0x7da1b22f8e80>]], parameter[name[msg]]] call[name[self].emit, parameter[call[name[self]._prefix, parameter[name[workunit], name[msg]]]]] call[name[self].flush, parameter[]]
keyword[def] identifier[do_handle_log] ( identifier[self] , identifier[workunit] , identifier[level] ,* identifier[msg_elements] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[is_under_main_root] ( identifier[workunit] ): keyword[return] identifier[elements] =[ identifier[e] keyword[if] identifier[isinstance] ( identifier[e] , identifier[six] . identifier[string_types] ) keyword[else] identifier[e] [ literal[int] ] keyword[for] identifier[e] keyword[in] identifier[msg_elements] ] identifier[msg] = literal[string] + literal[string] . identifier[join] ( identifier[elements] ) keyword[if] identifier[self] . identifier[use_color_for_workunit] ( identifier[workunit] , identifier[self] . identifier[settings] . identifier[color] ): identifier[msg] = identifier[self] . identifier[_COLOR_BY_LEVEL] . identifier[get] ( identifier[level] , keyword[lambda] identifier[x] : identifier[x] )( identifier[msg] ) identifier[self] . identifier[emit] ( identifier[self] . identifier[_prefix] ( identifier[workunit] , identifier[msg] )) identifier[self] . identifier[flush] ()
def do_handle_log(self, workunit, level, *msg_elements): """Implementation of Reporter callback.""" if not self.is_under_main_root(workunit): return # depends on [control=['if'], data=[]] # If the element is a (msg, detail) pair, we ignore the detail. There's no # useful way to display it on the console. elements = [e if isinstance(e, six.string_types) else e[0] for e in msg_elements] msg = '\n' + ''.join(elements) if self.use_color_for_workunit(workunit, self.settings.color): msg = self._COLOR_BY_LEVEL.get(level, lambda x: x)(msg) # depends on [control=['if'], data=[]] self.emit(self._prefix(workunit, msg)) self.flush()
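The element normalisation is the only subtle step here: (msg, detail) pairs are collapsed to their message half before joining, since the detail has no console rendering. In isolation (six.string_types reduces to str on Python 3):

msg_elements = ['compiled ', ('3 targets', 'detail omitted on console'), ' ok']
elements = [e if isinstance(e, str) else e[0] for e in msg_elements]
print('\n' + ''.join(elements))  # prints a leading newline, then: compiled 3 targets ok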