code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
return self._getNodeData(name, self._WRITENODE, channel)
def getWriteData(self, name, channel=None)
Returns a sensor node
11.293413
9.470636
1.192466
nodeChannel = None if name in metadata: nodeChannelList = metadata[name] if len(nodeChannelList) > 1: nodeChannel = channel if channel is not None else nodeChannelList[0] elif len(nodeChannelList) == 1: nodeChannel = nodeChannelList[0] else: LOG.warning("HMDevice._getNodeData: %s not found in %s, empty nodeChannelList" % (name, metadata)) return None if nodeChannel is not None and nodeChannel in self.CHANNELS: return self._hmchannels[nodeChannel].getValue(name) LOG.error("HMDevice._getNodeData: %s not found in %s" % (name, metadata)) return None
def _getNodeData(self, name, metadata, channel=None)
Returns a data point from data
2.927428
2.878394
1.017035
nodeChannel = None if name in metadata: nodeChannelList = metadata[name] if len(nodeChannelList) > 1: nodeChannel = channel if channel is not None else nodeChannelList[0] elif len(nodeChannelList) == 1: nodeChannel = nodeChannelList[0] if nodeChannel is not None and nodeChannel in self.CHANNELS: return self._hmchannels[nodeChannel].setValue(name, data) LOG.error("HMDevice.setNodeData: %s not found with value %s on %i" % (name, data, nodeChannel)) return False
def _setNodeData(self, name, metadata, data, channel=None)
Returns a data point from data
3.749256
3.663756
1.023337
if hasattr(callback, '__call__'): if channel == 0: self._eventcallbacks.append(callback) elif not bequeath and channel > 0 and channel in self._hmchannels: self._hmchannels[channel]._eventcallbacks.append(callback) if bequeath: for channel, device in self._hmchannels.items(): device._eventcallbacks.append(callback)
def setEventCallback(self, callback, bequeath=True, channel=0)
Set additional event callbacks for the device. Set the callback for specific channels or use the device itself and let it bequeath the callback to all of its children. Signature for callback-functions: foo(address, interface_id, key, value)
3.192257
3.214991
0.992929
if channel in self.CHANNELS: return self.CHANNELS[channel].setValue(key, value) LOG.error("HMDevice.setValue: channel not found %i!" % channel)
def setValue(self, key, value, channel=1)
Some devices allow to directly set values to perform a specific task.
7.307098
6.962536
1.049488
if channel in self.CHANNELS: return self.CHANNELS[channel].getValue(key) LOG.error("HMDevice.getValue: channel not found %i!" % channel)
def getValue(self, key, channel=1)
Some devices allow to directly get values for specific parameters.
7.87538
7.434116
1.059357
try: target_temperature = float(target_temperature) except Exception as err: LOG.debug("Thermostat.set_temperature: Exception %s" % (err,)) return False self.writeNodeData("SET_TEMPERATURE", target_temperature)
def set_temperature(self, target_temperature)
Set the target temperature.
4.448206
4.382556
1.01498
set_data = True mode = None if setmode == self.AUTO_MODE: mode = 'AUTO_MODE' elif setmode == self.MANU_MODE: mode = 'MANU_MODE' set_data = self.get_set_temperature() elif setmode == self.BOOST_MODE: mode = 'BOOST_MODE' elif setmode == self.COMFORT_MODE: mode = 'COMFORT_MODE' elif setmode == self.LOWERING_MODE: mode = 'LOWERING_MODE' else: LOG.warning("Thermostat.MODE.setter: Invalid mode: %s" % str(setmode)) if mode: self.actionNodeData(mode, set_data)
def MODE(self, setmode)
Set mode.
3.185188
3.153916
1.009915
if setmode == self.BOOST_MODE: self.actionNodeData('BOOST_MODE', True) elif setmode in [self.AUTO_MODE, self.MANU_MODE]: if self.getAttributeData("BOOST_MODE"): self.actionNodeData('BOOST_MODE', False) self.actionNodeData('CONTROL_MODE', setmode)
def MODE(self, setmode)
Set mode.
4.550159
4.564587
0.996839
LOG.info("Shutting down server") self.server.shutdown() LOG.debug("ServerThread.stop: Stopping ServerThread") self.server.server_close() LOG.info("Server stopped")
def stop(self)
Shut down our XML-RPC server.
5.368222
4.764693
1.126667
'''Run the normal shovel functionality''' import os import sys import argparse import pkg_resources # First off, read the arguments parser = argparse.ArgumentParser(prog='shovel', description='Rake, for Python') parser.add_argument('method', help='The task to run') parser.add_argument('--verbose', dest='verbose', action='store_true', help='Be extra talkative') parser.add_argument('--dry-run', dest='dryRun', action='store_true', help='Show the args that would be used') ver = pkg_resources.require('shovel')[0].version parser.add_argument('--version', action='version', version='Shovel v %s' % ver, help='print the version of Shovel.') # Parse our arguments if args: clargs, remaining = parser.parse_known_args(args=args) else: # pragma: no cover clargs, remaining = parser.parse_known_args() if clargs.verbose: logger.setLevel(logging.DEBUG) args, kwargs = parse(remaining) # Import all of the files we want shovel = Shovel() # Read in any tasks that have already been defined shovel.extend(Task.clear()) for path in [ os.path.expanduser('~/.shovel.py'), os.path.expanduser('~/.shovel')]: if os.path.exists(path): # pragma: no cover shovel.read(path, os.path.expanduser('~/')) shovel_home = os.environ.get('SHOVEL_HOME') if shovel_home and os.path.exists(shovel_home): shovel.read(shovel_home, shovel_home) for path in ['shovel.py', 'shovel']: if os.path.exists(path): shovel.read(path) # If it's help we're looking for, look no further if clargs.method == 'help': print(help.shovel_help(shovel, *args, **kwargs)) elif clargs.method == 'tasks': tasks = list(v for _, v in shovel.items()) if not tasks: print('No tasks found!') else: names = list(t.fullname for t in tasks) docs = list(t.doc for t in tasks) # The width of the screen width = 80 import shutil try: width, _ = shutil.get_terminal_size(fallback=(0, width)) except AttributeError: pass # Create the format with padding for the longest name, and to # accomodate the screen width format = '%%-%is # %%-%is' % ( max(len(name) for name 
in names), width) for name, doc in zip(names, docs): print(format % (name, doc)) elif clargs.method: # Try to get the first command provided try: tasks = shovel.tasks(clargs.method) except KeyError: print('Could not find task "%s"' % clargs.method, file=sys.stderr) exit(1) if len(tasks) > 1: print('Specifier "%s" matches multiple tasks:' % clargs.method, file=sys.stderr) for task in tasks: print('\t%s' % task.fullname, file=sys.stderr) exit(2) task = tasks[0] if clargs.dryRun: print(task.dry(*args, **kwargs)) else: task(*args, **kwargs)
def run(*args)
Run the normal shovel functionality
2.975682
2.918493
1.019596
'''Return a list of tuples of (fullname, docstring, level) for all the tasks in the provided shovel''' result = [] for key, value in sorted(shovel.map.items()): if prefix: key = prefix + '.' + key if isinstance(value, Shovel): result.append((key, None, level)) result.extend(heirarchical_helper(value, key, level + 1)) else: result.append((key, value.doc or '(No docstring)', level)) return result
def heirarchical_helper(shovel, prefix, level=0)
Return a list of tuples of (fullname, docstring, level) for all the tasks in the provided shovel
3.330198
2.184623
1.524381
'''Given a shovel of tasks, display a heirarchical list of the tasks''' result = [] tuples = heirarchical_helper(shovel, prefix) if not tuples: return '' # We need to figure out the longest fullname length longest = max(len(name + ' ' * level) for name, _, level in tuples) fmt = '%%%is => %%-50s' % longest for name, docstring, level in tuples: if docstring == None: result.append(' ' * level + name + '/') else: docstring = re.sub(r'\s+', ' ', docstring).strip() if len(docstring) > 50: docstring = docstring[:47] + '...' result.append(fmt % (name, docstring)) return '\n'.join(result)
def heirarchical_help(shovel, prefix)
Given a shovel of tasks, display a heirarchical list of the tasks
3.938241
3.597402
1.094746
'''Return a string about help with the tasks, or lists tasks available''' # If names are provided, and the name refers to a group of tasks, print out # the tasks and a brief docstring. Otherwise, just enumerate all the tasks # available if not len(names): return heirarchical_help(shovel, '') else: for name in names: task = shovel[name] if isinstance(task, Shovel): return heirarchical_help(task, name) else: return task.help()
def shovel_help(shovel, *names)
Return a string about help with the tasks, or lists tasks available
6.764545
4.771535
1.417687
'''Either load a path and return a shovel object or return None''' obj = cls() obj.read(path, base) return obj
def load(cls, path, base=None)
Either load a path and return a shovel object or return None
11.654296
4.270509
2.729018
'''Add tasks to this particular shovel''' self._tasks.extend(tasks) for task in tasks: # We'll now go through all of our tasks and group them into # sub-shovels current = self.map modules = task.fullname.split('.') for module in modules[:-1]: if not isinstance(current[module], Shovel): logger.warn('Overriding task %s with a module' % current[module].file) shovel = Shovel() shovel.overrides = current[module] current[module] = shovel current = current[module].map # Now we'll put the task in this particular sub-shovel name = modules[-1] if name in current: logger.warn('Overriding %s with %s' % ( '.'.join(modules), task.file)) task.overrides = current[name] current[name] = task
def extend(self, tasks)
Add tasks to this particular shovel
3.916425
3.604353
1.086582
'''Import some tasks''' if base == None: base = os.getcwd() absolute = os.path.abspath(path) if os.path.isfile(absolute): # Load that particular file logger.info('Loading %s' % absolute) self.extend(Task.load(path, base)) elif os.path.isdir(absolute): # Walk this directory looking for tasks tasks = [] for root, _, files in os.walk(absolute): files = [f for f in files if f.endswith('.py')] for child in files: absolute = os.path.join(root, child) logger.info('Loading %s' % absolute) tasks.extend(Task.load(absolute, base)) self.extend(tasks)
def read(self, path, base=None)
Import some tasks
2.760704
2.564382
1.076558
'''Return all valid keys''' keys = [] for key, value in self.map.items(): if isinstance(value, Shovel): keys.extend([key + '.' + k for k in value.keys()]) else: keys.append(key) return sorted(keys)
def keys(self)
Return all valid keys
3.96242
3.660088
1.082602
'''Return a list of tuples of all the keys and tasks''' pairs = [] for key, value in self.map.items(): if isinstance(value, Shovel): pairs.extend([(key + '.' + k, v) for k, v in value.items()]) else: pairs.append((key, value)) return sorted(pairs)
def items(self)
Return a list of tuples of all the keys and tasks
4.331054
3.150447
1.374743
'''Get all the tasks that match a name''' found = self[name] if isinstance(found, Shovel): return [v for _, v in found.items()] return [found]
def tasks(self, name)
Get all the tasks that match a name
7.854498
7.296306
1.076503
'''Given a callable object, return a new callable object''' try: cls._cache.append(Task(obj)) except Exception: logger.exception('Unable to make task for %s' % repr(obj))
def make(cls, obj)
Given a callable object, return a new callable object
8.525264
6.588678
1.293926
'''Return a list of the tasks stored in a file''' base = base or os.getcwd() absolute = os.path.abspath(path) parent = os.path.dirname(absolute) name, _, _ = os.path.basename(absolute).rpartition('.py') fobj, path, description = imp.find_module(name, [parent]) try: imp.load_module(name, fobj, path, description) finally: if fobj: fobj.close() # Manipulate the full names of the tasks to be relative to the provided # base relative, _, _ = os.path.relpath(path, base).rpartition('.py') for task in cls._cache: parts = relative.split(os.path.sep) parts.append(task.name) # If it's either in shovel.py, or folder/__init__.py, then we # should consider it as being at one level above that file parts = [part.strip('.') for part in parts if part not in ('shovel', '.shovel', '__init__', '.', '..', '')] task.fullname = '.'.join(parts) logger.debug('Found task %s in %s' % (task.fullname, task.module)) return cls.clear()
def load(cls, path, base=None)
Return a list of the tasks stored in a file
4.502989
4.209925
1.069613
'''Run a task and return a dictionary with stderr, stdout and the return value. Also, the traceback from the exception if there was one''' import traceback try: from StringIO import StringIO except ImportError: from io import StringIO stdout, stderr = sys.stdout, sys.stderr sys.stdout = out = StringIO() sys.stderr = err = StringIO() result = { 'exception': None, 'stderr': None, 'stdout': None, 'return': None } try: result['return'] = self.__call__(*args, **kwargs) except Exception: result['exception'] = traceback.format_exc() sys.stdout, sys.stderr = stdout, stderr result['stderr'] = err.getvalue() result['stdout'] = out.getvalue() return result
def capture(self, *args, **kwargs)
Run a task and return a dictionary with stderr, stdout and the return value. Also, the traceback from the exception if there was one
2.842964
1.897112
1.498575
'''Perform a dry-run of the task''' return 'Would have executed:\n%s%s' % ( self.name, Args(self.spec).explain(*args, **kwargs))
def dry(self, *args, **kwargs)
Perform a dry-run of the task
10.89417
11.224578
0.970564
'''Return the help string of the task''' # This returns a help string for a given task of the form: # # ================================================== # <name> # ============================== (If supplied) # <docstring> # ============================== (If overrides other tasks) # Overrides <other task file> # ============================== # From <file> on <line> # ============================== # <name>(Argspec) result = [ '=' * 50, self.name ] # And the doc, if it exists if self.doc: result.extend([ '=' * 30, self.doc ]) override = self.overrides while override: if isinstance(override, Shovel): result.append('Overrides module') else: result.append('Overrides %s' % override.file) override = override.overrides # Print where we read this function in from result.extend([ '=' * 30, 'From %s on line %i' % (self.file, self.line), '=' * 30, '%s%s' % (self.name, str(Args(self.spec))) ]) return os.linesep.join(result)
def help(self)
Return the help string of the task
5.771911
5.591529
1.03226
'''Return a string that describes how these args are interpreted''' args = self.get(*args, **kwargs) results = ['%s = %s' % (name, value) for name, value in args.required] results.extend(['%s = %s (overridden)' % ( name, value) for name, value in args.overridden]) results.extend(['%s = %s (default)' % ( name, value) for name, value in args.defaulted]) if self._varargs: results.append('%s = %s' % (self._varargs, args.varargs)) if self._kwargs: results.append('%s = %s' % (self._kwargs, args.kwargs)) return '\n\t'.join(results)
def explain(self, *args, **kwargs)
Return a string that describes how these args are interpreted
2.540047
2.200208
1.154458
'''Evaluate this argspec with the provided arguments''' # We'll go through all of our required args and make sure they're # present required = [arg for arg in self._args if arg not in kwargs] if len(args) < len(required): raise TypeError('Missing arguments %s' % required[len(args):]) required = list(zip(required, args)) args = args[len(required):] # Now we'll look through our defaults, if there are any defaulted = [(name, default) for name, default in self._defaults if name not in kwargs] overridden = list(zip([d[0] for d in defaulted], args)) args = args[len(overridden):] defaulted = defaulted[len(overridden):] # And anything left over is in varargs if args and not self._varargs: raise TypeError('Too many arguments provided') return ArgTuple(required, overridden, defaulted, args, kwargs)
def get(self, *args, **kwargs)
Evaluate this argspec with the provided arguments
4.131336
3.736825
1.105574
'''Parse the provided string to produce *args and **kwargs''' args = [] kwargs = {} last = None for token in tokens: if token.startswith('--'): # If this is a keyword flag, but we've already got one that we've # parsed, then we're going to interpret it as a bool if last: kwargs[last] = True # See if it is the --foo=5 style last, _, value = token.strip('-').partition('=') if value: kwargs[last] = value last = None elif last != None: kwargs[last] = token last = None else: args.append(token) # If there's a dangling last, set that bool if last: kwargs[last] = True return args, kwargs
def parse(tokens)
Parse the provided string to produce *args and **kwargs
4.795243
4.26604
1.12405
'''Computes the sum of the provided numbers''' print('%s = %f' % (' + '.join(args), sum(float(arg) for arg in args)))
def sumnum(*args)
Computes the sum of the provided numbers
5.584384
5.660646
0.986528
'''Prints a name, and all keyword attributes''' print('%s has attributes:' % name) for key, value in kwargs.items(): print('\t%s => %s' % (key, value))
def attributes(name, **kwargs)
Prints a name, and all keyword attributes
4.412392
3.032544
1.455013
DJANGO_111_OR_UP = (VERSION[0] == 1 and VERSION[1] >= 11) or ( VERSION[0] >= 2 ) if DJANGO_111_OR_UP: return super(DynamicRawIDWidget, self).render( name, value, attrs, renderer=renderer ) if attrs is None: attrs = {} related_url = reverse( 'admin:{0}_{1}_changelist'.format( self.rel.to._meta.app_label, self.rel.to._meta.object_name.lower(), ), current_app=self.admin_site.name, ) params = self.url_parameters() if params: url = u'?' + u'&'.join( [u'{0}={1}'.format(k, v) for k, v in params.items()] ) else: url = u'' if "class" not in attrs: attrs[ 'class' ] = ( 'vForeignKeyRawIdAdminField' ) # The JavaScript looks for this hook. app_name = self.rel.to._meta.app_label.strip() model_name = self.rel.to._meta.object_name.lower().strip() hidden_input = super(widgets.ForeignKeyRawIdWidget, self).render( name, value, attrs ) extra_context = { 'hidden_input': hidden_input, 'name': name, 'app_name': app_name, 'model_name': model_name, 'related_url': related_url, 'url': url, } return render_to_string( 'dynamic_raw_id/admin/widgets/dynamic_raw_id_field.html', extra_context, )
def render(self, name, value, attrs=None, multi=False, renderer=None)
Django <= 1.10 variant.
2.125856
2.093839
1.015291
context = super(DynamicRawIDWidget, self).get_context( name, value, attrs ) model = self.rel.model if VERSION[0] == 2 else self.rel.to related_url = reverse( 'admin:{0}_{1}_changelist'.format( model._meta.app_label, model._meta.object_name.lower() ), current_app=self.admin_site.name, ) params = self.url_parameters() if params: url = u'?' + u'&'.join( [u'{0}={1}'.format(k, v) for k, v in params.items()] ) else: url = u'' if "class" not in attrs: attrs[ 'class' ] = ( 'vForeignKeyRawIdAdminField' ) # The JavaScript looks for this hook. app_name = model._meta.app_label.strip() model_name = model._meta.object_name.lower().strip() context.update( { 'name': name, 'app_name': app_name, 'model_name': model_name, 'related_url': related_url, 'url': url, } ) return context
def get_context(self, name, value, attrs)
Django >= 1.11 variant.
2.286465
2.234432
1.023287
return DynamicRawIDFilterForm( admin_site=admin_site, rel=rel, field_name=self.field_path, data=self.used_parameters, )
def get_form(self, request, rel, admin_site)
Return filter form.
4.827082
3.994691
1.208374
if self.form.is_valid(): # get no null params filter_params = dict( filter(lambda x: bool(x[1]), self.form.cleaned_data.items()) ) return queryset.filter(**filter_params) return queryset
def queryset(self, request, queryset)
Filter queryset using params from the form.
3.791523
3.326896
1.139658
''' Decorate a control function in order to conduct an experiment when called. :param callable candidate: your candidate function :param iterable exp_args: positional arguments passed to :class:`Experiment` :param dict exp_kwargs: keyword arguments passed to :class:`Experiment` Usage:: candidate_func = lambda: True @Experiment.decorator(candidate_func) def control_func(): return True ''' def wrapper(control): @wraps(control) def inner(*args, **kwargs): experiment = cls(*exp_args, **exp_kwargs) experiment.control(control, args=args, kwargs=kwargs) experiment.candidate(candidate, args=args, kwargs=kwargs) return experiment.conduct() return inner return wrapper
def decorator(cls, candidate, *exp_args, **exp_kwargs)
Decorate a control function in order to conduct an experiment when called. :param callable candidate: your candidate function :param iterable exp_args: positional arguments passed to :class:`Experiment` :param dict exp_kwargs: keyword arguments passed to :class:`Experiment` Usage:: candidate_func = lambda: True @Experiment.decorator(candidate_func) def control_func(): return True
3.942636
1.803023
2.186681
''' Set the experiment's control function. Must be set before ``conduct()`` is called. :param callable control_func: your control function :param iterable args: positional arguments to pass to your function :param dict kwargs: keyword arguments to pass to your function :param string name: a name for your observation :param dict context: observation-specific context :raises LaboratoryException: If attempting to set a second control case ''' if self._control is not None: raise exceptions.LaboratoryException( 'You have already established a control case' ) self._control = { 'func': control_func, 'args': args or [], 'kwargs': kwargs or {}, 'name': name, 'context': context or {}, }
def control(self, control_func, args=None, kwargs=None, name='Control', context=None)
Set the experiment's control function. Must be set before ``conduct()`` is called. :param callable control_func: your control function :param iterable args: positional arguments to pass to your function :param dict kwargs: keyword arguments to pass to your function :param string name: a name for your observation :param dict context: observation-specific context :raises LaboratoryException: If attempting to set a second control case
3.779597
1.581955
2.389193
''' Adds a candidate function to an experiment. Can be used multiple times for multiple candidates. :param callable cand_func: your control function :param iterable args: positional arguments to pass to your function :param dict kwargs: keyword arguments to pass to your function :param string name: a name for your observation :param dict context: observation-specific context ''' self._candidates.append({ 'func': cand_func, 'args': args or [], 'kwargs': kwargs or {}, 'name': name, 'context': context or {}, })
def candidate(self, cand_func, args=None, kwargs=None, name='Candidate', context=None)
Adds a candidate function to an experiment. Can be used multiple times for multiple candidates. :param callable cand_func: your control function :param iterable args: positional arguments to pass to your function :param dict kwargs: keyword arguments to pass to your function :param string name: a name for your observation :param dict context: observation-specific context
3.242547
1.457874
2.224162
''' Run control & candidate functions and return the control's return value. ``control()`` must be called first. :param bool randomize: controls whether we shuffle the order of execution between control and candidate :raise LaboratoryException: when no control case has been set :return: Control function's return value ''' if self._control is None: raise exceptions.LaboratoryException( 'Your experiment must contain a control case' ) # execute control and exit if experiment is not enabled if not self.enabled(): control = self._run_tested_func(raise_on_exception=True, **self._control) return control.value # otherwise, let's wrap an executor around all of our functions and randomise the ordering def get_func_executor(obs_def, is_control): return lambda *a, **kw: (self._run_tested_func(raise_on_exception=is_control, **obs_def), is_control) funcs = [ get_func_executor(self._control, is_control=True), ] + [get_func_executor(cand, is_control=False,) for cand in self._candidates] if randomize: random.shuffle(funcs) control = None candidates = [] # go through the randomised list and execute the functions for func in funcs: observation, is_control = func() if is_control: control = observation else: candidates.append(observation) result = Result(self, control, candidates) try: self.publish(result) except Exception: msg = 'Exception occured when publishing %s experiment data' logger.exception(msg % self.name) return control.value
def conduct(self, randomize=True)
Run control & candidate functions and return the control's return value. ``control()`` must be called first. :param bool randomize: controls whether we shuffle the order of execution between control and candidate :raise LaboratoryException: when no control case has been set :return: Control function's return value
5.647808
3.676078
1.536368
''' Compares two :class:`Observation` instances. :param Observation control: The control block's :class:`Observation` :param Observation candidate: A candidate block's :class:`Observation` :raises MismatchException: If ``Experiment.raise_on_mismatch`` is True :return bool: match? ''' if candidate.failure or control.value != candidate.value: return self._handle_comparison_mismatch(control, candidate) return True
def compare(self, control, candidate)
Compares two :class:`Observation` instances. :param Observation control: The control block's :class:`Observation` :param Observation candidate: A candidate block's :class:`Observation` :raises MismatchException: If ``Experiment.raise_on_mismatch`` is True :return bool: match?
7.033034
2.204068
3.190934
''' bytes -> str ''' if riemann.network.CASHADDR_PREFIX is None: raise ValueError('Network {} does not support cashaddresses.' .format(riemann.get_current_network_name())) data = convertbits(data, 8, 5) checksum = calculate_checksum(riemann.network.CASHADDR_PREFIX, data) payload = b32encode(data + checksum) form = '{prefix}:{payload}' return form.format( prefix=riemann.network.CASHADDR_PREFIX, payload=payload)
def encode(data)
bytes -> str
4.708553
4.328324
1.087847
''' str -> bytes ''' if riemann.network.CASHADDR_PREFIX is None: raise ValueError('Network {} does not support cashaddresses.' .format(riemann.get_current_network_name())) if data.find(riemann.network.CASHADDR_PREFIX) != 0: raise ValueError('Malformed cashaddr. Cannot locate prefix: {}' .format(riemann.netowrk.CASHADDR_PREFIX)) # the data is everything after the colon prefix, data = data.split(':') decoded = b32decode(data) if not verify_checksum(prefix, decoded): raise ValueError('Bad cash address checksum') converted = convertbits(decoded, 5, 8) return bytes(converted[:-6])
def decode(data)
str -> bytes
5.280494
4.920965
1.073061
# fail if addsalt() was not called at the right time if self.state != 1: raise Exception('addsalt() not called after init() and before update()') # salt size is to be 4x word size saltsize = self.WORDBYTES * 4 # if too short, prefix with null bytes. if too long, # truncate high order bytes if len(salt) < saltsize: salt = (chr(0)*(saltsize-len(salt)) + salt) else: salt = salt[-saltsize:] # prep the salt array self.salt[0] = self.byte2int(salt[ : 4<<self.mul]) self.salt[1] = self.byte2int(salt[ 4<<self.mul: 8<<self.mul]) self.salt[2] = self.byte2int(salt[ 8<<self.mul:12<<self.mul]) self.salt[3] = self.byte2int(salt[12<<self.mul: ])
def addsalt(self, salt)
adds a salt to the hash function (OPTIONAL) should be called AFTER Init, and BEFORE update salt: a bytestring, length determined by hashbitlen. if not of sufficient length, the bytestring will be assumed to be a big endian number and prefixed with an appropriate number of null bytes, and if too large, only the low order bytes will be used. if hashbitlen=224 or 256, then salt will be 16 bytes if hashbitlen=384 or 512, then salt will be 32 bytes
3.942866
3.671519
1.073906
self.state = 2 BLKBYTES = self.BLKBYTES # de-referenced for improved readability BLKBITS = self.BLKBITS datalen = len(data) if not datalen: return if type(data) == type(u''): # use either of the next two lines for a proper # response under both Python2 and Python3 data = data.encode('UTF-8') # converts to byte string #data = bytearray(data, 'utf-8') # use if want mutable # This next line works for Py3 but fails under # Py2 because the Py2 version of bytes() will # accept only *one* argument. Arrrrgh!!! #data = bytes(data, 'utf-8') # converts to immutable byte # string but... under p7 # bytes() wants only 1 arg # ...a dummy, 2nd argument like encoding=None # that does nothing would at least allow # compatibility between Python2 and Python3. left = len(self.cache) fill = BLKBYTES - left # if any cached data and any added new data will fill a # full block, fill and compress if left and datalen >= fill: self.cache = self.cache + data[:fill] self.t += BLKBITS # update counter self._compress(self.cache) self.cache = b'' data = data[fill:] datalen -= fill # compress new data until not enough for a full block while datalen >= BLKBYTES: self.t += BLKBITS # update counter self._compress(data[:BLKBYTES]) data = data[BLKBYTES:] datalen -= BLKBYTES # cache all leftover bytes until next call to update() if datalen > 0: self.cache = self.cache + data[:datalen]
def update(self, data)
update the state with new data, storing excess data as necessary. may be called multiple times and if a call sends less than a full block in size, the leftover is cached and will be consumed in the next call data: data to be hashed (bytestring)
7.427929
7.063405
1.051608
if self.state == 3: # we have already finalized so simply return the # previously calculated/stored hash value return self.hash if data: self.update(data) ZZ = b'\x00' ZO = b'\x01' OZ = b'\x80' OO = b'\x81' PADDING = OZ + ZZ*128 # pre-formatted padding data # copy nb. bits hash in total as a 64-bit BE word # copy nb. bits hash in total as a 128-bit BE word tt = self.t + (len(self.cache) << 3) if self.BLKBYTES == 64: msglen = self._int2eightByte(tt) else: low = tt & self.MASK high = tt >> self.WORDBITS msglen = self._int2eightByte(high) + self._int2eightByte(low) # size of block without the words at the end that count # the number of bits, 55 or 111. # Note: (((self.WORDBITS/8)*2)+1) equals ((self.WORDBITS>>2)+1) sizewithout = self.BLKBYTES - ((self.WORDBITS>>2)+1) if len(self.cache) == sizewithout: # special case of one padding byte self.t -= 8 if self.hashbitlen in [224, 384]: self.update(OZ) else: self.update(OO) else: if len(self.cache) < sizewithout: # enough space to fill the block # use t=0 if no remaining data if len(self.cache) == 0: self.nullt=1 self.t -= (sizewithout - len(self.cache)) << 3 self.update(PADDING[:sizewithout - len(self.cache)]) else: # NOT enough space, need 2 compressions # ...add marker, pad with nulls and compress self.t -= (self.BLKBYTES - len(self.cache)) << 3 self.update(PADDING[:self.BLKBYTES - len(self.cache)]) # ...now pad w/nulls leaving space for marker & bit count self.t -= (sizewithout+1) << 3 self.update(PADDING[1:sizewithout+1]) # pad with zeroes self.nullt = 1 # raise flag to set t=0 at the next _compress # append a marker byte if self.hashbitlen in [224, 384]: self.update(ZZ) else: self.update(ZO) self.t -= 8 # append the number of bits (long long) self.t -= self.BLKBYTES self.update(msglen) hashval = [] if self.BLKBYTES == 64: for h in self.h: hashval.append(self._int2fourByte(h)) else: for h in self.h: hashval.append(self._int2eightByte(h)) self.hash = b''.join(hashval)[:self.hashbitlen >> 3] self.state = 3 return 
self.hash
def final(self, data='')
finalize the hash -- pad and hash remaining data returns hashval, the digest
4.707017
4.591204
1.025225
def copy(self, outpoint=None, stack_script=None, redeem_script=None, sequence=None):
    ''' TxIn -> TxIn
    Return a copy of this input. Any keyword given overrides
    the corresponding field; None keeps the current value.
    '''
    def pick(override, current):
        # small helper: prefer the override when supplied
        return current if override is None else override
    return TxIn(
        outpoint=pick(outpoint, self.outpoint),
        stack_script=pick(stack_script, self.stack_script),
        redeem_script=pick(redeem_script, self.redeem_script),
        sequence=pick(sequence, self.sequence))
TxIn -> TxIn
2.18314
1.835022
1.189708
def _parse_script_sig(TxIn, script_sig):
    ''' byte_string -> (byte_string, byte_string)
    Split a script sig into (stack_script, redeem_script).
    If the final item is not itself a deserializable script the
    whole sig is treated as stack script with an empty redeem script.
    '''
    stack = script_sig
    redeem = b''
    try:
        # If the last entry deserializes, it's a p2sh input.
        # There is a vanishingly small edge case where the pubkey
        # forms a deserializable script.
        # Edge case: serialization errors on CODESEPARATOR
        tokens = serialization.deserialize(script_sig).split()
        serialization.hex_deserialize(tokens[-1])
        stack = serialization.serialize(' '.join(tokens[:-1]))
        redeem = serialization.serialize(tokens[-1])
    except (IndexError, ValueError, NotImplementedError):
        pass
    return stack, redeem
byte_string -> (byte_string, byte_string)
7.970415
7.25914
1.097983
def from_bytes(TxIn, byte_string):
    ''' byte_string -> TxIn
    parses a TxIn from a byte-like object
    '''
    # fixed 36-byte outpoint: 32-byte tx id + 4-byte index
    outpoint = Outpoint.from_bytes(byte_string[:36])
    # VarInt script length (at most 9 bytes)
    script_sig_len = VarInt.from_bytes(byte_string[36:45])
    script_start = 36 + len(script_sig_len)
    script_end = script_start + script_sig_len.number
    script_sig = byte_string[script_start:script_end]
    # 4-byte sequence number follows the script sig
    sequence = byte_string[script_end:script_end + 4]
    if script_sig == b'':
        stack_script = b''
        redeem_script = b''
    else:
        # split into stack script and (possible p2sh) redeem script
        stack_script, redeem_script = TxIn._parse_script_sig(script_sig)
    return TxIn(
        outpoint=outpoint,
        stack_script=stack_script,
        redeem_script=redeem_script,
        sequence=sequence)
byte_string -> TxIn parses a TxIn from a byte-like object
2.608938
2.393463
1.090027
def no_witness(self):
    ''' Tx -> bytes
    Serialize the transaction in legacy (non-witness) format:
    version | n_ins | ins | n_outs | outs | lock_time
    '''
    pieces = [self.version, VarInt(len(self.tx_ins)).to_bytes()]
    pieces.extend(tx_in.to_bytes() for tx_in in self.tx_ins)
    pieces.append(VarInt(len(self.tx_outs)).to_bytes())
    pieces.extend(tx_out.to_bytes() for tx_out in self.tx_outs)
    pieces.append(self.lock_time)
    return b''.join(pieces)
Tx -> bytes
2.402551
2.143971
1.120608
def calculate_fee(self, input_values):
    ''' Tx, list(int) -> int
    Inputs don't know their value without the whole chain,
    so the caller supplies the spent values.
    '''
    total_out = sum(utils.le2i(tx_out.value) for tx_out in self.tx_outs)
    return sum(input_values) - total_out
Tx, list(int) -> int Inputs don't know their value without the whole chain.
17.561663
5.37447
3.267608
def copy(self, version=None, flag=None, tx_ins=None, tx_outs=None, tx_witnesses=None, lock_time=None):
    ''' Tx, byte-like, byte-like, list(TxIn), list(TxOut), list(InputWitness), byte-like -> Tx
    Makes a copy. Allows over-writing specific pieces;
    None keeps the current value of a field.
    '''
    def pick(override, current):
        return current if override is None else override
    return Tx(
        version=pick(version, self.version),
        flag=pick(flag, self.flag),
        tx_ins=pick(tx_ins, self.tx_ins),
        tx_outs=pick(tx_outs, self.tx_outs),
        tx_witnesses=pick(tx_witnesses, self.tx_witnesses),
        lock_time=pick(lock_time, self.lock_time))
Tx, byte-like, byte-like, list(TxIn), list(TxOut), list(InputWitness), byte-like -> Tx Makes a copy. Allows over-writing specific pieces.
2.573353
1.494668
1.721688
def sighash_single(self, index, script=None, prevout_value=None, anyone_can_pay=False):
    '''
    Tx, int, byte-like, byte-like, bool -> bytearray
    Generates the hash to be signed with SIGHASH_SINGLE
    https://en.bitcoin.it/wiki/OP_CHECKSIG#Procedure_for_Hashtype_SIGHASH_SINGLE
    https://bitcoin.stackexchange.com/questions/3890/for-sighash-single-do-the-outputs-other-than-at-the-input-index-have-8-bytes-or
    https://github.com/petertodd/python-bitcoinlib/blob/051ec4e28c1f6404fd46713c2810d4ebbed38de4/bitcoin/core/script.py#L913-L965
    '''
    if index >= len(self.tx_outs):
        raise NotImplementedError(
            'I refuse to implement the SIGHASH_SINGLE bug.')

    if riemann.network.FORKID is not None:
        # BCH-style forks use the BIP143-derived scheme
        return self._sighash_forkid(index=index,
                                    script=script,
                                    prevout_value=prevout_value,
                                    sighash_type=shared.SIGHASH_SINGLE,
                                    anyone_can_pay=anyone_can_pay)

    if self.is_witness():
        return self.segwit_sighash(
            index=index,
            script=script,
            prevout_value=prevout_value,
            sighash_type=shared.SIGHASH_SINGLE,
            anyone_can_pay=anyone_can_pay)

    copy_tx = self._sighash_prep(index=index, script=script)

    # SIGHASH_SINGLE: outputs are truncated to index + 1 entries;
    # every output except the one at `index` is nulled
    # (value -1, empty script).
    # BUGFIX: the null-out comprehension previously iterated
    # copy_tx.tx_ins, producing len(tx_ins) outputs instead of
    # index + 1, and made the truncation above it dead code.
    copy_tx_outs = [TxOut(value=b'\xff' * 8, output_script=b'')
                    for _ in copy_tx.tx_outs[:index + 1]]  # Null them all
    copy_tx_outs[index] = copy_tx.tx_outs[index]           # Fix the current one

    # Other tx_ins sequence numbers are set to 0
    copy_tx_ins = [tx_in.copy(sequence=b'\x00\x00\x00\x00')
                   for tx_in in copy_tx.tx_ins]            # Set all to 0
    copy_tx_ins[index] = copy_tx.tx_ins[index]             # Fix the current one

    copy_tx = copy_tx.copy(
        tx_ins=copy_tx_ins,
        tx_outs=copy_tx_outs)

    if anyone_can_pay:  # Forward onwards
        return self._sighash_anyone_can_pay(
            index, copy_tx, shared.SIGHASH_SINGLE)

    return self._sighash_final_hashing(copy_tx, shared.SIGHASH_SINGLE)
Tx, int, byte-like, byte-like, bool -> bytearray Sighashes suck Generates the hash to be signed with SIGHASH_SINGLE https://en.bitcoin.it/wiki/OP_CHECKSIG#Procedure_for_Hashtype_SIGHASH_SINGLE https://bitcoin.stackexchange.com/questions/3890/for-sighash-single-do-the-outputs-other-than-at-the-input-index-have-8-bytes-or https://github.com/petertodd/python-bitcoinlib/blob/051ec4e28c1f6404fd46713c2810d4ebbed38de4/bitcoin/core/script.py#L913-L965
3.77433
2.237849
1.686588
def segwit_sighash(self, index, script, prevout_value=None, sighash_type=None, anyone_can_pay=False):
    '''
    Builds and hashes the BIP143-style sighash preimage.
    https://github.com/bitcoin/bips/blob/master/bip-0143.mediawiki
    '''
    data = ByteData()

    # 1. nVersion of the transaction (4-byte little endian)
    data += self.version

    # 2. hashPrevouts (32-byte hash)
    data += self._hash_prevouts(anyone_can_pay=anyone_can_pay)

    # 3. hashSequence (32-byte hash)
    data += self._hash_sequence(sighash_type=sighash_type,
                                anyone_can_pay=anyone_can_pay)

    # 4. outpoint (32-byte hash + 4-byte little endian)
    data += self.tx_ins[index].outpoint

    # 5. scriptCode of the input (serialized as scripts inside CTxOuts)
    data += self._adjusted_script_code(script=script)

    # 6. value of the output spent by this input (8-byte little endian)
    data += prevout_value

    # 7. nSequence of the input (4-byte little endian)
    data += self.tx_ins[index].sequence

    # 8. hashOutputs (32-byte hash)
    data += self._hash_outputs(index=index, sighash_type=sighash_type)

    # 9. nLocktime of the transaction (4-byte little endian)
    data += self.lock_time

    # 10. sighash type of the signature (4-byte little endian)
    data += self._segwit_sighash_adjustment(sighash_type=sighash_type,
                                            anyone_can_pay=anyone_can_pay)

    return utils.hash256(data.to_bytes())
this function sets up sighash in BIP143 style https://github.com/bitcoin/bips/blob/master/bip-0143.mediawiki https://ricette.giallozafferano.it/Spaghetti-alla-Norma.html
2.654376
1.969945
1.347436
def _sighash_anyone_can_pay(self, index, copy_tx, sighash_type):
    ''' int, Tx, int -> bytes
    Applies SIGHASH_ANYONECANPAY procedure.
    Should be called by another SIGHASH procedure.
    Not on its own.
    https://en.bitcoin.it/wiki/OP_CHECKSIG#Procedure_for_Hashtype_SIGHASH_ANYONECANPAY
    '''
    # The txCopy input vector is resized to a length of one:
    # only the input being signed survives.
    truncated = copy_tx.copy(tx_ins=[copy_tx.tx_ins[index]])
    return self._sighash_final_hashing(
        truncated, sighash_type | shared.SIGHASH_ANYONECANPAY)
int, byte-like, Tx, int -> bytes Applies SIGHASH_ANYONECANPAY procedure. Should be called by another SIGHASH procedure. Not on its own. https://en.bitcoin.it/wiki/OP_CHECKSIG#Procedure_for_Hashtype_SIGHASH_ANYONECANPAY
6.795709
2.933728
2.316408
def _sighash_final_hashing(self, copy_tx, sighash_type):
    ''' Tx, int -> bytes
    Serialize the prepared tx with the 4-byte LE sighash type
    appended and return the double-SHA256 to be signed.
    '''
    preimage = ByteData()
    preimage += copy_tx.to_bytes()
    preimage += utils.i2le_padded(sighash_type, 4)
    return utils.hash256(preimage.to_bytes())
Tx, int -> bytes Returns the hash that should be signed https://en.bitcoin.it/wiki/OP_CHECKSIG#Procedure_for_Hashtype_SIGHASH_ANYONECANPAY
5.364676
2.600601
2.06286
def _hash_sequence(self, sighash_type, anyone_can_pay):
    '''BIP143 hashSequence implementation

    Args:
        sighash_type (int): SIGHASH_SINGLE or SIGHASH_ALL
        anyone_can_pay (bool): true if ANYONECANPAY should be set
    Returns:
        (bytes): the hashSequence, a 32 byte hash
    '''
    if not anyone_can_pay and sighash_type != shared.SIGHASH_SINGLE:
        # hashSequence is the double SHA256 of nSequence of all inputs
        seqs = ByteData()
        for tx_in in self.tx_ins:
            seqs += tx_in.sequence
        return utils.hash256(seqs.to_bytes())
    # With ANYONECANPAY or SINGLE set, hashSequence is the
    # zero uint256 (0x0000...0000)
    return b'\x00' * 32
BIP143 hashSequence implementation Args: sighash_type (int): SIGHASH_SINGLE or SIGHASH_ALL anyone_can_pay (bool): true if ANYONECANPAY should be set Returns: (bytes): the hashSequence, a 32 byte hash
4.67877
3.122375
1.498465
def _adjusted_script_code(self, script):
    ''' Checks if the script code passed in to the sighash function
    is already length-prepended.
    This will break if there's a redeem script that's just a pushdata.
    That won't happen in practice.

    Args:
        script (bytes): the spend script
    Returns:
        (bytes): the length-prepended script (if necessary)
    '''
    # First byte already encoding the remaining length means the
    # caller pre-prepended it — hand it back untouched.
    if script[0] == len(script) - 1:
        return script
    prefixed = ByteData()
    prefixed += VarInt(len(script))
    prefixed += script
    return prefixed
Checks if the script code pased in to the sighash function is already length-prepended This will break if there's a redeem script that's just a pushdata That won't happen in practice Args: script (bytes): the spend script Returns: (bytes): the length-prepended script (if necessary)
8.663607
1.881953
4.60352
def _hash_outputs(self, index, sighash_type):
    '''BIP143 hashOutputs implementation

    Args:
        index (int): index of input being signed
        sighash_type (int): SIGHASH_SINGLE or SIGHASH_ALL
    Returns:
        (bytes): the hashOutputs, a 32 byte hash
    '''
    if sighash_type == shared.SIGHASH_ALL:
        # ALL: double SHA256 of every serialized output
        # (amount paired with scriptPubKey)
        serialized = ByteData()
        for tx_out in self.tx_outs:
            serialized += tx_out.to_bytes()
        return utils.hash256(serialized.to_bytes())
    if (sighash_type == shared.SIGHASH_SINGLE
            and index < len(self.tx_outs)):
        # SINGLE with a matching output: hash only that output
        return utils.hash256(self.tx_outs[index].to_bytes())
    # Otherwise the spec says a zero uint256 — we refuse instead
    raise NotImplementedError(
        'I refuse to implement the SIGHASH_SINGLE bug.')
BIP143 hashOutputs implementation Args: index (int): index of input being signed sighash_type (int): SIGHASH_SINGLE or SIGHASH_ALL Returns: (bytes): the hashOutputs, a 32 byte hash
3.790066
2.939108
1.289529
def sighash_single(self, index, script=None, anyone_can_pay=False):
    '''
    Decred SIGHASH_SINGLE.
    https://github.com/decred/dcrd/blob/master/txscript/script.go
    '''
    copy_tx = self._sighash_prep(
        index=index,
        script=script)

    try:
        # SIGHASH_SINGLE: truncate outputs to index + 1 entries and
        # null all but the one at `index` (value -1, empty script).
        # BUGFIX: the null-out comprehension previously iterated
        # copy_tx.tx_ins, producing len(tx_ins) outputs instead of
        # index + 1 and making the truncation slice dead code.
        copy_tx_outs = [TxOut(value=b'\xff' * 8, output_script=b'')
                        for _ in copy_tx.tx_outs[:index + 1]]
        copy_tx_outs[index] = copy_tx.tx_outs[index]
    except IndexError:
        raise NotImplementedError(
            'I refuse to implement the SIGHASH_SINGLE bug.')

    # Zero the sequence of every other input
    copy_tx_ins = [tx_in.copy(sequence=b'\x00\x00\x00\x00')
                   for tx_in in copy_tx.tx_ins]
    copy_tx_ins[index] = copy_tx.tx_ins[index]
    copy_tx = copy_tx.copy(tx_ins=copy_tx_ins, tx_outs=copy_tx_outs)

    if anyone_can_pay:
        return self._sighash_anyone_can_pay(
            index=index, copy_tx=copy_tx,
            sighash_type=shared.SIGHASH_SINGLE)

    return self._sighash_final_hashing(
        index=index, copy_tx=copy_tx,
        sighash_type=shared.SIGHASH_SINGLE)
https://github.com/decred/dcrd/blob/master/txscript/script.go
2.634091
2.437807
1.080517
def sighash_all(self, index, script=None, anyone_can_pay=False):
    '''
    Decred SIGHASH_ALL.
    https://gist.github.com/davecgh/b00ec6e11f73620c3deddf160353961c
    https://github.com/decred/dcrd/blob/master/txscript/script.go
    '''
    copy_tx = self._sighash_prep(index, script)
    sighash_args = dict(index=index,
                        copy_tx=copy_tx,
                        sighash_type=shared.SIGHASH_ALL)
    if anyone_can_pay:
        return self._sighash_anyone_can_pay(**sighash_args)
    return self._sighash_final_hashing(**sighash_args)
https://gist.github.com/davecgh/b00ec6e11f73620c3deddf160353961c https://github.com/decred/dcrd/blob/master/txscript/script.go
4.538771
2.128022
2.132858
def serialize(script_string):
    ''' str -> bytearray
    Serialize a human-readable script string into bytes.
    Opcode tokens map through the (network-overridable) opcode
    tables; any other token is treated as hex data and gets the
    appropriate push opcode prepended.
    '''
    out = bytearray()
    for token in script_string.split():
        if token in ('OP_CODESEPARATOR', 'OP_PUSHDATA4'):
            raise NotImplementedError('{} is a bad idea.'.format(token))
        if token in riemann.network.CODE_TO_INT_OVERWRITE:
            # network-specific opcode override wins
            out.append(riemann.network.CODE_TO_INT_OVERWRITE[token])
        elif token in CODE_TO_INT:
            out.append(CODE_TO_INT[token])
        else:
            blob = bytes.fromhex(token)
            blob_len = len(blob)
            if blob_len <= 75:
                # direct push opcode encodes the length itself
                out.append(CODE_TO_INT['OP_PUSH_{}'.format(blob_len)])
            elif blob_len <= 255:
                out.append(CODE_TO_INT['OP_PUSHDATA1'])
                out.extend(utils.i2le(blob_len))
            elif blob_len <= 1000:
                out.append(CODE_TO_INT['OP_PUSHDATA2'])
                out.extend(utils.i2le_padded(blob_len, 2))
            else:
                raise NotImplementedError(
                    'Hex string too long to serialize.')
            out.extend(blob)
    return out
str -> bytearray
2.201247
2.13245
1.032262
def deserialize(serialized_script):
    ''' bytearray -> str
    Turn serialized script bytes back into a human-readable string.
    '''
    deserialized = []
    i = 0
    while i < len(serialized_script):
        current_byte = serialized_script[i]
        if current_byte == 0xab:
            raise NotImplementedError('OP_CODESEPARATOR is a bad idea.')
        if current_byte <= 75 and current_byte != 0:
            # direct push: opcode value is the data length
            deserialized.append(
                serialized_script[i + 1: i + 1 + current_byte].hex())
            i += 1 + current_byte
            if i > len(serialized_script):
                raise IndexError(
                    'Push {} caused out of bounds exception.'
                    .format(current_byte))
        elif current_byte == 76:
            # OP_PUSHDATA1: next byte is the hex blob length
            blob_len = serialized_script[i + 1]
            deserialized.append(
                serialized_script[i + 2: i + 2 + blob_len].hex())
            i += 2 + blob_len
        elif current_byte == 77:
            # OP_PUSHDATA2: next 2 LE bytes are the hex blob length
            blob_len = utils.le2i(serialized_script[i + 1: i + 3])
            deserialized.append(
                serialized_script[i + 3: i + 3 + blob_len].hex())
            i += 3 + blob_len
        elif current_byte == 78:
            raise NotImplementedError('OP_PUSHDATA4 is a bad idea.')
        else:
            # plain opcode: network overrides take precedence
            if current_byte in riemann.network.INT_TO_CODE_OVERWRITE:
                deserialized.append(
                    riemann.network.INT_TO_CODE_OVERWRITE[current_byte])
            elif current_byte in INT_TO_CODE:
                deserialized.append(INT_TO_CODE[current_byte])
            else:
                raise ValueError(
                    'Unsupported opcode. '
                    'Got 0x%x' % serialized_script[i])
            i += 1
    return ' '.join(deserialized)
bytearray -> str
2.743641
2.681999
1.022984
def _hsig_input(self, index):
    ''' int -> bytes
    Serialized inputs for the hsig hash of joinsplit `index`.
    '''
    joinsplit = self.tx_joinsplits[index]
    data = z.ZcashByteData()
    data += joinsplit.random_seed
    data += joinsplit.nullifiers
    data += self.joinsplit_pubkey
    return data.to_bytes()
inputs for the hsig hash
7.618648
6.894496
1.105033
def _primary_input(self, index):
    ''' int -> bytes
    Serialized primary input for the zkproof of joinsplit `index`.
    '''
    joinsplit = self.tx_joinsplits[index]
    data = z.ZcashByteData()
    data += joinsplit.anchor
    data += joinsplit.nullifiers
    data += joinsplit.commitments
    data += joinsplit.vpub_old
    data += joinsplit.vpub_new
    data += self.hsigs[index]
    data += joinsplit.vmacs
    return data.to_bytes()
Primary input for the zkproof
4.862043
3.849584
1.263005
def from_bytes(SproutTx, byte_string):
    ''' byte-like -> SproutTx
    Parse a SproutTx from raw bytes.
    '''
    version = byte_string[0:4]
    # inputs: VarInt count followed by serialized TxIns
    tx_ins = []
    tx_ins_num = shared.VarInt.from_bytes(byte_string[4:])
    current = 4 + len(tx_ins_num)
    for _ in range(tx_ins_num.number):
        tx_in = TxIn.from_bytes(byte_string[current:])
        current += len(tx_in)
        tx_ins.append(tx_in)
    # outputs: VarInt count followed by serialized TxOuts
    tx_outs = []
    tx_outs_num = shared.VarInt.from_bytes(byte_string[current:])
    current += len(tx_outs_num)
    for _ in range(tx_outs_num.number):
        tx_out = TxOut.from_bytes(byte_string[current:])
        current += len(tx_out)
        tx_outs.append(tx_out)
    lock_time = byte_string[current:current + 4]
    current += 4
    tx_joinsplits = None
    joinsplit_pubkey = None
    joinsplit_sig = None
    if utils.le2i(version) == 2:  # If we expect joinsplits
        tx_joinsplits = []
        tx_joinsplits_num = shared.VarInt.from_bytes(byte_string[current:])
        current += len(tx_joinsplits_num)
        for _ in range(tx_joinsplits_num.number):
            joinsplit = z.SproutJoinsplit.from_bytes(byte_string[current:])
            current += len(joinsplit)
            tx_joinsplits.append(joinsplit)
        joinsplit_pubkey = byte_string[current:current + 32]
        current += 32
        # last field parsed — `current` intentionally not advanced
        joinsplit_sig = byte_string[current:current + 64]
    return SproutTx(
        version=version,
        tx_ins=tx_ins,
        tx_outs=tx_outs,
        lock_time=lock_time,
        tx_joinsplits=tx_joinsplits,
        joinsplit_pubkey=joinsplit_pubkey,
        joinsplit_sig=joinsplit_sig)
byte-like -> SproutTx
1.818377
1.781573
1.020658
def calculate_fee(self, input_values):
    ''' Tx, list(int) -> int
    Fee = transparent in + joinsplit vpub_new
        - transparent out - joinsplit vpub_old
    '''
    in_total = sum(input_values)
    out_total = sum(utils.le2i(o.value) for o in self.tx_outs)
    for joinsplit in self.tx_joinsplits:
        # vpub_new enters the transparent pool, vpub_old leaves it
        in_total += utils.le2i(joinsplit.vpub_new)
        out_total += utils.le2i(joinsplit.vpub_old)
    return in_total - out_total
Tx, list(int) -> int
4.902649
3.881835
1.262972
def copy(self, version=None, tx_ins=None, tx_outs=None, lock_time=None, tx_joinsplits=None, joinsplit_pubkey=None, joinsplit_sig=None):
    ''' SproutTx, ... -> SproutTx
    Makes a copy. Any keyword given overrides the corresponding
    field; None keeps the current value.
    '''
    def pick(override, current):
        return current if override is None else override
    return SproutTx(
        version=pick(version, self.version),
        tx_ins=pick(tx_ins, self.tx_ins),
        tx_outs=pick(tx_outs, self.tx_outs),
        lock_time=pick(lock_time, self.lock_time),
        tx_joinsplits=pick(tx_joinsplits, self.tx_joinsplits),
        joinsplit_pubkey=pick(joinsplit_pubkey, self.joinsplit_pubkey),
        joinsplit_sig=pick(joinsplit_sig, self.joinsplit_sig))
SproutTx, ... -> Tx Makes a copy. Allows over-writing specific pieces.
2.083966
1.569081
1.328145
def _sighash_prep(self, index, script):
    ''' SproutTx, int, byte-like -> SproutTx
    Performs the sighash setup described here:
    https://en.bitcoin.it/wiki/OP_CHECKSIG#How_it_works
    https://bitcoin.stackexchange.com/questions/3374/how-to-redeem-a-basic-tx
    We save on complexity by refusing to support OP_CODESEPARATOR
    '''
    if not self.tx_ins:
        return self.copy(joinsplit_sig=b'')
    # blank every input's scripts...
    blanked_ins = [tx_in.copy(stack_script=b'', redeem_script=b'')
                   for tx_in in self.tx_ins]
    # ...then install the subscript on the input being signed
    # (NB: it is prepended with its length as a VarInt at
    # serialization time)
    blanked_ins[index] = blanked_ins[index].copy(
        stack_script=b'', redeem_script=script)
    return self.copy(tx_ins=blanked_ins, joinsplit_sig=b'')
SproutTx, int, byte-like -> SproutTx Sighashes suck Performs the sighash setup described here: https://en.bitcoin.it/wiki/OP_CHECKSIG#How_it_works https://bitcoin.stackexchange.com/questions/3374/how-to-redeem-a-basic-tx We save on complexity by refusing to support OP_CODESEPARATOR
7.266994
3.459435
2.10063
def sighash_all(self, index=0, script=None, prevout_value=None, anyone_can_pay=False):
    '''
    SproutTx, int, byte-like, byte-like, bool -> bytearray
    Generates the hash to be signed with SIGHASH_ALL
    https://en.bitcoin.it/wiki/OP_CHECKSIG#Hashtype_SIGHASH_ALL_.28default.29
    '''
    if riemann.network.FORKID is not None:
        # forked networks use the BIP143-derived scheme
        return self._sighash_forkid(index=index,
                                    script=script,
                                    prevout_value=prevout_value,
                                    sighash_type=shared.SIGHASH_ALL,
                                    anyone_can_pay=anyone_can_pay)
    copy_tx = self._sighash_prep(index=index, script=script)
    if anyone_can_pay:
        return self._sighash_anyone_can_pay(
            index=index, copy_tx=copy_tx, sighash_type=shared.SIGHASH_ALL)
    return self._sighash_final_hashing(copy_tx, shared.SIGHASH_ALL)
SproutTx, int, byte-like, byte-like, bool -> bytearray Sighashes suck Generates the hash to be signed with SIGHASH_ALL https://en.bitcoin.it/wiki/OP_CHECKSIG#Hashtype_SIGHASH_ALL_.28default.29
4.086933
2.277052
1.794835
def _sighash_final_hashing(self, copy_tx, sighash_type):
    ''' SproutTx, int -> bytes
    Serialize the prepared tx with the 4-byte LE sighash type
    appended and return the double-SHA256 to be signed.
    '''
    preimage = z.ZcashByteData()
    preimage += copy_tx.to_bytes()
    preimage += utils.i2le_padded(sighash_type, 4)
    return utils.hash256(preimage.to_bytes())
SproutTx, int -> bytes Returns the hash that should be signed https://en.bitcoin.it/wiki/OP_CHECKSIG#Procedure_for_Hashtype_SIGHASH_ANYONECANPAY
6.056089
2.751693
2.200859
def segwit_encode(hrp, witver, witprog):
    '''Encode a segwit address.

    Returns None if the result fails to round-trip through
    segwit_decode (i.e. is not a valid address).
    '''
    candidate = bech32_encode(hrp, [witver] + convertbits(witprog, 8, 5))
    if segwit_decode(hrp, candidate) == (None, None):
        return None
    return candidate
Encode a segwit address.
3.492856
3.752781
0.930738
''' Prevents any future changes to the object ''' self._bytes = bytes(self._bytes) self.__immutable = True
def _make_immutable(self)
Prevents any future changes to the object
10.94229
5.457185
2.005116
def find(self, substring):
    ''' byte-like -> int
    Finds the index of substring; -1 if absent.
    '''
    # normalize ByteData to raw bytes before searching
    needle = (substring.to_bytes()
              if isinstance(substring, ByteData)
              else substring)
    return self._bytes.find(needle)
byte-like -> int Finds the index of substring
9.202124
5.609523
1.640447
def validate_bytes(data, length=4):
    '''
    Raises ValueError if data is not bytes.
    Raises ValueError if len(data) is not length.
    Length may be None for unknown lengths (e.g. scripts).
    length=None will allow 0 length data.
    '''
    if not isinstance(data, (ByteData, bytes, bytearray)):
        raise ValueError('Expected byte-like object. '
                         'Got: {}'.format(type(data)))
    if length is not None and len(data) != length:
        raise ValueError('Expected byte-like object with length {}. '
                         'Got {} with length {}.'
                         .format(length, type(data), len(data)))
Raises ValueError if data is not bytes. Raises ValueError if len(data) is not length. Length may be None for unknown lengths (e.g. scripts). length=None will allow 0 length data.
3.584923
2.079016
1.724336
def from_bytes(VarInt, byte_string):
    ''' byte-like -> VarInt
    accepts arbitrary length input, gets a VarInt off the front
    '''
    num = byte_string
    if num[0] <= 0xfc:
        # single-byte encoding
        num = num[0:1]
        non_compact = False
    elif num[0] == 0xfd:
        # 2-byte payload; non-compact if it would fit in fewer bytes
        num = num[1:3]
        non_compact = (num[-1:] == b'\x00')
    elif num[0] == 0xfe:
        num = num[1:5]
        non_compact = (num[-2:] == b'\x00\x00')
    elif num[0] == 0xff:
        num = num[1:9]
        non_compact = (num[-4:] == b'\x00\x00\x00\x00')
    if len(num) not in [1, 2, 4, 8]:
        # byte_string was too short for the advertised width
        raise ValueError('Malformed VarInt. Got: {}'
                         .format(byte_string.hex()))
    if (non_compact
            and ('overwinter' in riemann.get_current_network_name()
                 or 'sapling' in riemann.get_current_network_name())):
        # Zcash Overwinter/Sapling require canonical (compact) VarInts
        raise ValueError('VarInt must be compact. Got: {}'
                         .format(byte_string.hex()))
    # NOTE(review): the conditional binds as
    # (len(num) + 1) if non_compact else 0 — so compact encodings pass
    # length=0, presumably letting the constructor pick the minimal
    # width. Confirm against VarInt.__init__.
    ret = VarInt(
        utils.le2i(num),
        length=len(num) + 1 if non_compact else 0)
    return ret
byte-like -> VarInt accepts arbitrary length input, gets a VarInt off the front
3.198162
2.749964
1.162983
def calculate_fee(self, input_values):
    ''' SaplingTx, list(int) -> int
    Fee = transparent in + joinsplit vpub_new + shielded net
        - transparent out - joinsplit vpub_old
    '''
    total_in = sum(input_values)
    total_out = sum(utils.le2i(o.value) for o in self.tx_outs)
    # net value entering the transparent pool from the shielded
    # pool (signed little-endian)
    shielded_net = utils.le2i(self.value_balance, signed=True)
    for joinsplit in self.tx_joinsplits:
        total_in += utils.le2i(joinsplit.vpub_new)
        total_out += utils.le2i(joinsplit.vpub_old)
    return total_in + shielded_net - total_out
SaplingTx, list(int) -> int
6.285395
4.722256
1.331015
def copy(self, tx_ins=None, tx_outs=None, lock_time=None, expiry_height=None, value_balance=None, tx_shielded_spends=None, tx_shielded_outputs=None, tx_joinsplits=None, joinsplit_pubkey=None, joinsplit_sig=None, binding_sig=None):
    ''' SaplingTx, ... -> SaplingTx
    Makes a copy. Any keyword given overrides the corresponding
    field; None keeps the current value.
    '''
    def pick(override, current):
        return current if override is None else override
    return SaplingTx(
        tx_ins=pick(tx_ins, self.tx_ins),
        tx_outs=pick(tx_outs, self.tx_outs),
        lock_time=pick(lock_time, self.lock_time),
        expiry_height=pick(expiry_height, self.expiry_height),
        value_balance=pick(value_balance, self.value_balance),
        tx_shielded_spends=pick(
            tx_shielded_spends, self.tx_shielded_spends),
        tx_shielded_outputs=pick(
            tx_shielded_outputs, self.tx_shielded_outputs),
        tx_joinsplits=pick(tx_joinsplits, self.tx_joinsplits),
        joinsplit_pubkey=pick(joinsplit_pubkey, self.joinsplit_pubkey),
        joinsplit_sig=pick(joinsplit_sig, self.joinsplit_sig),
        binding_sig=pick(binding_sig, self.binding_sig))
SaplingTx, ... -> SaplingTx Makes a copy. Allows over-writing specific pieces.
1.534102
1.305508
1.1751
def from_bytes(SaplingTx, byte_string):
    ''' byte-like -> SaplingTx
    Parse a SaplingTx from raw bytes.
    '''
    # Sapling header (version 4 with overwintered bit) and group id
    # are fixed constants
    header = byte_string[0:4]
    group_id = byte_string[4:8]

    if header != b'\x04\x00\x00\x80' or group_id != b'\x85\x20\x2f\x89':
        raise ValueError(
            'Bad header or group ID. Expected {} and {}. Got: {} and {}'
            .format(b'\x04\x00\x00\x80'.hex(),
                    b'\x85\x20\x2f\x89'.hex(),
                    header.hex(),
                    group_id.hex()))

    # transparent inputs: VarInt count + serialized TxIns
    tx_ins = []
    tx_ins_num = shared.VarInt.from_bytes(byte_string[8:])
    current = 8 + len(tx_ins_num)
    for _ in range(tx_ins_num.number):
        tx_in = TxIn.from_bytes(byte_string[current:])
        current += len(tx_in)
        tx_ins.append(tx_in)

    # transparent outputs: VarInt count + serialized TxOuts
    tx_outs = []
    tx_outs_num = shared.VarInt.from_bytes(byte_string[current:])
    current += len(tx_outs_num)
    for _ in range(tx_outs_num.number):
        tx_out = TxOut.from_bytes(byte_string[current:])
        current += len(tx_out)
        tx_outs.append(tx_out)

    lock_time = byte_string[current:current + 4]
    current += 4

    expiry_height = byte_string[current:current + 4]
    current += 4

    # signed net value flowing between pools
    value_balance = byte_string[current:current + 8]
    current += 8

    tx_shielded_spends = []
    shielded_spends_num = shared.VarInt.from_bytes(byte_string[current:])
    current += len(shielded_spends_num)
    for _ in range(shielded_spends_num.number):
        ss = SaplingShieldedSpend.from_bytes(byte_string[current:])
        current += len(ss)
        tx_shielded_spends.append(ss)

    tx_shielded_outputs = []
    shielded_outputs_num = shared.VarInt.from_bytes(byte_string[current:])
    current += len(shielded_outputs_num)
    for _ in range(shielded_outputs_num.number):
        so = SaplingShieldedOutput.from_bytes(byte_string[current:])
        current += len(so)
        tx_shielded_outputs.append(so)

    tx_joinsplits = []
    tx_joinsplits_num = shared.VarInt.from_bytes(byte_string[current:])
    # BUGFIX: previously advanced by len(tx_outs_num) — the wrong
    # VarInt. Harmless only while both counts serialize to the same
    # width; off-by-N otherwise.
    current += len(tx_joinsplits_num)
    for _ in range(tx_joinsplits_num.number):
        tx_joinsplit = SaplingJoinsplit.from_bytes(
            byte_string[current:])
        current += len(tx_joinsplit)
        tx_joinsplits.append(tx_joinsplit)

    if len(tx_joinsplits) > 0:
        # joinsplit pubkey and signature only present with joinsplits
        joinsplit_pubkey = byte_string[current:current + 32]
        current += 32
        joinsplit_sig = byte_string[current:current + 64]
        current += 64
    else:
        joinsplit_pubkey = None
        joinsplit_sig = None

    if len(tx_shielded_spends) + len(tx_shielded_outputs) > 0:
        # binding sig only present with shielded components
        binding_sig = byte_string[current:current + 64]
        current += 64
    else:
        binding_sig = None

    return SaplingTx(
        tx_ins=tx_ins,
        tx_outs=tx_outs,
        lock_time=lock_time,
        expiry_height=expiry_height,
        value_balance=value_balance,
        tx_shielded_spends=tx_shielded_spends,
        tx_shielded_outputs=tx_shielded_outputs,
        tx_joinsplits=tx_joinsplits,
        joinsplit_pubkey=joinsplit_pubkey,
        joinsplit_sig=joinsplit_sig,
        binding_sig=binding_sig)
byte-like -> SaplingTx
1.527617
1.511956
1.010358
def sighash(self, sighash_type, index=0, joinsplit=False, script_code=None, anyone_can_pay=False, prevout_value=None):
    ''' ZIP243 Sapling sighash
    https://github.com/zcash/zips/blob/master/zip-0243.rst
    '''
    if joinsplit and anyone_can_pay:
        raise ValueError('ANYONECANPAY can\'t be used with joinsplits')

    data = z.ZcashByteData()

    # fixed header + version group id
    data += self.header
    data += self.group_id

    # BIP143-style intermediate hashes, extended per ZIP243
    data += self._hash_prevouts(anyone_can_pay)
    data += self._hash_sequence(sighash_type, anyone_can_pay)
    data += self._hash_outputs(sighash_type, index)
    data += self._hash_joinsplits()
    data += self._hash_shielded_spends()
    data += self._hash_shielded_outputs()

    data += self.lock_time
    data += self.expiry_height
    data += self.value_balance

    if anyone_can_pay:
        sighash_type = sighash_type | shared.SIGHASH_ANYONECANPAY
    data += utils.i2le_padded(sighash_type, 4)

    if not joinsplit:
        # per-input data only when signing a transparent input
        data += self.tx_ins[index].outpoint
        data += script_code
        data += prevout_value
        data += self.tx_ins[index].sequence

    # personalized BLAKE2b; trailing 4 bytes are the LE consensus
    # branch id
    return utils.blake2b(
        data=data.to_bytes(),
        digest_size=32,
        person=b'ZcashSigHash' + bytes.fromhex('bb09b876'))
ZIP243 https://github.com/zcash/zips/blob/master/zip-0243.rst
3.633491
3.132227
1.160034
def encode(data, checksum=True):
    '''Convert binary to base58 using BASE58_ALPHABET.

    When checksum is True, 4 bytes of double-SHA256 are appended
    before encoding.
    '''
    payload = data + utils.hash256(data)[:4] if checksum else data
    v, prefix = to_long(256, lambda x: x, iter(payload))
    encoded = from_long(v, prefix, BASE58_BASE,
                        lambda i: BASE58_ALPHABET[i])
    return encoded.decode("utf8")
Convert binary to base58 using BASE58_ALPHABET.
7.044786
5.670221
1.242418
def decode(s, checksum=True):
    '''Convert base58 to binary using BASE58_ALPHABET.

    When checksum is True, the trailing 4 checksum bytes are
    verified and stripped; ValueError on mismatch.
    '''
    v, prefix = to_long(
        BASE58_BASE, lambda c: BASE58_LOOKUP[c], s.encode("utf8"))
    data = from_long(v, prefix, 256, lambda x: x)
    if not checksum:
        return data
    payload, the_hash = data[:-4], data[-4:]
    if utils.hash256(payload)[:4] != the_hash:
        raise ValueError("hashed base58 has bad checksum %s" % s)
    return payload
Convert base58 to binary using BASE58_ALPHABET.
5.996397
5.74427
1.043892
def from_long(v, prefix, base, charset):
    '''The inverse of to_long. Convert an integer to an arbitrary base.

    v: the integer value to convert
    prefix: the number of prefixed 0s to include
    base: the new base
    charset: an array indicating a printable character to use for
        each value.
    '''
    digits = []
    while v > 0:
        try:
            v, mod = divmod(v, base)
            digits.append(charset(mod))
        except Exception:
            raise ValueError(
                "can't convert to character corresponding to %d" % mod)
    # leading zeros carry no numeric value, so add them explicitly
    digits.extend(charset(0) for _ in range(prefix))
    return bytes(reversed(digits))
The inverse of to_long. Convert an integer to an arbitrary base. v: the integer value to convert prefix: the number of prefixed 0s to include base: the new base charset: an array indicating a printable character to use for each value.
3.968353
3.815641
1.040023
def to_long(base, lookup_f, s):
    '''Convert an array to a (possibly bignum) integer, along with
    a prefix value of how many prefixed zeros there are.

    base: the source base
    lookup_f: a function to convert an element of s to a value
        between 0 and base-1.
    s: the value to convert
    '''
    acc = 0
    leading_zeros = 0
    for item in s:
        acc *= base
        try:
            acc += lookup_f(item)
        except Exception:
            raise ValueError("bad character %s in string %s" % (item, s))
        # still zero after folding in this digit => another leading zero
        if acc == 0:
            leading_zeros += 1
    return acc, leading_zeros
Convert an array to a (possibly bignum) integer, along with a prefix value of how many prefixed zeros there are. base: the source base lookup_f: a function to convert an element of s to a value between 0 and base-1. s: the value to convert
2.908952
2.76249
1.053018
def make_sh_output_script(script_string, witness=False):
    ''' str -> bytearray
    Build a (witness) script-hash output script from a
    human-readable script string.
    '''
    if witness and not riemann.network.SEGWIT:
        raise ValueError(
            'Network {} does not support witness scripts.'
            .format(riemann.get_current_network_name()))
    return make_sh_script_pubkey(
        script_bytes=serialization.serialize(script_string),
        witness=witness)
str -> bytearray
5.608117
4.583373
1.223579
def make_pkh_output_script(pubkey, witness=False):
    ''' bytearray -> bytearray
    Build a (witness) pubkey-hash output script from a raw pubkey.
    '''
    if witness and not riemann.network.SEGWIT:
        raise ValueError(
            'Network {} does not support witness scripts.'
            .format(riemann.get_current_network_name()))

    output_script = bytearray()

    if type(pubkey) is not bytearray and type(pubkey) is not bytes:
        raise ValueError('Unknown pubkey format. '
                         'Expected bytes. Got: {}'.format(type(pubkey)))

    pubkey_hash = utils.hash160(pubkey)

    if witness:
        # witness program: version-0 prefix + 20-byte hash
        output_script.extend(riemann.network.P2WPKH_PREFIX)
        output_script.extend(pubkey_hash)
    else:
        # OP_DUP OP_HASH160 OP_PUSH_20 (0x14 pushes 20 bytes)
        output_script.extend(b'\x76\xa9\x14')
        output_script.extend(pubkey_hash)
        output_script.extend(b'\x88\xac')  # OP_EQUALVERIFY OP_CHECKSIG
    return output_script
bytearray -> bytearray
2.692927
2.548875
1.056516
def _make_output(value, output_script, version=None):
    ''' byte-like, byte-like -> TxOut
    Dispatch to the network-appropriate TxOut type.
    '''
    if 'decred' not in riemann.get_current_network_name():
        return tx.TxOut(value=value, output_script=output_script)
    # Decred outputs additionally carry a script version
    return tx.DecredTxOut(
        value=value, version=version, output_script=output_script)
byte-like, byte-like -> TxOut
6.039622
4.210248
1.434505
def make_sh_output(value, output_script, witness=False):
    '''Build a script-hash TxOut from an integer value and a script string.

    int, str -> TxOut
    '''
    serialized_value = utils.i2le_padded(value, 8)  # 8-byte LE value
    pk_script = make_sh_output_script(output_script, witness)
    return _make_output(value=serialized_value, output_script=pk_script)
int, str -> TxOut
7.369151
4.936055
1.492923
def make_pkh_output(value, pubkey, witness=False):
    '''Build a pubkey-hash TxOut from an integer value and a public key.

    int, bytearray -> TxOut
    '''
    pk_script = make_pkh_output_script(pubkey, witness)
    return _make_output(
        value=utils.i2le_padded(value, 8),  # 8-byte LE value
        output_script=pk_script)
int, bytearray -> TxOut
6.446026
4.624394
1.393918
def make_op_return_output(data):
    '''Generates OP_RETURN output for data less than 78 bytes.

    If data is 76 or 77 bytes, OP_PUSHDATA1 is included:
    <OP_RETURN><OP_PUSHDATA1><data len><data>

    If data is less than 76 bytes, OP_PUSHDATA1 is not included:
    <OP_RETURN><data len><data>

    80 bytes is the default setting for an OP_RETURN output script.
    https://github.com/bitpay/bitcore/issues/1389

    Args:
        data (bytes): data included in output
    Returns:
        (TxOut): TxOut object with OP_RETURN output
    Raises:
        ValueError: if data exceeds 77 bytes
    '''
    data_len = len(data)
    if data_len > 77:  # 77 bytes is the limit
        raise ValueError('Data is too long. Expected <= 77 bytes')

    pk_script = bytearray(b'\x6a')  # OP_RETURN
    if data_len > 75:
        # only lengths 76 and 77 reach here; they need OP_PUSHDATA1
        pk_script.append(0x4c)
    pk_script.append(data_len)  # one byte for the data length
    pk_script += data
    return _make_output(utils.i2le_padded(0, 8), pk_script)
Generates OP_RETURN output for data less than 78 bytes. If data is 76 or 77 bytes, OP_PUSHDATA1 is included: <OP_RETURN><OP_PUSHDATA1><data len><data> If data is less than 76 bytes, OP_PUSHDATA1 is not included: <OP_RETURN><data len><data> 80 bytes is the default setting for an OP_RETURN output script. https://github.com/bitpay/bitcore/issues/1389 Args: data (bytes): data included in output Returns: (TxOut): TxOut object with OP_RETURN output
3.977295
2.051568
1.938661
def make_outpoint(tx_id_le, index, tree=None):
    '''Build a network-appropriate Outpoint.

    byte-like, int, int -> Outpoint
    '''
    index_bytes = utils.i2le_padded(index, 4)
    if 'decred' not in riemann.get_current_network_name():
        return tx.Outpoint(tx_id=tx_id_le, index=index_bytes)
    # Decred outpoints also name the tree the referenced output lives in.
    return tx.DecredOutpoint(
        tx_id=tx_id_le,
        index=index_bytes,
        tree=utils.i2le_padded(tree, 1))
byte-like, int, int -> Outpoint
3.921103
3.362535
1.166115
def make_script_sig(stack_script, redeem_script):
    '''Serialize a legacy scriptSig: stack script plus hex-encoded redeem script.

    str, str -> bytearray
    '''
    combined = '{} {}'.format(
        stack_script,
        serialization.hex_serialize(redeem_script))
    return serialization.serialize(combined)
str, str -> bytearray
11.953648
7.086501
1.68682
def make_legacy_input(outpoint, stack_script, redeem_script, sequence):
    '''Build a non-witness TxIn.

    Outpoint, byte-like, byte-like, int -> TxIn

    NOTE(review): the Decred branch drops stack_script/redeem_script;
    presumably Decred carries its scripts in the witness -- confirm.
    '''
    sequence_bytes = utils.i2le_padded(sequence, 4)
    if 'decred' in riemann.get_current_network_name():
        return tx.DecredTxIn(outpoint=outpoint, sequence=sequence_bytes)
    return tx.TxIn(
        outpoint=outpoint,
        stack_script=stack_script,
        redeem_script=redeem_script,
        sequence=sequence_bytes)
Outpoint, byte-like, byte-like, int -> TxIn
4.094895
3.15491
1.297944
def make_legacy_input_and_empty_witness(outpoint, stack_script,
                                        redeem_script, sequence):
    '''Build a legacy TxIn paired with an empty witness.

    Outpoint, byte-like, byte-like, int -> (TxIn, InputWitness)
    '''
    tx_in = make_legacy_input(
        outpoint=outpoint,
        stack_script=stack_script,
        redeem_script=redeem_script,
        sequence=sequence)
    return (tx_in, make_empty_witness())
Outpoint, byte-like, byte-like, int -> (TxIn, InputWitness)
3.671668
2.065017
1.778032
def make_witness_input(outpoint, sequence):
    '''Build a TxIn with empty scripts, for witness spends.

    Outpoint, int -> TxIn
    '''
    sequence_bytes = utils.i2le_padded(sequence, 4)
    if 'decred' in riemann.get_current_network_name():
        return tx.DecredTxIn(outpoint=outpoint, sequence=sequence_bytes)
    return tx.TxIn(
        outpoint=outpoint,
        stack_script=b'',
        redeem_script=b'',
        sequence=sequence_bytes)
Outpoint, int -> TxIn
4.920357
4.058612
1.212325
def make_witness_input_and_witness(outpoint, sequence, stack=None, **kwargs):
    '''Build a witness TxIn paired with its InputWitness.

    Outpoint, int, list(bytearray) -> (Input, InputWitness)

    On Decred the witness fields are taken from kwargs
    (value, height, index, stack_script, redeem_script); elsewhere the
    witness is built from the stack list.
    '''
    tx_in = make_witness_input(outpoint, sequence)
    if 'decred' in riemann.get_current_network_name():
        witness = make_decred_witness(
            value=kwargs['value'],
            height=kwargs['height'],
            index=kwargs['index'],
            stack_script=kwargs['stack_script'],
            redeem_script=kwargs['redeem_script'])
        return (tx_in, witness)
    return (tx_in, make_witness(stack))
Outpoint, int, list(bytearray) -> (Input, InputWitness)
4.351899
3.394791
1.281934
def make_tx(version, tx_ins, tx_outs, lock_time,
            expiry=None, value_balance=0,
            tx_shielded_spends=None, tx_shielded_outputs=None,
            tx_witnesses=None, tx_joinsplits=None, joinsplit_pubkey=None,
            joinsplit_sig=None, binding_sig=None):
    '''
    int, list(TxIn), list(TxOut), int, list(InputWitness) -> Tx

    Dispatch on the currently-selected network name and build the
    matching transaction type: Decred, Zcash Sprout/Overwinter/Sapling,
    or plain/segwit Bitcoin-style. Integer fields are serialized to
    little-endian byte strings of fixed width before construction.
    '''
    n = riemann.get_current_network_name()
    if 'decred' in n:
        return tx.DecredTx(
            version=utils.i2le_padded(version, 4),
            tx_ins=tx_ins,
            tx_outs=tx_outs,
            lock_time=utils.i2le_padded(lock_time, 4),
            expiry=utils.i2le_padded(expiry, 4),
            # NOTE(review): wraps tx_witnesses in a one-element list --
            # confirm DecredTx really expects a list of witness lists
            tx_witnesses=[tx_witnesses])
    # Sprout only when joinsplits are supplied; otherwise falls through
    # to the plain Bitcoin-style constructor at the bottom.
    if 'sprout' in n and tx_joinsplits is not None:
        return tx.SproutTx(
            version=version,
            tx_ins=tx_ins,
            tx_outs=tx_outs,
            lock_time=utils.i2le_padded(lock_time, 4),
            tx_joinsplits=tx_joinsplits if tx_joinsplits is not None else [],
            joinsplit_pubkey=joinsplit_pubkey,
            joinsplit_sig=joinsplit_sig)
    if 'overwinter' in n:
        return tx.OverwinterTx(
            tx_ins=tx_ins,
            tx_outs=tx_outs,
            lock_time=utils.i2le_padded(lock_time, 4),
            expiry_height=utils.i2le_padded(expiry, 4),
            tx_joinsplits=tx_joinsplits if tx_joinsplits is not None else [],
            joinsplit_pubkey=joinsplit_pubkey,
            joinsplit_sig=joinsplit_sig)
    if 'sapling' in n:
        return tx.SaplingTx(
            tx_ins=tx_ins,
            tx_outs=tx_outs,
            lock_time=utils.i2le_padded(lock_time, 4),
            expiry_height=utils.i2le_padded(expiry, 4),
            value_balance=utils.i2le_padded(value_balance, 8),
            tx_shielded_spends=(tx_shielded_spends
                                if tx_shielded_spends is not None
                                else []),
            tx_shielded_outputs=(tx_shielded_outputs
                                 if tx_shielded_outputs is not None
                                 else []),
            tx_joinsplits=tx_joinsplits if tx_joinsplits is not None else [],
            joinsplit_pubkey=joinsplit_pubkey,
            joinsplit_sig=joinsplit_sig,
            binding_sig=binding_sig)
    # Bitcoin-style: segwit marker flag only when witnesses are supplied
    flag = riemann.network.SEGWIT_TX_FLAG \
        if tx_witnesses is not None else None
    return tx.Tx(version=utils.i2le_padded(version, 4),
                 flag=flag,
                 tx_ins=tx_ins,
                 tx_outs=tx_outs,
                 tx_witnesses=tx_witnesses,
                 lock_time=utils.i2le_padded(lock_time, 4))
int, list(TxIn), list(TxOut), int, list(InputWitness) -> Tx
1.748498
1.67933
1.041188
def length_prepend(byte_string):
    '''Prefix a byte string with its VarInt-encoded length.

    bytes -> bytes
    '''
    prefix = tx.VarInt(len(byte_string)).to_bytes()
    return prefix + byte_string
bytes -> bytes
9.085526
5.966467
1.522765
def i2le_script(number):
    '''Convert int to signed little endian (l.e.) hex for scripts

    Args:
        number (int): int value to convert to bytes in l.e. format
    Returns:
        (str): the hex-encoded signed LE number
    Raises:
        ValueError: if the number does not fit in 80 bytes
    '''
    if number == 0:
        return '00'
    # Find the minimal signed width: int.to_bytes raises OverflowError
    # while the value doesn't fit, so try increasing lengths.
    # (Original started at length 0 -- always fails -- caught bare
    # Exception, and silently returned None when nothing fit.)
    for length in range(1, 81):
        try:
            return number.to_bytes(
                length=length,
                byteorder='little',
                signed=True).hex()
        except OverflowError:
            continue
    raise ValueError(
        'Number cannot be expressed in 80 bytes: {}'.format(number))
Convert int to signed little endian (l.e.) hex for scripts Args: number (int): int value to convert to bytes in l.e. format Returns: (str): the hex-encoded signed LE number
7.489786
2.968855
2.522786
def rmd160(msg_bytes):
    '''RIPEMD-160 digest of msg_bytes.

    byte-like -> bytes
    '''
    hasher = hashlib.new('ripemd160')
    hasher.update(msg_bytes)
    return hasher.digest()
byte-like -> bytes
4.105526
3.051546
1.345392
def hash160(msg_bytes):
    '''HASH160: RIPEMD-160 of the network's inner hash of msg_bytes.

    byte-like -> bytes
    '''
    # Decred uses BLAKE256 as the inner hash; everything else SHA-256.
    if 'decred' in riemann.get_current_network_name():
        inner = blake256(msg_bytes)
    else:
        inner = sha256(msg_bytes)
    ripemd = hashlib.new('ripemd160')
    ripemd.update(inner)
    return ripemd.digest()
byte-like -> bytes
4.195704
3.841918
1.092086
def hash256(msg_bytes):
    '''Double hash: BLAKE256x2 on Decred, SHA-256x2 elsewhere.

    byte-like -> bytes
    '''
    if 'decred' in riemann.get_current_network_name():
        return blake256(blake256(msg_bytes))
    first_round = hashlib.sha256(msg_bytes).digest()
    return hashlib.sha256(first_round).digest()
byte-like -> bytes
5.302706
4.571821
1.159867