Schema (one record per function):
    code                    string
    signature               string
    docstring               string
    loss_without_docstring  float64
    loss_with_docstring     float64
    factor                  float64
_plen = self._read_binary(3)
_type = self._read_unpack(1)
_flag = self._read_binary(1)
_stid = self._read_binary(4)
def read_http(self, length)
Read Hypertext Transfer Protocol version 2.

Structure of HTTP/2 packet [RFC 7540]:
    +-----------------------------------------------+
    |                 Length (24)                   |
    +---------------+---------------+---------------+
    |   Type (8)    |   Flags (8)   |
    +-+-------------+---------------+-------------------------------+
    |R|                 Stream Identifier (31)                      |
    +=+=============================================================+
    |                   Frame Payload (0...)                      ...
    +---------------------------------------------------------------+
6.102954
6.492376
0.940019
seekset = file.tell()
if not _termination:
    # FTP analysis
    flag, ftp = _analyse_ftp(file, length, seekset=seekset)
    if flag:
        return ftp

    # HTTP/1.* analysis
    flag, http = _analyse_httpv1(file, length, seekset=seekset)
    if flag:
        return http

    # NOTE: due to the format similarity of HTTP/2 and TLS/SSL, HTTP/2 was
    # originally not analysed before TLS/SSL support existed; that note is
    # now deprecated, since validations are performed.
    # HTTP/2 analysis
    flag, http = _analyse_httpv2(file, length, seekset=seekset)
    if flag:
        return http

# backup file offset
file.seek(seekset, os.SEEK_SET)

# raw packet analysis
return Raw(file, length)
def analyse(file, length=None, *, _termination=False)
Analyse application layer packets.
6.005754
5.559219
1.080323
if isinstance(key, int):
    return Routing(key)
if key not in Routing._member_map_:
    extend_enum(Routing, key, default)
return Routing[key]
def get(key, default=-1)
Backport support for original codes.
6.567573
5.704148
1.151368
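The `get` classmethods throughout this dataset all follow the same backport pattern built on `extend_enum` from the aenum package. A minimal self-contained sketch of the pattern, with made-up `Routing` members for illustration:

    from aenum import IntEnum, extend_enum

    class Routing(IntEnum):
        # hypothetical members; the real registry is much larger
        SOURCE_ROUTE = 0
        TYPE_2 = 2

    def get(key, default=-1):
        """Look up a member by value or by name, registering unknown names."""
        if isinstance(key, int):
            return Routing(key)              # value lookup (may raise ValueError)
        if key not in Routing._member_map_:  # unknown name: register it on the fly
            extend_enum(Routing, key, default)
        return Routing[key]                  # name lookup

    print(get(2))             # Routing.TYPE_2
    print(get('RPL_SOURCE'))  # newly extended member with value -1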
_byte = self._read_unpack(size)
_prot = ETHERTYPE.get(_byte)
return _prot
def _read_protos(self, size)
Read next layer protocol type.

Positional arguments:
    * size -- int, buffer size

Returns:
    * str -- next layer's protocol name
16.705141
16.197721
1.031327
if length == 0:
    from pcapkit.protocols.null import NoPayload as Protocol
elif self._sigterm:
    from pcapkit.protocols.raw import Raw as Protocol
elif proto == 0x0806:
    from pcapkit.protocols.link.arp import ARP as Protocol
elif proto == 0x8035:
    from pcapkit.protocols.link.rarp import RARP as Protocol
elif proto == 0x8100:
    from pcapkit.protocols.link.vlan import VLAN as Protocol
elif proto == 0x0800:
    from pcapkit.protocols.internet.ipv4 import IPv4 as Protocol
elif proto == 0x86DD:
    from pcapkit.protocols.internet.ipv6 import IPv6 as Protocol
elif proto == 0x8137:
    from pcapkit.protocols.internet.ipx import IPX as Protocol
else:
    from pcapkit.protocols.raw import Raw as Protocol
next_ = Protocol(self._file, length,
                 error=self._onerror,
                 layer=self._exlayer,
                 protocol=self._exproto)
return next_
def _import_next_layer(self, proto, length)
Import next layer extractor.

Positional arguments:
    * proto -- str, next layer protocol name
    * length -- int, valid (not padding) length

Returns:
    * bool -- flag if extraction of next layer succeeded
    * Info -- info of next layer
    * ProtoChain -- protocol chain of next layer
    * str -- alias of next layer

Protocols:
    * ARP -- data link layer
    * RARP -- data link layer
    * VLAN -- data link layer
    * IPv4 -- internet layer
    * IPv6 -- internet layer
    * IPX -- internet layer
2.6645
2.42579
1.098405
if isinstance(key, int):
    return HIT_Suite(key)
if key not in HIT_Suite._member_map_:
    extend_enum(HIT_Suite, key, default)
return HIT_Suite[key]
def get(key, default=-1)
Backport support for original codes.
6.282995
5.787757
1.085567
if isinstance(key, int):
    return Authentication(key)
if key not in Authentication._member_map_:
    extend_enum(Authentication, key, default)
return Authentication[key]
def get(key, default=-1)
Backport support for original codes.
6.637487
5.797378
1.144912
if fmt == 'pcap':       # output PCAP file
    from pcapkit.dumpkit import PCAP as output
elif fmt == 'plist':    # output PLIST file
    from dictdumper import PLIST as output
elif fmt == 'json':     # output JSON file
    from dictdumper import JSON as output
elif fmt == 'tree':     # output treeview text file
    from dictdumper import Tree as output
    fmt = 'txt'
elif fmt == 'html':     # output JavaScript file
    from dictdumper import JavaScript as output
    fmt = 'js'
elif fmt == 'xml':      # output XML file
    from dictdumper import XML as output
else:                   # no output file
    from pcapkit.dumpkit import NotImplementedIO as output
    if fmt is not None:
        warnings.warn(f'Unsupported output format: {fmt}; disabled file output feature',
                      FormatWarning, stacklevel=stacklevel())
    return output, ''

try:
    pathlib.Path(fout).mkdir(parents=True, exist_ok=True)
except FileExistsError as error:
    if fmt is None:
        warnings.warn(error.strerror, FileWarning, stacklevel=stacklevel())
    else:
        raise FileExists(*error.args) from None

return output, fmt
def make_fout(fout='./tmp', fmt='pcap')
Make root path for output.

Positional arguments:
    * fout -- str, root path for output
    * fmt -- str, output format

Returns:
    * output -- dumper of specified format
3.371609
3.45625
0.975511
# fetch flow label
output = self.trace(packet, _check=False, _output=True)

# dump files
output(packet['frame'], name=f"Frame {packet['index']}",
       byteorder=self._endian, nanosecond=self._nnsecd)
def dump(self, packet)
Dump frame to output files.

Positional arguments:
    * packet -- dict, a flow packet
        |-- (str) protocol -- data link type from global header
        |-- (int) index -- frame number
        |-- (Info) frame -- extracted frame info
        |-- (bool) syn -- TCP synchronise (SYN) flag
        |-- (bool) fin -- TCP finish (FIN) flag
        |-- (str) src -- source IP
        |-- (int) srcport -- TCP source port
        |-- (str) dst -- destination IP
        |-- (int) dstport -- TCP destination port
        |-- (numbers.Real) timestamp -- frame timestamp
22.671152
18.282583
1.240041
self._newflg = True

if _check:
    pkt_check(packet)
info = Info(packet)

# Buffer Identifier
BUFID = tuple(sorted([str(info.src), str(info.srcport),    # pylint: disable=E1101
                      str(info.dst), str(info.dstport)]))  # pylint: disable=E1101
# SYN = info.syn  # Synchronise Flag (Establishment)
# Finish Flag (Termination)
FIN = info.fin  # pylint: disable=E1101

# # when SYN is set, reset buffer of this session
# if SYN and BUFID in self._buffer:
#     temp = self._buffer.pop(BUFID)
#     temp['fpout'] = (self._fproot, self._fdpext)
#     temp['index'] = tuple(temp['index'])
#     self._stream.append(Info(temp))

# initialise buffer with BUFID
if BUFID not in self._buffer:
    label = f'{info.src}_{info.srcport}-{info.dst}_{info.dstport}-{info.timestamp}'  # pylint: disable=E1101
    self._buffer[BUFID] = dict(
        fpout=self._foutio(f'{self._fproot}/{label}.{self._fdpext}',
                           protocol=info.protocol),  # pylint: disable=E1101
        index=list(),
        label=label,
    )

# trace frame record
self._buffer[BUFID]['index'].append(info.index)  # pylint: disable=E1101
fpout = self._buffer[BUFID]['fpout']
label = self._buffer[BUFID]['label']

# when FIN is set, submit buffer of this session
if FIN:
    buf = self._buffer.pop(BUFID)
    # fpout, label = buf['fpout'], buf['label']
    if self._fdpext:
        buf['fpout'] = f'{self._fproot}/{label}.{self._fdpext}'
    else:
        del buf['fpout']
    buf['index'] = tuple(buf['index'])
    self._stream.append(Info(buf))

# return label or output object
return fpout if _output else label
def trace(self, packet, *, _check=True, _output=False)
Trace packets.

Positional arguments:
    * packet -- dict, a flow packet

Keyword arguments:
    * _check -- bool, flag if run validations
    * _output -- bool, flag if has formatted dumper
4.058087
4.023474
1.008603
self._newflg = False

ret = list()
for buf in self._buffer.values():
    buf = copy.deepcopy(buf)
    if self._fdpext:
        buf['fpout'] = f"{self._fproot}/{buf['label']}.{self._fdpext}"
    else:
        del buf['fpout']
    buf['index'] = tuple(buf['index'])
    ret.append(Info(buf))
ret += self._stream
return tuple(ret)
def submit(self)
Submit traced TCP flows.
8.362703
8.381309
0.99778
if length is None:
    length = len(self)

_next = self._read_protos(1)
_plen = self._read_unpack(1)
_resv = self._read_fileng(2)
_scpi = self._read_unpack(4)
_dsnf = self._read_unpack(4)

# ICV length & value
_tlen = _plen * 4 - 2
_vlen = _tlen - 12
_chkv = self._read_fileng(_vlen)

ah = dict(
    next=_next,
    length=_tlen,
    spi=_scpi,
    seq=_dsnf,
    icv=_chkv,
)

if version == 6:
    _plen = 8 - (_tlen % 8)
elif version == 4:
    _plen = 4 - (_tlen % 4)
else:
    raise VersionError(f'Unknown IP version {version}')

if _plen:  # explicit padding in need
    padding = self._read_binary(_plen)
    if any((int(bit, base=2) for bit in padding)):
        raise ProtocolError(f'{self.alias}: invalid format')

length -= ah['length']
ah['packet'] = self._read_packet(header=ah['length'], payload=length)

if extension:
    self._protos = None
    return ah
return self._decode_next_layer(ah, _next, length)
def read_ah(self, length, version, extension)
Read Authentication Header.

Structure of AH header [RFC 4302]:
     0                   1                   2                   3
     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Next Header  |  Payload Len  |            RESERVED           |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                Security Parameters Index (SPI)                |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                     Sequence Number Field                     |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                                                               |
    +              Integrity Check Value-ICV (variable)             |
    |                                                               |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name       Description
      0       0   ah.next    Next Header
      1       8   ah.length  Payload Length
      2      16   -          Reserved (must be zero)
      4      32   ah.spi     Security Parameters Index (SPI)
      8      64   ah.seq     Sequence Number Field
     12      96   ah.icv     Integrity Check Value (ICV)
5.513487
4.401154
1.252737
if length is None:
    length = len(self)

_dstm = self._read_mac_addr()
_srcm = self._read_mac_addr()
_type = self._read_protos(2)

ethernet = dict(
    dst=_dstm,
    src=_srcm,
    type=_type,
)

length -= 14
ethernet['packet'] = self._read_packet(header=14, payload=length)

return self._decode_next_layer(ethernet, _type, length)
def read_ethernet(self, length)
Read Ethernet Protocol.

Structure of Ethernet Protocol header [RFC 7042]:
    Octets  Bits  Name      Description
      0       0   eth.dst   Destination MAC Address
      1       8   eth.src   Source MAC Address
      2      16   eth.type  Protocol (Internet Layer)
4.328092
4.402922
0.983004
_byte = self._read_fileng(6)
_addr = '-'.join(textwrap.wrap(_byte.hex(), 2))
return _addr
def _read_mac_addr(self)
Read MAC address.
8.298658
7.75772
1.069729
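A quick standalone illustration of the `textwrap.wrap` trick used above to render six raw bytes as a dash-separated MAC address (the example bytes are arbitrary):

    import textwrap

    raw = bytes.fromhex('8c1645a9b0c3')          # six bytes as read from the frame
    mac = '-'.join(textwrap.wrap(raw.hex(), 2))  # pair up the hex digits
    print(mac)  # 8c-16-45-a9-b0-c3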
if isinstance(key, int):
    return Suite(key)
if key not in Suite._member_map_:
    extend_enum(Suite, key, default)
return Suite[key]
def get(key, default=-1)
Backport support for original codes.
7.438162
6.450607
1.153095
if isinstance(key, int):
    return ECDSA_Curve(key)
if key not in ECDSA_Curve._member_map_:
    extend_enum(ECDSA_Curve, key, default)
return ECDSA_Curve[key]
def get(key, default=-1)
Backport support for original codes.
4.662845
4.305557
1.082983
dict_ = dict()

frame = packet.frame_info
for field in frame.field_names:
    dict_[field] = getattr(frame, field)

tempdict = dict_
for layer in packet.layers:
    tempdict[layer.layer_name.upper()] = dict()
    tempdict = tempdict[layer.layer_name.upper()]
    for field in layer.field_names:
        tempdict[field] = getattr(layer, field)

return dict_
def packet2dict(packet)
Convert PyShark packet into dict.
2.868022
2.710774
1.058009
if 'TCP' in packet:
    ip = packet.ip if 'IP' in packet else packet.ipv6
    tcp = packet.tcp
    data = dict(
        protocol=LINKTYPE.get(packet.layers[0].layer_name.upper()),  # data link type from global header
        index=int(packet.number),                                    # frame number
        frame=packet2dict(packet),                                   # extracted packet
        syn=bool(int(tcp.flags_syn)),                                # TCP synchronise (SYN) flag
        fin=bool(int(tcp.flags_fin)),                                # TCP finish (FIN) flag
        src=ipaddress.ip_address(ip.src),                            # source IP
        dst=ipaddress.ip_address(ip.dst),                            # destination IP
        srcport=int(tcp.srcport),                                    # TCP source port
        dstport=int(tcp.dstport),                                    # TCP destination port
        timestamp=packet.frame_info.time_epoch,                      # timestamp
    )
    return True, data
return False, None
def tcp_traceflow(packet)
Trace packet flow for TCP.
3.819216
3.838045
0.995094
BUFID = info.bufid  # Buffer Identifier
DSN = info.dsn      # Data Sequence Number
ACK = info.ack      # Acknowledgement Number
FIN = info.fin      # Finish Flag (Termination)
RST = info.rst      # Reset Connection Flag (Termination)
SYN = info.syn      # Synchronise Flag (Establishment)

# when SYN is set, reset buffer of this session
if SYN and BUFID in self._buffer:
    self._dtgram += self.submit(self._buffer[BUFID], bufid=BUFID)
    del self._buffer[BUFID]

# initialise buffer with BUFID & ACK
if BUFID not in self._buffer:
    self._buffer[BUFID] = {
        'hdl': [Info(first=info.len, last=sys.maxsize)],
        ACK: dict(
            ind=[info.num],
            isn=info.dsn,
            len=info.len,
            raw=info.payload,
        ),
    }
else:
    # initialise buffer with ACK
    if ACK not in self._buffer[BUFID]:
        self._buffer[BUFID][ACK] = dict(
            ind=[info.num],
            isn=info.dsn,
            len=info.len,
            raw=info.payload,
        )
    else:
        # append packet index
        self._buffer[BUFID][ACK]['ind'].append(info.num)

        # record fragment payload
        ISN = self._buffer[BUFID][ACK]['isn']  # Initial Sequence Number
        RAW = self._buffer[BUFID][ACK]['raw']  # Raw Payload Data
        if DSN >= ISN:      # if fragment goes after existing payload
            LEN = self._buffer[BUFID][ACK]['len']
            GAP = DSN - (ISN + LEN)  # gap length between payloads
            if GAP >= 0:    # if fragment goes after existing payload
                RAW += bytearray(GAP) + info.payload
            else:           # if fragment partially overlaps existing payload
                RAW[DSN-ISN:] = info.payload
        else:               # if fragment goes before existing payload
            LEN = info.len
            GAP = ISN - (DSN + LEN)  # gap length between payloads
            self._buffer[BUFID][ACK]['isn'] = DSN
            if GAP >= 0:    # if fragment goes before existing payload
                RAW = info.payload + bytearray(GAP) + RAW
            else:           # if fragment partially overlaps existing payload
                RAW = info.payload + RAW[ISN-GAP:]
        self._buffer[BUFID][ACK]['raw'] = RAW       # update payload datagram
        self._buffer[BUFID][ACK]['len'] = len(RAW)  # update payload length

    # update hole descriptor list
    HDL = self._buffer[BUFID]['hdl']
    for (index, hole) in enumerate(HDL):    # step one
        if info.first > hole.last:          # step two
            continue
        if info.last < hole.first:          # step three
            continue
        del HDL[index]                      # step four
        if info.first > hole.first:         # step five
            new_hole = Info(
                first=hole.first,
                last=info.first - 1,
            )
            HDL.insert(index, new_hole)
            index += 1
        if info.last < hole.last and not FIN and not RST:  # step six
            new_hole = Info(
                first=info.last + 1,
                last=hole.last,
            )
            HDL.insert(index, new_hole)
        break                               # step seven
    self._buffer[BUFID]['hdl'] = HDL        # update HDL

# when FIN/RST is set, submit buffer of this session
if FIN or RST:
    self._dtgram += self.submit(self._buffer[BUFID], bufid=BUFID)
    del self._buffer[BUFID]
def reassembly(self, info)
Reassembly procedure.

Positional arguments:
    * info -- Info, info dict of packets to be reassembled
2.979674
3.008483
0.990424
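The `hdl` bookkeeping in the code above is the hole-descriptor algorithm of RFC 815; its numbered "step one" through "step seven" comments refer to that RFC's steps. A minimal standalone sketch of the same idea on plain integer ranges (names are illustrative, not part of the class above):

    from collections import namedtuple

    Hole = namedtuple('Hole', 'first last')

    def fill(holes, first, last):
        """RFC 815: split every hole that a fragment [first, last] touches."""
        out = []
        for hole in holes:
            if first > hole.last or last < hole.first:  # steps 1-3: no overlap
                out.append(hole)
                continue
            if first > hole.first:                      # step 5: gap left before the data
                out.append(Hole(hole.first, first - 1))
            if last < hole.last:                        # step 6: gap left after the data
                out.append(Hole(last + 1, hole.last))
        return out

    holes = [Hole(0, 10**6)]
    holes = fill(holes, 0, 99)      # first fragment
    holes = fill(holes, 200, 299)   # out-of-order fragment
    print(holes)  # [Hole(first=100, last=199), Hole(first=300, last=1000000)]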
datagram = []           # reassembled datagram
HDL = buf.pop('hdl')    # hole descriptor list (remove from dict)

# check through every buffer with ACK
for (ack, buffer) in buf.items():
    # if this buffer is not implemented,
    # go through every hole and extract received payload
    if len(HDL) > 2 and self._strflg:
        data = []
        start = stop = 0
        for hole in HDL:
            stop = hole.first
            byte = buffer['raw'][start:stop]
            start = hole.last
            if byte:    # strip empty payload
                data.append(byte)
        byte = buffer['raw'][start:]
        if byte:    # strip empty payload
            data.append(byte)
        if data:    # strip empty buffer
            packet = Info(
                NotImplemented=True,
                id=Info(
                    src=(bufid[0], bufid[2]),
                    dst=(bufid[1], bufid[3]),
                    ack=ack,
                ),
                index=tuple(buffer['ind']),
                payload=tuple(data) or None,
                packets=tuple(analyse(io.BytesIO(frag), len(frag)) for frag in data),
            )
            datagram.append(packet)
    # if this buffer is implemented,
    # export payload data & convert into bytes
    else:
        data = buffer['raw']
        if data:    # strip empty buffer
            packet = Info(
                NotImplemented=False,
                id=Info(
                    src=(bufid[0], bufid[2]),
                    dst=(bufid[1], bufid[3]),
                    ack=ack,
                ),
                index=tuple(buffer['ind']),
                payload=bytes(data) or None,
                packets=(analyse(io.BytesIO(data), len(data)),),
            )
            datagram.append(packet)
return datagram
def submit(self, buf, *, bufid)
Submit reassembled payload.

Positional arguments:
    * buf -- dict, buffer dict of reassembled packets

Keyword arguments:
    * bufid -- tuple, buffer identifier

Returns:
    * list -- reassembled packets
4.930519
4.705096
1.04791
if scapy_all is None: raise ModuleNotFound("No module named 'scapy'", name='scapy') chain = [packet.name] payload = packet.payload while not isinstance(payload, scapy_all.packet.NoPayload): chain.append(payload.name) payload = payload.payload return ':'.join(chain)
def packet2chain(packet)
Fetch Scapy packet protocol chain.
4.226532
3.793932
1.114024
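If Scapy is installed, the chain walk above can be exercised on a synthetic packet; a minimal sketch of the same loop, inlined so it runs standalone:

    from scapy.layers.inet import IP, TCP
    from scapy.packet import NoPayload, Raw

    pkt = IP() / TCP() / Raw(b'payload')

    chain, layer = [pkt.name], pkt.payload
    while not isinstance(layer, NoPayload):  # walk .payload until NoPayload
        chain.append(layer.name)
        layer = layer.payload
    print(':'.join(chain))  # IP:TCP:Raw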
if scapy_all is None:
    raise ModuleNotFound("No module named 'scapy'", name='scapy')

def wrapper(packet):
    dict_ = packet.fields
    payload = packet.payload
    if not isinstance(payload, scapy_all.packet.NoPayload):
        dict_[payload.name] = wrapper(payload)
    return dict_

return {
    'packet': bytes(packet),
    packet.name: wrapper(packet),
}
def packet2dict(packet, *, count=NotImplemented)
Convert Scapy packet into dict.
5.599694
5.185444
1.079887
if 'IP' in packet:
    ipv4 = packet['IP']
    if ipv4.flags.DF:   # dismiss not fragmented packet
        return False, None
    data = dict(
        bufid=(
            ipaddress.ip_address(ipv4.src),     # source IP address
            ipaddress.ip_address(ipv4.dst),     # destination IP address
            ipv4.id,                            # identification
            TP_PROTO.get(ipv4.proto).name,      # payload protocol type
        ),
        num=count,                              # original packet range number
        fo=ipv4.frag,                           # fragment offset
        ihl=ipv4.ihl,                           # internet header length
        mf=bool(ipv4.flags.MF),                 # more fragment flag
        tl=ipv4.len,                            # total length, header includes
        header=bytearray(ipv4.raw_packet_cache),     # raw bytearray type header
        payload=bytearray(bytes(ipv4.payload)),      # raw bytearray type payload
    )
    return True, data
return False, None
def ipv4_reassembly(packet, *, count=NotImplemented)
Make data for IPv4 reassembly.
5.227325
5.021204
1.04105
if scapy_all is None:
    raise ModuleNotFound("No module named 'scapy'", name='scapy')

if 'IPv6' in packet:
    ipv6 = packet['IPv6']
    if scapy_all.IPv6ExtHdrFragment not in ipv6:  # pylint: disable=E1101
        return False, None  # dismiss not fragmented packet
    ipv6_frag = ipv6['IPv6ExtHdrFragment']
    data = dict(
        bufid=(
            ipaddress.ip_address(ipv6.src),     # source IP address
            ipaddress.ip_address(ipv6.dst),     # destination IP address
            ipv6.fl,                            # label
            TP_PROTO.get(ipv6_frag.nh).name,    # next header field in IPv6 Fragment Header
        ),
        num=count,                              # original packet range number
        fo=ipv6_frag.offset,                    # fragment offset
        ihl=len(ipv6) - len(ipv6_frag),         # header length, only headers before IPv6-Frag
        mf=bool(ipv6_frag.m),                   # more fragment flag
        tl=len(ipv6),                           # total length, header includes
        header=bytearray(bytes(ipv6)[:-len(ipv6_frag)]),    # raw bytearray type header before IPv6-Frag
        payload=bytearray(bytes(ipv6_frag.payload)),        # raw bytearray type payload after IPv6-Frag
    )
    return True, data
return False, None
def ipv6_reassembly(packet, *, count=NotImplemented)
Make data for IPv6 reassembly.
4.958046
4.838366
1.024736
if 'TCP' in packet:
    ip = packet['IP'] if 'IP' in packet else packet['IPv6']
    tcp = packet['TCP']
    data = dict(
        bufid=(
            ipaddress.ip_address(ip.src),   # source IP address
            ipaddress.ip_address(ip.dst),   # destination IP address
            tcp.sport,                      # source port
            tcp.dport,                      # destination port
        ),
        num=count,                          # original packet range number
        ack=tcp.ack,                        # acknowledgement
        dsn=tcp.seq,                        # data sequence number
        syn=bool(tcp.flags.S),              # synchronise flag
        fin=bool(tcp.flags.F),              # finish flag
        rst=bool(tcp.flags.R),              # reset connection flag
        payload=bytearray(bytes(tcp.payload)),  # raw bytearray type payload
    )
    raw_len = len(tcp.payload)          # payload length, header excludes
    data['first'] = tcp.seq             # this sequence number
    data['last'] = tcp.seq + raw_len    # next (wanted) sequence number
    data['len'] = raw_len               # payload length, header excludes
    return True, data
return False, None
def tcp_reassembly(packet, *, count=NotImplemented)
Store data for TCP reassembly.
3.769803
3.658113
1.030532
if 'TCP' in packet:
    ip = packet['IP'] if 'IP' in packet else packet['IPv6']
    tcp = packet['TCP']
    data = dict(
        protocol=LINKTYPE.get(packet.name.upper()),     # data link type from global header
        index=count,                                    # frame number
        frame=packet2dict(packet),                      # extracted packet
        syn=bool(tcp.flags.S),                          # TCP synchronise (SYN) flag
        fin=bool(tcp.flags.F),                          # TCP finish (FIN) flag
        src=ipaddress.ip_address(ip.src),               # source IP
        dst=ipaddress.ip_address(ip.dst),               # destination IP
        srcport=tcp.sport,                              # TCP source port
        dstport=tcp.dport,                              # TCP destination port
        timestamp=time.time(),                          # timestamp
    )
    return True, data
return False, None
def tcp_traceflow(packet, *, count=NotImplemented)
Trace packet flow for TCP.
3.758719
3.718952
1.010693
if length is None:
    length = len(self)

_next = self._read_protos(1)
_hlen = self._read_unpack(1)
# _opts = self._read_fileng(_hlen*8+6)

hopopt = dict(
    next=_next,
    length=(_hlen + 1) * 8,
)

options = self._read_hopopt_options(_hlen * 8 + 6)
hopopt['options'] = options[0]  # tuple of option acronyms
hopopt.update(options[1])       # merge option info to buffer

length -= hopopt['length']
hopopt['packet'] = self._read_packet(header=hopopt['length'], payload=length)

if extension:
    self._protos = None
    return hopopt
return self._decode_next_layer(hopopt, _next, length)
def read_hopopt(self, length, extension)
Read IPv6 Hop-by-Hop Options.

Structure of HOPOPT header [RFC 8200]:
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Next Header  |  Hdr Ext Len  |                               |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+                               +
    |                                                               |
    .                            Options                            .
    |                                                               |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name            Description
      0       0   hopopt.next     Next Header
      1       8   hopopt.length   Header Extensive Length
      2      16   hopopt.options  Options
5.829741
5.080232
1.147534
counter = 0         # length of read options
optkind = list()    # option type list
options = dict()    # dict of option data

while counter < length:
    # break when eol triggered
    code = self._read_unpack(1)
    if not code:
        break

    # extract parameter
    abbr, desc = _HOPOPT_OPT.get(code, ('none', 'Unassigned'))
    data = _HOPOPT_PROC(abbr)(self, code, desc=desc)
    enum = _OPT_TYPE.get(code)

    # record parameter data
    counter += data['length']
    if enum in optkind:
        if isinstance(options[abbr], tuple):
            options[abbr] += (Info(data),)
        else:
            options[abbr] = (Info(options[abbr]), Info(data))
    else:
        optkind.append(enum)
        options[abbr] = data

# check threshold
if counter != length:
    raise ProtocolError(f'{self.alias}: invalid format')

return tuple(optkind), options
def _read_hopopt_options(self, length)
Read HOPOPT options.

Positional arguments:
    * length -- int, length of options

Returns:
    * dict -- extracted HOPOPT options
6.344238
6.336813
1.001172
_type = self._read_opt_type(code)

if code == 0:
    opt = dict(
        desc=desc,
        type=_type,
        length=1,
    )
elif code == 1:
    _size = self._read_unpack(1)
    _padn = self._read_fileng(_size)
    opt = dict(
        desc=desc,
        type=_type,
        length=_size + 2,
        padding=_padn,
    )
else:
    raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')

return opt
def _read_opt_pad(self, code, *, desc)
Read HOPOPT padding options.

Structure of HOPOPT padding options [RFC 8200]:
    * Pad1 Option:
        +-+-+-+-+-+-+-+-+
        |       0       |
        +-+-+-+-+-+-+-+-+

        Octets  Bits  Name                    Description
          0       0   hopopt.pad.type         Option Type
          0       0   hopopt.pad.type.value   Option Number
          0       0   hopopt.pad.type.action  Action (00)
          0       2   hopopt.pad.type.change  Change Flag (0)

    * PadN Option:
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- - - - - - - - -
        |       1       | Opt Data Len  |  Option Data
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- - - - - - - - -

        Octets  Bits  Name                    Description
          0       0   hopopt.pad.type         Option Type
          0       0   hopopt.pad.type.value   Option Number
          0       0   hopopt.pad.type.action  Action (00)
          0       2   hopopt.pad.type.change  Change Flag (0)
          1       8   hopopt.opt.length       Length of Option Data
          2      16   hopopt.pad.padding      Padding
4.120522
4.192349
0.982867
_type = self._read_opt_type(code)
_size = self._read_unpack(1)
if _size != 1:
    raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')
_limt = self._read_unpack(1)

opt = dict(
    desc=desc,
    type=_type,
    length=_size + 2,
    limit=_limt,
)
return opt
def _read_opt_tun(self, code, *, desc)
Read HOPOPT Tunnel Encapsulation Limit option.

Structure of HOPOPT Tunnel Encapsulation Limit option [RFC 2473]:
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Next Header  |Hdr Ext Len = 0| Opt Type = 4  |Opt Data Len=1 |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    | Tun Encap Lim |PadN Opt Type=1|Opt Data Len=1 |       0       |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name                    Description
      0       0   hopopt.tun.type         Option Type
      0       0   hopopt.tun.type.value   Option Number
      0       0   hopopt.tun.type.action  Action (00)
      0       2   hopopt.tun.type.change  Change Flag (0)
      1       8   hopopt.tun.length       Length of Option Data
      2      16   hopopt.tun.limit        Tunnel Encapsulation Limit
5.304727
4.812078
1.102378
_type = self._read_opt_type(code)
_size = self._read_unpack(1)
if _size != 2:
    raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')
_rval = self._read_unpack(2)

if 4 <= _rval <= 35:
    _dscp = f'Aggregated Reservation Nesting Level {_rval-4}'   # [RFC 3175]
elif 36 <= _rval <= 67:
    _dscp = f'QoS NSLP Aggregation Level {_rval-36}'            # [RFC 5974]
elif 65503 <= _rval <= 65534:
    _dscp = 'Reserved for experimental use'                     # [RFC 5350]
else:
    _dscp = _ROUTER_ALERT.get(_rval, 'Unassigned')

opt = dict(
    desc=desc,
    type=_type,
    length=_size + 2,
    value=_rval,
    alert=_dscp,
)
return opt
def _read_opt_ra(self, code, *, desc)
Read HOPOPT Router Alert option.

Structure of HOPOPT Router Alert option [RFC 2711]:
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |0 0 0|0 0 1 0 1|0 0 0 0 0 0 1 0|        Value (2 octets)       |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name                   Description
      0       0   hopopt.ra.type         Option Type
      0       0   hopopt.ra.type.value   Option Number
      0       0   hopopt.ra.type.action  Action (00)
      0       2   hopopt.ra.type.change  Change Flag (0)
      1       8   hopopt.opt.length      Length of Option Data
      2      16   hopopt.ra.value        Value
5.216532
5.053229
1.032317
_type = self._read_opt_type(code)
_size = self._read_unpack(1)
if _size < 8 and _size % 8 != 0:
    raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')
_cmpt = self._read_unpack(4)
_clen = self._read_unpack(1)
if _clen % 2 != 0:
    raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')
_sens = self._read_unpack(1)
_csum = self._read_fileng(2)

opt = dict(
    desc=desc,
    type=_type,
    length=_size + 2,
    domain=_cmpt,
    cmpt_len=_clen * 4,
    level=_sens,
    chksum=_csum,
)

if _clen:
    _bmap = list()
    for _ in range(_clen // 2):
        _bmap.append(self._read_binary(8))
    opt['bitmap'] = tuple(_bmap)

_plen = _size - _clen * 4 - 8
if _plen:
    self._read_fileng(_plen)

return opt
def _read_opt_calipso(self, code, *, desc)
Read HOPOPT CALIPSO option.

Structure of HOPOPT CALIPSO option [RFC 5570]:
    ------------------------------------------------------------
    | Next Header | Hdr Ext Len   | Option Type | Option Length|
    +-------------+---------------+-------------+--------------+
    |             CALIPSO Domain of Interpretation             |
    +-------------+---------------+-------------+--------------+
    | Cmpt Length |  Sens Level   |     Checksum (CRC-16)      |
    +-------------+---------------+-------------+--------------+
    |      Compartment Bitmap (Optional; variable length)      |
    +-------------+---------------+-------------+--------------+

    Octets  Bits  Name                        Description
      0       0   hopopt.calipso.type         Option Type
      0       0   hopopt.calipso.type.value   Option Number
      0       0   hopopt.calipso.type.action  Action (00)
      0       2   hopopt.calipso.type.change  Change Flag (0)
      1       8   hopopt.calipso.length       Length of Option Data
      2      16   hopopt.calipso.domain       CALIPSO Domain of Interpretation
      6      48   hopopt.calipso.cmpt_len     Cmpt Length
      7      56   hopopt.calipso.level        Sens Level
      8      64   hopopt.calipso.chksum       Checksum (CRC-16)
      9      72   hopopt.calipso.bitmap       Compartment Bitmap
3.819402
3.137656
1.217279
_type = self._read_opt_type(code)
_size = self._read_unpack(1)
if _size != 10:
    raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')
_stlr = self._read_unpack(1)
_stls = self._read_unpack(1)
_psnt = self._read_unpack(2)
_psnl = self._read_unpack(2)
_dtlr = self._read_unpack(2)
_dtls = self._read_unpack(2)

opt = dict(
    desc=desc,
    type=_type,
    length=_size + 2,
    scaledtlr=datetime.timedelta(seconds=_stlr),
    scaledtls=datetime.timedelta(seconds=_stls),
    psntp=_psnt,
    psnlr=_psnl,
    deltatlr=datetime.timedelta(seconds=_dtlr),
    deltatls=datetime.timedelta(seconds=_dtls),
)
return opt
def _read_opt_pdm(self, code, *, desc)
Read HOPOPT PDM option.

Structure of HOPOPT PDM option [RFC 8250]:
     0                   1                   2                   3
     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Option Type  | Option Length |   ScaleDTLR   |   ScaleDTLS   |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |        PSN This Packet        |       PSN Last Received       |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |   Delta Time Last Received    |     Delta Time Last Sent      |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name                    Description
      0       0   hopopt.pdm.type         Option Type
      0       0   hopopt.pdm.type.value   Option Number
      0       0   hopopt.pdm.type.action  Action (00)
      0       2   hopopt.pdm.type.change  Change Flag (0)
      1       8   hopopt.pdm.length       Length of Option Data
      2      16   hopopt.pdm.scaledtlr    Scale Delta Time Last Received
      3      24   hopopt.pdm.scaledtls    Scale Delta Time Last Sent
      4      32   hopopt.pdm.psntp        Packet Sequence Number This Packet
      6      48   hopopt.pdm.psnlr        Packet Sequence Number Last Received
      8      64   hopopt.pdm.deltatlr     Delta Time Last Received
     10      80   hopopt.pdm.deltatls     Delta Time Last Sent
3.364111
2.413496
1.393875
_type = self._read_opt_type(code)
_size = self._read_unpack(1)
if _size != 6:
    raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')
_fcrr = self._read_binary(1)
_func = int(_fcrr[:4], base=2)
_rate = int(_fcrr[4:], base=2)
_ttlv = self._read_unpack(1)
_nonr = self._read_binary(4)
_qsnn = int(_nonr[:30], base=2)

if _func != 0 and _func != 8:
    raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')

data = dict(
    type=_type,
    length=_size + 2,
    func=_QS_FUNC.get(_func),
    rate=40000 * (2 ** _rate) / 1000,
    ttl=None if _func else _rate,
    nounce=_qsnn,
)
return data
def _read_opt_qs(self, code, *, desc)
Read HOPOPT Quick Start option.

Structure of HOPOPT Quick-Start option [RFC 4782]:
    * A Quick-Start Request:
         0                   1                   2                   3
         0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        |     Option    |   Length=6    | Func. | Rate  |    QS TTL     |
        |               |               | 0000  |Request|               |
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        |                          QS Nonce                         | R |
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    * Report of Approved Rate:
         0                   1                   2                   3
         0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        |     Option    |   Length=6    | Func. | Rate  |   Not Used    |
        |               |               | 1000  | Report|               |
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        |                          QS Nonce                         | R |
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name                   Description
      0       0   hopopt.qs.type         Option Type
      0       0   hopopt.qs.type.value   Option Number
      0       0   hopopt.qs.type.action  Action (00)
      0       2   hopopt.qs.type.change  Change Flag (1)
      1       8   hopopt.qs.length       Length of Option Data
      2      16   hopopt.qs.func         Function (0/8)
      2      20   hopopt.qs.rate         Rate Request / Report (in Kbps)
      3      24   hopopt.qs.ttl          QS TTL / None
      4      32   hopopt.qs.nounce       QS Nounce
      7      62   -                      Reserved
4.750989
3.774642
1.25866
_type = self._read_opt_type(code)
_size = self._read_unpack(1)
if _size < 4:
    raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')
_flag = self._read_binary(1)
_rpld = self._read_unpack(1)
_rank = self._read_unpack(2)

opt = dict(
    desc=desc,
    type=_type,
    length=_size + 2,
    flags=dict(
        down=True if int(_flag[0], base=2) else False,
        rank_error=True if int(_flag[1], base=2) else False,
        fwd_error=True if int(_flag[2], base=2) else False,
    ),
    id=_rpld,
    rank=_rank,
)

if _size > 4:
    opt['data'] = self._read_fileng(_size-4)

return opt
def _read_opt_rpl(self, code, *, desc)
Read HOPOPT RPL option.

Structure of HOPOPT RPL option [RFC 6553]:
     0                   1                   2                   3
     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Option Type  | Opt Data Len  |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |O|R|F|0|0|0|0|0| RPLInstanceID |          SenderRank           |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                          (sub-TLVs)                           |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name                         Description
      0       0   hopopt.rpl.type              Option Type
      0       0   hopopt.rpl.type.value        Option Number
      0       0   hopopt.rpl.type.action       Action (01)
      0       2   hopopt.rpl.type.change       Change Flag (1)
      1       8   hopopt.rpl.length            Length of Option Data
      2      16   hopopt.rpl.flags             RPL Option Flags
      2      16   hopopt.rpl.flags.down        Down Flag
      2      17   hopopt.rpl.flags.rank_error  Rank-Error Flag
      2      18   hopopt.rpl.flags.fwd_error   Forwarding-Error Flag
      3      24   hopopt.rpl.id                RPLInstanceID
      4      32   hopopt.rpl.rank              SenderRank
      6      48   hopopt.rpl.data              Sub-TLVs
3.925911
2.985913
1.314811
_type = self._read_opt_type(code)
_size = self._read_unpack(1)
_nval = self._read_fileng(_size)

opt = dict(
    desc=desc,
    type=_type,
    length=_size + 2,
    value=_nval,
)
return opt
def _read_opt_ilnp(self, code, *, desc)
Read HOPOPT ILNP Nonce option.

Structure of HOPOPT ILNP Nonce option [RFC 6744]:
     0                   1                   2                   3
     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Next Header  |  Hdr Ext Len  |  Option Type  | Option Length |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    /                          Nonce Value                          /
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name                     Description
      0       0   hopopt.ilnp.type         Option Type
      0       0   hopopt.ilnp.type.value   Option Number
      0       0   hopopt.ilnp.type.action  Action (10)
      0       2   hopopt.ilnp.type.change  Change Flag (0)
      1       8   hopopt.ilnp.length       Length of Option Data
      2      16   hopopt.ilnp.value        Nonce Value
4.927574
4.588296
1.073944
_type = self._read_opt_type(code)
_size = self._read_unpack(1)
_llen = self._read_unpack(1)
_line = self._read_fileng(_llen)

opt = dict(
    desc=desc,
    type=_type,
    length=_size + 2,
    lid_len=_llen,
    lid=_line,
)

_plen = _size - _llen
if _plen:
    self._read_fileng(_plen)

return opt
def _read_opt_lio(self, code, *, desc)
Read HOPOPT Line-Identification option.

Structure of HOPOPT Line-Identification option [RFC 6788]:
     0                   1                   2                   3
     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Option Type  | Option Length |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |   LineIDLen   |    Line ID...
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name                    Description
      0       0   hopopt.lio.type         Option Type
      0       0   hopopt.lio.type.value   Option Number
      0       0   hopopt.lio.type.action  Action (10)
      0       2   hopopt.lio.type.change  Change Flag (0)
      1       8   hopopt.lio.length       Length of Option Data
      2      16   hopopt.lio.lid_len      Line ID Length
      3      24   hopopt.lio.lid          Line ID
4.246282
3.595422
1.181025
_type = self._read_opt_type(code)
_size = self._read_unpack(1)
if _size != 4:
    raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')
_jlen = self._read_unpack(4)

opt = dict(
    desc=desc,
    type=_type,
    length=_size + 2,
    payload_len=_jlen,
)
return opt
def _read_opt_jumbo(self, code, *, desc)
Read HOPOPT Jumbo Payload option.

Structure of HOPOPT Jumbo Payload option [RFC 2675]:
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Option Type  | Opt Data Len  |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                     Jumbo Payload Length                      |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name                      Description
      0       0   hopopt.jumbo.type         Option Type
      0       0   hopopt.jumbo.type.value   Option Number
      0       0   hopopt.jumbo.type.action  Action (11)
      0       2   hopopt.jumbo.type.change  Change Flag (0)
      1       8   hopopt.jumbo.length       Length of Option Data
      2      16   hopopt.jumbo.payload_len  Jumbo Payload Length
5.57259
5.102577
1.092113
_type = self._read_opt_type(code)
_size = self._read_unpack(1)
if _size != 16:
    raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')
_addr = self._read_fileng(16)

opt = dict(
    desc=desc,
    type=_type,
    length=_size + 2,
    ip=ipaddress.ip_address(_addr),
)
return opt
def _read_opt_home(self, code, *, desc)
Read HOPOPT Home Address option.

Structure of HOPOPT Home Address option [RFC 6275]:
     0                   1                   2                   3
     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Option Type  | Option Length |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                                                               |
    +                                                               +
    |                          Home Address                         |
    +                          (16 octets)                          +
    |                                                               |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name                     Description
      0       0   hopopt.home.type         Option Type
      0       0   hopopt.home.type.value   Option Number
      0       0   hopopt.home.type.action  Action (11)
      0       2   hopopt.home.type.change  Change Flag (0)
      1       8   hopopt.home.length       Length of Option Data
      2      16   hopopt.home.ip           Home Address
5.594055
4.892287
1.143444
_type = self._read_opt_type(code)
_size = self._read_unpack(1)
if _size != 2:
    raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')
_verf = self._read_binary(1)
_seqn = self._read_unpack(2)

opt = dict(
    desc=desc,
    type=_type,
    length=_size + 2,
    version=_verf[:2],
    flags=dict(
        dup=True if int(_verf[2], base=2) else False,
        ret=True if int(_verf[3], base=2) else False,
    ),
    seq=_seqn,
)
return opt
def _read_opt_ip_dff(self, code, *, desc)
Read HOPOPT IP_DFF option.

Structure of HOPOPT IP_DFF option [RFC 6971]:
                         1                   2                   3
     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Next Header  |  Hdr Ext Len  |  OptTypeDFF   | OptDataLenDFF |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |VER|D|R|0|0|0|0|        Sequence Number        |      Pad1     |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name                       Description
      0       0   hopopt.ip_dff.type         Option Type
      0       0   hopopt.ip_dff.type.value   Option Number
      0       0   hopopt.ip_dff.type.action  Action (11)
      0       2   hopopt.ip_dff.type.change  Change Flag (1)
      1       8   hopopt.ip_dff.length       Length of Option Data
      2      16   hopopt.ip_dff.version      Version
      2      18   hopopt.ip_dff.flags        Flags
      2      18   hopopt.ip_dff.flags.dup    DUP Flag
      2      19   hopopt.ip_dff.flags.ret    RET Flag
      2      20   -                          Reserved
      3      24   hopopt.ip_dff.seq          Sequence Number
4.054116
3.514278
1.153613
if isinstance(key, int):
    return Socket(key)
if key not in Socket._member_map_:
    extend_enum(Socket, key, default)
return Socket[key]
def get(key, default=-1)
Backport support for original codes.
7.23515
6.355645
1.138382
if not (isinstance(value, int) and 0x0000 <= value <= 0xFFFF):
    raise ValueError('%r is not a valid %s' % (value, cls.__name__))
if 0x0001 <= value <= 0x0BB8:
    extend_enum(cls, 'Registered by Xerox [0x%s]' % hex(value)[2:].upper().zfill(4), value)
    return cls(value)
if 0x0020 <= value <= 0x003F:
    extend_enum(cls, 'Experimental [0x%s]' % hex(value)[2:].upper().zfill(4), value)
    return cls(value)
if 0x0BB9 <= value <= 0xFFFF:
    extend_enum(cls, 'Dynamically Assigned [0x%s]' % hex(value)[2:].upper().zfill(4), value)
    return cls(value)
if 0x4000 <= value <= 0x4FFF:
    extend_enum(cls, 'Dynamically Assigned Socket Numbers [0x%s]' % hex(value)[2:].upper().zfill(4), value)
    return cls(value)
if 0x8000 <= value <= 0xFFFF:
    extend_enum(cls, 'Statically Assigned Socket Numbers [0x%s]' % hex(value)[2:].upper().zfill(4), value)
    return cls(value)
super()._missing_(value)
def _missing_(cls, value)
Lookup function used when value is not found.
2.101117
2.129403
0.986716
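The `_missing_` hook pairs naturally with `extend_enum`: the enum registers an unknown value on first lookup, so subsequent lookups hit the member map. A minimal self-contained sketch of this pattern (the member and label are made up):

    from aenum import IntEnum, extend_enum

    class Socket(IntEnum):
        # hypothetical registry entry
        ROUTING_INFORMATION = 0x0001

        @classmethod
        def _missing_(cls, value):
            if not (isinstance(value, int) and 0x0000 <= value <= 0xFFFF):
                raise ValueError('%r is not a valid %s' % (value, cls.__name__))
            # register the unknown value so the retry below resolves normally
            extend_enum(cls, 'Unassigned [0x%04X]' % value, value)
            return cls(value)

    print(Socket(0x0001))  # Socket.ROUTING_INFORMATION
    print(Socket(0x4242))  # dynamically registered 'Unassigned [0x4242]' member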
if length is None:
    length = len(self)

_next = self._read_protos(1)
_hlen = self._read_unpack(1)
# _opts = self._read_fileng(_hlen*8+6)

ipv6_opts = dict(
    next=_next,
    length=(_hlen + 1) * 8,
)

options = self._read_ipv6_opts_options(_hlen * 8 + 6)
ipv6_opts['options'] = options[0]   # tuple of option acronyms
ipv6_opts.update(options[1])        # merge option info to buffer

length -= ipv6_opts['length']
ipv6_opts['packet'] = self._read_packet(header=ipv6_opts['length'], payload=length)

if extension:
    self._protos = None
    return ipv6_opts
return self._decode_next_layer(ipv6_opts, _next, length)
def read_ipv6_opts(self, length, extension)
Read Destination Options for IPv6.

Structure of IPv6-Opts header [RFC 8200]:
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Next Header  |  Hdr Ext Len  |                               |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+                               +
    |                                                               |
    .                            Options                            .
    |                                                               |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name         Description
      0       0   opt.next     Next Header
      1       8   opt.length   Header Extensive Length
      2      16   opt.options  Options
4.874895
4.643316
1.049874
bin_ = bin(kind)[2:].zfill(8)

type_ = dict(
    value=kind,
    action=_IPv6_Opts_ACT.get(bin_[:2]),
    change=True if int(bin_[2], base=2) else False,
)
return type_
def _read_opt_type(self, kind)
Read option type field.

Positional arguments:
    * kind -- int, option kind value

Returns:
    * dict -- extracted IPv6_Opts option

Structure of option type field [RFC 791]:
    Octets  Bits  Name                       Descriptions
      0       0   ipv6_opts.opt.type.value   Option Number
      0       0   ipv6_opts.opt.type.action  Action (00-11)
      0       2   ipv6_opts.opt.type.change  Change Flag (0/1)
9.013609
4.986885
1.807463
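The two high-order bits of an IPv6 option type encode what a node should do with an unrecognised option, and the third bit flags whether the option data may change en route (RFC 8200 section 4.2). A standalone sketch of the same bit-slicing, using the well-known Jumbo Payload type 0xC2 as the example:

    kind = 0xC2                    # Jumbo Payload option type
    bits = bin(kind)[2:].zfill(8)  # '11000010'
    print(bits[:2])                # '11' -> discard; ICMP Parameter Problem unless multicast
    print(bool(int(bits[2], 2)))   # False -> option data does not change en route
    print(int(bits[3:], 2))        # 2 -> remaining five bits of the option type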
counter = 0         # length of read options
optkind = list()    # option type list
options = dict()    # dict of option data

while counter < length:
    # break when eol triggered
    code = self._read_unpack(1)
    if not code:
        break

    # extract parameter
    abbr, desc = _IPv6_Opts_OPT.get(code, ('None', 'Unassigned'))
    data = _IPv6_Opts_PROC(abbr)(self, code, desc=desc)
    enum = _OPT_TYPE.get(code)

    # record parameter data
    counter += data['length']
    if enum in optkind:
        if isinstance(options[abbr], tuple):
            options[abbr] += (Info(data),)
        else:
            options[abbr] = (Info(options[abbr]), Info(data))
    else:
        optkind.append(enum)
        options[abbr] = data

# check threshold
if counter != length:
    raise ProtocolError(f'{self.alias}: invalid format')

return tuple(optkind), options
def _read_ipv6_opts_options(self, length)
Read IPv6_Opts options.

Positional arguments:
    * length -- int, length of options

Returns:
    * dict -- extracted IPv6_Opts options
6.335976
6.269453
1.010611
_type = self._read_opt_type(code)
_size = self._read_unpack(1)
_data = self._read_fileng(_size)

opt = dict(
    desc=_IPv6_Opts_NULL.get(code, desc),
    type=_type,
    length=_size + 2,
    data=_data,
)
return opt
def _read_opt_none(self, code, *, desc)
Read IPv6_Opts unassigned options.

Structure of IPv6_Opts unassigned options [RFC 8200]:
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- - - - - - - - -
    |  Option Type  | Opt Data Len  |  Option Data
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- - - - - - - - -

    Octets  Bits  Name                       Description
      0       0   ipv6_opts.opt.type         Option Type
      0       0   ipv6_opts.opt.type.value   Option Number
      0       0   ipv6_opts.opt.type.action  Action (00-11)
      0       2   ipv6_opts.opt.type.change  Change Flag (0/1)
      1       8   ipv6_opts.opt.length       Length of Option Data
      2      16   ipv6_opts.opt.data         Option Data
6.104019
5.212582
1.171016
_type = self._read_opt_type(code)
_size = self._read_unpack(1)
if _size < 2:
    raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')
_smvr = self._read_binary(1)
_seqn = self._read_unpack(1)

opt = dict(
    desc=desc,
    type=_type,
    length=_size + 2,
    seed_len=_IPv6_Opts_SEED.get(int(_smvr[:2], base=2)),
    flags=dict(
        max=True if int(_smvr[2], base=2) else False,
        verification=True if int(_smvr[3], base=2) else False,
    ),
    seq=_seqn,
)

_kind = _smvr[:2]
if _kind == '00':
    if _size != 2:
        raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')
elif _kind == '01':
    if _size != 4:
        raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')
    opt['seed_id'] = self._read_unpack(2)
elif _kind == '10':
    if _size != 10:
        raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')
    opt['seed_id'] = self._read_unpack(8)
elif _kind == '11':
    if _size != 18:
        raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format')
    opt['seed_id'] = self._read_unpack(16)
else:
    opt['seed_id'] = self._read_unpack(_size-2)

_plen = _size - opt['seed_len']
if _plen:
    self._read_fileng(_plen)

return opt
def _read_opt_mpl(self, code, *, desc)
Read IPv6_Opts MPL option.

Structure of IPv6_Opts MPL option [RFC 7731]:
     0                   1                   2                   3
     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Option Type  | Opt Data Len  |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    | S |M|V|  rsv  |   sequence    |      seed-id (optional)       |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name                        Description
      0       0   ipv6_opts.mpl.type          Option Type
      0       0   ipv6_opts.mpl.type.value    Option Number
      0       0   ipv6_opts.mpl.type.action   Action (01)
      0       2   ipv6_opts.mpl.type.change   Change Flag (1)
      1       8   ipv6_opts.mpl.length        Length of Option Data
      2      16   ipv6_opts.mpl.seed_len      Seed-ID Length
      2      18   ipv6_opts.mpl.flags         MPL Option Flags
      2      18   ipv6_opts.mpl.max           Maximum SEQ Flag
      2      19   ipv6_opts.mpl.verification  Verification Flag
      2      20   -                           Reserved
      3      24   ipv6_opts.mpl.seq           Sequence
      4      32   ipv6_opts.mpl.seed_id       Seed-ID
2.694194
2.370979
1.136321
if isinstance(key, int):
    return ReturnCode(key)
if key not in ReturnCode._member_map_:
    extend_enum(ReturnCode, key, default)
return ReturnCode[key]
def get(key, default=-1)
Backport support for original codes.
5.292536
4.058308
1.304124
if not (isinstance(value, int) and 100 <= value <= 659):
    raise ValueError('%r is not a valid %s' % (value, cls.__name__))
code = str(value)
kind = KIND.get(code[0], 'Reserved')
info = INFO.get(code[1], 'Reserved')
extend_enum(cls, '%s - %s [%s]' % (kind, info, value), value)
return cls(value)
def _missing_(cls, value)
Lookup function used when value is not found.
4.663933
4.734315
0.985134
if isinstance(key, int):
    return Cipher(key)
if key not in Cipher._member_map_:
    extend_enum(Cipher, key, default)
return Cipher[key]
def get(key, default=-1)
Backport support for original codes.
7.613652
6.666433
1.142088
if isinstance(key, int):
    return Packet(key)
if key not in Packet._member_map_:
    extend_enum(Packet, key, default)
return Packet[key]
def get(key, default=-1)
Backport support for original codes.
6.580026
5.906434
1.114044
if isinstance(key, int):
    return Option(key)
if key not in Option._member_map_:
    extend_enum(Option, key, default)
return Option[key]
def get(key, default=-1)
Backport support for original codes.
6.4982
5.738735
1.13234
if isinstance(key, int):
    return ExtensionHeader(key)
if key not in ExtensionHeader._member_map_:
    extend_enum(ExtensionHeader, key, default)
return ExtensionHeader[key]
def get(key, default=-1)
Backport support for original codes.
6.73315
5.950639
1.1315
if isinstance(key, int):
    return OptionClass(key)
if key not in OptionClass._member_map_:
    extend_enum(OptionClass, key, default)
return OptionClass[key]
def get(key, default=-1)
Backport support for original codes.
6.125593
5.209102
1.17594
if length is None:
    length = len(self)

_next = self._read_protos(1)
_hlen = self._read_unpack(1)
_type = self._read_unpack(1)
_left = self._read_unpack(1)

ipv6_route = dict(
    next=_next,
    length=(_hlen + 1) * 8,
    type=_ROUTING_TYPE.get(_type, 'Unassigned'),
    seg_left=_left,
)

_dlen = _hlen * 8 - 4
if _dlen:
    _func = _ROUTE_PROC.get(_type, 'none')
    _data = eval(f'self._read_data_type_{_func}')(_dlen)
    ipv6_route.update(_data)

length -= ipv6_route['length']
ipv6_route['packet'] = self._read_packet(header=ipv6_route['length'], payload=length)

if extension:
    self._protos = None
    return ipv6_route
return self._decode_next_layer(ipv6_route, _next, length)
def read_ipv6_route(self, length, extension)
Read Routing Header for IPv6.

Structure of IPv6-Route header [RFC 8200][RFC 5095]:
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Next Header  |  Hdr Ext Len  |  Routing Type | Segments Left |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                                                               |
    .                       type-specific data                      .
    |                                                               |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name            Description
      0       0   route.next      Next Header
      1       8   route.length    Header Extensive Length
      2      16   route.type      Routing Type
      3      24   route.seg_left  Segments Left
      4      32   route.data      Type-Specific Data
4.232745
3.734671
1.133365
_data = self._read_fileng(length)

data = dict(
    data=_data,
)
return data
def _read_data_type_none(self, length)
Read IPv6-Route unknown type data.

Structure of IPv6-Route unknown type data [RFC 8200][RFC 5095]:
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Next Header  |  Hdr Ext Len  |  Routing Type | Segments Left |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                                                               |
    .                       type-specific data                      .
    |                                                               |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name            Description
      0       0   route.next      Next Header
      1       8   route.length    Header Extensive Length
      2      16   route.type      Routing Type
      3      24   route.seg_left  Segments Left
      4      32   route.data      Type-Specific Data
9.383138
11.107842
0.844731
_resv = self._read_fileng(4)
_addr = list()
for _ in range((length - 4) // 16):
    _addr.append(ipaddress.ip_address(self._read_fileng(16)))

data = dict(
    ip=tuple(_addr),
)
return data
def _read_data_type_src(self, length)
Read IPv6-Route Source Route data.

Structure of IPv6-Route Source Route data [RFC 5095]:
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Next Header  |  Hdr Ext Len  | Routing Type=0| Segments Left |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                            Reserved                           |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                                                               |
    +                           Address[1]                          +
    |                          (16 octets)                          |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    .                               .                               .
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                                                               |
    +                           Address[n]                          +
    |                          (16 octets)                          |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name            Description
      0       0   route.next      Next Header
      1       8   route.length    Header Extensive Length
      2      16   route.type      Routing Type
      3      24   route.seg_left  Segments Left
      4      32   -               Reserved
      8      64   route.ip        Address
6.214224
4.89537
1.269409
if length != 20:
    raise ProtocolError(f'{self.alias}: [Typeno 2] invalid format')
_resv = self._read_fileng(4)
_home = self._read_fileng(16)

data = dict(
    ip=ipaddress.ip_address(_home),
)
return data
def _read_data_type_2(self, length)
Read IPv6-Route Type 2 data.

Structure of IPv6-Route Type 2 data [RFC 6275]:
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Next Header  | Hdr Ext Len=2 | Routing Type=2|Segments Left=1|
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                            Reserved                           |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                                                               |
    +                          Home Address                         +
    |                          (16 octets)                          |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name            Description
      0       0   route.next      Next Header
      1       8   route.length    Header Extensive Length
      2      16   route.type      Routing Type
      3      24   route.seg_left  Segments Left
      4      32   -               Reserved
      8      64   route.ip        Home Address
9.579268
7.896957
1.213033
_cmpr = self._read_binary(1)
_padr = self._read_binary(1)
_resv = self._read_fileng(2)
_inti = int(_cmpr[:4], base=2)
_inte = int(_cmpr[4:], base=2)
_plen = int(_padr[:4], base=2)
_ilen = 16 - _inti
_elen = 16 - _inte

_addr = list()
for _ in range(((length - 4) - _elen - _plen) // _ilen):
    _addr.append(ipaddress.ip_address(self._read_fileng(_ilen)))
_addr.append(ipaddress.ip_address(self._read_fileng(_elen)))
_pads = self._read_fileng(_plen)

data = dict(
    cmpri=_inti,
    cmpre=_inte,
    pad=_plen,
    ip=tuple(_addr),
)
return data
def _read_data_type_rpl(self, length)
Read IPv6-Route RPL Source data.

Structure of IPv6-Route RPL Source data [RFC 6554]:
     0                   1                   2                   3
     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |  Next Header  |  Hdr Ext Len  |  Routing Type | Segments Left |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    | CmprI | CmprE |  Pad  |               Reserved                |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                                                               |
    .                        Addresses[1..n]                        .
    |                                                               |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

    Octets  Bits  Name            Description
      0       0   route.next      Next Header
      1       8   route.length    Header Extensive Length
      2      16   route.type      Routing Type
      3      24   route.seg_left  Segments Left
      4      32   route.cmpri     CmprI
      4      36   route.cpmre     CmprE
      5      40   route.pad       Pad Size
      5      44   -               Reserved
      8      64   route.ip        Addresses
3.998801
3.493134
1.144761
if length is None:
    length = len(self)

packet = self._file.read(length)
try:
    header, body = packet.split(b'\r\n\r\n', 1)
except ValueError:
    raise ProtocolError('HTTP: invalid format', quiet=True)

header_unpacked, http_receipt = self._read_http_header(header)
body_unpacked = self._read_http_body(body) or None

http = dict(
    receipt=http_receipt,
    header=header_unpacked,
    body=body_unpacked,
    raw=dict(
        header=header,
        body=body,
        packet=self._read_packet(length),
    ),
)
self.__receipt__ = http_receipt
return http
def read_http(self, length)
Read Hypertext Transfer Protocol (HTTP/1.*).

Structure of HTTP/1.* packet [RFC 7230]:
    HTTP-message :==: start-line
                      *( header-field CRLF )
                      CRLF
                      [ message-body ]
3.469598
3.608575
0.961487
try:
    startline, headerfield = header.split(b'\r\n', 1)
    para1, para2, para3 = re.split(rb'\s+', startline, 2)
    fields = headerfield.split(b'\r\n')
    lists = (re.split(rb'\s*:\s*', field, 1) for field in fields)
except ValueError:
    raise ProtocolError('HTTP: invalid format', quiet=True)

match1 = re.match(_RE_METHOD, para1)
match2 = re.match(_RE_VERSION, para3)
match3 = re.match(_RE_VERSION, para1)
match4 = re.match(_RE_STATUS, para2)
if match1 and match2:
    receipt = 'request'
    header = dict(
        request=dict(
            method=self.decode(para1),
            target=self.decode(para2),
            version=self.decode(match2.group('version')),
        ),
    )
elif match3 and match4:
    receipt = 'response'
    header = dict(
        response=dict(
            version=self.decode(match3.group('version')),
            status=int(para2),
            phrase=self.decode(para3),
        ),
    )
else:
    raise ProtocolError('HTTP: invalid format', quiet=True)

try:
    for item in lists:
        key = self.decode(item[0].strip()).replace(receipt, f'{receipt}_field')
        value = self.decode(item[1].strip())
        if key in header:
            if isinstance(header[key], tuple):
                header[key] += (value,)
            else:
                header[key] = (header[key], value)
        else:
            header[key] = value
except IndexError:
    raise ProtocolError('HTTP: invalid format', quiet=True)

return header, receipt
def _read_http_header(self, header)
Read HTTP/1.* header.

Structure of HTTP/1.* header [RFC 7230]:
    start-line   :==: request-line / status-line
    request-line :==: method SP request-target SP HTTP-version CRLF
    status-line  :==: HTTP-version SP status-code SP reason-phrase CRLF
    header-field :==: field-name ":" OWS field-value OWS
2.534997
2.454443
1.032819
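For reference, the start-line and header-field splits above behave like this on a minimal request; a standalone sketch using the same split logic (the sample request is made up):

    import re

    header = b'GET /index.html HTTP/1.1\r\nHost: example.com\r\nAccept: */*'
    startline, headerfield = header.split(b'\r\n', 1)
    method, target, version = re.split(rb'\s+', startline, 2)
    fields = dict(re.split(rb'\s*:\s*', f, 1) for f in headerfield.split(b'\r\n'))
    print(method, target, version)  # b'GET' b'/index.html' b'HTTP/1.1'
    print(fields[b'Host'])          # b'example.com'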
lat_centroid, lng_centroid, lat_offset, lng_offset = geohash.decode_exactly(geo)

corner_1 = (lat_centroid - lat_offset, lng_centroid - lng_offset)[::-1]
corner_2 = (lat_centroid - lat_offset, lng_centroid + lng_offset)[::-1]
corner_3 = (lat_centroid + lat_offset, lng_centroid + lng_offset)[::-1]
corner_4 = (lat_centroid + lat_offset, lng_centroid - lng_offset)[::-1]

return geometry.Polygon([corner_1, corner_2, corner_3, corner_4, corner_1])
def geohash_to_polygon(geo)
:param geo: String that represents the geohash.
:return: A Shapely Polygon instance that represents the geohash cell.
1.782357
1.845361
0.965858
inner_geohashes = set()
outer_geohashes = set()

envelope = polygon.envelope
centroid = polygon.centroid

testing_geohashes = queue.Queue()
testing_geohashes.put(geohash.encode(centroid.y, centroid.x, precision))

while not testing_geohashes.empty():
    current_geohash = testing_geohashes.get()

    if current_geohash not in inner_geohashes and current_geohash not in outer_geohashes:
        current_polygon = geohash_to_polygon(current_geohash)

        condition = envelope.contains(current_polygon) if inner else envelope.intersects(current_polygon)
        if condition:
            if inner:
                if polygon.contains(current_polygon):
                    inner_geohashes.add(current_geohash)
                else:
                    outer_geohashes.add(current_geohash)
            else:
                if polygon.intersects(current_polygon):
                    inner_geohashes.add(current_geohash)
                else:
                    outer_geohashes.add(current_geohash)
            for neighbor in geohash.neighbors(current_geohash):
                if neighbor not in inner_geohashes and neighbor not in outer_geohashes:
                    testing_geohashes.put(neighbor)

return inner_geohashes
def polygon_to_geohashes(polygon, precision, inner=True)
:param polygon: Shapely polygon.
:param precision: int. Precision of the geohashes that form the resulting polygon.
:param inner: bool, default True. If True, only geohashes completely contained in the polygon are returned; if False, geohashes that intersect the polygon's boundary are also included (only geohashes completely outside the polygon are ignored).
:return: set. Set of geohashes that form the polygon.
1.802727
1.804345
0.999104
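A round-trip usage sketch for the two helpers above, assuming the python-geohash and shapely packages are installed (the coordinates are arbitrary):

    from shapely import geometry

    # a small lon/lat square near (6.05, 52.05)
    square = geometry.Polygon([(6.0, 52.0), (6.1, 52.0), (6.1, 52.1), (6.0, 52.1)])

    inner = polygon_to_geohashes(square, 6)               # geohashes fully inside
    cover = polygon_to_geohashes(square, 6, inner=False)  # geohashes touching the square
    assert all(square.contains(geohash_to_polygon(g)) for g in inner)
    assert inner <= cover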
#
# Ogg header
#

# check fields of Ogg page header
chunk = file.read(OGG_FIRST_PAGE_HEADER.size)
first_ogg_page = bytearray()
first_ogg_page.extend(chunk)
if len(chunk) < OGG_FIRST_PAGE_HEADER.size:
    logger().error("Not enough bytes in Ogg page header: %u, expected at least %u"
                   % (len(chunk), OGG_FIRST_PAGE_HEADER.size))
    return
capture_pattern, version, header_type, granule_position, bitstream_serial_number, \
    page_sequence_number, crc_checksum, page_segments = OGG_FIRST_PAGE_HEADER.unpack(chunk)
if capture_pattern != b"OggS":
    logger().error("Invalid OGG capture pattern: %s, expected '%s'" % (repr(capture_pattern), "OggS"))
    return
if version != 0:
    logger().error("Invalid OGG version: %u, expected %u" % (version, 0))
    return
if header_type != 2:  # should be first page of stream
    logger().error("Invalid OGG page header type: %u, expected %u" % (header_type, 2))
    return
if page_sequence_number != 0:
    logger().error("Invalid OGG page sequence number: %u, expected %u" % (page_sequence_number, 0))
    return
segment_table_fmt = struct.Struct("<%uB" % (page_segments))
chunk = file.read(segment_table_fmt.size)
first_ogg_page.extend(chunk)
if len(chunk) < segment_table_fmt.size:
    logger().error("Not enough bytes for OGG segment table: %u, expected at least %u"
                   % (len(chunk), segment_table_fmt.size))
    return
segment_table = segment_table_fmt.unpack(chunk)

# check crc of first page
first_ogg_page_size = OGG_FIRST_PAGE_HEADER.size + segment_table_fmt.size + sum(segment_table)
chunk = file.read(sum(segment_table))
first_ogg_page.extend(chunk)
if len(first_ogg_page) < first_ogg_page_size:
    logger().error("Not enough bytes for first OGG page: %u, expected at least %u"
                   % (len(first_ogg_page), first_ogg_page_size))
    return
computed_crc = _compute_ogg_page_crc(first_ogg_page)
if computed_crc != crc_checksum:
    logger().error("Invalid OGG page CRC: 0x%08x, expected 0x%08x" % (crc_checksum, computed_crc))
    return

#
# Opus header
#
chunk = first_ogg_page[OGG_FIRST_PAGE_HEADER.size + segment_table_fmt.size:][:segment_table[0]]
if len(chunk) < OGG_OPUS_ID_HEADER.size:
    logger().error("Not enough bytes for Opus Identification header: %u, expected at least %u"
                   % (len(chunk), OGG_OPUS_ID_HEADER.size))
    return
magic, version, channel_count, preskip, input_samplerate, output_gain, \
    mapping_family = OGG_OPUS_ID_HEADER.unpack(chunk[:OGG_OPUS_ID_HEADER.size])
if magic != b"OpusHead":
    logger().error("Invalid Opus magic number: %s, expected '%s'" % (repr(magic), "OpusHead"))
    return
if (version >> 4) != 0:
    logger().error("Invalid Opus version: 0x%x, expected 0x0-0xf" % (version))
    return

# seek to Opus header
file.seek(OGG_FIRST_PAGE_HEADER.size + segment_table_fmt.size)

return output_gain
def parse_oggopus_output_gain(file)
Parse the headers of an OggOpus file, return its output gain, and leave the file seek position at the start of the Opus header.
2.218217
2.173548
1.020551
opus_header_pos = file.tell()

# write Opus header with new gain
file.seek(opus_header_pos + OGG_OPUS_ID_HEADER_GAIN_OFFSET)
file.write(OGG_OPUS_ID_HEADER_GAIN.pack(new_output_gain))

# compute page crc
file.seek(0)
page = file.read(opus_header_pos + OGG_OPUS_ID_HEADER.size)
computed_crc = _compute_ogg_page_crc(page)

# write CRC
file.seek(OGG_FIRST_PAGE_HEADER_CRC_OFFSET)
file.write(OGG_FIRST_PAGE_HEADER_CRC.pack(computed_crc))
def write_oggopus_output_gain(file, new_output_gain)
Write a new output gain into the Opus header of a file and update the first Ogg page's CRC. file must be an object previously processed successfully by parse_oggopus_output_gain.
3.137673
3.009193
1.042696
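A hedged round-trip sketch of the two helpers above; 'file.opus' is a hypothetical path, and the file must be opened for binary read/write so the in-place header update works:

with open("file.opus", "r+b") as f:
    old_gain = parse_oggopus_output_gain(f)  # also positions f at the Opus header
    # The Opus output gain is a signed Q7.8 fixed-point dB value: +1 dB == 256.
    write_oggopus_output_gain(f, old_gain + 256)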
page_zero_crc = page[:OGG_FIRST_PAGE_HEADER_CRC_OFFSET] + \
                b"\00" * OGG_FIRST_PAGE_HEADER_CRC.size + \
                page[OGG_FIRST_PAGE_HEADER_CRC_OFFSET + OGG_FIRST_PAGE_HEADER_CRC.size:]
return ogg_page_crc(page_zero_crc)
def _compute_ogg_page_crc(page)
Compute CRC of an Ogg page.
3.721324
3.681761
1.010746
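The helper zeroes the 4-byte CRC field before delegating to an ogg_page_crc primitive defined elsewhere in the module. For reference, the Ogg checksum is a non-reflected CRC-32 with polynomial 0x04C11DB7, zero initial value and no final XOR; a slow but self-contained sketch:

def ogg_page_crc_reference(data):
    # Bitwise CRC-32: polynomial 0x04C11DB7, init 0, no reflection, no final XOR.
    crc = 0
    for byte in data:
        crc ^= byte << 24
        for _ in range(8):
            if crc & 0x80000000:
                crc = ((crc << 1) ^ 0x04C11DB7) & 0xFFFFFFFF
            else:
                crc = (crc << 1) & 0xFFFFFFFF
    return crc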
r = collections.OrderedDict()
cmd = (ffmpeg_path or "ffmpeg", "-version")
output = subprocess.run(cmd,
                        check=True,
                        stdout=subprocess.PIPE,
                        universal_newlines=True).stdout
output = output.splitlines()
# raw string with escaped dots, so '.' matches only a literal dot
lib_version_regex = re.compile(r"^\s*(lib[a-z]+)\s+([0-9]+)\.\s*([0-9]+)\.\s*([0-9]+)\s+")
for line in output:
    match = lib_version_regex.search(line)
    if match:
        lib_name, *lib_version = match.group(1, 2, 3, 4)
        int_lib_version = 0
        # shift starts at bit 8, leaving the lowest byte zero as in sys.hexversion
        for i, d in enumerate(map(int, reversed(lib_version)), 1):
            int_lib_version |= d << (8 * i)
        r[lib_name] = int_lib_version
return r
def get_ffmpeg_lib_versions(ffmpeg_path=None)
Get FFmpeg library versions as 32-bit integers, in the same format as sys.hexversion. Example: 0x3040100 for FFmpeg 3.4.1
2.589715
2.545826
1.01724
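Since the integers follow the sys.hexversion layout, version requirements reduce to integer comparisons; a usage sketch (the threshold is illustrative):

versions = get_ffmpeg_lib_versions()
# Require at least libavfilter 6.82.100, i.e. (6 << 24) | (82 << 16) | (100 << 8).
if versions.get("libavfilter", 0) >= (6 << 24) | (82 << 16) | (100 << 8):
    print("new enough libavfilter")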
return "%s=%s" % (name, ":".join("%s=%s" % (k, v) for k, v in params.items()))
def format_ffmpeg_filter(name, params)
Build an FFmpeg filter string from a filter name and a mapping of parameters.
3.776892
3.49734
1.079933
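For example (relies on dict insertion order, so Python 3.7+ or an OrderedDict):

assert format_ffmpeg_filter("loudnorm", {"I": -16, "TP": -1.5}) == "loudnorm=I=-16:TP=-1.5"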
r128_data = {}

with contextlib.ExitStack() as cm:
    if executor is None:
        if thread_count is None:
            try:
                thread_count = len(os.sched_getaffinity(0))
            except AttributeError:
                thread_count = os.cpu_count()
        enable_ffmpeg_threading = thread_count > (len(audio_filepaths) + int(album_gain))
        executor = cm.enter_context(concurrent.futures.ThreadPoolExecutor(max_workers=thread_count))
        asynchronous = False
    else:
        enable_ffmpeg_threading = False
        asynchronous = True

    loudness_tags = tuple(map(has_loudness_tag, audio_filepaths))

    # remove invalid files
    audio_filepaths = tuple(audio_filepath
                            for (audio_filepath, has_tags) in zip(audio_filepaths, loudness_tags)
                            if has_tags is not None)
    loudness_tags = tuple(filter(None, loudness_tags))

    futures = {}
    if album_gain:
        if skip_tagged and all(map(operator.itemgetter(1), loudness_tags)):
            logger().info("All files already have an album gain tag, skipping album gain scan")
        elif audio_filepaths:
            calc_album_peak = any(map(lambda x: os.path.splitext(x)[-1].lower() != ".opus",
                                      audio_filepaths))
            futures[ALBUM_GAIN_KEY] = executor.submit(get_r128_loudness,
                                                      audio_filepaths,
                                                      calc_peak=calc_album_peak,
                                                      enable_ffmpeg_threading=enable_ffmpeg_threading,
                                                      ffmpeg_path=ffmpeg_path)
    for audio_filepath in audio_filepaths:
        if skip_tagged and has_loudness_tag(audio_filepath)[0]:
            logger().info("File '%s' already has a track gain tag, skipping track gain scan" % (audio_filepath))
            continue
        if os.path.splitext(audio_filepath)[-1].lower() == ".opus":
            # http://www.rfcreader.com/#rfc7845_line1060
            calc_peak = False
        else:
            calc_peak = True
        futures[audio_filepath] = executor.submit(get_r128_loudness,
                                                  (audio_filepath,),
                                                  calc_peak=calc_peak,
                                                  enable_ffmpeg_threading=enable_ffmpeg_threading,
                                                  ffmpeg_path=ffmpeg_path)

    if asynchronous:
        return futures

    for audio_filepath in audio_filepaths:
        try:
            r128_data[audio_filepath] = futures[audio_filepath].result()
        except KeyError:
            # track gain was skipped
            pass
        except Exception as e:
            # raise
            logger().warning("Failed to analyze file '%s': %s %s" % (audio_filepath,
                                                                     e.__class__.__qualname__,
                                                                     e))
    if album_gain and audio_filepaths:
        try:
            r128_data[ALBUM_GAIN_KEY] = futures[ALBUM_GAIN_KEY].result()
        except KeyError:
            # album gain was skipped
            pass
        except Exception as e:
            # raise
            logger().warning("Failed to analyze files %s: %s %s" % (", ".join("'%s'" % (audio_filepath) for audio_filepath in audio_filepaths),
                                                                    e.__class__.__qualname__,
                                                                    e))
return r128_data
def scan(audio_filepaths, *, album_gain=False, skip_tagged=False, thread_count=None, ffmpeg_path=None, executor=None)
Analyze files and return a dictionary mapping filepath to loudness metadata, or mapping filepath to future if executor is not None.
2.526629
2.464951
1.025022
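A synchronous usage sketch with hypothetical paths:

files = ("01 - Intro.opus", "02 - Song.mp3")  # hypothetical paths
r128 = scan(files, album_gain=True, skip_tagged=True)
for key, (loudness, peak) in r128.items():
    print(key, loudness, peak)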
track, album = False, False
try:
    mf = mutagen.File(filepath)
except mutagen.MutagenError as e:
    logger().warning("File '%s' %s: %s" % (filepath,
                                           e.__class__.__qualname__,
                                           e))
    return
if (isinstance(mf.tags, mutagen.id3.ID3) or
        isinstance(mf, mutagen.id3.ID3FileType)):
    track = ("TXXX:REPLAYGAIN_TRACK_GAIN" in mf) and ("TXXX:REPLAYGAIN_TRACK_PEAK" in mf)
    album = ("TXXX:REPLAYGAIN_ALBUM_GAIN" in mf) and ("TXXX:REPLAYGAIN_ALBUM_PEAK" in mf)
elif isinstance(mf, mutagen.oggopus.OggOpus):
    track = "R128_TRACK_GAIN" in mf
    album = "R128_ALBUM_GAIN" in mf
elif (isinstance(mf.tags, (mutagen._vorbis.VComment, mutagen.apev2.APEv2)) or
        isinstance(mf, (mutagen.ogg.OggFileType, mutagen.apev2.APEv2File))):
    track = ("REPLAYGAIN_TRACK_GAIN" in mf) and ("REPLAYGAIN_TRACK_PEAK" in mf)
    album = ("REPLAYGAIN_ALBUM_GAIN" in mf) and ("REPLAYGAIN_ALBUM_PEAK" in mf)
elif (isinstance(mf.tags, mutagen.mp4.MP4Tags) or
        isinstance(mf, mutagen.mp4.MP4)):
    track = ("----:COM.APPLE.ITUNES:REPLAYGAIN_TRACK_GAIN" in mf) and ("----:COM.APPLE.ITUNES:REPLAYGAIN_TRACK_PEAK" in mf)
    album = ("----:COM.APPLE.ITUNES:REPLAYGAIN_ALBUM_GAIN" in mf) and ("----:COM.APPLE.ITUNES:REPLAYGAIN_ALBUM_PEAK" in mf)
else:
    logger().warning("Unhandled '%s' tag format for file '%s'" % (mf.__class__.__name__,
                                                                  filepath))
    return
return track, album
def has_loudness_tag(filepath)
Return a pair of booleans indicating whether filepath carries ReplayGain or R128 track/album tags, or None if the file is invalid.
2.1884
2.065346
1.05958
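A usage sketch; the path is hypothetical:

result = has_loudness_tag("song.opus")  # hypothetical path
if result is None:
    print("invalid or unsupported file")
else:
    track_tagged, album_tagged = result
    print("track tag: %s, album tag: %s" % (track_tagged, album_tagged))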
# track loudness/peak
for audio_filepath in audio_filepaths:
    try:
        loudness, peak = r128_data[audio_filepath]
    except KeyError:
        loudness, peak = "SKIPPED", "SKIPPED"
    else:
        loudness = "%.1f LUFS" % (loudness)
        if peak is None:
            peak = "-"
        else:
            peak = "%.1f dBFS" % (scale_to_gain(peak))
    logger().info("File '%s': loudness = %s, sample peak = %s" % (audio_filepath, loudness, peak))

# album loudness/peak
if album_dir:
    try:
        album_loudness, album_peak = r128_data[ALBUM_GAIN_KEY]
    except KeyError:
        album_loudness, album_peak = "SKIPPED", "SKIPPED"
    else:
        album_loudness = "%.1f LUFS" % (album_loudness)
        if album_peak is None:
            album_peak = "-"
        else:
            album_peak = "%.1f dBFS" % (scale_to_gain(album_peak))
    logger().info("Album '%s': loudness = %s, sample peak = %s" % (album_dir, album_loudness, album_peak))
def show_scan_report(audio_filepaths, album_dir, r128_data)
Display loudness scan results.
2.08046
2.065644
1.007173
install_reqs = parse_requirements(filename=source, session=PipSession())
return [str(ir.req) for ir in install_reqs]
def get_requirements(source)
Get the requirements from the given ``source``

Parameters
----------
source: str
    The filename containing the requirements
2.870065
5.430376
0.52852
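A usage sketch; note the helper relies on pip internals (parse_requirements, PipSession) whose import paths have moved between pip releases:

reqs = get_requirements("requirements.txt")
print(reqs)  # e.g. ['requests>=2.0', 'mutagen']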
return (
    response.status == 503
    # default must be bytes so startswith(b'cloudflare') cannot raise TypeError
    and response.headers.get('Server', b'').startswith(b'cloudflare')
    and 'jschl_vc' in response.text
    and 'jschl_answer' in response.text
)
def is_cloudflare_challenge(response)
Test whether the given response contains Cloudflare's anti-bot challenge
4.167895
3.860279
1.079688
if not self.is_cloudflare_challenge(response):
    return response

logger = logging.getLogger('cloudflaremiddleware')

logger.debug(
    'Cloudflare protection detected on %s, trying to bypass...',
    response.url
)

cloudflare_tokens, __ = get_tokens(
    request.url,
    user_agent=spider.settings.get('USER_AGENT')
)

logger.debug(
    'Successfully bypassed the protection for %s, re-scheduling the request',
    response.url
)

request.cookies.update(cloudflare_tokens)
request.priority = 99999

return request
def process_response(self, request, response, spider)
Handle a Scrapy response, bypassing the Cloudflare challenge if one is detected
4.790618
4.866724
0.984362
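A sketch of how such a downloader middleware is typically enabled in a Scrapy project; the module path and priority value are illustrative:

# settings.py (hypothetical module path and priority)
DOWNLOADER_MIDDLEWARES = {
    'myproject.middlewares.CloudFlareMiddleware': 560,
}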
raise VkCaptchaNeeded(url, sid)
async def enter_captcha(self, url: str, sid: str) -> str
Override this method for processing captcha.

:param url: link to captcha image
:param sid: captcha id. It is unclear why it is passed here, but it may be useful.
:return: captcha value
132.651047
49.374298
2.686642
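The base implementation simply raises, so interactive clients override it; a minimal console-based sketch (the session base class name is an assumption):

class ConsoleSession(ImplicitSession):  # base class name is an assumption
    async def enter_captcha(self, url: str, sid: str) -> str:
        print('Captcha required:', url)
        return input('Enter captcha value: ')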
html = await self._get_auth_page()
url = URL('/authorize?email')
for step in range(self.num_of_attempts):
    if url.path == '/authorize' and 'email' in url.query:
        # Invalid login or password
        url, html = await self._process_auth_form(html)
    if url.path == '/login' and url.query.get('act', '') == 'authcheck':
        # Entering 2auth code
        url, html = await self._process_2auth_form(html)
    if url.path == '/login' and url.query.get('act', '') == 'authcheck_code':
        # Need captcha
        url, html = await self._process_auth_form(html)
    if url.path == '/authorize' and '__q_hash' in url.query:
        # Give rights for app
        url, html = await self._process_access_form(html)
    if url.path == '/blank.html':
        # Success
        self.access_token = url.query['access_token']
        return
raise VkAuthError('Something went wrong', 'Exceeded the number of attempts to log in')
async def authorize(self) -> None
Get a new access token from the server
5.038617
4.912114
1.025753
# Prepare request
params = {
    'client_id': self.app_id,
    'redirect_uri': 'https://oauth.vk.com/blank.html',
    'display': 'mobile',
    'response_type': 'token',
    'v': self.API_VERSION
}
if self.scope:
    params['scope'] = self.scope

# Send request
status, response = await self.driver.get_text(self.AUTH_URL, params)

# Process response
if status != 200:
    error_dict = json.loads(response)
    raise VkAuthError(error_dict['error'], error_dict['error_description'], self.AUTH_URL, params)
return response
async def _get_auth_page(self) -> str
Get the mobile authorization page (the JavaScript-free version).

:return: html page
2.617224
2.649374
0.987865
# Parse page
p = AuthPageParser()
p.feed(html)
p.close()

# Get data from hidden inputs
form_data = dict(p.inputs)
form_url = p.url
form_data['email'] = self.login
form_data['pass'] = self.password
if p.message:
    # Show form errors
    raise VkAuthError('invalid_data', p.message, form_url, form_data)
elif p.captcha_url:
    form_data['captcha_key'] = await self.enter_captcha(
        "https://m.vk.com{}".format(p.captcha_url),
        form_data['captcha_sid']
    )
    form_url = "https://m.vk.com{}".format(form_url)

# Send request
url, html = await self.driver.post_text(form_url, form_data)
return url, html
async def _process_auth_form(self, html: str) -> (str, str)
Parse the authorization page, fill in the login form and submit it.

:param html: html page
:return: url and html from redirected page
3.594407
3.592304
1.000585
# Parse page
p = TwoFactorCodePageParser()
p.feed(html)
p.close()

# Prepare request data
form_url = p.url
form_data = dict(p.inputs)
form_data['remember'] = 0
if p.message:
    raise VkAuthError('invalid_data', p.message, form_url, form_data)
form_data['code'] = await self.enter_confirmation_code()

# Send request
url, html = await self.driver.post_text(form_url, form_data)
return url, html
async def _process_2auth_form(self, html: str) -> (str, str)
Parse the two-factor authorization page, fill in the confirmation code and submit the form.

:param html: html page
:return: url and html from redirected page
4.960008
4.620118
1.073567
# Parse page
p = AccessPageParser()
p.feed(html)
p.close()

form_url = p.url
form_data = dict(p.inputs)

# Send request
url, html = await self.driver.post_text(form_url, form_data)
return url, html
async def _process_access_form(self, html: str) -> (str, str)
Parse the page asking for access rights and submit the form.

:param html: html page
:return: url and html from redirected page
4.772056
4.760264
1.002477
code = await self.get_code(code)
params = {
    'client_id': self.app_id,
    'client_secret': self.app_secret,
    'redirect_uri': self.redirect_uri,
    'code': code
}
response = await self.driver.json(self.CODE_URL, params, self.timeout)
if 'error' in response:
    raise VkAuthError(response['error'], response['error_description'], self.CODE_URL, params)
self.access_token = response['access_token']
async def authorize(self, code: str=None) -> None
Exchange the authorization code for a new access token from the server
2.740054
2.653972
1.032435
if not self.base_url:
    await self._get_long_poll_server(need_pts)

params = {
    'ts': self.ts,
    'key': self.key,
}
params.update(self.base_params)

# invalid mimetype from server, so fetch text and decode JSON manually
code, response = await self.api._session.driver.get_text(
    self.base_url, params,
    timeout=2 * self.base_params['wait']
)
if code == 403:
    raise VkLongPollError(403, 'Something went wrong', self.base_url + '/', params)
response = json.loads(response)
failed = response.get('failed')

if not failed:
    self.ts = response['ts']
    return response

if failed == 1:
    self.ts = response['ts']
elif failed == 4:
    raise VkLongPollError(
        4,
        'An invalid version number was passed in the version parameter',
        self.base_url + '/',
        params
    )
else:
    self.base_url = None

return await self.wait()
async def wait(self, need_pts=False) -> dict
Send a long poll request.

:param need_pts: whether to return the pts field
4.270388
4.253515
1.003967
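A typical consumer loops over wait() and processes each response's 'updates' array; a hedged sketch:

async def poll_forever(lp):
    while True:
        response = await lp.wait()
        for update in response.get('updates', ()):
            print(update)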
'''Returns a Dirichlet PDF function'''
alphap = alphas - 1
c = np.exp(gammaln(alphas.sum()) - gammaln(alphas).sum())

def dirichlet(xs):
    '''N x K array'''
    return c * (xs**alphap).prod(axis=1)

return dirichlet
def pdf(alphas)
Returns a Dirichlet PDF function
5.020486
4.729168
1.0616
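A usage sketch: the returned closure evaluates the density row-wise on an N x K array of points on the simplex:

import numpy as np

d = pdf(np.array([1.0, 2.0, 3.0]))
xs = np.array([[0.2, 0.3, 0.5],
               [0.1, 0.1, 0.8]])
print(d(xs))  # one density value per row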
'''Mean and precision of Dirichlet distribution.

Parameters
----------
a : array
    Parameters of Dirichlet distribution.

Returns
-------
mean : array
    Numbers [0,1] of the means of the Dirichlet distribution.
precision : float
    Precision or concentration parameter of the Dirichlet
    distribution.'''
s = a.sum()
m = a / s
return (m,s)
def meanprecision(a)
Mean and precision of Dirichlet distribution.

Parameters
----------
a : array
    Parameters of Dirichlet distribution.

Returns
-------
mean : array
    Numbers [0,1] of the means of the Dirichlet distribution.
precision : float
    Precision or concentration parameter of the Dirichlet distribution.
4.502657
1.944182
2.315964
'''Compute log likelihood of Dirichlet distribution, i.e. log p(D|a).

Parameters
----------
D : 2D array
    where ``N`` is the number of observations, ``K`` is the number of
    parameters for the Dirichlet distribution.
a : array
    Parameters for the Dirichlet distribution.

Returns
-------
logl : float
    The log likelihood of the Dirichlet distribution'''
N, K = D.shape
logp = log(D).mean(axis=0)
return N*(gammaln(a.sum()) - gammaln(a).sum() + ((a - 1)*logp).sum())
def loglikelihood(D, a)
Compute log likelihood of Dirichlet distribution, i.e. log p(D|a).

Parameters
----------
D : 2D array
    where ``N`` is the number of observations, ``K`` is the number of
    parameters for the Dirichlet distribution.
a : array
    Parameters for the Dirichlet distribution.

Returns
-------
logl : float
    The log likelihood of the Dirichlet distribution
3.496387
1.959348
1.784464
'''Iteratively computes maximum likelihood Dirichlet distribution
for an observed data set, i.e. a for which log p(D|a) is maximum.

Parameters
----------
D : 2D array
    ``N x K`` array of numbers from [0,1] where ``N`` is the number of
    observations, ``K`` is the number of parameters for the Dirichlet
    distribution.
tol : float
    If Euclidean distance between successive parameter arrays is less
    than ``tol``, calculation is taken to have converged.
method : string
    One of ``'fixedpoint'`` and ``'meanprecision'``, designates method
    by which to find MLE Dirichlet distribution. Default is
    ``'meanprecision'``, which is faster.
maxiter : int
    Maximum number of iterations to take calculations. Default is
    ``sys.maxint``.

Returns
-------
a : array
    Maximum likelihood parameters for Dirichlet distribution.'''
if method == 'meanprecision':
    return _meanprecision(D, tol=tol, maxiter=maxiter)
else:
    return _fixedpoint(D, tol=tol, maxiter=maxiter)
def mle(D, tol=1e-7, method='meanprecision', maxiter=None)
Iteratively computes maximum likelihood Dirichlet distribution
for an observed data set, i.e. a for which log p(D|a) is maximum.

Parameters
----------
D : 2D array
    ``N x K`` array of numbers from [0,1] where ``N`` is the number of
    observations, ``K`` is the number of parameters for the Dirichlet
    distribution.
tol : float
    If Euclidean distance between successive parameter arrays is less
    than ``tol``, calculation is taken to have converged.
method : string
    One of ``'fixedpoint'`` and ``'meanprecision'``, designates method
    by which to find MLE Dirichlet distribution. Default is
    ``'meanprecision'``, which is faster.
maxiter : int
    Maximum number of iterations to take calculations. Default is
    ``sys.maxint``.

Returns
-------
a : array
    Maximum likelihood parameters for Dirichlet distribution.
4.382687
1.249536
3.507452
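A round-trip sketch: sample from a known Dirichlet with NumPy and check that mle recovers parameters close to the truth:

import numpy as np

true_a = np.array([5.0, 2.0, 1.0])
D = np.random.dirichlet(true_a, size=10000)
est = mle(D)
print(true_a, est)  # estimates should be close to true_a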
'''Simple fixed point iteration method for MLE of Dirichlet distribution'''
N, K = D.shape
logp = log(D).mean(axis=0)
a0 = _init_a(D)

# Start updating
if maxiter is None:
    maxiter = MAXINT
for i in xrange(maxiter):
    a1 = _ipsi(psi(a0.sum()) + logp)
    # if norm(a1-a0) < tol:
    if abs(loglikelihood(D, a1)-loglikelihood(D, a0)) < tol: # much faster
        return a1
    a0 = a1
raise Exception('Failed to converge after {} iterations, values are {}.'
                .format(maxiter, a1))
def _fixedpoint(D, tol=1e-7, maxiter=None)
Simple fixed point iteration method for MLE of Dirichlet distribution
5.433703
4.670619
1.16338
'''Mean and precision alternating method for MLE of Dirichlet distribution'''
N, K = D.shape
logp = log(D).mean(axis=0)
a0 = _init_a(D)
s0 = a0.sum()
if s0 < 0:
    a0 = a0/s0
    s0 = 1
elif s0 == 0:
    # use a uniform initial guess (the original referenced an undefined name here)
    a0 = ones(a0.shape) / len(a0)
    s0 = 1
m0 = a0/s0

# Start updating
if maxiter is None:
    maxiter = MAXINT
for i in xrange(maxiter):
    a1 = _fit_s(D, a0, logp, tol=tol)
    s1 = sum(a1)
    a1 = _fit_m(D, a1, logp, tol=tol)
    m = a1/s1
    # if norm(a1-a0) < tol:
    if abs(loglikelihood(D, a1)-loglikelihood(D, a0)) < tol: # much faster
        return a1
    a0 = a1
raise Exception('Failed to converge after {} iterations, values are {}.'
                .format(maxiter, a1))
def _meanprecision(D, tol=1e-7, maxiter=None)
Mean and precision alternating method for MLE of Dirichlet distribution
3.864865
3.360663
1.150031
'''Assuming a fixed mean for Dirichlet distribution, maximize likelihood
for precision a.k.a. s'''
N, K = D.shape
s1 = a0.sum()
m = a0 / s1
mlogp = (m*logp).sum()
for i in xrange(maxiter):
    s0 = s1
    g = psi(s1) - (m*psi(s1*m)).sum() + mlogp
    h = _trigamma(s1) - ((m**2)*_trigamma(s1*m)).sum()

    if g + s1 * h < 0:
        s1 = 1/(1/s0 + g/h/(s0**2))
    if s1 <= 0:
        s1 = s0 * exp(-g/(s0*h + g)) # Newton on log s
    if s1 <= 0:
        s1 = 1/(1/s0 + g/((s0**2)*h + 2*s0*g)) # Newton on 1/s
    if s1 <= 0:
        s1 = s0 - g/h # Newton
    if s1 <= 0:
        raise Exception('Unable to update s from {}'.format(s0))

    a = s1 * m
    if abs(s1 - s0) < tol:
        return a

raise Exception('Failed to converge after {} iterations, s is {}'
                .format(maxiter, s1))
def _fit_s(D, a0, logp, tol=1e-7, maxiter=1000)
Assuming a fixed mean for Dirichlet distribution, maximize likelihood for precision a.k.a. s
4.419541
3.54824
1.245559
'''With fixed precision s, maximize mean m'''
N,K = D.shape
s = a0.sum()

for i in xrange(maxiter):
    m = a0 / s
    a1 = _ipsi(logp + (m*(psi(a0) - logp)).sum())
    a1 = a1/a1.sum() * s

    if norm(a1 - a0) < tol:
        return a1
    a0 = a1

raise Exception('Failed to converge after {} iterations, s is {}'
                .format(maxiter, s))
def _fit_m(D, a0, logp, tol=1e-7, maxiter=1000)
With fixed precision s, maximize mean m
6.047511
4.645189
1.301887
'''Fixed version of numpy.piecewise for 0-d arrays'''
x = asanyarray(x)
n2 = len(funclist)
if isscalar(condlist) or \
        (isinstance(condlist, np.ndarray) and condlist.ndim == 0) or \
        (x.ndim > 0 and condlist[0].ndim == 0):
    condlist = [condlist]

condlist = [asarray(c, dtype=bool) for c in condlist]
n = len(condlist)

zerod = False
# This is a hack to work around problems with NumPy's
# handling of 0-d arrays and boolean indexing with
# numpy.bool_ scalars
if x.ndim == 0:
    x = x[None]
    zerod = True
    newcondlist = []
    for k in range(n):
        if condlist[k].ndim == 0:
            condition = condlist[k][None]
        else:
            condition = condlist[k]
        newcondlist.append(condition)
    condlist = newcondlist

if n == n2-1:  # compute the "otherwise" condition.
    totlist = condlist[0]
    for k in range(1, n):
        totlist |= condlist[k]
    condlist.append(~totlist)
    n += 1
if (n != n2):
    raise ValueError(
        "function list and condition list must be the same")

y = zeros(x.shape, x.dtype)
for k in range(n):
    item = funclist[k]
    if not callable(item):
        y[condlist[k]] = item
    else:
        vals = x[condlist[k]]
        if vals.size > 0:
            y[condlist[k]] = item(vals, *args, **kw)

if zerod:
    y = y.squeeze()
return y
def _piecewise(x, condlist, funclist, *args, **kw)
Fixed version of numpy.piecewise for 0-d arrays
2.800144
2.688403
1.041564
'''Initial guess for Dirichlet alpha parameters given data D'''
E = D.mean(axis=0)
E2 = (D**2).mean(axis=0)
return ((E[0] - E2[0])/(E2[0]-E[0]**2)) * E
def _init_a(D)
Initial guess for Dirichlet alpha parameters given data D
4.993094
3.58271
1.393664
'''Inverse of psi (digamma) using Newton's method. For the purposes
of Dirichlet MLE, since the parameters a[i] must always
satisfy a > 0, we define ipsi :: R -> (0,inf).'''
y = asanyarray(y, dtype='float')
x0 = _piecewise(y, [y >= -2.22, y < -2.22],
                [(lambda x: exp(x) + 0.5), (lambda x: -1/(x+euler))])
for i in xrange(maxiter):
    x1 = x0 - (psi(x0) - y)/_trigamma(x0)
    if norm(x1 - x0) < tol:
        return x1
    x0 = x1
raise Exception(
    'Unable to converge in {} iterations, value is {}'.format(maxiter, x1))
def _ipsi(y, tol=1.48e-9, maxiter=10)
Inverse of psi (digamma) using Newton's method. For the purposes of Dirichlet MLE, since the parameters a[i] must always satisfy a > 0, we define ipsi :: R -> (0,inf).
6.657102
3.130122
2.126787
'''Converts array of barycentric coordinates on a 2-simplex to an array of
Cartesian coordinates on a 2D triangle in the first quadrant, i.e.::

    >>> cartesian((1,0,0))
    array([0, 0])
    >>> cartesian((0,1,0))
    array([0, 1])
    >>> cartesian((0,0,1))
    array([0.5, 0.8660254037844386]) # == [0.5, sqrt(3)/2]

:param points: Points on a 2-simplex.
:type points: N x 3 list or ndarray.
:returns: Cartesian coordinate points.
:rtype: N x 2 ndarray.'''
points = np.asanyarray(points)
ndim = points.ndim # will use this to have similar output shape to input
if ndim == 1:
    points = points.reshape((1,points.size))
d = points.sum(axis=1) # in case values aren't normalized
x = 0.5*(2*points[:,1] + points[:,2])/d
y = (np.sqrt(3.0)/2) * points[:,2]/d
out = np.vstack([x,y]).T
if ndim == 1:
    return out.reshape((2,))
return out
def cartesian(points)
Converts array of barycentric coordinates on a 2-simplex to an array of
Cartesian coordinates on a 2D triangle in the first quadrant, i.e.::

    >>> cartesian((1,0,0))
    array([0, 0])
    >>> cartesian((0,1,0))
    array([0, 1])
    >>> cartesian((0,0,1))
    array([0.5, 0.8660254037844386]) # == [0.5, sqrt(3)/2]

:param points: Points on a 2-simplex.
:type points: N x 3 list or ndarray.
:returns: Cartesian coordinate points.
:rtype: N x 2 ndarray.
3.500184
1.997713
1.752095
'''Inverse of :func:`cartesian`.'''
points = np.asanyarray(points)
ndim = points.ndim
if ndim == 1:
    points = points.reshape((1,points.size))
c = (2/np.sqrt(3.0))*points[:,1]
b = (2*points[:,0] - c)/2.0
a = 1.0 - c - b
out = np.vstack([a,b,c]).T
if ndim == 1:
    return out.reshape((3,))
return out
def barycentric(points)
Inverse of :func:`cartesian`.
2.979038
2.833521
1.051356
'''Scatter plot of barycentric 2-simplex points on a 2D triangle.

:param points: Points on a 2-simplex.
:type points: N x 3 list or ndarray.
:param vertexlabels: Labels for corners of plot in the order
    ``(a, b, c)`` where ``a == (1,0,0)``, ``b == (0,1,0)``,
    ``c == (0,0,1)``.
:type vertexlabels: 3-tuple of strings.
:param **kwargs: Arguments to :func:`plt.scatter`.
:type **kwargs: keyword arguments.'''
if vertexlabels is None:
    vertexlabels = ('1','2','3')

projected = cartesian(points)
plt.scatter(projected[:,0], projected[:,1], **kwargs)

_draw_axes(vertexlabels)
return plt.gcf()
def scatter(points, vertexlabels=None, **kwargs)
Scatter plot of barycentric 2-simplex points on a 2D triangle.

:param points: Points on a 2-simplex.
:type points: N x 3 list or ndarray.
:param vertexlabels: Labels for corners of plot in the order
    ``(a, b, c)`` where ``a == (1,0,0)``, ``b == (0,1,0)``,
    ``c == (0,0,1)``.
:type vertexlabels: 3-tuple of strings.
:param **kwargs: Arguments to :func:`plt.scatter`.
:type **kwargs: keyword arguments.
3.916975
1.739305
2.252035
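A usage sketch plotting Dirichlet samples on the simplex (requires matplotlib):

import numpy as np
import matplotlib.pyplot as plt

points = np.random.dirichlet([2.0, 3.0, 4.0], size=500)
scatter(points, vertexlabels=('a', 'b', 'c'), s=4)
plt.show()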
'''Contour line plot on a 2D triangle of a function evaluated at
barycentric 2-simplex points.

:param f: Function to evaluate on N x 3 ndarray of coordinates
:type f: ``ufunc``
:param vertexlabels: Labels for corners of plot in the order
    ``(a, b, c)`` where ``a == (1,0,0)``, ``b == (0,1,0)``,
    ``c == (0,0,1)``.
:type vertexlabels: 3-tuple of strings.
:param **kwargs: Arguments to :func:`plt.tricontour`.
:type **kwargs: keyword arguments.'''
return _contour(f, vertexlabels, contourfunc=plt.tricontour, **kwargs)
def contour(f, vertexlabels=None, **kwargs)
Contour line plot on a 2D triangle of a function evaluated at
barycentric 2-simplex points.

:param f: Function to evaluate on N x 3 ndarray of coordinates
:type f: ``ufunc``
:param vertexlabels: Labels for corners of plot in the order
    ``(a, b, c)`` where ``a == (1,0,0)``, ``b == (0,1,0)``,
    ``c == (0,0,1)``.
:type vertexlabels: 3-tuple of strings.
:param **kwargs: Arguments to :func:`plt.tricontour`.
:type **kwargs: keyword arguments.
5.038706
1.460537
3.449901
'''Filled contour plot on a 2D triangle of a function evaluated at
barycentric 2-simplex points.

Function signature is identical to :func:`contour` with the caveat that
``**kwargs`` are passed on to :func:`plt.tricontourf`.'''
return _contour(f, vertexlabels, contourfunc=plt.tricontourf, **kwargs)
def contourf(f, vertexlabels=None, **kwargs)
Filled contour plot on a 2D triangle of a function evaluated at barycentric 2-simplex points. Function signature is identical to :func:`contour` with the caveat that ``**kwargs`` are passed on to :func:`plt.tricontourf`.
6.584534
1.964676
3.351461
'''Workhorse function for the above, where ``contourfunc`` is the contour
plotting function to use for actual plotting.'''
if contourfunc is None:
    contourfunc = plt.tricontour
if vertexlabels is None:
    vertexlabels = ('1','2','3')
x = np.linspace(0, 1, 100)
y = np.linspace(0, np.sqrt(3.0)/2.0, 100)
points2d = np.transpose([np.tile(x, len(y)), np.repeat(y, len(x))])
points3d = barycentric(points2d)
valid = (points3d.sum(axis=1) == 1.0) & ((0.0 <= points3d).all(axis=1))
points2d = points2d[np.where(valid),:][0]
points3d = points3d[np.where(valid),:][0]
z = f(points3d)
contourfunc(points2d[:,0], points2d[:,1], z, **kwargs)
_draw_axes(vertexlabels)
return plt.gcf()
def _contour(f, vertexlabels=None, contourfunc=None, **kwargs)
Workhorse function for the above, where ``contourfunc`` is the contour plotting function to use for actual plotting.
2.959052
2.42058
1.222456
@wraps(func)
def func_wrapper(*args, **kwargs):
    # pylint: disable=C0111, C0103
    function_name = func.__name__
    VALIDATORS_DISABLED = os.getenv('VALIDATORS_DISABLED', '')
    disabled_functions = [x.strip() for x in VALIDATORS_DISABLED.split(',')]
    force_run = kwargs.get('force_run', False)

    try:
        value = args[0]
    except IndexError:
        raise ValidatorUsageError('no value was supplied')

    if function_name in disabled_functions and not force_run:
        return value
    else:
        updated_kwargs = {key : kwargs[key]
                          for key in kwargs
                          if key != 'force_run'}
        return func(*args, **updated_kwargs)

return func_wrapper
def disable_on_env(func)
Disable the decorated ``func`` if its name is present in the ``VALIDATORS_DISABLED`` environment variable.

:param func: The function/validator to be disabled.
:type func: callable

:returns: If disabled, the ``value`` (first positional argument) passed to ``func``. If enabled, the result of ``func``.
3.13691
2.873688
1.091597
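A sketch of the decorator in action; the validator name is illustrative:

import os

@disable_on_env
def integer(value, **kwargs):
    # hypothetical validator
    if not isinstance(value, int):
        raise ValueError('not an integer')
    return value

os.environ['VALIDATORS_DISABLED'] = 'integer'
print(integer('oops'))           # validation skipped, value returned unchanged
integer('oops', force_run=True)  # still raises ValueError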