Dataset columns and value statistics:

    repository_name              string, length 5 to 67
    func_path_in_repository      string, length 4 to 234
    func_name                    string, length 0 to 314
    whole_func_string            string, length 52 to 3.87M
    language                     string, 6 classes
    func_code_string             string, length 52 to 3.87M
    func_code_tokens             sequence, length 15 to 672k
    func_documentation_string    string, length 1 to 47.2k
    func_documentation_tokens    sequence, length 1 to 3.92k
    split_name                   string, 1 class
    func_code_url                string, length 85 to 339
JarryShaw/PyPCAPKit
src/protocols/internet/ipv4.py
IPv4._read_mode_route
def _read_mode_route(self, size, kind): """Read options with route data. Positional arguments: * size - int, length of option * kind - int, 7/131/137 (RR/LSR/SSR) Returns: * dict -- extracted option with route data Structure of these options: * [RFC 791] Loose Source Route +--------+--------+--------+---------//--------+ |10000011| length | pointer| route data | +--------+--------+--------+---------//--------+ * [RFC 791] Strict Source Route +--------+--------+--------+---------//--------+ |10001001| length | pointer| route data | +--------+--------+--------+---------//--------+ * [RFC 791] Record Route +--------+--------+--------+---------//--------+ |00000111| length | pointer| route data | +--------+--------+--------+---------//--------+ Octets Bits Name Description 0 0 ip.opt.kind Kind (7/131/137) 0 0 ip.opt.type.copy Copied Flag (0) 0 1 ip.opt.type.class Option Class (0/1) 0 3 ip.opt.type.number Option Number (3/7/9) 1 8 ip.opt.length Length 2 16 ip.opt.pointer Pointer (≥4) 3 24 ip.opt.data Route Data """ if size < 3 or (size - 3) % 4 != 0: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') _rptr = self._read_unpack(1) if _rptr < 4: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') data = dict( kind=kind, type=self._read_opt_type(kind), length=size, pointer=_rptr, ) counter = 4 address = list() endpoint = min(_rptr, size) while counter < endpoint: counter += 4 address.append(self._read_ipv4_addr()) data['ip'] = address or None return data
python
def _read_mode_route(self, size, kind): """Read options with route data. Positional arguments: * size - int, length of option * kind - int, 7/131/137 (RR/LSR/SSR) Returns: * dict -- extracted option with route data Structure of these options: * [RFC 791] Loose Source Route +--------+--------+--------+---------//--------+ |10000011| length | pointer| route data | +--------+--------+--------+---------//--------+ * [RFC 791] Strict Source Route +--------+--------+--------+---------//--------+ |10001001| length | pointer| route data | +--------+--------+--------+---------//--------+ * [RFC 791] Record Route +--------+--------+--------+---------//--------+ |00000111| length | pointer| route data | +--------+--------+--------+---------//--------+ Octets Bits Name Description 0 0 ip.opt.kind Kind (7/131/137) 0 0 ip.opt.type.copy Copied Flag (0) 0 1 ip.opt.type.class Option Class (0/1) 0 3 ip.opt.type.number Option Number (3/7/9) 1 8 ip.opt.length Length 2 16 ip.opt.pointer Pointer (≥4) 3 24 ip.opt.data Route Data """ if size < 3 or (size - 3) % 4 != 0: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') _rptr = self._read_unpack(1) if _rptr < 4: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') data = dict( kind=kind, type=self._read_opt_type(kind), length=size, pointer=_rptr, ) counter = 4 address = list() endpoint = min(_rptr, size) while counter < endpoint: counter += 4 address.append(self._read_ipv4_addr()) data['ip'] = address or None return data
[ "def", "_read_mode_route", "(", "self", ",", "size", ",", "kind", ")", ":", "if", "size", "<", "3", "or", "(", "size", "-", "3", ")", "%", "4", "!=", "0", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {kind}] invalid format'", ")", "_rptr", "=", "self", ".", "_read_unpack", "(", "1", ")", "if", "_rptr", "<", "4", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {kind}] invalid format'", ")", "data", "=", "dict", "(", "kind", "=", "kind", ",", "type", "=", "self", ".", "_read_opt_type", "(", "kind", ")", ",", "length", "=", "size", ",", "pointer", "=", "_rptr", ",", ")", "counter", "=", "4", "address", "=", "list", "(", ")", "endpoint", "=", "min", "(", "_rptr", ",", "size", ")", "while", "counter", "<", "endpoint", ":", "counter", "+=", "4", "address", ".", "append", "(", "self", ".", "_read_ipv4_addr", "(", ")", ")", "data", "[", "'ip'", "]", "=", "address", "or", "None", "return", "data" ]
Read options with route data. Positional arguments: * size - int, length of option * kind - int, 7/131/137 (RR/LSR/SSR) Returns: * dict -- extracted option with route data Structure of these options: * [RFC 791] Loose Source Route +--------+--------+--------+---------//--------+ |10000011| length | pointer| route data | +--------+--------+--------+---------//--------+ * [RFC 791] Strict Source Route +--------+--------+--------+---------//--------+ |10001001| length | pointer| route data | +--------+--------+--------+---------//--------+ * [RFC 791] Record Route +--------+--------+--------+---------//--------+ |00000111| length | pointer| route data | +--------+--------+--------+---------//--------+ Octets Bits Name Description 0 0 ip.opt.kind Kind (7/131/137) 0 0 ip.opt.type.copy Copied Flag (0) 0 1 ip.opt.type.class Option Class (0/1) 0 3 ip.opt.type.number Option Number (3/7/9) 1 8 ip.opt.length Length 2 16 ip.opt.pointer Pointer (≥4) 3 24 ip.opt.data Route Data
[ "Read", "options", "with", "route", "data", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/ipv4.py#L448-L505
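The three route options (RR/LSR/SSR) share one layout, so a tiny standalone parser makes the pointer arithmetic above concrete. This is a sketch over raw option bytes, not pcapkit's API; parse_route_option and its field names are illustrative only.

import ipaddress

def parse_route_option(option: bytes) -> dict:
    # kind (7/131/137), total option length, 1-based pointer into the option
    kind, length, pointer = option[0], option[1], option[2]
    if length < 3 or (length - 3) % 4 != 0 or pointer < 4:
        raise ValueError('malformed route option')
    # addresses recorded so far sit between octet 4 and the pointer (or the option end)
    route = [str(ipaddress.IPv4Address(option[i:i + 4]))
             for i in range(3, min(pointer, length) - 1, 4)]
    return dict(kind=kind, length=length, pointer=pointer, ip=route or None)

# a Record Route option (kind 7) holding one recorded hop and one empty slot
parse_route_option(bytes([7, 11, 8, 10, 0, 0, 1, 0, 0, 0, 0]))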
JarryShaw/PyPCAPKit
src/protocols/internet/ipv4.py
IPv4._read_mode_qs
def _read_mode_qs(self, size, kind): """Read Quick Start option. Positional arguments: * size - int, length of option * kind - int, 25 (QS) Returns: * dict -- extracted Quick Start (QS) option Structure of Quick-Start (QS) option [RFC 4782]: * A Quick-Start Request. 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option | Length=8 | Func. | Rate | QS TTL | | | | 0000 |Request| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | QS Nonce | R | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * Report of Approved Rate. 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option | Length=8 | Func. | Rate | Not Used | | | | 1000 | Report| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | QS Nonce | R | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 ip.qs.kind Kind (25) 0 0 ip.qs.type.copy Copied Flag (0) 0 1 ip.qs.type.class Option Class (0) 0 3 ip.qs.type.number Option Number (25) 1 8 ip.qs.length Length (8) 2 16 ip.qs.func Function (0/8) 2 20 ip.qs.rate Rate Request / Report (in Kbps) 3 24 ip.qs.ttl QS TTL / None 4 32 ip.qs.nounce QS Nounce 7 62 - Reserved (\x00\x00) """ if size != 8: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') _type = self._read_opt_type(kind) _fcrr = self._read_binary(1) _func = int(_fcrr[:4], base=2) _rate = int(_fcrr[4:], base=2) _ttlv = self._read_unpack(1) _nonr = self._read_binary(4) _qsnn = int(_nonr[:30], base=2) if _func != 0 and _func != 8: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') data = dict( kind=kind, type=_type, length=size, func=QS_FUNC.get(_func), rate=40000 * (2 ** _rate) / 1000, ttl=None if _func else _rate, nounce=_qsnn, ) return data
python
def _read_mode_qs(self, size, kind): """Read Quick Start option. Positional arguments: * size - int, length of option * kind - int, 25 (QS) Returns: * dict -- extracted Quick Start (QS) option Structure of Quick-Start (QS) option [RFC 4782]: * A Quick-Start Request. 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option | Length=8 | Func. | Rate | QS TTL | | | | 0000 |Request| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | QS Nonce | R | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * Report of Approved Rate. 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option | Length=8 | Func. | Rate | Not Used | | | | 1000 | Report| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | QS Nonce | R | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 ip.qs.kind Kind (25) 0 0 ip.qs.type.copy Copied Flag (0) 0 1 ip.qs.type.class Option Class (0) 0 3 ip.qs.type.number Option Number (25) 1 8 ip.qs.length Length (8) 2 16 ip.qs.func Function (0/8) 2 20 ip.qs.rate Rate Request / Report (in Kbps) 3 24 ip.qs.ttl QS TTL / None 4 32 ip.qs.nounce QS Nounce 7 62 - Reserved (\x00\x00) """ if size != 8: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') _type = self._read_opt_type(kind) _fcrr = self._read_binary(1) _func = int(_fcrr[:4], base=2) _rate = int(_fcrr[4:], base=2) _ttlv = self._read_unpack(1) _nonr = self._read_binary(4) _qsnn = int(_nonr[:30], base=2) if _func != 0 and _func != 8: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') data = dict( kind=kind, type=_type, length=size, func=QS_FUNC.get(_func), rate=40000 * (2 ** _rate) / 1000, ttl=None if _func else _rate, nounce=_qsnn, ) return data
[ "def", "_read_mode_qs", "(", "self", ",", "size", ",", "kind", ")", ":", "if", "size", "!=", "8", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {kind}] invalid format'", ")", "_type", "=", "self", ".", "_read_opt_type", "(", "kind", ")", "_fcrr", "=", "self", ".", "_read_binary", "(", "1", ")", "_func", "=", "int", "(", "_fcrr", "[", ":", "4", "]", ",", "base", "=", "2", ")", "_rate", "=", "int", "(", "_fcrr", "[", "4", ":", "]", ",", "base", "=", "2", ")", "_ttlv", "=", "self", ".", "_read_unpack", "(", "1", ")", "_nonr", "=", "self", ".", "_read_binary", "(", "4", ")", "_qsnn", "=", "int", "(", "_nonr", "[", ":", "30", "]", ",", "base", "=", "2", ")", "if", "_func", "!=", "0", "and", "_func", "!=", "8", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {kind}] invalid format'", ")", "data", "=", "dict", "(", "kind", "=", "kind", ",", "type", "=", "_type", ",", "length", "=", "size", ",", "func", "=", "QS_FUNC", ".", "get", "(", "_func", ")", ",", "rate", "=", "40000", "*", "(", "2", "**", "_rate", ")", "/", "1000", ",", "ttl", "=", "None", "if", "_func", "else", "_rate", ",", "nounce", "=", "_qsnn", ",", ")", "return", "data" ]
Read Quick Start option. Positional arguments: * size - int, length of option * kind - int, 25 (QS) Returns: * dict -- extracted Quick Start (QS) option Structure of Quick-Start (QS) option [RFC 4782]: * A Quick-Start Request. 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option | Length=8 | Func. | Rate | QS TTL | | | | 0000 |Request| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | QS Nonce | R | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * Report of Approved Rate. 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option | Length=8 | Func. | Rate | Not Used | | | | 1000 | Report| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | QS Nonce | R | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 ip.qs.kind Kind (25) 0 0 ip.qs.type.copy Copied Flag (0) 0 1 ip.qs.type.class Option Class (0) 0 3 ip.qs.type.number Option Number (25) 1 8 ip.qs.length Length (8) 2 16 ip.qs.func Function (0/8) 2 20 ip.qs.rate Rate Request / Report (in Kbps) 3 24 ip.qs.ttl QS TTL / None 4 32 ip.qs.nounce QS Nounce 7 62 - Reserved (\x00\x00)
[ "Read", "Quick", "Start", "option", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/ipv4.py#L507-L574
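The rate request/report fields above are 4-bit exponents; _read_mode_qs turns them into Kbps with 40000 * 2**N / 1000. A quick worked check of that formula (values computed here, not quoted from the RFC):

for n in (1, 5, 10, 15):
    print(n, 40000 * 2 ** n / 1000, 'Kbps')
# 1 -> 80.0, 5 -> 1280.0, 10 -> 40960.0, 15 -> 1310720.0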
JarryShaw/PyPCAPKit
src/protocols/internet/ipv4.py
IPv4._read_mode_ts
def _read_mode_ts(self, size, kind): """Read Time Stamp option. Positional arguments: * size - int, length of option * kind - int, 68 (TS) Returns: * dict -- extracted Time Stamp (TS) option Structure of Timestamp (TS) option [RFC 791]: +--------+--------+--------+--------+ |01000100| length | pointer|oflw|flg| +--------+--------+--------+--------+ | internet address | +--------+--------+--------+--------+ | timestamp | +--------+--------+--------+--------+ | . | . . Octets Bits Name Description 0 0 ip.ts.kind Kind (25) 0 0 ip.ts.type.copy Copied Flag (0) 0 1 ip.ts.type.class Option Class (0) 0 3 ip.ts.type.number Option Number (25) 1 8 ip.ts.length Length (≤40) 2 16 ip.ts.pointer Pointer (≥5) 3 24 ip.ts.overflow Overflow Octets 3 28 ip.ts.flag Flag 4 32 ip.ts.ip Internet Address 8 64 ip.ts.timestamp Timestamp """ if size > 40 or size < 4: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') _tptr = self._read_unpack(1) _oflg = self._read_binary(1) _oflw = int(_oflg[:4], base=2) _flag = int(_oflg[4:], base=2) if _tptr < 5: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') data = dict( kind=kind, type=self._read_opt_type(kind), length=size, pointer=_tptr, overflow=_oflw, flag=_flag, ) endpoint = min(_tptr, size) if _flag == 0: if (size - 4) % 4 != 0: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') counter = 5 timestamp = list() while counter < endpoint: counter += 4 time = self._read_unpack(4, lilendian=True) timestamp.append(datetime.datetime.fromtimestamp(time)) data['timestamp'] = timestamp or None elif _flag == 1 or _flag == 3: if (size - 4) % 8 != 0: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') counter = 5 ipaddress = list() timestamp = list() while counter < endpoint: counter += 8 ipaddress.append(self._read_ipv4_addr()) time = self._read_unpack(4, lilendian=True) timestamp.append(datetime.datetime.fromtimestamp(time)) data['ip'] = ipaddress or None data['timestamp'] = timestamp or None else: data['data'] = self._read_fileng(size - 4) or None return data
python
def _read_mode_ts(self, size, kind): """Read Time Stamp option. Positional arguments: * size - int, length of option * kind - int, 68 (TS) Returns: * dict -- extracted Time Stamp (TS) option Structure of Timestamp (TS) option [RFC 791]: +--------+--------+--------+--------+ |01000100| length | pointer|oflw|flg| +--------+--------+--------+--------+ | internet address | +--------+--------+--------+--------+ | timestamp | +--------+--------+--------+--------+ | . | . . Octets Bits Name Description 0 0 ip.ts.kind Kind (25) 0 0 ip.ts.type.copy Copied Flag (0) 0 1 ip.ts.type.class Option Class (0) 0 3 ip.ts.type.number Option Number (25) 1 8 ip.ts.length Length (≤40) 2 16 ip.ts.pointer Pointer (≥5) 3 24 ip.ts.overflow Overflow Octets 3 28 ip.ts.flag Flag 4 32 ip.ts.ip Internet Address 8 64 ip.ts.timestamp Timestamp """ if size > 40 or size < 4: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') _tptr = self._read_unpack(1) _oflg = self._read_binary(1) _oflw = int(_oflg[:4], base=2) _flag = int(_oflg[4:], base=2) if _tptr < 5: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') data = dict( kind=kind, type=self._read_opt_type(kind), length=size, pointer=_tptr, overflow=_oflw, flag=_flag, ) endpoint = min(_tptr, size) if _flag == 0: if (size - 4) % 4 != 0: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') counter = 5 timestamp = list() while counter < endpoint: counter += 4 time = self._read_unpack(4, lilendian=True) timestamp.append(datetime.datetime.fromtimestamp(time)) data['timestamp'] = timestamp or None elif _flag == 1 or _flag == 3: if (size - 4) % 8 != 0: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') counter = 5 ipaddress = list() timestamp = list() while counter < endpoint: counter += 8 ipaddress.append(self._read_ipv4_addr()) time = self._read_unpack(4, lilendian=True) timestamp.append(datetime.datetime.fromtimestamp(time)) data['ip'] = ipaddress or None data['timestamp'] = timestamp or None else: data['data'] = self._read_fileng(size - 4) or None return data
[ "def", "_read_mode_ts", "(", "self", ",", "size", ",", "kind", ")", ":", "if", "size", ">", "40", "or", "size", "<", "4", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {kind}] invalid format'", ")", "_tptr", "=", "self", ".", "_read_unpack", "(", "1", ")", "_oflg", "=", "self", ".", "_read_binary", "(", "1", ")", "_oflw", "=", "int", "(", "_oflg", "[", ":", "4", "]", ",", "base", "=", "2", ")", "_flag", "=", "int", "(", "_oflg", "[", "4", ":", "]", ",", "base", "=", "2", ")", "if", "_tptr", "<", "5", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {kind}] invalid format'", ")", "data", "=", "dict", "(", "kind", "=", "kind", ",", "type", "=", "self", ".", "_read_opt_type", "(", "kind", ")", ",", "length", "=", "size", ",", "pointer", "=", "_tptr", ",", "overflow", "=", "_oflw", ",", "flag", "=", "_flag", ",", ")", "endpoint", "=", "min", "(", "_tptr", ",", "size", ")", "if", "_flag", "==", "0", ":", "if", "(", "size", "-", "4", ")", "%", "4", "!=", "0", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {kind}] invalid format'", ")", "counter", "=", "5", "timestamp", "=", "list", "(", ")", "while", "counter", "<", "endpoint", ":", "counter", "+=", "4", "time", "=", "self", ".", "_read_unpack", "(", "4", ",", "lilendian", "=", "True", ")", "timestamp", ".", "append", "(", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "time", ")", ")", "data", "[", "'timestamp'", "]", "=", "timestamp", "or", "None", "elif", "_flag", "==", "1", "or", "_flag", "==", "3", ":", "if", "(", "size", "-", "4", ")", "%", "8", "!=", "0", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {kind}] invalid format'", ")", "counter", "=", "5", "ipaddress", "=", "list", "(", ")", "timestamp", "=", "list", "(", ")", "while", "counter", "<", "endpoint", ":", "counter", "+=", "8", "ipaddress", ".", "append", "(", "self", ".", "_read_ipv4_addr", "(", ")", ")", "time", "=", "self", ".", "_read_unpack", "(", "4", ",", "lilendian", "=", "True", ")", "timestamp", ".", "append", "(", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "time", ")", ")", "data", "[", "'ip'", "]", "=", "ipaddress", "or", "None", "data", "[", "'timestamp'", "]", "=", "timestamp", "or", "None", "else", ":", "data", "[", "'data'", "]", "=", "self", ".", "_read_fileng", "(", "size", "-", "4", ")", "or", "None", "return", "data" ]
Read Time Stamp option. Positional arguments: * size - int, length of option * kind - int, 68 (TS) Returns: * dict -- extracted Time Stamp (TS) option Structure of Timestamp (TS) option [RFC 791]: +--------+--------+--------+--------+ |01000100| length | pointer|oflw|flg| +--------+--------+--------+--------+ | internet address | +--------+--------+--------+--------+ | timestamp | +--------+--------+--------+--------+ | . | . . Octets Bits Name Description 0 0 ip.ts.kind Kind (25) 0 0 ip.ts.type.copy Copied Flag (0) 0 1 ip.ts.type.class Option Class (0) 0 3 ip.ts.type.number Option Number (25) 1 8 ip.ts.length Length (≤40) 2 16 ip.ts.pointer Pointer (≥5) 3 24 ip.ts.overflow Overflow Octets 3 28 ip.ts.flag Flag 4 32 ip.ts.ip Internet Address 8 64 ip.ts.timestamp Timestamp
[ "Read", "Time", "Stamp", "option", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/ipv4.py#L576-L658
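The fourth octet of the Timestamp option packs the overflow count and the flag into two nibbles; here is a standalone split equivalent to the _read_binary / int(..., base=2) step above (the helper name is illustrative only):

def split_overflow_flag(octet: int) -> tuple:
    overflow = octet >> 4    # high nibble: hops that could not register a timestamp
    flag = octet & 0x0F      # low nibble: 0 timestamps only, 1/3 address + timestamp pairs
    return overflow, flag

split_overflow_flag(0x21)    # -> (2, 1)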
JarryShaw/PyPCAPKit
src/protocols/internet/ipv4.py
IPv4._read_mode_tr
def _read_mode_tr(self, size, kind): """Read Traceroute option. Positional arguments: size - int, length of option kind - int, 82 (TR) Returns: * dict -- extracted Traceroute (TR) option Structure of Traceroute (TR) option [RFC 1393][RFC 6814]: 0 8 16 24 +-+-+-+-+-+-+-+-+---------------+---------------+---------------+ |F| C | Number | Length | ID Number | +-+-+-+-+-+-+-+-+---------------+---------------+---------------+ | Outbound Hop Count | Return Hop Count | +---------------+---------------+---------------+---------------+ | Originator IP Address | +---------------+---------------+---------------+---------------+ Octets Bits Name Description 0 0 ip.tr.kind Kind (82) 0 0 ip.tr.type.copy Copied Flag (0) 0 1 ip.tr.type.class Option Class (0) 0 3 ip.tr.type.number Option Number (18) 1 8 ip.tr.length Length (12) 2 16 ip.tr.id ID Number 4 32 ip.tr.ohc Outbound Hop Count 6 48 ip.tr.rhc Return Hop Count 8 64 ip.tr.ip Originator IP Address """ if size != 12: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') _idnm = self._read_unpack(2) _ohcn = self._read_unpack(2) _rhcn = self._read_unpack(2) _ipad = self._read_ipv4_addr() data = dict( kind=kind, type=self._read_opt_type(kind), length=size, id=_idnm, ohc=_ohcn, rhc=_rhcn, ip=_ipad, ) return data
python
def _read_mode_tr(self, size, kind): """Read Traceroute option. Positional arguments: size - int, length of option kind - int, 82 (TR) Returns: * dict -- extracted Traceroute (TR) option Structure of Traceroute (TR) option [RFC 1393][RFC 6814]: 0 8 16 24 +-+-+-+-+-+-+-+-+---------------+---------------+---------------+ |F| C | Number | Length | ID Number | +-+-+-+-+-+-+-+-+---------------+---------------+---------------+ | Outbound Hop Count | Return Hop Count | +---------------+---------------+---------------+---------------+ | Originator IP Address | +---------------+---------------+---------------+---------------+ Octets Bits Name Description 0 0 ip.tr.kind Kind (82) 0 0 ip.tr.type.copy Copied Flag (0) 0 1 ip.tr.type.class Option Class (0) 0 3 ip.tr.type.number Option Number (18) 1 8 ip.tr.length Length (12) 2 16 ip.tr.id ID Number 4 32 ip.tr.ohc Outbound Hop Count 6 48 ip.tr.rhc Return Hop Count 8 64 ip.tr.ip Originator IP Address """ if size != 12: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') _idnm = self._read_unpack(2) _ohcn = self._read_unpack(2) _rhcn = self._read_unpack(2) _ipad = self._read_ipv4_addr() data = dict( kind=kind, type=self._read_opt_type(kind), length=size, id=_idnm, ohc=_ohcn, rhc=_rhcn, ip=_ipad, ) return data
[ "def", "_read_mode_tr", "(", "self", ",", "size", ",", "kind", ")", ":", "if", "size", "!=", "12", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {kind}] invalid format'", ")", "_idnm", "=", "self", ".", "_read_unpack", "(", "2", ")", "_ohcn", "=", "self", ".", "_read_unpack", "(", "2", ")", "_rhcn", "=", "self", ".", "_read_unpack", "(", "2", ")", "_ipad", "=", "self", ".", "_read_ipv4_addr", "(", ")", "data", "=", "dict", "(", "kind", "=", "kind", ",", "type", "=", "self", ".", "_read_opt_type", "(", "kind", ")", ",", "length", "=", "size", ",", "id", "=", "_idnm", ",", "ohc", "=", "_ohcn", ",", "rhc", "=", "_rhcn", ",", "ip", "=", "_ipad", ",", ")", "return", "data" ]
Read Traceroute option. Positional arguments: size - int, length of option kind - int, 82 (TR) Returns: * dict -- extracted Traceroute (TR) option Structure of Traceroute (TR) option [RFC 1393][RFC 6814]: 0 8 16 24 +-+-+-+-+-+-+-+-+---------------+---------------+---------------+ |F| C | Number | Length | ID Number | +-+-+-+-+-+-+-+-+---------------+---------------+---------------+ | Outbound Hop Count | Return Hop Count | +---------------+---------------+---------------+---------------+ | Originator IP Address | +---------------+---------------+---------------+---------------+ Octets Bits Name Description 0 0 ip.tr.kind Kind (82) 0 0 ip.tr.type.copy Copied Flag (0) 0 1 ip.tr.type.class Option Class (0) 0 3 ip.tr.type.number Option Number (18) 1 8 ip.tr.length Length (12) 2 16 ip.tr.id ID Number 4 32 ip.tr.ohc Outbound Hop Count 6 48 ip.tr.rhc Return Hop Count 8 64 ip.tr.ip Originator IP Address
[ "Read", "Traceroute", "option", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/ipv4.py#L660-L710
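The Traceroute option is fixed-size, so the body after the kind and length octets can be unpacked in one go. A standalone sketch with struct (the function and field names are mine; the order follows the figure above):

import ipaddress, struct

def parse_traceroute_body(body: bytes) -> dict:
    id_number, outbound_hc, return_hc = struct.unpack('!HHH', body[:6])
    originator = str(ipaddress.IPv4Address(body[6:10]))
    return {'id': id_number, 'ohc': outbound_hc, 'rhc': return_hc, 'ip': originator}

parse_traceroute_body(struct.pack('!HHH', 0x1234, 3, 0) + bytes([192, 0, 2, 1]))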
JarryShaw/PyPCAPKit
src/protocols/internet/ipv4.py
IPv4._read_mode_sec
def _read_mode_sec(self, size, kind): """Read options with security info. Positional arguments: size - int, length of option kind - int, 130 (SEC )/ 133 (ESEC) Returns: * dict -- extracted option with security info (E/SEC) Structure of these options: * [RFC 1108] Security (SEC) +------------+------------+------------+-------------//----------+ | 10000010 | XXXXXXXX | SSSSSSSS | AAAAAAA[1] AAAAAAA0 | | | | | [0] | +------------+------------+------------+-------------//----------+ TYPE = 130 LENGTH CLASSIFICATION PROTECTION LEVEL AUTHORITY FLAGS * [RFC 1108] Extended Security (ESEC): +------------+------------+------------+-------//-------+ | 10000101 | 000LLLLL | AAAAAAAA | add sec info | +------------+------------+------------+-------//-------+ TYPE = 133 LENGTH ADDITIONAL ADDITIONAL SECURITY INFO SECURITY FORMAT CODE INFO Octets Bits Name Description 0 0 ip.sec.kind Kind (130) 0 0 ip.sec.type.copy Copied Flag (1) 0 1 ip.sec.type.class Option Class (0) 0 3 ip.sec.type.number Option Number (2) 1 8 ip.sec.length Length (≥3) 2 16 ip.sec.level Classification Level 3 24 ip.sec.flags Protection Authority Flags """ if size < 3: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') _clvl = self._read_unpack(1) data = dict( kind=kind, type=self._read_opt_type(kind), length=size, level=_CLASSIFICATION_LEVEL.get(_clvl, _clvl), ) if size > 3: _list = list() for counter in range(3, size): _flag = self._read_binary(1) if (counter < size - 1 and not int(_flag[7], base=2)) \ or (counter == size - 1 and int(_flag[7], base=2)): raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') _dict = dict() for (index, bit) in enumerate(_flag[:5]): _auth = _PROTECTION_AUTHORITY.get(index) _dict[_auth] = True if int(bit, base=2) else False _list.append(Info(_dict)) data['flags'] = tuple(_list) return data
python
def _read_mode_sec(self, size, kind): """Read options with security info. Positional arguments: size - int, length of option kind - int, 130 (SEC )/ 133 (ESEC) Returns: * dict -- extracted option with security info (E/SEC) Structure of these options: * [RFC 1108] Security (SEC) +------------+------------+------------+-------------//----------+ | 10000010 | XXXXXXXX | SSSSSSSS | AAAAAAA[1] AAAAAAA0 | | | | | [0] | +------------+------------+------------+-------------//----------+ TYPE = 130 LENGTH CLASSIFICATION PROTECTION LEVEL AUTHORITY FLAGS * [RFC 1108] Extended Security (ESEC): +------------+------------+------------+-------//-------+ | 10000101 | 000LLLLL | AAAAAAAA | add sec info | +------------+------------+------------+-------//-------+ TYPE = 133 LENGTH ADDITIONAL ADDITIONAL SECURITY INFO SECURITY FORMAT CODE INFO Octets Bits Name Description 0 0 ip.sec.kind Kind (130) 0 0 ip.sec.type.copy Copied Flag (1) 0 1 ip.sec.type.class Option Class (0) 0 3 ip.sec.type.number Option Number (2) 1 8 ip.sec.length Length (≥3) 2 16 ip.sec.level Classification Level 3 24 ip.sec.flags Protection Authority Flags """ if size < 3: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') _clvl = self._read_unpack(1) data = dict( kind=kind, type=self._read_opt_type(kind), length=size, level=_CLASSIFICATION_LEVEL.get(_clvl, _clvl), ) if size > 3: _list = list() for counter in range(3, size): _flag = self._read_binary(1) if (counter < size - 1 and not int(_flag[7], base=2)) \ or (counter == size - 1 and int(_flag[7], base=2)): raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') _dict = dict() for (index, bit) in enumerate(_flag[:5]): _auth = _PROTECTION_AUTHORITY.get(index) _dict[_auth] = True if int(bit, base=2) else False _list.append(Info(_dict)) data['flags'] = tuple(_list) return data
[ "def", "_read_mode_sec", "(", "self", ",", "size", ",", "kind", ")", ":", "if", "size", "<", "3", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {kind}] invalid format'", ")", "_clvl", "=", "self", ".", "_read_unpack", "(", "1", ")", "data", "=", "dict", "(", "kind", "=", "kind", ",", "type", "=", "self", ".", "_read_opt_type", "(", "kind", ")", ",", "length", "=", "size", ",", "level", "=", "_CLASSIFICATION_LEVEL", ".", "get", "(", "_clvl", ",", "_clvl", ")", ",", ")", "if", "size", ">", "3", ":", "_list", "=", "list", "(", ")", "for", "counter", "in", "range", "(", "3", ",", "size", ")", ":", "_flag", "=", "self", ".", "_read_binary", "(", "1", ")", "if", "(", "counter", "<", "size", "-", "1", "and", "not", "int", "(", "_flag", "[", "7", "]", ",", "base", "=", "2", ")", ")", "or", "(", "counter", "==", "size", "-", "1", "and", "int", "(", "_flag", "[", "7", "]", ",", "base", "=", "2", ")", ")", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {kind}] invalid format'", ")", "_dict", "=", "dict", "(", ")", "for", "(", "index", ",", "bit", ")", "in", "enumerate", "(", "_flag", "[", ":", "5", "]", ")", ":", "_auth", "=", "_PROTECTION_AUTHORITY", ".", "get", "(", "index", ")", "_dict", "[", "_auth", "]", "=", "True", "if", "int", "(", "bit", ",", "base", "=", "2", ")", "else", "False", "_list", ".", "append", "(", "Info", "(", "_dict", ")", ")", "data", "[", "'flags'", "]", "=", "tuple", "(", "_list", ")", "return", "data" ]
Read options with security info. Positional arguments: size - int, length of option kind - int, 130 (SEC )/ 133 (ESEC) Returns: * dict -- extracted option with security info (E/SEC) Structure of these options: * [RFC 1108] Security (SEC) +------------+------------+------------+-------------//----------+ | 10000010 | XXXXXXXX | SSSSSSSS | AAAAAAA[1] AAAAAAA0 | | | | | [0] | +------------+------------+------------+-------------//----------+ TYPE = 130 LENGTH CLASSIFICATION PROTECTION LEVEL AUTHORITY FLAGS * [RFC 1108] Extended Security (ESEC): +------------+------------+------------+-------//-------+ | 10000101 | 000LLLLL | AAAAAAAA | add sec info | +------------+------------+------------+-------//-------+ TYPE = 133 LENGTH ADDITIONAL ADDITIONAL SECURITY INFO SECURITY FORMAT CODE INFO Octets Bits Name Description 0 0 ip.sec.kind Kind (130) 0 0 ip.sec.type.copy Copied Flag (1) 0 1 ip.sec.type.class Option Class (0) 0 3 ip.sec.type.number Option Number (2) 1 8 ip.sec.length Length (≥3) 2 16 ip.sec.level Classification Level 3 24 ip.sec.flags Protection Authority Flags
[ "Read", "options", "with", "security", "info", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/ipv4.py#L712-L776
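Each protection-authority octet above is read MSB-first: the leading bits carry the authority flags and the low-order bit says whether another flag octet follows (1 yes, 0 final), which is exactly the consistency check _read_mode_sec performs against the option length. A standalone per-octet sketch of that logic (names illustrative only):

def split_authority_octet(octet: int, is_last: bool) -> list:
    more = bool(octet & 0x01)                 # low-order bit: continuation marker
    if is_last == more:
        raise ValueError('termination bit inconsistent with option length')
    return [bool(octet & (1 << (7 - i))) for i in range(5)]   # the five defined authority flags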
JarryShaw/PyPCAPKit
src/protocols/internet/ipv4.py
IPv4._read_mode_rsralt
def _read_mode_rsralt(self, size, kind): """Read Router Alert option. Positional arguments: size - int, length of option kind - int, 148 (RTRALT) Returns: * dict -- extracted Router Alert (RTRALT) option Structure of Router Alert (RTRALT) option [RFC 2113]: +--------+--------+--------+--------+ |10010100|00000100| 2 octet value | +--------+--------+--------+--------+ Octets Bits Name Description 0 0 ip.rsralt.kind Kind (148) 0 0 ip.rsralt.type.copy Copied Flag (1) 0 1 ip.rsralt.type.class Option Class (0) 0 3 ip.rsralt.type.number Option Number (20) 1 8 ip.rsralt.length Length (4) 2 16 ip.rsralt.alert Alert 2 16 ip.rsralt.code Alert Code """ if size != 4: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') _code = self._read_unpack(2) data = dict( kind=kind, type=self._read_opt_type(kind), length=size, alert=_ROUTER_ALERT.get(_code, 'Reserved'), code=_code, ) return data
python
def _read_mode_rsralt(self, size, kind): """Read Router Alert option. Positional arguments: size - int, length of option kind - int, 148 (RTRALT) Returns: * dict -- extracted Router Alert (RTRALT) option Structure of Router Alert (RTRALT) option [RFC 2113]: +--------+--------+--------+--------+ |10010100|00000100| 2 octet value | +--------+--------+--------+--------+ Octets Bits Name Description 0 0 ip.rsralt.kind Kind (148) 0 0 ip.rsralt.type.copy Copied Flag (1) 0 1 ip.rsralt.type.class Option Class (0) 0 3 ip.rsralt.type.number Option Number (20) 1 8 ip.rsralt.length Length (4) 2 16 ip.rsralt.alert Alert 2 16 ip.rsralt.code Alert Code """ if size != 4: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') _code = self._read_unpack(2) data = dict( kind=kind, type=self._read_opt_type(kind), length=size, alert=_ROUTER_ALERT.get(_code, 'Reserved'), code=_code, ) return data
[ "def", "_read_mode_rsralt", "(", "self", ",", "size", ",", "kind", ")", ":", "if", "size", "!=", "4", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {kind}] invalid format'", ")", "_code", "=", "self", ".", "_read_unpack", "(", "2", ")", "data", "=", "dict", "(", "kind", "=", "kind", ",", "type", "=", "self", ".", "_read_opt_type", "(", "kind", ")", ",", "length", "=", "size", ",", "alert", "=", "_ROUTER_ALERT", ".", "get", "(", "_code", ",", "'Reserved'", ")", ",", "code", "=", "_code", ",", ")", "return", "data" ]
Read Router Alert option. Positional arguments: size - int, length of option kind - int, 148 (RTRALT) Returns: * dict -- extracted Router Alert (RTRALT) option Structure of Router Alert (RTRALT) option [RFC 2113]: +--------+--------+--------+--------+ |10010100|00000100| 2 octet value | +--------+--------+--------+--------+ Octets Bits Name Description 0 0 ip.rsralt.kind Kind (148) 0 0 ip.rsralt.type.copy Copied Flag (1) 0 1 ip.rsralt.type.class Option Class (0) 0 3 ip.rsralt.type.number Option Number (20) 1 8 ip.rsralt.length Length (4) 2 16 ip.rsralt.alert Alert 2 16 ip.rsralt.code Alert Code
[ "Read", "Router", "Alert", "option", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/ipv4.py#L778-L816
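Router Alert is always four octets, so the whole option can be decoded directly; value 0 is the only assigned code ("Router shall examine packet", RFC 2113) and everything else is reserved, which is why the lookup above falls back to 'Reserved'. A standalone sketch (the function name is hypothetical):

import struct

def parse_router_alert(option: bytes) -> dict:
    kind, length = option[0], option[1]
    (code,) = struct.unpack('!H', option[2:4])
    return {'kind': kind, 'length': length, 'code': code,
            'alert': 'Router shall examine packet' if code == 0 else 'Reserved'}

parse_router_alert(bytes([148, 4, 0, 0]))   # the canonical 94 04 00 00 encoding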
JarryShaw/PyPCAPKit
src/protocols/pcap/header.py
Header.read_header
def read_header(self): """Read global header of PCAP file. Structure of global header (C): typedef struct pcap_hdr_s { guint32 magic_number; /* magic number */ guint16 version_major; /* major version number */ guint16 version_minor; /* minor version number */ gint32 thiszone; /* GMT to local correction */ guint32 sigfigs; /* accuracy of timestamps */ guint32 snaplen; /* max length of captured packets, in octets */ guint32 network; /* data link type */ } pcap_hdr_t; """ _magn = self._read_fileng(4) if _magn == b'\xd4\xc3\xb2\xa1': lilendian = True self._nsec = False self._byte = 'little' elif _magn == b'\xa1\xb2\xc3\xd4': lilendian = False self._nsec = False self._byte = 'big' elif _magn == b'\x4d\x3c\xb2\xa1': lilendian = True self._nsec = True self._byte = 'little' elif _magn == b'\xa1\xb2\x3c\x4d': lilendian = False self._nsec = True self._byte = 'big' else: raise FileError(5, 'Unknown file format', self._file.name) _vmaj = self._read_unpack(2, lilendian=lilendian) _vmin = self._read_unpack(2, lilendian=lilendian) _zone = self._read_unpack(4, lilendian=lilendian, signed=True) _acts = self._read_unpack(4, lilendian=lilendian) _slen = self._read_unpack(4, lilendian=lilendian) _type = self._read_protos(4) _byte = self._read_packet(24) self._file = io.BytesIO(_byte) header = dict( magic_number=dict( data=_magn, byteorder=self._byte, nanosecond=self._nsec, ), version_major=_vmaj, version_minor=_vmin, thiszone=_zone, sigfigs=_acts, snaplen=_slen, network=_type, packet=_byte, ) return header
python
def read_header(self): """Read global header of PCAP file. Structure of global header (C): typedef struct pcap_hdr_s { guint32 magic_number; /* magic number */ guint16 version_major; /* major version number */ guint16 version_minor; /* minor version number */ gint32 thiszone; /* GMT to local correction */ guint32 sigfigs; /* accuracy of timestamps */ guint32 snaplen; /* max length of captured packets, in octets */ guint32 network; /* data link type */ } pcap_hdr_t; """ _magn = self._read_fileng(4) if _magn == b'\xd4\xc3\xb2\xa1': lilendian = True self._nsec = False self._byte = 'little' elif _magn == b'\xa1\xb2\xc3\xd4': lilendian = False self._nsec = False self._byte = 'big' elif _magn == b'\x4d\x3c\xb2\xa1': lilendian = True self._nsec = True self._byte = 'little' elif _magn == b'\xa1\xb2\x3c\x4d': lilendian = False self._nsec = True self._byte = 'big' else: raise FileError(5, 'Unknown file format', self._file.name) _vmaj = self._read_unpack(2, lilendian=lilendian) _vmin = self._read_unpack(2, lilendian=lilendian) _zone = self._read_unpack(4, lilendian=lilendian, signed=True) _acts = self._read_unpack(4, lilendian=lilendian) _slen = self._read_unpack(4, lilendian=lilendian) _type = self._read_protos(4) _byte = self._read_packet(24) self._file = io.BytesIO(_byte) header = dict( magic_number=dict( data=_magn, byteorder=self._byte, nanosecond=self._nsec, ), version_major=_vmaj, version_minor=_vmin, thiszone=_zone, sigfigs=_acts, snaplen=_slen, network=_type, packet=_byte, ) return header
[ "def", "read_header", "(", "self", ")", ":", "_magn", "=", "self", ".", "_read_fileng", "(", "4", ")", "if", "_magn", "==", "b'\\xd4\\xc3\\xb2\\xa1'", ":", "lilendian", "=", "True", "self", ".", "_nsec", "=", "False", "self", ".", "_byte", "=", "'little'", "elif", "_magn", "==", "b'\\xa1\\xb2\\xc3\\xd4'", ":", "lilendian", "=", "False", "self", ".", "_nsec", "=", "False", "self", ".", "_byte", "=", "'big'", "elif", "_magn", "==", "b'\\x4d\\x3c\\xb2\\xa1'", ":", "lilendian", "=", "True", "self", ".", "_nsec", "=", "True", "self", ".", "_byte", "=", "'little'", "elif", "_magn", "==", "b'\\xa1\\xb2\\x3c\\x4d'", ":", "lilendian", "=", "False", "self", ".", "_nsec", "=", "True", "self", ".", "_byte", "=", "'big'", "else", ":", "raise", "FileError", "(", "5", ",", "'Unknown file format'", ",", "self", ".", "_file", ".", "name", ")", "_vmaj", "=", "self", ".", "_read_unpack", "(", "2", ",", "lilendian", "=", "lilendian", ")", "_vmin", "=", "self", ".", "_read_unpack", "(", "2", ",", "lilendian", "=", "lilendian", ")", "_zone", "=", "self", ".", "_read_unpack", "(", "4", ",", "lilendian", "=", "lilendian", ",", "signed", "=", "True", ")", "_acts", "=", "self", ".", "_read_unpack", "(", "4", ",", "lilendian", "=", "lilendian", ")", "_slen", "=", "self", ".", "_read_unpack", "(", "4", ",", "lilendian", "=", "lilendian", ")", "_type", "=", "self", ".", "_read_protos", "(", "4", ")", "_byte", "=", "self", ".", "_read_packet", "(", "24", ")", "self", ".", "_file", "=", "io", ".", "BytesIO", "(", "_byte", ")", "header", "=", "dict", "(", "magic_number", "=", "dict", "(", "data", "=", "_magn", ",", "byteorder", "=", "self", ".", "_byte", ",", "nanosecond", "=", "self", ".", "_nsec", ",", ")", ",", "version_major", "=", "_vmaj", ",", "version_minor", "=", "_vmin", ",", "thiszone", "=", "_zone", ",", "sigfigs", "=", "_acts", ",", "snaplen", "=", "_slen", ",", "network", "=", "_type", ",", "packet", "=", "_byte", ",", ")", "return", "header" ]
Read global header of PCAP file. Structure of global header (C): typedef struct pcap_hdr_s { guint32 magic_number; /* magic number */ guint16 version_major; /* major version number */ guint16 version_minor; /* minor version number */ gint32 thiszone; /* GMT to local correction */ guint32 sigfigs; /* accuracy of timestamps */ guint32 snaplen; /* max length of captured packets, in octets */ guint32 network; /* data link type */ } pcap_hdr_t;
[ "Read", "global", "header", "of", "PCAP", "file", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/pcap/header.py#L110-L170
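The four magic numbers carry two facts at once: byte order and whether timestamps use micro- or nanosecond resolution. A self-contained sketch of the same dispatch plus the remaining 20 header octets, using struct instead of pcapkit's reader helpers (MAGIC and read_global_header are illustrative names):

import struct

MAGIC = {
    b'\xd4\xc3\xb2\xa1': ('little', False),   # classic pcap, little-endian, microseconds
    b'\xa1\xb2\xc3\xd4': ('big', False),      # classic pcap, big-endian, microseconds
    b'\x4d\x3c\xb2\xa1': ('little', True),    # nanosecond pcap, little-endian
    b'\xa1\xb2\x3c\x4d': ('big', True),       # nanosecond pcap, big-endian
}

def read_global_header(buf: bytes) -> dict:
    try:
        byteorder, nanosecond = MAGIC[buf[:4]]
    except KeyError:
        raise ValueError('unknown file format') from None
    fmt = ('<' if byteorder == 'little' else '>') + 'HHiIII'
    vmaj, vmin, thiszone, sigfigs, snaplen, network = struct.unpack(fmt, buf[4:24])
    return dict(byteorder=byteorder, nanosecond=nanosecond, version=(vmaj, vmin),
                thiszone=thiszone, sigfigs=sigfigs, snaplen=snaplen, network=network)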
JarryShaw/PyPCAPKit
src/protocols/pcap/header.py
Header._read_protos
def _read_protos(self, size): """Read next layer protocol type. Positional arguments: * size -- int, buffer size Returns: * str -- link layer protocol name """ _byte = self._read_unpack(4, lilendian=True) _prot = LINKTYPE.get(_byte) return _prot
python
def _read_protos(self, size): """Read next layer protocol type. Positional arguments: * size -- int, buffer size Returns: * str -- link layer protocol name """ _byte = self._read_unpack(4, lilendian=True) _prot = LINKTYPE.get(_byte) return _prot
[ "def", "_read_protos", "(", "self", ",", "size", ")", ":", "_byte", "=", "self", ".", "_read_unpack", "(", "4", ",", "lilendian", "=", "True", ")", "_prot", "=", "LINKTYPE", ".", "get", "(", "_byte", ")", "return", "_prot" ]
Read next layer protocol type. Positional arguments: * size -- int, buffer size Returns: * str -- link layer protocol name
[ "Read", "next", "layer", "protocol", "type", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/pcap/header.py#L190-L202
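LINKTYPE maps that 32-bit network field onto the registered link-layer types from tcpdump.org; only a few entries matter for the dispatch shown later in frame.py. A sample of those registrations for orientation (the real table in pcapkit is much larger):

LINKTYPE_SAMPLE = {
    0: 'Null',         # LINKTYPE_NULL, BSD loopback
    1: 'Ethernet',     # LINKTYPE_ETHERNET
    228: 'IPv4',       # LINKTYPE_IPV4, raw IPv4 packets
    229: 'IPv6',       # LINKTYPE_IPV6, raw IPv6 packets
}
LINKTYPE_SAMPLE.get(1)   # -> 'Ethernet'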
JarryShaw/PyPCAPKit
src/protocols/pcap/frame.py
Frame.read_frame
def read_frame(self): """Read each block after global header. Structure of record/package header (C): typedef struct pcaprec_hdr_s { guint32 ts_sec; /* timestamp seconds */ guint32 ts_usec; /* timestamp microseconds */ guint32 incl_len; /* number of octets of packet saved in file */ guint32 orig_len; /* actual length of packet */ } pcaprec_hdr_t; """ # _scur = self._file.tell() _temp = self._read_unpack(4, lilendian=True, quiet=True) if _temp is None: raise EOFError _tsss = _temp _tsus = self._read_unpack(4, lilendian=True) _ilen = self._read_unpack(4, lilendian=True) _olen = self._read_unpack(4, lilendian=True) if self._nsec: _epch = _tsss + _tsus / 1000000000 else: _epch = _tsss + _tsus / 1000000 _time = datetime.datetime.fromtimestamp(_epch) frame = dict( frame_info=dict( ts_sec=_tsss, ts_usec=_tsus, incl_len=_ilen, orig_len=_olen, ), time=_time, number=self._fnum, time_epoch=_epch, len=_ilen, cap_len=_olen, ) # load packet data length = frame['len'] bytes_ = self._file.read(length) # record file pointer if self._mpkt and self._mpfp: # print(self._fnum, 'ready') self._mpfp.put(self._file.tell()) self._mpkt.pool += 1 # make BytesIO from frame packet data frame['packet'] = bytes_ self._file = io.BytesIO(bytes_) # frame['packet'] = self._read_packet(header=0, payload=length, discard=True) return self._decode_next_layer(frame, length)
python
def read_frame(self): """Read each block after global header. Structure of record/package header (C): typedef struct pcaprec_hdr_s { guint32 ts_sec; /* timestamp seconds */ guint32 ts_usec; /* timestamp microseconds */ guint32 incl_len; /* number of octets of packet saved in file */ guint32 orig_len; /* actual length of packet */ } pcaprec_hdr_t; """ # _scur = self._file.tell() _temp = self._read_unpack(4, lilendian=True, quiet=True) if _temp is None: raise EOFError _tsss = _temp _tsus = self._read_unpack(4, lilendian=True) _ilen = self._read_unpack(4, lilendian=True) _olen = self._read_unpack(4, lilendian=True) if self._nsec: _epch = _tsss + _tsus / 1000000000 else: _epch = _tsss + _tsus / 1000000 _time = datetime.datetime.fromtimestamp(_epch) frame = dict( frame_info=dict( ts_sec=_tsss, ts_usec=_tsus, incl_len=_ilen, orig_len=_olen, ), time=_time, number=self._fnum, time_epoch=_epch, len=_ilen, cap_len=_olen, ) # load packet data length = frame['len'] bytes_ = self._file.read(length) # record file pointer if self._mpkt and self._mpfp: # print(self._fnum, 'ready') self._mpfp.put(self._file.tell()) self._mpkt.pool += 1 # make BytesIO from frame packet data frame['packet'] = bytes_ self._file = io.BytesIO(bytes_) # frame['packet'] = self._read_packet(header=0, payload=length, discard=True) return self._decode_next_layer(frame, length)
[ "def", "read_frame", "(", "self", ")", ":", "# _scur = self._file.tell()", "_temp", "=", "self", ".", "_read_unpack", "(", "4", ",", "lilendian", "=", "True", ",", "quiet", "=", "True", ")", "if", "_temp", "is", "None", ":", "raise", "EOFError", "_tsss", "=", "_temp", "_tsus", "=", "self", ".", "_read_unpack", "(", "4", ",", "lilendian", "=", "True", ")", "_ilen", "=", "self", ".", "_read_unpack", "(", "4", ",", "lilendian", "=", "True", ")", "_olen", "=", "self", ".", "_read_unpack", "(", "4", ",", "lilendian", "=", "True", ")", "if", "self", ".", "_nsec", ":", "_epch", "=", "_tsss", "+", "_tsus", "/", "1000000000", "else", ":", "_epch", "=", "_tsss", "+", "_tsus", "/", "1000000", "_time", "=", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "_epch", ")", "frame", "=", "dict", "(", "frame_info", "=", "dict", "(", "ts_sec", "=", "_tsss", ",", "ts_usec", "=", "_tsus", ",", "incl_len", "=", "_ilen", ",", "orig_len", "=", "_olen", ",", ")", ",", "time", "=", "_time", ",", "number", "=", "self", ".", "_fnum", ",", "time_epoch", "=", "_epch", ",", "len", "=", "_ilen", ",", "cap_len", "=", "_olen", ",", ")", "# load packet data", "length", "=", "frame", "[", "'len'", "]", "bytes_", "=", "self", ".", "_file", ".", "read", "(", "length", ")", "# record file pointer", "if", "self", ".", "_mpkt", "and", "self", ".", "_mpfp", ":", "# print(self._fnum, 'ready')", "self", ".", "_mpfp", ".", "put", "(", "self", ".", "_file", ".", "tell", "(", ")", ")", "self", ".", "_mpkt", ".", "pool", "+=", "1", "# make BytesIO from frame packet data", "frame", "[", "'packet'", "]", "=", "bytes_", "self", ".", "_file", "=", "io", ".", "BytesIO", "(", "bytes_", ")", "# frame['packet'] = self._read_packet(header=0, payload=length, discard=True)", "return", "self", ".", "_decode_next_layer", "(", "frame", ",", "length", ")" ]
Read each block after global header. Structure of record/package header (C): typedef struct pcaprec_hdr_s { guint32 ts_sec; /* timestamp seconds */ guint32 ts_usec; /* timestamp microseconds */ guint32 incl_len; /* number of octets of packet saved in file */ guint32 orig_len; /* actual length of packet */ } pcaprec_hdr_t;
[ "Read", "each", "block", "after", "global", "header", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/pcap/frame.py#L88-L145
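read_frame pulls the four 32-bit fields of the per-record header and then derives the epoch time from the fractional part, scaled by the nanosecond flag detected in the global header. A standalone little-endian sketch of just that step (the function name is mine):

import datetime, struct

def parse_record_header(buf: bytes, nanosecond: bool = False) -> dict:
    # 16 octets: seconds, micro/nanoseconds, captured length, original length
    ts_sec, ts_frac, incl_len, orig_len = struct.unpack('<IIII', buf[:16])
    epoch = ts_sec + ts_frac / (1_000_000_000 if nanosecond else 1_000_000)
    return dict(ts_sec=ts_sec, ts_usec=ts_frac, incl_len=incl_len, orig_len=orig_len,
                time=datetime.datetime.fromtimestamp(epoch))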
JarryShaw/PyPCAPKit
src/protocols/pcap/frame.py
Frame._decode_next_layer
def _decode_next_layer(self, dict_, length=None): """Decode next layer protocol. Positional arguments: dict_ -- dict, info buffer proto -- str, next layer protocol name length -- int, valid (not padding) length Returns: * dict -- current protocol with packet extracted """ seek_cur = self._file.tell() try: next_ = self._import_next_layer(self._prot, length) except Exception: dict_['error'] = traceback.format_exc(limit=1).strip().split(os.linesep)[-1] self._file.seek(seek_cur, os.SEEK_SET) next_ = beholder(self._import_next_layer)(self, self._prot, length, error=True) info, chain = next_.info, next_.protochain # make next layer protocol name layer = next_.alias.lower() # proto = next_.__class__.__name__ # write info and protocol chain into dict self._next = next_ self._protos = chain dict_[layer] = info dict_['protocols'] = self._protos.chain return dict_
python
def _decode_next_layer(self, dict_, length=None): """Decode next layer protocol. Positional arguments: dict_ -- dict, info buffer proto -- str, next layer protocol name length -- int, valid (not padding) length Returns: * dict -- current protocol with packet extracted """ seek_cur = self._file.tell() try: next_ = self._import_next_layer(self._prot, length) except Exception: dict_['error'] = traceback.format_exc(limit=1).strip().split(os.linesep)[-1] self._file.seek(seek_cur, os.SEEK_SET) next_ = beholder(self._import_next_layer)(self, self._prot, length, error=True) info, chain = next_.info, next_.protochain # make next layer protocol name layer = next_.alias.lower() # proto = next_.__class__.__name__ # write info and protocol chain into dict self._next = next_ self._protos = chain dict_[layer] = info dict_['protocols'] = self._protos.chain return dict_
[ "def", "_decode_next_layer", "(", "self", ",", "dict_", ",", "length", "=", "None", ")", ":", "seek_cur", "=", "self", ".", "_file", ".", "tell", "(", ")", "try", ":", "next_", "=", "self", ".", "_import_next_layer", "(", "self", ".", "_prot", ",", "length", ")", "except", "Exception", ":", "dict_", "[", "'error'", "]", "=", "traceback", ".", "format_exc", "(", "limit", "=", "1", ")", ".", "strip", "(", ")", ".", "split", "(", "os", ".", "linesep", ")", "[", "-", "1", "]", "self", ".", "_file", ".", "seek", "(", "seek_cur", ",", "os", ".", "SEEK_SET", ")", "next_", "=", "beholder", "(", "self", ".", "_import_next_layer", ")", "(", "self", ",", "self", ".", "_prot", ",", "length", ",", "error", "=", "True", ")", "info", ",", "chain", "=", "next_", ".", "info", ",", "next_", ".", "protochain", "# make next layer protocol name", "layer", "=", "next_", ".", "alias", ".", "lower", "(", ")", "# proto = next_.__class__.__name__", "# write info and protocol chain into dict", "self", ".", "_next", "=", "next_", "self", ".", "_protos", "=", "chain", "dict_", "[", "layer", "]", "=", "info", "dict_", "[", "'protocols'", "]", "=", "self", ".", "_protos", ".", "chain", "return", "dict_" ]
Decode next layer protocol. Positional arguments: dict_ -- dict, info buffer proto -- str, next layer protocol name length -- int, valid (not padding) length Returns: * dict -- current protocol with packet extracted
[ "Decode", "next", "layer", "protocol", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/pcap/frame.py#L229-L259
JarryShaw/PyPCAPKit
src/protocols/pcap/frame.py
Frame._import_next_layer
def _import_next_layer(self, proto, length, error=False): """Import next layer extractor. Positional arguments: * proto -- str, next layer protocol name * length -- int, valid (not padding) length Keyword arguments: * error -- bool, if function call on error Returns: * bool -- flag if extraction of next layer succeeded * Info -- info of next layer * ProtoChain -- protocol chain of next layer * str -- alias of next layer Protocols: * Ethernet (data link layer) * IPv4 (internet layer) * IPv6 (internet layer) """ if proto == 1: from pcapkit.protocols.link import Ethernet as Protocol elif proto == 228: from pcapkit.protocols.internet import IPv4 as Protocol elif proto == 229: from pcapkit.protocols.internet import IPv6 as Protocol else: from pcapkit.protocols.raw import Raw as Protocol next_ = Protocol(self._file, length, error=error, layer=self._exlayer, protocol=self._exproto) return next_
python
def _import_next_layer(self, proto, length, error=False): """Import next layer extractor. Positional arguments: * proto -- str, next layer protocol name * length -- int, valid (not padding) length Keyword arguments: * error -- bool, if function call on error Returns: * bool -- flag if extraction of next layer succeeded * Info -- info of next layer * ProtoChain -- protocol chain of next layer * str -- alias of next layer Protocols: * Ethernet (data link layer) * IPv4 (internet layer) * IPv6 (internet layer) """ if proto == 1: from pcapkit.protocols.link import Ethernet as Protocol elif proto == 228: from pcapkit.protocols.internet import IPv4 as Protocol elif proto == 229: from pcapkit.protocols.internet import IPv6 as Protocol else: from pcapkit.protocols.raw import Raw as Protocol next_ = Protocol(self._file, length, error=error, layer=self._exlayer, protocol=self._exproto) return next_
[ "def", "_import_next_layer", "(", "self", ",", "proto", ",", "length", ",", "error", "=", "False", ")", ":", "if", "proto", "==", "1", ":", "from", "pcapkit", ".", "protocols", ".", "link", "import", "Ethernet", "as", "Protocol", "elif", "proto", "==", "228", ":", "from", "pcapkit", ".", "protocols", ".", "internet", "import", "IPv4", "as", "Protocol", "elif", "proto", "==", "229", ":", "from", "pcapkit", ".", "protocols", ".", "internet", "import", "IPv6", "as", "Protocol", "else", ":", "from", "pcapkit", ".", "protocols", ".", "raw", "import", "Raw", "as", "Protocol", "next_", "=", "Protocol", "(", "self", ".", "_file", ",", "length", ",", "error", "=", "error", ",", "layer", "=", "self", ".", "_exlayer", ",", "protocol", "=", "self", ".", "_exproto", ")", "return", "next_" ]
Import next layer extractor. Positional arguments: * proto -- str, next layer protocol name * length -- int, valid (not padding) length Keyword arguments: * error -- bool, if function call on error Returns: * bool -- flag if extraction of next layer succeeded * Info -- info of next layer * ProtoChain -- protocol chain of next layer * str -- alias of next layer Protocols: * Ethernet (data link layer) * IPv4 (internet layer) * IPv6 (internet layer)
[ "Import", "next", "layer", "extractor", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/pcap/frame.py#L261-L293
JarryShaw/PyPCAPKit
src/const/hip/registration_failure.py
RegistrationFailure.get
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return RegistrationFailure(key) if key not in RegistrationFailure._member_map_: extend_enum(RegistrationFailure, key, default) return RegistrationFailure[key]
python
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return RegistrationFailure(key) if key not in RegistrationFailure._member_map_: extend_enum(RegistrationFailure, key, default) return RegistrationFailure[key]
[ "def", "get", "(", "key", ",", "default", "=", "-", "1", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "return", "RegistrationFailure", "(", "key", ")", "if", "key", "not", "in", "RegistrationFailure", ".", "_member_map_", ":", "extend_enum", "(", "RegistrationFailure", ",", "key", ",", "default", ")", "return", "RegistrationFailure", "[", "key", "]" ]
Backport support for original codes.
[ "Backport", "support", "for", "original", "codes", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/const/hip/registration_failure.py#L23-L29
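This get() shape repeats across the generated constant modules (see also TOS_PRE.get and NotifyMessage.get below): an integer key goes through the normal enum value lookup, while an unknown string name is registered on the fly before the name lookup. A toy illustration of the pattern (the Colour enum is made up, and extend_enum is assumed to be the aenum helper of that name):

from aenum import IntEnum, extend_enum

class Colour(IntEnum):
    RED = 0

def get(key, default=-1):
    if isinstance(key, int):
        return Colour(key)                    # value lookup, raises ValueError if unknown
    if key not in Colour._member_map_:
        extend_enum(Colour, key, default)     # register the unseen name with the default value
    return Colour[key]                        # name lookup

get('BLUE')    # -> <Colour.BLUE: -1>, created on demand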
JarryShaw/PyPCAPKit
src/const/ipv4/tos_pre.py
TOS_PRE.get
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return TOS_PRE(key) if key not in TOS_PRE._member_map_: extend_enum(TOS_PRE, key, default) return TOS_PRE[key]
python
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return TOS_PRE(key) if key not in TOS_PRE._member_map_: extend_enum(TOS_PRE, key, default) return TOS_PRE[key]
[ "def", "get", "(", "key", ",", "default", "=", "-", "1", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "return", "TOS_PRE", "(", "key", ")", "if", "key", "not", "in", "TOS_PRE", ".", "_member_map_", ":", "extend_enum", "(", "TOS_PRE", ",", "key", ",", "default", ")", "return", "TOS_PRE", "[", "key", "]" ]
Backport support for original codes.
[ "Backport", "support", "for", "original", "codes", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/const/ipv4/tos_pre.py#L22-L28
JarryShaw/PyPCAPKit
src/protocols/raw.py
Raw.read_raw
def read_raw(self, length, *, error=None): """Read raw packet data.""" if length is None: length = len(self) raw = dict( packet=self._read_fileng(length), error=error or None, ) return raw
python
def read_raw(self, length, *, error=None): """Read raw packet data.""" if length is None: length = len(self) raw = dict( packet=self._read_fileng(length), error=error or None, ) return raw
[ "def", "read_raw", "(", "self", ",", "length", ",", "*", ",", "error", "=", "None", ")", ":", "if", "length", "is", "None", ":", "length", "=", "len", "(", "self", ")", "raw", "=", "dict", "(", "packet", "=", "self", ".", "_read_fileng", "(", "length", ")", ",", "error", "=", "error", "or", "None", ",", ")", "return", "raw" ]
Read raw packet data.
[ "Read", "raw", "packet", "data", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/raw.py#L73-L83
JarryShaw/PyPCAPKit
src/const/hip/notify_message.py
NotifyMessage.get
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return NotifyMessage(key) if key not in NotifyMessage._member_map_: extend_enum(NotifyMessage, key, default) return NotifyMessage[key]
python
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return NotifyMessage(key) if key not in NotifyMessage._member_map_: extend_enum(NotifyMessage, key, default) return NotifyMessage[key]
[ "def", "get", "(", "key", ",", "default", "=", "-", "1", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "return", "NotifyMessage", "(", "key", ")", "if", "key", "not", "in", "NotifyMessage", ".", "_member_map_", ":", "extend_enum", "(", "NotifyMessage", ",", "key", ",", "default", ")", "return", "NotifyMessage", "[", "key", "]" ]
Backport support for original codes.
[ "Backport", "support", "for", "original", "codes", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/const/hip/notify_message.py#L49-L55
JarryShaw/PyPCAPKit
src/const/hip/notify_message.py
NotifyMessage._missing_
def _missing_(cls, value): """Lookup function used when value is not found.""" if not (isinstance(value, int) and 0 <= value <= 65535): raise ValueError('%r is not a valid %s' % (value, cls.__name__)) if 2 <= value <= 6: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 8 <= value <= 13: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 21 <= value <= 23: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 29 <= value <= 31: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 33 <= value <= 39: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 52 <= value <= 59: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 63 <= value <= 69: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 71 <= value <= 89: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 91 <= value <= 99: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 101 <= value <= 8191: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 8192 <= value <= 16383: # [RFC 7401] extend_enum(cls, 'Reserved for Private Use [%d]' % value, value) return cls(value) if 16385 <= value <= 40959: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 40960 <= value <= 65535: # [RFC 7401] extend_enum(cls, 'Reserved for Private Use [%d]' % value, value) return cls(value) super()._missing_(value)
python
def _missing_(cls, value): """Lookup function used when value is not found.""" if not (isinstance(value, int) and 0 <= value <= 65535): raise ValueError('%r is not a valid %s' % (value, cls.__name__)) if 2 <= value <= 6: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 8 <= value <= 13: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 21 <= value <= 23: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 29 <= value <= 31: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 33 <= value <= 39: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 52 <= value <= 59: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 63 <= value <= 69: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 71 <= value <= 89: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 91 <= value <= 99: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 101 <= value <= 8191: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 8192 <= value <= 16383: # [RFC 7401] extend_enum(cls, 'Reserved for Private Use [%d]' % value, value) return cls(value) if 16385 <= value <= 40959: extend_enum(cls, 'Unassigned [%d]' % value, value) return cls(value) if 40960 <= value <= 65535: # [RFC 7401] extend_enum(cls, 'Reserved for Private Use [%d]' % value, value) return cls(value) super()._missing_(value)
[ "def", "_missing_", "(", "cls", ",", "value", ")", ":", "if", "not", "(", "isinstance", "(", "value", ",", "int", ")", "and", "0", "<=", "value", "<=", "65535", ")", ":", "raise", "ValueError", "(", "'%r is not a valid %s'", "%", "(", "value", ",", "cls", ".", "__name__", ")", ")", "if", "2", "<=", "value", "<=", "6", ":", "extend_enum", "(", "cls", ",", "'Unassigned [%d]'", "%", "value", ",", "value", ")", "return", "cls", "(", "value", ")", "if", "8", "<=", "value", "<=", "13", ":", "extend_enum", "(", "cls", ",", "'Unassigned [%d]'", "%", "value", ",", "value", ")", "return", "cls", "(", "value", ")", "if", "21", "<=", "value", "<=", "23", ":", "extend_enum", "(", "cls", ",", "'Unassigned [%d]'", "%", "value", ",", "value", ")", "return", "cls", "(", "value", ")", "if", "29", "<=", "value", "<=", "31", ":", "extend_enum", "(", "cls", ",", "'Unassigned [%d]'", "%", "value", ",", "value", ")", "return", "cls", "(", "value", ")", "if", "33", "<=", "value", "<=", "39", ":", "extend_enum", "(", "cls", ",", "'Unassigned [%d]'", "%", "value", ",", "value", ")", "return", "cls", "(", "value", ")", "if", "52", "<=", "value", "<=", "59", ":", "extend_enum", "(", "cls", ",", "'Unassigned [%d]'", "%", "value", ",", "value", ")", "return", "cls", "(", "value", ")", "if", "63", "<=", "value", "<=", "69", ":", "extend_enum", "(", "cls", ",", "'Unassigned [%d]'", "%", "value", ",", "value", ")", "return", "cls", "(", "value", ")", "if", "71", "<=", "value", "<=", "89", ":", "extend_enum", "(", "cls", ",", "'Unassigned [%d]'", "%", "value", ",", "value", ")", "return", "cls", "(", "value", ")", "if", "91", "<=", "value", "<=", "99", ":", "extend_enum", "(", "cls", ",", "'Unassigned [%d]'", "%", "value", ",", "value", ")", "return", "cls", "(", "value", ")", "if", "101", "<=", "value", "<=", "8191", ":", "extend_enum", "(", "cls", ",", "'Unassigned [%d]'", "%", "value", ",", "value", ")", "return", "cls", "(", "value", ")", "if", "8192", "<=", "value", "<=", "16383", ":", "# [RFC 7401]", "extend_enum", "(", "cls", ",", "'Reserved for Private Use [%d]'", "%", "value", ",", "value", ")", "return", "cls", "(", "value", ")", "if", "16385", "<=", "value", "<=", "40959", ":", "extend_enum", "(", "cls", ",", "'Unassigned [%d]'", "%", "value", ",", "value", ")", "return", "cls", "(", "value", ")", "if", "40960", "<=", "value", "<=", "65535", ":", "# [RFC 7401]", "extend_enum", "(", "cls", ",", "'Reserved for Private Use [%d]'", "%", "value", ",", "value", ")", "return", "cls", "(", "value", ")", "super", "(", ")", ".", "_missing_", "(", "value", ")" ]
Lookup function used when value is not found.
[ "Lookup", "function", "used", "when", "value", "is", "not", "found", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/const/hip/notify_message.py#L58-L103
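The `_missing_` hook in this record registers a fresh member for every unknown code via `extend_enum`. As a reference point, a minimal standard-library sketch of the same fallback idea, assuming a hypothetical and much smaller `Notify` enum with a single catch-all member instead of per-value registration:

import enum

class Notify(enum.IntEnum):
    # hypothetical subset of the real registry
    UNASSIGNED = -1
    RESPONDER_BUSY_PLEASE_RETRY = 100

    @classmethod
    def _missing_(cls, value):
        # Accept any 16-bit code instead of raising ValueError,
        # mapping it onto a single catch-all member.
        if isinstance(value, int) and 0 <= value <= 65535:
            return cls.UNASSIGNED
        return None

print(Notify(100))     # Notify.RESPONDER_BUSY_PLEASE_RETRY
print(Notify(12345))   # Notify.UNASSIGNED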
JarryShaw/PyPCAPKit
src/toolkit/dpkt.py
ipv6_hdr_len
def ipv6_hdr_len(ipv6):
    """Calculate length of headers before IPv6-Frag"""
    hdr_len = ipv6.__hdr_len__
    for code in (0, 60, 43):
        ext_hdr = ipv6.extension_hdrs.get(code)
        if ext_hdr is not None:
            hdr_len += ext_hdr.length
    return hdr_len
python
def ipv6_hdr_len(ipv6):
    """Calculate length of headers before IPv6-Frag"""
    hdr_len = ipv6.__hdr_len__
    for code in (0, 60, 43):
        ext_hdr = ipv6.extension_hdrs.get(code)
        if ext_hdr is not None:
            hdr_len += ext_hdr.length
    return hdr_len
[ "def", "ipv6_hdr_len", "(", "ipv6", ")", ":", "hdr_len", "=", "ipv6", ".", "__hdr_len__", "for", "code", "in", "(", "0", ",", "60", ",", "43", ")", ":", "ext_hdr", "=", "ipv6", ".", "extension_hdrs", ".", "get", "(", "code", ")", "if", "ext_hdr", "is", "not", "None", ":", "hdr_len", "+=", "ext_hdr", ".", "length", "return", "hdr_len" ]
Calculate length of headers before IPv6-Frag
[ "Calculate", "length", "of", "headers", "before", "IPv6", "-", "Frag" ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/toolkit/dpkt.py#L20-L27
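ipv6_hdr_len sums the lengths dpkt has already parsed for the Hop-by-Hop (0), Destination Options (60) and Routing (43) headers. For orientation, a raw-bytes sketch of the same walk, assuming `payload` holds everything after the fixed 40-byte IPv6 header; these three header types all encode their size as (Hdr Ext Len + 1) * 8 octets per RFC 8200:

import struct

_PRE_FRAG = {0, 43, 60}   # Hop-by-Hop, Routing, Destination Options

def ipv6_pre_frag_len(payload, first_nh):
    """Byte count of extension headers preceding IPv6-Frag, plus the fixed header."""
    offset, nh = 0, first_nh
    while nh in _PRE_FRAG:
        nh, ext_len = struct.unpack_from('!BB', payload, offset)
        offset += (ext_len + 1) * 8    # length field excludes the first 8 octets
    return 40 + offset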
JarryShaw/PyPCAPKit
src/toolkit/dpkt.py
packet2chain
def packet2chain(packet):
    """Fetch DPKT packet protocol chain."""
    chain = [type(packet).__name__]
    payload = packet.data
    while not isinstance(payload, bytes):
        chain.append(type(payload).__name__)
        payload = payload.data
    return ':'.join(chain)
python
def packet2chain(packet):
    """Fetch DPKT packet protocol chain."""
    chain = [type(packet).__name__]
    payload = packet.data
    while not isinstance(payload, bytes):
        chain.append(type(payload).__name__)
        payload = payload.data
    return ':'.join(chain)
[ "def", "packet2chain", "(", "packet", ")", ":", "chain", "=", "[", "type", "(", "packet", ")", ".", "__name__", "]", "payload", "=", "packet", ".", "data", "while", "not", "isinstance", "(", "payload", ",", "bytes", ")", ":", "chain", ".", "append", "(", "type", "(", "payload", ")", ".", "__name__", ")", "payload", "=", "payload", ".", "data", "return", "':'", ".", "join", "(", "chain", ")" ]
Fetch DPKT packet protocol chain.
[ "Fetch", "DPKT", "packet", "protocol", "chain", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/toolkit/dpkt.py#L30-L37
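A possible driver for packet2chain, assuming a capture file named example.pcap and dpkt's pcap reader; the chain string is simply the class names dpkt produced while decoding:

import dpkt

with open('example.pcap', 'rb') as fp:              # hypothetical capture file
    for timestamp, buf in dpkt.pcap.Reader(fp):
        eth = dpkt.ethernet.Ethernet(buf)            # decode from the link layer
        print(timestamp, packet2chain(eth))          # e.g. 'Ethernet:IP:TCP'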
JarryShaw/PyPCAPKit
src/toolkit/dpkt.py
packet2dict
def packet2dict(packet, timestamp, *, data_link):
    """Convert DPKT packet into dict."""
    def wrapper(packet):
        dict_ = dict()
        for field in packet.__hdr_fields__:
            dict_[field] = getattr(packet, field, None)
        payload = packet.data
        if not isinstance(payload, bytes):
            dict_[type(payload).__name__] = wrapper(payload)
        return dict_
    return {
        'timestamp': timestamp,
        'packet': packet.pack(),
        data_link.name: wrapper(packet),
    }
python
def packet2dict(packet, timestamp, *, data_link):
    """Convert DPKT packet into dict."""
    def wrapper(packet):
        dict_ = dict()
        for field in packet.__hdr_fields__:
            dict_[field] = getattr(packet, field, None)
        payload = packet.data
        if not isinstance(payload, bytes):
            dict_[type(payload).__name__] = wrapper(payload)
        return dict_
    return {
        'timestamp': timestamp,
        'packet': packet.pack(),
        data_link.name: wrapper(packet),
    }
[ "def", "packet2dict", "(", "packet", ",", "timestamp", ",", "*", ",", "data_link", ")", ":", "def", "wrapper", "(", "packet", ")", ":", "dict_", "=", "dict", "(", ")", "for", "field", "in", "packet", ".", "__hdr_fields__", ":", "dict_", "[", "field", "]", "=", "getattr", "(", "packet", ",", "field", ",", "None", ")", "payload", "=", "packet", ".", "data", "if", "not", "isinstance", "(", "payload", ",", "bytes", ")", ":", "dict_", "[", "type", "(", "payload", ")", ".", "__name__", "]", "=", "wrapper", "(", "payload", ")", "return", "dict_", "return", "{", "'timestamp'", ":", "timestamp", ",", "'packet'", ":", "packet", ".", "pack", "(", ")", ",", "data_link", ".", "name", ":", "wrapper", "(", "packet", ")", ",", "}" ]
Convert DPKT packet into dict.
[ "Convert", "DPKT", "packet", "into", "dict", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/toolkit/dpkt.py#L40-L54
JarryShaw/PyPCAPKit
src/toolkit/dpkt.py
ipv4_reassembly
def ipv4_reassembly(packet, *, count=NotImplemented): """Make data for IPv4 reassembly.""" ipv4 = getattr(packet, 'ip', None) if ipv4 is not None: if ipv4.df: # dismiss not fragmented packet return False, None data = dict( bufid=( ipaddress.ip_address(ipv4.src), # source IP address ipaddress.ip_address(ipv4.dst), # destination IP address ipv4.id, # identification TP_PROTO.get(ipv4.p).name, # payload protocol type ), num=count, # original packet range number fo=ipv4.off, # fragment offset ihl=ipv4.__hdr_len__, # internet header length mf=bool(ipv4.mf), # more fragment flag tl=ipv4.len, # total length, header includes header=bytearray(ipv4.pack()[:ipv4.__hdr_len__]), # raw bytearray type header payload=bytearray(ipv4.pack()[ipv4.__hdr_len__:]), # raw bytearray type payload ) return True, data return False, None
python
def ipv4_reassembly(packet, *, count=NotImplemented): """Make data for IPv4 reassembly.""" ipv4 = getattr(packet, 'ip', None) if ipv4 is not None: if ipv4.df: # dismiss not fragmented packet return False, None data = dict( bufid=( ipaddress.ip_address(ipv4.src), # source IP address ipaddress.ip_address(ipv4.dst), # destination IP address ipv4.id, # identification TP_PROTO.get(ipv4.p).name, # payload protocol type ), num=count, # original packet range number fo=ipv4.off, # fragment offset ihl=ipv4.__hdr_len__, # internet header length mf=bool(ipv4.mf), # more fragment flag tl=ipv4.len, # total length, header includes header=bytearray(ipv4.pack()[:ipv4.__hdr_len__]), # raw bytearray type header payload=bytearray(ipv4.pack()[ipv4.__hdr_len__:]), # raw bytearray type payload ) return True, data return False, None
[ "def", "ipv4_reassembly", "(", "packet", ",", "*", ",", "count", "=", "NotImplemented", ")", ":", "ipv4", "=", "getattr", "(", "packet", ",", "'ip'", ",", "None", ")", "if", "ipv4", "is", "not", "None", ":", "if", "ipv4", ".", "df", ":", "# dismiss not fragmented packet", "return", "False", ",", "None", "data", "=", "dict", "(", "bufid", "=", "(", "ipaddress", ".", "ip_address", "(", "ipv4", ".", "src", ")", ",", "# source IP address", "ipaddress", ".", "ip_address", "(", "ipv4", ".", "dst", ")", ",", "# destination IP address", "ipv4", ".", "id", ",", "# identification", "TP_PROTO", ".", "get", "(", "ipv4", ".", "p", ")", ".", "name", ",", "# payload protocol type", ")", ",", "num", "=", "count", ",", "# original packet range number", "fo", "=", "ipv4", ".", "off", ",", "# fragment offset", "ihl", "=", "ipv4", ".", "__hdr_len__", ",", "# internet header length", "mf", "=", "bool", "(", "ipv4", ".", "mf", ")", ",", "# more fragment flag", "tl", "=", "ipv4", ".", "len", ",", "# total length, header includes", "header", "=", "bytearray", "(", "ipv4", ".", "pack", "(", ")", "[", ":", "ipv4", ".", "__hdr_len__", "]", ")", ",", "# raw bytearray type header", "payload", "=", "bytearray", "(", "ipv4", ".", "pack", "(", ")", "[", "ipv4", ".", "__hdr_len__", ":", "]", ")", ",", "# raw bytearray type payload", ")", "return", "True", ",", "data", "return", "False", ",", "None" ]
Make data for IPv4 reassembly.
[ "Make", "data", "for", "IPv4", "reassembly", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/toolkit/dpkt.py#L57-L79
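ipv4_reassembly relies on dpkt's pre-parsed attributes (ipv4.df, ipv4.mf, ipv4.off). For reference, a struct-level sketch of where those reassembly fields live in a raw IPv4 header; note the 13-bit offset is carried in 8-octet units on the wire:

import struct

def ipv4_frag_fields(header):
    """Reassembly-relevant fields from a raw IPv4 header (first 8 bytes suffice)."""
    ver_ihl, _tos, total_len, ident, flags_off = struct.unpack_from('!BBHHH', header)
    return dict(
        ihl=(ver_ihl & 0x0F) * 4,        # header length in bytes
        tl=total_len,                    # total length, header included
        id=ident,                        # identification
        df=bool(flags_off & 0x4000),     # Don't Fragment
        mf=bool(flags_off & 0x2000),     # More Fragments
        fo=(flags_off & 0x1FFF) * 8,     # fragment offset converted to bytes
    )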
JarryShaw/PyPCAPKit
src/toolkit/dpkt.py
ipv6_reassembly
def ipv6_reassembly(packet, *, count=NotImplemented): """Make data for IPv6 reassembly.""" ipv6 = getattr(packet, 'ip6', None) if ipv6 is not None: ipv6_frag = ipv6.extension_hdrs.get(44) if ipv6_frag is None: # dismiss not fragmented packet return False, None hdr_len = ipv6_hdr_len(ipv6) data = dict( bufid=( ipaddress.ip_address(ipv6.src), # source IP address ipaddress.ip_address(ipv6.dst), # destination IP address ipv6.flow, # label TP_PROTO.get(ipv6_frag.nh).name, # next header field in IPv6 Fragment Header ), num=count, # original packet range number fo=ipv6_frag.nxt, # fragment offset ihl=hdr_len, # header length, only headers before IPv6-Frag mf=bool(ipv6_frag.m_flag), # more fragment flag tl=len(ipv6), # total length, header includes header=bytearray(ipv6.pack()[:hdr_len]), # raw bytearray type header before IPv6-Frag payload=bytearray(ipv6.pack()[hdr_len+ipv6_frag:]), # raw bytearray type payload after IPv6-Frag ) return True, data return False, None
python
def ipv6_reassembly(packet, *, count=NotImplemented): """Make data for IPv6 reassembly.""" ipv6 = getattr(packet, 'ip6', None) if ipv6 is not None: ipv6_frag = ipv6.extension_hdrs.get(44) if ipv6_frag is None: # dismiss not fragmented packet return False, None hdr_len = ipv6_hdr_len(ipv6) data = dict( bufid=( ipaddress.ip_address(ipv6.src), # source IP address ipaddress.ip_address(ipv6.dst), # destination IP address ipv6.flow, # label TP_PROTO.get(ipv6_frag.nh).name, # next header field in IPv6 Fragment Header ), num=count, # original packet range number fo=ipv6_frag.nxt, # fragment offset ihl=hdr_len, # header length, only headers before IPv6-Frag mf=bool(ipv6_frag.m_flag), # more fragment flag tl=len(ipv6), # total length, header includes header=bytearray(ipv6.pack()[:hdr_len]), # raw bytearray type header before IPv6-Frag payload=bytearray(ipv6.pack()[hdr_len+ipv6_frag:]), # raw bytearray type payload after IPv6-Frag ) return True, data return False, None
[ "def", "ipv6_reassembly", "(", "packet", ",", "*", ",", "count", "=", "NotImplemented", ")", ":", "ipv6", "=", "getattr", "(", "packet", ",", "'ip6'", ",", "None", ")", "if", "ipv6", "is", "not", "None", ":", "ipv6_frag", "=", "ipv6", ".", "extension_hdrs", ".", "get", "(", "44", ")", "if", "ipv6_frag", "is", "None", ":", "# dismiss not fragmented packet", "return", "False", ",", "None", "hdr_len", "=", "ipv6_hdr_len", "(", "ipv6", ")", "data", "=", "dict", "(", "bufid", "=", "(", "ipaddress", ".", "ip_address", "(", "ipv6", ".", "src", ")", ",", "# source IP address", "ipaddress", ".", "ip_address", "(", "ipv6", ".", "dst", ")", ",", "# destination IP address", "ipv6", ".", "flow", ",", "# label", "TP_PROTO", ".", "get", "(", "ipv6_frag", ".", "nh", ")", ".", "name", ",", "# next header field in IPv6 Fragment Header", ")", ",", "num", "=", "count", ",", "# original packet range number", "fo", "=", "ipv6_frag", ".", "nxt", ",", "# fragment offset", "ihl", "=", "hdr_len", ",", "# header length, only headers before IPv6-Frag", "mf", "=", "bool", "(", "ipv6_frag", ".", "m_flag", ")", ",", "# more fragment flag", "tl", "=", "len", "(", "ipv6", ")", ",", "# total length, header includes", "header", "=", "bytearray", "(", "ipv6", ".", "pack", "(", ")", "[", ":", "hdr_len", "]", ")", ",", "# raw bytearray type header before IPv6-Frag", "payload", "=", "bytearray", "(", "ipv6", ".", "pack", "(", ")", "[", "hdr_len", "+", "ipv6_frag", ":", "]", ")", ",", "# raw bytearray type payload after IPv6-Frag", ")", "return", "True", ",", "data", "return", "False", ",", "None" ]
Make data for IPv6 reassembly.
[ "Make", "data", "for", "IPv6", "reassembly", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/toolkit/dpkt.py#L82-L106
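The fields ipv6_reassembly needs all come from the fixed 8-byte Fragment extension header. A standalone sketch of that header's layout (RFC 8200), independent of dpkt's attribute names:

import struct

def ipv6_frag_fields(frag_hdr):
    """Decode the 8-byte IPv6 Fragment extension header."""
    nxt, _resv, off_flags, ident = struct.unpack_from('!BBHI', frag_hdr)
    return dict(
        nh=nxt,                          # next header after reassembly
        fo=off_flags & 0xFFF8,           # 13-bit offset, already scaled to bytes
        mf=bool(off_flags & 0x0001),     # M flag: more fragments follow
        id=ident,                        # 32-bit identification
    )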
JarryShaw/PyPCAPKit
src/toolkit/dpkt.py
tcp_reassembly
def tcp_reassembly(packet, *, count=NotImplemented): """Make data for TCP reassembly.""" if getattr(packet, 'ip', None): ip = packet['ip'] elif getattr(packet, 'ip6', None): ip = packet['ip6'] else: return False, None tcp = getattr(ip, 'tcp', None) if tcp is not None: flags = bin(tcp.flags)[2:].zfill(8) data = dict( bufid=( ipaddress.ip_address(ip.src), # source IP address ipaddress.ip_address(ip.dst), # destination IP address tcp.sport, # source port tcp.dport, # destination port ), num=count, # original packet range number ack=tcp.ack, # acknowledgement dsn=tcp.seq, # data sequence number rst=bool(int(flags[5])), # reset connection flag syn=bool(int(flags[6])), # synchronise flag fin=bool(int(flags[7])), # finish flag payload=bytearray(tcp.pack()[tcp.__hdr_len__:]), # raw bytearray type payload ) raw_len = len(tcp.data) # payload length, header excludes data['first'] = tcp.seq # this sequence number data['last'] = tcp.seq + raw_len # next (wanted) sequence number data['len'] = raw_len # payload length, header excludes return True, data return False, None
python
def tcp_reassembly(packet, *, count=NotImplemented): """Make data for TCP reassembly.""" if getattr(packet, 'ip', None): ip = packet['ip'] elif getattr(packet, 'ip6', None): ip = packet['ip6'] else: return False, None tcp = getattr(ip, 'tcp', None) if tcp is not None: flags = bin(tcp.flags)[2:].zfill(8) data = dict( bufid=( ipaddress.ip_address(ip.src), # source IP address ipaddress.ip_address(ip.dst), # destination IP address tcp.sport, # source port tcp.dport, # destination port ), num=count, # original packet range number ack=tcp.ack, # acknowledgement dsn=tcp.seq, # data sequence number rst=bool(int(flags[5])), # reset connection flag syn=bool(int(flags[6])), # synchronise flag fin=bool(int(flags[7])), # finish flag payload=bytearray(tcp.pack()[tcp.__hdr_len__:]), # raw bytearray type payload ) raw_len = len(tcp.data) # payload length, header excludes data['first'] = tcp.seq # this sequence number data['last'] = tcp.seq + raw_len # next (wanted) sequence number data['len'] = raw_len # payload length, header excludes return True, data return False, None
[ "def", "tcp_reassembly", "(", "packet", ",", "*", ",", "count", "=", "NotImplemented", ")", ":", "if", "getattr", "(", "packet", ",", "'ip'", ",", "None", ")", ":", "ip", "=", "packet", "[", "'ip'", "]", "elif", "getattr", "(", "packet", ",", "'ip6'", ",", "None", ")", ":", "ip", "=", "packet", "[", "'ip6'", "]", "else", ":", "return", "False", ",", "None", "tcp", "=", "getattr", "(", "ip", ",", "'tcp'", ",", "None", ")", "if", "tcp", "is", "not", "None", ":", "flags", "=", "bin", "(", "tcp", ".", "flags", ")", "[", "2", ":", "]", ".", "zfill", "(", "8", ")", "data", "=", "dict", "(", "bufid", "=", "(", "ipaddress", ".", "ip_address", "(", "ip", ".", "src", ")", ",", "# source IP address", "ipaddress", ".", "ip_address", "(", "ip", ".", "dst", ")", ",", "# destination IP address", "tcp", ".", "sport", ",", "# source port", "tcp", ".", "dport", ",", "# destination port", ")", ",", "num", "=", "count", ",", "# original packet range number", "ack", "=", "tcp", ".", "ack", ",", "# acknowledgement", "dsn", "=", "tcp", ".", "seq", ",", "# data sequence number", "rst", "=", "bool", "(", "int", "(", "flags", "[", "5", "]", ")", ")", ",", "# reset connection flag", "syn", "=", "bool", "(", "int", "(", "flags", "[", "6", "]", ")", ")", ",", "# synchronise flag", "fin", "=", "bool", "(", "int", "(", "flags", "[", "7", "]", ")", ")", ",", "# finish flag", "payload", "=", "bytearray", "(", "tcp", ".", "pack", "(", ")", "[", "tcp", ".", "__hdr_len__", ":", "]", ")", ",", "# raw bytearray type payload", ")", "raw_len", "=", "len", "(", "tcp", ".", "data", ")", "# payload length, header excludes", "data", "[", "'first'", "]", "=", "tcp", ".", "seq", "# this sequence number", "data", "[", "'last'", "]", "=", "tcp", ".", "seq", "+", "raw_len", "# next (wanted) sequence number", "data", "[", "'len'", "]", "=", "raw_len", "# payload length, header excludes", "return", "True", ",", "data", "return", "False", ",", "None" ]
Make data for TCP reassembly.
[ "Make", "data", "for", "TCP", "reassembly", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/toolkit/dpkt.py#L109-L140
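The bin(tcp.flags)[2:].zfill(8) trick used above yields an MSB-first bit string, which is why RST, SYN and FIN sit at indices 5, 6 and 7. A small sanity check of that indexing against the usual bit masks:

def tcp_flag_bits(flags_byte):
    """Same extraction as above, written out for a bare flags byte."""
    bits = bin(flags_byte)[2:].zfill(8)
    return dict(
        rst=bool(int(bits[5])),   # mask 0x04
        syn=bool(int(bits[6])),   # mask 0x02
        fin=bool(int(bits[7])),   # mask 0x01
    )

assert tcp_flag_bits(0x12) == dict(rst=False, syn=True, fin=False)   # SYN+ACK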
JarryShaw/PyPCAPKit
src/toolkit/dpkt.py
tcp_traceflow
def tcp_traceflow(packet, timestamp, *, data_link, count=NotImplemented): """Trace packet flow for TCP.""" if getattr(packet, 'ip', None): ip = packet['ip'] elif getattr(packet, 'ip6', None): ip = packet['ip6'] else: return False, None tcp = getattr(ip, 'tcp', None) if tcp is not None: flags = bin(tcp.flags)[2:].zfill(8) data = dict( protocol=data_link, # data link type from global header index=count, # frame number frame=packet2dict(packet, timestamp, data_link=data_link), # extracted packet syn=bool(int(flags[6])), # TCP synchronise (SYN) flag fin=bool(int(flags[7])), # TCP finish (FIN) flag src=ipaddress.ip_address(ip.src), # source IP dst=ipaddress.ip_address(ip.dst), # destination IP srcport=tcp.sport, # TCP source port dstport=tcp.dport, # TCP destination port timestamp=timestamp, # timestamp ) return True, data return False, None
python
def tcp_traceflow(packet, timestamp, *, data_link, count=NotImplemented): """Trace packet flow for TCP.""" if getattr(packet, 'ip', None): ip = packet['ip'] elif getattr(packet, 'ip6', None): ip = packet['ip6'] else: return False, None tcp = getattr(ip, 'tcp', None) if tcp is not None: flags = bin(tcp.flags)[2:].zfill(8) data = dict( protocol=data_link, # data link type from global header index=count, # frame number frame=packet2dict(packet, timestamp, data_link=data_link), # extracted packet syn=bool(int(flags[6])), # TCP synchronise (SYN) flag fin=bool(int(flags[7])), # TCP finish (FIN) flag src=ipaddress.ip_address(ip.src), # source IP dst=ipaddress.ip_address(ip.dst), # destination IP srcport=tcp.sport, # TCP source port dstport=tcp.dport, # TCP destination port timestamp=timestamp, # timestamp ) return True, data return False, None
[ "def", "tcp_traceflow", "(", "packet", ",", "timestamp", ",", "*", ",", "data_link", ",", "count", "=", "NotImplemented", ")", ":", "if", "getattr", "(", "packet", ",", "'ip'", ",", "None", ")", ":", "ip", "=", "packet", "[", "'ip'", "]", "elif", "getattr", "(", "packet", ",", "'ip6'", ",", "None", ")", ":", "ip", "=", "packet", "[", "'ip6'", "]", "else", ":", "return", "False", ",", "None", "tcp", "=", "getattr", "(", "ip", ",", "'tcp'", ",", "None", ")", "if", "tcp", "is", "not", "None", ":", "flags", "=", "bin", "(", "tcp", ".", "flags", ")", "[", "2", ":", "]", ".", "zfill", "(", "8", ")", "data", "=", "dict", "(", "protocol", "=", "data_link", ",", "# data link type from global header", "index", "=", "count", ",", "# frame number", "frame", "=", "packet2dict", "(", "packet", ",", "timestamp", ",", "data_link", "=", "data_link", ")", ",", "# extracted packet", "syn", "=", "bool", "(", "int", "(", "flags", "[", "6", "]", ")", ")", ",", "# TCP synchronise (SYN) flag", "fin", "=", "bool", "(", "int", "(", "flags", "[", "7", "]", ")", ")", ",", "# TCP finish (FIN) flag", "src", "=", "ipaddress", ".", "ip_address", "(", "ip", ".", "src", ")", ",", "# source IP", "dst", "=", "ipaddress", ".", "ip_address", "(", "ip", ".", "dst", ")", ",", "# destination IP", "srcport", "=", "tcp", ".", "sport", ",", "# TCP source port", "dstport", "=", "tcp", ".", "dport", ",", "# TCP destination port", "timestamp", "=", "timestamp", ",", "# timestamp", ")", "return", "True", ",", "data", "return", "False", ",", "None" ]
Trace packet flow for TCP.
[ "Trace", "packet", "flow", "for", "TCP", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/toolkit/dpkt.py#L143-L167
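One plausible way to consume tcp_traceflow output is to bucket the returned dicts by connection; a hypothetical driver keyed on the 4-tuple the function already exposes:

from collections import defaultdict

flows = defaultdict(list)

def record_flow(entry):
    """Append a tcp_traceflow() data dict to its connection bucket."""
    key = (entry['src'], entry['dst'], entry['srcport'], entry['dstport'])
    flows[key].append(entry)
    if entry['fin']:
        print('connection closed:', key, 'packets seen:', len(flows[key]))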
JarryShaw/PyPCAPKit
src/protocols/application/httpv2.py
HTTPv2.read_http
def read_http(self, length): """Read Hypertext Transfer Protocol (HTTP/2). Structure of HTTP/2 packet [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +=+=============================================================+ | Frame Payload (0...) ... +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.payload Frame Payload """ if length is None: length = len(self) if length < 9: raise ProtocolError(f'HTTP/2: invalid format', quiet=True) _tlen = self._read_unpack(3) _type = self._read_unpack(1) _flag = self._read_binary(1) _rsid = self._read_binary(4) if _tlen != length: raise ProtocolError(f'HTTP/2: [Type {_type}] invalid format', quiet=True) if int(_rsid[0], base=2): raise ProtocolError(f'HTTP/2: [Type {_type}] invalid format', quiet=True) http = dict( length=_tlen, type=_HTTP_TYPE.get(_type), sid=int(_rsid[1:], base=2), packet=self._read_packet(_tlen), ) if http['type'] is None: raise ProtocolError(f'HTTP/2: [Type {_type}] invalid format', quiet=True) if http['type'] in ('SETTINGS', 'PING') and http['sid'] != 0: raise ProtocolError(f'HTTP/2: [Type {_type}] invalid format', quiet=True) _http = _HTTP_FUNC[_type](self, _tlen, _type, _flag) http.update(_http) return http
python
def read_http(self, length): """Read Hypertext Transfer Protocol (HTTP/2). Structure of HTTP/2 packet [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +=+=============================================================+ | Frame Payload (0...) ... +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.payload Frame Payload """ if length is None: length = len(self) if length < 9: raise ProtocolError(f'HTTP/2: invalid format', quiet=True) _tlen = self._read_unpack(3) _type = self._read_unpack(1) _flag = self._read_binary(1) _rsid = self._read_binary(4) if _tlen != length: raise ProtocolError(f'HTTP/2: [Type {_type}] invalid format', quiet=True) if int(_rsid[0], base=2): raise ProtocolError(f'HTTP/2: [Type {_type}] invalid format', quiet=True) http = dict( length=_tlen, type=_HTTP_TYPE.get(_type), sid=int(_rsid[1:], base=2), packet=self._read_packet(_tlen), ) if http['type'] is None: raise ProtocolError(f'HTTP/2: [Type {_type}] invalid format', quiet=True) if http['type'] in ('SETTINGS', 'PING') and http['sid'] != 0: raise ProtocolError(f'HTTP/2: [Type {_type}] invalid format', quiet=True) _http = _HTTP_FUNC[_type](self, _tlen, _type, _flag) http.update(_http) return http
[ "def", "read_http", "(", "self", ",", "length", ")", ":", "if", "length", "is", "None", ":", "length", "=", "len", "(", "self", ")", "if", "length", "<", "9", ":", "raise", "ProtocolError", "(", "f'HTTP/2: invalid format'", ",", "quiet", "=", "True", ")", "_tlen", "=", "self", ".", "_read_unpack", "(", "3", ")", "_type", "=", "self", ".", "_read_unpack", "(", "1", ")", "_flag", "=", "self", ".", "_read_binary", "(", "1", ")", "_rsid", "=", "self", ".", "_read_binary", "(", "4", ")", "if", "_tlen", "!=", "length", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {_type}] invalid format'", ",", "quiet", "=", "True", ")", "if", "int", "(", "_rsid", "[", "0", "]", ",", "base", "=", "2", ")", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {_type}] invalid format'", ",", "quiet", "=", "True", ")", "http", "=", "dict", "(", "length", "=", "_tlen", ",", "type", "=", "_HTTP_TYPE", ".", "get", "(", "_type", ")", ",", "sid", "=", "int", "(", "_rsid", "[", "1", ":", "]", ",", "base", "=", "2", ")", ",", "packet", "=", "self", ".", "_read_packet", "(", "_tlen", ")", ",", ")", "if", "http", "[", "'type'", "]", "is", "None", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {_type}] invalid format'", ",", "quiet", "=", "True", ")", "if", "http", "[", "'type'", "]", "in", "(", "'SETTINGS'", ",", "'PING'", ")", "and", "http", "[", "'sid'", "]", "!=", "0", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {_type}] invalid format'", ",", "quiet", "=", "True", ")", "_http", "=", "_HTTP_FUNC", "[", "_type", "]", "(", "self", ",", "_tlen", ",", "_type", ",", "_flag", ")", "http", ".", "update", "(", "_http", ")", "return", "http" ]
Read Hypertext Transfer Protocol (HTTP/2). Structure of HTTP/2 packet [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +=+=============================================================+ | Frame Payload (0...) ... +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.payload Frame Payload
[ "Read", "Hypertext", "Transfer", "Protocol", "(", "HTTP", "/", "2", ")", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/application/httpv2.py#L99-L155
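The first nine octets read by read_http form the fixed HTTP/2 frame header. A self-contained sketch of splitting that header out of a bytes buffer (RFC 7540 §4.1), without the class's file-based readers:

import struct

def http2_frame_header(buf):
    """Return (length, type, flags, stream_id) from a 9-byte HTTP/2 frame header."""
    if len(buf) < 9:
        raise ValueError('need at least 9 octets')
    length = int.from_bytes(buf[0:3], 'big')                     # 24-bit payload length
    ftype, flags = buf[3], buf[4]                                # 8-bit type and flags
    stream_id = struct.unpack('!I', buf[5:9])[0] & 0x7FFFFFFF    # drop the reserved bit
    return length, ftype, flags, stream_id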
JarryShaw/PyPCAPKit
src/protocols/application/httpv2.py
HTTPv2._read_http_none
def _read_http_none(self, size, kind, flag):
    """Read HTTP packet with unsigned type."""
    if any((int(bit, base=2) for bit in flag)):
        raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True)
    data = dict(
        flags=None,
        payload=self._read_fileng(size - 9) or None,
    )
    return data
python
def _read_http_none(self, size, kind, flag):
    """Read HTTP packet with unsigned type."""
    if any((int(bit, base=2) for bit in flag)):
        raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True)
    data = dict(
        flags=None,
        payload=self._read_fileng(size - 9) or None,
    )
    return data
[ "def", "_read_http_none", "(", "self", ",", "size", ",", "kind", ",", "flag", ")", ":", "if", "any", "(", "(", "int", "(", "bit", ",", "base", "=", "2", ")", "for", "bit", "in", "flag", ")", ")", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "data", "=", "dict", "(", "flags", "=", "None", ",", "payload", "=", "self", ".", "_read_fileng", "(", "size", "-", "9", ")", "or", "None", ",", ")", "return", "data" ]
Read HTTP packet with unsigned type.
[ "Read", "HTTP", "packet", "with", "unsigned", "type", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/application/httpv2.py#L172-L182
JarryShaw/PyPCAPKit
src/protocols/application/httpv2.py
HTTPv2._read_http_data
def _read_http_data(self, size, kind, flag): """Read HTTP/2 DATA frames. Structure of HTTP/2 DATA frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------+-----------------------------------------------+ |Pad Length? (8)| +---------------+-----------------------------------------------+ | Data (*) ... +---------------------------------------------------------------+ | Padding (*) ... +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (0) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.pad_len Pad Length (Optional) 10 80 http.data Data ? ? - Padding (Optional) """ _plen = 0 _flag = dict( END_STREAM=False, # bit 0 PADDED=False, # bit 3 ) for index, bit in enumerate(flag): if index == 0 and bit: _flag['END_STREAM'] = True elif index == 3 and bit: _flag['PADDED'] = True _plen = self._read_unpack(1) elif bit: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) else: continue if _plen > size - 10: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) if _flag['PADDED']: _dlen = size - _plen - 1 else: _dlen = size - _plen if _dlen < 0: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _data = self._read_fileng(_dlen) padding = self._read_binary(_plen) if any((int(bit, base=2) for bit in padding)): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) data = dict( flags=_flag, data=_data, ) if _flag['PADDED']: data['ped_len'] = _plen return data
python
def _read_http_data(self, size, kind, flag): """Read HTTP/2 DATA frames. Structure of HTTP/2 DATA frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------+-----------------------------------------------+ |Pad Length? (8)| +---------------+-----------------------------------------------+ | Data (*) ... +---------------------------------------------------------------+ | Padding (*) ... +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (0) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.pad_len Pad Length (Optional) 10 80 http.data Data ? ? - Padding (Optional) """ _plen = 0 _flag = dict( END_STREAM=False, # bit 0 PADDED=False, # bit 3 ) for index, bit in enumerate(flag): if index == 0 and bit: _flag['END_STREAM'] = True elif index == 3 and bit: _flag['PADDED'] = True _plen = self._read_unpack(1) elif bit: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) else: continue if _plen > size - 10: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) if _flag['PADDED']: _dlen = size - _plen - 1 else: _dlen = size - _plen if _dlen < 0: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _data = self._read_fileng(_dlen) padding = self._read_binary(_plen) if any((int(bit, base=2) for bit in padding)): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) data = dict( flags=_flag, data=_data, ) if _flag['PADDED']: data['ped_len'] = _plen return data
[ "def", "_read_http_data", "(", "self", ",", "size", ",", "kind", ",", "flag", ")", ":", "_plen", "=", "0", "_flag", "=", "dict", "(", "END_STREAM", "=", "False", ",", "# bit 0", "PADDED", "=", "False", ",", "# bit 3", ")", "for", "index", ",", "bit", "in", "enumerate", "(", "flag", ")", ":", "if", "index", "==", "0", "and", "bit", ":", "_flag", "[", "'END_STREAM'", "]", "=", "True", "elif", "index", "==", "3", "and", "bit", ":", "_flag", "[", "'PADDED'", "]", "=", "True", "_plen", "=", "self", ".", "_read_unpack", "(", "1", ")", "elif", "bit", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "else", ":", "continue", "if", "_plen", ">", "size", "-", "10", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "if", "_flag", "[", "'PADDED'", "]", ":", "_dlen", "=", "size", "-", "_plen", "-", "1", "else", ":", "_dlen", "=", "size", "-", "_plen", "if", "_dlen", "<", "0", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "_data", "=", "self", ".", "_read_fileng", "(", "_dlen", ")", "padding", "=", "self", ".", "_read_binary", "(", "_plen", ")", "if", "any", "(", "(", "int", "(", "bit", ",", "base", "=", "2", ")", "for", "bit", "in", "padding", ")", ")", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "data", "=", "dict", "(", "flags", "=", "_flag", ",", "data", "=", "_data", ",", ")", "if", "_flag", "[", "'PADDED'", "]", ":", "data", "[", "'ped_len'", "]", "=", "_plen", "return", "data" ]
Read HTTP/2 DATA frames. Structure of HTTP/2 DATA frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------+-----------------------------------------------+ |Pad Length? (8)| +---------------+-----------------------------------------------+ | Data (*) ... +---------------------------------------------------------------+ | Padding (*) ... +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (0) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.pad_len Pad Length (Optional) 10 80 http.data Data ? ? - Padding (Optional)
[ "Read", "HTTP", "/", "2", "DATA", "frames", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/application/httpv2.py#L184-L252
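With the PADDED flag set, the DATA payload starts with a one-octet pad length and ends with that many padding octets, which must not consume the entire payload. A small helper illustrating the arithmetic the frame reader performs:

def split_padded_payload(payload, padded):
    """Split a DATA frame payload into (data, padding) per the layout above."""
    if not padded:
        return payload, b''
    pad_len = payload[0]
    if pad_len >= len(payload):
        raise ValueError('pad length exceeds frame payload')
    return payload[1:len(payload) - pad_len], payload[len(payload) - pad_len:]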
JarryShaw/PyPCAPKit
src/protocols/application/httpv2.py
HTTPv2._read_http_headers
def _read_http_headers(self, size, kind, flag): """Read HTTP/2 HEADERS frames. Structure of HTTP/2 HEADERS frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------+-----------------------------------------------+ |Pad Length? (8)| +-+-------------+-----------------------------------------------+ |E| Stream Dependency? (31) | +-+-------------+-----------------------------------------------+ | Weight? (8) | +-+-------------+-----------------------------------------------+ | Header Block Fragment (*) ... +---------------------------------------------------------------+ | Padding (*) ... +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (1) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.pad_len Pad Length (Optional) 10 80 http.exclusive Exclusive Flag 10 81 http.deps Stream Dependency (Optional) 14 112 http.weight Weight (Optional) 15 120 http.frag Header Block Fragment ? ? - Padding (Optional) """ _plen = 0 _elen = 0 _flag = dict( END_STREAM=False, # bit 0 END_HEADERS=False, # bit 2 PADDED=False, # bit 3 PRIORITY=False, # bit 5 ) for index, bit in enumerate(flag): if index == 0 and bit: _flag['END_STREAM'] = True elif index == 2 and bit: _flag['END_HEADERS'] = True elif index == 3 and bit: _flag['PADDED'] = True _plen = self._read_unpack(1) elif index == 5 and bit: _flag['PRIORITY'] = True _edep = self._read_binary(4) _wght = self._read_unpack(1) _elen = 5 elif bit: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) else: continue if _flag['PADDED']: _dlen = size - _plen - _elen - 1 else: _dlen = size - _plen - _elen if _dlen < 0: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _frag = self._read_fileng(_dlen) or None padding = self._read_binary(_plen) if any((int(bit, base=2) for bit in padding)): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) data = dict( flags=_flag, frag=_frag, ) if _flag['PADDED']: data['ped_len'] = _plen if _flag['PRIORITY']: data['exclusive'] = True if int(_edep[0], base=2) else False data['deps'] = int(_edep[1:], base=2) data['weight'] = _wght + 1 return data
python
def _read_http_headers(self, size, kind, flag): """Read HTTP/2 HEADERS frames. Structure of HTTP/2 HEADERS frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------+-----------------------------------------------+ |Pad Length? (8)| +-+-------------+-----------------------------------------------+ |E| Stream Dependency? (31) | +-+-------------+-----------------------------------------------+ | Weight? (8) | +-+-------------+-----------------------------------------------+ | Header Block Fragment (*) ... +---------------------------------------------------------------+ | Padding (*) ... +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (1) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.pad_len Pad Length (Optional) 10 80 http.exclusive Exclusive Flag 10 81 http.deps Stream Dependency (Optional) 14 112 http.weight Weight (Optional) 15 120 http.frag Header Block Fragment ? ? - Padding (Optional) """ _plen = 0 _elen = 0 _flag = dict( END_STREAM=False, # bit 0 END_HEADERS=False, # bit 2 PADDED=False, # bit 3 PRIORITY=False, # bit 5 ) for index, bit in enumerate(flag): if index == 0 and bit: _flag['END_STREAM'] = True elif index == 2 and bit: _flag['END_HEADERS'] = True elif index == 3 and bit: _flag['PADDED'] = True _plen = self._read_unpack(1) elif index == 5 and bit: _flag['PRIORITY'] = True _edep = self._read_binary(4) _wght = self._read_unpack(1) _elen = 5 elif bit: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) else: continue if _flag['PADDED']: _dlen = size - _plen - _elen - 1 else: _dlen = size - _plen - _elen if _dlen < 0: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _frag = self._read_fileng(_dlen) or None padding = self._read_binary(_plen) if any((int(bit, base=2) for bit in padding)): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) data = dict( flags=_flag, frag=_frag, ) if _flag['PADDED']: data['ped_len'] = _plen if _flag['PRIORITY']: data['exclusive'] = True if int(_edep[0], base=2) else False data['deps'] = int(_edep[1:], base=2) data['weight'] = _wght + 1 return data
[ "def", "_read_http_headers", "(", "self", ",", "size", ",", "kind", ",", "flag", ")", ":", "_plen", "=", "0", "_elen", "=", "0", "_flag", "=", "dict", "(", "END_STREAM", "=", "False", ",", "# bit 0", "END_HEADERS", "=", "False", ",", "# bit 2", "PADDED", "=", "False", ",", "# bit 3", "PRIORITY", "=", "False", ",", "# bit 5", ")", "for", "index", ",", "bit", "in", "enumerate", "(", "flag", ")", ":", "if", "index", "==", "0", "and", "bit", ":", "_flag", "[", "'END_STREAM'", "]", "=", "True", "elif", "index", "==", "2", "and", "bit", ":", "_flag", "[", "'END_HEADERS'", "]", "=", "True", "elif", "index", "==", "3", "and", "bit", ":", "_flag", "[", "'PADDED'", "]", "=", "True", "_plen", "=", "self", ".", "_read_unpack", "(", "1", ")", "elif", "index", "==", "5", "and", "bit", ":", "_flag", "[", "'PRIORITY'", "]", "=", "True", "_edep", "=", "self", ".", "_read_binary", "(", "4", ")", "_wght", "=", "self", ".", "_read_unpack", "(", "1", ")", "_elen", "=", "5", "elif", "bit", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "else", ":", "continue", "if", "_flag", "[", "'PADDED'", "]", ":", "_dlen", "=", "size", "-", "_plen", "-", "_elen", "-", "1", "else", ":", "_dlen", "=", "size", "-", "_plen", "-", "_elen", "if", "_dlen", "<", "0", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "_frag", "=", "self", ".", "_read_fileng", "(", "_dlen", ")", "or", "None", "padding", "=", "self", ".", "_read_binary", "(", "_plen", ")", "if", "any", "(", "(", "int", "(", "bit", ",", "base", "=", "2", ")", "for", "bit", "in", "padding", ")", ")", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "data", "=", "dict", "(", "flags", "=", "_flag", ",", "frag", "=", "_frag", ",", ")", "if", "_flag", "[", "'PADDED'", "]", ":", "data", "[", "'ped_len'", "]", "=", "_plen", "if", "_flag", "[", "'PRIORITY'", "]", ":", "data", "[", "'exclusive'", "]", "=", "True", "if", "int", "(", "_edep", "[", "0", "]", ",", "base", "=", "2", ")", "else", "False", "data", "[", "'deps'", "]", "=", "int", "(", "_edep", "[", "1", ":", "]", ",", "base", "=", "2", ")", "data", "[", "'weight'", "]", "=", "_wght", "+", "1", "return", "data" ]
Read HTTP/2 HEADERS frames. Structure of HTTP/2 HEADERS frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------+-----------------------------------------------+ |Pad Length? (8)| +-+-------------+-----------------------------------------------+ |E| Stream Dependency? (31) | +-+-------------+-----------------------------------------------+ | Weight? (8) | +-+-------------+-----------------------------------------------+ | Header Block Fragment (*) ... +---------------------------------------------------------------+ | Padding (*) ... +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (1) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.pad_len Pad Length (Optional) 10 80 http.exclusive Exclusive Flag 10 81 http.deps Stream Dependency (Optional) 14 112 http.weight Weight (Optional) 15 120 http.frag Header Block Fragment ? ? - Padding (Optional)
[ "Read", "HTTP", "/", "2", "HEADERS", "frames", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/application/httpv2.py#L254-L340
JarryShaw/PyPCAPKit
src/protocols/application/httpv2.py
HTTPv2._read_http_priority
def _read_http_priority(self, size, kind, flag): """Read HTTP/2 PRIORITY frames. Structure of HTTP/2 PRIORITY frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +-+-------------------------------------------------------------+ |E| Stream Dependency (31) | +-+-------------+-----------------------------------------------+ | Weight (8) | +-+-------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.exclusive Exclusive Flag 9 73 http.deps Stream Dependency 13 104 http.weight Weight """ if size != 9: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) if any((int(bit, base=2) for bit in flag)): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _edep = self._read_binary(4) _wght = self._read_unpack(1) data = dict( flags=None, exclusive=True if int(_edep[0], base=2) else False, deps=int(_edep[1:], base=2), weight=_wght + 1, ) return data
python
def _read_http_priority(self, size, kind, flag): """Read HTTP/2 PRIORITY frames. Structure of HTTP/2 PRIORITY frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +-+-------------------------------------------------------------+ |E| Stream Dependency (31) | +-+-------------+-----------------------------------------------+ | Weight (8) | +-+-------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.exclusive Exclusive Flag 9 73 http.deps Stream Dependency 13 104 http.weight Weight """ if size != 9: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) if any((int(bit, base=2) for bit in flag)): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _edep = self._read_binary(4) _wght = self._read_unpack(1) data = dict( flags=None, exclusive=True if int(_edep[0], base=2) else False, deps=int(_edep[1:], base=2), weight=_wght + 1, ) return data
[ "def", "_read_http_priority", "(", "self", ",", "size", ",", "kind", ",", "flag", ")", ":", "if", "size", "!=", "9", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "if", "any", "(", "(", "int", "(", "bit", ",", "base", "=", "2", ")", "for", "bit", "in", "flag", ")", ")", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "_edep", "=", "self", ".", "_read_binary", "(", "4", ")", "_wght", "=", "self", ".", "_read_unpack", "(", "1", ")", "data", "=", "dict", "(", "flags", "=", "None", ",", "exclusive", "=", "True", "if", "int", "(", "_edep", "[", "0", "]", ",", "base", "=", "2", ")", "else", "False", ",", "deps", "=", "int", "(", "_edep", "[", "1", ":", "]", ",", "base", "=", "2", ")", ",", "weight", "=", "_wght", "+", "1", ",", ")", "return", "data" ]
Read HTTP/2 PRIORITY frames. Structure of HTTP/2 PRIORITY frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +-+-------------------------------------------------------------+ |E| Stream Dependency (31) | +-+-------------+-----------------------------------------------+ | Weight (8) | +-+-------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.exclusive Exclusive Flag 9 73 http.deps Stream Dependency 13 104 http.weight Weight
[ "Read", "HTTP", "/", "2", "PRIORITY", "frames", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/application/httpv2.py#L342-L384
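The PRIORITY payload decoded above is five octets: an exclusivity bit, a 31-bit stream dependency and a weight whose wire value 0-255 maps to 1-256. A bytes-level sketch of the same decoding:

import struct

def parse_priority_payload(payload):
    """Decode the 5-octet PRIORITY payload (RFC 7540 §6.3)."""
    dep_word, weight = struct.unpack('!IB', payload[:5])
    return dict(
        exclusive=bool(dep_word & 0x80000000),
        deps=dep_word & 0x7FFFFFFF,
        weight=weight + 1,
    )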
JarryShaw/PyPCAPKit
src/protocols/application/httpv2.py
HTTPv2._read_http_rst_stream
def _read_http_rst_stream(self, size, kind, flag): """Read HTTP/2 RST_STREAM frames. Structure of HTTP/2 RST_STREAM frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------------------------------------------------------+ | Error Code (32) | +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.error Error Code """ if size != 8: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) if any((int(bit, base=2) for bit in flag)): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _code = self._read_unpack(4) data = dict( flags=None, error=_ERROR_CODE.get(_code, _code), ) return data
python
def _read_http_rst_stream(self, size, kind, flag): """Read HTTP/2 RST_STREAM frames. Structure of HTTP/2 RST_STREAM frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------------------------------------------------------+ | Error Code (32) | +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.error Error Code """ if size != 8: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) if any((int(bit, base=2) for bit in flag)): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _code = self._read_unpack(4) data = dict( flags=None, error=_ERROR_CODE.get(_code, _code), ) return data
[ "def", "_read_http_rst_stream", "(", "self", ",", "size", ",", "kind", ",", "flag", ")", ":", "if", "size", "!=", "8", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "if", "any", "(", "(", "int", "(", "bit", ",", "base", "=", "2", ")", "for", "bit", "in", "flag", ")", ")", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "_code", "=", "self", ".", "_read_unpack", "(", "4", ")", "data", "=", "dict", "(", "flags", "=", "None", ",", "error", "=", "_ERROR_CODE", ".", "get", "(", "_code", ",", "_code", ")", ",", ")", "return", "data" ]
Read HTTP/2 RST_STREAM frames. Structure of HTTP/2 RST_STREAM frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------------------------------------------------------+ | Error Code (32) | +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.error Error Code
[ "Read", "HTTP", "/", "2", "RST_STREAM", "frames", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/application/httpv2.py#L386-L421
JarryShaw/PyPCAPKit
src/protocols/application/httpv2.py
HTTPv2._read_http_settings
def _read_http_settings(self, size, kind, flag): """Read HTTP/2 SETTINGS frames. Structure of HTTP/2 SETTINGS frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------------------------------------------------------+ | Identifier (16) | +-------------------------------+-------------------------------+ | Value (32) | +---------------------------------------------------------------+ | ...... | Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.settings Settings 9 72 http.settings.id Identifier 10 80 http.settings.value Value """ if size % 5 != 0: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _flag = dict( ACK=False, # bit 0 ) for index, bit in enumerate(flag): if index == 0 and bit: _flag['ACK'] = True elif bit: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) else: continue if _flag['ACK'] and size: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _para = dict() counter = 0 while counter < size: _stid = self._read_unpack(1) _pval = self._read_unpack(4) _pkey = _PARA_NAME.get(_stid, 'Unsigned') if _pkey in _para: if isinstance(_para[_pkey], tuple): _para[_pkey] += (_pval,) else: _para[_pkey] = (_para[_pkey], _pval) else: _para[_pkey] = _pval data = dict( flags=_flag, ) data.update(_para) return data
python
def _read_http_settings(self, size, kind, flag): """Read HTTP/2 SETTINGS frames. Structure of HTTP/2 SETTINGS frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------------------------------------------------------+ | Identifier (16) | +-------------------------------+-------------------------------+ | Value (32) | +---------------------------------------------------------------+ | ...... | Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.settings Settings 9 72 http.settings.id Identifier 10 80 http.settings.value Value """ if size % 5 != 0: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _flag = dict( ACK=False, # bit 0 ) for index, bit in enumerate(flag): if index == 0 and bit: _flag['ACK'] = True elif bit: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) else: continue if _flag['ACK'] and size: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _para = dict() counter = 0 while counter < size: _stid = self._read_unpack(1) _pval = self._read_unpack(4) _pkey = _PARA_NAME.get(_stid, 'Unsigned') if _pkey in _para: if isinstance(_para[_pkey], tuple): _para[_pkey] += (_pval,) else: _para[_pkey] = (_para[_pkey], _pval) else: _para[_pkey] = _pval data = dict( flags=_flag, ) data.update(_para) return data
[ "def", "_read_http_settings", "(", "self", ",", "size", ",", "kind", ",", "flag", ")", ":", "if", "size", "%", "5", "!=", "0", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "_flag", "=", "dict", "(", "ACK", "=", "False", ",", "# bit 0", ")", "for", "index", ",", "bit", "in", "enumerate", "(", "flag", ")", ":", "if", "index", "==", "0", "and", "bit", ":", "_flag", "[", "'ACK'", "]", "=", "True", "elif", "bit", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "else", ":", "continue", "if", "_flag", "[", "'ACK'", "]", "and", "size", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "_para", "=", "dict", "(", ")", "counter", "=", "0", "while", "counter", "<", "size", ":", "_stid", "=", "self", ".", "_read_unpack", "(", "1", ")", "_pval", "=", "self", ".", "_read_unpack", "(", "4", ")", "_pkey", "=", "_PARA_NAME", ".", "get", "(", "_stid", ",", "'Unsigned'", ")", "if", "_pkey", "in", "_para", ":", "if", "isinstance", "(", "_para", "[", "_pkey", "]", ",", "tuple", ")", ":", "_para", "[", "_pkey", "]", "+=", "(", "_pval", ",", ")", "else", ":", "_para", "[", "_pkey", "]", "=", "(", "_para", "[", "_pkey", "]", ",", "_pval", ")", "else", ":", "_para", "[", "_pkey", "]", "=", "_pval", "data", "=", "dict", "(", "flags", "=", "_flag", ",", ")", "data", ".", "update", "(", "_para", ")", "return", "data" ]
Read HTTP/2 SETTINGS frames. Structure of HTTP/2 SETTINGS frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------------------------------------------------------+ | Identifier (16) | +-------------------------------+-------------------------------+ | Value (32) | +---------------------------------------------------------------+ | ...... | Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.settings Settings 9 72 http.settings.id Identifier 10 80 http.settings.value Value
[ "Read", "HTTP", "/", "2", "SETTINGS", "frames", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/application/httpv2.py#L423-L487
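For comparison, the standalone sketch below parses a raw SETTINGS payload from a bytes buffer rather than through the class's internal file reader. It follows the 6-octet identifier/value layout of RFC 7540 Section 6.5.1; the function name parse_settings_payload, the SETTINGS_NAMES table and the bytes-based interface are illustrative only and are not part of PyPCAPKit.

import struct

# Known SETTINGS identifiers from RFC 7540 Section 6.5.2.
SETTINGS_NAMES = {
    0x1: 'SETTINGS_HEADER_TABLE_SIZE',
    0x2: 'SETTINGS_ENABLE_PUSH',
    0x3: 'SETTINGS_MAX_CONCURRENT_STREAMS',
    0x4: 'SETTINGS_INITIAL_WINDOW_SIZE',
    0x5: 'SETTINGS_MAX_FRAME_SIZE',
    0x6: 'SETTINGS_MAX_HEADER_LIST_SIZE',
}

def parse_settings_payload(payload: bytes) -> dict:
    """Parse a SETTINGS payload as 16-bit identifier / 32-bit value pairs."""
    if len(payload) % 6 != 0:                    # each setting occupies exactly 6 octets
        raise ValueError('malformed SETTINGS payload')
    settings = {}
    for offset in range(0, len(payload), 6):
        ident, value = struct.unpack('!HI', payload[offset:offset + 6])
        settings[SETTINGS_NAMES.get(ident, ident)] = value
    return settings

# SETTINGS_MAX_CONCURRENT_STREAMS = 100, SETTINGS_INITIAL_WINDOW_SIZE = 65535
sample = struct.pack('!HI', 0x3, 100) + struct.pack('!HI', 0x4, 65535)
print(parse_settings_payload(sample))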
JarryShaw/PyPCAPKit
src/protocols/application/httpv2.py
HTTPv2._read_http_push_promise
def _read_http_push_promise(self, size, kind, flag): """Read HTTP/2 PUSH_PROMISE frames. Structure of HTTP/2 PUSH_PROMISE frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------+-----------------------------------------------+ |Pad Length? (8)| +-+-------------+-----------------------------------------------+ |R| Promised Stream ID (31) | +-+-----------------------------+-------------------------------+ | Header Block Fragment (*) ... +---------------------------------------------------------------+ | Padding (*) ... +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (1) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.pad_len Pad Length (Optional) 10 80 - Reserved 10 81 http.pid Promised Stream ID 14 112 http.frag Header Block Fragment ? ? - Padding (Optional) """ if size < 4: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _plen = 0 _flag = dict( END_HEADERS=False, # bit 2 PADDED=False, # bit 3 ) for index, bit in enumerate(flag): if index == 2 and bit: _flag['END_HEADERS'] = True elif index == 3 and bit: _flag['PADDED'] = True _plen = self._read_unpack(1) elif bit: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) else: continue if _flag['PADDED']: _dlen = size - _plen - 5 else: _dlen = size - _plen - 4 if _dlen < 0: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _rpid = self._read_binary(4) _frag = self._read_fileng(_dlen) or None if int(_rpid[0], base=2): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) padding = self._read_binary(_plen) if any((int(bit, base=2) for bit in padding)): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) data = dict( flags=_flag, pid=int(_rpid[1:], base=2), frag=_frag, ) if _flag['PADDED']: data['ped_len'] = _plen return data
python
def _read_http_push_promise(self, size, kind, flag): """Read HTTP/2 PUSH_PROMISE frames. Structure of HTTP/2 PUSH_PROMISE frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------+-----------------------------------------------+ |Pad Length? (8)| +-+-------------+-----------------------------------------------+ |R| Promised Stream ID (31) | +-+-----------------------------+-------------------------------+ | Header Block Fragment (*) ... +---------------------------------------------------------------+ | Padding (*) ... +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (1) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.pad_len Pad Length (Optional) 10 80 - Reserved 10 81 http.pid Promised Stream ID 14 112 http.frag Header Block Fragment ? ? - Padding (Optional) """ if size < 4: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _plen = 0 _flag = dict( END_HEADERS=False, # bit 2 PADDED=False, # bit 3 ) for index, bit in enumerate(flag): if index == 2 and bit: _flag['END_HEADERS'] = True elif index == 3 and bit: _flag['PADDED'] = True _plen = self._read_unpack(1) elif bit: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) else: continue if _flag['PADDED']: _dlen = size - _plen - 5 else: _dlen = size - _plen - 4 if _dlen < 0: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _rpid = self._read_binary(4) _frag = self._read_fileng(_dlen) or None if int(_rpid[0], base=2): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) padding = self._read_binary(_plen) if any((int(bit, base=2) for bit in padding)): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) data = dict( flags=_flag, pid=int(_rpid[1:], base=2), frag=_frag, ) if _flag['PADDED']: data['ped_len'] = _plen return data
[ "def", "_read_http_push_promise", "(", "self", ",", "size", ",", "kind", ",", "flag", ")", ":", "if", "size", "<", "4", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "_plen", "=", "0", "_flag", "=", "dict", "(", "END_HEADERS", "=", "False", ",", "# bit 2", "PADDED", "=", "False", ",", "# bit 3", ")", "for", "index", ",", "bit", "in", "enumerate", "(", "flag", ")", ":", "if", "index", "==", "2", "and", "bit", ":", "_flag", "[", "'END_HEADERS'", "]", "=", "True", "elif", "index", "==", "3", "and", "bit", ":", "_flag", "[", "'PADDED'", "]", "=", "True", "_plen", "=", "self", ".", "_read_unpack", "(", "1", ")", "elif", "bit", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "else", ":", "continue", "if", "_flag", "[", "'PADDED'", "]", ":", "_dlen", "=", "size", "-", "_plen", "-", "5", "else", ":", "_dlen", "=", "size", "-", "_plen", "-", "4", "if", "_dlen", "<", "0", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "_rpid", "=", "self", ".", "_read_binary", "(", "4", ")", "_frag", "=", "self", ".", "_read_fileng", "(", "_dlen", ")", "or", "None", "if", "int", "(", "_rpid", "[", "0", "]", ",", "base", "=", "2", ")", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "padding", "=", "self", ".", "_read_binary", "(", "_plen", ")", "if", "any", "(", "(", "int", "(", "bit", ",", "base", "=", "2", ")", "for", "bit", "in", "padding", ")", ")", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "data", "=", "dict", "(", "flags", "=", "_flag", ",", "pid", "=", "int", "(", "_rpid", "[", "1", ":", "]", ",", "base", "=", "2", ")", ",", "frag", "=", "_frag", ",", ")", "if", "_flag", "[", "'PADDED'", "]", ":", "data", "[", "'ped_len'", "]", "=", "_plen", "return", "data" ]
Read HTTP/2 PUSH_PROMISE frames. Structure of HTTP/2 PUSH_PROMISE frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------+-----------------------------------------------+ |Pad Length? (8)| +-+-------------+-----------------------------------------------+ |R| Promised Stream ID (31) | +-+-----------------------------+-------------------------------+ | Header Block Fragment (*) ... +---------------------------------------------------------------+ | Padding (*) ... +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (1) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.pad_len Pad Length (Optional) 10 80 - Reserved 10 81 http.pid Promised Stream ID 14 112 http.frag Header Block Fragment ? ? - Padding (Optional)
[ "Read", "HTTP", "/", "2", "PUSH_PROMISE", "frames", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/application/httpv2.py#L489-L566
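The pad-length arithmetic above (four octets for the promised stream ID, plus one more when the PADDED flag is set) can be illustrated with a small standalone helper. split_push_promise and its bytes-based interface are hypothetical names used only for illustration; the reserved-bit mask and field order follow RFC 7540 Section 6.6.

def split_push_promise(payload: bytes, padded: bool) -> tuple:
    """Split a PUSH_PROMISE payload into (promised stream ID, header fragment, padding)."""
    pad_len = 0
    body = payload
    if padded:
        if not payload:
            raise ValueError('malformed PUSH_PROMISE payload')
        pad_len, body = payload[0], payload[1:]          # leading octet is the Pad Length field
    if len(body) < 4 + pad_len:
        raise ValueError('malformed PUSH_PROMISE payload')
    promised_sid = int.from_bytes(body[:4], 'big') & 0x7FFFFFFF   # drop the reserved bit
    fragment = body[4:len(body) - pad_len] if pad_len else body[4:]
    padding = body[len(body) - pad_len:] if pad_len else b''
    return promised_sid, fragment, padding

# 4-octet promised stream ID (3) followed by a dummy 5-octet fragment, no padding
sid, frag, pad = split_push_promise((3).to_bytes(4, 'big') + b'\x00\x01\x02\x03\x04', padded=False)
print(sid, frag, pad)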
JarryShaw/PyPCAPKit
src/protocols/application/httpv2.py
HTTPv2._read_http_ping
def _read_http_ping(self, size, kind, flag): """Read HTTP/2 PING frames. Structure of HTTP/2 PING frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------------------------------------------------------+ | | | Opaque Data (64) | | | +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.data Opaque Data """ if size != 8: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _flag = dict( ACK=False, # bit 0 ) for index, bit in enumerate(flag): if index == 0 and bit: _flag['ACK'] = True elif bit: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) else: continue _data = self._read_fileng(8) data = dict( flags=_flag, data=_data, ) return data
python
def _read_http_ping(self, size, kind, flag): """Read HTTP/2 PING frames. Structure of HTTP/2 PING frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------------------------------------------------------+ | | | Opaque Data (64) | | | +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.data Opaque Data """ if size != 8: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _flag = dict( ACK=False, # bit 0 ) for index, bit in enumerate(flag): if index == 0 and bit: _flag['ACK'] = True elif bit: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) else: continue _data = self._read_fileng(8) data = dict( flags=_flag, data=_data, ) return data
[ "def", "_read_http_ping", "(", "self", ",", "size", ",", "kind", ",", "flag", ")", ":", "if", "size", "!=", "8", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "_flag", "=", "dict", "(", "ACK", "=", "False", ",", "# bit 0", ")", "for", "index", ",", "bit", "in", "enumerate", "(", "flag", ")", ":", "if", "index", "==", "0", "and", "bit", ":", "_flag", "[", "'ACK'", "]", "=", "True", "elif", "bit", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "else", ":", "continue", "_data", "=", "self", ".", "_read_fileng", "(", "8", ")", "data", "=", "dict", "(", "flags", "=", "_flag", ",", "data", "=", "_data", ",", ")", "return", "data" ]
Read HTTP/2 PING frames. Structure of HTTP/2 PING frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +---------------------------------------------------------------+ | | | Opaque Data (64) | | | +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 http.data Opaque Data
[ "Read", "HTTP", "/", "2", "PING", "frames", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/application/httpv2.py#L568-L614
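A PING frame always carries exactly eight octets of opaque data on stream 0, which is what the size check above enforces. The builder below is an illustrative sketch rather than a PyPCAPKit helper; the type code 0x6 and the ACK flag bit come from RFC 7540 Section 6.7.

import os
import struct

PING_TYPE = 0x6   # RFC 7540 frame type code for PING
ACK_FLAG = 0x1    # the only flag PING defines

def build_ping_frame(opaque: bytes = None, ack: bool = False) -> bytes:
    """Build a complete PING frame: 9-octet frame header plus 8 octets of opaque data."""
    if opaque is None:
        opaque = os.urandom(8)
    if len(opaque) != 8:
        raise ValueError('PING opaque data must be exactly 8 octets')
    flags = ACK_FLAG if ack else 0
    # Length (24 bits) is always 8 and the Stream Identifier is always 0 for PING.
    header = struct.pack('!I', len(opaque))[1:] + struct.pack('!BBI', PING_TYPE, flags, 0)
    return header + opaque

frame = build_ping_frame(b'\x00' * 8)
print(len(frame), frame.hex())   # 17 octets: 9-octet header + 8-octet payload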
JarryShaw/PyPCAPKit
src/protocols/application/httpv2.py
HTTPv2._read_http_goaway
def _read_http_goaway(self, size, kind, flag): """Read HTTP/2 GOAWAY frames. Structure of HTTP/2 GOAWAY frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +-+-------------+---------------+-------------------------------+ |R| Last-Stream-ID (31) | +-+-------------------------------------------------------------+ | Error Code (32) | +---------------------------------------------------------------+ | Additional Debug Data (*) | +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 - Reserved 9 73 http.last_sid Last Stream ID 13 104 http.error Error Code 17 136 http.data Additional Debug Data (Optional) """ _dlen = size - 8 if _dlen < 0: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) if any((int(bit, base=2) for bit in flag)): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _rsid = self._read_binary(4) _code = self._read_unpack(4) _data = self._read_fileng(_dlen) or None if int(_rsid[0], base=2): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) data = dict( flags=None, last_sid=int(_rsid[1:], base=2), error=_ERROR_CODE.get(_code, _code), data=_data, ) return data
python
def _read_http_goaway(self, size, kind, flag): """Read HTTP/2 GOAWAY frames. Structure of HTTP/2 GOAWAY frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +-+-------------+---------------+-------------------------------+ |R| Last-Stream-ID (31) | +-+-------------------------------------------------------------+ | Error Code (32) | +---------------------------------------------------------------+ | Additional Debug Data (*) | +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 - Reserved 9 73 http.last_sid Last Stream ID 13 104 http.error Error Code 17 136 http.data Additional Debug Data (Optional) """ _dlen = size - 8 if _dlen < 0: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) if any((int(bit, base=2) for bit in flag)): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _rsid = self._read_binary(4) _code = self._read_unpack(4) _data = self._read_fileng(_dlen) or None if int(_rsid[0], base=2): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) data = dict( flags=None, last_sid=int(_rsid[1:], base=2), error=_ERROR_CODE.get(_code, _code), data=_data, ) return data
[ "def", "_read_http_goaway", "(", "self", ",", "size", ",", "kind", ",", "flag", ")", ":", "_dlen", "=", "size", "-", "8", "if", "_dlen", "<", "0", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "if", "any", "(", "(", "int", "(", "bit", ",", "base", "=", "2", ")", "for", "bit", "in", "flag", ")", ")", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "_rsid", "=", "self", ".", "_read_binary", "(", "4", ")", "_code", "=", "self", ".", "_read_unpack", "(", "4", ")", "_data", "=", "self", ".", "_read_fileng", "(", "_dlen", ")", "or", "None", "if", "int", "(", "_rsid", "[", "0", "]", ",", "base", "=", "2", ")", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "data", "=", "dict", "(", "flags", "=", "None", ",", "last_sid", "=", "int", "(", "_rsid", "[", "1", ":", "]", ",", "base", "=", "2", ")", ",", "error", "=", "_ERROR_CODE", ".", "get", "(", "_code", ",", "_code", ")", ",", "data", "=", "_data", ",", ")", "return", "data" ]
Read HTTP/2 GOAWAY frames. Structure of HTTP/2 GOAWAY frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +-+-------------+---------------+-------------------------------+ |R| Last-Stream-ID (31) | +-+-------------------------------------------------------------+ | Error Code (32) | +---------------------------------------------------------------+ | Additional Debug Data (*) | +---------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 - Reserved 9 73 http.last_sid Last Stream ID 13 104 http.error Error Code 17 136 http.data Additional Debug Data (Optional)
[ "Read", "HTTP", "/", "2", "GOAWAY", "frames", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/application/httpv2.py#L616-L666
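The reader above masks the reserved bit off the Last-Stream-ID and maps the error code through a lookup table. A minimal standalone equivalent, assuming only the RFC 7540 Section 6.8 layout (parse_goaway_payload is an illustrative name, not library API):

import struct

def parse_goaway_payload(payload: bytes) -> dict:
    """Parse a GOAWAY payload: R + Last-Stream-ID (32 bits), Error Code (32 bits), debug data."""
    if len(payload) < 8:
        raise ValueError('malformed GOAWAY payload')
    raw_sid, error_code = struct.unpack('!II', payload[:8])
    return {
        'last_sid': raw_sid & 0x7FFFFFFF,   # mask off the reserved high bit
        'error': error_code,                # e.g. 0x0 NO_ERROR, 0x1 PROTOCOL_ERROR
        'debug': payload[8:] or None,       # optional additional debug data
    }

print(parse_goaway_payload(struct.pack('!II', 5, 0) + b'shutting down'))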
JarryShaw/PyPCAPKit
src/protocols/application/httpv2.py
HTTPv2._read_http_window_update
def _read_http_window_update(self, size, kind, flag): """Read HTTP/2 WINDOW_UPDATE frames. Structure of HTTP/2 WINDOW_UPDATE frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +-+-------------+---------------+-------------------------------+ |R| Window Size Increment (31) | +-+-------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 - Reserved 9 73 http.window Window Size Increment """ if size != 4: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) if any((int(bit, base=2) for bit in flag)): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _size = self._read_binary(4) if int(_size[0], base=2): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) data = dict( flags=None, window=int(_size[1:], base=2), ) return data
python
def _read_http_window_update(self, size, kind, flag): """Read HTTP/2 WINDOW_UPDATE frames. Structure of HTTP/2 WINDOW_UPDATE frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +-+-------------+---------------+-------------------------------+ |R| Window Size Increment (31) | +-+-------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 - Reserved 9 73 http.window Window Size Increment """ if size != 4: raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) if any((int(bit, base=2) for bit in flag)): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) _size = self._read_binary(4) if int(_size[0], base=2): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) data = dict( flags=None, window=int(_size[1:], base=2), ) return data
[ "def", "_read_http_window_update", "(", "self", ",", "size", ",", "kind", ",", "flag", ")", ":", "if", "size", "!=", "4", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "if", "any", "(", "(", "int", "(", "bit", ",", "base", "=", "2", ")", "for", "bit", "in", "flag", ")", ")", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "_size", "=", "self", ".", "_read_binary", "(", "4", ")", "if", "int", "(", "_size", "[", "0", "]", ",", "base", "=", "2", ")", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "data", "=", "dict", "(", "flags", "=", "None", ",", "window", "=", "int", "(", "_size", "[", "1", ":", "]", ",", "base", "=", "2", ")", ",", ")", "return", "data" ]
Read HTTP/2 WINDOW_UPDATE frames. Structure of HTTP/2 WINDOW_UPDATE frame [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +-+-------------+---------------+-------------------------------+ |R| Window Size Increment (31) | +-+-------------------------------------------------------------+ Octets Bits Name Description 0 0 http.length Length 3 24 http.type Type (2) 4 32 http.flags Flags 5 40 - Reserved 5 41 http.sid Stream Identifier 9 72 - Reserved 9 73 http.window Window Size Increment
[ "Read", "HTTP", "/", "2", "WINDOW_UPDATE", "frames", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/application/httpv2.py#L668-L707
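The same reserved-bit masking applies to WINDOW_UPDATE, whose payload is a single 31-bit increment. A minimal sketch assuming only the RFC 7540 Section 6.9 layout (parse_window_update is an illustrative name):

def parse_window_update(payload: bytes) -> int:
    """Return the 31-bit Window Size Increment from a WINDOW_UPDATE payload."""
    if len(payload) != 4:
        raise ValueError('WINDOW_UPDATE payload must be exactly 4 octets')
    increment = int.from_bytes(payload, 'big') & 0x7FFFFFFF   # drop the reserved bit
    if increment == 0:
        raise ValueError('a zero increment is a protocol error (RFC 7540 Section 6.9)')
    return increment

print(parse_window_update((65535).to_bytes(4, 'big')))   # 65535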
JarryShaw/PyPCAPKit
src/protocols/application/httpv2.py
HTTPv2._read_http_continuation
def _read_http_continuation(self, size, kind, flag):
    """Read HTTP/2 CONTINUATION frames.

    Structure of HTTP/2 CONTINUATION frame [RFC 7540]:
        +-----------------------------------------------+
        |                 Length (24)                   |
        +---------------+---------------+---------------+
        |   Type (8)    |   Flags (8)   |
        +-+-------------+---------------+-------------------------------+
        |R|                 Stream Identifier (31)                      |
        +---------------------------------------------------------------+
        |                   Header Block Fragment (*)                 ...
        +---------------------------------------------------------------+

        Octets      Bits        Name                    Description
          0           0     http.length             Length
          3          24     http.type               Type (2)
          4          32     http.flags              Flags
          5          40     -                       Reserved
          5          41     http.sid                Stream Identifier
          9          73     http.frag               Header Block Fragment

    """
    _flag = dict(
        END_HEADERS=False,      # bit 2
    )
    for index, bit in enumerate(flag):
        if index == 2 and bit:
            _flag['END_HEADERS'] = True
        elif bit:
            raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True)
        else:
            continue

    _frag = self._read_fileng(size) or None

    data = dict(
        flags=_flag,
        frag=_frag,
    )

    return data
python
def _read_http_continuation(self, size, kind, flag):
    """Read HTTP/2 CONTINUATION frames.

    Structure of HTTP/2 CONTINUATION frame [RFC 7540]:
        +-----------------------------------------------+
        |                 Length (24)                   |
        +---------------+---------------+---------------+
        |   Type (8)    |   Flags (8)   |
        +-+-------------+---------------+-------------------------------+
        |R|                 Stream Identifier (31)                      |
        +---------------------------------------------------------------+
        |                   Header Block Fragment (*)                 ...
        +---------------------------------------------------------------+

        Octets      Bits        Name                    Description
          0           0     http.length             Length
          3          24     http.type               Type (2)
          4          32     http.flags              Flags
          5          40     -                       Reserved
          5          41     http.sid                Stream Identifier
          9          73     http.frag               Header Block Fragment

    """
    _flag = dict(
        END_HEADERS=False,      # bit 2
    )
    for index, bit in enumerate(flag):
        if index == 2 and bit:
            _flag['END_HEADERS'] = True
        elif bit:
            raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True)
        else:
            continue

    _frag = self._read_fileng(size) or None

    data = dict(
        flags=_flag,
        frag=_frag,
    )

    return data
[ "def", "_read_http_continuation", "(", "self", ",", "size", ",", "kind", ",", "flag", ")", ":", "_flag", "=", "dict", "(", "END_HEADERS", "=", "False", ",", "# bit 2", ")", "for", "index", ",", "bit", "in", "enumerate", "(", "flag", ")", ":", "if", "index", "==", "2", "and", "bit", ":", "_flag", "[", "'END_HEADERS'", "]", "=", "True", "elif", "bit", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")", "else", ":", "continue", "_frag", "=", "self", ".", "_read_fileng", "(", "size", ")", "or", "None", "data", "=", "dict", "(", "flags", "=", "_flag", ",", "frag", "=", "_frag", ",", ")", "return", "data" ]
Read HTTP/2 CONTINUATION frames.

    Structure of HTTP/2 CONTINUATION frame [RFC 7540]:
        +-----------------------------------------------+
        |                 Length (24)                   |
        +---------------+---------------+---------------+
        |   Type (8)    |   Flags (8)   |
        +-+-------------+---------------+-------------------------------+
        |R|                 Stream Identifier (31)                      |
        +---------------------------------------------------------------+
        |                   Header Block Fragment (*)                 ...
        +---------------------------------------------------------------+

        Octets      Bits        Name                    Description
          0           0     http.length             Length
          3          24     http.type               Type (2)
          4          32     http.flags              Flags
          5          40     -                       Reserved
          5          41     http.sid                Stream Identifier
          9          73     http.frag               Header Block Fragment
[ "Read", "HTTP", "/", "2", "WINDOW_UPDATE", "frames", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/application/httpv2.py#L709-L750
JarryShaw/PyPCAPKit
src/const/ipv4/option_number.py
OptionNumber.get
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return OptionNumber(key) if key not in OptionNumber._member_map_: extend_enum(OptionNumber, key, default) return OptionNumber[key]
python
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return OptionNumber(key) if key not in OptionNumber._member_map_: extend_enum(OptionNumber, key, default) return OptionNumber[key]
[ "def", "get", "(", "key", ",", "default", "=", "-", "1", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "return", "OptionNumber", "(", "key", ")", "if", "key", "not", "in", "OptionNumber", ".", "_member_map_", ":", "extend_enum", "(", "OptionNumber", ",", "key", ",", "default", ")", "return", "OptionNumber", "[", "key", "]" ]
Backport support for original codes.
[ "Backport", "support", "for", "original", "codes", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/const/ipv4/option_number.py#L44-L50
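This get() backport (and its identical siblings below for ProtectionAuthority, RouterAlert and QS_Function) looks a member up by numeric value or by name and registers unknown names on the fly. A self-contained sketch of the same pattern, assuming the third-party aenum package that provides the extend_enum helper is available; the Color enum and its members are purely hypothetical.

from aenum import IntEnum, extend_enum   # aenum allows members to be added at runtime

class Color(IntEnum):
    RED = 1
    GREEN = 2

def get(key, default=-1):
    """Look up a member by value or by name, registering unknown names on the fly."""
    if isinstance(key, int):
        return Color(key)                    # value lookup; raises ValueError if unknown
    if key not in Color.__members__:
        extend_enum(Color, key, default)     # register the missing name with the default value
    return Color[key]

print(get(1))             # Color.RED
print(get('BLUE', 255))   # newly registered Color.BLUE with value 255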
JarryShaw/PyPCAPKit
src/protocols/transport/transport.py
Transport._import_next_layer
def _import_next_layer(self, proto, length): """Import next layer extractor. Positional arguments: * proto -- str, next layer protocol name * length -- int, valid (not padding) length Returns: * bool -- flag if extraction of next layer succeeded * Info -- info of next layer * ProtoChain -- protocol chain of next layer * str -- alias of next layer """ if self._exproto == 'null' and self._exlayer == 'None': from pcapkit.protocols.raw import Raw as NextLayer else: from pcapkit.foundation.analysis import analyse as NextLayer # from pcapkit.foundation.analysis import analyse as NextLayer if length == 0: next_ = NoPayload() elif self._onerror: next_ = beholder_ng(NextLayer)(self._file, length, _termination=self._sigterm) else: next_ = NextLayer(self._file, length, _termination=self._sigterm) return next_
python
def _import_next_layer(self, proto, length): """Import next layer extractor. Positional arguments: * proto -- str, next layer protocol name * length -- int, valid (not padding) length Returns: * bool -- flag if extraction of next layer succeeded * Info -- info of next layer * ProtoChain -- protocol chain of next layer * str -- alias of next layer """ if self._exproto == 'null' and self._exlayer == 'None': from pcapkit.protocols.raw import Raw as NextLayer else: from pcapkit.foundation.analysis import analyse as NextLayer # from pcapkit.foundation.analysis import analyse as NextLayer if length == 0: next_ = NoPayload() elif self._onerror: next_ = beholder_ng(NextLayer)(self._file, length, _termination=self._sigterm) else: next_ = NextLayer(self._file, length, _termination=self._sigterm) return next_
[ "def", "_import_next_layer", "(", "self", ",", "proto", ",", "length", ")", ":", "if", "self", ".", "_exproto", "==", "'null'", "and", "self", ".", "_exlayer", "==", "'None'", ":", "from", "pcapkit", ".", "protocols", ".", "raw", "import", "Raw", "as", "NextLayer", "else", ":", "from", "pcapkit", ".", "foundation", ".", "analysis", "import", "analyse", "as", "NextLayer", "# from pcapkit.foundation.analysis import analyse as NextLayer", "if", "length", "==", "0", ":", "next_", "=", "NoPayload", "(", ")", "elif", "self", ".", "_onerror", ":", "next_", "=", "beholder_ng", "(", "NextLayer", ")", "(", "self", ".", "_file", ",", "length", ",", "_termination", "=", "self", ".", "_sigterm", ")", "else", ":", "next_", "=", "NextLayer", "(", "self", ".", "_file", ",", "length", ",", "_termination", "=", "self", ".", "_sigterm", ")", "return", "next_" ]
Import next layer extractor. Positional arguments: * proto -- str, next layer protocol name * length -- int, valid (not padding) length Returns: * bool -- flag if extraction of next layer succeeded * Info -- info of next layer * ProtoChain -- protocol chain of next layer * str -- alias of next layer
[ "Import", "next", "layer", "extractor", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/transport/transport.py#L65-L90
JarryShaw/PyPCAPKit
src/const/ipv4/protection_authority.py
ProtectionAuthority.get
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return ProtectionAuthority(key) if key not in ProtectionAuthority._member_map_: extend_enum(ProtectionAuthority, key, default) return ProtectionAuthority[key]
python
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return ProtectionAuthority(key) if key not in ProtectionAuthority._member_map_: extend_enum(ProtectionAuthority, key, default) return ProtectionAuthority[key]
[ "def", "get", "(", "key", ",", "default", "=", "-", "1", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "return", "ProtectionAuthority", "(", "key", ")", "if", "key", "not", "in", "ProtectionAuthority", ".", "_member_map_", ":", "extend_enum", "(", "ProtectionAuthority", ",", "key", ",", "default", ")", "return", "ProtectionAuthority", "[", "key", "]" ]
Backport support for original codes.
[ "Backport", "support", "for", "original", "codes", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/const/ipv4/protection_authority.py#L22-L28
JarryShaw/PyPCAPKit
src/const/ipv6/router_alert.py
RouterAlert.get
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return RouterAlert(key) if key not in RouterAlert._member_map_: extend_enum(RouterAlert, key, default) return RouterAlert[key]
python
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return RouterAlert(key) if key not in RouterAlert._member_map_: extend_enum(RouterAlert, key, default) return RouterAlert[key]
[ "def", "get", "(", "key", ",", "default", "=", "-", "1", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "return", "RouterAlert", "(", "key", ")", "if", "key", "not", "in", "RouterAlert", ".", "_member_map_", ":", "extend_enum", "(", "RouterAlert", ",", "key", ",", "default", ")", "return", "RouterAlert", "[", "key", "]" ]
Backport support for original codes.
[ "Backport", "support", "for", "original", "codes", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/const/ipv6/router_alert.py#L85-L91
JarryShaw/PyPCAPKit
src/const/ipv6/qs_function.py
QS_Function.get
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return QS_Function(key) if key not in QS_Function._member_map_: extend_enum(QS_Function, key, default) return QS_Function[key]
python
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return QS_Function(key) if key not in QS_Function._member_map_: extend_enum(QS_Function, key, default) return QS_Function[key]
[ "def", "get", "(", "key", ",", "default", "=", "-", "1", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "return", "QS_Function", "(", "key", ")", "if", "key", "not", "in", "QS_Function", ".", "_member_map_", ":", "extend_enum", "(", "QS_Function", ",", "key", ",", "default", ")", "return", "QS_Function", "[", "key", "]" ]
Backport support for original codes.
[ "Backport", "support", "for", "original", "codes", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/const/ipv6/qs_function.py#L16-L22
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor.run
def run(self): """Start extraction.""" flag = True if self._exeng == 'dpkt': flag, engine = self.import_test('dpkt', name='DPKT') if flag: return self._run_dpkt(engine) elif self._exeng == 'scapy': flag, engine = self.import_test('scapy.all', name='Scapy') if flag: return self._run_scapy(engine) elif self._exeng == 'pyshark': flag, engine = self.import_test('pyshark', name='PyShark') if flag: return self._run_pyshark(engine) elif self._exeng == 'pipeline': flag, engine = self.import_test('multiprocessing', name='Pipeline Multiprocessing') self._flag_m = flag = bool(flag and (self._flag_a and CPU_CNT > 1)) if self._flag_m: return self._run_pipeline(engine) warnings.warn(f'extraction engine Pipeline Multiprocessing is not available; ' 'using default engine instead', EngineWarning, stacklevel=stacklevel()) elif self._exeng == 'server': flag, engine = self.import_test('multiprocessing', name='Server Multiprocessing') self._flag_m = flag = bool(flag and (self._flag_a and CPU_CNT > 2)) if self._flag_m: return self._run_server(engine) warnings.warn(f'extraction engine Server Multiprocessing is not available; ' 'using default engine instead', EngineWarning, stacklevel=stacklevel()) elif self._exeng not in ('default', 'pcapkit'): flag = False warnings.warn(f'unsupported extraction engine: {self._exeng}; ' 'using default engine instead', EngineWarning, stacklevel=stacklevel()) # using default/pcapkit engine self._exeng = self._exeng if flag else 'default' self.record_header() # read PCAP global header self.record_frames()
python
def run(self): """Start extraction.""" flag = True if self._exeng == 'dpkt': flag, engine = self.import_test('dpkt', name='DPKT') if flag: return self._run_dpkt(engine) elif self._exeng == 'scapy': flag, engine = self.import_test('scapy.all', name='Scapy') if flag: return self._run_scapy(engine) elif self._exeng == 'pyshark': flag, engine = self.import_test('pyshark', name='PyShark') if flag: return self._run_pyshark(engine) elif self._exeng == 'pipeline': flag, engine = self.import_test('multiprocessing', name='Pipeline Multiprocessing') self._flag_m = flag = bool(flag and (self._flag_a and CPU_CNT > 1)) if self._flag_m: return self._run_pipeline(engine) warnings.warn(f'extraction engine Pipeline Multiprocessing is not available; ' 'using default engine instead', EngineWarning, stacklevel=stacklevel()) elif self._exeng == 'server': flag, engine = self.import_test('multiprocessing', name='Server Multiprocessing') self._flag_m = flag = bool(flag and (self._flag_a and CPU_CNT > 2)) if self._flag_m: return self._run_server(engine) warnings.warn(f'extraction engine Server Multiprocessing is not available; ' 'using default engine instead', EngineWarning, stacklevel=stacklevel()) elif self._exeng not in ('default', 'pcapkit'): flag = False warnings.warn(f'unsupported extraction engine: {self._exeng}; ' 'using default engine instead', EngineWarning, stacklevel=stacklevel()) # using default/pcapkit engine self._exeng = self._exeng if flag else 'default' self.record_header() # read PCAP global header self.record_frames()
[ "def", "run", "(", "self", ")", ":", "flag", "=", "True", "if", "self", ".", "_exeng", "==", "'dpkt'", ":", "flag", ",", "engine", "=", "self", ".", "import_test", "(", "'dpkt'", ",", "name", "=", "'DPKT'", ")", "if", "flag", ":", "return", "self", ".", "_run_dpkt", "(", "engine", ")", "elif", "self", ".", "_exeng", "==", "'scapy'", ":", "flag", ",", "engine", "=", "self", ".", "import_test", "(", "'scapy.all'", ",", "name", "=", "'Scapy'", ")", "if", "flag", ":", "return", "self", ".", "_run_scapy", "(", "engine", ")", "elif", "self", ".", "_exeng", "==", "'pyshark'", ":", "flag", ",", "engine", "=", "self", ".", "import_test", "(", "'pyshark'", ",", "name", "=", "'PyShark'", ")", "if", "flag", ":", "return", "self", ".", "_run_pyshark", "(", "engine", ")", "elif", "self", ".", "_exeng", "==", "'pipeline'", ":", "flag", ",", "engine", "=", "self", ".", "import_test", "(", "'multiprocessing'", ",", "name", "=", "'Pipeline Multiprocessing'", ")", "self", ".", "_flag_m", "=", "flag", "=", "bool", "(", "flag", "and", "(", "self", ".", "_flag_a", "and", "CPU_CNT", ">", "1", ")", ")", "if", "self", ".", "_flag_m", ":", "return", "self", ".", "_run_pipeline", "(", "engine", ")", "warnings", ".", "warn", "(", "f'extraction engine Pipeline Multiprocessing is not available; '", "'using default engine instead'", ",", "EngineWarning", ",", "stacklevel", "=", "stacklevel", "(", ")", ")", "elif", "self", ".", "_exeng", "==", "'server'", ":", "flag", ",", "engine", "=", "self", ".", "import_test", "(", "'multiprocessing'", ",", "name", "=", "'Server Multiprocessing'", ")", "self", ".", "_flag_m", "=", "flag", "=", "bool", "(", "flag", "and", "(", "self", ".", "_flag_a", "and", "CPU_CNT", ">", "2", ")", ")", "if", "self", ".", "_flag_m", ":", "return", "self", ".", "_run_server", "(", "engine", ")", "warnings", ".", "warn", "(", "f'extraction engine Server Multiprocessing is not available; '", "'using default engine instead'", ",", "EngineWarning", ",", "stacklevel", "=", "stacklevel", "(", ")", ")", "elif", "self", ".", "_exeng", "not", "in", "(", "'default'", ",", "'pcapkit'", ")", ":", "flag", "=", "False", "warnings", ".", "warn", "(", "f'unsupported extraction engine: {self._exeng}; '", "'using default engine instead'", ",", "EngineWarning", ",", "stacklevel", "=", "stacklevel", "(", ")", ")", "# using default/pcapkit engine", "self", ".", "_exeng", "=", "self", ".", "_exeng", "if", "flag", "else", "'default'", "self", ".", "record_header", "(", ")", "# read PCAP global header", "self", ".", "record_frames", "(", ")" ]
Start extraction.
[ "Start", "extraction", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L224-L261
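run() is essentially an import-test-and-fallback dispatcher: it probes for the requested backend and degrades to the default engine with an EngineWarning when the probe fails. A condensed, generic sketch of that pattern (load_engine and the module map are illustrative, not PyPCAPKit API):

import importlib
import warnings

def load_engine(name: str):
    """Return the requested backend module, or None with a warning if it is unavailable."""
    module_names = {'scapy': 'scapy.all', 'dpkt': 'dpkt', 'pyshark': 'pyshark'}
    try:
        return importlib.import_module(module_names[name])
    except (KeyError, ImportError):
        warnings.warn(f'engine {name!r} unavailable; falling back to the default engine')
        return None

engine = load_engine('dpkt')
print('dpkt loaded' if engine else 'using default engine')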
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor.record_header
def record_header(self): """Read global header. - Extract global header. - Make Info object out of header properties. - Append Info. - Write plist file. """ self._gbhdr = Header(self._ifile) self._vinfo = self._gbhdr.version self._dlink = self._gbhdr.protocol self._nnsec = self._gbhdr.nanosecond if self._trace is not NotImplemented: self._trace._endian = self._gbhdr.byteorder self._trace._nnsecd = self._gbhdr.nanosecond if not self._flag_q: if self._flag_f: ofile = self._ofile(f'{self._ofnm}/Global Header.{self._fext}') ofile(self._gbhdr.info, name='Global Header') self._type = ofile.kind else: self._ofile(self._gbhdr.info, name='Global Header') self._type = self._ofile.kind
python
def record_header(self): """Read global header. - Extract global header. - Make Info object out of header properties. - Append Info. - Write plist file. """ self._gbhdr = Header(self._ifile) self._vinfo = self._gbhdr.version self._dlink = self._gbhdr.protocol self._nnsec = self._gbhdr.nanosecond if self._trace is not NotImplemented: self._trace._endian = self._gbhdr.byteorder self._trace._nnsecd = self._gbhdr.nanosecond if not self._flag_q: if self._flag_f: ofile = self._ofile(f'{self._ofnm}/Global Header.{self._fext}') ofile(self._gbhdr.info, name='Global Header') self._type = ofile.kind else: self._ofile(self._gbhdr.info, name='Global Header') self._type = self._ofile.kind
[ "def", "record_header", "(", "self", ")", ":", "self", ".", "_gbhdr", "=", "Header", "(", "self", ".", "_ifile", ")", "self", ".", "_vinfo", "=", "self", ".", "_gbhdr", ".", "version", "self", ".", "_dlink", "=", "self", ".", "_gbhdr", ".", "protocol", "self", ".", "_nnsec", "=", "self", ".", "_gbhdr", ".", "nanosecond", "if", "self", ".", "_trace", "is", "not", "NotImplemented", ":", "self", ".", "_trace", ".", "_endian", "=", "self", ".", "_gbhdr", ".", "byteorder", "self", ".", "_trace", ".", "_nnsecd", "=", "self", ".", "_gbhdr", ".", "nanosecond", "if", "not", "self", ".", "_flag_q", ":", "if", "self", ".", "_flag_f", ":", "ofile", "=", "self", ".", "_ofile", "(", "f'{self._ofnm}/Global Header.{self._fext}'", ")", "ofile", "(", "self", ".", "_gbhdr", ".", "info", ",", "name", "=", "'Global Header'", ")", "self", ".", "_type", "=", "ofile", ".", "kind", "else", ":", "self", ".", "_ofile", "(", "self", ".", "_gbhdr", ".", "info", ",", "name", "=", "'Global Header'", ")", "self", ".", "_type", "=", "self", ".", "_ofile", ".", "kind" ]
Read global header. - Extract global header. - Make Info object out of header properties. - Append Info. - Write plist file.
[ "Read", "global", "header", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L345-L370
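record_header() relies on the Header class to decode the 24-octet PCAP global header, from which the byte order, version, nanosecond-resolution flag and link-layer protocol are derived. Below is a standalone sketch that decodes the same header from a bytes buffer instead of a file object; read_global_header is an illustrative name, and the magic numbers follow the classic libpcap file format.

import struct

def read_global_header(raw: bytes) -> dict:
    """Decode the 24-octet PCAP global header."""
    if len(raw) < 24:
        raise ValueError('truncated global header')
    magic = raw[:4]
    if magic == b'\xd4\xc3\xb2\xa1':
        endian, nanosecond = '<', False      # little-endian, microsecond timestamps
    elif magic == b'\xa1\xb2\xc3\xd4':
        endian, nanosecond = '>', False      # big-endian, microsecond timestamps
    elif magic == b'\x4d\x3c\xb2\xa1':
        endian, nanosecond = '<', True       # little-endian, nanosecond timestamps
    elif magic == b'\xa1\xb2\x3c\x4d':
        endian, nanosecond = '>', True       # big-endian, nanosecond timestamps
    else:
        raise ValueError('unknown magic number')
    major, minor, _zone, _sigfigs, snaplen, network = struct.unpack(endian + 'HHiIII', raw[4:24])
    return dict(version=(major, minor), nanosecond=nanosecond,
                snaplen=snaplen, network=network, byteorder=endian)

# A minimal little-endian header: version 2.4, snaplen 262144, network 1 (Ethernet)
sample = b'\xd4\xc3\xb2\xa1' + struct.pack('<HHiIII', 2, 4, 0, 0, 262144, 1)
print(read_global_header(sample))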
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._cleanup
def _cleanup(self): """Cleanup after extraction & analysis.""" self._expkg = None self._extmp = None self._flag_e = True self._ifile.close()
python
def _cleanup(self): """Cleanup after extraction & analysis.""" self._expkg = None self._extmp = None self._flag_e = True self._ifile.close()
[ "def", "_cleanup", "(", "self", ")", ":", "self", ".", "_expkg", "=", "None", "self", ".", "_extmp", "=", "None", "self", ".", "_flag_e", "=", "True", "self", ".", "_ifile", ".", "close", "(", ")" ]
Cleanup after extraction & analysis.
[ "Cleanup", "after", "extraction", "&", "analysis", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L566-L571
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._aftermathmp
def _aftermathmp(self): """Aftermath for multiprocessing.""" if not self._flag_e and self._flag_m: # join processes [proc.join() for proc in self._mpprc] if self._exeng == 'server': self._mpsvc.join() # restore attributes if self._exeng == 'server': self._frame = list(self._mpfrm) self._reasm = list(self._mprsm) self._trace = copy.deepcopy(self._mpkit.trace) if self._exeng == 'pipeline': self._frame = [self._mpkit.frames[x] for x in sorted(self._mpkit.frames)] self._reasm = copy.deepcopy(self._mpkit.reassembly) self._trace = copy.deepcopy(self._mpkit.trace) # shutdown & cleanup self._mpmng.shutdown() [delattr(self, attr) for attr in filter(lambda s: s.startswith('_mp'), dir(self))] self._frnum -= 2
python
def _aftermathmp(self): """Aftermath for multiprocessing.""" if not self._flag_e and self._flag_m: # join processes [proc.join() for proc in self._mpprc] if self._exeng == 'server': self._mpsvc.join() # restore attributes if self._exeng == 'server': self._frame = list(self._mpfrm) self._reasm = list(self._mprsm) self._trace = copy.deepcopy(self._mpkit.trace) if self._exeng == 'pipeline': self._frame = [self._mpkit.frames[x] for x in sorted(self._mpkit.frames)] self._reasm = copy.deepcopy(self._mpkit.reassembly) self._trace = copy.deepcopy(self._mpkit.trace) # shutdown & cleanup self._mpmng.shutdown() [delattr(self, attr) for attr in filter(lambda s: s.startswith('_mp'), dir(self))] self._frnum -= 2
[ "def", "_aftermathmp", "(", "self", ")", ":", "if", "not", "self", ".", "_flag_e", "and", "self", ".", "_flag_m", ":", "# join processes", "[", "proc", ".", "join", "(", ")", "for", "proc", "in", "self", ".", "_mpprc", "]", "if", "self", ".", "_exeng", "==", "'server'", ":", "self", ".", "_mpsvc", ".", "join", "(", ")", "# restore attributes", "if", "self", ".", "_exeng", "==", "'server'", ":", "self", ".", "_frame", "=", "list", "(", "self", ".", "_mpfrm", ")", "self", ".", "_reasm", "=", "list", "(", "self", ".", "_mprsm", ")", "self", ".", "_trace", "=", "copy", ".", "deepcopy", "(", "self", ".", "_mpkit", ".", "trace", ")", "if", "self", ".", "_exeng", "==", "'pipeline'", ":", "self", ".", "_frame", "=", "[", "self", ".", "_mpkit", ".", "frames", "[", "x", "]", "for", "x", "in", "sorted", "(", "self", ".", "_mpkit", ".", "frames", ")", "]", "self", ".", "_reasm", "=", "copy", ".", "deepcopy", "(", "self", ".", "_mpkit", ".", "reassembly", ")", "self", ".", "_trace", "=", "copy", ".", "deepcopy", "(", "self", ".", "_mpkit", ".", "trace", ")", "# shutdown & cleanup", "self", ".", "_mpmng", ".", "shutdown", "(", ")", "[", "delattr", "(", "self", ",", "attr", ")", "for", "attr", "in", "filter", "(", "lambda", "s", ":", "s", ".", "startswith", "(", "'_mp'", ")", ",", "dir", "(", "self", ")", ")", "]", "self", ".", "_frnum", "-=", "2" ]
Aftermath for multiprocessing.
[ "Aftermath", "for", "multiprocessing", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L573-L594
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._update_eof
def _update_eof(self): """Update EOF flag.""" self._aftermathmp() self._ifile.close() self._flag_e = True
python
def _update_eof(self): """Update EOF flag.""" self._aftermathmp() self._ifile.close() self._flag_e = True
[ "def", "_update_eof", "(", "self", ")", ":", "self", ".", "_aftermathmp", "(", ")", "self", ".", "_ifile", ".", "close", "(", ")", "self", ".", "_flag_e", "=", "True" ]
Update EOF flag.
[ "Update", "EOF", "flag", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L597-L601
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._read_frame
def _read_frame(self): """Headquarters for frame reader.""" if self._exeng == 'scapy': return self._scapy_read_frame() elif self._exeng == 'dpkt': return self._dpkt_read_frame() elif self._exeng == 'pyshark': return self._pyshark_read_frame() else: return self._default_read_frame()
python
def _read_frame(self): """Headquarters for frame reader.""" if self._exeng == 'scapy': return self._scapy_read_frame() elif self._exeng == 'dpkt': return self._dpkt_read_frame() elif self._exeng == 'pyshark': return self._pyshark_read_frame() else: return self._default_read_frame()
[ "def", "_read_frame", "(", "self", ")", ":", "if", "self", ".", "_exeng", "==", "'scapy'", ":", "return", "self", ".", "_scapy_read_frame", "(", ")", "elif", "self", ".", "_exeng", "==", "'dpkt'", ":", "return", "self", ".", "_dpkt_read_frame", "(", ")", "elif", "self", ".", "_exeng", "==", "'pyshark'", ":", "return", "self", ".", "_pyshark_read_frame", "(", ")", "else", ":", "return", "self", ".", "_default_read_frame", "(", ")" ]
Headquarters for frame reader.
[ "Headquarters", "for", "frame", "reader", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L603-L612
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._default_read_frame
def _default_read_frame(self, *, frame=None, mpkit=None): """Read frames with default engine. - Extract frames and each layer of packets. - Make Info object out of frame properties. - Append Info. - Write plist & append Info. """ from pcapkit.toolkit.default import (ipv4_reassembly, ipv6_reassembly, tcp_reassembly, tcp_traceflow) # read frame header if not self._flag_m: frame = Frame(self._ifile, num=self._frnum+1, proto=self._dlink, layer=self._exlyr, protocol=self._exptl, nanosecond=self._nnsec) self._frnum += 1 # verbose output if self._flag_v: print(f' - Frame {self._frnum:>3d}: {frame.protochain}') # write plist frnum = f'Frame {self._frnum}' if not self._flag_q: if self._flag_f: ofile = self._ofile(f'{self._ofnm}/{frnum}.{self._fext}') ofile(frame.info, name=frnum) else: self._ofile(frame.info, name=frnum) # record fragments if self._ipv4: flag, data = ipv4_reassembly(frame) if flag: self._reasm[0](data) # pylint: disable=E1102 if self._ipv6: flag, data = ipv6_reassembly(frame) if flag: self._reasm[1](data) # pylint: disable=E1102 if self._tcp: flag, data = tcp_reassembly(frame) if flag: self._reasm[2](data) # pylint: disable=E1102 # trace flows if self._flag_t: flag, data = tcp_traceflow(frame, data_link=self._dlink) if flag: self._trace(data) # record frames if self._exeng == 'pipeline': if self._flag_d: # frame._file = NotImplemented mpkit.frames[self._frnum] = frame # print(self._frnum, 'stored') mpkit.current += 1 elif self._exeng == 'server': # record frames if self._flag_d: # frame._file = NotImplemented self._frame.append(frame) # print(self._frnum, 'stored') self._frnum += 1 else: if self._flag_d: self._frame.append(frame) self._proto = frame.protochain.chain # return frame record return frame
python
def _default_read_frame(self, *, frame=None, mpkit=None): """Read frames with default engine. - Extract frames and each layer of packets. - Make Info object out of frame properties. - Append Info. - Write plist & append Info. """ from pcapkit.toolkit.default import (ipv4_reassembly, ipv6_reassembly, tcp_reassembly, tcp_traceflow) # read frame header if not self._flag_m: frame = Frame(self._ifile, num=self._frnum+1, proto=self._dlink, layer=self._exlyr, protocol=self._exptl, nanosecond=self._nnsec) self._frnum += 1 # verbose output if self._flag_v: print(f' - Frame {self._frnum:>3d}: {frame.protochain}') # write plist frnum = f'Frame {self._frnum}' if not self._flag_q: if self._flag_f: ofile = self._ofile(f'{self._ofnm}/{frnum}.{self._fext}') ofile(frame.info, name=frnum) else: self._ofile(frame.info, name=frnum) # record fragments if self._ipv4: flag, data = ipv4_reassembly(frame) if flag: self._reasm[0](data) # pylint: disable=E1102 if self._ipv6: flag, data = ipv6_reassembly(frame) if flag: self._reasm[1](data) # pylint: disable=E1102 if self._tcp: flag, data = tcp_reassembly(frame) if flag: self._reasm[2](data) # pylint: disable=E1102 # trace flows if self._flag_t: flag, data = tcp_traceflow(frame, data_link=self._dlink) if flag: self._trace(data) # record frames if self._exeng == 'pipeline': if self._flag_d: # frame._file = NotImplemented mpkit.frames[self._frnum] = frame # print(self._frnum, 'stored') mpkit.current += 1 elif self._exeng == 'server': # record frames if self._flag_d: # frame._file = NotImplemented self._frame.append(frame) # print(self._frnum, 'stored') self._frnum += 1 else: if self._flag_d: self._frame.append(frame) self._proto = frame.protochain.chain # return frame record return frame
[ "def", "_default_read_frame", "(", "self", ",", "*", ",", "frame", "=", "None", ",", "mpkit", "=", "None", ")", ":", "from", "pcapkit", ".", "toolkit", ".", "default", "import", "(", "ipv4_reassembly", ",", "ipv6_reassembly", ",", "tcp_reassembly", ",", "tcp_traceflow", ")", "# read frame header", "if", "not", "self", ".", "_flag_m", ":", "frame", "=", "Frame", "(", "self", ".", "_ifile", ",", "num", "=", "self", ".", "_frnum", "+", "1", ",", "proto", "=", "self", ".", "_dlink", ",", "layer", "=", "self", ".", "_exlyr", ",", "protocol", "=", "self", ".", "_exptl", ",", "nanosecond", "=", "self", ".", "_nnsec", ")", "self", ".", "_frnum", "+=", "1", "# verbose output", "if", "self", ".", "_flag_v", ":", "print", "(", "f' - Frame {self._frnum:>3d}: {frame.protochain}'", ")", "# write plist", "frnum", "=", "f'Frame {self._frnum}'", "if", "not", "self", ".", "_flag_q", ":", "if", "self", ".", "_flag_f", ":", "ofile", "=", "self", ".", "_ofile", "(", "f'{self._ofnm}/{frnum}.{self._fext}'", ")", "ofile", "(", "frame", ".", "info", ",", "name", "=", "frnum", ")", "else", ":", "self", ".", "_ofile", "(", "frame", ".", "info", ",", "name", "=", "frnum", ")", "# record fragments", "if", "self", ".", "_ipv4", ":", "flag", ",", "data", "=", "ipv4_reassembly", "(", "frame", ")", "if", "flag", ":", "self", ".", "_reasm", "[", "0", "]", "(", "data", ")", "# pylint: disable=E1102", "if", "self", ".", "_ipv6", ":", "flag", ",", "data", "=", "ipv6_reassembly", "(", "frame", ")", "if", "flag", ":", "self", ".", "_reasm", "[", "1", "]", "(", "data", ")", "# pylint: disable=E1102", "if", "self", ".", "_tcp", ":", "flag", ",", "data", "=", "tcp_reassembly", "(", "frame", ")", "if", "flag", ":", "self", ".", "_reasm", "[", "2", "]", "(", "data", ")", "# pylint: disable=E1102", "# trace flows", "if", "self", ".", "_flag_t", ":", "flag", ",", "data", "=", "tcp_traceflow", "(", "frame", ",", "data_link", "=", "self", ".", "_dlink", ")", "if", "flag", ":", "self", ".", "_trace", "(", "data", ")", "# record frames", "if", "self", ".", "_exeng", "==", "'pipeline'", ":", "if", "self", ".", "_flag_d", ":", "# frame._file = NotImplemented", "mpkit", ".", "frames", "[", "self", ".", "_frnum", "]", "=", "frame", "# print(self._frnum, 'stored')", "mpkit", ".", "current", "+=", "1", "elif", "self", ".", "_exeng", "==", "'server'", ":", "# record frames", "if", "self", ".", "_flag_d", ":", "# frame._file = NotImplemented", "self", ".", "_frame", ".", "append", "(", "frame", ")", "# print(self._frnum, 'stored')", "self", ".", "_frnum", "+=", "1", "else", ":", "if", "self", ".", "_flag_d", ":", "self", ".", "_frame", ".", "append", "(", "frame", ")", "self", ".", "_proto", "=", "frame", ".", "protochain", ".", "chain", "# return frame record", "return", "frame" ]
Read frames with default engine. - Extract frames and each layer of packets. - Make Info object out of frame properties. - Append Info. - Write plist & append Info.
[ "Read", "frames", "with", "default", "engine", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L614-L685
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._run_scapy
def _run_scapy(self, scapy_all): """Call scapy.all.sniff to extract PCAP files.""" # if not self._flag_a: # self._flag_a = True # warnings.warn(f"'Extractor(engine=scapy)' object is not iterable; " # "so 'auto=False' will be ignored", AttributeWarning, stacklevel=stacklevel()) if self._exlyr != 'None' or self._exptl != 'null': warnings.warn("'Extractor(engine=scapy)' does not support protocol and layer threshold; " f"'layer={self._exlyr}' and 'protocol={self._exptl}' ignored", AttributeWarning, stacklevel=stacklevel()) # extract & analyse file self._expkg = scapy_all self._extmp = iter(scapy_all.sniff(offline=self._ifnm)) # start iteration self.record_frames()
python
def _run_scapy(self, scapy_all): """Call scapy.all.sniff to extract PCAP files.""" # if not self._flag_a: # self._flag_a = True # warnings.warn(f"'Extractor(engine=scapy)' object is not iterable; " # "so 'auto=False' will be ignored", AttributeWarning, stacklevel=stacklevel()) if self._exlyr != 'None' or self._exptl != 'null': warnings.warn("'Extractor(engine=scapy)' does not support protocol and layer threshold; " f"'layer={self._exlyr}' and 'protocol={self._exptl}' ignored", AttributeWarning, stacklevel=stacklevel()) # extract & analyse file self._expkg = scapy_all self._extmp = iter(scapy_all.sniff(offline=self._ifnm)) # start iteration self.record_frames()
[ "def", "_run_scapy", "(", "self", ",", "scapy_all", ")", ":", "# if not self._flag_a:", "# self._flag_a = True", "# warnings.warn(f\"'Extractor(engine=scapy)' object is not iterable; \"", "# \"so 'auto=False' will be ignored\", AttributeWarning, stacklevel=stacklevel())", "if", "self", ".", "_exlyr", "!=", "'None'", "or", "self", ".", "_exptl", "!=", "'null'", ":", "warnings", ".", "warn", "(", "\"'Extractor(engine=scapy)' does not support protocol and layer threshold; \"", "f\"'layer={self._exlyr}' and 'protocol={self._exptl}' ignored\"", ",", "AttributeWarning", ",", "stacklevel", "=", "stacklevel", "(", ")", ")", "# extract & analyse file", "self", ".", "_expkg", "=", "scapy_all", "self", ".", "_extmp", "=", "iter", "(", "scapy_all", ".", "sniff", "(", "offline", "=", "self", ".", "_ifnm", ")", ")", "# start iteration", "self", ".", "record_frames", "(", ")" ]
Call scapy.all.sniff to extract PCAP files.
[ "Call", "scapy", ".", "all", ".", "sniff", "to", "extract", "PCAP", "files", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L687-L704
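When the Scapy engine is selected, extraction boils down to iterating scapy.all.sniff(offline=...) over the capture file. A minimal usage sketch, assuming Scapy is installed and with 'example.pcap' as a placeholder path for an existing capture:

from scapy.all import sniff   # requires the optional Scapy dependency

packets = sniff(offline='example.pcap')        # read every frame from the capture file
for index, packet in enumerate(packets, start=1):
    print(f' - Frame {index:>3d}: {packet.summary()}')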
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._scapy_read_frame
def _scapy_read_frame(self): """Read frames with Scapy.""" from pcapkit.toolkit.scapy import (ipv4_reassembly, ipv6_reassembly, packet2chain, packet2dict, tcp_reassembly, tcp_traceflow) # fetch Scapy packet packet = next(self._extmp) # verbose output self._frnum += 1 self._proto = packet2chain(packet) if self._flag_v: print(f' - Frame {self._frnum:>3d}: {self._proto}') # write plist frnum = f'Frame {self._frnum}' if not self._flag_q: info = packet2dict(packet) if self._flag_f: ofile = self._ofile(f'{self._ofnm}/{frnum}.{self._fext}') ofile(info, name=frnum) else: self._ofile(info, name=frnum) # record frames if self._flag_d: # setattr(packet, 'packet2dict', packet2dict) # setattr(packet, 'packet2chain', packet2chain) self._frame.append(packet) # record fragments if self._ipv4: flag, data = ipv4_reassembly(packet, count=self._frnum) if flag: self._reasm[0](data) # pylint: disable=E1102 if self._ipv6: flag, data = ipv6_reassembly(packet, count=self._frnum) if flag: self._reasm[1](data) # pylint: disable=E1102 if self._tcp: flag, data = tcp_reassembly(packet, count=self._frnum) if flag: self._reasm[2](data) # pylint: disable=E1102 # trace flows if self._flag_t: flag, data = tcp_traceflow(packet, count=self._frnum) if flag: self._trace(data) return packet
python
def _scapy_read_frame(self): """Read frames with Scapy.""" from pcapkit.toolkit.scapy import (ipv4_reassembly, ipv6_reassembly, packet2chain, packet2dict, tcp_reassembly, tcp_traceflow) # fetch Scapy packet packet = next(self._extmp) # verbose output self._frnum += 1 self._proto = packet2chain(packet) if self._flag_v: print(f' - Frame {self._frnum:>3d}: {self._proto}') # write plist frnum = f'Frame {self._frnum}' if not self._flag_q: info = packet2dict(packet) if self._flag_f: ofile = self._ofile(f'{self._ofnm}/{frnum}.{self._fext}') ofile(info, name=frnum) else: self._ofile(info, name=frnum) # record frames if self._flag_d: # setattr(packet, 'packet2dict', packet2dict) # setattr(packet, 'packet2chain', packet2chain) self._frame.append(packet) # record fragments if self._ipv4: flag, data = ipv4_reassembly(packet, count=self._frnum) if flag: self._reasm[0](data) # pylint: disable=E1102 if self._ipv6: flag, data = ipv6_reassembly(packet, count=self._frnum) if flag: self._reasm[1](data) # pylint: disable=E1102 if self._tcp: flag, data = tcp_reassembly(packet, count=self._frnum) if flag: self._reasm[2](data) # pylint: disable=E1102 # trace flows if self._flag_t: flag, data = tcp_traceflow(packet, count=self._frnum) if flag: self._trace(data) return packet
[ "def", "_scapy_read_frame", "(", "self", ")", ":", "from", "pcapkit", ".", "toolkit", ".", "scapy", "import", "(", "ipv4_reassembly", ",", "ipv6_reassembly", ",", "packet2chain", ",", "packet2dict", ",", "tcp_reassembly", ",", "tcp_traceflow", ")", "# fetch Scapy packet", "packet", "=", "next", "(", "self", ".", "_extmp", ")", "# verbose output", "self", ".", "_frnum", "+=", "1", "self", ".", "_proto", "=", "packet2chain", "(", "packet", ")", "if", "self", ".", "_flag_v", ":", "print", "(", "f' - Frame {self._frnum:>3d}: {self._proto}'", ")", "# write plist", "frnum", "=", "f'Frame {self._frnum}'", "if", "not", "self", ".", "_flag_q", ":", "info", "=", "packet2dict", "(", "packet", ")", "if", "self", ".", "_flag_f", ":", "ofile", "=", "self", ".", "_ofile", "(", "f'{self._ofnm}/{frnum}.{self._fext}'", ")", "ofile", "(", "info", ",", "name", "=", "frnum", ")", "else", ":", "self", ".", "_ofile", "(", "info", ",", "name", "=", "frnum", ")", "# record frames", "if", "self", ".", "_flag_d", ":", "# setattr(packet, 'packet2dict', packet2dict)", "# setattr(packet, 'packet2chain', packet2chain)", "self", ".", "_frame", ".", "append", "(", "packet", ")", "# record fragments", "if", "self", ".", "_ipv4", ":", "flag", ",", "data", "=", "ipv4_reassembly", "(", "packet", ",", "count", "=", "self", ".", "_frnum", ")", "if", "flag", ":", "self", ".", "_reasm", "[", "0", "]", "(", "data", ")", "# pylint: disable=E1102", "if", "self", ".", "_ipv6", ":", "flag", ",", "data", "=", "ipv6_reassembly", "(", "packet", ",", "count", "=", "self", ".", "_frnum", ")", "if", "flag", ":", "self", ".", "_reasm", "[", "1", "]", "(", "data", ")", "# pylint: disable=E1102", "if", "self", ".", "_tcp", ":", "flag", ",", "data", "=", "tcp_reassembly", "(", "packet", ",", "count", "=", "self", ".", "_frnum", ")", "if", "flag", ":", "self", ".", "_reasm", "[", "2", "]", "(", "data", ")", "# pylint: disable=E1102", "# trace flows", "if", "self", ".", "_flag_t", ":", "flag", ",", "data", "=", "tcp_traceflow", "(", "packet", ",", "count", "=", "self", ".", "_frnum", ")", "if", "flag", ":", "self", ".", "_trace", "(", "data", ")", "return", "packet" ]
Read frames with Scapy.
[ "Read", "frames", "with", "Scapy", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L706-L757
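_scapy_read_frame relies on packet2chain from pcapkit.toolkit.scapy to turn a Scapy packet into a textual protocol chain. The toolkit's real code is not shown here, but the idea can be sketched independently by walking the layers of a packet:

from scapy.all import Ether, IP, TCP, Raw

def chain(packet):
    """Join the layer names of a Scapy packet with colons, e.g. 'Ethernet:IP:TCP:Raw'."""
    names = []
    index = 0
    layer = packet.getlayer(index)
    while layer is not None:
        names.append(layer.name)
        index += 1
        layer = packet.getlayer(index)
    return ':'.join(names)

print(chain(Ether() / IP() / TCP() / Raw(b'data')))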
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._run_dpkt
def _run_dpkt(self, dpkt): """Call dpkt.pcap.Reader to extract PCAP files.""" # if not self._flag_a: # self._flag_a = True # warnings.warn(f"'Extractor(engine=dpkt)' object is not iterable; " # "so 'auto=False' will be ignored", AttributeWarning, stacklevel=stacklevel()) if self._exlyr != 'None' or self._exptl != 'null': warnings.warn("'Extractor(engine=dpkt)' does not support protocol and layer threshold; " f"'layer={self._exlyr}' and 'protocol={self._exptl}' ignored", AttributeWarning, stacklevel=stacklevel()) # extract global header self.record_header() self._ifile.seek(0, os.SEEK_SET) # extract & analyse file self._expkg = dpkt self._extmp = iter(dpkt.pcap.Reader(self._ifile)) # start iteration self.record_frames()
python
def _run_dpkt(self, dpkt): """Call dpkt.pcap.Reader to extract PCAP files.""" # if not self._flag_a: # self._flag_a = True # warnings.warn(f"'Extractor(engine=dpkt)' object is not iterable; " # "so 'auto=False' will be ignored", AttributeWarning, stacklevel=stacklevel()) if self._exlyr != 'None' or self._exptl != 'null': warnings.warn("'Extractor(engine=dpkt)' does not support protocol and layer threshold; " f"'layer={self._exlyr}' and 'protocol={self._exptl}' ignored", AttributeWarning, stacklevel=stacklevel()) # extract global header self.record_header() self._ifile.seek(0, os.SEEK_SET) # extract & analyse file self._expkg = dpkt self._extmp = iter(dpkt.pcap.Reader(self._ifile)) # start iteration self.record_frames()
[ "def", "_run_dpkt", "(", "self", ",", "dpkt", ")", ":", "# if not self._flag_a:", "# self._flag_a = True", "# warnings.warn(f\"'Extractor(engine=dpkt)' object is not iterable; \"", "# \"so 'auto=False' will be ignored\", AttributeWarning, stacklevel=stacklevel())", "if", "self", ".", "_exlyr", "!=", "'None'", "or", "self", ".", "_exptl", "!=", "'null'", ":", "warnings", ".", "warn", "(", "\"'Extractor(engine=dpkt)' does not support protocol and layer threshold; \"", "f\"'layer={self._exlyr}' and 'protocol={self._exptl}' ignored\"", ",", "AttributeWarning", ",", "stacklevel", "=", "stacklevel", "(", ")", ")", "# extract global header", "self", ".", "record_header", "(", ")", "self", ".", "_ifile", ".", "seek", "(", "0", ",", "os", ".", "SEEK_SET", ")", "# extract & analyse file", "self", ".", "_expkg", "=", "dpkt", "self", ".", "_extmp", "=", "iter", "(", "dpkt", ".", "pcap", ".", "Reader", "(", "self", ".", "_ifile", ")", ")", "# start iteration", "self", ".", "record_frames", "(", ")" ]
Call dpkt.pcap.Reader to extract PCAP files.
[ "Call", "dpkt", ".", "pcap", ".", "Reader", "to", "extract", "PCAP", "files", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L759-L780
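The DPKT engine above hands the heavy lifting to dpkt.pcap.Reader, which iterates over (timestamp, raw bytes) pairs. A minimal stand-alone loop looks like this (the file name is a placeholder, and an Ethernet link layer is assumed):

import dpkt

with open('in.pcap', 'rb') as file:
    for timestamp, buffer in dpkt.pcap.Reader(file):
        frame = dpkt.ethernet.Ethernet(buffer)
        print(timestamp, frame.__class__.__name__, len(buffer))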
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._dpkt_read_frame
def _dpkt_read_frame(self): """Read frames.""" from pcapkit.toolkit.dpkt import (ipv4_reassembly, ipv6_reassembly, packet2chain, packet2dict, tcp_reassembly, tcp_traceflow) # fetch DPKT packet timestamp, packet = next(self._extmp) # extract packet if self._dlink.value == 1: packet = self._expkg.ethernet.Ethernet(packet) elif self._dlink.value == 228: packet = self._expkg.ip.IP(packet) elif self._dlink.value == 229: packet = self._expkg.ip6.IP6(packet) else: warnings.warn('unrecognised link layer protocol; all analysis functions ignored', DPKTWarning, stacklevel=stacklevel()) self._frnum += 1 if self._flag_d: self._frame.append(packet) return packet # verbose output self._frnum += 1 self._proto = packet2chain(packet) if self._flag_v: print(f' - Frame {self._frnum:>3d}: {self._proto}') # write plist frnum = f'Frame {self._frnum}' if not self._flag_q: info = packet2dict(packet, timestamp, data_link=self._dlink) if self._flag_f: ofile = self._ofile(f'{self._ofnm}/{frnum}.{self._fext}') ofile(info, name=frnum) else: self._ofile(info, name=frnum) # record frames if self._flag_d: setattr(packet, 'packet2dict', packet2dict) setattr(packet, 'packet2chain', packet2chain) self._frame.append(packet) # record fragments if self._ipv4: flag, data = ipv4_reassembly(packet, count=self._frnum) if flag: self._reasm[0](data) # pylint: disable=E1102 if self._ipv6: flag, data = ipv6_reassembly(packet, count=self._frnum) if flag: self._reasm[1](data) # pylint: disable=E1102 if self._tcp: flag, data = tcp_reassembly(packet, count=self._frnum) if flag: self._reasm[2](data) # pylint: disable=E1102 # trace flows if self._flag_t: flag, data = tcp_traceflow(packet, timestamp, data_link=self._dlink, count=self._frnum) if flag: self._trace(data) return packet
python
def _dpkt_read_frame(self): """Read frames.""" from pcapkit.toolkit.dpkt import (ipv4_reassembly, ipv6_reassembly, packet2chain, packet2dict, tcp_reassembly, tcp_traceflow) # fetch DPKT packet timestamp, packet = next(self._extmp) # extract packet if self._dlink.value == 1: packet = self._expkg.ethernet.Ethernet(packet) elif self._dlink.value == 228: packet = self._expkg.ip.IP(packet) elif self._dlink.value == 229: packet = self._expkg.ip6.IP6(packet) else: warnings.warn('unrecognised link layer protocol; all analysis functions ignored', DPKTWarning, stacklevel=stacklevel()) self._frnum += 1 if self._flag_d: self._frame.append(packet) return packet # verbose output self._frnum += 1 self._proto = packet2chain(packet) if self._flag_v: print(f' - Frame {self._frnum:>3d}: {self._proto}') # write plist frnum = f'Frame {self._frnum}' if not self._flag_q: info = packet2dict(packet, timestamp, data_link=self._dlink) if self._flag_f: ofile = self._ofile(f'{self._ofnm}/{frnum}.{self._fext}') ofile(info, name=frnum) else: self._ofile(info, name=frnum) # record frames if self._flag_d: setattr(packet, 'packet2dict', packet2dict) setattr(packet, 'packet2chain', packet2chain) self._frame.append(packet) # record fragments if self._ipv4: flag, data = ipv4_reassembly(packet, count=self._frnum) if flag: self._reasm[0](data) # pylint: disable=E1102 if self._ipv6: flag, data = ipv6_reassembly(packet, count=self._frnum) if flag: self._reasm[1](data) # pylint: disable=E1102 if self._tcp: flag, data = tcp_reassembly(packet, count=self._frnum) if flag: self._reasm[2](data) # pylint: disable=E1102 # trace flows if self._flag_t: flag, data = tcp_traceflow(packet, timestamp, data_link=self._dlink, count=self._frnum) if flag: self._trace(data) return packet
[ "def", "_dpkt_read_frame", "(", "self", ")", ":", "from", "pcapkit", ".", "toolkit", ".", "dpkt", "import", "(", "ipv4_reassembly", ",", "ipv6_reassembly", ",", "packet2chain", ",", "packet2dict", ",", "tcp_reassembly", ",", "tcp_traceflow", ")", "# fetch DPKT packet", "timestamp", ",", "packet", "=", "next", "(", "self", ".", "_extmp", ")", "# extract packet", "if", "self", ".", "_dlink", ".", "value", "==", "1", ":", "packet", "=", "self", ".", "_expkg", ".", "ethernet", ".", "Ethernet", "(", "packet", ")", "elif", "self", ".", "_dlink", ".", "value", "==", "228", ":", "packet", "=", "self", ".", "_expkg", ".", "ip", ".", "IP", "(", "packet", ")", "elif", "self", ".", "_dlink", ".", "value", "==", "229", ":", "packet", "=", "self", ".", "_expkg", ".", "ip6", ".", "IP6", "(", "packet", ")", "else", ":", "warnings", ".", "warn", "(", "'unrecognised link layer protocol; all analysis functions ignored'", ",", "DPKTWarning", ",", "stacklevel", "=", "stacklevel", "(", ")", ")", "self", ".", "_frnum", "+=", "1", "if", "self", ".", "_flag_d", ":", "self", ".", "_frame", ".", "append", "(", "packet", ")", "return", "packet", "# verbose output", "self", ".", "_frnum", "+=", "1", "self", ".", "_proto", "=", "packet2chain", "(", "packet", ")", "if", "self", ".", "_flag_v", ":", "print", "(", "f' - Frame {self._frnum:>3d}: {self._proto}'", ")", "# write plist", "frnum", "=", "f'Frame {self._frnum}'", "if", "not", "self", ".", "_flag_q", ":", "info", "=", "packet2dict", "(", "packet", ",", "timestamp", ",", "data_link", "=", "self", ".", "_dlink", ")", "if", "self", ".", "_flag_f", ":", "ofile", "=", "self", ".", "_ofile", "(", "f'{self._ofnm}/{frnum}.{self._fext}'", ")", "ofile", "(", "info", ",", "name", "=", "frnum", ")", "else", ":", "self", ".", "_ofile", "(", "info", ",", "name", "=", "frnum", ")", "# record frames", "if", "self", ".", "_flag_d", ":", "setattr", "(", "packet", ",", "'packet2dict'", ",", "packet2dict", ")", "setattr", "(", "packet", ",", "'packet2chain'", ",", "packet2chain", ")", "self", ".", "_frame", ".", "append", "(", "packet", ")", "# record fragments", "if", "self", ".", "_ipv4", ":", "flag", ",", "data", "=", "ipv4_reassembly", "(", "packet", ",", "count", "=", "self", ".", "_frnum", ")", "if", "flag", ":", "self", ".", "_reasm", "[", "0", "]", "(", "data", ")", "# pylint: disable=E1102", "if", "self", ".", "_ipv6", ":", "flag", ",", "data", "=", "ipv6_reassembly", "(", "packet", ",", "count", "=", "self", ".", "_frnum", ")", "if", "flag", ":", "self", ".", "_reasm", "[", "1", "]", "(", "data", ")", "# pylint: disable=E1102", "if", "self", ".", "_tcp", ":", "flag", ",", "data", "=", "tcp_reassembly", "(", "packet", ",", "count", "=", "self", ".", "_frnum", ")", "if", "flag", ":", "self", ".", "_reasm", "[", "2", "]", "(", "data", ")", "# pylint: disable=E1102", "# trace flows", "if", "self", ".", "_flag_t", ":", "flag", ",", "data", "=", "tcp_traceflow", "(", "packet", ",", "timestamp", ",", "data_link", "=", "self", ".", "_dlink", ",", "count", "=", "self", ".", "_frnum", ")", "if", "flag", ":", "self", ".", "_trace", "(", "data", ")", "return", "packet" ]
Read frames.
[ "Read", "frames", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L782-L848
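_dpkt_read_frame picks a parser from the capture's link-layer type (1 for Ethernet, 228 for raw IPv4, 229 for raw IPv6) and keeps the raw buffer otherwise. The same dispatch can be written as a lookup table; this is an illustrative sketch, not the extractor's actual code:

import dpkt

LINKTYPE_PARSERS = {
    1: dpkt.ethernet.Ethernet,   # LINKTYPE_ETHERNET
    228: dpkt.ip.IP,             # LINKTYPE_IPV4
    229: dpkt.ip6.IP6,           # LINKTYPE_IPV6
}

def parse_frame(linktype, buffer):
    """Parse one captured buffer, keeping the raw bytes for unknown link types."""
    parser = LINKTYPE_PARSERS.get(linktype)
    return buffer if parser is None else parser(buffer)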
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._run_pyshark
def _run_pyshark(self, pyshark): """Call pyshark.FileCapture to extract PCAP files.""" # if not self._flag_a: # self._flag_a = True # warnings.warn(f"'Extractor(engine=pyshark)' object is not iterable; " # "so 'auto=False' will be ignored", AttributeWarning, stacklevel=stacklevel()) if self._exlyr != 'None' or self._exptl != 'null': warnings.warn("'Extractor(engine=pyshark)' does not support protocol and layer threshold; " f"'layer={self._exlyr}' and 'protocol={self._exptl}' ignored", AttributeWarning, stacklevel=stacklevel()) if (self._ipv4 or self._ipv6 or self._tcp): self._ipv4 = self._ipv6 = self._tcp = False self._reasm = [None] * 3 warnings.warn("'Extractor(engine=pyshark)' object dose not support reassembly; " f"so 'ipv4={self._ipv4}', 'ipv6={self._ipv6}' and 'tcp={self._tcp}' will be ignored", AttributeWarning, stacklevel=stacklevel()) # extract & analyse file self._expkg = pyshark self._extmp = iter(pyshark.FileCapture(self._ifnm, keep_packets=False)) # start iteration self.record_frames()
python
def _run_pyshark(self, pyshark): """Call pyshark.FileCapture to extract PCAP files.""" # if not self._flag_a: # self._flag_a = True # warnings.warn(f"'Extractor(engine=pyshark)' object is not iterable; " # "so 'auto=False' will be ignored", AttributeWarning, stacklevel=stacklevel()) if self._exlyr != 'None' or self._exptl != 'null': warnings.warn("'Extractor(engine=pyshark)' does not support protocol and layer threshold; " f"'layer={self._exlyr}' and 'protocol={self._exptl}' ignored", AttributeWarning, stacklevel=stacklevel()) if (self._ipv4 or self._ipv6 or self._tcp): self._ipv4 = self._ipv6 = self._tcp = False self._reasm = [None] * 3 warnings.warn("'Extractor(engine=pyshark)' object dose not support reassembly; " f"so 'ipv4={self._ipv4}', 'ipv6={self._ipv6}' and 'tcp={self._tcp}' will be ignored", AttributeWarning, stacklevel=stacklevel()) # extract & analyse file self._expkg = pyshark self._extmp = iter(pyshark.FileCapture(self._ifnm, keep_packets=False)) # start iteration self.record_frames()
[ "def", "_run_pyshark", "(", "self", ",", "pyshark", ")", ":", "# if not self._flag_a:", "# self._flag_a = True", "# warnings.warn(f\"'Extractor(engine=pyshark)' object is not iterable; \"", "# \"so 'auto=False' will be ignored\", AttributeWarning, stacklevel=stacklevel())", "if", "self", ".", "_exlyr", "!=", "'None'", "or", "self", ".", "_exptl", "!=", "'null'", ":", "warnings", ".", "warn", "(", "\"'Extractor(engine=pyshark)' does not support protocol and layer threshold; \"", "f\"'layer={self._exlyr}' and 'protocol={self._exptl}' ignored\"", ",", "AttributeWarning", ",", "stacklevel", "=", "stacklevel", "(", ")", ")", "if", "(", "self", ".", "_ipv4", "or", "self", ".", "_ipv6", "or", "self", ".", "_tcp", ")", ":", "self", ".", "_ipv4", "=", "self", ".", "_ipv6", "=", "self", ".", "_tcp", "=", "False", "self", ".", "_reasm", "=", "[", "None", "]", "*", "3", "warnings", ".", "warn", "(", "\"'Extractor(engine=pyshark)' object dose not support reassembly; \"", "f\"so 'ipv4={self._ipv4}', 'ipv6={self._ipv6}' and 'tcp={self._tcp}' will be ignored\"", ",", "AttributeWarning", ",", "stacklevel", "=", "stacklevel", "(", ")", ")", "# extract & analyse file", "self", ".", "_expkg", "=", "pyshark", "self", ".", "_extmp", "=", "iter", "(", "pyshark", ".", "FileCapture", "(", "self", ".", "_ifnm", ",", "keep_packets", "=", "False", ")", ")", "# start iteration", "self", ".", "record_frames", "(", ")" ]
Call pyshark.FileCapture to extract PCAP files.
[ "Call", "pyshark", ".", "FileCapture", "to", "extract", "PCAP", "files", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L850-L874
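The PyShark engine above delegates all parsing to tshark through pyshark.FileCapture; keep_packets=False stops the capture object from caching every packet in memory. A stand-alone illustration (placeholder file name, and tshark must be installed for PyShark to work):

import pyshark

capture = pyshark.FileCapture('in.pcap', keep_packets=False)
for packet in capture:
    print(f' - Frame {int(packet.number):>3d}: {packet.frame_info.protocols}')
capture.close()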
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._pyshark_read_frame
def _pyshark_read_frame(self): """Read frames.""" from pcapkit.toolkit.pyshark import packet2dict, tcp_traceflow # fetch PyShark packet packet = next(self._extmp) # def _pyshark_packet2chain(packet): # """Fetch PyShark packet protocol chain.""" # return ':'.join(map(lambda layer: layer.layer_name.upper(), packet.layers)) # verbose output self._frnum = int(packet.number) self._proto = packet.frame_info.protocols if self._flag_v: print(f' - Frame {self._frnum:>3d}: {self._proto}') # write plist frnum = f'Frame {self._frnum}' if not self._flag_q: info = packet2dict(packet) if self._flag_f: ofile = self._ofile(f'{self._ofnm}/{frnum}.{self._fext}') ofile(info, name=frnum) else: self._ofile(info, name=frnum) # record frames if self._flag_d: setattr(packet, 'packet2dict', packet2dict) self._frame.append(packet) # trace flows if self._flag_t: flag, data = tcp_traceflow(packet) if flag: self._trace(data) return packet
python
def _pyshark_read_frame(self): """Read frames.""" from pcapkit.toolkit.pyshark import packet2dict, tcp_traceflow # fetch PyShark packet packet = next(self._extmp) # def _pyshark_packet2chain(packet): # """Fetch PyShark packet protocol chain.""" # return ':'.join(map(lambda layer: layer.layer_name.upper(), packet.layers)) # verbose output self._frnum = int(packet.number) self._proto = packet.frame_info.protocols if self._flag_v: print(f' - Frame {self._frnum:>3d}: {self._proto}') # write plist frnum = f'Frame {self._frnum}' if not self._flag_q: info = packet2dict(packet) if self._flag_f: ofile = self._ofile(f'{self._ofnm}/{frnum}.{self._fext}') ofile(info, name=frnum) else: self._ofile(info, name=frnum) # record frames if self._flag_d: setattr(packet, 'packet2dict', packet2dict) self._frame.append(packet) # trace flows if self._flag_t: flag, data = tcp_traceflow(packet) if flag: self._trace(data) return packet
[ "def", "_pyshark_read_frame", "(", "self", ")", ":", "from", "pcapkit", ".", "toolkit", ".", "pyshark", "import", "packet2dict", ",", "tcp_traceflow", "# fetch PyShark packet", "packet", "=", "next", "(", "self", ".", "_extmp", ")", "# def _pyshark_packet2chain(packet):", "# \"\"\"Fetch PyShark packet protocol chain.\"\"\"", "# return ':'.join(map(lambda layer: layer.layer_name.upper(), packet.layers))", "# verbose output", "self", ".", "_frnum", "=", "int", "(", "packet", ".", "number", ")", "self", ".", "_proto", "=", "packet", ".", "frame_info", ".", "protocols", "if", "self", ".", "_flag_v", ":", "print", "(", "f' - Frame {self._frnum:>3d}: {self._proto}'", ")", "# write plist", "frnum", "=", "f'Frame {self._frnum}'", "if", "not", "self", ".", "_flag_q", ":", "info", "=", "packet2dict", "(", "packet", ")", "if", "self", ".", "_flag_f", ":", "ofile", "=", "self", ".", "_ofile", "(", "f'{self._ofnm}/{frnum}.{self._fext}'", ")", "ofile", "(", "info", ",", "name", "=", "frnum", ")", "else", ":", "self", ".", "_ofile", "(", "info", ",", "name", "=", "frnum", ")", "# record frames", "if", "self", ".", "_flag_d", ":", "setattr", "(", "packet", ",", "'packet2dict'", ",", "packet2dict", ")", "self", ".", "_frame", ".", "append", "(", "packet", ")", "# trace flows", "if", "self", ".", "_flag_t", ":", "flag", ",", "data", "=", "tcp_traceflow", "(", "packet", ")", "if", "flag", ":", "self", ".", "_trace", "(", "data", ")", "return", "packet" ]
Read frames.
[ "Read", "frames", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L876-L914
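packet2dict from pcapkit.toolkit.pyshark flattens a PyShark packet into plain Python containers before dumping. A rough stand-alone equivalent over PyShark's public layer API might look like the sketch below; it is an approximation, not the toolkit's real implementation:

def packet_to_dict(packet):
    """Collect every named field of every layer into a nested dict."""
    info = {}
    for layer in packet.layers:
        info[layer.layer_name] = {
            name: layer.get_field_value(name) for name in layer.field_names
        }
    return info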
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._run_pipeline
def _run_pipeline(self, multiprocessing): """Use pipeline multiprocessing to extract PCAP files.""" if not self._flag_m: raise UnsupportedCall(f"Extractor(engine={self._exeng})' has no attribute '_run_pipline'") if not self._flag_q: self._flag_q = True warnings.warn("'Extractor(engine=pipeline)' does not support output; " f"'fout={self._ofnm}' ignored", AttributeWarning, stacklevel=stacklevel()) self._frnum = 1 # frame number (revised) self._expkg = multiprocessing # multiprocessing module self._mpprc = list() # multiprocessing process list self._mpfdp = collections.defaultdict(multiprocessing.Queue) # multiprocessing file pointer self._mpmng = multiprocessing.Manager() # multiprocessing manager self._mpkit = self._mpmng.Namespace() # multiprocessing work kit self._mpkit.counter = 0 # work count (on duty) self._mpkit.pool = 1 # work pool (ready) self._mpkit.current = 1 # current frame number self._mpkit.eof = False # EOF flag self._mpkit.frames = dict() # frame storage self._mpkit.trace = self._trace # flow tracer self._mpkit.reassembly = copy.deepcopy(self._reasm) # reassembly buffers # preparation self.record_header() self._mpfdp[0].put(self._gbhdr.length) # extraction while True: # check EOF if self._mpkit.eof: self._update_eof() break # check counter if self._mpkit.pool and self._mpkit.counter < CPU_CNT: # update file offset self._ifile.seek(self._mpfdp.pop(self._frnum-1).get(), os.SEEK_SET) # create worker # print(self._frnum, 'start') proc = multiprocessing.Process( target=self._pipeline_read_frame, kwargs={'mpkit': self._mpkit, 'mpfdp': self._mpfdp[self._frnum]} ) # update status self._mpkit.pool -= 1 self._mpkit.counter += 1 # start and record proc.start() self._frnum += 1 self._mpprc.append(proc) # check buffer if len(self._mpprc) >= CPU_CNT: [proc.join() for proc in self._mpprc[:-4]] del self._mpprc[:-4]
python
def _run_pipeline(self, multiprocessing): """Use pipeline multiprocessing to extract PCAP files.""" if not self._flag_m: raise UnsupportedCall(f"Extractor(engine={self._exeng})' has no attribute '_run_pipline'") if not self._flag_q: self._flag_q = True warnings.warn("'Extractor(engine=pipeline)' does not support output; " f"'fout={self._ofnm}' ignored", AttributeWarning, stacklevel=stacklevel()) self._frnum = 1 # frame number (revised) self._expkg = multiprocessing # multiprocessing module self._mpprc = list() # multiprocessing process list self._mpfdp = collections.defaultdict(multiprocessing.Queue) # multiprocessing file pointer self._mpmng = multiprocessing.Manager() # multiprocessing manager self._mpkit = self._mpmng.Namespace() # multiprocessing work kit self._mpkit.counter = 0 # work count (on duty) self._mpkit.pool = 1 # work pool (ready) self._mpkit.current = 1 # current frame number self._mpkit.eof = False # EOF flag self._mpkit.frames = dict() # frame storage self._mpkit.trace = self._trace # flow tracer self._mpkit.reassembly = copy.deepcopy(self._reasm) # reassembly buffers # preparation self.record_header() self._mpfdp[0].put(self._gbhdr.length) # extraction while True: # check EOF if self._mpkit.eof: self._update_eof() break # check counter if self._mpkit.pool and self._mpkit.counter < CPU_CNT: # update file offset self._ifile.seek(self._mpfdp.pop(self._frnum-1).get(), os.SEEK_SET) # create worker # print(self._frnum, 'start') proc = multiprocessing.Process( target=self._pipeline_read_frame, kwargs={'mpkit': self._mpkit, 'mpfdp': self._mpfdp[self._frnum]} ) # update status self._mpkit.pool -= 1 self._mpkit.counter += 1 # start and record proc.start() self._frnum += 1 self._mpprc.append(proc) # check buffer if len(self._mpprc) >= CPU_CNT: [proc.join() for proc in self._mpprc[:-4]] del self._mpprc[:-4]
[ "def", "_run_pipeline", "(", "self", ",", "multiprocessing", ")", ":", "if", "not", "self", ".", "_flag_m", ":", "raise", "UnsupportedCall", "(", "f\"Extractor(engine={self._exeng})' has no attribute '_run_pipline'\"", ")", "if", "not", "self", ".", "_flag_q", ":", "self", ".", "_flag_q", "=", "True", "warnings", ".", "warn", "(", "\"'Extractor(engine=pipeline)' does not support output; \"", "f\"'fout={self._ofnm}' ignored\"", ",", "AttributeWarning", ",", "stacklevel", "=", "stacklevel", "(", ")", ")", "self", ".", "_frnum", "=", "1", "# frame number (revised)", "self", ".", "_expkg", "=", "multiprocessing", "# multiprocessing module", "self", ".", "_mpprc", "=", "list", "(", ")", "# multiprocessing process list", "self", ".", "_mpfdp", "=", "collections", ".", "defaultdict", "(", "multiprocessing", ".", "Queue", ")", "# multiprocessing file pointer", "self", ".", "_mpmng", "=", "multiprocessing", ".", "Manager", "(", ")", "# multiprocessing manager", "self", ".", "_mpkit", "=", "self", ".", "_mpmng", ".", "Namespace", "(", ")", "# multiprocessing work kit", "self", ".", "_mpkit", ".", "counter", "=", "0", "# work count (on duty)", "self", ".", "_mpkit", ".", "pool", "=", "1", "# work pool (ready)", "self", ".", "_mpkit", ".", "current", "=", "1", "# current frame number", "self", ".", "_mpkit", ".", "eof", "=", "False", "# EOF flag", "self", ".", "_mpkit", ".", "frames", "=", "dict", "(", ")", "# frame storage", "self", ".", "_mpkit", ".", "trace", "=", "self", ".", "_trace", "# flow tracer", "self", ".", "_mpkit", ".", "reassembly", "=", "copy", ".", "deepcopy", "(", "self", ".", "_reasm", ")", "# reassembly buffers", "# preparation", "self", ".", "record_header", "(", ")", "self", ".", "_mpfdp", "[", "0", "]", ".", "put", "(", "self", ".", "_gbhdr", ".", "length", ")", "# extraction", "while", "True", ":", "# check EOF", "if", "self", ".", "_mpkit", ".", "eof", ":", "self", ".", "_update_eof", "(", ")", "break", "# check counter", "if", "self", ".", "_mpkit", ".", "pool", "and", "self", ".", "_mpkit", ".", "counter", "<", "CPU_CNT", ":", "# update file offset", "self", ".", "_ifile", ".", "seek", "(", "self", ".", "_mpfdp", ".", "pop", "(", "self", ".", "_frnum", "-", "1", ")", ".", "get", "(", ")", ",", "os", ".", "SEEK_SET", ")", "# create worker", "# print(self._frnum, 'start')", "proc", "=", "multiprocessing", ".", "Process", "(", "target", "=", "self", ".", "_pipeline_read_frame", ",", "kwargs", "=", "{", "'mpkit'", ":", "self", ".", "_mpkit", ",", "'mpfdp'", ":", "self", ".", "_mpfdp", "[", "self", ".", "_frnum", "]", "}", ")", "# update status", "self", ".", "_mpkit", ".", "pool", "-=", "1", "self", ".", "_mpkit", ".", "counter", "+=", "1", "# start and record", "proc", ".", "start", "(", ")", "self", ".", "_frnum", "+=", "1", "self", ".", "_mpprc", ".", "append", "(", "proc", ")", "# check buffer", "if", "len", "(", "self", ".", "_mpprc", ")", ">=", "CPU_CNT", ":", "[", "proc", ".", "join", "(", ")", "for", "proc", "in", "self", ".", "_mpprc", "[", ":", "-", "4", "]", "]", "del", "self", ".", "_mpprc", "[", ":", "-", "4", "]" ]
Use pipeline multiprocessing to extract PCAP files.
[ "Use", "pipeline", "multiprocessing", "to", "extract", "PCAP", "files", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L916-L977
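The pipeline engine above coordinates workers by handing each one the file offset where its frame starts, via one queue per frame, while shared bookkeeping lives in a Manager namespace. The sketch below strips that hand-off pattern down to its core, replacing frame parsing with a fixed-size read; the file name and record size are placeholders:

import multiprocessing
import os

RECORD_SIZE = 16  # stand-in for "length of the frame a worker just parsed"

def worker(path, offset_in, offset_out):
    """Read one record at the offset received, then publish the next offset."""
    offset = offset_in.get()
    with open(path, 'rb') as file:
        file.seek(offset, os.SEEK_SET)
        record = file.read(RECORD_SIZE)
    offset_out.put(offset + len(record))

if __name__ == '__main__':
    queues = [multiprocessing.Queue() for _ in range(3)]
    queues[0].put(0)   # like the global-header length seeding the first worker
    workers = [
        multiprocessing.Process(target=worker, args=('in.pcap', queues[i], queues[i + 1]))
        for i in range(2)
    ]
    for proc in workers:
        proc.start()
    for proc in workers:
        proc.join()
    print('next offset:', queues[2].get())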
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._pipeline_read_frame
def _pipeline_read_frame(self, *, mpfdp, mpkit): """Extract frame.""" # check EOF if self._flag_e: raise EOFError def _analyse_frame(*, frame, mpkit): """Analyse frame.""" # wait until ready while mpkit.current != self._frnum: time.sleep(random.randint(0, datetime.datetime.now().second) // 600) # analysis and storage # print(self._frnum, 'get') self._trace = mpkit.trace self._reasm = mpkit.reassembly self._default_read_frame(frame=frame, mpkit=mpkit) # print(self._frnum, 'analysed') mpkit.trace = copy.deepcopy(self._trace) mpkit.reassembly = copy.deepcopy(self._reasm) # print(self._frnum, 'put') # extract frame try: # extraction frame = Frame(self._ifile, num=self._frnum, proto=self._dlink, layer=self._exlyr, protocol=self._exptl, nanosecond=self._nnsec, mpkit=mpkit, mpfdp=mpfdp) # analysis _analyse_frame(frame=frame, mpkit=mpkit) except EOFError: mpkit.eof = True finally: mpkit.counter -= 1 self._ifile.close()
python
def _pipeline_read_frame(self, *, mpfdp, mpkit): """Extract frame.""" # check EOF if self._flag_e: raise EOFError def _analyse_frame(*, frame, mpkit): """Analyse frame.""" # wait until ready while mpkit.current != self._frnum: time.sleep(random.randint(0, datetime.datetime.now().second) // 600) # analysis and storage # print(self._frnum, 'get') self._trace = mpkit.trace self._reasm = mpkit.reassembly self._default_read_frame(frame=frame, mpkit=mpkit) # print(self._frnum, 'analysed') mpkit.trace = copy.deepcopy(self._trace) mpkit.reassembly = copy.deepcopy(self._reasm) # print(self._frnum, 'put') # extract frame try: # extraction frame = Frame(self._ifile, num=self._frnum, proto=self._dlink, layer=self._exlyr, protocol=self._exptl, nanosecond=self._nnsec, mpkit=mpkit, mpfdp=mpfdp) # analysis _analyse_frame(frame=frame, mpkit=mpkit) except EOFError: mpkit.eof = True finally: mpkit.counter -= 1 self._ifile.close()
[ "def", "_pipeline_read_frame", "(", "self", ",", "*", ",", "mpfdp", ",", "mpkit", ")", ":", "# check EOF", "if", "self", ".", "_flag_e", ":", "raise", "EOFError", "def", "_analyse_frame", "(", "*", ",", "frame", ",", "mpkit", ")", ":", "\"\"\"Analyse frame.\"\"\"", "# wait until ready", "while", "mpkit", ".", "current", "!=", "self", ".", "_frnum", ":", "time", ".", "sleep", "(", "random", ".", "randint", "(", "0", ",", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "second", ")", "//", "600", ")", "# analysis and storage", "# print(self._frnum, 'get')", "self", ".", "_trace", "=", "mpkit", ".", "trace", "self", ".", "_reasm", "=", "mpkit", ".", "reassembly", "self", ".", "_default_read_frame", "(", "frame", "=", "frame", ",", "mpkit", "=", "mpkit", ")", "# print(self._frnum, 'analysed')", "mpkit", ".", "trace", "=", "copy", ".", "deepcopy", "(", "self", ".", "_trace", ")", "mpkit", ".", "reassembly", "=", "copy", ".", "deepcopy", "(", "self", ".", "_reasm", ")", "# print(self._frnum, 'put')", "# extract frame", "try", ":", "# extraction", "frame", "=", "Frame", "(", "self", ".", "_ifile", ",", "num", "=", "self", ".", "_frnum", ",", "proto", "=", "self", ".", "_dlink", ",", "layer", "=", "self", ".", "_exlyr", ",", "protocol", "=", "self", ".", "_exptl", ",", "nanosecond", "=", "self", ".", "_nnsec", ",", "mpkit", "=", "mpkit", ",", "mpfdp", "=", "mpfdp", ")", "# analysis", "_analyse_frame", "(", "frame", "=", "frame", ",", "mpkit", "=", "mpkit", ")", "except", "EOFError", ":", "mpkit", ".", "eof", "=", "True", "finally", ":", "mpkit", ".", "counter", "-=", "1", "self", ".", "_ifile", ".", "close", "(", ")" ]
Extract frame.
[ "Extract", "frame", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L979-L1012
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._run_server
def _run_server(self, multiprocessing): """Use server multiprocessing to extract PCAP files.""" if not self._flag_m: raise UnsupportedCall(f"Extractor(engine={self._exeng})' has no attribute '_run_server'") if not self._flag_q: self._flag_q = True warnings.warn("'Extractor(engine=pipeline)' does not support output; " f"'fout={self._ofnm}' ignored", AttributeWarning, stacklevel=stacklevel()) self._frnum = 1 # frame number (revised) self._expkg = multiprocessing # multiprocessing module self._mpsvc = NotImplemented # multiprocessing server process self._mpprc = list() # multiprocessing process list self._mpfdp = collections.defaultdict(multiprocessing.Queue) # multiprocessing file pointer self._mpmng = multiprocessing.Manager() # multiprocessing manager self._mpbuf = self._mpmng.dict() # multiprocessing frame dict self._mpfrm = self._mpmng.list() # multiprocessing frame storage self._mprsm = self._mpmng.list() # multiprocessing reassembly buffer self._mpkit = self._mpmng.Namespace() # multiprocessing work kit self._mpkit.counter = 0 # work count (on duty) self._mpkit.pool = 1 # work pool (ready) self._mpkit.eof = False # EOF flag self._mpkit.trace = None # flow tracer # preparation self.record_header() self._mpfdp[0].put(self._gbhdr.length) self._mpsvc = multiprocessing.Process( target=self._server_analyse_frame, kwargs={'mpfrm': self._mpfrm, 'mprsm': self._mprsm, 'mpbuf': self._mpbuf, 'mpkit': self._mpkit} ) self._mpsvc.start() # extraction while True: # check EOF if self._mpkit.eof: self._update_eof() break # check counter if self._mpkit.pool and self._mpkit.counter < CPU_CNT - 1: # update file offset self._ifile.seek(self._mpfdp.pop(self._frnum-1).get(), os.SEEK_SET) # create worker # print(self._frnum, 'start') proc = multiprocessing.Process( target=self._server_extract_frame, kwargs={'mpkit': self._mpkit, 'mpbuf': self._mpbuf, 'mpfdp': self._mpfdp[self._frnum]} ) # update status self._mpkit.pool -= 1 self._mpkit.counter += 1 # start and record proc.start() self._frnum += 1 self._mpprc.append(proc) # check buffer if len(self._mpprc) >= CPU_CNT - 1: [proc.join() for proc in self._mpprc[:-4]] del self._mpprc[:-4]
python
def _run_server(self, multiprocessing): """Use server multiprocessing to extract PCAP files.""" if not self._flag_m: raise UnsupportedCall(f"Extractor(engine={self._exeng})' has no attribute '_run_server'") if not self._flag_q: self._flag_q = True warnings.warn("'Extractor(engine=pipeline)' does not support output; " f"'fout={self._ofnm}' ignored", AttributeWarning, stacklevel=stacklevel()) self._frnum = 1 # frame number (revised) self._expkg = multiprocessing # multiprocessing module self._mpsvc = NotImplemented # multiprocessing server process self._mpprc = list() # multiprocessing process list self._mpfdp = collections.defaultdict(multiprocessing.Queue) # multiprocessing file pointer self._mpmng = multiprocessing.Manager() # multiprocessing manager self._mpbuf = self._mpmng.dict() # multiprocessing frame dict self._mpfrm = self._mpmng.list() # multiprocessing frame storage self._mprsm = self._mpmng.list() # multiprocessing reassembly buffer self._mpkit = self._mpmng.Namespace() # multiprocessing work kit self._mpkit.counter = 0 # work count (on duty) self._mpkit.pool = 1 # work pool (ready) self._mpkit.eof = False # EOF flag self._mpkit.trace = None # flow tracer # preparation self.record_header() self._mpfdp[0].put(self._gbhdr.length) self._mpsvc = multiprocessing.Process( target=self._server_analyse_frame, kwargs={'mpfrm': self._mpfrm, 'mprsm': self._mprsm, 'mpbuf': self._mpbuf, 'mpkit': self._mpkit} ) self._mpsvc.start() # extraction while True: # check EOF if self._mpkit.eof: self._update_eof() break # check counter if self._mpkit.pool and self._mpkit.counter < CPU_CNT - 1: # update file offset self._ifile.seek(self._mpfdp.pop(self._frnum-1).get(), os.SEEK_SET) # create worker # print(self._frnum, 'start') proc = multiprocessing.Process( target=self._server_extract_frame, kwargs={'mpkit': self._mpkit, 'mpbuf': self._mpbuf, 'mpfdp': self._mpfdp[self._frnum]} ) # update status self._mpkit.pool -= 1 self._mpkit.counter += 1 # start and record proc.start() self._frnum += 1 self._mpprc.append(proc) # check buffer if len(self._mpprc) >= CPU_CNT - 1: [proc.join() for proc in self._mpprc[:-4]] del self._mpprc[:-4]
[ "def", "_run_server", "(", "self", ",", "multiprocessing", ")", ":", "if", "not", "self", ".", "_flag_m", ":", "raise", "UnsupportedCall", "(", "f\"Extractor(engine={self._exeng})' has no attribute '_run_server'\"", ")", "if", "not", "self", ".", "_flag_q", ":", "self", ".", "_flag_q", "=", "True", "warnings", ".", "warn", "(", "\"'Extractor(engine=pipeline)' does not support output; \"", "f\"'fout={self._ofnm}' ignored\"", ",", "AttributeWarning", ",", "stacklevel", "=", "stacklevel", "(", ")", ")", "self", ".", "_frnum", "=", "1", "# frame number (revised)", "self", ".", "_expkg", "=", "multiprocessing", "# multiprocessing module", "self", ".", "_mpsvc", "=", "NotImplemented", "# multiprocessing server process", "self", ".", "_mpprc", "=", "list", "(", ")", "# multiprocessing process list", "self", ".", "_mpfdp", "=", "collections", ".", "defaultdict", "(", "multiprocessing", ".", "Queue", ")", "# multiprocessing file pointer", "self", ".", "_mpmng", "=", "multiprocessing", ".", "Manager", "(", ")", "# multiprocessing manager", "self", ".", "_mpbuf", "=", "self", ".", "_mpmng", ".", "dict", "(", ")", "# multiprocessing frame dict", "self", ".", "_mpfrm", "=", "self", ".", "_mpmng", ".", "list", "(", ")", "# multiprocessing frame storage", "self", ".", "_mprsm", "=", "self", ".", "_mpmng", ".", "list", "(", ")", "# multiprocessing reassembly buffer", "self", ".", "_mpkit", "=", "self", ".", "_mpmng", ".", "Namespace", "(", ")", "# multiprocessing work kit", "self", ".", "_mpkit", ".", "counter", "=", "0", "# work count (on duty)", "self", ".", "_mpkit", ".", "pool", "=", "1", "# work pool (ready)", "self", ".", "_mpkit", ".", "eof", "=", "False", "# EOF flag", "self", ".", "_mpkit", ".", "trace", "=", "None", "# flow tracer", "# preparation", "self", ".", "record_header", "(", ")", "self", ".", "_mpfdp", "[", "0", "]", ".", "put", "(", "self", ".", "_gbhdr", ".", "length", ")", "self", ".", "_mpsvc", "=", "multiprocessing", ".", "Process", "(", "target", "=", "self", ".", "_server_analyse_frame", ",", "kwargs", "=", "{", "'mpfrm'", ":", "self", ".", "_mpfrm", ",", "'mprsm'", ":", "self", ".", "_mprsm", ",", "'mpbuf'", ":", "self", ".", "_mpbuf", ",", "'mpkit'", ":", "self", ".", "_mpkit", "}", ")", "self", ".", "_mpsvc", ".", "start", "(", ")", "# extraction", "while", "True", ":", "# check EOF", "if", "self", ".", "_mpkit", ".", "eof", ":", "self", ".", "_update_eof", "(", ")", "break", "# check counter", "if", "self", ".", "_mpkit", ".", "pool", "and", "self", ".", "_mpkit", ".", "counter", "<", "CPU_CNT", "-", "1", ":", "# update file offset", "self", ".", "_ifile", ".", "seek", "(", "self", ".", "_mpfdp", ".", "pop", "(", "self", ".", "_frnum", "-", "1", ")", ".", "get", "(", ")", ",", "os", ".", "SEEK_SET", ")", "# create worker", "# print(self._frnum, 'start')", "proc", "=", "multiprocessing", ".", "Process", "(", "target", "=", "self", ".", "_server_extract_frame", ",", "kwargs", "=", "{", "'mpkit'", ":", "self", ".", "_mpkit", ",", "'mpbuf'", ":", "self", ".", "_mpbuf", ",", "'mpfdp'", ":", "self", ".", "_mpfdp", "[", "self", ".", "_frnum", "]", "}", ")", "# update status", "self", ".", "_mpkit", ".", "pool", "-=", "1", "self", ".", "_mpkit", ".", "counter", "+=", "1", "# start and record", "proc", ".", "start", "(", ")", "self", ".", "_frnum", "+=", "1", "self", ".", "_mpprc", ".", "append", "(", "proc", ")", "# check buffer", "if", "len", "(", "self", ".", "_mpprc", ")", ">=", "CPU_CNT", "-", "1", ":", "[", "proc", ".", "join", "(", ")", "for", "proc", "in", "self", ".", 
"_mpprc", "[", ":", "-", "4", "]", "]", "del", "self", ".", "_mpprc", "[", ":", "-", "4", "]" ]
Use server multiprocessing to extract PCAP files.
[ "Use", "server", "multiprocessing", "to", "extract", "PCAP", "files", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L1015-L1082
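The server engine above splits the work differently: worker processes deposit parsed frames into a managed dict keyed by frame number, and a single consumer process pops them back out strictly in order. A stripped-down sketch of that producer/consumer shape, with frame parsing replaced by a string:

import multiprocessing

def producer(number, buffer):
    """Pretend to parse frame <number> and publish the result."""
    buffer[number] = f'frame #{number}'

def consumer(buffer, total, results):
    """Collect frames strictly in order, however they arrive."""
    current = 1
    while current <= total:
        frame = buffer.pop(current, None)
        if frame is None:
            continue          # frame not ready yet; keep polling
        results.append(frame)
        current += 1

if __name__ == '__main__':
    manager = multiprocessing.Manager()
    buffer, results, total = manager.dict(), manager.list(), 4
    server = multiprocessing.Process(target=consumer, args=(buffer, total, results))
    server.start()
    workers = [multiprocessing.Process(target=producer, args=(n, buffer)) for n in range(1, total + 1)]
    for proc in workers:
        proc.start()
    for proc in workers:
        proc.join()
    server.join()
    print(list(results))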
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._server_extract_frame
def _server_extract_frame(self, *, mpfdp, mpkit, mpbuf): """Extract frame.""" # check EOF if self._flag_e: raise EOFError # extract frame try: frame = Frame(self._ifile, num=self._frnum, proto=self._dlink, layer=self._exlyr, protocol=self._exptl, nanosecond=self._nnsec, mpkit=mpkit, mpfdp=mpfdp) # frame._file = NotImplemented mpbuf[self._frnum] = frame except EOFError: mpbuf[self._frnum] = EOFError mpkit.eof = True finally: mpkit.counter -= 1 self._ifile.close()
python
def _server_extract_frame(self, *, mpfdp, mpkit, mpbuf): """Extract frame.""" # check EOF if self._flag_e: raise EOFError # extract frame try: frame = Frame(self._ifile, num=self._frnum, proto=self._dlink, layer=self._exlyr, protocol=self._exptl, nanosecond=self._nnsec, mpkit=mpkit, mpfdp=mpfdp) # frame._file = NotImplemented mpbuf[self._frnum] = frame except EOFError: mpbuf[self._frnum] = EOFError mpkit.eof = True finally: mpkit.counter -= 1 self._ifile.close()
[ "def", "_server_extract_frame", "(", "self", ",", "*", ",", "mpfdp", ",", "mpkit", ",", "mpbuf", ")", ":", "# check EOF", "if", "self", ".", "_flag_e", ":", "raise", "EOFError", "# extract frame", "try", ":", "frame", "=", "Frame", "(", "self", ".", "_ifile", ",", "num", "=", "self", ".", "_frnum", ",", "proto", "=", "self", ".", "_dlink", ",", "layer", "=", "self", ".", "_exlyr", ",", "protocol", "=", "self", ".", "_exptl", ",", "nanosecond", "=", "self", ".", "_nnsec", ",", "mpkit", "=", "mpkit", ",", "mpfdp", "=", "mpfdp", ")", "# frame._file = NotImplemented", "mpbuf", "[", "self", ".", "_frnum", "]", "=", "frame", "except", "EOFError", ":", "mpbuf", "[", "self", ".", "_frnum", "]", "=", "EOFError", "mpkit", ".", "eof", "=", "True", "finally", ":", "mpkit", ".", "counter", "-=", "1", "self", ".", "_ifile", ".", "close", "(", ")" ]
Extract frame.
[ "Extract", "frame", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L1084-L1101
JarryShaw/PyPCAPKit
src/foundation/extraction.py
Extractor._server_analyse_frame
def _server_analyse_frame(self, *, mpkit, mpfrm, mprsm, mpbuf): """Analyse frame.""" while True: # fetch frame # print(self._frnum, 'trying') frame = mpbuf.pop(self._frnum, None) if frame is EOFError: break if frame is None: continue # print(self._frnum, 'get') self._default_read_frame(frame=frame) mpfrm += self._frame mprsm += self._reasm mpkit.trace = copy.deepcopy(self._trace)
python
def _server_analyse_frame(self, *, mpkit, mpfrm, mprsm, mpbuf): """Analyse frame.""" while True: # fetch frame # print(self._frnum, 'trying') frame = mpbuf.pop(self._frnum, None) if frame is EOFError: break if frame is None: continue # print(self._frnum, 'get') self._default_read_frame(frame=frame) mpfrm += self._frame mprsm += self._reasm mpkit.trace = copy.deepcopy(self._trace)
[ "def", "_server_analyse_frame", "(", "self", ",", "*", ",", "mpkit", ",", "mpfrm", ",", "mprsm", ",", "mpbuf", ")", ":", "while", "True", ":", "# fetch frame", "# print(self._frnum, 'trying')", "frame", "=", "mpbuf", ".", "pop", "(", "self", ".", "_frnum", ",", "None", ")", "if", "frame", "is", "EOFError", ":", "break", "if", "frame", "is", "None", ":", "continue", "# print(self._frnum, 'get')", "self", ".", "_default_read_frame", "(", "frame", "=", "frame", ")", "mpfrm", "+=", "self", ".", "_frame", "mprsm", "+=", "self", ".", "_reasm", "mpkit", ".", "trace", "=", "copy", ".", "deepcopy", "(", "self", ".", "_trace", ")" ]
Analyse frame.
[ "Analyse", "frame", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/extraction.py#L1104-L1119
JarryShaw/PyPCAPKit
src/const/tcp/checksum.py
Checksum.get
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return Checksum(key) if key not in Checksum._member_map_: extend_enum(Checksum, key, default) return Checksum[key]
python
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return Checksum(key) if key not in Checksum._member_map_: extend_enum(Checksum, key, default) return Checksum[key]
[ "def", "get", "(", "key", ",", "default", "=", "-", "1", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "return", "Checksum", "(", "key", ")", "if", "key", "not", "in", "Checksum", ".", "_member_map_", ":", "extend_enum", "(", "Checksum", ",", "key", ",", "default", ")", "return", "Checksum", "[", "key", "]" ]
Backport support for original codes.
[ "Backport", "support", "for", "original", "codes", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/const/tcp/checksum.py#L18-L24
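Checksum.get above follows a pattern repeated across these constant modules: return the member for a known key, otherwise register the key on the fly with aenum's extend_enum. The same idea, self-contained and with a made-up enumeration (not one of PyPCAPKit's):

from aenum import IntEnum, extend_enum

class Colour(IntEnum):
    RED = 0
    GREEN = 1

def get_colour(key, default=-1):
    """Look up a Colour by value or name, extending the enum for unknown names."""
    if isinstance(key, int):
        return Colour(key)
    if key not in Colour.__members__:
        extend_enum(Colour, key, default)
    return Colour[key]

print(get_colour(1))           # Colour.GREEN
print(get_colour('BLUE', 2))   # newly registered member Colour.BLUE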
JarryShaw/PyPCAPKit
src/const/arp/operation.py
Operation.get
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return Operation(key) if key not in Operation._member_map_: extend_enum(Operation, key, default) return Operation[key]
python
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return Operation(key) if key not in Operation._member_map_: extend_enum(Operation, key, default) return Operation[key]
[ "def", "get", "(", "key", ",", "default", "=", "-", "1", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "return", "Operation", "(", "key", ")", "if", "key", "not", "in", "Operation", ".", "_member_map_", ":", "extend_enum", "(", "Operation", ",", "key", ",", "default", ")", "return", "Operation", "[", "key", "]" ]
Backport support for original codes.
[ "Backport", "support", "for", "original", "codes", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/const/arp/operation.py#L41-L47
JarryShaw/PyPCAPKit
src/protocols/application/NotImplemented/httpv2.py
HTTPv2.read_http
def read_http(self, length): """Read Hypertext Transfer Protocol version 2. Structure of HTTP/2 packet [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +=+=============================================================+ | Frame Payload (0...) ... +---------------------------------------------------------------+ """ _plen = self._read_binary(3) _type = self._read_unpack(1) _flag = self._read_binary(1) _stid = self._read_binary(4)
python
def read_http(self, length): """Read Hypertext Transfer Protocol version 2. Structure of HTTP/2 packet [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +=+=============================================================+ | Frame Payload (0...) ... +---------------------------------------------------------------+ """ _plen = self._read_binary(3) _type = self._read_unpack(1) _flag = self._read_binary(1) _stid = self._read_binary(4)
[ "def", "read_http", "(", "self", ",", "length", ")", ":", "_plen", "=", "self", ".", "_read_binary", "(", "3", ")", "_type", "=", "self", ".", "_read_unpack", "(", "1", ")", "_flag", "=", "self", ".", "_read_binary", "(", "1", ")", "_stid", "=", "self", ".", "_read_binary", "(", "4", ")" ]
Read Hypertext Transfer Protocol version 2. Structure of HTTP/2 packet [RFC 7540]: +-----------------------------------------------+ | Length (24) | +---------------+---------------+---------------+ | Type (8) | Flags (8) | +-+-------------+---------------+-------------------------------+ |R| Stream Identifier (31) | +=+=============================================================+ | Frame Payload (0...) ... +---------------------------------------------------------------+
[ "Read", "Hypertext", "Transfer", "Protocol", "version", "2", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/application/NotImplemented/httpv2.py#L57-L75
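read_http above consumes the fixed 9-byte HTTP/2 frame header field by field with the protocol's own reader helpers. Decoding the same header from raw bytes with struct makes the bit layout explicit: a 24-bit length, an 8-bit type, an 8-bit flags octet, then one reserved bit and a 31-bit stream identifier. The sample header below is made up for illustration:

import struct

def parse_http2_frame_header(header):
    """Decode a 9-byte HTTP/2 frame header into its fields."""
    if len(header) != 9:
        raise ValueError('HTTP/2 frame header must be exactly 9 bytes')
    length_hi, length_lo, frame_type, flags, stream_raw = struct.unpack('!BHBBL', header)
    return {
        'length': (length_hi << 16) | length_lo,
        'type': frame_type,
        'flags': flags,
        'reserved': stream_raw >> 31,
        'stream_id': stream_raw & 0x7FFFFFFF,
    }

# 13-byte DATA frame with END_STREAM set, on stream 1
print(parse_http2_frame_header(bytes.fromhex('00000d000100000001')))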
JarryShaw/PyPCAPKit
src/foundation/analysis.py
analyse
def analyse(file, length=None, *, _termination=False): """Analyse application layer packets.""" seekset = file.tell() if not _termination: # FTP analysis flag, ftp = _analyse_ftp(file, length, seekset=seekset) if flag: return ftp # HTTP/1.* analysis flag, http = _analyse_httpv1(file, length, seekset=seekset) if flag: return http # NOTE: due to format similarity of HTTP/2 and TLS/SSL, HTTP/2 won't be analysed before TLS/SSL is implemented. # NB: the NOTE above is deprecated, since validations are performed # HTTP/2 analysis flag, http = _analyse_httpv2(file, length, seekset=seekset) if flag: return http # backup file offset file.seek(seekset, os.SEEK_SET) # raw packet analysis return Raw(file, length)
python
def analyse(file, length=None, *, _termination=False): """Analyse application layer packets.""" seekset = file.tell() if not _termination: # FTP analysis flag, ftp = _analyse_ftp(file, length, seekset=seekset) if flag: return ftp # HTTP/1.* analysis flag, http = _analyse_httpv1(file, length, seekset=seekset) if flag: return http # NOTE: due to format similarity of HTTP/2 and TLS/SSL, HTTP/2 won't be analysed before TLS/SSL is implemented. # NB: the NOTE above is deprecated, since validations are performed # HTTP/2 analysis flag, http = _analyse_httpv2(file, length, seekset=seekset) if flag: return http # backup file offset file.seek(seekset, os.SEEK_SET) # raw packet analysis return Raw(file, length)
[ "def", "analyse", "(", "file", ",", "length", "=", "None", ",", "*", ",", "_termination", "=", "False", ")", ":", "seekset", "=", "file", ".", "tell", "(", ")", "if", "not", "_termination", ":", "# FTP analysis", "flag", ",", "ftp", "=", "_analyse_ftp", "(", "file", ",", "length", ",", "seekset", "=", "seekset", ")", "if", "flag", ":", "return", "ftp", "# HTTP/1.* analysis", "flag", ",", "http", "=", "_analyse_httpv1", "(", "file", ",", "length", ",", "seekset", "=", "seekset", ")", "if", "flag", ":", "return", "http", "# NOTE: due to format similarity of HTTP/2 and TLS/SSL, HTTP/2 won't be analysed before TLS/SSL is implemented.", "# NB: the NOTE above is deprecated, since validations are performed", "# HTTP/2 analysis", "flag", ",", "http", "=", "_analyse_httpv2", "(", "file", ",", "length", ",", "seekset", "=", "seekset", ")", "if", "flag", ":", "return", "http", "# backup file offset", "file", ".", "seek", "(", "seekset", ",", "os", ".", "SEEK_SET", ")", "# raw packet analysis", "return", "Raw", "(", "file", ",", "length", ")" ]
Analyse application layer packets.
[ "Analyse", "application", "layer", "packets", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/analysis.py#L24-L50
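analyse above tries one application-layer analyser after another, rewinding the file offset between attempts and falling back to a raw record when nothing matches. Its control flow reduces to a generic "first analyser that claims the payload wins" loop, sketched here with stand-in callables rather than the real _analyse_* helpers:

import io
import os

def try_analysers(file, length, analysers, fallback):
    """Return the first successful analysis, restoring the offset before each try."""
    seekset = file.tell()
    for analyser in analysers:
        flag, report = analyser(file, length, seekset=seekset)
        if flag:
            return report
        file.seek(seekset, os.SEEK_SET)
    file.seek(seekset, os.SEEK_SET)
    return fallback(file, length)

def never_matches(file, length, *, seekset):
    return False, None

print(try_analysers(io.BytesIO(b'payload'), 7, [never_matches], lambda f, n: f.read(n)))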
JarryShaw/PyPCAPKit
src/const/ipv6/routing.py
Routing.get
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return Routing(key) if key not in Routing._member_map_: extend_enum(Routing, key, default) return Routing[key]
python
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return Routing(key) if key not in Routing._member_map_: extend_enum(Routing, key, default) return Routing[key]
[ "def", "get", "(", "key", ",", "default", "=", "-", "1", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "return", "Routing", "(", "key", ")", "if", "key", "not", "in", "Routing", ".", "_member_map_", ":", "extend_enum", "(", "Routing", ",", "key", ",", "default", ")", "return", "Routing", "[", "key", "]" ]
Backport support for original codes.
[ "Backport", "support", "for", "original", "codes", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/const/ipv6/routing.py#L21-L27
JarryShaw/PyPCAPKit
src/protocols/link/link.py
Link._read_protos
def _read_protos(self, size): """Read next layer protocol type. Positional arguments: * size -- int, buffer size Returns: * str -- next layer's protocol name """ _byte = self._read_unpack(size) _prot = ETHERTYPE.get(_byte) return _prot
python
def _read_protos(self, size): """Read next layer protocol type. Positional arguments: * size -- int, buffer size Returns: * str -- next layer's protocol name """ _byte = self._read_unpack(size) _prot = ETHERTYPE.get(_byte) return _prot
[ "def", "_read_protos", "(", "self", ",", "size", ")", ":", "_byte", "=", "self", ".", "_read_unpack", "(", "size", ")", "_prot", "=", "ETHERTYPE", ".", "get", "(", "_byte", ")", "return", "_prot" ]
Read next layer protocol type. Positional arguments: * size -- int, buffer size Returns: * str -- next layer's protocol name
[ "Read", "next", "layer", "protocol", "type", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/link/link.py#L65-L77
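_read_protos above reads a fixed-width integer and resolves it against the EtherType registry. Outside the protocol-class machinery, the same step is a two-byte network-order unpack plus a table lookup; only a few well-known EtherType values are shown here:

import io
import struct

ETHERTYPE_NAMES = {
    0x0800: 'IPv4',
    0x0806: 'ARP',
    0x8100: 'VLAN',
    0x86DD: 'IPv6',
}

def read_ethertype(file):
    """Read the 2-byte EtherType field and return a readable name."""
    (value,) = struct.unpack('!H', file.read(2))
    return ETHERTYPE_NAMES.get(value, f'Unknown (0x{value:04X})')

print(read_ethertype(io.BytesIO(bytes.fromhex('86DD'))))   # IPv6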
JarryShaw/PyPCAPKit
src/protocols/link/link.py
Link._import_next_layer
def _import_next_layer(self, proto, length): """Import next layer extractor. Positional arguments: * proto -- str, next layer protocol name * length -- int, valid (not padding) length Returns: * bool -- flag if extraction of next layer succeeded * Info -- info of next layer * ProtoChain -- protocol chain of next layer * str -- alias of next layer Protocols: * ARP -- data link layer * RARP -- data link layer * VLAN -- data link layer * IPv4 -- internet layer * IPv6 -- internet layer * IPX -- internet layer """ if length == 0: from pcapkit.protocols.null import NoPayload as Protocol elif self._sigterm: from pcapkit.protocols.raw import Raw as Protocol elif proto == 0x0806: from pcapkit.protocols.link.arp import ARP as Protocol elif proto == 0x8035: from pcapkit.protocols.link.rarp import RARP as Protocol elif proto == 0x8100: from pcapkit.protocols.link.vlan import VLAN as Protocol elif proto == 0x0800: from pcapkit.protocols.internet.ipv4 import IPv4 as Protocol elif proto == 0x86DD: from pcapkit.protocols.internet.ipv6 import IPv6 as Protocol elif proto == 0x8137: from pcapkit.protocols.internet.ipx import IPX as Protocol else: from pcapkit.protocols.raw import Raw as Protocol next_ = Protocol(self._file, length, error=self._onerror, layer=self._exlayer, protocol=self._exproto) return next_
python
def _import_next_layer(self, proto, length): """Import next layer extractor. Positional arguments: * proto -- str, next layer protocol name * length -- int, valid (not padding) length Returns: * bool -- flag if extraction of next layer succeeded * Info -- info of next layer * ProtoChain -- protocol chain of next layer * str -- alias of next layer Protocols: * ARP -- data link layer * RARP -- data link layer * VLAN -- data link layer * IPv4 -- internet layer * IPv6 -- internet layer * IPX -- internet layer """ if length == 0: from pcapkit.protocols.null import NoPayload as Protocol elif self._sigterm: from pcapkit.protocols.raw import Raw as Protocol elif proto == 0x0806: from pcapkit.protocols.link.arp import ARP as Protocol elif proto == 0x8035: from pcapkit.protocols.link.rarp import RARP as Protocol elif proto == 0x8100: from pcapkit.protocols.link.vlan import VLAN as Protocol elif proto == 0x0800: from pcapkit.protocols.internet.ipv4 import IPv4 as Protocol elif proto == 0x86DD: from pcapkit.protocols.internet.ipv6 import IPv6 as Protocol elif proto == 0x8137: from pcapkit.protocols.internet.ipx import IPX as Protocol else: from pcapkit.protocols.raw import Raw as Protocol next_ = Protocol(self._file, length, error=self._onerror, layer=self._exlayer, protocol=self._exproto) return next_
[ "def", "_import_next_layer", "(", "self", ",", "proto", ",", "length", ")", ":", "if", "length", "==", "0", ":", "from", "pcapkit", ".", "protocols", ".", "null", "import", "NoPayload", "as", "Protocol", "elif", "self", ".", "_sigterm", ":", "from", "pcapkit", ".", "protocols", ".", "raw", "import", "Raw", "as", "Protocol", "elif", "proto", "==", "0x0806", ":", "from", "pcapkit", ".", "protocols", ".", "link", ".", "arp", "import", "ARP", "as", "Protocol", "elif", "proto", "==", "0x8035", ":", "from", "pcapkit", ".", "protocols", ".", "link", ".", "rarp", "import", "RARP", "as", "Protocol", "elif", "proto", "==", "0x8100", ":", "from", "pcapkit", ".", "protocols", ".", "link", ".", "vlan", "import", "VLAN", "as", "Protocol", "elif", "proto", "==", "0x0800", ":", "from", "pcapkit", ".", "protocols", ".", "internet", ".", "ipv4", "import", "IPv4", "as", "Protocol", "elif", "proto", "==", "0x86DD", ":", "from", "pcapkit", ".", "protocols", ".", "internet", ".", "ipv6", "import", "IPv6", "as", "Protocol", "elif", "proto", "==", "0x8137", ":", "from", "pcapkit", ".", "protocols", ".", "internet", ".", "ipx", "import", "IPX", "as", "Protocol", "else", ":", "from", "pcapkit", ".", "protocols", ".", "raw", "import", "Raw", "as", "Protocol", "next_", "=", "Protocol", "(", "self", ".", "_file", ",", "length", ",", "error", "=", "self", ".", "_onerror", ",", "layer", "=", "self", ".", "_exlayer", ",", "protocol", "=", "self", ".", "_exproto", ")", "return", "next_" ]
Import next layer extractor. Positional arguments: * proto -- str, next layer protocol name * length -- int, valid (not padding) length Returns: * bool -- flag if extraction of next layer succeeded * Info -- info of next layer * ProtoChain -- protocol chain of next layer * str -- alias of next layer Protocols: * ARP -- data link layer * RARP -- data link layer * VLAN -- data link layer * IPv4 -- internet layer * IPv6 -- internet layer * IPX -- internet layer
[ "Import", "next", "layer", "extractor", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/link/link.py#L79-L121
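The branches above dispatch on the EtherType carried in proto; a minimal standalone sketch of the same lookup, using a plain dict of illustrative names rather than pcapkit's conditional imports (resolve_next_layer and ETHERTYPE_DISPATCH are hypothetical, not part of the library):

# hypothetical EtherType registry mirroring the branches above
ETHERTYPE_DISPATCH = {
    0x0806: 'ARP',
    0x8035: 'RARP',
    0x8100: 'VLAN',
    0x0800: 'IPv4',
    0x86DD: 'IPv6',
    0x8137: 'IPX',
}

def resolve_next_layer(proto, length):
    """Map an EtherType to a protocol name, falling back to 'Raw'."""
    if length == 0:
        return 'NoPayload'
    return ETHERTYPE_DISPATCH.get(proto, 'Raw')

assert resolve_next_layer(0x0800, 20) == 'IPv4'
assert resolve_next_layer(0xFFFF, 20) == 'Raw'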
JarryShaw/PyPCAPKit
src/const/hip/hit_suite.py
HIT_Suite.get
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return HIT_Suite(key) if key not in HIT_Suite._member_map_: extend_enum(HIT_Suite, key, default) return HIT_Suite[key]
python
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return HIT_Suite(key) if key not in HIT_Suite._member_map_: extend_enum(HIT_Suite, key, default) return HIT_Suite[key]
[ "def", "get", "(", "key", ",", "default", "=", "-", "1", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "return", "HIT_Suite", "(", "key", ")", "if", "key", "not", "in", "HIT_Suite", ".", "_member_map_", ":", "extend_enum", "(", "HIT_Suite", ",", "key", ",", "default", ")", "return", "HIT_Suite", "[", "key", "]" ]
Backport support for original codes.
[ "Backport", "support", "for", "original", "codes", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/const/hip/hit_suite.py#L18-L24
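The get() helper above follows a pattern that recurs across pcapkit's constant enumerations (see the Authentication, Suite and ECDSA_Curve records later in this section). A small self-contained sketch of that pattern on a toy enum, assuming the third-party aenum package (which provides extend_enum) is installed:

from aenum import IntEnum, extend_enum

class Toy(IntEnum):
    FOO = 1

def get(key, default=-1):
    if isinstance(key, int):
        return Toy(key)                  # numeric lookup by value
    if key not in Toy.__members__:
        extend_enum(Toy, key, default)   # register an unknown name on the fly
    return Toy[key]

assert get(1) is Toy.FOO
assert get('BAR') == -1                  # newly extended member carries the default value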
JarryShaw/PyPCAPKit
src/const/ospf/authentication.py
Authentication.get
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return Authentication(key) if key not in Authentication._member_map_: extend_enum(Authentication, key, default) return Authentication[key]
python
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return Authentication(key) if key not in Authentication._member_map_: extend_enum(Authentication, key, default) return Authentication[key]
[ "def", "get", "(", "key", ",", "default", "=", "-", "1", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "return", "Authentication", "(", "key", ")", "if", "key", "not", "in", "Authentication", ".", "_member_map_", ":", "extend_enum", "(", "Authentication", ",", "key", ",", "default", ")", "return", "Authentication", "[", "key", "]" ]
Backport support for original codes.
[ "Backport", "support", "for", "original", "codes", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/const/ospf/authentication.py#L18-L24
JarryShaw/PyPCAPKit
src/foundation/traceflow.py
TraceFlow.make_fout
def make_fout(fout='./tmp', fmt='pcap'): """Make root path for output. Positional arguments: * fout -- str, root path for output * fmt -- str, output format Returns: * output -- dumper of specified format """ if fmt == 'pcap': # output PCAP file from pcapkit.dumpkit import PCAP as output elif fmt == 'plist': # output PLIST file from dictdumper import PLIST as output elif fmt == 'json': # output JSON file from dictdumper import JSON as output elif fmt == 'tree': # output treeview text file from dictdumper import Tree as output fmt = 'txt' elif fmt == 'html': # output JavaScript file from dictdumper import JavaScript as output fmt = 'js' elif fmt == 'xml': # output XML file from dictdumper import XML as output else: # no output file from pcapkit.dumpkit import NotImplementedIO as output if fmt is not None: warnings.warn(f'Unsupported output format: {fmt}; disabled file output feature', FormatWarning, stacklevel=stacklevel()) return output, '' try: pathlib.Path(fout).mkdir(parents=True, exist_ok=True) except FileExistsError as error: if fmt is None: warnings.warn(error.strerror, FileWarning, stacklevel=stacklevel()) else: raise FileExists(*error.args) from None return output, fmt
python
def make_fout(fout='./tmp', fmt='pcap'): """Make root path for output. Positional arguments: * fout -- str, root path for output * fmt -- str, output format Returns: * output -- dumper of specified format """ if fmt == 'pcap': # output PCAP file from pcapkit.dumpkit import PCAP as output elif fmt == 'plist': # output PLIST file from dictdumper import PLIST as output elif fmt == 'json': # output JSON file from dictdumper import JSON as output elif fmt == 'tree': # output treeview text file from dictdumper import Tree as output fmt = 'txt' elif fmt == 'html': # output JavaScript file from dictdumper import JavaScript as output fmt = 'js' elif fmt == 'xml': # output XML file from dictdumper import XML as output else: # no output file from pcapkit.dumpkit import NotImplementedIO as output if fmt is not None: warnings.warn(f'Unsupported output format: {fmt}; disabled file output feature', FormatWarning, stacklevel=stacklevel()) return output, '' try: pathlib.Path(fout).mkdir(parents=True, exist_ok=True) except FileExistsError as error: if fmt is None: warnings.warn(error.strerror, FileWarning, stacklevel=stacklevel()) else: raise FileExists(*error.args) from None return output, fmt
[ "def", "make_fout", "(", "fout", "=", "'./tmp'", ",", "fmt", "=", "'pcap'", ")", ":", "if", "fmt", "==", "'pcap'", ":", "# output PCAP file", "from", "pcapkit", ".", "dumpkit", "import", "PCAP", "as", "output", "elif", "fmt", "==", "'plist'", ":", "# output PLIST file", "from", "dictdumper", "import", "PLIST", "as", "output", "elif", "fmt", "==", "'json'", ":", "# output JSON file", "from", "dictdumper", "import", "JSON", "as", "output", "elif", "fmt", "==", "'tree'", ":", "# output treeview text file", "from", "dictdumper", "import", "Tree", "as", "output", "fmt", "=", "'txt'", "elif", "fmt", "==", "'html'", ":", "# output JavaScript file", "from", "dictdumper", "import", "JavaScript", "as", "output", "fmt", "=", "'js'", "elif", "fmt", "==", "'xml'", ":", "# output XML file", "from", "dictdumper", "import", "XML", "as", "output", "else", ":", "# no output file", "from", "pcapkit", ".", "dumpkit", "import", "NotImplementedIO", "as", "output", "if", "fmt", "is", "not", "None", ":", "warnings", ".", "warn", "(", "f'Unsupported output format: {fmt}; disabled file output feature'", ",", "FormatWarning", ",", "stacklevel", "=", "stacklevel", "(", ")", ")", "return", "output", ",", "''", "try", ":", "pathlib", ".", "Path", "(", "fout", ")", ".", "mkdir", "(", "parents", "=", "True", ",", "exist_ok", "=", "True", ")", "except", "FileExistsError", "as", "error", ":", "if", "fmt", "is", "None", ":", "warnings", ".", "warn", "(", "error", ".", "strerror", ",", "FileWarning", ",", "stacklevel", "=", "stacklevel", "(", ")", ")", "else", ":", "raise", "FileExists", "(", "*", "error", ".", "args", ")", "from", "None", "return", "output", ",", "fmt" ]
Make root path for output. Positional arguments: * fout -- str, root path for output * fmt -- str, output format Returns: * output -- dumper of specified format
[ "Make", "root", "path", "for", "output", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/traceflow.py#L55-L95
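A possible call pattern for make_fout, assuming it is exposed as a static helper on TraceFlow (as its self-less signature suggests); the path and format below are placeholders:

from pcapkit.foundation.traceflow import TraceFlow

dumper_cls, ext = TraceFlow.make_fout(fout='./tmp', fmt='json')
print(dumper_cls, ext)   # dictdumper JSON dumper class and the normalised extension 'json'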
JarryShaw/PyPCAPKit
src/foundation/traceflow.py
TraceFlow.dump
def dump(self, packet): """Dump frame to output files. Positional arguments: * packet -- dict, a flow packet |-- (str) protocol -- data link type from global header |-- (int) index -- frame number |-- (Info) frame -- extracted frame info |-- (bool) syn -- TCP synchronise (SYN) flag |-- (bool) fin -- TCP finish (FIN) flag |-- (str) src -- source IP |-- (int) srcport -- TCP source port |-- (str) dst -- destination IP |-- (int) dstport -- TCP destination port |-- (numbers.Real) timestamp -- frame timestamp """ # fetch flow label output = self.trace(packet, _check=False, _output=True) # dump files output(packet['frame'], name=f"Frame {packet['index']}", byteorder=self._endian, nanosecond=self._nnsecd)
python
def dump(self, packet): """Dump frame to output files. Positional arguments: * packet -- dict, a flow packet |-- (str) protocol -- data link type from global header |-- (int) index -- frame number |-- (Info) frame -- extracted frame info |-- (bool) syn -- TCP synchronise (SYN) flag |-- (bool) fin -- TCP finish (FIN) flag |-- (str) src -- source IP |-- (int) srcport -- TCP source port |-- (str) dst -- destination IP |-- (int) dstport -- TCP destination port |-- (numbers.Real) timestamp -- frame timestamp """ # fetch flow label output = self.trace(packet, _check=False, _output=True) # dump files output(packet['frame'], name=f"Frame {packet['index']}", byteorder=self._endian, nanosecond=self._nnsecd)
[ "def", "dump", "(", "self", ",", "packet", ")", ":", "# fetch flow label", "output", "=", "self", ".", "trace", "(", "packet", ",", "_check", "=", "False", ",", "_output", "=", "True", ")", "# dump files", "output", "(", "packet", "[", "'frame'", "]", ",", "name", "=", "f\"Frame {packet['index']}\"", ",", "byteorder", "=", "self", ".", "_endian", ",", "nanosecond", "=", "self", ".", "_nnsecd", ")" ]
Dump frame to output files. Positional arguments: * packet -- dict, a flow packet |-- (str) protocol -- data link type from global header |-- (int) index -- frame number |-- (Info) frame -- extracted frame info |-- (bool) syn -- TCP synchronise (SYN) flag |-- (bool) fin -- TCP finish (FIN) flag |-- (str) src -- source IP |-- (int) srcport -- TCP source port |-- (str) dst -- destination IP |-- (int) dstport -- TCP destination port |-- (numbers.Real) timestamp -- frame timestamp
[ "Dump", "frame", "to", "output", "files", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/traceflow.py#L97-L119
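For reference, a sketch of the flow-packet dict that dump() (and trace() below) consume; the values are placeholders, the field names are taken from the docstring above:

flow_packet = dict(
    protocol='Ethernet',            # data link type from the global header
    index=1,                        # frame number
    frame={'eth': {}},              # extracted frame info
    syn=True, fin=False,            # TCP SYN / FIN flags
    src='192.0.2.1', srcport=20,    # source endpoint
    dst='192.0.2.2', dstport=80,    # destination endpoint
    timestamp=1554000000.0,         # frame timestamp
)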
JarryShaw/PyPCAPKit
src/foundation/traceflow.py
TraceFlow.trace
def trace(self, packet, *, _check=True, _output=False): """Trace packets. Positional arguments: * packet -- dict, a flow packet Keyword arguments: * _check -- bool, flag if run validations * _output -- bool, flag if has formatted dumper """ self._newflg = True if _check: pkt_check(packet) info = Info(packet) # Buffer Identifier BUFID = tuple(sorted([str(info.src), str(info.srcport), # pylint: disable=E1101 str(info.dst), str(info.dstport)])) # pylint: disable=E1101 # SYN = info.syn # Synchronise Flag (Establishment) # Finish Flag (Termination) FIN = info.fin # pylint: disable=E1101 # # when SYN is set, reset buffer of this seesion # if SYN and BUFID in self._buffer: # temp = self._buffer.pop(BUFID) # temp['fpout'] = (self._fproot, self._fdpext) # temp['index'] = tuple(temp['index']) # self._stream.append(Info(temp)) # initialise buffer with BUFID if BUFID not in self._buffer: label = f'{info.src}_{info.srcport}-{info.dst}_{info.dstport}-{info.timestamp}' # pylint: disable=E1101 self._buffer[BUFID] = dict( fpout=self._foutio(f'{self._fproot}/{label}.{self._fdpext}', protocol=info.protocol), # pylint: disable=E1101 index=list(), label=label, ) # trace frame record self._buffer[BUFID]['index'].append(info.index) # pylint: disable=E1101 fpout = self._buffer[BUFID]['fpout'] label = self._buffer[BUFID]['label'] # when FIN is set, submit buffer of this session if FIN: buf = self._buffer.pop(BUFID) # fpout, label = buf['fpout'], buf['label'] if self._fdpext: buf['fpout'] = f'{self._fproot}/{label}.{self._fdpext}' else: del buf['fpout'] buf['index'] = tuple(buf['index']) self._stream.append(Info(buf)) # return label or output object return fpout if _output else label
python
def trace(self, packet, *, _check=True, _output=False): """Trace packets. Positional arguments: * packet -- dict, a flow packet Keyword arguments: * _check -- bool, flag if run validations * _output -- bool, flag if has formatted dumper """ self._newflg = True if _check: pkt_check(packet) info = Info(packet) # Buffer Identifier BUFID = tuple(sorted([str(info.src), str(info.srcport), # pylint: disable=E1101 str(info.dst), str(info.dstport)])) # pylint: disable=E1101 # SYN = info.syn # Synchronise Flag (Establishment) # Finish Flag (Termination) FIN = info.fin # pylint: disable=E1101 # # when SYN is set, reset buffer of this seesion # if SYN and BUFID in self._buffer: # temp = self._buffer.pop(BUFID) # temp['fpout'] = (self._fproot, self._fdpext) # temp['index'] = tuple(temp['index']) # self._stream.append(Info(temp)) # initialise buffer with BUFID if BUFID not in self._buffer: label = f'{info.src}_{info.srcport}-{info.dst}_{info.dstport}-{info.timestamp}' # pylint: disable=E1101 self._buffer[BUFID] = dict( fpout=self._foutio(f'{self._fproot}/{label}.{self._fdpext}', protocol=info.protocol), # pylint: disable=E1101 index=list(), label=label, ) # trace frame record self._buffer[BUFID]['index'].append(info.index) # pylint: disable=E1101 fpout = self._buffer[BUFID]['fpout'] label = self._buffer[BUFID]['label'] # when FIN is set, submit buffer of this session if FIN: buf = self._buffer.pop(BUFID) # fpout, label = buf['fpout'], buf['label'] if self._fdpext: buf['fpout'] = f'{self._fproot}/{label}.{self._fdpext}' else: del buf['fpout'] buf['index'] = tuple(buf['index']) self._stream.append(Info(buf)) # return label or output object return fpout if _output else label
[ "def", "trace", "(", "self", ",", "packet", ",", "*", ",", "_check", "=", "True", ",", "_output", "=", "False", ")", ":", "self", ".", "_newflg", "=", "True", "if", "_check", ":", "pkt_check", "(", "packet", ")", "info", "=", "Info", "(", "packet", ")", "# Buffer Identifier", "BUFID", "=", "tuple", "(", "sorted", "(", "[", "str", "(", "info", ".", "src", ")", ",", "str", "(", "info", ".", "srcport", ")", ",", "# pylint: disable=E1101", "str", "(", "info", ".", "dst", ")", ",", "str", "(", "info", ".", "dstport", ")", "]", ")", ")", "# pylint: disable=E1101", "# SYN = info.syn # Synchronise Flag (Establishment)", "# Finish Flag (Termination)", "FIN", "=", "info", ".", "fin", "# pylint: disable=E1101", "# # when SYN is set, reset buffer of this seesion", "# if SYN and BUFID in self._buffer:", "# temp = self._buffer.pop(BUFID)", "# temp['fpout'] = (self._fproot, self._fdpext)", "# temp['index'] = tuple(temp['index'])", "# self._stream.append(Info(temp))", "# initialise buffer with BUFID", "if", "BUFID", "not", "in", "self", ".", "_buffer", ":", "label", "=", "f'{info.src}_{info.srcport}-{info.dst}_{info.dstport}-{info.timestamp}'", "# pylint: disable=E1101", "self", ".", "_buffer", "[", "BUFID", "]", "=", "dict", "(", "fpout", "=", "self", ".", "_foutio", "(", "f'{self._fproot}/{label}.{self._fdpext}'", ",", "protocol", "=", "info", ".", "protocol", ")", ",", "# pylint: disable=E1101", "index", "=", "list", "(", ")", ",", "label", "=", "label", ",", ")", "# trace frame record", "self", ".", "_buffer", "[", "BUFID", "]", "[", "'index'", "]", ".", "append", "(", "info", ".", "index", ")", "# pylint: disable=E1101", "fpout", "=", "self", ".", "_buffer", "[", "BUFID", "]", "[", "'fpout'", "]", "label", "=", "self", ".", "_buffer", "[", "BUFID", "]", "[", "'label'", "]", "# when FIN is set, submit buffer of this session", "if", "FIN", ":", "buf", "=", "self", ".", "_buffer", ".", "pop", "(", "BUFID", ")", "# fpout, label = buf['fpout'], buf['label']", "if", "self", ".", "_fdpext", ":", "buf", "[", "'fpout'", "]", "=", "f'{self._fproot}/{label}.{self._fdpext}'", "else", ":", "del", "buf", "[", "'fpout'", "]", "buf", "[", "'index'", "]", "=", "tuple", "(", "buf", "[", "'index'", "]", ")", "self", ".", "_stream", ".", "append", "(", "Info", "(", "buf", ")", ")", "# return label or output object", "return", "fpout", "if", "_output", "else", "label" ]
Trace packets. Positional arguments: * packet -- dict, a flow packet Keyword arguments: * _check -- bool, flag if run validations * _output -- bool, flag if has formatted dumper
[ "Trace", "packets", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/traceflow.py#L121-L178
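One detail worth spelling out: sorting the four endpoint strings makes the buffer identifier direction-independent, so both halves of a TCP conversation land in the same buffer. A tiny runnable check of that property:

a = tuple(sorted(['192.0.2.1', '20', '192.0.2.2', '80']))
b = tuple(sorted(['192.0.2.2', '80', '192.0.2.1', '20']))
assert a == b   # same BUFID for either direction of the connection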
JarryShaw/PyPCAPKit
src/foundation/traceflow.py
TraceFlow.submit
def submit(self): """Submit traced TCP flows.""" self._newflg = False ret = list() for buf in self._buffer.values(): buf = copy.deepcopy(buf) if self._fdpext: buf['fpout'] = f"{self._fproot}/{buf['label']}.{self._fdpext}" else: del buf['fpout'] buf['index'] = tuple(buf['index']) ret.append(Info(buf)) ret += self._stream return tuple(ret)
python
def submit(self): """Submit traced TCP flows.""" self._newflg = False ret = list() for buf in self._buffer.values(): buf = copy.deepcopy(buf) if self._fdpext: buf['fpout'] = f"{self._fproot}/{buf['label']}.{self._fdpext}" else: del buf['fpout'] buf['index'] = tuple(buf['index']) ret.append(Info(buf)) ret += self._stream return tuple(ret)
[ "def", "submit", "(", "self", ")", ":", "self", ".", "_newflg", "=", "False", "ret", "=", "list", "(", ")", "for", "buf", "in", "self", ".", "_buffer", ".", "values", "(", ")", ":", "buf", "=", "copy", ".", "deepcopy", "(", "buf", ")", "if", "self", ".", "_fdpext", ":", "buf", "[", "'fpout'", "]", "=", "f\"{self._fproot}/{buf['label']}.{self._fdpext}\"", "else", ":", "del", "buf", "[", "'fpout'", "]", "buf", "[", "'index'", "]", "=", "tuple", "(", "buf", "[", "'index'", "]", ")", "ret", ".", "append", "(", "Info", "(", "buf", ")", ")", "ret", "+=", "self", ".", "_stream", "return", "tuple", "(", "ret", ")" ]
Submit traced TCP flows.
[ "Submit", "traced", "TCP", "flows", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/foundation/traceflow.py#L180-L193
JarryShaw/PyPCAPKit
src/protocols/internet/ah.py
AH.read_ah
def read_ah(self, length, version, extension): """Read Authentication Header. Structure of AH header [RFC 4302]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Next Header | Payload Len | RESERVED | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Security Parameters Index (SPI) | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Sequence Number Field | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | | + Integrity Check Value-ICV (variable) | | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 ah.next Next Header 1 8 ah.length Payload Length 2 16 - Reserved (must be zero) 4 32 ah.spi Security Parameters Index (SPI) 8 64 ah.seq Sequence Number Field 12 96 ah.icv Integrity Check Value (ICV) """ if length is None: length = len(self) _next = self._read_protos(1) _plen = self._read_unpack(1) _resv = self._read_fileng(2) _scpi = self._read_unpack(4) _dsnf = self._read_unpack(4) # ICV length & value _tlen = _plen * 4 - 2 _vlen = _tlen - 12 _chkv = self._read_fileng(_vlen) ah = dict( next=_next, length=_tlen, spi=_scpi, seq=_dsnf, icv=_chkv, ) if version == 6: _plen = 8 - (_tlen % 8) elif version == 4: _plen = 4 - (_tlen % 4) else: raise VersionError(f'Unknown IP version {version}') if _plen: # explicit padding in need padding = self._read_binary(_plen) if any((int(bit, base=2) for bit in padding)): raise ProtocolError(f'{self.alias}: invalid format') length -= ah['length'] ah['packet'] = self._read_packet(header=ah['length'], payload=length) if extension: self._protos = None return ah return self._decode_next_layer(ah, _next, length)
python
def read_ah(self, length, version, extension): """Read Authentication Header. Structure of AH header [RFC 4302]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Next Header | Payload Len | RESERVED | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Security Parameters Index (SPI) | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Sequence Number Field | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | | + Integrity Check Value-ICV (variable) | | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 ah.next Next Header 1 8 ah.length Payload Length 2 16 - Reserved (must be zero) 4 32 ah.spi Security Parameters Index (SPI) 8 64 ah.seq Sequence Number Field 12 96 ah.icv Integrity Check Value (ICV) """ if length is None: length = len(self) _next = self._read_protos(1) _plen = self._read_unpack(1) _resv = self._read_fileng(2) _scpi = self._read_unpack(4) _dsnf = self._read_unpack(4) # ICV length & value _tlen = _plen * 4 - 2 _vlen = _tlen - 12 _chkv = self._read_fileng(_vlen) ah = dict( next=_next, length=_tlen, spi=_scpi, seq=_dsnf, icv=_chkv, ) if version == 6: _plen = 8 - (_tlen % 8) elif version == 4: _plen = 4 - (_tlen % 4) else: raise VersionError(f'Unknown IP version {version}') if _plen: # explicit padding in need padding = self._read_binary(_plen) if any((int(bit, base=2) for bit in padding)): raise ProtocolError(f'{self.alias}: invalid format') length -= ah['length'] ah['packet'] = self._read_packet(header=ah['length'], payload=length) if extension: self._protos = None return ah return self._decode_next_layer(ah, _next, length)
[ "def", "read_ah", "(", "self", ",", "length", ",", "version", ",", "extension", ")", ":", "if", "length", "is", "None", ":", "length", "=", "len", "(", "self", ")", "_next", "=", "self", ".", "_read_protos", "(", "1", ")", "_plen", "=", "self", ".", "_read_unpack", "(", "1", ")", "_resv", "=", "self", ".", "_read_fileng", "(", "2", ")", "_scpi", "=", "self", ".", "_read_unpack", "(", "4", ")", "_dsnf", "=", "self", ".", "_read_unpack", "(", "4", ")", "# ICV length & value", "_tlen", "=", "_plen", "*", "4", "-", "2", "_vlen", "=", "_tlen", "-", "12", "_chkv", "=", "self", ".", "_read_fileng", "(", "_vlen", ")", "ah", "=", "dict", "(", "next", "=", "_next", ",", "length", "=", "_tlen", ",", "spi", "=", "_scpi", ",", "seq", "=", "_dsnf", ",", "icv", "=", "_chkv", ",", ")", "if", "version", "==", "6", ":", "_plen", "=", "8", "-", "(", "_tlen", "%", "8", ")", "elif", "version", "==", "4", ":", "_plen", "=", "4", "-", "(", "_tlen", "%", "4", ")", "else", ":", "raise", "VersionError", "(", "f'Unknown IP version {version}'", ")", "if", "_plen", ":", "# explicit padding in need", "padding", "=", "self", ".", "_read_binary", "(", "_plen", ")", "if", "any", "(", "(", "int", "(", "bit", ",", "base", "=", "2", ")", "for", "bit", "in", "padding", ")", ")", ":", "raise", "ProtocolError", "(", "f'{self.alias}: invalid format'", ")", "length", "-=", "ah", "[", "'length'", "]", "ah", "[", "'packet'", "]", "=", "self", ".", "_read_packet", "(", "header", "=", "ah", "[", "'length'", "]", ",", "payload", "=", "length", ")", "if", "extension", ":", "self", ".", "_protos", "=", "None", "return", "ah", "return", "self", ".", "_decode_next_layer", "(", "ah", ",", "_next", ",", "length", ")" ]
Read Authentication Header. Structure of AH header [RFC 4302]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Next Header | Payload Len | RESERVED | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Security Parameters Index (SPI) | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Sequence Number Field | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | | + Integrity Check Value-ICV (variable) | | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 ah.next Next Header 1 8 ah.length Payload Length 2 16 - Reserved (must be zero) 4 32 ah.spi Security Parameters Index (SPI) 8 64 ah.seq Sequence Number Field 12 96 ah.icv Integrity Check Value (ICV)
[ "Read", "Authentication", "Header", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/ah.py#L91-L158
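The length arithmetic performed by read_ah, spelled out on an illustrative Payload Len value (the numbers are examples, not taken from a real capture):

plen = 4                       # value of the Payload Len octet
tlen = plen * 4 - 2            # length recorded in ah['length']
vlen = tlen - 12               # ICV bytes after the fixed 12-byte header
pad4 = 4 - (tlen % 4)          # trailing padding consumed for IPv4
pad8 = 8 - (tlen % 8)          # trailing padding consumed for IPv6
print(tlen, vlen, pad4, pad8)  # 14 2 2 2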
JarryShaw/PyPCAPKit
src/protocols/link/ethernet.py
Ethernet.read_ethernet
def read_ethernet(self, length): """Read Ethernet Protocol. Structure of Ethernet Protocol header [RFC 7042]: Octets Bits Name Description 0 0 eth.dst Destination MAC Address 1 8 eth.src Source MAC Address 2 16 eth.type Protocol (Internet Layer) """ if length is None: length = len(self) _dstm = self._read_mac_addr() _srcm = self._read_mac_addr() _type = self._read_protos(2) ethernet = dict( dst=_dstm, src=_srcm, type=_type, ) length -= 14 ethernet['packet'] = self._read_packet(header=14, payload=length) return self._decode_next_layer(ethernet, _type, length)
python
def read_ethernet(self, length): """Read Ethernet Protocol. Structure of Ethernet Protocol header [RFC 7042]: Octets Bits Name Description 0 0 eth.dst Destination MAC Address 1 8 eth.src Source MAC Address 2 16 eth.type Protocol (Internet Layer) """ if length is None: length = len(self) _dstm = self._read_mac_addr() _srcm = self._read_mac_addr() _type = self._read_protos(2) ethernet = dict( dst=_dstm, src=_srcm, type=_type, ) length -= 14 ethernet['packet'] = self._read_packet(header=14, payload=length) return self._decode_next_layer(ethernet, _type, length)
[ "def", "read_ethernet", "(", "self", ",", "length", ")", ":", "if", "length", "is", "None", ":", "length", "=", "len", "(", "self", ")", "_dstm", "=", "self", ".", "_read_mac_addr", "(", ")", "_srcm", "=", "self", ".", "_read_mac_addr", "(", ")", "_type", "=", "self", ".", "_read_protos", "(", "2", ")", "ethernet", "=", "dict", "(", "dst", "=", "_dstm", ",", "src", "=", "_srcm", ",", "type", "=", "_type", ",", ")", "length", "-=", "14", "ethernet", "[", "'packet'", "]", "=", "self", ".", "_read_packet", "(", "header", "=", "14", ",", "payload", "=", "length", ")", "return", "self", ".", "_decode_next_layer", "(", "ethernet", ",", "_type", ",", "length", ")" ]
Read Ethernet Protocol. Structure of Ethernet Protocol header [RFC 7042]: Octets Bits Name Description 0 0 eth.dst Destination MAC Address 1 8 eth.src Source MAC Address 2 16 eth.type Protocol (Internet Layer)
[ "Read", "Ethernet", "Protocol", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/link/ethernet.py#L92-L118
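A standalone sketch of the same 14-byte Ethernet header layout, using struct on a literal frame instead of the protocol's file-reading helpers:

import struct

frame = bytes.fromhex('001122334455'   # destination MAC
                      '66778899aabb'   # source MAC
                      '0800')          # EtherType: IPv4
dst, src, etype = struct.unpack('!6s6sH', frame)
assert etype == 0x0800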
JarryShaw/PyPCAPKit
src/protocols/link/ethernet.py
Ethernet._read_mac_addr
def _read_mac_addr(self): """Read MAC address.""" _byte = self._read_fileng(6) _addr = '-'.join(textwrap.wrap(_byte.hex(), 2)) return _addr
python
def _read_mac_addr(self): """Read MAC address.""" _byte = self._read_fileng(6) _addr = '-'.join(textwrap.wrap(_byte.hex(), 2)) return _addr
[ "def", "_read_mac_addr", "(", "self", ")", ":", "_byte", "=", "self", ".", "_read_fileng", "(", "6", ")", "_addr", "=", "'-'", ".", "join", "(", "textwrap", ".", "wrap", "(", "_byte", ".", "hex", "(", ")", ",", "2", ")", ")", "return", "_addr" ]
Read MAC address.
[ "Read", "MAC", "address", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/link/ethernet.py#L135-L139
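The same hex-and-hyphen formatting as _read_mac_addr, applied to a literal six-byte address:

import textwrap

raw = bytes.fromhex('001122334455')
mac = '-'.join(textwrap.wrap(raw.hex(), 2))
assert mac == '00-11-22-33-44-55'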
JarryShaw/PyPCAPKit
src/const/hip/suite.py
Suite.get
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return Suite(key) if key not in Suite._member_map_: extend_enum(Suite, key, default) return Suite[key]
python
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return Suite(key) if key not in Suite._member_map_: extend_enum(Suite, key, default) return Suite[key]
[ "def", "get", "(", "key", ",", "default", "=", "-", "1", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "return", "Suite", "(", "key", ")", "if", "key", "not", "in", "Suite", ".", "_member_map_", ":", "extend_enum", "(", "Suite", ",", "key", ",", "default", ")", "return", "Suite", "[", "key", "]" ]
Backport support for original codes.
[ "Backport", "support", "for", "original", "codes", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/const/hip/suite.py#L21-L27
JarryShaw/PyPCAPKit
src/const/hip/ecdsa_curve.py
ECDSA_Curve.get
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return ECDSA_Curve(key) if key not in ECDSA_Curve._member_map_: extend_enum(ECDSA_Curve, key, default) return ECDSA_Curve[key]
python
def get(key, default=-1): """Backport support for original codes.""" if isinstance(key, int): return ECDSA_Curve(key) if key not in ECDSA_Curve._member_map_: extend_enum(ECDSA_Curve, key, default) return ECDSA_Curve[key]
[ "def", "get", "(", "key", ",", "default", "=", "-", "1", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "return", "ECDSA_Curve", "(", "key", ")", "if", "key", "not", "in", "ECDSA_Curve", ".", "_member_map_", ":", "extend_enum", "(", "ECDSA_Curve", ",", "key", ",", "default", ")", "return", "ECDSA_Curve", "[", "key", "]" ]
Backport support for original codes.
[ "Backport", "support", "for", "original", "codes", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/const/hip/ecdsa_curve.py#L17-L23
JarryShaw/PyPCAPKit
src/toolkit/pyshark.py
packet2dict
def packet2dict(packet): """Convert PyShark packet into dict.""" dict_ = dict() frame = packet.frame_info for field in frame.field_names: dict_[field] = getattr(frame, field) tempdict = dict_ for layer in packet.layers: tempdict[layer.layer_name.upper()] = dict() tempdict = tempdict[layer.layer_name.upper()] for field in layer.field_names: tempdict[field] = getattr(layer, field) return dict_
python
def packet2dict(packet): """Convert PyShark packet into dict.""" dict_ = dict() frame = packet.frame_info for field in frame.field_names: dict_[field] = getattr(frame, field) tempdict = dict_ for layer in packet.layers: tempdict[layer.layer_name.upper()] = dict() tempdict = tempdict[layer.layer_name.upper()] for field in layer.field_names: tempdict[field] = getattr(layer, field) return dict_
[ "def", "packet2dict", "(", "packet", ")", ":", "dict_", "=", "dict", "(", ")", "frame", "=", "packet", ".", "frame_info", "for", "field", "in", "frame", ".", "field_names", ":", "dict_", "[", "field", "]", "=", "getattr", "(", "frame", ",", "field", ")", "tempdict", "=", "dict_", "for", "layer", "in", "packet", ".", "layers", ":", "tempdict", "[", "layer", ".", "layer_name", ".", "upper", "(", ")", "]", "=", "dict", "(", ")", "tempdict", "=", "tempdict", "[", "layer", ".", "layer_name", ".", "upper", "(", ")", "]", "for", "field", "in", "layer", ".", "field_names", ":", "tempdict", "[", "field", "]", "=", "getattr", "(", "layer", ",", "field", ")", "return", "dict_" ]
Convert PyShark packet into dict.
[ "Convert", "PyShark", "packet", "into", "dict", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/toolkit/pyshark.py#L17-L31
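Hypothetical usage of packet2dict, assuming pyshark (and the tshark binary it drives) is installed and sample.pcap is any capture file at hand:

import pyshark
from pcapkit.toolkit.pyshark import packet2dict

cap = pyshark.FileCapture('sample.pcap')
first = next(iter(cap))
print(packet2dict(first))   # frame_info fields plus one nested dict per layer
cap.close()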
JarryShaw/PyPCAPKit
src/toolkit/pyshark.py
tcp_traceflow
def tcp_traceflow(packet): """Trace packet flow for TCP.""" if 'TCP' in packet: ip = packet.ip if 'IP' in packet else packet.ipv6 tcp = packet.tcp data = dict( protocol=LINKTYPE.get(packet.layers[0].layer_name.upper()), # data link type from global header index=int(packet.number), # frame number frame=packet2dict(packet), # extracted packet syn=bool(int(tcp.flags_syn)), # TCP synchronise (SYN) flag fin=bool(int(tcp.flags_fin)), # TCP finish (FIN) flag src=ipaddress.ip_address(ip.src), # source IP dst=ipaddress.ip_address(ip.dst), # destination IP srcport=int(tcp.srcport), # TCP source port dstport=int(tcp.dstport), # TCP destination port timestamp=packet.frame_info.time_epoch, # timestamp ) return True, data return False, None
python
def tcp_traceflow(packet): """Trace packet flow for TCP.""" if 'TCP' in packet: ip = packet.ip if 'IP' in packet else packet.ipv6 tcp = packet.tcp data = dict( protocol=LINKTYPE.get(packet.layers[0].layer_name.upper()), # data link type from global header index=int(packet.number), # frame number frame=packet2dict(packet), # extracted packet syn=bool(int(tcp.flags_syn)), # TCP synchronise (SYN) flag fin=bool(int(tcp.flags_fin)), # TCP finish (FIN) flag src=ipaddress.ip_address(ip.src), # source IP dst=ipaddress.ip_address(ip.dst), # destination IP srcport=int(tcp.srcport), # TCP source port dstport=int(tcp.dstport), # TCP destination port timestamp=packet.frame_info.time_epoch, # timestamp ) return True, data return False, None
[ "def", "tcp_traceflow", "(", "packet", ")", ":", "if", "'TCP'", "in", "packet", ":", "ip", "=", "packet", ".", "ip", "if", "'IP'", "in", "packet", "else", "packet", ".", "ipv6", "tcp", "=", "packet", ".", "tcp", "data", "=", "dict", "(", "protocol", "=", "LINKTYPE", ".", "get", "(", "packet", ".", "layers", "[", "0", "]", ".", "layer_name", ".", "upper", "(", ")", ")", ",", "# data link type from global header", "index", "=", "int", "(", "packet", ".", "number", ")", ",", "# frame number", "frame", "=", "packet2dict", "(", "packet", ")", ",", "# extracted packet", "syn", "=", "bool", "(", "int", "(", "tcp", ".", "flags_syn", ")", ")", ",", "# TCP synchronise (SYN) flag", "fin", "=", "bool", "(", "int", "(", "tcp", ".", "flags_fin", ")", ")", ",", "# TCP finish (FIN) flag", "src", "=", "ipaddress", ".", "ip_address", "(", "ip", ".", "src", ")", ",", "# source IP", "dst", "=", "ipaddress", ".", "ip_address", "(", "ip", ".", "dst", ")", ",", "# destination IP", "srcport", "=", "int", "(", "tcp", ".", "srcport", ")", ",", "# TCP source port", "dstport", "=", "int", "(", "tcp", ".", "dstport", ")", ",", "# TCP destination port", "timestamp", "=", "packet", ".", "frame_info", ".", "time_epoch", ",", "# timestamp", ")", "return", "True", ",", "data", "return", "False", ",", "None" ]
Trace packet flow for TCP.
[ "Trace", "packet", "flow", "for", "TCP", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/toolkit/pyshark.py#L34-L52
JarryShaw/PyPCAPKit
src/reassembly/tcp.py
TCP_Reassembly.reassembly
def reassembly(self, info): """Reassembly procedure. Positional arguments: * info -- Info, info dict of packets to be reassembled """ BUFID = info.bufid # Buffer Identifier DSN = info.dsn # Data Sequence Number ACK = info.ack # Acknowledgement Number FIN = info.fin # Finish Flag (Termination) RST = info.rst # Reset Connection Flag (Termination) SYN = info.syn # Synchronise Flag (Establishment) # when SYN is set, reset buffer of this session if SYN and BUFID in self._buffer: self._dtgram += self.submit(self._buffer[BUFID], bufid=BUFID) del self._buffer[BUFID] # initialise buffer with BUFID & ACK if BUFID not in self._buffer: self._buffer[BUFID] = { 'hdl': [Info(first=info.len, last=sys.maxsize)], ACK: dict( ind=[info.num], isn=info.dsn, len=info.len, raw=info.payload, ), } else: # initialise buffer with ACK if ACK not in self._buffer[BUFID]: self._buffer[BUFID][ACK] = dict( ind=[info.num], isn=info.dsn, len=info.len, raw=info.payload, ) else: # append packet index self._buffer[BUFID][ACK]['ind'].append(info.num) # record fragment payload ISN = self._buffer[BUFID][ACK]['isn'] # Initial Sequence Number RAW = self._buffer[BUFID][ACK]['raw'] # Raw Payload Data if DSN >= ISN: # if fragment goes after existing payload LEN = self._buffer[BUFID][ACK]['len'] GAP = DSN - (ISN + LEN) # gap length between payloads if GAP >= 0: # if fragment goes after existing payload RAW += bytearray(GAP) + info.payload else: # if fragment partially overlaps existing payload RAW[DSN-ISN:] = info.payload else: # if fragment exceeds existing payload LEN = info.len GAP = ISN - (DSN + LEN) # gap length between payloads self._buffer[BUFID][ACK]['isn'] = DSN if GAP >= 0: # if fragment exceeds existing payload RAW = info.payload + bytearray(GAP) + RAW else: # if fragment partially overlaps existing payload RAW = info.payload + RAW[ISN-GAP:] self._buffer[BUFID][ACK]['raw'] = RAW # update payload datagram self._buffer[BUFID][ACK]['len'] = len(RAW) # update payload length # update hole descriptor list HDL = self._buffer[BUFID]['hdl'] for (index, hole) in enumerate(HDL): # step one if info.first > hole.last: # step two continue if info.last < hole.first: # step three continue del HDL[index] # step four if info.first > hole.first: # step five new_hole = Info( first=hole.first, last=info.first - 1, ) HDL.insert(index, new_hole) index += 1 if info.last < hole.last and not FIN and not RST: # step six new_hole = Info( first=info.last + 1, last=hole.last ) HDL.insert(index, new_hole) break # step seven self._buffer[BUFID]['hdl'] = HDL # update HDL # when FIN/RST is set, submit buffer of this session if FIN or RST: self._dtgram += self.submit(self._buffer[BUFID], bufid=BUFID) del self._buffer[BUFID]
python
def reassembly(self, info): """Reassembly procedure. Positional arguments: * info -- Info, info dict of packets to be reassembled """ BUFID = info.bufid # Buffer Identifier DSN = info.dsn # Data Sequence Number ACK = info.ack # Acknowledgement Number FIN = info.fin # Finish Flag (Termination) RST = info.rst # Reset Connection Flag (Termination) SYN = info.syn # Synchronise Flag (Establishment) # when SYN is set, reset buffer of this session if SYN and BUFID in self._buffer: self._dtgram += self.submit(self._buffer[BUFID], bufid=BUFID) del self._buffer[BUFID] # initialise buffer with BUFID & ACK if BUFID not in self._buffer: self._buffer[BUFID] = { 'hdl': [Info(first=info.len, last=sys.maxsize)], ACK: dict( ind=[info.num], isn=info.dsn, len=info.len, raw=info.payload, ), } else: # initialise buffer with ACK if ACK not in self._buffer[BUFID]: self._buffer[BUFID][ACK] = dict( ind=[info.num], isn=info.dsn, len=info.len, raw=info.payload, ) else: # append packet index self._buffer[BUFID][ACK]['ind'].append(info.num) # record fragment payload ISN = self._buffer[BUFID][ACK]['isn'] # Initial Sequence Number RAW = self._buffer[BUFID][ACK]['raw'] # Raw Payload Data if DSN >= ISN: # if fragment goes after existing payload LEN = self._buffer[BUFID][ACK]['len'] GAP = DSN - (ISN + LEN) # gap length between payloads if GAP >= 0: # if fragment goes after existing payload RAW += bytearray(GAP) + info.payload else: # if fragment partially overlaps existing payload RAW[DSN-ISN:] = info.payload else: # if fragment exceeds existing payload LEN = info.len GAP = ISN - (DSN + LEN) # gap length between payloads self._buffer[BUFID][ACK]['isn'] = DSN if GAP >= 0: # if fragment exceeds existing payload RAW = info.payload + bytearray(GAP) + RAW else: # if fragment partially overlaps existing payload RAW = info.payload + RAW[ISN-GAP:] self._buffer[BUFID][ACK]['raw'] = RAW # update payload datagram self._buffer[BUFID][ACK]['len'] = len(RAW) # update payload length # update hole descriptor list HDL = self._buffer[BUFID]['hdl'] for (index, hole) in enumerate(HDL): # step one if info.first > hole.last: # step two continue if info.last < hole.first: # step three continue del HDL[index] # step four if info.first > hole.first: # step five new_hole = Info( first=hole.first, last=info.first - 1, ) HDL.insert(index, new_hole) index += 1 if info.last < hole.last and not FIN and not RST: # step six new_hole = Info( first=info.last + 1, last=hole.last ) HDL.insert(index, new_hole) break # step seven self._buffer[BUFID]['hdl'] = HDL # update HDL # when FIN/RST is set, submit buffer of this session if FIN or RST: self._dtgram += self.submit(self._buffer[BUFID], bufid=BUFID) del self._buffer[BUFID]
[ "def", "reassembly", "(", "self", ",", "info", ")", ":", "BUFID", "=", "info", ".", "bufid", "# Buffer Identifier", "DSN", "=", "info", ".", "dsn", "# Data Sequence Number", "ACK", "=", "info", ".", "ack", "# Acknowledgement Number", "FIN", "=", "info", ".", "fin", "# Finish Flag (Termination)", "RST", "=", "info", ".", "rst", "# Reset Connection Flag (Termination)", "SYN", "=", "info", ".", "syn", "# Synchronise Flag (Establishment)", "# when SYN is set, reset buffer of this session", "if", "SYN", "and", "BUFID", "in", "self", ".", "_buffer", ":", "self", ".", "_dtgram", "+=", "self", ".", "submit", "(", "self", ".", "_buffer", "[", "BUFID", "]", ",", "bufid", "=", "BUFID", ")", "del", "self", ".", "_buffer", "[", "BUFID", "]", "# initialise buffer with BUFID & ACK", "if", "BUFID", "not", "in", "self", ".", "_buffer", ":", "self", ".", "_buffer", "[", "BUFID", "]", "=", "{", "'hdl'", ":", "[", "Info", "(", "first", "=", "info", ".", "len", ",", "last", "=", "sys", ".", "maxsize", ")", "]", ",", "ACK", ":", "dict", "(", "ind", "=", "[", "info", ".", "num", "]", ",", "isn", "=", "info", ".", "dsn", ",", "len", "=", "info", ".", "len", ",", "raw", "=", "info", ".", "payload", ",", ")", ",", "}", "else", ":", "# initialise buffer with ACK", "if", "ACK", "not", "in", "self", ".", "_buffer", "[", "BUFID", "]", ":", "self", ".", "_buffer", "[", "BUFID", "]", "[", "ACK", "]", "=", "dict", "(", "ind", "=", "[", "info", ".", "num", "]", ",", "isn", "=", "info", ".", "dsn", ",", "len", "=", "info", ".", "len", ",", "raw", "=", "info", ".", "payload", ",", ")", "else", ":", "# append packet index", "self", ".", "_buffer", "[", "BUFID", "]", "[", "ACK", "]", "[", "'ind'", "]", ".", "append", "(", "info", ".", "num", ")", "# record fragment payload", "ISN", "=", "self", ".", "_buffer", "[", "BUFID", "]", "[", "ACK", "]", "[", "'isn'", "]", "# Initial Sequence Number", "RAW", "=", "self", ".", "_buffer", "[", "BUFID", "]", "[", "ACK", "]", "[", "'raw'", "]", "# Raw Payload Data", "if", "DSN", ">=", "ISN", ":", "# if fragment goes after existing payload", "LEN", "=", "self", ".", "_buffer", "[", "BUFID", "]", "[", "ACK", "]", "[", "'len'", "]", "GAP", "=", "DSN", "-", "(", "ISN", "+", "LEN", ")", "# gap length between payloads", "if", "GAP", ">=", "0", ":", "# if fragment goes after existing payload", "RAW", "+=", "bytearray", "(", "GAP", ")", "+", "info", ".", "payload", "else", ":", "# if fragment partially overlaps existing payload", "RAW", "[", "DSN", "-", "ISN", ":", "]", "=", "info", ".", "payload", "else", ":", "# if fragment exceeds existing payload", "LEN", "=", "info", ".", "len", "GAP", "=", "ISN", "-", "(", "DSN", "+", "LEN", ")", "# gap length between payloads", "self", ".", "_buffer", "[", "BUFID", "]", "[", "ACK", "]", "[", "'isn'", "]", "=", "DSN", "if", "GAP", ">=", "0", ":", "# if fragment exceeds existing payload", "RAW", "=", "info", ".", "payload", "+", "bytearray", "(", "GAP", ")", "+", "RAW", "else", ":", "# if fragment partially overlaps existing payload", "RAW", "=", "info", ".", "payload", "+", "RAW", "[", "ISN", "-", "GAP", ":", "]", "self", ".", "_buffer", "[", "BUFID", "]", "[", "ACK", "]", "[", "'raw'", "]", "=", "RAW", "# update payload datagram", "self", ".", "_buffer", "[", "BUFID", "]", "[", "ACK", "]", "[", "'len'", "]", "=", "len", "(", "RAW", ")", "# update payload length", "# update hole descriptor list", "HDL", "=", "self", ".", "_buffer", "[", "BUFID", "]", "[", "'hdl'", "]", "for", "(", "index", ",", "hole", ")", "in", "enumerate", "(", "HDL", ")", ":", "# step 
one", "if", "info", ".", "first", ">", "hole", ".", "last", ":", "# step two", "continue", "if", "info", ".", "last", "<", "hole", ".", "first", ":", "# step three", "continue", "del", "HDL", "[", "index", "]", "# step four", "if", "info", ".", "first", ">", "hole", ".", "first", ":", "# step five", "new_hole", "=", "Info", "(", "first", "=", "hole", ".", "first", ",", "last", "=", "info", ".", "first", "-", "1", ",", ")", "HDL", ".", "insert", "(", "index", ",", "new_hole", ")", "index", "+=", "1", "if", "info", ".", "last", "<", "hole", ".", "last", "and", "not", "FIN", "and", "not", "RST", ":", "# step six", "new_hole", "=", "Info", "(", "first", "=", "info", ".", "last", "+", "1", ",", "last", "=", "hole", ".", "last", ")", "HDL", ".", "insert", "(", "index", ",", "new_hole", ")", "break", "# step seven", "self", ".", "_buffer", "[", "BUFID", "]", "[", "'hdl'", "]", "=", "HDL", "# update HDL", "# when FIN/RST is set, submit buffer of this session", "if", "FIN", "or", "RST", ":", "self", ".", "_dtgram", "+=", "self", ".", "submit", "(", "self", ".", "_buffer", "[", "BUFID", "]", ",", "bufid", "=", "BUFID", ")", "del", "self", ".", "_buffer", "[", "BUFID", "]" ]
Reassembly procedure. Positional arguments: * info -- Info, info dict of packets to be reassembled
[ "Reassembly", "procedure", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/reassembly/tcp.py#L228-L319
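The hole-descriptor bookkeeping above follows the classic RFC 815 procedure. A simplified, self-contained sketch of the same idea on plain (first, last) tuples (this is not pcapkit's implementation, just the algorithm it applies):

import sys

def update_holes(holes, first, last):
    # carve the received span [first, last] out of the list of holes
    result = []
    for lo, hi in holes:
        if first > hi or last < lo:        # no overlap: keep the hole as-is
            result.append((lo, hi))
            continue
        if first > lo:                     # data starts inside the hole
            result.append((lo, first - 1))
        if last < hi:                      # data ends inside the hole
            result.append((last + 1, hi))
    return result

holes = [(0, sys.maxsize)]
holes = update_holes(holes, 0, 99)         # in-order segment
holes = update_holes(holes, 200, 299)      # out-of-order segment leaves a gap
assert holes == [(100, 199), (300, sys.maxsize)]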
JarryShaw/PyPCAPKit
src/reassembly/tcp.py
TCP_Reassembly.submit
def submit(self, buf, *, bufid): """Submit reassembled payload. Positional arguments: * buf -- dict, buffer dict of reassembled packets Keyword arguments: * bufid -- tuple, buffer identifier Returns: * list -- reassembled packets """ datagram = [] # reassembled datagram HDL = buf.pop('hdl') # hole descriptor list (remove from dict) # check through every buffer with ACK for (ack, buffer) in buf.items(): # if this buffer is not implemented # go through every hole and extract received payload if len(HDL) > 2 and self._strflg: data = [] start = stop = 0 for hole in HDL: stop = hole.first byte = buffer['raw'][start:stop] start = hole.last if byte: # strip empty payload data.append(byte) byte = buffer['raw'][start:] if byte: # strip empty payload data.append(byte) if data: # strip empty buffer packet = Info( NotImplemented=True, id=Info( src=(bufid[0], bufid[2]), dst=(bufid[1], bufid[3]), ack=ack, ), index=tuple(buffer['ind']), payload=tuple(data) or None, packets=tuple(analyse(io.BytesIO(frag), len(frag)) for frag in data), ) datagram.append(packet) # if this buffer is implemented # export payload data & convert into bytes else: data = buffer['raw'] if data: # strip empty buffer packet = Info( NotImplemented=False, id=Info( src=(bufid[0], bufid[2]), dst=(bufid[1], bufid[3]), ack=ack, ), index=tuple(buffer['ind']), payload=bytes(data) or None, packets=(analyse(io.BytesIO(data), len(data)),), ) datagram.append(packet) return datagram
python
def submit(self, buf, *, bufid): """Submit reassembled payload. Positional arguments: * buf -- dict, buffer dict of reassembled packets Keyword arguments: * bufid -- tuple, buffer identifier Returns: * list -- reassembled packets """ datagram = [] # reassembled datagram HDL = buf.pop('hdl') # hole descriptor list (remove from dict) # check through every buffer with ACK for (ack, buffer) in buf.items(): # if this buffer is not implemented # go through every hole and extract received payload if len(HDL) > 2 and self._strflg: data = [] start = stop = 0 for hole in HDL: stop = hole.first byte = buffer['raw'][start:stop] start = hole.last if byte: # strip empty payload data.append(byte) byte = buffer['raw'][start:] if byte: # strip empty payload data.append(byte) if data: # strip empty buffer packet = Info( NotImplemented=True, id=Info( src=(bufid[0], bufid[2]), dst=(bufid[1], bufid[3]), ack=ack, ), index=tuple(buffer['ind']), payload=tuple(data) or None, packets=tuple(analyse(io.BytesIO(frag), len(frag)) for frag in data), ) datagram.append(packet) # if this buffer is implemented # export payload data & convert into bytes else: data = buffer['raw'] if data: # strip empty buffer packet = Info( NotImplemented=False, id=Info( src=(bufid[0], bufid[2]), dst=(bufid[1], bufid[3]), ack=ack, ), index=tuple(buffer['ind']), payload=bytes(data) or None, packets=(analyse(io.BytesIO(data), len(data)),), ) datagram.append(packet) return datagram
[ "def", "submit", "(", "self", ",", "buf", ",", "*", ",", "bufid", ")", ":", "datagram", "=", "[", "]", "# reassembled datagram", "HDL", "=", "buf", ".", "pop", "(", "'hdl'", ")", "# hole descriptor list (remove from dict)", "# check through every buffer with ACK", "for", "(", "ack", ",", "buffer", ")", "in", "buf", ".", "items", "(", ")", ":", "# if this buffer is not implemented", "# go through every hole and extract received payload", "if", "len", "(", "HDL", ")", ">", "2", "and", "self", ".", "_strflg", ":", "data", "=", "[", "]", "start", "=", "stop", "=", "0", "for", "hole", "in", "HDL", ":", "stop", "=", "hole", ".", "first", "byte", "=", "buffer", "[", "'raw'", "]", "[", "start", ":", "stop", "]", "start", "=", "hole", ".", "last", "if", "byte", ":", "# strip empty payload", "data", ".", "append", "(", "byte", ")", "byte", "=", "buffer", "[", "'raw'", "]", "[", "start", ":", "]", "if", "byte", ":", "# strip empty payload", "data", ".", "append", "(", "byte", ")", "if", "data", ":", "# strip empty buffer", "packet", "=", "Info", "(", "NotImplemented", "=", "True", ",", "id", "=", "Info", "(", "src", "=", "(", "bufid", "[", "0", "]", ",", "bufid", "[", "2", "]", ")", ",", "dst", "=", "(", "bufid", "[", "1", "]", ",", "bufid", "[", "3", "]", ")", ",", "ack", "=", "ack", ",", ")", ",", "index", "=", "tuple", "(", "buffer", "[", "'ind'", "]", ")", ",", "payload", "=", "tuple", "(", "data", ")", "or", "None", ",", "packets", "=", "tuple", "(", "analyse", "(", "io", ".", "BytesIO", "(", "frag", ")", ",", "len", "(", "frag", ")", ")", "for", "frag", "in", "data", ")", ",", ")", "datagram", ".", "append", "(", "packet", ")", "# if this buffer is implemented", "# export payload data & convert into bytes", "else", ":", "data", "=", "buffer", "[", "'raw'", "]", "if", "data", ":", "# strip empty buffer", "packet", "=", "Info", "(", "NotImplemented", "=", "False", ",", "id", "=", "Info", "(", "src", "=", "(", "bufid", "[", "0", "]", ",", "bufid", "[", "2", "]", ")", ",", "dst", "=", "(", "bufid", "[", "1", "]", ",", "bufid", "[", "3", "]", ")", ",", "ack", "=", "ack", ",", ")", ",", "index", "=", "tuple", "(", "buffer", "[", "'ind'", "]", ")", ",", "payload", "=", "bytes", "(", "data", ")", "or", "None", ",", "packets", "=", "(", "analyse", "(", "io", ".", "BytesIO", "(", "data", ")", ",", "len", "(", "data", ")", ")", ",", ")", ",", ")", "datagram", ".", "append", "(", "packet", ")", "return", "datagram" ]
Submit reassembled payload. Positional arguments: * buf -- dict, buffer dict of reassembled packets Keyword arguments: * bufid -- tuple, buffer identifier Returns: * list -- reassembled packets
[ "Submit", "reassembled", "payload", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/reassembly/tcp.py#L321-L383
JarryShaw/PyPCAPKit
src/toolkit/scapy.py
packet2chain
def packet2chain(packet): """Fetch Scapy packet protocol chain.""" if scapy_all is None: raise ModuleNotFound("No module named 'scapy'", name='scapy') chain = [packet.name] payload = packet.payload while not isinstance(payload, scapy_all.packet.NoPayload): chain.append(payload.name) payload = payload.payload return ':'.join(chain)
python
def packet2chain(packet): """Fetch Scapy packet protocol chain.""" if scapy_all is None: raise ModuleNotFound("No module named 'scapy'", name='scapy') chain = [packet.name] payload = packet.payload while not isinstance(payload, scapy_all.packet.NoPayload): chain.append(payload.name) payload = payload.payload return ':'.join(chain)
[ "def", "packet2chain", "(", "packet", ")", ":", "if", "scapy_all", "is", "None", ":", "raise", "ModuleNotFound", "(", "\"No module named 'scapy'\"", ",", "name", "=", "'scapy'", ")", "chain", "=", "[", "packet", ".", "name", "]", "payload", "=", "packet", ".", "payload", "while", "not", "isinstance", "(", "payload", ",", "scapy_all", ".", "packet", ".", "NoPayload", ")", ":", "chain", ".", "append", "(", "payload", ".", "name", ")", "payload", "=", "payload", ".", "payload", "return", "':'", ".", "join", "(", "chain", ")" ]
Fetch Scapy packet protocol chain.
[ "Fetch", "Scapy", "packet", "protocol", "chain", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/toolkit/scapy.py#L36-L45
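Hypothetical usage of packet2chain, assuming scapy is installed:

from scapy.all import Ether, IP, TCP
from pcapkit.toolkit.scapy import packet2chain

pkt = Ether() / IP() / TCP()
print(packet2chain(pkt))   # expected: 'Ethernet:IP:TCP'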
JarryShaw/PyPCAPKit
src/toolkit/scapy.py
packet2dict
def packet2dict(packet, *, count=NotImplemented): """Convert Scapy packet into dict.""" if scapy_all is None: raise ModuleNotFound("No module named 'scapy'", name='scapy') def wrapper(packet): dict_ = packet.fields payload = packet.payload if not isinstance(payload, scapy_all.packet.NoPayload): dict_[payload.name] = wrapper(payload) return dict_ return { 'packet': bytes(packet), packet.name: wrapper(packet), }
python
def packet2dict(packet, *, count=NotImplemented): """Convert Scapy packet into dict.""" if scapy_all is None: raise ModuleNotFound("No module named 'scapy'", name='scapy') def wrapper(packet): dict_ = packet.fields payload = packet.payload if not isinstance(payload, scapy_all.packet.NoPayload): dict_[payload.name] = wrapper(payload) return dict_ return { 'packet': bytes(packet), packet.name: wrapper(packet), }
[ "def", "packet2dict", "(", "packet", ",", "*", ",", "count", "=", "NotImplemented", ")", ":", "if", "scapy_all", "is", "None", ":", "raise", "ModuleNotFound", "(", "\"No module named 'scapy'\"", ",", "name", "=", "'scapy'", ")", "def", "wrapper", "(", "packet", ")", ":", "dict_", "=", "packet", ".", "fields", "payload", "=", "packet", ".", "payload", "if", "not", "isinstance", "(", "payload", ",", "scapy_all", ".", "packet", ".", "NoPayload", ")", ":", "dict_", "[", "payload", ".", "name", "]", "=", "wrapper", "(", "payload", ")", "return", "dict_", "return", "{", "'packet'", ":", "bytes", "(", "packet", ")", ",", "packet", ".", "name", ":", "wrapper", "(", "packet", ")", ",", "}" ]
Convert Scapy packet into dict.
[ "Convert", "Scapy", "packet", "into", "dict", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/toolkit/scapy.py#L48-L63
JarryShaw/PyPCAPKit
src/toolkit/scapy.py
ipv4_reassembly
def ipv4_reassembly(packet, *, count=NotImplemented): """Make data for IPv4 reassembly.""" if 'IP' in packet: ipv4 = packet['IP'] if ipv4.flags.DF: # dismiss not fragmented packet return False, None data = dict( bufid=( ipaddress.ip_address(ipv4.src), # source IP address ipaddress.ip_address(ipv4.dst), # destination IP address ipv4.id, # identification TP_PROTO.get(ipv4.proto).name, # payload protocol type ), num=count, # original packet range number fo=ipv4.frag, # fragment offset ihl=ipv4.ihl, # internet header length mf=bool(ipv4.flags.MF), # more fragment flag tl=ipv4.len, # total length, header includes header=bytearray(ipv4.raw_packet_cache), # raw bytearray type header payload=bytearray(bytes(ipv4.payload)), # raw bytearray type payload ) return True, data return False, None
python
def ipv4_reassembly(packet, *, count=NotImplemented): """Make data for IPv4 reassembly.""" if 'IP' in packet: ipv4 = packet['IP'] if ipv4.flags.DF: # dismiss not fragmented packet return False, None data = dict( bufid=( ipaddress.ip_address(ipv4.src), # source IP address ipaddress.ip_address(ipv4.dst), # destination IP address ipv4.id, # identification TP_PROTO.get(ipv4.proto).name, # payload protocol type ), num=count, # original packet range number fo=ipv4.frag, # fragment offset ihl=ipv4.ihl, # internet header length mf=bool(ipv4.flags.MF), # more fragment flag tl=ipv4.len, # total length, header includes header=bytearray(ipv4.raw_packet_cache), # raw bytearray type header payload=bytearray(bytes(ipv4.payload)), # raw bytearray type payload ) return True, data return False, None
[ "def", "ipv4_reassembly", "(", "packet", ",", "*", ",", "count", "=", "NotImplemented", ")", ":", "if", "'IP'", "in", "packet", ":", "ipv4", "=", "packet", "[", "'IP'", "]", "if", "ipv4", ".", "flags", ".", "DF", ":", "# dismiss not fragmented packet", "return", "False", ",", "None", "data", "=", "dict", "(", "bufid", "=", "(", "ipaddress", ".", "ip_address", "(", "ipv4", ".", "src", ")", ",", "# source IP address", "ipaddress", ".", "ip_address", "(", "ipv4", ".", "dst", ")", ",", "# destination IP address", "ipv4", ".", "id", ",", "# identification", "TP_PROTO", ".", "get", "(", "ipv4", ".", "proto", ")", ".", "name", ",", "# payload protocol type", ")", ",", "num", "=", "count", ",", "# original packet range number", "fo", "=", "ipv4", ".", "frag", ",", "# fragment offset", "ihl", "=", "ipv4", ".", "ihl", ",", "# internet header length", "mf", "=", "bool", "(", "ipv4", ".", "flags", ".", "MF", ")", ",", "# more fragment flag", "tl", "=", "ipv4", ".", "len", ",", "# total length, header includes", "header", "=", "bytearray", "(", "ipv4", ".", "raw_packet_cache", ")", ",", "# raw bytearray type header", "payload", "=", "bytearray", "(", "bytes", "(", "ipv4", ".", "payload", ")", ")", ",", "# raw bytearray type payload", ")", "return", "True", ",", "data", "return", "False", ",", "None" ]
Make data for IPv4 reassembly.
[ "Make", "data", "for", "IPv4", "reassembly", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/toolkit/scapy.py#L66-L88
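A quick check of the DF/MF flag guard that ipv4_reassembly relies on, assuming scapy is installed; the constructed packets are illustrative only:

from scapy.all import IP

whole = IP(flags='DF')
frag = IP(flags='MF', frag=2)

print(bool(whole.flags.DF), bool(whole.flags.MF))   # True False  -> dismissed above
print(bool(frag.flags.DF), bool(frag.flags.MF))     # False True  -> queued for reassembly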
JarryShaw/PyPCAPKit
src/toolkit/scapy.py
ipv6_reassembly
def ipv6_reassembly(packet, *, count=NotImplemented): """Make data for IPv6 reassembly.""" if scapy_all is None: raise ModuleNotFound("No module named 'scapy'", name='scapy') if 'IPv6' in packet: ipv6 = packet['IPv6'] if scapy_all.IPv6ExtHdrFragment not in ipv6: # pylint: disable=E1101 return False, None # dismiss not fragmented packet ipv6_frag = ipv6['IPv6ExtHdrFragment'] data = dict( bufid=( ipaddress.ip_address(ipv6.src), # source IP address ipaddress.ip_address(ipv6.dst), # destination IP address ipv6.fl, # label TP_PROTO.get(ipv6_frag.nh).name, # next header field in IPv6 Fragment Header ), num=count, # original packet range number fo=ipv6_frag.offset, # fragment offset ihl=len(ipv6) - len(ipv6_frag), # header length, only headers before IPv6-Frag mf=bool(ipv6_frag.m), # more fragment flag tl=len(ipv6), # total length, header includes header=bytearray(bytes(ipv6)[:-len(ipv6_frag)]), # raw bytearray type header before IPv6-Frag payload=bytearray(bytes(ipv6_frag.payload)), # raw bytearray type payload after IPv6-Frag ) return True, data return False, None
python
def ipv6_reassembly(packet, *, count=NotImplemented): """Make data for IPv6 reassembly.""" if scapy_all is None: raise ModuleNotFound("No module named 'scapy'", name='scapy') if 'IPv6' in packet: ipv6 = packet['IPv6'] if scapy_all.IPv6ExtHdrFragment not in ipv6: # pylint: disable=E1101 return False, None # dismiss not fragmented packet ipv6_frag = ipv6['IPv6ExtHdrFragment'] data = dict( bufid=( ipaddress.ip_address(ipv6.src), # source IP address ipaddress.ip_address(ipv6.dst), # destination IP address ipv6.fl, # label TP_PROTO.get(ipv6_frag.nh).name, # next header field in IPv6 Fragment Header ), num=count, # original packet range number fo=ipv6_frag.offset, # fragment offset ihl=len(ipv6) - len(ipv6_frag), # header length, only headers before IPv6-Frag mf=bool(ipv6_frag.m), # more fragment flag tl=len(ipv6), # total length, header includes header=bytearray(bytes(ipv6)[:-len(ipv6_frag)]), # raw bytearray type header before IPv6-Frag payload=bytearray(bytes(ipv6_frag.payload)), # raw bytearray type payload after IPv6-Frag ) return True, data return False, None
[ "def", "ipv6_reassembly", "(", "packet", ",", "*", ",", "count", "=", "NotImplemented", ")", ":", "if", "scapy_all", "is", "None", ":", "raise", "ModuleNotFound", "(", "\"No module named 'scapy'\"", ",", "name", "=", "'scapy'", ")", "if", "'IPv6'", "in", "packet", ":", "ipv6", "=", "packet", "[", "'IPv6'", "]", "if", "scapy_all", ".", "IPv6ExtHdrFragment", "not", "in", "ipv6", ":", "# pylint: disable=E1101", "return", "False", ",", "None", "# dismiss not fragmented packet", "ipv6_frag", "=", "ipv6", "[", "'IPv6ExtHdrFragment'", "]", "data", "=", "dict", "(", "bufid", "=", "(", "ipaddress", ".", "ip_address", "(", "ipv6", ".", "src", ")", ",", "# source IP address", "ipaddress", ".", "ip_address", "(", "ipv6", ".", "dst", ")", ",", "# destination IP address", "ipv6", ".", "fl", ",", "# label", "TP_PROTO", ".", "get", "(", "ipv6_frag", ".", "nh", ")", ".", "name", ",", "# next header field in IPv6 Fragment Header", ")", ",", "num", "=", "count", ",", "# original packet range number", "fo", "=", "ipv6_frag", ".", "offset", ",", "# fragment offset", "ihl", "=", "len", "(", "ipv6", ")", "-", "len", "(", "ipv6_frag", ")", ",", "# header length, only headers before IPv6-Frag", "mf", "=", "bool", "(", "ipv6_frag", ".", "m", ")", ",", "# more fragment flag", "tl", "=", "len", "(", "ipv6", ")", ",", "# total length, header includes", "header", "=", "bytearray", "(", "bytes", "(", "ipv6", ")", "[", ":", "-", "len", "(", "ipv6_frag", ")", "]", ")", ",", "# raw bytearray type header before IPv6-Frag", "payload", "=", "bytearray", "(", "bytes", "(", "ipv6_frag", ".", "payload", ")", ")", ",", "# raw bytearray type payload after IPv6-Frag", ")", "return", "True", ",", "data", "return", "False", ",", "None" ]
Make data for IPv6 reassembly.
[ "Make", "data", "for", "IPv6", "reassembly", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/toolkit/scapy.py#L91-L116
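Unlike IPv4, the IPv6 fragmentation fields live in a separate extension header, which is why the record above keys the buffer id on the Fragment header's next-header value. A small sketch that builds one fragment by hand and reads those fields back, assuming Scapy is installed; addresses, flow label and sizes are illustrative:

from scapy.all import IPv6, IPv6ExtHdrFragment, Raw

# one hand-made fragment: offset 0, more-fragments set, next header 17 (UDP)
frag = (IPv6(src='2001:db8::1', dst='2001:db8::2', fl=0x12345) /
        IPv6ExtHdrFragment(offset=0, m=1, id=0xdeadbeef, nh=17) /
        Raw(b'B' * 64))
fh = frag[IPv6ExtHdrFragment]
bufid = (frag.src, frag.dst, frag.fl, fh.nh)     # same shape as the bufid tuple above
print(bufid, fh.offset * 8, bool(fh.m), len(bytes(fh.payload)))

Note that the fragment offset field counts 8-octet units, so turning it into a byte offset needs the multiplication by eight shown here.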
JarryShaw/PyPCAPKit
src/toolkit/scapy.py
tcp_reassembly
def tcp_reassembly(packet, *, count=NotImplemented): """Store data for TCP reassembly.""" if 'TCP' in packet: ip = packet['IP'] if 'IP' in packet else packet['IPv6'] tcp = packet['TCP'] data = dict( bufid=( ipaddress.ip_address(ip.src), # source IP address ipaddress.ip_address(ip.dst), # destination IP address tcp.sport, # source port tcp.dport, # destination port ), num=count, # original packet range number ack=tcp.ack, # acknowledgement dsn=tcp.seq, # data sequence number syn=bool(tcp.flags.S), # synchronise flag fin=bool(tcp.flags.F), # finish flag rst=bool(tcp.flags.R), # reset connection flag payload=bytearray(bytes(tcp.payload)), # raw bytearray type payload ) raw_len = len(tcp.payload) # payload length, header excludes data['first'] = tcp.seq # this sequence number data['last'] = tcp.seq + raw_len # next (wanted) sequence number data['len'] = raw_len # payload length, header excludes return True, data return False, None
python
def tcp_reassembly(packet, *, count=NotImplemented): """Store data for TCP reassembly.""" if 'TCP' in packet: ip = packet['IP'] if 'IP' in packet else packet['IPv6'] tcp = packet['TCP'] data = dict( bufid=( ipaddress.ip_address(ip.src), # source IP address ipaddress.ip_address(ip.dst), # destination IP address tcp.sport, # source port tcp.dport, # destination port ), num=count, # original packet range number ack=tcp.ack, # acknowledgement dsn=tcp.seq, # data sequence number syn=bool(tcp.flags.S), # synchronise flag fin=bool(tcp.flags.F), # finish flag rst=bool(tcp.flags.R), # reset connection flag payload=bytearray(bytes(tcp.payload)), # raw bytearray type payload ) raw_len = len(tcp.payload) # payload length, header excludes data['first'] = tcp.seq # this sequence number data['last'] = tcp.seq + raw_len # next (wanted) sequence number data['len'] = raw_len # payload length, header excludes return True, data return False, None
[ "def", "tcp_reassembly", "(", "packet", ",", "*", ",", "count", "=", "NotImplemented", ")", ":", "if", "'TCP'", "in", "packet", ":", "ip", "=", "packet", "[", "'IP'", "]", "if", "'IP'", "in", "packet", "else", "packet", "[", "'IPv6'", "]", "tcp", "=", "packet", "[", "'TCP'", "]", "data", "=", "dict", "(", "bufid", "=", "(", "ipaddress", ".", "ip_address", "(", "ip", ".", "src", ")", ",", "# source IP address", "ipaddress", ".", "ip_address", "(", "ip", ".", "dst", ")", ",", "# destination IP address", "tcp", ".", "sport", ",", "# source port", "tcp", ".", "dport", ",", "# destination port", ")", ",", "num", "=", "count", ",", "# original packet range number", "ack", "=", "tcp", ".", "ack", ",", "# acknowledgement", "dsn", "=", "tcp", ".", "seq", ",", "# data sequence number", "syn", "=", "bool", "(", "tcp", ".", "flags", ".", "S", ")", ",", "# synchronise flag", "fin", "=", "bool", "(", "tcp", ".", "flags", ".", "F", ")", ",", "# finish flag", "rst", "=", "bool", "(", "tcp", ".", "flags", ".", "R", ")", ",", "# reset connection flag", "payload", "=", "bytearray", "(", "bytes", "(", "tcp", ".", "payload", ")", ")", ",", "# raw bytearray type payload", ")", "raw_len", "=", "len", "(", "tcp", ".", "payload", ")", "# payload length, header excludes", "data", "[", "'first'", "]", "=", "tcp", ".", "seq", "# this sequence number", "data", "[", "'last'", "]", "=", "tcp", ".", "seq", "+", "raw_len", "# next (wanted) sequence number", "data", "[", "'len'", "]", "=", "raw_len", "# payload length, header excludes", "return", "True", ",", "data", "return", "False", ",", "None" ]
Store data for TCP reassembly.
[ "Store", "data", "for", "TCP", "reassembly", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/toolkit/scapy.py#L119-L144
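The first, last and len fields in the record above are enough to stitch segments back into a byte stream once they have been grouped by bufid. A minimal pure-Python sketch of that final step; the function name reassemble_stream and the padding strategy for holes are illustrative choices, not PyPCAPKit behaviour:

def reassemble_stream(segments):
    """Concatenate TCP segments given dicts with 'first', 'last' and 'payload' keys."""
    stream = bytearray()
    expected = None
    for seg in sorted(segments, key=lambda s: s['first']):
        if expected is not None and seg['first'] > expected:
            stream.extend(b'\x00' * (seg['first'] - expected))   # hole: pad with NULs
        offset = 0 if expected is None else max(0, expected - seg['first'])
        stream.extend(seg['payload'][offset:])                   # drop overlapping bytes
        expected = max(expected or 0, seg['last'])
    return bytes(stream)

segments = [
    dict(first=1000, last=1004, payload=bytearray(b'HTTP')),
    dict(first=1004, last=1008, payload=bytearray(b'/1.1')),
]
print(reassemble_stream(segments))   # b'HTTP/1.1'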
JarryShaw/PyPCAPKit
src/toolkit/scapy.py
tcp_traceflow
def tcp_traceflow(packet, *, count=NotImplemented): """Trace packet flow for TCP.""" if 'TCP' in packet: ip = packet['IP'] if 'IP' in packet else packet['IPv6'] tcp = packet['TCP'] data = dict( protocol=LINKTYPE.get(packet.name.upper()), # data link type from global header index=count, # frame number frame=packet2dict(packet), # extracted packet syn=bool(tcp.flags.S), # TCP synchronise (SYN) flag fin=bool(tcp.flags.F), # TCP finish (FIN) flag src=ipaddress.ip_address(ip.src), # source IP dst=ipaddress.ip_address(ip.dst), # destination IP srcport=tcp.sport, # TCP source port dstport=tcp.dport, # TCP destination port timestamp=time.time(), # timestamp ) return True, data return False, None
python
def tcp_traceflow(packet, *, count=NotImplemented): """Trace packet flow for TCP.""" if 'TCP' in packet: ip = packet['IP'] if 'IP' in packet else packet['IPv6'] tcp = packet['TCP'] data = dict( protocol=LINKTYPE.get(packet.name.upper()), # data link type from global header index=count, # frame number frame=packet2dict(packet), # extracted packet syn=bool(tcp.flags.S), # TCP synchronise (SYN) flag fin=bool(tcp.flags.F), # TCP finish (FIN) flag src=ipaddress.ip_address(ip.src), # source IP dst=ipaddress.ip_address(ip.dst), # destination IP srcport=tcp.sport, # TCP source port dstport=tcp.dport, # TCP destination port timestamp=time.time(), # timestamp ) return True, data return False, None
[ "def", "tcp_traceflow", "(", "packet", ",", "*", ",", "count", "=", "NotImplemented", ")", ":", "if", "'TCP'", "in", "packet", ":", "ip", "=", "packet", "[", "'IP'", "]", "if", "'IP'", "in", "packet", "else", "packet", "[", "'IPv6'", "]", "tcp", "=", "packet", "[", "'TCP'", "]", "data", "=", "dict", "(", "protocol", "=", "LINKTYPE", ".", "get", "(", "packet", ".", "name", ".", "upper", "(", ")", ")", ",", "# data link type from global header", "index", "=", "count", ",", "# frame number", "frame", "=", "packet2dict", "(", "packet", ")", ",", "# extracted packet", "syn", "=", "bool", "(", "tcp", ".", "flags", ".", "S", ")", ",", "# TCP synchronise (SYN) flag", "fin", "=", "bool", "(", "tcp", ".", "flags", ".", "F", ")", ",", "# TCP finish (FIN) flag", "src", "=", "ipaddress", ".", "ip_address", "(", "ip", ".", "src", ")", ",", "# source IP", "dst", "=", "ipaddress", ".", "ip_address", "(", "ip", ".", "dst", ")", ",", "# destination IP", "srcport", "=", "tcp", ".", "sport", ",", "# TCP source port", "dstport", "=", "tcp", ".", "dport", ",", "# TCP destination port", "timestamp", "=", "time", ".", "time", "(", ")", ",", "# timestamp", ")", "return", "True", ",", "data", "return", "False", ",", "None" ]
Trace packet flow for TCP.
[ "Trace", "packet", "flow", "for", "TCP", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/toolkit/scapy.py#L147-L165
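Each record produced above describes a single packet; tracing a flow means folding those records into per-connection state keyed by addresses and ports. A small pure-Python sketch of that fold; the names and the flow dictionary layout are illustrative:

def fold_flows(records):
    """Group trace records into flows and note when each flow opens and closes."""
    flows = {}
    for rec in records:
        key = (str(rec['src']), rec['srcport'], str(rec['dst']), rec['dstport'])
        flow = flows.setdefault(key, dict(packets=0, opened=None, closed=None))
        flow['packets'] += 1
        if rec['syn'] and flow['opened'] is None:
            flow['opened'] = rec['timestamp']
        if rec['fin']:
            flow['closed'] = rec['timestamp']
    return flows

records = [
    dict(src='10.0.0.1', srcport=40000, dst='10.0.0.2', dstport=80,
         syn=True, fin=False, timestamp=1.0),
    dict(src='10.0.0.1', srcport=40000, dst='10.0.0.2', dstport=80,
         syn=False, fin=True, timestamp=2.5),
]
print(fold_flows(records))

A fuller version would also normalise the key so that both directions of a connection land in the same bucket.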
JarryShaw/PyPCAPKit
src/protocols/internet/hopopt.py
HOPOPT.read_hopopt
def read_hopopt(self, length, extension): """Read IPv6 Hop-by-Hop Options. Structure of HOPOPT header [RFC 8200]: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Next Header | Hdr Ext Len | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | | . . . Options . . . | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.next Next Header 1 8 hopopt.length Header Extensive Length 2 16 hopopt.options Options """ if length is None: length = len(self) _next = self._read_protos(1) _hlen = self._read_unpack(1) # _opts = self._read_fileng(_hlen*8+6) hopopt = dict( next=_next, length=(_hlen + 1) * 8, ) options = self._read_hopopt_options(_hlen * 8 + 6) hopopt['options'] = options[0] # tuple of option acronyms hopopt.update(options[1]) # merge option info to buffer length -= hopopt['length'] hopopt['packet'] = self._read_packet(header=hopopt['length'], payload=length) if extension: self._protos = None return hopopt return self._decode_next_layer(hopopt, _next, length)
python
def read_hopopt(self, length, extension): """Read IPv6 Hop-by-Hop Options. Structure of HOPOPT header [RFC 8200]: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Next Header | Hdr Ext Len | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | | . . . Options . . . | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.next Next Header 1 8 hopopt.length Header Extensive Length 2 16 hopopt.options Options """ if length is None: length = len(self) _next = self._read_protos(1) _hlen = self._read_unpack(1) # _opts = self._read_fileng(_hlen*8+6) hopopt = dict( next=_next, length=(_hlen + 1) * 8, ) options = self._read_hopopt_options(_hlen * 8 + 6) hopopt['options'] = options[0] # tuple of option acronyms hopopt.update(options[1]) # merge option info to buffer length -= hopopt['length'] hopopt['packet'] = self._read_packet(header=hopopt['length'], payload=length) if extension: self._protos = None return hopopt return self._decode_next_layer(hopopt, _next, length)
[ "def", "read_hopopt", "(", "self", ",", "length", ",", "extension", ")", ":", "if", "length", "is", "None", ":", "length", "=", "len", "(", "self", ")", "_next", "=", "self", ".", "_read_protos", "(", "1", ")", "_hlen", "=", "self", ".", "_read_unpack", "(", "1", ")", "# _opts = self._read_fileng(_hlen*8+6)", "hopopt", "=", "dict", "(", "next", "=", "_next", ",", "length", "=", "(", "_hlen", "+", "1", ")", "*", "8", ",", ")", "options", "=", "self", ".", "_read_hopopt_options", "(", "_hlen", "*", "8", "+", "6", ")", "hopopt", "[", "'options'", "]", "=", "options", "[", "0", "]", "# tuple of option acronyms", "hopopt", ".", "update", "(", "options", "[", "1", "]", ")", "# merge option info to buffer", "length", "-=", "hopopt", "[", "'length'", "]", "hopopt", "[", "'packet'", "]", "=", "self", ".", "_read_packet", "(", "header", "=", "hopopt", "[", "'length'", "]", ",", "payload", "=", "length", ")", "if", "extension", ":", "self", ".", "_protos", "=", "None", "return", "hopopt", "return", "self", ".", "_decode_next_layer", "(", "hopopt", ",", "_next", ",", "length", ")" ]
Read IPv6 Hop-by-Hop Options. Structure of HOPOPT header [RFC 8200]: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Next Header | Hdr Ext Len | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | | . . . Options . . . | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.next Next Header 1 8 hopopt.length Header Extensive Length 2 16 hopopt.options Options
[ "Read", "IPv6", "Hop", "-", "by", "-", "Hop", "Options", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/hopopt.py#L147-L189
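The header-length arithmetic in the record above, (Hdr Ext Len + 1) * 8, is the easy part to get wrong: the 8-bit length field counts 8-octet units beyond the first. A standalone sketch of just the fixed part of the header, using struct; the sample bytes are illustrative:

import struct

def parse_hopopt_fixed(data):
    """Return (next_header, total_header_length, options_bytes) for a Hop-by-Hop header."""
    nxt, hlen = struct.unpack_from('!BB', data, 0)
    total = (hlen + 1) * 8                  # length field counts 8-octet units past the first
    return nxt, total, data[2:total]

# next header 6 (TCP), Hdr Ext Len 0 -> 8 bytes total, 6 option bytes (a PadN here)
sample = bytes([6, 0, 1, 4, 0, 0, 0, 0])
print(parse_hopopt_fixed(sample))           # (6, 8, b'\x01\x04\x00\x00\x00\x00')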
JarryShaw/PyPCAPKit
src/protocols/internet/hopopt.py
HOPOPT._read_hopopt_options
def _read_hopopt_options(self, length): """Read HOPOPT options. Positional arguments: * length -- int, length of options Returns: * dict -- extracted HOPOPT options """ counter = 0 # length of read options optkind = list() # option type list options = dict() # dict of option data while counter < length: # break when eol triggered code = self._read_unpack(1) if not code: break # extract parameter abbr, desc = _HOPOPT_OPT.get(code, ('none', 'Unassigned')) data = _HOPOPT_PROC(abbr)(self, code, desc=desc) enum = _OPT_TYPE.get(code) # record parameter data counter += data['length'] if enum in optkind: if isinstance(options[abbr], tuple): options[abbr] += (Info(data),) else: options[abbr] = (Info(options[abbr]), Info(data)) else: optkind.append(enum) options[abbr] = data # check threshold if counter != length: raise ProtocolError(f'{self.alias}: invalid format') return tuple(optkind), options
python
def _read_hopopt_options(self, length): """Read HOPOPT options. Positional arguments: * length -- int, length of options Returns: * dict -- extracted HOPOPT options """ counter = 0 # length of read options optkind = list() # option type list options = dict() # dict of option data while counter < length: # break when eol triggered code = self._read_unpack(1) if not code: break # extract parameter abbr, desc = _HOPOPT_OPT.get(code, ('none', 'Unassigned')) data = _HOPOPT_PROC(abbr)(self, code, desc=desc) enum = _OPT_TYPE.get(code) # record parameter data counter += data['length'] if enum in optkind: if isinstance(options[abbr], tuple): options[abbr] += (Info(data),) else: options[abbr] = (Info(options[abbr]), Info(data)) else: optkind.append(enum) options[abbr] = data # check threshold if counter != length: raise ProtocolError(f'{self.alias}: invalid format') return tuple(optkind), options
[ "def", "_read_hopopt_options", "(", "self", ",", "length", ")", ":", "counter", "=", "0", "# length of read options", "optkind", "=", "list", "(", ")", "# option type list", "options", "=", "dict", "(", ")", "# dict of option data", "while", "counter", "<", "length", ":", "# break when eol triggered", "code", "=", "self", ".", "_read_unpack", "(", "1", ")", "if", "not", "code", ":", "break", "# extract parameter", "abbr", ",", "desc", "=", "_HOPOPT_OPT", ".", "get", "(", "code", ",", "(", "'none'", ",", "'Unassigned'", ")", ")", "data", "=", "_HOPOPT_PROC", "(", "abbr", ")", "(", "self", ",", "code", ",", "desc", "=", "desc", ")", "enum", "=", "_OPT_TYPE", ".", "get", "(", "code", ")", "# record parameter data", "counter", "+=", "data", "[", "'length'", "]", "if", "enum", "in", "optkind", ":", "if", "isinstance", "(", "options", "[", "abbr", "]", ",", "tuple", ")", ":", "options", "[", "abbr", "]", "+=", "(", "Info", "(", "data", ")", ",", ")", "else", ":", "options", "[", "abbr", "]", "=", "(", "Info", "(", "options", "[", "abbr", "]", ")", ",", "Info", "(", "data", ")", ")", "else", ":", "optkind", ".", "append", "(", "enum", ")", "options", "[", "abbr", "]", "=", "data", "# check threshold", "if", "counter", "!=", "length", ":", "raise", "ProtocolError", "(", "f'{self.alias}: invalid format'", ")", "return", "tuple", "(", "optkind", ")", ",", "options" ]
Read HOPOPT options. Positional arguments: * length -- int, length of options Returns: * dict -- extracted HOPOPT options
[ "Read", "HOPOPT", "options", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/hopopt.py#L234-L274
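The loop in the record above is a classic TLV walk with one special case: option type 0 (Pad1) carries no length byte. A minimal standalone version of the same walk over a bytes object; the generator name and sample bytes are illustrative:

def walk_options(data):
    """Yield (option_type, option_data) pairs from a block of IPv6 options."""
    i = 0
    while i < len(data):
        code = data[i]
        if code == 0:                        # Pad1: a single zero octet, no length field
            i += 1
            continue
        size = data[i + 1]
        yield code, data[i + 2:i + 2 + size]
        i += 2 + size

# Pad1, then PadN with two data bytes, then Router Alert (type 5) carrying value 0
sample = bytes([0x00, 0x01, 0x02, 0x00, 0x00, 0x05, 0x02, 0x00, 0x00])
print(list(walk_options(sample)))            # [(1, b'\x00\x00'), (5, b'\x00\x00')]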
JarryShaw/PyPCAPKit
src/protocols/internet/hopopt.py
HOPOPT._read_opt_pad
def _read_opt_pad(self, code, *, desc): """Read HOPOPT padding options. Structure of HOPOPT padding options [RFC 8200]: * Pad1 Option: +-+-+-+-+-+-+-+-+ | 0 | +-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.pad.type Option Type 0 0 hopopt.pad.type.value Option Number 0 0 hopopt.pad.type.action Action (00) 0 2 hopopt.pad.type.change Change Flag (0) * PadN Option: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- - - - - - - - - | 1 | Opt Data Len | Option Data +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- - - - - - - - - Octets Bits Name Description 0 0 hopopt.pad.type Option Type 0 0 hopopt.pad.type.value Option Number 0 0 hopopt.pad.type.action Action (00) 0 2 hopopt.pad.type.change Change Flag (0) 1 8 hopopt.opt.length Length of Option Data 2 16 hopopt.pad.padding Padding """ _type = self._read_opt_type(code) if code == 0: opt = dict( desc=desc, type=_type, length=1, ) elif code == 1: _size = self._read_unpack(1) _padn = self._read_fileng(_size) opt = dict( desc=desc, type=_type, length=_size + 2, padding=_padn, ) else: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') return opt
python
def _read_opt_pad(self, code, *, desc): """Read HOPOPT padding options. Structure of HOPOPT padding options [RFC 8200]: * Pad1 Option: +-+-+-+-+-+-+-+-+ | 0 | +-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.pad.type Option Type 0 0 hopopt.pad.type.value Option Number 0 0 hopopt.pad.type.action Action (00) 0 2 hopopt.pad.type.change Change Flag (0) * PadN Option: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- - - - - - - - - | 1 | Opt Data Len | Option Data +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- - - - - - - - - Octets Bits Name Description 0 0 hopopt.pad.type Option Type 0 0 hopopt.pad.type.value Option Number 0 0 hopopt.pad.type.action Action (00) 0 2 hopopt.pad.type.change Change Flag (0) 1 8 hopopt.opt.length Length of Option Data 2 16 hopopt.pad.padding Padding """ _type = self._read_opt_type(code) if code == 0: opt = dict( desc=desc, type=_type, length=1, ) elif code == 1: _size = self._read_unpack(1) _padn = self._read_fileng(_size) opt = dict( desc=desc, type=_type, length=_size + 2, padding=_padn, ) else: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') return opt
[ "def", "_read_opt_pad", "(", "self", ",", "code", ",", "*", ",", "desc", ")", ":", "_type", "=", "self", ".", "_read_opt_type", "(", "code", ")", "if", "code", "==", "0", ":", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "1", ",", ")", "elif", "code", "==", "1", ":", "_size", "=", "self", ".", "_read_unpack", "(", "1", ")", "_padn", "=", "self", ".", "_read_fileng", "(", "_size", ")", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "padding", "=", "_padn", ",", ")", "else", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {code}] invalid format'", ")", "return", "opt" ]
Read HOPOPT padding options. Structure of HOPOPT padding options [RFC 8200]: * Pad1 Option: +-+-+-+-+-+-+-+-+ | 0 | +-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.pad.type Option Type 0 0 hopopt.pad.type.value Option Number 0 0 hopopt.pad.type.action Action (00) 0 2 hopopt.pad.type.change Change Flag (0) * PadN Option: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- - - - - - - - - | 1 | Opt Data Len | Option Data +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- - - - - - - - - Octets Bits Name Description 0 0 hopopt.pad.type Option Type 0 0 hopopt.pad.type.value Option Number 0 0 hopopt.pad.type.action Action (00) 0 2 hopopt.pad.type.change Change Flag (0) 1 8 hopopt.opt.length Length of Option Data 2 16 hopopt.pad.padding Padding
[ "Read", "HOPOPT", "padding", "options", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/hopopt.py#L306-L356
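Pad1 and PadN exist only to keep the following option on its natural alignment, so the inverse of the parser above fits in a few lines. A standalone sketch; the helper name padding_for is illustrative:

def padding_for(gap):
    """Return the Pad1/PadN bytes that fill a gap of `gap` octets (0-7)."""
    if gap == 0:
        return b''
    if gap == 1:
        return b'\x00'                                        # Pad1: one zero octet
    return bytes([0x01, gap - 2]) + b'\x00' * (gap - 2)       # PadN: type, data length, zeros

for gap in range(4):
    print(gap, padding_for(gap).hex())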
JarryShaw/PyPCAPKit
src/protocols/internet/hopopt.py
HOPOPT._read_opt_tun
def _read_opt_tun(self, code, *, desc): """Read HOPOPT Tunnel Encapsulation Limit option. Structure of HOPOPT Tunnel Encapsulation Limit option [RFC 2473]: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Next Header |Hdr Ext Len = 0| Opt Type = 4 |Opt Data Len=1 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Tun Encap Lim |PadN Opt Type=1|Opt Data Len=1 | 0 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.tun.type Option Type 0 0 hopopt.tun.type.value Option Number 0 0 hopopt.tun.type.action Action (00) 0 2 hopopt.tun.type.change Change Flag (0) 1 8 hopopt.tun.length Length of Option Data 2 16 hopopt.tun.limit Tunnel Encapsulation Limit """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size != 1: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _limt = self._read_unpack(1) opt = dict( desc=desc, type=_type, length=_size + 2, limit=_limt, ) return opt
python
def _read_opt_tun(self, code, *, desc): """Read HOPOPT Tunnel Encapsulation Limit option. Structure of HOPOPT Tunnel Encapsulation Limit option [RFC 2473]: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Next Header |Hdr Ext Len = 0| Opt Type = 4 |Opt Data Len=1 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Tun Encap Lim |PadN Opt Type=1|Opt Data Len=1 | 0 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.tun.type Option Type 0 0 hopopt.tun.type.value Option Number 0 0 hopopt.tun.type.action Action (00) 0 2 hopopt.tun.type.change Change Flag (0) 1 8 hopopt.tun.length Length of Option Data 2 16 hopopt.tun.limit Tunnel Encapsulation Limit """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size != 1: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _limt = self._read_unpack(1) opt = dict( desc=desc, type=_type, length=_size + 2, limit=_limt, ) return opt
[ "def", "_read_opt_tun", "(", "self", ",", "code", ",", "*", ",", "desc", ")", ":", "_type", "=", "self", ".", "_read_opt_type", "(", "code", ")", "_size", "=", "self", ".", "_read_unpack", "(", "1", ")", "if", "_size", "!=", "1", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {code}] invalid format'", ")", "_limt", "=", "self", ".", "_read_unpack", "(", "1", ")", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "limit", "=", "_limt", ",", ")", "return", "opt" ]
Read HOPOPT Tunnel Encapsulation Limit option. Structure of HOPOPT Tunnel Encapsulation Limit option [RFC 2473]: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Next Header |Hdr Ext Len = 0| Opt Type = 4 |Opt Data Len=1 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Tun Encap Lim |PadN Opt Type=1|Opt Data Len=1 | 0 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.tun.type Option Type 0 0 hopopt.tun.type.value Option Number 0 0 hopopt.tun.type.action Action (00) 0 2 hopopt.tun.type.change Change Flag (0) 1 8 hopopt.tun.length Length of Option Data 2 16 hopopt.tun.limit Tunnel Encapsulation Limit
[ "Read", "HOPOPT", "Tunnel", "Encapsulation", "Limit", "option", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/hopopt.py#L358-L390
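The Tunnel Encapsulation Limit option is a fixed three-byte TLV (type 4, data length 1, one limit octet, as in the RFC 2473 figure above), so both directions fit in a short sketch; the helper names are illustrative:

import struct

def encode_tun_limit(limit):
    """Build a Tunnel Encapsulation Limit option: type 4, data length 1, limit byte."""
    return struct.pack('!BBB', 0x04, 1, limit)

def decode_tun_limit(blob):
    code, size, limit = struct.unpack('!BBB', blob)
    if code != 0x04 or size != 1:
        raise ValueError('not a Tunnel Encapsulation Limit option')
    return limit

blob = encode_tun_limit(4)
print(blob.hex(), decode_tun_limit(blob))    # 040104 4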
JarryShaw/PyPCAPKit
src/protocols/internet/hopopt.py
HOPOPT._read_opt_ra
def _read_opt_ra(self, code, *, desc): """Read HOPOPT Router Alert option. Structure of HOPOPT Router Alert option [RFC 2711]: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |0 0 0|0 0 1 0 1|0 0 0 0 0 0 1 0| Value (2 octets) | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.ra.type Option Type 0 0 hopopt.ra.type.value Option Number 0 0 hopopt.ra.type.action Action (00) 0 2 hopopt.ra.type.change Change Flag (0) 1 8 hopopt.opt.length Length of Option Data 2 16 hopopt.ra.value Value """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size != 2: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _rval = self._read_unpack(2) if 4 <= _rval <= 35: _dscp = f'Aggregated Reservation Nesting Level {_rval-4}' # [RFC 3175] elif 36 <= _rval <= 67: _dscp = f'QoS NSLP Aggregation Level {_rval-36}' # [RFC 5974] elif 65503 <= _rval <= 65534: _dscp = 'Reserved for experimental use' # [RFC 5350] else: _dscp = _ROUTER_ALERT.get(_rval, 'Unassigned') opt = dict( desc=desc, type=_type, length=_size + 2, value=_rval, alert=_dscp, ) return opt
python
def _read_opt_ra(self, code, *, desc): """Read HOPOPT Router Alert option. Structure of HOPOPT Router Alert option [RFC 2711]: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |0 0 0|0 0 1 0 1|0 0 0 0 0 0 1 0| Value (2 octets) | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.ra.type Option Type 0 0 hopopt.ra.type.value Option Number 0 0 hopopt.ra.type.action Action (00) 0 2 hopopt.ra.type.change Change Flag (0) 1 8 hopopt.opt.length Length of Option Data 2 16 hopopt.ra.value Value """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size != 2: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _rval = self._read_unpack(2) if 4 <= _rval <= 35: _dscp = f'Aggregated Reservation Nesting Level {_rval-4}' # [RFC 3175] elif 36 <= _rval <= 67: _dscp = f'QoS NSLP Aggregation Level {_rval-36}' # [RFC 5974] elif 65503 <= _rval <= 65534: _dscp = 'Reserved for experimental use' # [RFC 5350] else: _dscp = _ROUTER_ALERT.get(_rval, 'Unassigned') opt = dict( desc=desc, type=_type, length=_size + 2, value=_rval, alert=_dscp, ) return opt
[ "def", "_read_opt_ra", "(", "self", ",", "code", ",", "*", ",", "desc", ")", ":", "_type", "=", "self", ".", "_read_opt_type", "(", "code", ")", "_size", "=", "self", ".", "_read_unpack", "(", "1", ")", "if", "_size", "!=", "2", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {code}] invalid format'", ")", "_rval", "=", "self", ".", "_read_unpack", "(", "2", ")", "if", "4", "<=", "_rval", "<=", "35", ":", "_dscp", "=", "f'Aggregated Reservation Nesting Level {_rval-4}'", "# [RFC 3175]", "elif", "36", "<=", "_rval", "<=", "67", ":", "_dscp", "=", "f'QoS NSLP Aggregation Level {_rval-36}'", "# [RFC 5974]", "elif", "65503", "<=", "_rval", "<=", "65534", ":", "_dscp", "=", "'Reserved for experimental use'", "# [RFC 5350]", "else", ":", "_dscp", "=", "_ROUTER_ALERT", ".", "get", "(", "_rval", ",", "'Unassigned'", ")", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "value", "=", "_rval", ",", "alert", "=", "_dscp", ",", ")", "return", "opt" ]
Read HOPOPT Router Alert option. Structure of HOPOPT Router Alert option [RFC 2711]: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |0 0 0|0 0 1 0 1|0 0 0 0 0 0 1 0| Value (2 octets) | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.ra.type Option Type 0 0 hopopt.ra.type.value Option Number 0 0 hopopt.ra.type.action Action (00) 0 2 hopopt.ra.type.change Change Flag (0) 1 8 hopopt.opt.length Length of Option Data 2 16 hopopt.ra.value Value
[ "Read", "HOPOPT", "Router", "Alert", "option", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/hopopt.py#L392-L432
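The range checks in the record above turn the 16-bit Router Alert value into a description. The same classification, lifted into a standalone helper so it can be exercised on raw values; the fallback string is illustrative, since the registry lookup (_ROUTER_ALERT) is internal to PyPCAPKit:

def describe_router_alert(value):
    """Mirror the value ranges used above for the IPv6 Router Alert option."""
    if 4 <= value <= 35:
        return f'Aggregated Reservation Nesting Level {value - 4}'   # RFC 3175
    if 36 <= value <= 67:
        return f'QoS NSLP Aggregation Level {value - 36}'            # RFC 5974
    if 65503 <= value <= 65534:
        return 'Reserved for experimental use'                       # RFC 5350
    return 'Registry-defined or unassigned'

for value in (0, 10, 40, 65510, 70):
    print(value, describe_router_alert(value))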
JarryShaw/PyPCAPKit
src/protocols/internet/hopopt.py
HOPOPT._read_opt_calipso
def _read_opt_calipso(self, code, *, desc): """Read HOPOPT CALIPSO option. Structure of HOPOPT CALIPSO option [RFC 5570]: ------------------------------------------------------------ | Next Header | Hdr Ext Len | Option Type | Option Length| +-------------+---------------+-------------+--------------+ | CALIPSO Domain of Interpretation | +-------------+---------------+-------------+--------------+ | Cmpt Length | Sens Level | Checksum (CRC-16) | +-------------+---------------+-------------+--------------+ | Compartment Bitmap (Optional; variable length) | +-------------+---------------+-------------+--------------+ Octets Bits Name Description 0 0 hopopt.calipso.type Option Type 0 0 hopopt.calipso.type.value Option Number 0 0 hopopt.calipso.type.action Action (00) 0 2 hopopt.calipso.type.change Change Flag (0) 1 8 hopopt.calipso.length Length of Option Data 2 16 hopopt.calipso.domain CALIPSO Domain of Interpretation 6 48 hopopt.calipso.cmpt_len Cmpt Length 7 56 hopopt.calipso.level Sens Level 8 64 hopopt.calipso.chksum Checksum (CRC-16) 9 72 hopopt.calipso.bitmap Compartment Bitmap """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size < 8 and _size % 8 != 0: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _cmpt = self._read_unpack(4) _clen = self._read_unpack(1) if _clen % 2 != 0: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _sens = self._read_unpack(1) _csum = self._read_fileng(2) opt = dict( desc=desc, type=_type, length=_size + 2, domain=_cmpt, cmpt_len=_clen * 4, level=_sens, chksum=_csum, ) if _clen: _bmap = list() for _ in range(_clen // 2): _bmap.append(self._read_binary(8)) opt['bitmap'] = tuple(_bmap) _plen = _size - _clen * 4 - 8 if _plen: self._read_fileng(_plen) return opt
python
def _read_opt_calipso(self, code, *, desc): """Read HOPOPT CALIPSO option. Structure of HOPOPT CALIPSO option [RFC 5570]: ------------------------------------------------------------ | Next Header | Hdr Ext Len | Option Type | Option Length| +-------------+---------------+-------------+--------------+ | CALIPSO Domain of Interpretation | +-------------+---------------+-------------+--------------+ | Cmpt Length | Sens Level | Checksum (CRC-16) | +-------------+---------------+-------------+--------------+ | Compartment Bitmap (Optional; variable length) | +-------------+---------------+-------------+--------------+ Octets Bits Name Description 0 0 hopopt.calipso.type Option Type 0 0 hopopt.calipso.type.value Option Number 0 0 hopopt.calipso.type.action Action (00) 0 2 hopopt.calipso.type.change Change Flag (0) 1 8 hopopt.calipso.length Length of Option Data 2 16 hopopt.calipso.domain CALIPSO Domain of Interpretation 6 48 hopopt.calipso.cmpt_len Cmpt Length 7 56 hopopt.calipso.level Sens Level 8 64 hopopt.calipso.chksum Checksum (CRC-16) 9 72 hopopt.calipso.bitmap Compartment Bitmap """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size < 8 and _size % 8 != 0: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _cmpt = self._read_unpack(4) _clen = self._read_unpack(1) if _clen % 2 != 0: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _sens = self._read_unpack(1) _csum = self._read_fileng(2) opt = dict( desc=desc, type=_type, length=_size + 2, domain=_cmpt, cmpt_len=_clen * 4, level=_sens, chksum=_csum, ) if _clen: _bmap = list() for _ in range(_clen // 2): _bmap.append(self._read_binary(8)) opt['bitmap'] = tuple(_bmap) _plen = _size - _clen * 4 - 8 if _plen: self._read_fileng(_plen) return opt
[ "def", "_read_opt_calipso", "(", "self", ",", "code", ",", "*", ",", "desc", ")", ":", "_type", "=", "self", ".", "_read_opt_type", "(", "code", ")", "_size", "=", "self", ".", "_read_unpack", "(", "1", ")", "if", "_size", "<", "8", "and", "_size", "%", "8", "!=", "0", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {code}] invalid format'", ")", "_cmpt", "=", "self", ".", "_read_unpack", "(", "4", ")", "_clen", "=", "self", ".", "_read_unpack", "(", "1", ")", "if", "_clen", "%", "2", "!=", "0", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {code}] invalid format'", ")", "_sens", "=", "self", ".", "_read_unpack", "(", "1", ")", "_csum", "=", "self", ".", "_read_fileng", "(", "2", ")", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "domain", "=", "_cmpt", ",", "cmpt_len", "=", "_clen", "*", "4", ",", "level", "=", "_sens", ",", "chksum", "=", "_csum", ",", ")", "if", "_clen", ":", "_bmap", "=", "list", "(", ")", "for", "_", "in", "range", "(", "_clen", "//", "2", ")", ":", "_bmap", ".", "append", "(", "self", ".", "_read_binary", "(", "8", ")", ")", "opt", "[", "'bitmap'", "]", "=", "tuple", "(", "_bmap", ")", "_plen", "=", "_size", "-", "_clen", "*", "4", "-", "8", "if", "_plen", ":", "self", ".", "_read_fileng", "(", "_plen", ")", "return", "opt" ]
Read HOPOPT CALIPSO option. Structure of HOPOPT CALIPSO option [RFC 5570]: ------------------------------------------------------------ | Next Header | Hdr Ext Len | Option Type | Option Length| +-------------+---------------+-------------+--------------+ | CALIPSO Domain of Interpretation | +-------------+---------------+-------------+--------------+ | Cmpt Length | Sens Level | Checksum (CRC-16) | +-------------+---------------+-------------+--------------+ | Compartment Bitmap (Optional; variable length) | +-------------+---------------+-------------+--------------+ Octets Bits Name Description 0 0 hopopt.calipso.type Option Type 0 0 hopopt.calipso.type.value Option Number 0 0 hopopt.calipso.type.action Action (00) 0 2 hopopt.calipso.type.change Change Flag (0) 1 8 hopopt.calipso.length Length of Option Data 2 16 hopopt.calipso.domain CALIPSO Domain of Interpretation 6 48 hopopt.calipso.cmpt_len Cmpt Length 7 56 hopopt.calipso.level Sens Level 8 64 hopopt.calipso.chksum Checksum (CRC-16) 9 72 hopopt.calipso.bitmap Compartment Bitmap
[ "Read", "HOPOPT", "CALIPSO", "option", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/hopopt.py#L434-L492
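The CALIPSO option has a fixed eight-byte head (Domain of Interpretation, compartment length, sensitivity level, CRC-16 checksum) followed by an optional bitmap whose length is given in 32-bit words, which is why the record above multiplies the length field by four. A standalone struct-based sketch of the same layout; the sample values are illustrative:

import struct

def parse_calipso(data):
    """Parse CALIPSO option data (the bytes after the option type/length octets)."""
    doi, cmpt_words, level, checksum = struct.unpack_from('!IBBH', data, 0)
    bitmap = data[8:8 + cmpt_words * 4]      # compartment length counts 32-bit words
    return dict(doi=doi, cmpt_len=cmpt_words * 4, level=level,
                checksum=checksum, bitmap=bitmap)

# DOI 1, one bitmap word, sensitivity level 3, zero checksum
sample = struct.pack('!IBBH', 1, 1, 3, 0) + b'\x80\x00\x00\x00'
print(parse_calipso(sample))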
JarryShaw/PyPCAPKit
src/protocols/internet/hopopt.py
HOPOPT._read_opt_smf_dpd
def _read_opt_smf_dpd(self, code, *, desc): """Read HOPOPT SMF_DPD option. Structure of HOPOPT SMF_DPD option [RFC 5570]: * IPv6 SMF_DPD Option Header in I-DPD mode 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ... |0|0|0| 01000 | Opt. Data Len | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |0|TidTy| TidLen| TaggerId (optional) ... | +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | | Identifier ... +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.smf_dpd.type Option Type 0 0 hopopt.smf_dpd.type.value Option Number 0 0 hopopt.smf_dpd.type.action Action (00) 0 2 hopopt.smf_dpd.type.change Change Flag (0) 1 8 hopopt.smf_dpd.length Length of Option Data 2 16 hopopt.smf_dpd.dpd_type DPD Type (0) 2 17 hopopt.smf_dpd.tid_type TaggerID Type 2 20 hopopt.smf_dpd.tid_len TaggerID Length 3 24 hopopt.smf_dpd.tid TaggerID ? ? hopopt.smf_dpd.id Identifier * IPv6 SMF_DPD Option Header in H-DPD Mode 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ... |0|0|0| OptType | Opt. Data Len | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |1| Hash Assist Value (HAV) ... +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.smf_dpd.type Option Type 0 0 hopopt.smf_dpd.type.value Option Number 0 0 hopopt.smf_dpd.type.action Action (00) 0 2 hopopt.smf_dpd.type.change Change Flag (0) 1 8 hopopt.smf_dpd.length Length of Option Data 2 16 hopopt.smf_dpd.dpd_type DPD Type (1) 2 17 hopopt.smf_dpd.hav Hash Assist Value """ _type = self._read_opt_type(code) _size = self._read_unpack(1) _tidd = self._read_binary(1) if _tidd[0] == '0': _mode = 'I-DPD' _tidt = _TID_TYPE.get(_tidd[1:4], 'Unassigned') _tidl = int(_tidd[4:], base=2) if _tidt == 'NULL': if _tidl != 0: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _iden = self._read_fileng(_size-1) opt = dict( desc=desc, type=_type, length=_size + 2, dpd_type=_mode, tid_type=_tidt, tid_len=_tidl, id=_iden, ) elif _tidt == 'IPv4': if _tidl != 3: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _tidf = self._read_fileng(4) _iden = self._read_fileng(_size-4) opt = dict( desc=desc, type=_type, length=_size + 2, dpd_type=_mode, tid_type=_tidt, tid_len=_tidl, tid=ipaddress.ip_address(_tidf), id=_iden, ) elif _tidt == 'IPv6': if _tidl != 15: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _tidf = self._read_fileng(15) _iden = self._read_fileng(_size-15) opt = dict( desc=desc, type=_type, length=_size + 2, dpd_type=_mode, tid_type=_tidt, tid_len=_tidl, tid=ipaddress.ip_address(_tidf), id=_iden, ) else: _tidf = self._read_unpack(_tidl+1) _iden = self._read_fileng(_size-_tidl-2) opt = dict( desc=desc, type=_type, length=_size + 2, dpd_type=_mode, tid_type=_tidt, tid_len=_tidl, tid=_tidf, id=_iden, ) elif _tidd[0] == '1': _data = self._read_binary(_size-1) opt = dict( desc=desc, type=_type, length=_size + 2, dpd_type=_mode, tid_type=_tidt, hav=_tidd[1:] + _data, ) else: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') return opt
python
def _read_opt_smf_dpd(self, code, *, desc): """Read HOPOPT SMF_DPD option. Structure of HOPOPT SMF_DPD option [RFC 5570]: * IPv6 SMF_DPD Option Header in I-DPD mode 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ... |0|0|0| 01000 | Opt. Data Len | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |0|TidTy| TidLen| TaggerId (optional) ... | +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | | Identifier ... +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.smf_dpd.type Option Type 0 0 hopopt.smf_dpd.type.value Option Number 0 0 hopopt.smf_dpd.type.action Action (00) 0 2 hopopt.smf_dpd.type.change Change Flag (0) 1 8 hopopt.smf_dpd.length Length of Option Data 2 16 hopopt.smf_dpd.dpd_type DPD Type (0) 2 17 hopopt.smf_dpd.tid_type TaggerID Type 2 20 hopopt.smf_dpd.tid_len TaggerID Length 3 24 hopopt.smf_dpd.tid TaggerID ? ? hopopt.smf_dpd.id Identifier * IPv6 SMF_DPD Option Header in H-DPD Mode 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ... |0|0|0| OptType | Opt. Data Len | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |1| Hash Assist Value (HAV) ... +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.smf_dpd.type Option Type 0 0 hopopt.smf_dpd.type.value Option Number 0 0 hopopt.smf_dpd.type.action Action (00) 0 2 hopopt.smf_dpd.type.change Change Flag (0) 1 8 hopopt.smf_dpd.length Length of Option Data 2 16 hopopt.smf_dpd.dpd_type DPD Type (1) 2 17 hopopt.smf_dpd.hav Hash Assist Value """ _type = self._read_opt_type(code) _size = self._read_unpack(1) _tidd = self._read_binary(1) if _tidd[0] == '0': _mode = 'I-DPD' _tidt = _TID_TYPE.get(_tidd[1:4], 'Unassigned') _tidl = int(_tidd[4:], base=2) if _tidt == 'NULL': if _tidl != 0: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _iden = self._read_fileng(_size-1) opt = dict( desc=desc, type=_type, length=_size + 2, dpd_type=_mode, tid_type=_tidt, tid_len=_tidl, id=_iden, ) elif _tidt == 'IPv4': if _tidl != 3: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _tidf = self._read_fileng(4) _iden = self._read_fileng(_size-4) opt = dict( desc=desc, type=_type, length=_size + 2, dpd_type=_mode, tid_type=_tidt, tid_len=_tidl, tid=ipaddress.ip_address(_tidf), id=_iden, ) elif _tidt == 'IPv6': if _tidl != 15: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _tidf = self._read_fileng(15) _iden = self._read_fileng(_size-15) opt = dict( desc=desc, type=_type, length=_size + 2, dpd_type=_mode, tid_type=_tidt, tid_len=_tidl, tid=ipaddress.ip_address(_tidf), id=_iden, ) else: _tidf = self._read_unpack(_tidl+1) _iden = self._read_fileng(_size-_tidl-2) opt = dict( desc=desc, type=_type, length=_size + 2, dpd_type=_mode, tid_type=_tidt, tid_len=_tidl, tid=_tidf, id=_iden, ) elif _tidd[0] == '1': _data = self._read_binary(_size-1) opt = dict( desc=desc, type=_type, length=_size + 2, dpd_type=_mode, tid_type=_tidt, hav=_tidd[1:] + _data, ) else: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') return opt
[ "def", "_read_opt_smf_dpd", "(", "self", ",", "code", ",", "*", ",", "desc", ")", ":", "_type", "=", "self", ".", "_read_opt_type", "(", "code", ")", "_size", "=", "self", ".", "_read_unpack", "(", "1", ")", "_tidd", "=", "self", ".", "_read_binary", "(", "1", ")", "if", "_tidd", "[", "0", "]", "==", "'0'", ":", "_mode", "=", "'I-DPD'", "_tidt", "=", "_TID_TYPE", ".", "get", "(", "_tidd", "[", "1", ":", "4", "]", ",", "'Unassigned'", ")", "_tidl", "=", "int", "(", "_tidd", "[", "4", ":", "]", ",", "base", "=", "2", ")", "if", "_tidt", "==", "'NULL'", ":", "if", "_tidl", "!=", "0", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {code}] invalid format'", ")", "_iden", "=", "self", ".", "_read_fileng", "(", "_size", "-", "1", ")", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "dpd_type", "=", "_mode", ",", "tid_type", "=", "_tidt", ",", "tid_len", "=", "_tidl", ",", "id", "=", "_iden", ",", ")", "elif", "_tidt", "==", "'IPv4'", ":", "if", "_tidl", "!=", "3", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {code}] invalid format'", ")", "_tidf", "=", "self", ".", "_read_fileng", "(", "4", ")", "_iden", "=", "self", ".", "_read_fileng", "(", "_size", "-", "4", ")", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "dpd_type", "=", "_mode", ",", "tid_type", "=", "_tidt", ",", "tid_len", "=", "_tidl", ",", "tid", "=", "ipaddress", ".", "ip_address", "(", "_tidf", ")", ",", "id", "=", "_iden", ",", ")", "elif", "_tidt", "==", "'IPv6'", ":", "if", "_tidl", "!=", "15", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {code}] invalid format'", ")", "_tidf", "=", "self", ".", "_read_fileng", "(", "15", ")", "_iden", "=", "self", ".", "_read_fileng", "(", "_size", "-", "15", ")", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "dpd_type", "=", "_mode", ",", "tid_type", "=", "_tidt", ",", "tid_len", "=", "_tidl", ",", "tid", "=", "ipaddress", ".", "ip_address", "(", "_tidf", ")", ",", "id", "=", "_iden", ",", ")", "else", ":", "_tidf", "=", "self", ".", "_read_unpack", "(", "_tidl", "+", "1", ")", "_iden", "=", "self", ".", "_read_fileng", "(", "_size", "-", "_tidl", "-", "2", ")", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "dpd_type", "=", "_mode", ",", "tid_type", "=", "_tidt", ",", "tid_len", "=", "_tidl", ",", "tid", "=", "_tidf", ",", "id", "=", "_iden", ",", ")", "elif", "_tidd", "[", "0", "]", "==", "'1'", ":", "_data", "=", "self", ".", "_read_binary", "(", "_size", "-", "1", ")", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "dpd_type", "=", "_mode", ",", "tid_type", "=", "_tidt", ",", "hav", "=", "_tidd", "[", "1", ":", "]", "+", "_data", ",", ")", "else", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {code}] invalid format'", ")", "return", "opt" ]
Read HOPOPT SMF_DPD option. Structure of HOPOPT SMF_DPD option [RFC 5570]: * IPv6 SMF_DPD Option Header in I-DPD mode 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ... |0|0|0| 01000 | Opt. Data Len | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |0|TidTy| TidLen| TaggerId (optional) ... | +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | | Identifier ... +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.smf_dpd.type Option Type 0 0 hopopt.smf_dpd.type.value Option Number 0 0 hopopt.smf_dpd.type.action Action (00) 0 2 hopopt.smf_dpd.type.change Change Flag (0) 1 8 hopopt.smf_dpd.length Length of Option Data 2 16 hopopt.smf_dpd.dpd_type DPD Type (0) 2 17 hopopt.smf_dpd.tid_type TaggerID Type 2 20 hopopt.smf_dpd.tid_len TaggerID Length 3 24 hopopt.smf_dpd.tid TaggerID ? ? hopopt.smf_dpd.id Identifier * IPv6 SMF_DPD Option Header in H-DPD Mode 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ... |0|0|0| OptType | Opt. Data Len | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |1| Hash Assist Value (HAV) ... +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.smf_dpd.type Option Type 0 0 hopopt.smf_dpd.type.value Option Number 0 0 hopopt.smf_dpd.type.action Action (00) 0 2 hopopt.smf_dpd.type.change Change Flag (0) 1 8 hopopt.smf_dpd.length Length of Option Data 2 16 hopopt.smf_dpd.dpd_type DPD Type (1) 2 17 hopopt.smf_dpd.hav Hash Assist Value
[ "Read", "HOPOPT", "SMF_DPD", "option", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/hopopt.py#L494-L622
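The branching in the record above hinges on the first bit of the option data: 0 selects I-DPD (an optional TaggerId plus an Identifier), 1 selects H-DPD (a hash assist value). A small standalone sketch that decodes just that leading byte; the dictionary keys are illustrative, and reading TidTy value 1 as an IPv4 TaggerId is an assumption consistent with the length check above:

def classify_smf_dpd(first_byte):
    """Split the leading SMF_DPD byte into mode, TaggerId type and TaggerId length."""
    if first_byte & 0x80:                    # high bit set -> H-DPD (hash assist value)
        return dict(mode='H-DPD', hav_high_bits=first_byte & 0x7f)
    tid_type = (first_byte >> 4) & 0x07      # next three bits: TidTy
    tid_len = first_byte & 0x0f              # low four bits: TidLen
    return dict(mode='I-DPD', tid_type=tid_type, tid_len=tid_len)

print(classify_smf_dpd(0x00))    # I-DPD with a NULL TaggerId
print(classify_smf_dpd(0x13))    # I-DPD, TidTy 1, TidLen 3 (a four-byte, i.e. IPv4, TaggerId)
print(classify_smf_dpd(0x80))    # H-DPD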
JarryShaw/PyPCAPKit
src/protocols/internet/hopopt.py
HOPOPT._read_opt_pdm
def _read_opt_pdm(self, code, *, desc): """Read HOPOPT PDM option. Structure of HOPOPT PDM option [RFC 8250]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option Type | Option Length | ScaleDTLR | ScaleDTLS | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | PSN This Packet | PSN Last Received | |-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Delta Time Last Received | Delta Time Last Sent | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.pdm.type Option Type 0 0 hopopt.pdm.type.value Option Number 0 0 hopopt.pdm.type.action Action (00) 0 2 hopopt.pdm.type.change Change Flag (0) 1 8 hopopt.pdm.length Length of Option Data 2 16 hopopt.pdm.scaledtlr Scale Delta Time Last Received 3 24 hopopt.pdm.scaledtls Scale Delta Time Last Sent 4 32 hopopt.pdm.psntp Packet Sequence Number This Packet 6 48 hopopt.pdm.psnlr Packet Sequence Number Last Received 8 64 hopopt.pdm.deltatlr Delta Time Last Received 10 80 hopopt.pdm.deltatls Delta Time Last Sent """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size != 10: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _stlr = self._read_unpack(1) _stls = self._read_unpack(1) _psnt = self._read_unpack(2) _psnl = self._read_unpack(2) _dtlr = self._read_unpack(2) _dtls = self._read_unpack(2) opt = dict( desc=desc, type=_type, length=_size + 2, scaledtlr=datetime.timedelta(seconds=_stlr), scaledtls=datetime.timedelta(seconds=_stls), psntp=_psnt, psnlr=_psnl, deltatlr=datetime.timedelta(seconds=_dtlr), deltatls=datetime.timedelta(seconds=_dtls), ) return opt
python
def _read_opt_pdm(self, code, *, desc): """Read HOPOPT PDM option. Structure of HOPOPT PDM option [RFC 8250]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option Type | Option Length | ScaleDTLR | ScaleDTLS | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | PSN This Packet | PSN Last Received | |-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Delta Time Last Received | Delta Time Last Sent | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.pdm.type Option Type 0 0 hopopt.pdm.type.value Option Number 0 0 hopopt.pdm.type.action Action (00) 0 2 hopopt.pdm.type.change Change Flag (0) 1 8 hopopt.pdm.length Length of Option Data 2 16 hopopt.pdm.scaledtlr Scale Delta Time Last Received 3 24 hopopt.pdm.scaledtls Scale Delta Time Last Sent 4 32 hopopt.pdm.psntp Packet Sequence Number This Packet 6 48 hopopt.pdm.psnlr Packet Sequence Number Last Received 8 64 hopopt.pdm.deltatlr Delta Time Last Received 10 80 hopopt.pdm.deltatls Delta Time Last Sent """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size != 10: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _stlr = self._read_unpack(1) _stls = self._read_unpack(1) _psnt = self._read_unpack(2) _psnl = self._read_unpack(2) _dtlr = self._read_unpack(2) _dtls = self._read_unpack(2) opt = dict( desc=desc, type=_type, length=_size + 2, scaledtlr=datetime.timedelta(seconds=_stlr), scaledtls=datetime.timedelta(seconds=_stls), psntp=_psnt, psnlr=_psnl, deltatlr=datetime.timedelta(seconds=_dtlr), deltatls=datetime.timedelta(seconds=_dtls), ) return opt
[ "def", "_read_opt_pdm", "(", "self", ",", "code", ",", "*", ",", "desc", ")", ":", "_type", "=", "self", ".", "_read_opt_type", "(", "code", ")", "_size", "=", "self", ".", "_read_unpack", "(", "1", ")", "if", "_size", "!=", "10", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {code}] invalid format'", ")", "_stlr", "=", "self", ".", "_read_unpack", "(", "1", ")", "_stls", "=", "self", ".", "_read_unpack", "(", "1", ")", "_psnt", "=", "self", ".", "_read_unpack", "(", "2", ")", "_psnl", "=", "self", ".", "_read_unpack", "(", "2", ")", "_dtlr", "=", "self", ".", "_read_unpack", "(", "2", ")", "_dtls", "=", "self", ".", "_read_unpack", "(", "2", ")", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "scaledtlr", "=", "datetime", ".", "timedelta", "(", "seconds", "=", "_stlr", ")", ",", "scaledtls", "=", "datetime", ".", "timedelta", "(", "seconds", "=", "_stls", ")", ",", "psntp", "=", "_psnt", ",", "psnlr", "=", "_psnl", ",", "deltatlr", "=", "datetime", ".", "timedelta", "(", "seconds", "=", "_dtlr", ")", ",", "deltatls", "=", "datetime", ".", "timedelta", "(", "seconds", "=", "_dtls", ")", ",", ")", "return", "opt" ]
Read HOPOPT PDM option. Structure of HOPOPT PDM option [RFC 8250]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option Type | Option Length | ScaleDTLR | ScaleDTLS | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | PSN This Packet | PSN Last Received | |-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Delta Time Last Received | Delta Time Last Sent | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.pdm.type Option Type 0 0 hopopt.pdm.type.value Option Number 0 0 hopopt.pdm.type.action Action (00) 0 2 hopopt.pdm.type.change Change Flag (0) 1 8 hopopt.pdm.length Length of Option Data 2 16 hopopt.pdm.scaledtlr Scale Delta Time Last Received 3 24 hopopt.pdm.scaledtls Scale Delta Time Last Sent 4 32 hopopt.pdm.psntp Packet Sequence Number This Packet 6 48 hopopt.pdm.psnlr Packet Sequence Number Last Received 8 64 hopopt.pdm.deltatlr Delta Time Last Received 10 80 hopopt.pdm.deltatls Delta Time Last Sent
[ "Read", "HOPOPT", "PDM", "option", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/hopopt.py#L624-L675
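PDM packs two scale factors, two packet sequence numbers and two scaled time deltas into exactly ten bytes of option data, which is the length the record above insists on. A standalone sketch of the unpacking, leaving the deltas in their raw scaled form (converting a scale/value pair into wall-clock time is out of scope here); the field order follows the figure above:

import struct

def parse_pdm(data):
    """Unpack PDM option data: scales, packet sequence numbers and scaled time deltas."""
    (scale_dtlr, scale_dtls,
     psn_this, psn_last,
     delta_tlr, delta_tls) = struct.unpack('!BBHHHH', data)
    return dict(scaledtlr=scale_dtlr, scaledtls=scale_dtls,
                psntp=psn_this, psnlr=psn_last,
                deltatlr=delta_tlr, deltatls=delta_tls)

sample = struct.pack('!BBHHHH', 2, 0, 1000, 999, 512, 0)
print(parse_pdm(sample))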
JarryShaw/PyPCAPKit
src/protocols/internet/hopopt.py
HOPOPT._read_opt_qs
def _read_opt_qs(self, code, *, desc): """Read HOPOPT Quick Start option. Structure of HOPOPT Quick-Start option [RFC 4782]: * A Quick-Start Request. 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option | Length=6 | Func. | Rate | QS TTL | | | | 0000 |Request| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | QS Nonce | R | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * Report of Approved Rate. 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option | Length=6 | Func. | Rate | Not Used | | | | 1000 | Report| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | QS Nonce | R | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.qs.type Option Type 0 0 hopopt.qs.type.value Option Number 0 0 hopopt.qs.type.action Action (00) 0 2 hopopt.qs.type.change Change Flag (1) 1 8 hopopt.qs.length Length of Option Data 2 16 hopopt.qs.func Function (0/8) 2 20 hopopt.qs.rate Rate Request / Report (in Kbps) 3 24 hopopt.qs.ttl QS TTL / None 4 32 hopopt.qs.nounce QS Nounce 7 62 - Reserved """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size != 6: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _fcrr = self._read_binary(1) _func = int(_fcrr[:4], base=2) _rate = int(_fcrr[4:], base=2) _ttlv = self._read_unpack(1) _nonr = self._read_binary(4) _qsnn = int(_nonr[:30], base=2) if _func != 0 and _func != 8: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') data = dict( type=_type, length=_size + 2, func=_QS_FUNC.get(_func), rate=40000 * (2 ** _rate) / 1000, ttl=None if _func else _rate, nounce=_qsnn, ) return data
python
def _read_opt_qs(self, code, *, desc): """Read HOPOPT Quick Start option. Structure of HOPOPT Quick-Start option [RFC 4782]: * A Quick-Start Request. 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option | Length=6 | Func. | Rate | QS TTL | | | | 0000 |Request| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | QS Nonce | R | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * Report of Approved Rate. 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option | Length=6 | Func. | Rate | Not Used | | | | 1000 | Report| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | QS Nonce | R | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.qs.type Option Type 0 0 hopopt.qs.type.value Option Number 0 0 hopopt.qs.type.action Action (00) 0 2 hopopt.qs.type.change Change Flag (1) 1 8 hopopt.qs.length Length of Option Data 2 16 hopopt.qs.func Function (0/8) 2 20 hopopt.qs.rate Rate Request / Report (in Kbps) 3 24 hopopt.qs.ttl QS TTL / None 4 32 hopopt.qs.nounce QS Nounce 7 62 - Reserved """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size != 6: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _fcrr = self._read_binary(1) _func = int(_fcrr[:4], base=2) _rate = int(_fcrr[4:], base=2) _ttlv = self._read_unpack(1) _nonr = self._read_binary(4) _qsnn = int(_nonr[:30], base=2) if _func != 0 and _func != 8: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') data = dict( type=_type, length=_size + 2, func=_QS_FUNC.get(_func), rate=40000 * (2 ** _rate) / 1000, ttl=None if _func else _rate, nounce=_qsnn, ) return data
[ "def", "_read_opt_qs", "(", "self", ",", "code", ",", "*", ",", "desc", ")", ":", "_type", "=", "self", ".", "_read_opt_type", "(", "code", ")", "_size", "=", "self", ".", "_read_unpack", "(", "1", ")", "if", "_size", "!=", "6", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {code}] invalid format'", ")", "_fcrr", "=", "self", ".", "_read_binary", "(", "1", ")", "_func", "=", "int", "(", "_fcrr", "[", ":", "4", "]", ",", "base", "=", "2", ")", "_rate", "=", "int", "(", "_fcrr", "[", "4", ":", "]", ",", "base", "=", "2", ")", "_ttlv", "=", "self", ".", "_read_unpack", "(", "1", ")", "_nonr", "=", "self", ".", "_read_binary", "(", "4", ")", "_qsnn", "=", "int", "(", "_nonr", "[", ":", "30", "]", ",", "base", "=", "2", ")", "if", "_func", "!=", "0", "and", "_func", "!=", "8", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {code}] invalid format'", ")", "data", "=", "dict", "(", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "func", "=", "_QS_FUNC", ".", "get", "(", "_func", ")", ",", "rate", "=", "40000", "*", "(", "2", "**", "_rate", ")", "/", "1000", ",", "ttl", "=", "None", "if", "_func", "else", "_rate", ",", "nounce", "=", "_qsnn", ",", ")", "return", "data" ]
Read HOPOPT Quick Start option. Structure of HOPOPT Quick-Start option [RFC 4782]: * A Quick-Start Request. 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option | Length=6 | Func. | Rate | QS TTL | | | | 0000 |Request| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | QS Nonce | R | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * Report of Approved Rate. 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option | Length=6 | Func. | Rate | Not Used | | | | 1000 | Report| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | QS Nonce | R | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.qs.type Option Type 0 0 hopopt.qs.type.value Option Number 0 0 hopopt.qs.type.action Action (00) 0 2 hopopt.qs.type.change Change Flag (1) 1 8 hopopt.qs.length Length of Option Data 2 16 hopopt.qs.func Function (0/8) 2 20 hopopt.qs.rate Rate Request / Report (in Kbps) 3 24 hopopt.qs.ttl QS TTL / None 4 32 hopopt.qs.nounce QS Nounce 7 62 - Reserved
[ "Read", "HOPOPT", "Quick", "Start", "option", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/hopopt.py#L677-L737
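To make the Quick-Start layout above easier to follow, here is a minimal standalone sketch, not taken from the repository record, of how the option body could be decoded from raw bytes with the standard struct module; the name parse_qs_option and its return keys are illustrative only. Note that the recorded function reads the QS TTL octet into _ttlv but fills the returned ttl field from the rate nibble, whereas the sketch below follows RFC 4782 and reports the TTL octet for Rate Request (Func = 0) options.

import struct

def parse_qs_option(data: bytes) -> dict:
    # data starts at the Option Type octet; a Quick-Start option is 8 octets in total
    opt_type, length, func_rate, qs_ttl = struct.unpack('!4B', data[:4])
    if length != 6:
        raise ValueError('Quick-Start option data length must be 6')
    func = func_rate >> 4      # upper nibble: 0 = Rate Request, 8 = Rate Report
    rate = func_rate & 0x0F    # lower nibble: rate exponent N
    if func not in (0, 8):
        raise ValueError('unknown Quick-Start function')
    nonce_word, = struct.unpack('!I', data[4:8])
    return {
        'func': func,
        'rate_kbps': 40_000 * (2 ** rate) / 1000,  # same formula as the record (40 * 2**N Kbps)
        'ttl': qs_ttl if func == 0 else None,      # QS TTL is only meaningful in a request
        'nonce': nonce_word >> 2,                  # upper 30 bits; the low 2 bits are reserved
    }

For instance, parse_qs_option(bytes.fromhex('2606014012345678')) yields a Rate Request of 80.0 Kbps with QS TTL 64.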
JarryShaw/PyPCAPKit
src/protocols/internet/hopopt.py
HOPOPT._read_opt_rpl
def _read_opt_rpl(self, code, *, desc): """Read HOPOPT RPL option. Structure of HOPOPT RPL option [RFC 6553]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option Type | Opt Data Len | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |O|R|F|0|0|0|0|0| RPLInstanceID | SenderRank | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | (sub-TLVs) | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.rpl.type Option Type 0 0 hopopt.rpl.type.value Option Number 0 0 hopopt.rpl.type.action Action (01) 0 2 hopopt.rpl.type.change Change Flag (1) 1 8 hopopt.rpl.length Length of Option Data 2 16 hopopt.rpl.flags RPL Option Flags 2 16 hopopt.rpl.flags.down Down Flag 2 17 hopopt.rpl.flags.rank_error Rank-Error Flag 2 18 hopopt.rpl.flags.fwd_error Forwarding-Error Flag 3 24 hopopt.rpl.id RPLInstanceID 4 32 hopopt.rpl.rank SenderRank 6 48 hopopt.rpl.data Sub-TLVs """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size < 4: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _flag = self._read_binary(1) _rpld = self._read_unpack(1) _rank = self._read_unpack(2) opt = dict( desc=desc, type=_type, length=_size + 2, flags=dict( down=True if int(_flag[0], base=2) else False, rank_error=True if int(_flag[1], base=2) else False, fwd_error=True if int(_flag[2], base=2) else False, ), id=_rpld, rank=_rank, ) if _size > 4: opt['data'] = self._read_fileng(_size-4) return opt
python
def _read_opt_rpl(self, code, *, desc): """Read HOPOPT RPL option. Structure of HOPOPT RPL option [RFC 6553]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option Type | Opt Data Len | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |O|R|F|0|0|0|0|0| RPLInstanceID | SenderRank | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | (sub-TLVs) | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.rpl.type Option Type 0 0 hopopt.rpl.type.value Option Number 0 0 hopopt.rpl.type.action Action (01) 0 2 hopopt.rpl.type.change Change Flag (1) 1 8 hopopt.rpl.length Length of Option Data 2 16 hopopt.rpl.flags RPL Option Flags 2 16 hopopt.rpl.flags.down Down Flag 2 17 hopopt.rpl.flags.rank_error Rank-Error Flag 2 18 hopopt.rpl.flags.fwd_error Forwarding-Error Flag 3 24 hopopt.rpl.id RPLInstanceID 4 32 hopopt.rpl.rank SenderRank 6 48 hopopt.rpl.data Sub-TLVs """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size < 4: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _flag = self._read_binary(1) _rpld = self._read_unpack(1) _rank = self._read_unpack(2) opt = dict( desc=desc, type=_type, length=_size + 2, flags=dict( down=True if int(_flag[0], base=2) else False, rank_error=True if int(_flag[1], base=2) else False, fwd_error=True if int(_flag[2], base=2) else False, ), id=_rpld, rank=_rank, ) if _size > 4: opt['data'] = self._read_fileng(_size-4) return opt
[ "def", "_read_opt_rpl", "(", "self", ",", "code", ",", "*", ",", "desc", ")", ":", "_type", "=", "self", ".", "_read_opt_type", "(", "code", ")", "_size", "=", "self", ".", "_read_unpack", "(", "1", ")", "if", "_size", "<", "4", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {code}] invalid format'", ")", "_flag", "=", "self", ".", "_read_binary", "(", "1", ")", "_rpld", "=", "self", ".", "_read_unpack", "(", "1", ")", "_rank", "=", "self", ".", "_read_unpack", "(", "2", ")", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "flags", "=", "dict", "(", "down", "=", "True", "if", "int", "(", "_flag", "[", "0", "]", ",", "base", "=", "2", ")", "else", "False", ",", "rank_error", "=", "True", "if", "int", "(", "_flag", "[", "1", "]", ",", "base", "=", "2", ")", "else", "False", ",", "fwd_error", "=", "True", "if", "int", "(", "_flag", "[", "2", "]", ",", "base", "=", "2", ")", "else", "False", ",", ")", ",", "id", "=", "_rpld", ",", "rank", "=", "_rank", ",", ")", "if", "_size", ">", "4", ":", "opt", "[", "'data'", "]", "=", "self", ".", "_read_fileng", "(", "_size", "-", "4", ")", "return", "opt" ]
Read HOPOPT RPL option. Structure of HOPOPT RPL option [RFC 6553]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option Type | Opt Data Len | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |O|R|F|0|0|0|0|0| RPLInstanceID | SenderRank | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | (sub-TLVs) | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.rpl.type Option Type 0 0 hopopt.rpl.type.value Option Number 0 0 hopopt.rpl.type.action Action (01) 0 2 hopopt.rpl.type.change Change Flag (1) 1 8 hopopt.rpl.length Length of Option Data 2 16 hopopt.rpl.flags RPL Option Flags 2 16 hopopt.rpl.flags.down Down Flag 2 17 hopopt.rpl.flags.rank_error Rank-Error Flag 2 18 hopopt.rpl.flags.fwd_error Forwarding-Error Flag 3 24 hopopt.rpl.id RPLInstanceID 4 32 hopopt.rpl.rank SenderRank 6 48 hopopt.rpl.data Sub-TLVs
[ "Read", "HOPOPT", "RPL", "option", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/hopopt.py#L739-L792
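As a companion to the RPL option record above, a hedged sketch (parse_rpl_option is a made-up name, not the library's API) of how the Down, Rank-Error and Forwarding-Error bits and the fixed fields from RFC 6553 could be unpacked:

import struct

def parse_rpl_option(data: bytes) -> dict:
    # data starts at the Option Type octet of the RPL option (RFC 6553)
    opt_type, length, flags, instance_id = struct.unpack('!4B', data[:4])
    if length < 4:
        raise ValueError('RPL option data length must be at least 4')
    sender_rank, = struct.unpack('!H', data[4:6])
    return {
        'down': bool(flags & 0x80),        # 'O' bit
        'rank_error': bool(flags & 0x40),  # 'R' bit
        'fwd_error': bool(flags & 0x20),   # 'F' bit
        'instance_id': instance_id,
        'sender_rank': sender_rank,
        'sub_tlvs': data[6:2 + length],    # optional sub-TLVs after the fixed fields
    }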
JarryShaw/PyPCAPKit
src/protocols/internet/hopopt.py
HOPOPT._read_opt_ilnp
def _read_opt_ilnp(self, code, *, desc): """Read HOPOPT ILNP Nonce option. Structure of HOPOPT ILNP Nonce option [RFC 6744]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Next Header | Hdr Ext Len | Option Type | Option Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ / Nonce Value / +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.ilnp.type Option Type 0 0 hopopt.ilnp.type.value Option Number 0 0 hopopt.ilnp.type.action Action (10) 0 2 hopopt.ilnp.type.change Change Flag (0) 1 8 hopopt.ilnp.length Length of Option Data 2 16 hopopt.ilnp.value Nonce Value """ _type = self._read_opt_type(code) _size = self._read_unpack(1) _nval = self._read_fileng(_size) opt = dict( desc=desc, type=_type, length=_size + 2, value=_nval, ) return opt
python
def _read_opt_ilnp(self, code, *, desc): """Read HOPOPT ILNP Nonce option. Structure of HOPOPT ILNP Nonce option [RFC 6744]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Next Header | Hdr Ext Len | Option Type | Option Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ / Nonce Value / +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.ilnp.type Option Type 0 0 hopopt.ilnp.type.value Option Number 0 0 hopopt.ilnp.type.action Action (10) 0 2 hopopt.ilnp.type.change Change Flag (0) 1 8 hopopt.ilnp.length Length of Option Data 2 16 hopopt.ilnp.value Nonce Value """ _type = self._read_opt_type(code) _size = self._read_unpack(1) _nval = self._read_fileng(_size) opt = dict( desc=desc, type=_type, length=_size + 2, value=_nval, ) return opt
[ "def", "_read_opt_ilnp", "(", "self", ",", "code", ",", "*", ",", "desc", ")", ":", "_type", "=", "self", ".", "_read_opt_type", "(", "code", ")", "_size", "=", "self", ".", "_read_unpack", "(", "1", ")", "_nval", "=", "self", ".", "_read_fileng", "(", "_size", ")", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "value", "=", "_nval", ",", ")", "return", "opt" ]
Read HOPOPT ILNP Nonce option. Structure of HOPOPT ILNP Nonce option [RFC 6744]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Next Header | Hdr Ext Len | Option Type | Option Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ / Nonce Value / +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.ilnp.type Option Type 0 0 hopopt.ilnp.type.value Option Number 0 0 hopopt.ilnp.type.action Action (10) 0 2 hopopt.ilnp.type.change Change Flag (0) 1 8 hopopt.ilnp.length Length of Option Data 2 16 hopopt.ilnp.value Nonce Value
[ "Read", "HOPOPT", "ILNP", "Nonce", "option", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/hopopt.py#L865-L897
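The ILNP Nonce option above is simply a length-prefixed opaque value; a tiny illustrative helper (the name parse_ilnp_nonce is hypothetical) could read it as:

def parse_ilnp_nonce(data: bytes) -> int:
    # data starts at the Option Type octet of the ILNP Nonce option (RFC 6744)
    length = data[1]                      # variable-length nonce value
    nonce = data[2:2 + length]
    return int.from_bytes(nonce, 'big')   # opaque value, shown here as a big-endian integer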
JarryShaw/PyPCAPKit
src/protocols/internet/hopopt.py
HOPOPT._read_opt_lio
def _read_opt_lio(self, code, *, desc): """Read HOPOPT Line-Identification option. Structure of HOPOPT Line-Identification option [RFC 6788]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option Type | Option Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | LineIDLen | Line ID... +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.lio.type Option Type 0 0 hopopt.lio.type.value Option Number 0 0 hopopt.lio.type.action Action (10) 0 2 hopopt.lio.type.change Change Flag (0) 1 8 hopopt.lio.length Length of Option Data 2 16 hopopt.lio.lid_len Line ID Length 3 24 hopopt.lio.lid Line ID """ _type = self._read_opt_type(code) _size = self._read_unpack(1) _llen = self._read_unpack(1) _line = self._read_fileng(_llen) opt = dict( desc=desc, type=_type, length=_size + 2, lid_len=_llen, lid=_line, ) _plen = _size - _llen if _plen: self._read_fileng(_plen) return opt
python
def _read_opt_lio(self, code, *, desc): """Read HOPOPT Line-Identification option. Structure of HOPOPT Line-Identification option [RFC 6788]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option Type | Option Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | LineIDLen | Line ID... +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.lio.type Option Type 0 0 hopopt.lio.type.value Option Number 0 0 hopopt.lio.type.action Action (10) 0 2 hopopt.lio.type.change Change Flag (0) 1 8 hopopt.lio.length Length of Option Data 2 16 hopopt.lio.lid_len Line ID Length 3 24 hopopt.lio.lid Line ID """ _type = self._read_opt_type(code) _size = self._read_unpack(1) _llen = self._read_unpack(1) _line = self._read_fileng(_llen) opt = dict( desc=desc, type=_type, length=_size + 2, lid_len=_llen, lid=_line, ) _plen = _size - _llen if _plen: self._read_fileng(_plen) return opt
[ "def", "_read_opt_lio", "(", "self", ",", "code", ",", "*", ",", "desc", ")", ":", "_type", "=", "self", ".", "_read_opt_type", "(", "code", ")", "_size", "=", "self", ".", "_read_unpack", "(", "1", ")", "_llen", "=", "self", ".", "_read_unpack", "(", "1", ")", "_line", "=", "self", ".", "_read_fileng", "(", "_llen", ")", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "lid_len", "=", "_llen", ",", "lid", "=", "_line", ",", ")", "_plen", "=", "_size", "-", "_llen", "if", "_plen", ":", "self", ".", "_read_fileng", "(", "_plen", ")", "return", "opt" ]
Read HOPOPT Line-Identification option. Structure of HOPOPT Line-Identification option [RFC 6788]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option Type | Option Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | LineIDLen | Line ID... +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.lio.type Option Type 0 0 hopopt.lio.type.value Option Number 0 0 hopopt.lio.type.action Action (10) 0 2 hopopt.lio.type.change Change Flag (0) 1 8 hopopt.lio.length Length of Option Data 2 16 hopopt.lio.lid_len Line ID Length 3 24 hopopt.lio.lid Line ID
[ "Read", "HOPOPT", "Line", "-", "Identification", "option", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/hopopt.py#L899-L938
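For the Line-Identification option above, a minimal sketch (parse_line_identification is an illustrative name) of reading the length-prefixed Line ID:

def parse_line_identification(data: bytes) -> bytes:
    # data starts at the Option Type octet of the Line-Identification option (RFC 6788)
    opt_len = data[1]                 # length of the option data
    lid_len = data[2]                 # length of the Line ID itself
    if lid_len + 1 > opt_len:
        raise ValueError('LineIDLen exceeds the option data length')
    return data[3:3 + lid_len]        # the Line ID; anything up to opt_len beyond it is ignored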
JarryShaw/PyPCAPKit
src/protocols/internet/hopopt.py
HOPOPT._read_opt_jumbo
def _read_opt_jumbo(self, code, *, desc): """Read HOPOPT Jumbo Payload option. Structure of HOPOPT Jumbo Payload option [RFC 2675]: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option Type | Opt Data Len | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Jumbo Payload Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.jumbo.type Option Type 0 0 hopopt.jumbo.type.value Option Number 0 0 hopopt.jumbo.type.action Action (11) 0 2 hopopt.jumbo.type.change Change Flag (0) 1 8 hopopt.jumbo.length Length of Option Data 2 16 hopopt.jumbo.payload_len Jumbo Payload Length """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size != 4: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _jlen = self._read_unpack(4) opt = dict( desc=desc, type=_type, length=_size + 2, payload_len=_jlen, ) return opt
python
def _read_opt_jumbo(self, code, *, desc): """Read HOPOPT Jumbo Payload option. Structure of HOPOPT Jumbo Payload option [RFC 2675]: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option Type | Opt Data Len | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Jumbo Payload Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.jumbo.type Option Type 0 0 hopopt.jumbo.type.value Option Number 0 0 hopopt.jumbo.type.action Action (11) 0 2 hopopt.jumbo.type.change Change Flag (0) 1 8 hopopt.jumbo.length Length of Option Data 2 16 hopopt.jumbo.payload_len Jumbo Payload Length """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size != 4: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _jlen = self._read_unpack(4) opt = dict( desc=desc, type=_type, length=_size + 2, payload_len=_jlen, ) return opt
[ "def", "_read_opt_jumbo", "(", "self", ",", "code", ",", "*", ",", "desc", ")", ":", "_type", "=", "self", ".", "_read_opt_type", "(", "code", ")", "_size", "=", "self", ".", "_read_unpack", "(", "1", ")", "if", "_size", "!=", "4", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {code}] invalid format'", ")", "_jlen", "=", "self", ".", "_read_unpack", "(", "4", ")", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "payload_len", "=", "_jlen", ",", ")", "return", "opt" ]
Read HOPOPT Jumbo Payload option. Structure of HOPOPT Jumbo Payload option [RFC 2675]: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option Type | Opt Data Len | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Jumbo Payload Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.jumbo.type Option Type 0 0 hopopt.jumbo.type.value Option Number 0 0 hopopt.jumbo.type.action Action (11) 0 2 hopopt.jumbo.type.change Change Flag (0) 1 8 hopopt.jumbo.length Length of Option Data 2 16 hopopt.jumbo.payload_len Jumbo Payload Length
[ "Read", "HOPOPT", "Jumbo", "Payload", "option", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/hopopt.py#L940-L972
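The Jumbo Payload option above carries a single 32-bit length; a short sketch (parse_jumbo_payload is illustrative) including the RFC 2675 validity check that the recorded function does not perform:

import struct

def parse_jumbo_payload(data: bytes) -> int:
    # data starts at the Option Type octet of the Jumbo Payload option (RFC 2675)
    if data[1] != 4:
        raise ValueError('Jumbo Payload option data length must be 4')
    payload_len, = struct.unpack('!I', data[2:6])
    if payload_len <= 65535:
        # RFC 2675: the Jumbo Payload Length must be greater than 65,535
        raise ValueError('Jumbo Payload Length must be greater than 65535')
    return payload_len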
JarryShaw/PyPCAPKit
src/protocols/internet/hopopt.py
HOPOPT._read_opt_home
def _read_opt_home(self, code, *, desc): """Read HOPOPT Home Address option. Structure of HOPOPT Home Address option [RFC 6275]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option Type | Option Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | | + + | | + Home Address + | | + + | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.home.type Option Type 0 0 hopopt.home.type.value Option Number 0 0 hopopt.home.type.action Action (11) 0 2 hopopt.home.type.change Change Flag (0) 1 8 hopopt.home.length Length of Option Data 2 16 hopopt.home.ip Home Address """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size != 16: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _addr = self._read_fileng(16) opt = dict( desc=desc, type=_type, length=_size + 2, ip=ipaddress.ip_address(_addr), ) return opt
python
def _read_opt_home(self, code, *, desc): """Read HOPOPT Home Address option. Structure of HOPOPT Home Address option [RFC 6275]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option Type | Option Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | | + + | | + Home Address + | | + + | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.home.type Option Type 0 0 hopopt.home.type.value Option Number 0 0 hopopt.home.type.action Action (11) 0 2 hopopt.home.type.change Change Flag (0) 1 8 hopopt.home.length Length of Option Data 2 16 hopopt.home.ip Home Address """ _type = self._read_opt_type(code) _size = self._read_unpack(1) if _size != 16: raise ProtocolError(f'{self.alias}: [Optno {code}] invalid format') _addr = self._read_fileng(16) opt = dict( desc=desc, type=_type, length=_size + 2, ip=ipaddress.ip_address(_addr), ) return opt
[ "def", "_read_opt_home", "(", "self", ",", "code", ",", "*", ",", "desc", ")", ":", "_type", "=", "self", ".", "_read_opt_type", "(", "code", ")", "_size", "=", "self", ".", "_read_unpack", "(", "1", ")", "if", "_size", "!=", "16", ":", "raise", "ProtocolError", "(", "f'{self.alias}: [Optno {code}] invalid format'", ")", "_addr", "=", "self", ".", "_read_fileng", "(", "16", ")", "opt", "=", "dict", "(", "desc", "=", "desc", ",", "type", "=", "_type", ",", "length", "=", "_size", "+", "2", ",", "ip", "=", "ipaddress", ".", "ip_address", "(", "_addr", ")", ",", ")", "return", "opt" ]
Read HOPOPT Home Address option. Structure of HOPOPT Home Address option [RFC 6275]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Option Type | Option Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | | + + | | + Home Address + | | + + | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hopopt.home.type Option Type 0 0 hopopt.home.type.value Option Number 0 0 hopopt.home.type.action Action (11) 0 2 hopopt.home.type.change Change Flag (0) 1 8 hopopt.home.length Length of Option Data 2 16 hopopt.home.ip Home Address
[ "Read", "HOPOPT", "Home", "Address", "option", "." ]
train
https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/hopopt.py#L974-L1014
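Finally, the Home Address option above is a fixed 16-octet IPv6 address; a brief sketch (parse_home_address is a hypothetical name) using the standard ipaddress module, as the recorded function itself does:

import ipaddress

def parse_home_address(data: bytes) -> ipaddress.IPv6Address:
    # data starts at the Option Type octet of the Home Address option (RFC 6275)
    if data[1] != 16:
        raise ValueError('Home Address option data length must be 16')
    return ipaddress.IPv6Address(data[2:18])   # 16-byte packed form of the home address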