content
stringlengths 1
1.04M
| input_ids
sequencelengths 1
774k
| ratio_char_token
float64 0.38
22.9
| token_count
int64 1
774k
|
---|---|---|---|
import re,sys, json
knownRetTypes = {
"V" : "Void" ,
"Z" : "boolean",
"B" : "byte",
"S" : "short",
"C" : "char",
"I" : "int",
"J" : "long",
"F" :"float",
"D" : "double"
}
if __name__== "__main__":
if len(sys.argv) > 1:
print("parsing descriptors of file " + sys.argv[1] )
parseDescriptorsFile(sys.argv[1])
else:
print ("arg required ( filename )")
| [
198,
11748,
302,
11,
17597,
11,
33918,
198,
198,
4002,
9781,
31431,
220,
796,
1391,
198,
197,
1,
53,
1,
1058,
366,
53,
1868,
1,
837,
198,
197,
1,
57,
1,
197,
25,
366,
2127,
21052,
1600,
198,
197,
1,
33,
1,
197,
25,
366,
26327,
1600,
198,
197,
1,
50,
1,
197,
25,
366,
19509,
1600,
198,
197,
1,
34,
1,
197,
25,
366,
10641,
1600,
198,
197,
1,
40,
1,
197,
25,
366,
600,
1600,
198,
197,
1,
41,
1,
197,
25,
366,
6511,
1600,
198,
197,
1,
37,
1,
197,
11097,
22468,
1600,
198,
197,
1,
35,
1,
197,
25,
366,
23352,
1,
198,
92,
628,
197,
628,
628,
198,
361,
11593,
3672,
834,
855,
366,
834,
12417,
834,
1298,
198,
197,
361,
18896,
7,
17597,
13,
853,
85,
8,
1875,
352,
25,
198,
197,
197,
4798,
7203,
79,
945,
278,
12145,
669,
286,
2393,
366,
1343,
25064,
13,
853,
85,
58,
16,
60,
220,
1267,
198,
197,
197,
29572,
24564,
1968,
669,
8979,
7,
17597,
13,
853,
85,
58,
16,
12962,
198,
197,
17772,
25,
198,
197,
197,
4798,
5855,
853,
2672,
357,
29472,
1267,
4943,
628,
628
] | 1.973958 | 192 |
import pytest
from render2.src.shared.shared_logging import get_logger, truncate, prep_for_logging, TRUNCATE_TEXT, TRUNCATE_LENGTH
LONG_STRING = "zxcvbnmasdfghjklqwertyuiop1234567890zxcvbnmasdfghjklqwertyu" \
"iop1234567890zxcvbnmasdzxcvkjapeorijfaldkcfjadfjapsoeifjadf"
TRUNCATED_STRING = f"{LONG_STRING[:(64 - TRUNCATE_LENGTH)]}{TRUNCATE_TEXT}"
# --------------------------------------------------------------
# Tests
# --------------------------------------------------------------
@pytest.mark.unit
def test_prep_for_logging_truncate_long_string_in_content():
"""Make sure data longer than max length gets truncated.
as a by-product, this also tests that 'None' is properly handled (not truncated)."""
# Setup
max_length = 32
truncated_string = f"{LONG_STRING[:(max_length - TRUNCATE_LENGTH)]}{TRUNCATE_TEXT}"
job = {'data': None, 'content_type': 'html', 'content': LONG_STRING}
expected = {'data': None, 'content_type': 'html', 'content': truncated_string}
# Execute
_job_for_logging = prep_for_logging(job, max_length=max_length)
# Verify
assert expected == _job_for_logging
assert len(_job_for_logging['content']) == max_length
@pytest.mark.unit
def test_prep_for_logging_truncate_long_string_in_data():
"""Truncate string in data field"""
# Setup
max_length = 32
truncated_string = f"{LONG_STRING[:(max_length - TRUNCATE_LENGTH)]}{TRUNCATE_TEXT}"
job = {'data': LONG_STRING, 'content_type': 'html', 'content': 'this_is_short'}
expected = {'data': truncated_string, 'content_type': 'html', 'content': 'this_is_short'}
# Execute
_job_for_logging = prep_for_logging(job, max_length=max_length)
# Verify
assert expected == _job_for_logging
assert len(_job_for_logging['data']) == max_length
@pytest.mark.unit
def test_prep_for_logging_truncate_long_bytes_string_in_data():
"""Truncate bytes string"""
# Setup
max_length = 32
truncated_string = f"{LONG_STRING[:(max_length - TRUNCATE_LENGTH)]}{TRUNCATE_TEXT}"
job = {'data': LONG_STRING.encode('utf-8'), 'content_type': 'html', 'content': 'this_is_short'}
expected = {'data': truncated_string, 'content_type': 'html', 'content': 'this_is_short'}
# Execute
_job_for_logging = prep_for_logging(job, max_length=max_length)
# Verify
assert expected == _job_for_logging
assert len(_job_for_logging['data']) == max_length
@pytest.mark.unit
def test_prep_for_logging_no_fields_truncated():
"""Test no fields are altered if they are all equal or less than
the max length."""
# Setup
max_length = 13
job = {'data': 'this_is_short', 'content_type': 'html', 'content': 'this_is_short'}
expected = job.copy()
# Execute and verify
assert expected == prep_for_logging(job, max_length=max_length)
@pytest.mark.unit
def test_prep_for_logging_return_only_truncated_text_due_to_small_max_length():
"""Make sure both data can be redacted and html can be truncated."""
# Setup
max_length = 5
job = {'data': None, 'content_type': 'html', 'content': LONG_STRING}
expected = {'data': None, 'content_type': 'html', 'content': TRUNCATE_TEXT}
# Execute
_job_for_logging = prep_for_logging(job, max_length=max_length)
# Verify
assert expected == _job_for_logging
assert TRUNCATE_LENGTH == len(_job_for_logging['content'])
@pytest.mark.unit
def test_record_truncation(caplog):
"""Ensure that the total LogRecord message is not over maximum size"""
# Setup
too_long = u"\U0001F926" * 65000
too_long_bytes = len(too_long.encode('utf-8'))
logger = get_logger("test")
# Execute
logger.info(f"{too_long}")
msg = caplog.messages[-1]
truncated_bytes = len(msg.encode('utf-8'))
# Verify
assert truncated_bytes < too_long_bytes
assert truncated_bytes < 265000 | [
11748,
12972,
9288,
198,
198,
6738,
8543,
17,
13,
10677,
13,
28710,
13,
28710,
62,
6404,
2667,
1330,
651,
62,
6404,
1362,
11,
40122,
378,
11,
3143,
62,
1640,
62,
6404,
2667,
11,
7579,
4944,
34,
6158,
62,
32541,
11,
7579,
4944,
34,
6158,
62,
43,
49494,
198,
198,
43,
18494,
62,
18601,
2751,
796,
366,
89,
25306,
85,
9374,
5356,
7568,
456,
73,
41582,
80,
15448,
774,
9019,
404,
10163,
2231,
30924,
3829,
89,
25306,
85,
9374,
5356,
7568,
456,
73,
41582,
80,
15448,
774,
84,
1,
3467,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
14922,
10163,
2231,
30924,
3829,
89,
25306,
85,
9374,
5356,
67,
89,
25306,
85,
42421,
1758,
273,
2926,
69,
1940,
74,
12993,
38442,
69,
73,
499,
568,
68,
361,
38442,
69,
1,
198,
5446,
4944,
34,
11617,
62,
18601,
2751,
796,
277,
1,
90,
43,
18494,
62,
18601,
2751,
58,
37498,
2414,
532,
7579,
4944,
34,
6158,
62,
43,
49494,
15437,
18477,
5446,
4944,
34,
6158,
62,
32541,
36786,
198,
198,
2,
20368,
1783,
26171,
198,
2,
30307,
198,
2,
20368,
1783,
26171,
628,
198,
31,
9078,
9288,
13,
4102,
13,
20850,
198,
4299,
1332,
62,
46012,
62,
1640,
62,
6404,
2667,
62,
2213,
19524,
378,
62,
6511,
62,
8841,
62,
259,
62,
11299,
33529,
198,
220,
220,
220,
37227,
12050,
1654,
1366,
2392,
621,
3509,
4129,
3011,
40122,
515,
13,
628,
220,
220,
220,
355,
257,
416,
12,
11167,
11,
428,
635,
5254,
326,
705,
14202,
6,
318,
6105,
12118,
357,
1662,
40122,
515,
21387,
15931,
628,
220,
220,
220,
1303,
31122,
198,
220,
220,
220,
3509,
62,
13664,
796,
3933,
198,
220,
220,
220,
40122,
515,
62,
8841,
796,
277,
1,
90,
43,
18494,
62,
18601,
2751,
58,
37498,
9806,
62,
13664,
532,
7579,
4944,
34,
6158,
62,
43,
49494,
15437,
18477,
5446,
4944,
34,
6158,
62,
32541,
36786,
198,
220,
220,
220,
1693,
796,
1391,
6,
7890,
10354,
6045,
11,
705,
11299,
62,
4906,
10354,
705,
6494,
3256,
705,
11299,
10354,
44533,
62,
18601,
2751,
92,
198,
220,
220,
220,
2938,
796,
1391,
6,
7890,
10354,
6045,
11,
705,
11299,
62,
4906,
10354,
705,
6494,
3256,
705,
11299,
10354,
40122,
515,
62,
8841,
92,
628,
220,
220,
220,
1303,
8393,
1133,
198,
220,
220,
220,
4808,
21858,
62,
1640,
62,
6404,
2667,
796,
3143,
62,
1640,
62,
6404,
2667,
7,
21858,
11,
3509,
62,
13664,
28,
9806,
62,
13664,
8,
628,
220,
220,
220,
1303,
49899,
198,
220,
220,
220,
6818,
2938,
6624,
4808,
21858,
62,
1640,
62,
6404,
2667,
198,
220,
220,
220,
6818,
18896,
28264,
21858,
62,
1640,
62,
6404,
2667,
17816,
11299,
6,
12962,
6624,
3509,
62,
13664,
628,
198,
31,
9078,
9288,
13,
4102,
13,
20850,
198,
4299,
1332,
62,
46012,
62,
1640,
62,
6404,
2667,
62,
2213,
19524,
378,
62,
6511,
62,
8841,
62,
259,
62,
7890,
33529,
198,
220,
220,
220,
37227,
2898,
19524,
378,
4731,
287,
1366,
2214,
37811,
628,
220,
220,
220,
1303,
31122,
198,
220,
220,
220,
3509,
62,
13664,
796,
3933,
198,
220,
220,
220,
40122,
515,
62,
8841,
796,
277,
1,
90,
43,
18494,
62,
18601,
2751,
58,
37498,
9806,
62,
13664,
532,
7579,
4944,
34,
6158,
62,
43,
49494,
15437,
18477,
5446,
4944,
34,
6158,
62,
32541,
36786,
198,
220,
220,
220,
1693,
796,
1391,
6,
7890,
10354,
44533,
62,
18601,
2751,
11,
705,
11299,
62,
4906,
10354,
705,
6494,
3256,
705,
11299,
10354,
705,
5661,
62,
271,
62,
19509,
6,
92,
198,
220,
220,
220,
2938,
796,
1391,
6,
7890,
10354,
40122,
515,
62,
8841,
11,
705,
11299,
62,
4906,
10354,
705,
6494,
3256,
705,
11299,
10354,
705,
5661,
62,
271,
62,
19509,
6,
92,
628,
220,
220,
220,
1303,
8393,
1133,
198,
220,
220,
220,
4808,
21858,
62,
1640,
62,
6404,
2667,
796,
3143,
62,
1640,
62,
6404,
2667,
7,
21858,
11,
3509,
62,
13664,
28,
9806,
62,
13664,
8,
628,
220,
220,
220,
1303,
49899,
198,
220,
220,
220,
6818,
2938,
6624,
4808,
21858,
62,
1640,
62,
6404,
2667,
198,
220,
220,
220,
6818,
18896,
28264,
21858,
62,
1640,
62,
6404,
2667,
17816,
7890,
6,
12962,
6624,
3509,
62,
13664,
628,
198,
31,
9078,
9288,
13,
4102,
13,
20850,
198,
4299,
1332,
62,
46012,
62,
1640,
62,
6404,
2667,
62,
2213,
19524,
378,
62,
6511,
62,
33661,
62,
8841,
62,
259,
62,
7890,
33529,
198,
220,
220,
220,
37227,
2898,
19524,
378,
9881,
4731,
37811,
628,
220,
220,
220,
1303,
31122,
198,
220,
220,
220,
3509,
62,
13664,
796,
3933,
198,
220,
220,
220,
40122,
515,
62,
8841,
796,
277,
1,
90,
43,
18494,
62,
18601,
2751,
58,
37498,
9806,
62,
13664,
532,
7579,
4944,
34,
6158,
62,
43,
49494,
15437,
18477,
5446,
4944,
34,
6158,
62,
32541,
36786,
198,
220,
220,
220,
1693,
796,
1391,
6,
7890,
10354,
44533,
62,
18601,
2751,
13,
268,
8189,
10786,
40477,
12,
23,
33809,
705,
11299,
62,
4906,
10354,
705,
6494,
3256,
705,
11299,
10354,
705,
5661,
62,
271,
62,
19509,
6,
92,
198,
220,
220,
220,
2938,
796,
1391,
6,
7890,
10354,
40122,
515,
62,
8841,
11,
705,
11299,
62,
4906,
10354,
705,
6494,
3256,
705,
11299,
10354,
705,
5661,
62,
271,
62,
19509,
6,
92,
628,
220,
220,
220,
1303,
8393,
1133,
198,
220,
220,
220,
4808,
21858,
62,
1640,
62,
6404,
2667,
796,
3143,
62,
1640,
62,
6404,
2667,
7,
21858,
11,
3509,
62,
13664,
28,
9806,
62,
13664,
8,
628,
220,
220,
220,
1303,
49899,
198,
220,
220,
220,
6818,
2938,
6624,
4808,
21858,
62,
1640,
62,
6404,
2667,
198,
220,
220,
220,
6818,
18896,
28264,
21858,
62,
1640,
62,
6404,
2667,
17816,
7890,
6,
12962,
6624,
3509,
62,
13664,
628,
198,
31,
9078,
9288,
13,
4102,
13,
20850,
198,
4299,
1332,
62,
46012,
62,
1640,
62,
6404,
2667,
62,
3919,
62,
25747,
62,
2213,
19524,
515,
33529,
198,
220,
220,
220,
37227,
14402,
645,
7032,
389,
14294,
611,
484,
389,
477,
4961,
393,
1342,
621,
198,
220,
220,
220,
262,
3509,
4129,
526,
15931,
628,
220,
220,
220,
1303,
31122,
198,
220,
220,
220,
3509,
62,
13664,
796,
1511,
198,
220,
220,
220,
1693,
796,
1391,
6,
7890,
10354,
705,
5661,
62,
271,
62,
19509,
3256,
705,
11299,
62,
4906,
10354,
705,
6494,
3256,
705,
11299,
10354,
705,
5661,
62,
271,
62,
19509,
6,
92,
198,
220,
220,
220,
2938,
796,
1693,
13,
30073,
3419,
628,
220,
220,
220,
1303,
8393,
1133,
290,
11767,
198,
220,
220,
220,
6818,
2938,
6624,
3143,
62,
1640,
62,
6404,
2667,
7,
21858,
11,
3509,
62,
13664,
28,
9806,
62,
13664,
8,
628,
198,
31,
9078,
9288,
13,
4102,
13,
20850,
198,
4299,
1332,
62,
46012,
62,
1640,
62,
6404,
2667,
62,
7783,
62,
8807,
62,
2213,
19524,
515,
62,
5239,
62,
23301,
62,
1462,
62,
17470,
62,
9806,
62,
13664,
33529,
198,
220,
220,
220,
37227,
12050,
1654,
1111,
1366,
460,
307,
44740,
290,
27711,
460,
307,
40122,
515,
526,
15931,
628,
220,
220,
220,
1303,
31122,
198,
220,
220,
220,
3509,
62,
13664,
796,
642,
198,
220,
220,
220,
1693,
796,
1391,
6,
7890,
10354,
6045,
11,
705,
11299,
62,
4906,
10354,
705,
6494,
3256,
705,
11299,
10354,
44533,
62,
18601,
2751,
92,
198,
220,
220,
220,
2938,
796,
1391,
6,
7890,
10354,
6045,
11,
705,
11299,
62,
4906,
10354,
705,
6494,
3256,
705,
11299,
10354,
7579,
4944,
34,
6158,
62,
32541,
92,
628,
220,
220,
220,
1303,
8393,
1133,
198,
220,
220,
220,
4808,
21858,
62,
1640,
62,
6404,
2667,
796,
3143,
62,
1640,
62,
6404,
2667,
7,
21858,
11,
3509,
62,
13664,
28,
9806,
62,
13664,
8,
628,
220,
220,
220,
1303,
49899,
198,
220,
220,
220,
6818,
2938,
6624,
4808,
21858,
62,
1640,
62,
6404,
2667,
198,
220,
220,
220,
6818,
7579,
4944,
34,
6158,
62,
43,
49494,
6624,
18896,
28264,
21858,
62,
1640,
62,
6404,
2667,
17816,
11299,
6,
12962,
628,
198,
31,
9078,
9288,
13,
4102,
13,
20850,
198,
4299,
1332,
62,
22105,
62,
2213,
19524,
341,
7,
6888,
489,
519,
2599,
198,
220,
220,
220,
37227,
4834,
19532,
326,
262,
2472,
5972,
23739,
3275,
318,
407,
625,
5415,
2546,
37811,
628,
220,
220,
220,
1303,
31122,
198,
220,
220,
220,
1165,
62,
6511,
796,
334,
1,
59,
52,
18005,
37,
24,
2075,
1,
1635,
6135,
830,
198,
220,
220,
220,
1165,
62,
6511,
62,
33661,
796,
18896,
7,
18820,
62,
6511,
13,
268,
8189,
10786,
40477,
12,
23,
6,
4008,
198,
220,
220,
220,
49706,
796,
651,
62,
6404,
1362,
7203,
9288,
4943,
628,
220,
220,
220,
1303,
8393,
1133,
198,
220,
220,
220,
49706,
13,
10951,
7,
69,
1,
90,
18820,
62,
6511,
92,
4943,
198,
220,
220,
220,
31456,
796,
1275,
489,
519,
13,
37348,
1095,
58,
12,
16,
60,
198,
220,
220,
220,
40122,
515,
62,
33661,
796,
18896,
7,
19662,
13,
268,
8189,
10786,
40477,
12,
23,
6,
4008,
628,
220,
220,
220,
1303,
49899,
198,
220,
220,
220,
6818,
40122,
515,
62,
33661,
1279,
1165,
62,
6511,
62,
33661,
198,
220,
220,
220,
6818,
40122,
515,
62,
33661,
1279,
2608,
27641
] | 2.578457 | 1,504 |
from rest_framework import serializers
from .models import StaffLog, CompanyLog
from accounts.serializers import UserSerializer
from company.serializers import CompanySerializer
| [
6738,
1334,
62,
30604,
1330,
11389,
11341,
198,
198,
6738,
764,
27530,
1330,
9983,
11187,
11,
5834,
11187,
198,
6738,
5504,
13,
46911,
11341,
1330,
11787,
32634,
7509,
198,
6738,
1664,
13,
46911,
11341,
1330,
5834,
32634,
7509,
628,
198
] | 4.525 | 40 |
from dg_calibration import reflectance
def toa_reflectance(radata, mtdFile, band_ids):
"""Estimate toa reflectance from radiometric data
ignoring atmospheric, topographic and BRDF effects
Parameters
----------
radata : ndarray shape (nbands, ny, nx)
radiance data
mtdFile : str
path to IMD metadata file
band_ids : sequence of int
band IDs
Returns
-------
ndarray
reflectance
"""
return reflectance.radiance_to_reflectance(radata, mtdFile, band_ids=band_ids)
| [
6738,
288,
70,
62,
9948,
571,
1358,
1330,
4079,
590,
628,
198,
4299,
284,
64,
62,
35051,
590,
7,
6335,
1045,
11,
285,
8671,
8979,
11,
4097,
62,
2340,
2599,
198,
220,
220,
220,
37227,
22362,
1920,
284,
64,
4079,
590,
422,
19772,
16996,
1366,
198,
220,
220,
220,
220,
220,
220,
15482,
20938,
11,
1353,
6826,
290,
11177,
8068,
3048,
628,
220,
220,
220,
40117,
198,
220,
220,
220,
24200,
438,
198,
220,
220,
220,
2511,
1045,
1058,
299,
67,
18747,
5485,
357,
77,
21397,
11,
299,
88,
11,
299,
87,
8,
198,
220,
220,
220,
220,
220,
220,
220,
2511,
3610,
1366,
198,
220,
220,
220,
285,
8671,
8979,
1058,
965,
198,
220,
220,
220,
220,
220,
220,
220,
3108,
284,
8959,
35,
20150,
2393,
198,
220,
220,
220,
4097,
62,
2340,
1058,
8379,
286,
493,
198,
220,
220,
220,
220,
220,
220,
220,
4097,
32373,
628,
220,
220,
220,
16409,
198,
220,
220,
220,
35656,
198,
220,
220,
220,
299,
67,
18747,
198,
220,
220,
220,
220,
220,
220,
220,
4079,
590,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
1441,
4079,
590,
13,
6335,
3610,
62,
1462,
62,
35051,
590,
7,
6335,
1045,
11,
285,
8671,
8979,
11,
4097,
62,
2340,
28,
3903,
62,
2340,
8,
198
] | 2.575472 | 212 |
###############
# Repository: https://github.com/lgervasoni/urbansprawl
# MIT License
###############
from shapely.geometry import GeometryCollection
import geopandas as gpd
import pandas as pd
import os
import numpy as np
import osmnx as ox
from osmnx import log
from .utils import get_population_extract_filename
DATA_SOURCES = ["insee", "gpw"]
##############################
# I/O for population data
##############################
def get_df_extract(df_data, poly_gdf, operation="within"):
"""
Indexes input geo-data frame within an input region of interest
If the region of interest is given as a polygon, its bounding box is indexed
Parameters
----------
df_data : geopandas.GeoDataFrame
input data frame to index
poly_gdf : geopandas.GeoDataFrame
geodataframe containing the region of interest in form of polygon
operation : string
the desired spatial join operation: 'within' or 'intersects'
Returns
----------
geopandas.GeoDataFrame
returns the population data frame indexed within the region of interest
"""
# Project to same system coordinates
poly_gdf = ox.project_gdf(poly_gdf, to_crs=df_data.crs)
# Spatial join
df_extract = gpd.sjoin(df_data, poly_gdf, op=operation)
# Keep original columns
df_extract = df_extract[df_data.columns]
return df_extract
def get_population_df(
pop_shapefile, pop_data_file, data_source, to_crs, poly_gdf
):
"""
Read the population shapefile from input filename/s
Index the data within the bounding box
Project to desired CRS
Parameters
----------
pop_shapefile : string
population count shapefile
pop_data_file : string
population data additional file (required for INSEE format)
data_source : string
desired population data source
to_crs : dict
desired coordinate reference system
poly_gdf : geopandas.GeoDataFrame
geodataframe containing the region of interest in form of polygon
Returns
----------
geopandas.GeoDataFrame
returns the indexed and projected population data frame
"""
#######################################
# Load GPW/INSEE population data
#######################################
# Read population data
df_pop = gpd.read_file(pop_shapefile)
# Extract region of interest (EPSG 4326)
# Filter geometries not contained in bounding box
df_pop = get_df_extract(df_pop, poly_gdf)
if data_source is "insee":
#######################################
# Additional step for INSEE data
#######################################
# Read dbf files
data_pop = gpd.read_file(pop_data_file)
# Get columns of interest
data_pop = data_pop[["idINSPIRE", "ind_c"]]
df_pop = df_pop[["geometry", "idINSPIRE"]]
# Inner join to obtain population count data associated to each geometry
df_pop = pd.merge(df_pop, data_pop, how="inner", on="idINSPIRE")
# Rename population count column
df_pop.rename(
columns={"ind_c": "pop_count", "DN": "pop_count"}, inplace=True
)
return ox.project_gdf(df_pop, to_crs=to_crs)
def get_extract_population_data(
city_ref,
data_source,
pop_shapefile=None,
pop_data_file=None,
to_crs={"init": "epsg:4326"},
polygons_gdf=None,
):
"""Get data population extract of desired data source for input city,
calculating the convex hull of input buildings geodataframe
The population data frame is projected to the desired coordinate reference
system
Stores the extracted shapefile
Returns the stored population data for input 'data source' and 'city
reference' if it was previously stored
Parameters
----------
city_ref : string
name of input city
data_source : string
desired population data source
pop_shapefile : string
path of population count shapefile
pop_data_file : string
path of population data additional file (required for INSEE format)
to_crs : dict
desired coordinate reference system
polygons_gdf : geopandas.GeoDataFrame
polygons (e.g. buildings) for input region of interest which
will determine the shape to extract
Returns
----------
geopandas.GeoDataFrame
returns the extracted population data
"""
# Input data source type given?
assert data_source in DATA_SOURCES
# Population extract exists?
if os.path.exists(get_population_extract_filename(city_ref, data_source)):
log("Population extract exists for input city: " + city_ref)
return gpd.read_file(
get_population_extract_filename(city_ref, data_source)
)
# Input shape given?
assert not (np.all(polygons_gdf is None))
# Input population shapefile given?
assert pop_shapefile is not None
# All input files given?
assert not ((data_source == "insee") and (pop_data_file is None))
# Get buildings convex hull
polygon = GeometryCollection(
polygons_gdf.geometry.values.tolist()
).convex_hull
# Convert to geo-dataframe with defined CRS
poly_gdf = gpd.GeoDataFrame(
[polygon], columns=["geometry"], crs=polygons_gdf.crs
)
# Compute extract
df_pop = get_population_df(
pop_shapefile, pop_data_file, data_source, to_crs, poly_gdf
)
# Save to shapefile
df_pop.to_file(
get_population_extract_filename(city_ref, data_source),
driver="ESRI Shapefile",
)
return df_pop
| [
7804,
4242,
21017,
198,
2,
1432,
13264,
25,
3740,
1378,
12567,
13,
785,
14,
75,
70,
712,
888,
72,
14,
5945,
504,
79,
13132,
198,
2,
17168,
13789,
198,
7804,
4242,
21017,
198,
198,
6738,
5485,
306,
13,
469,
15748,
1330,
2269,
15748,
36307,
198,
11748,
30324,
392,
292,
355,
27809,
67,
198,
11748,
19798,
292,
355,
279,
67,
198,
11748,
28686,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
267,
5796,
77,
87,
355,
12018,
198,
198,
6738,
267,
5796,
77,
87,
1330,
2604,
198,
198,
6738,
764,
26791,
1330,
651,
62,
39748,
62,
2302,
974,
62,
34345,
198,
198,
26947,
62,
50,
2606,
7397,
1546,
796,
14631,
259,
3826,
1600,
366,
31197,
86,
8973,
198,
198,
14468,
7804,
4242,
2235,
198,
2,
314,
14,
46,
329,
3265,
1366,
198,
14468,
7804,
4242,
2235,
628,
198,
4299,
651,
62,
7568,
62,
2302,
974,
7,
7568,
62,
7890,
11,
7514,
62,
70,
7568,
11,
4905,
2625,
33479,
1,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
12901,
274,
5128,
40087,
12,
7890,
5739,
1626,
281,
5128,
3814,
286,
1393,
198,
220,
220,
220,
220,
220,
220,
220,
1002,
262,
3814,
286,
1393,
318,
1813,
355,
257,
7514,
14520,
11,
663,
5421,
278,
3091,
318,
41497,
628,
220,
220,
220,
220,
220,
220,
220,
40117,
198,
220,
220,
220,
220,
220,
220,
220,
24200,
438,
198,
220,
220,
220,
220,
220,
220,
220,
47764,
62,
7890,
1058,
30324,
392,
292,
13,
10082,
78,
6601,
19778,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5128,
1366,
5739,
284,
6376,
198,
220,
220,
220,
220,
220,
220,
220,
7514,
62,
70,
7568,
1058,
30324,
392,
292,
13,
10082,
78,
6601,
19778,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4903,
375,
1045,
14535,
7268,
262,
3814,
286,
1393,
287,
1296,
286,
7514,
14520,
198,
220,
220,
220,
220,
220,
220,
220,
4905,
1058,
4731,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
262,
10348,
21739,
4654,
4905,
25,
705,
33479,
6,
393,
705,
3849,
8831,
82,
6,
628,
220,
220,
220,
220,
220,
220,
220,
16409,
198,
220,
220,
220,
220,
220,
220,
220,
24200,
438,
198,
220,
220,
220,
220,
220,
220,
220,
30324,
392,
292,
13,
10082,
78,
6601,
19778,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5860,
262,
3265,
1366,
5739,
41497,
1626,
262,
3814,
286,
1393,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
1303,
4935,
284,
976,
1080,
22715,
198,
220,
220,
220,
7514,
62,
70,
7568,
796,
12018,
13,
16302,
62,
70,
7568,
7,
35428,
62,
70,
7568,
11,
284,
62,
66,
3808,
28,
7568,
62,
7890,
13,
66,
3808,
8,
198,
220,
220,
220,
1303,
1338,
34961,
4654,
198,
220,
220,
220,
47764,
62,
2302,
974,
796,
27809,
67,
13,
82,
22179,
7,
7568,
62,
7890,
11,
7514,
62,
70,
7568,
11,
1034,
28,
27184,
8,
198,
220,
220,
220,
1303,
9175,
2656,
15180,
198,
220,
220,
220,
47764,
62,
2302,
974,
796,
47764,
62,
2302,
974,
58,
7568,
62,
7890,
13,
28665,
82,
60,
198,
220,
220,
220,
1441,
47764,
62,
2302,
974,
628,
198,
4299,
651,
62,
39748,
62,
7568,
7,
198,
220,
220,
220,
1461,
62,
43358,
7753,
11,
1461,
62,
7890,
62,
7753,
11,
1366,
62,
10459,
11,
284,
62,
66,
3808,
11,
7514,
62,
70,
7568,
198,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
4149,
262,
3265,
5485,
7753,
422,
5128,
29472,
14,
82,
198,
220,
220,
220,
220,
220,
220,
220,
12901,
262,
1366,
1626,
262,
5421,
278,
3091,
198,
220,
220,
220,
220,
220,
220,
220,
4935,
284,
10348,
327,
6998,
628,
220,
220,
220,
220,
220,
220,
220,
40117,
198,
220,
220,
220,
220,
220,
220,
220,
24200,
438,
198,
220,
220,
220,
220,
220,
220,
220,
1461,
62,
43358,
7753,
1058,
4731,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3265,
954,
5485,
7753,
198,
220,
220,
220,
220,
220,
220,
220,
1461,
62,
7890,
62,
7753,
1058,
4731,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3265,
1366,
3224,
2393,
357,
35827,
329,
3268,
36078,
5794,
8,
198,
220,
220,
220,
220,
220,
220,
220,
1366,
62,
10459,
1058,
4731,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
10348,
3265,
1366,
2723,
198,
220,
220,
220,
220,
220,
220,
220,
284,
62,
66,
3808,
1058,
8633,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
10348,
20435,
4941,
1080,
198,
220,
220,
220,
220,
220,
220,
220,
7514,
62,
70,
7568,
1058,
30324,
392,
292,
13,
10082,
78,
6601,
19778,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4903,
375,
1045,
14535,
7268,
262,
3814,
286,
1393,
287,
1296,
286,
7514,
14520,
628,
220,
220,
220,
220,
220,
220,
220,
16409,
198,
220,
220,
220,
220,
220,
220,
220,
24200,
438,
198,
220,
220,
220,
220,
220,
220,
220,
30324,
392,
292,
13,
10082,
78,
6601,
19778,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5860,
262,
41497,
290,
13301,
3265,
1366,
5739,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
1303,
29113,
4242,
2235,
198,
220,
220,
220,
1303,
8778,
14714,
54,
14,
1268,
36078,
3265,
1366,
198,
220,
220,
220,
1303,
29113,
4242,
2235,
198,
220,
220,
220,
1303,
4149,
3265,
1366,
198,
220,
220,
220,
47764,
62,
12924,
796,
27809,
67,
13,
961,
62,
7753,
7,
12924,
62,
43358,
7753,
8,
628,
220,
220,
220,
1303,
29677,
3814,
286,
1393,
357,
36,
3705,
38,
5946,
2075,
8,
198,
220,
220,
220,
1303,
25853,
4903,
908,
1678,
407,
7763,
287,
5421,
278,
3091,
198,
220,
220,
220,
47764,
62,
12924,
796,
651,
62,
7568,
62,
2302,
974,
7,
7568,
62,
12924,
11,
7514,
62,
70,
7568,
8,
628,
220,
220,
220,
611,
1366,
62,
10459,
318,
366,
259,
3826,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
29113,
4242,
2235,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
15891,
2239,
329,
3268,
36078,
1366,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
29113,
4242,
2235,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
4149,
288,
19881,
3696,
198,
220,
220,
220,
220,
220,
220,
220,
1366,
62,
12924,
796,
27809,
67,
13,
961,
62,
7753,
7,
12924,
62,
7890,
62,
7753,
8,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
3497,
15180,
286,
1393,
198,
220,
220,
220,
220,
220,
220,
220,
1366,
62,
12924,
796,
1366,
62,
12924,
58,
14692,
312,
1268,
4303,
41736,
1600,
366,
521,
62,
66,
8973,
60,
198,
220,
220,
220,
220,
220,
220,
220,
47764,
62,
12924,
796,
47764,
62,
12924,
58,
14692,
469,
15748,
1600,
366,
312,
1268,
4303,
41736,
8973,
60,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
24877,
4654,
284,
7330,
3265,
954,
1366,
3917,
284,
1123,
22939,
198,
220,
220,
220,
220,
220,
220,
220,
47764,
62,
12924,
796,
279,
67,
13,
647,
469,
7,
7568,
62,
12924,
11,
1366,
62,
12924,
11,
703,
2625,
5083,
1600,
319,
2625,
312,
1268,
4303,
41736,
4943,
628,
220,
220,
220,
220,
220,
220,
220,
1303,
7152,
480,
3265,
954,
5721,
198,
220,
220,
220,
47764,
62,
12924,
13,
918,
480,
7,
198,
220,
220,
220,
220,
220,
220,
220,
15180,
28,
4895,
521,
62,
66,
1298,
366,
12924,
62,
9127,
1600,
366,
35504,
1298,
366,
12924,
62,
9127,
25719,
287,
5372,
28,
17821,
198,
220,
220,
220,
1267,
628,
220,
220,
220,
1441,
12018,
13,
16302,
62,
70,
7568,
7,
7568,
62,
12924,
11,
284,
62,
66,
3808,
28,
1462,
62,
66,
3808,
8,
628,
198,
4299,
651,
62,
2302,
974,
62,
39748,
62,
7890,
7,
198,
220,
220,
220,
1748,
62,
5420,
11,
198,
220,
220,
220,
1366,
62,
10459,
11,
198,
220,
220,
220,
1461,
62,
43358,
7753,
28,
14202,
11,
198,
220,
220,
220,
1461,
62,
7890,
62,
7753,
28,
14202,
11,
198,
220,
220,
220,
284,
62,
66,
3808,
28,
4895,
15003,
1298,
366,
25386,
70,
25,
3559,
2075,
25719,
198,
220,
220,
220,
25052,
684,
62,
70,
7568,
28,
14202,
11,
198,
2599,
198,
220,
220,
220,
37227,
3855,
1366,
3265,
7925,
286,
10348,
1366,
2723,
329,
5128,
1748,
11,
198,
220,
220,
220,
26019,
262,
24748,
87,
23644,
286,
5128,
6832,
4903,
375,
1045,
14535,
628,
220,
220,
220,
383,
3265,
1366,
5739,
318,
13301,
284,
262,
10348,
20435,
4941,
198,
220,
220,
220,
1080,
628,
220,
220,
220,
41835,
262,
21242,
5485,
7753,
628,
220,
220,
220,
16409,
262,
8574,
3265,
1366,
329,
5128,
705,
7890,
2723,
6,
290,
705,
19205,
198,
220,
220,
220,
4941,
6,
611,
340,
373,
4271,
8574,
628,
220,
220,
220,
220,
220,
220,
220,
40117,
198,
220,
220,
220,
220,
220,
220,
220,
24200,
438,
198,
220,
220,
220,
220,
220,
220,
220,
1748,
62,
5420,
1058,
4731,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1438,
286,
5128,
1748,
198,
220,
220,
220,
220,
220,
220,
220,
1366,
62,
10459,
1058,
4731,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
10348,
3265,
1366,
2723,
198,
220,
220,
220,
220,
220,
220,
220,
1461,
62,
43358,
7753,
1058,
4731,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3108,
286,
3265,
954,
5485,
7753,
198,
220,
220,
220,
220,
220,
220,
220,
1461,
62,
7890,
62,
7753,
1058,
4731,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3108,
286,
3265,
1366,
3224,
2393,
357,
35827,
329,
3268,
36078,
5794,
8,
198,
220,
220,
220,
220,
220,
220,
220,
284,
62,
66,
3808,
1058,
8633,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
10348,
20435,
4941,
1080,
198,
220,
220,
220,
220,
220,
220,
220,
25052,
684,
62,
70,
7568,
1058,
30324,
392,
292,
13,
10082,
78,
6601,
19778,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
25052,
684,
357,
68,
13,
70,
13,
6832,
8,
329,
5128,
3814,
286,
1393,
543,
198,
220,
220,
220,
220,
220,
220,
220,
481,
5004,
262,
5485,
284,
7925,
628,
220,
220,
220,
220,
220,
220,
220,
16409,
198,
220,
220,
220,
220,
220,
220,
220,
24200,
438,
198,
220,
220,
220,
220,
220,
220,
220,
30324,
392,
292,
13,
10082,
78,
6601,
19778,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5860,
262,
21242,
3265,
1366,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
1303,
23412,
1366,
2723,
2099,
1813,
30,
198,
220,
220,
220,
6818,
1366,
62,
10459,
287,
42865,
62,
50,
2606,
7397,
1546,
628,
220,
220,
220,
1303,
20133,
7925,
7160,
30,
198,
220,
220,
220,
611,
28686,
13,
6978,
13,
1069,
1023,
7,
1136,
62,
39748,
62,
2302,
974,
62,
34345,
7,
19205,
62,
5420,
11,
1366,
62,
10459,
8,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
2604,
7203,
45251,
7925,
7160,
329,
5128,
1748,
25,
366,
1343,
1748,
62,
5420,
8,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
27809,
67,
13,
961,
62,
7753,
7,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
651,
62,
39748,
62,
2302,
974,
62,
34345,
7,
19205,
62,
5420,
11,
1366,
62,
10459,
8,
198,
220,
220,
220,
220,
220,
220,
220,
1267,
628,
220,
220,
220,
220,
220,
220,
220,
1303,
23412,
5485,
1813,
30,
198,
220,
220,
220,
6818,
407,
357,
37659,
13,
439,
7,
35428,
70,
684,
62,
70,
7568,
318,
6045,
4008,
198,
220,
220,
220,
1303,
23412,
3265,
5485,
7753,
1813,
30,
198,
220,
220,
220,
6818,
1461,
62,
43358,
7753,
318,
407,
6045,
198,
220,
220,
220,
1303,
1439,
5128,
3696,
1813,
30,
198,
220,
220,
220,
6818,
407,
14808,
7890,
62,
10459,
6624,
366,
259,
3826,
4943,
290,
357,
12924,
62,
7890,
62,
7753,
318,
6045,
4008,
628,
220,
220,
220,
1303,
3497,
6832,
24748,
87,
23644,
198,
220,
220,
220,
7514,
14520,
796,
2269,
15748,
36307,
7,
198,
220,
220,
220,
220,
220,
220,
220,
25052,
684,
62,
70,
7568,
13,
469,
15748,
13,
27160,
13,
83,
349,
396,
3419,
198,
220,
220,
220,
6739,
1102,
303,
87,
62,
71,
724,
198,
220,
220,
220,
1303,
38240,
284,
40087,
12,
7890,
14535,
351,
5447,
327,
6998,
198,
220,
220,
220,
7514,
62,
70,
7568,
796,
27809,
67,
13,
10082,
78,
6601,
19778,
7,
198,
220,
220,
220,
220,
220,
220,
220,
685,
35428,
14520,
4357,
15180,
28,
14692,
469,
15748,
33116,
1067,
82,
28,
35428,
70,
684,
62,
70,
7568,
13,
66,
3808,
198,
220,
220,
220,
1267,
628,
220,
220,
220,
1303,
3082,
1133,
7925,
198,
220,
220,
220,
47764,
62,
12924,
796,
651,
62,
39748,
62,
7568,
7,
198,
220,
220,
220,
220,
220,
220,
220,
1461,
62,
43358,
7753,
11,
1461,
62,
7890,
62,
7753,
11,
1366,
62,
10459,
11,
284,
62,
66,
3808,
11,
7514,
62,
70,
7568,
198,
220,
220,
220,
1267,
628,
220,
220,
220,
1303,
12793,
284,
5485,
7753,
198,
220,
220,
220,
47764,
62,
12924,
13,
1462,
62,
7753,
7,
198,
220,
220,
220,
220,
220,
220,
220,
651,
62,
39748,
62,
2302,
974,
62,
34345,
7,
19205,
62,
5420,
11,
1366,
62,
10459,
828,
198,
220,
220,
220,
220,
220,
220,
220,
4639,
2625,
1546,
7112,
25959,
7753,
1600,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
1441,
47764,
62,
12924,
198
] | 2.526564 | 2,334 |
# --------------------------------------------------------
# Tensorflow Phrase Detection
# Licensed under The MIT License [see LICENSE for details]
# Written by Bryan Plummer based on code from Ross Girshick
# --------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import matplotlib
matplotlib.use('agg')
from datasets.imdb import imdb
import datasets.ds_utils as ds_utils
from model.config import cfg, get_output_vocab
import os.path as osp
import sys
import os
import numpy as np
import scipy.sparse
import scipy.io as sio
import pickle
import json
import uuid
import h5py
import string
| [
2,
20368,
22369,
198,
2,
309,
22854,
11125,
1380,
22789,
46254,
198,
2,
49962,
739,
383,
17168,
13789,
685,
3826,
38559,
24290,
329,
3307,
60,
198,
2,
22503,
416,
17857,
1345,
31647,
1912,
319,
2438,
422,
9847,
23837,
1477,
624,
198,
2,
20368,
22369,
198,
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
198,
6738,
11593,
37443,
834,
1330,
7297,
198,
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
198,
198,
11748,
2603,
29487,
8019,
198,
6759,
29487,
8019,
13,
1904,
10786,
9460,
11537,
198,
198,
6738,
40522,
13,
320,
9945,
1330,
545,
9945,
198,
11748,
40522,
13,
9310,
62,
26791,
355,
288,
82,
62,
26791,
198,
6738,
2746,
13,
11250,
1330,
30218,
70,
11,
651,
62,
22915,
62,
18893,
397,
198,
11748,
28686,
13,
6978,
355,
267,
2777,
198,
11748,
25064,
198,
11748,
28686,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
629,
541,
88,
13,
82,
29572,
198,
11748,
629,
541,
88,
13,
952,
355,
264,
952,
198,
11748,
2298,
293,
198,
11748,
33918,
198,
11748,
334,
27112,
198,
11748,
289,
20,
9078,
198,
11748,
4731,
628
] | 3.927374 | 179 |
from __future__ import division
import glob, os
import numpy as np
import cv2
import torch.utils.data as torch_data
import yaml
import utils.radiate_utils as radiate_utils
from utils.calibration import Calibration
| [
6738,
11593,
37443,
834,
1330,
7297,
198,
11748,
15095,
11,
28686,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
269,
85,
17,
198,
11748,
28034,
13,
26791,
13,
7890,
355,
28034,
62,
7890,
198,
11748,
331,
43695,
198,
11748,
3384,
4487,
13,
6335,
9386,
62,
26791,
355,
2511,
9386,
62,
26791,
198,
6738,
3384,
4487,
13,
9948,
571,
1358,
1330,
2199,
571,
1358,
198
] | 3.34375 | 64 |
# -*- coding: utf-8 -*-
from flask import Blueprint
from flask_journey import route
from .services import get_pilots, get_pilot
from .schemas import pilot, pilots, query
bp = Blueprint('pilots', __name__)
@route(bp, '/<pilot_id>', methods=['GET'], marshal_with=pilot)
@route(bp, '/', methods=['GET'], _query=query, marshal_with=pilots, validate=False)
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
6738,
42903,
1330,
39932,
198,
6738,
42903,
62,
73,
5604,
1330,
6339,
198,
198,
6738,
764,
30416,
1330,
651,
62,
79,
346,
1747,
11,
651,
62,
79,
23439,
198,
6738,
764,
1416,
4411,
292,
1330,
8022,
11,
14982,
11,
12405,
628,
198,
46583,
796,
39932,
10786,
79,
346,
1747,
3256,
11593,
3672,
834,
8,
628,
198,
31,
38629,
7,
46583,
11,
31051,
27,
79,
23439,
62,
312,
29,
3256,
5050,
28,
17816,
18851,
6,
4357,
22397,
282,
62,
4480,
28,
79,
23439,
8,
628,
198,
31,
38629,
7,
46583,
11,
31051,
3256,
5050,
28,
17816,
18851,
6,
4357,
4808,
22766,
28,
22766,
11,
22397,
282,
62,
4480,
28,
79,
346,
1747,
11,
26571,
28,
25101,
8,
628
] | 2.734848 | 132 |
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup
setup(
name="redisfe",
version="0.0.1",
packages=find_packages(),
entry_points={"console_scripts": ("redisfe=redisfe.main:main",)},
)
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
6738,
900,
37623,
10141,
1330,
1064,
62,
43789,
11,
9058,
198,
198,
40406,
7,
198,
220,
220,
220,
1438,
2625,
445,
4468,
68,
1600,
198,
220,
220,
220,
2196,
2625,
15,
13,
15,
13,
16,
1600,
198,
220,
220,
220,
10392,
28,
19796,
62,
43789,
22784,
198,
220,
220,
220,
5726,
62,
13033,
28,
4895,
41947,
62,
46521,
1298,
5855,
445,
4468,
68,
28,
445,
4468,
68,
13,
12417,
25,
12417,
1600,
8,
5512,
198,
8,
198
] | 2.391304 | 92 |
# ----------------------------------------------------------------------------
# Title: Scientific Visualisation - Python & Matplotlib
# Author: Nicolas P. Rougier
# License: BSD
# ----------------------------------------------------------------------------
import numpy as np
import matplotlib.pyplot as plt
fig = plt.figure(figsize=(5, 2))
ax = fig.add_subplot(111, xlim=(2002.5, 2021.5), ylim=(0, 6.5), yticks=([]))
ax.tick_params("x", labelsize="x-small", which="major")
plt.plot([2002.5, 2021.5], [0, 0], color="black", linewidth=1.0, clip_on=False)
X = np.arange(2003, 2022)
Y = np.zeros(len(X))
plt.scatter(
X,
Y,
s=50,
linewidth=1.0,
zorder=10,
clip_on=False,
edgecolor="black",
facecolor="white",
)
annotate(ax, 2021, 4, "3.4")
annotate(ax, 2020, 3, "3.3")
annotate(ax, 2019, 4, "3.2")
annotate(ax, 2019, 2, "3.1")
annotate(ax, 2018, 3, "3.0", y0=1.5)
annotate(ax, 2018, 1, "2.2", fc="#777777")
annotate(ax, 2017, 4, "2.1", y0=2.5)
annotate(ax, 2017, 2, "2.0")
annotate(ax, 2015, 2, "1.5")
annotate(ax, 2014, 1, "1.4")
annotate(ax, 2013, 2, "1.3")
annotate(ax, 2012, 1, "1.2")
annotate(ax, 2011, 3, "1.1", y0=2.5)
annotate(ax, 2011, 2, "1.0")
annotate(ax, 2009, 1, "0.99")
annotate(ax, 2003, 1, "0.10")
x0, x1 = 2002.5, 2011.9
ax.plot([x0, x1], [5, 5], color="black", linewidth=1, marker="|", clip_on=False)
ax.text((x0 + x1) / 2, 5.1, "J.D. Hunter", ha="center", va="bottom", size="x-small")
x0, x1 = 2012.1, 2017.9
ax.plot([x0, x1], [5, 5], color="black", linewidth=1, marker="|", clip_on=False)
ax.text((x0 + x1) / 2, 5.1, "M. Droettboom", ha="center", va="bottom", size="x-small")
x0, x1 = 2014.1, 2021.5
ax.plot([x0, x1 + 1], [6, 6], color="black", linewidth=1, marker="|")
ax.text((x0 + x1) / 2, 6.1, "T. Caswell", ha="center", va="bottom", size="x-small")
ax.spines["right"].set_visible(False)
ax.spines["left"].set_visible(False)
ax.spines["top"].set_visible(False)
ax.spines["bottom"].set_visible(False)
ax.set_xticks(np.arange(2003, 2022, 2))
plt.tight_layout()
plt.savefig("../../figures/introduction/matplotlib-timeline.pdf")
plt.savefig("../../figures/introduction/matplotlib-timeline.png", dpi=300)
plt.show()
| [
2,
16529,
10541,
198,
2,
11851,
25,
220,
220,
22060,
15612,
5612,
532,
11361,
1222,
6550,
29487,
8019,
198,
2,
6434,
25,
220,
29737,
350,
13,
13876,
70,
959,
198,
2,
13789,
25,
347,
10305,
198,
2,
16529,
10541,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
628,
198,
198,
5647,
796,
458,
83,
13,
26875,
7,
5647,
7857,
16193,
20,
11,
362,
4008,
198,
897,
796,
2336,
13,
2860,
62,
7266,
29487,
7,
16243,
11,
2124,
2475,
16193,
16942,
13,
20,
11,
33448,
13,
20,
828,
331,
2475,
16193,
15,
11,
718,
13,
20,
828,
331,
83,
3378,
16193,
21737,
4008,
198,
897,
13,
42298,
62,
37266,
7203,
87,
1600,
14722,
1096,
2625,
87,
12,
17470,
1600,
543,
2625,
22478,
4943,
198,
489,
83,
13,
29487,
26933,
16942,
13,
20,
11,
33448,
13,
20,
4357,
685,
15,
11,
657,
4357,
3124,
2625,
13424,
1600,
9493,
413,
5649,
28,
16,
13,
15,
11,
10651,
62,
261,
28,
25101,
8,
198,
55,
796,
45941,
13,
283,
858,
7,
16088,
11,
33160,
8,
198,
56,
796,
45941,
13,
9107,
418,
7,
11925,
7,
55,
4008,
198,
489,
83,
13,
1416,
1436,
7,
198,
220,
220,
220,
1395,
11,
198,
220,
220,
220,
575,
11,
198,
220,
220,
220,
264,
28,
1120,
11,
198,
220,
220,
220,
9493,
413,
5649,
28,
16,
13,
15,
11,
198,
220,
220,
220,
1976,
2875,
28,
940,
11,
198,
220,
220,
220,
10651,
62,
261,
28,
25101,
11,
198,
220,
220,
220,
5743,
8043,
2625,
13424,
1600,
198,
220,
220,
220,
1986,
8043,
2625,
11186,
1600,
198,
8,
198,
198,
34574,
378,
7,
897,
11,
33448,
11,
604,
11,
366,
18,
13,
19,
4943,
198,
34574,
378,
7,
897,
11,
12131,
11,
513,
11,
366,
18,
13,
18,
4943,
198,
34574,
378,
7,
897,
11,
13130,
11,
604,
11,
366,
18,
13,
17,
4943,
198,
34574,
378,
7,
897,
11,
13130,
11,
362,
11,
366,
18,
13,
16,
4943,
198,
34574,
378,
7,
897,
11,
2864,
11,
513,
11,
366,
18,
13,
15,
1600,
331,
15,
28,
16,
13,
20,
8,
198,
34574,
378,
7,
897,
11,
2864,
11,
352,
11,
366,
17,
13,
17,
1600,
277,
66,
25698,
3324,
3324,
3324,
4943,
198,
34574,
378,
7,
897,
11,
2177,
11,
604,
11,
366,
17,
13,
16,
1600,
331,
15,
28,
17,
13,
20,
8,
198,
34574,
378,
7,
897,
11,
2177,
11,
362,
11,
366,
17,
13,
15,
4943,
198,
34574,
378,
7,
897,
11,
1853,
11,
362,
11,
366,
16,
13,
20,
4943,
198,
34574,
378,
7,
897,
11,
1946,
11,
352,
11,
366,
16,
13,
19,
4943,
198,
34574,
378,
7,
897,
11,
2211,
11,
362,
11,
366,
16,
13,
18,
4943,
198,
34574,
378,
7,
897,
11,
2321,
11,
352,
11,
366,
16,
13,
17,
4943,
198,
34574,
378,
7,
897,
11,
2813,
11,
513,
11,
366,
16,
13,
16,
1600,
331,
15,
28,
17,
13,
20,
8,
198,
34574,
378,
7,
897,
11,
2813,
11,
362,
11,
366,
16,
13,
15,
4943,
198,
34574,
378,
7,
897,
11,
3717,
11,
352,
11,
366,
15,
13,
2079,
4943,
198,
34574,
378,
7,
897,
11,
5816,
11,
352,
11,
366,
15,
13,
940,
4943,
198,
198,
87,
15,
11,
2124,
16,
796,
6244,
13,
20,
11,
2813,
13,
24,
198,
897,
13,
29487,
26933,
87,
15,
11,
2124,
16,
4357,
685,
20,
11,
642,
4357,
3124,
2625,
13424,
1600,
9493,
413,
5649,
28,
16,
11,
18364,
2625,
91,
1600,
10651,
62,
261,
28,
25101,
8,
198,
897,
13,
5239,
19510,
87,
15,
1343,
2124,
16,
8,
1220,
362,
11,
642,
13,
16,
11,
366,
41,
13,
35,
13,
9055,
1600,
387,
2625,
16159,
1600,
46935,
2625,
22487,
1600,
2546,
2625,
87,
12,
17470,
4943,
198,
198,
87,
15,
11,
2124,
16,
796,
2321,
13,
16,
11,
2177,
13,
24,
198,
897,
13,
29487,
26933,
87,
15,
11,
2124,
16,
4357,
685,
20,
11,
642,
4357,
3124,
2625,
13424,
1600,
9493,
413,
5649,
28,
16,
11,
18364,
2625,
91,
1600,
10651,
62,
261,
28,
25101,
8,
198,
897,
13,
5239,
19510,
87,
15,
1343,
2124,
16,
8,
1220,
362,
11,
642,
13,
16,
11,
366,
44,
13,
21045,
3087,
2127,
296,
1600,
387,
2625,
16159,
1600,
46935,
2625,
22487,
1600,
2546,
2625,
87,
12,
17470,
4943,
198,
198,
87,
15,
11,
2124,
16,
796,
1946,
13,
16,
11,
33448,
13,
20,
198,
897,
13,
29487,
26933,
87,
15,
11,
2124,
16,
1343,
352,
4357,
685,
21,
11,
718,
4357,
3124,
2625,
13424,
1600,
9493,
413,
5649,
28,
16,
11,
18364,
2625,
91,
4943,
198,
897,
13,
5239,
19510,
87,
15,
1343,
2124,
16,
8,
1220,
362,
11,
718,
13,
16,
11,
366,
51,
13,
11294,
4053,
1600,
387,
2625,
16159,
1600,
46935,
2625,
22487,
1600,
2546,
2625,
87,
12,
17470,
4943,
198,
198,
897,
13,
2777,
1127,
14692,
3506,
1,
4083,
2617,
62,
23504,
7,
25101,
8,
198,
897,
13,
2777,
1127,
14692,
9464,
1,
4083,
2617,
62,
23504,
7,
25101,
8,
198,
897,
13,
2777,
1127,
14692,
4852,
1,
4083,
2617,
62,
23504,
7,
25101,
8,
198,
897,
13,
2777,
1127,
14692,
22487,
1,
4083,
2617,
62,
23504,
7,
25101,
8,
198,
897,
13,
2617,
62,
742,
3378,
7,
37659,
13,
283,
858,
7,
16088,
11,
33160,
11,
362,
4008,
198,
198,
489,
83,
13,
33464,
62,
39786,
3419,
198,
489,
83,
13,
21928,
5647,
7203,
40720,
40720,
5647,
942,
14,
27427,
596,
14,
6759,
29487,
8019,
12,
16514,
4470,
13,
12315,
4943,
198,
489,
83,
13,
21928,
5647,
7203,
40720,
40720,
5647,
942,
14,
27427,
596,
14,
6759,
29487,
8019,
12,
16514,
4470,
13,
11134,
1600,
288,
14415,
28,
6200,
8,
198,
489,
83,
13,
12860,
3419,
198
] | 2.281971 | 954 |
import tensorly as tl
import numpy as np
from src._als import als,nn_als
from src._herals import her_Als,nn_her_Als
from src._cprand import CPRAND, nn_CPRAND
from src._hercprand import her_CPRAND,nn_her_CPRAND
from src._base import init_factors,random_init_fac
import copy
import matplotlib.pyplot as plt
def speedup(list_N,r,list_S,list_P,tol,noise_level=0.1,scale=True,nn=False,nb_tensors=5):
"""
Calculate the speed up of her CPRAND vs ALS, her ALS and CPRAND
Parameters
----------
list_N : list
list of dimensions (in the increasing order)
r : int
rank of the tensor
list_S : list
list of the sample sizes, same length as list_P
list_P : list
list of the err sample sizes, same length as list_P
tol : double
tolerance for the 4 algorithms
noise_level : float, optional
noise_level of the tensor. The default is 0.1.
scale : boolean, optional
whether to scale the condition number of factors or not. The default is True.
nn : boolean, optional
use nn methods or not. The default is False.
Returns
-------
None.
"""
vsals = np.zeros((len(list_N),len(list_S)))
vsherals = np.zeros((len(list_N),len(list_S)))
vscprand = np.zeros((len(list_N),len(list_S)))
for i in range(len(list_N)) :
time_als = 0
time_herals = 0
time_hercprand = np.zeros(len(list_S))
time_cprand = np.zeros(len(list_S))
for k in range(nb_tensors):
fac_true,noise = init_factors(list_N[i], list_N[i], list_N[i], r,noise_level=noise_level,scale=scale,nn=nn)
t=tl.cp_to_tensor((None,fac_true))+noise
if k==0 :
factors=random_init_fac(t,r)
if nn==False :
weights2,factors2,it2,error2,time2=als(t,r,factors=copy.deepcopy(factors),it_max=10000,tol=tol,time_rec=True)
weights1,factors1,it1,error1,cpt1,time1=her_Als(t,r,factors=copy.deepcopy(factors),it_max=10000,tol=tol,time_rec=True)
else :
weights2,factors2,it2,error2,time2=nn_als(t,r,factors=copy.deepcopy(factors),it_max=10000,tol=tol,time_rec=True)
weights1,factors1,it1,error1,cpt1,time1=nn_her_Als(t,r,factors=copy.deepcopy(factors),it_max=10000,tol=tol,time_rec=True)
time_als += np.cumsum(time2)[it2-1]
time_herals += np.cumsum(time1)[it1-1]
for s in range(len(list_S)):
if(nn==False):
weights3,factors3,it3,error3,time3=CPRAND(t,r,list_S[s],list_P[s],factors=copy.deepcopy(factors),exact_err=False,it_max=10000,err_it_max=10000,tol=tol,time_rec=True)
weights4,factors4,it4,error4,cpt4,time4=her_CPRAND(t,r,list_S[s],list_P[s],factors=copy.deepcopy(factors),exact_err=False,it_max=10000,err_it_max=10000,tol=tol,time_rec=True)
else :
weights3,factors3,it3,error3,time3=nn_CPRAND(t,r,list_S[s],list_P[s],factors=copy.deepcopy(factors),exact_err=False,it_max=10000,err_it_max=10000,tol=tol,time_rec=True)
weights4,factors4,it4,error4,cpt4,time4=nn_her_CPRAND(t,r,list_S[s],list_P[s],factors=copy.deepcopy(factors),exact_err=False,it_max=10000,err_it_max=10000,tol=tol,time_rec=True)
time_hercprand[s] += np.cumsum(time4)[it4-1]
time_cprand[s] =+ np.cumsum(time3)[it3-1]
vsals[i,:] = time_als / copy.deepcopy(time_hercprand)
vsherals[i,:] =time_herals/copy.deepcopy(time_hercprand)
vscprand[i,:] =copy.deepcopy(time_cprand)/copy.deepcopy(time_hercprand)
# plot
plt.figure(0)
for s in range(len(list_S)):
legend = "S = " + str(list_S[s]) +" , P = " + str(list_P[s])
plt.plot(list_N, vsals[:,s],label=legend)
plt.axhline(y = 1, color = 'k',linestyle = '--',label="speed up = 1")
plt.xlabel('N')
plt.ylabel('Speed up factor')
plt.legend(loc='best')
plt.title('Speed up vs als')
plt.figure(1)
for s in range(len(list_S)):
legend = "S = " + str(list_S[s]) +" , P = " + str(list_P[s])
plt.plot(list_N,vsherals[:,s],label=legend)
plt.axhline(y = 1, color = 'k',linestyle = '--',label="speed up = 1")
plt.xlabel('N')
plt.ylabel('Speed up factor')
plt.legend(loc='best')
plt.title('Speed up vs herals')
plt.figure(2)
for s in range(len(list_S)):
legend = "S = " + str(list_S[s]) +" , P = " + str(list_P[s])
plt.plot(list_N,vscprand[:,s],label=legend)
plt.axhline(y = 1, color = 'k',linestyle = '--',label="speed up = 1")
plt.xlabel('N')
plt.ylabel('Speed up factor')
plt.legend(loc='best')
plt.title('Speed up vs cprand')
| [
11748,
11192,
273,
306,
355,
256,
75,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
12351,
13557,
874,
1330,
435,
82,
11,
20471,
62,
874,
198,
6738,
12351,
13557,
372,
874,
1330,
607,
62,
2348,
82,
11,
20471,
62,
372,
62,
2348,
82,
198,
6738,
12351,
13557,
66,
1050,
392,
1330,
42920,
6981,
11,
299,
77,
62,
34,
4805,
6981,
198,
6738,
12351,
13557,
372,
66,
1050,
392,
1330,
607,
62,
34,
4805,
6981,
11,
20471,
62,
372,
62,
34,
4805,
6981,
198,
6738,
12351,
13557,
8692,
1330,
2315,
62,
22584,
669,
11,
25120,
62,
15003,
62,
38942,
198,
11748,
4866,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
198,
4299,
2866,
929,
7,
4868,
62,
45,
11,
81,
11,
4868,
62,
50,
11,
4868,
62,
47,
11,
83,
349,
11,
3919,
786,
62,
5715,
28,
15,
13,
16,
11,
9888,
28,
17821,
11,
20471,
28,
25101,
11,
46803,
62,
83,
641,
669,
28,
20,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
27131,
378,
262,
2866,
510,
286,
607,
42920,
6981,
3691,
27249,
11,
607,
27249,
290,
42920,
6981,
628,
220,
220,
220,
40117,
198,
220,
220,
220,
24200,
438,
198,
220,
220,
220,
1351,
62,
45,
1058,
1351,
198,
220,
220,
220,
220,
220,
220,
220,
1351,
286,
15225,
357,
259,
262,
3649,
1502,
8,
198,
220,
220,
220,
374,
1058,
493,
198,
220,
220,
220,
220,
220,
220,
220,
4279,
286,
262,
11192,
273,
198,
220,
220,
220,
1351,
62,
50,
1058,
1351,
198,
220,
220,
220,
220,
220,
220,
220,
1351,
286,
262,
6291,
10620,
11,
976,
4129,
355,
1351,
62,
47,
198,
220,
220,
220,
1351,
62,
47,
1058,
1351,
198,
220,
220,
220,
220,
220,
220,
220,
1351,
286,
262,
11454,
6291,
10620,
11,
976,
4129,
355,
1351,
62,
47,
198,
220,
220,
220,
284,
75,
1058,
4274,
198,
220,
220,
220,
220,
220,
220,
220,
15621,
329,
262,
604,
16113,
198,
220,
220,
220,
7838,
62,
5715,
1058,
12178,
11,
11902,
198,
220,
220,
220,
220,
220,
220,
220,
7838,
62,
5715,
286,
262,
11192,
273,
13,
383,
4277,
318,
657,
13,
16,
13,
198,
220,
220,
220,
5046,
1058,
25131,
11,
11902,
198,
220,
220,
220,
220,
220,
220,
220,
1771,
284,
5046,
262,
4006,
1271,
286,
5087,
393,
407,
13,
383,
4277,
318,
6407,
13,
198,
220,
220,
220,
299,
77,
1058,
25131,
11,
11902,
198,
220,
220,
220,
220,
220,
220,
220,
779,
299,
77,
5050,
393,
407,
13,
383,
4277,
318,
10352,
13,
628,
220,
220,
220,
16409,
198,
220,
220,
220,
35656,
198,
220,
220,
220,
6045,
13,
628,
220,
220,
220,
37227,
198,
220,
220,
220,
3691,
874,
796,
45941,
13,
9107,
418,
19510,
11925,
7,
4868,
62,
45,
828,
11925,
7,
4868,
62,
50,
22305,
198,
220,
220,
220,
3691,
372,
874,
796,
45941,
13,
9107,
418,
19510,
11925,
7,
4868,
62,
45,
828,
11925,
7,
4868,
62,
50,
22305,
198,
220,
220,
220,
410,
1416,
1050,
392,
796,
45941,
13,
9107,
418,
19510,
11925,
7,
4868,
62,
45,
828,
11925,
7,
4868,
62,
50,
22305,
198,
220,
220,
220,
329,
1312,
287,
2837,
7,
11925,
7,
4868,
62,
45,
4008,
1058,
198,
220,
220,
220,
220,
220,
220,
220,
640,
62,
874,
796,
657,
198,
220,
220,
220,
220,
220,
220,
220,
640,
62,
372,
874,
796,
657,
198,
220,
220,
220,
220,
220,
220,
220,
640,
62,
372,
66,
1050,
392,
796,
45941,
13,
9107,
418,
7,
11925,
7,
4868,
62,
50,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
640,
62,
66,
1050,
392,
796,
45941,
13,
9107,
418,
7,
11925,
7,
4868,
62,
50,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
329,
479,
287,
2837,
7,
46803,
62,
83,
641,
669,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1777,
62,
7942,
11,
3919,
786,
796,
2315,
62,
22584,
669,
7,
4868,
62,
45,
58,
72,
4357,
1351,
62,
45,
58,
72,
4357,
1351,
62,
45,
58,
72,
4357,
374,
11,
3919,
786,
62,
5715,
28,
3919,
786,
62,
5715,
11,
9888,
28,
9888,
11,
20471,
28,
20471,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
256,
28,
28781,
13,
13155,
62,
1462,
62,
83,
22854,
19510,
14202,
11,
38942,
62,
7942,
4008,
10,
3919,
786,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
479,
855,
15,
1058,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5087,
28,
25120,
62,
15003,
62,
38942,
7,
83,
11,
81,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
299,
77,
855,
25101,
1058,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
19590,
17,
11,
22584,
669,
17,
11,
270,
17,
11,
18224,
17,
11,
2435,
17,
28,
874,
7,
83,
11,
81,
11,
22584,
669,
28,
30073,
13,
22089,
30073,
7,
22584,
669,
828,
270,
62,
9806,
28,
49388,
11,
83,
349,
28,
83,
349,
11,
2435,
62,
8344,
28,
17821,
8,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
19590,
16,
11,
22584,
669,
16,
11,
270,
16,
11,
18224,
16,
11,
66,
457,
16,
11,
2435,
16,
28,
372,
62,
2348,
82,
7,
83,
11,
81,
11,
22584,
669,
28,
30073,
13,
22089,
30073,
7,
22584,
669,
828,
270,
62,
9806,
28,
49388,
11,
83,
349,
28,
83,
349,
11,
2435,
62,
8344,
28,
17821,
8,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
1058,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
19590,
17,
11,
22584,
669,
17,
11,
270,
17,
11,
18224,
17,
11,
2435,
17,
28,
20471,
62,
874,
7,
83,
11,
81,
11,
22584,
669,
28,
30073,
13,
22089,
30073,
7,
22584,
669,
828,
270,
62,
9806,
28,
49388,
11,
83,
349,
28,
83,
349,
11,
2435,
62,
8344,
28,
17821,
8,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
19590,
16,
11,
22584,
669,
16,
11,
270,
16,
11,
18224,
16,
11,
66,
457,
16,
11,
2435,
16,
28,
20471,
62,
372,
62,
2348,
82,
7,
83,
11,
81,
11,
22584,
669,
28,
30073,
13,
22089,
30073,
7,
22584,
669,
828,
270,
62,
9806,
28,
49388,
11,
83,
349,
28,
83,
349,
11,
2435,
62,
8344,
28,
17821,
8,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
640,
62,
874,
15853,
45941,
13,
66,
5700,
388,
7,
2435,
17,
38381,
270,
17,
12,
16,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
640,
62,
372,
874,
15853,
45941,
13,
66,
5700,
388,
7,
2435,
16,
38381,
270,
16,
12,
16,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
264,
287,
2837,
7,
11925,
7,
4868,
62,
50,
8,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
20471,
855,
25101,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
19590,
18,
11,
22584,
669,
18,
11,
270,
18,
11,
18224,
18,
11,
2435,
18,
28,
34,
4805,
6981,
7,
83,
11,
81,
11,
4868,
62,
50,
58,
82,
4357,
4868,
62,
47,
58,
82,
4357,
22584,
669,
28,
30073,
13,
22089,
30073,
7,
22584,
669,
828,
1069,
529,
62,
8056,
28,
25101,
11,
270,
62,
9806,
28,
49388,
11,
8056,
62,
270,
62,
9806,
28,
49388,
11,
83,
349,
28,
83,
349,
11,
2435,
62,
8344,
28,
17821,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
19590,
19,
11,
22584,
669,
19,
11,
270,
19,
11,
18224,
19,
11,
66,
457,
19,
11,
2435,
19,
28,
372,
62,
34,
4805,
6981,
7,
83,
11,
81,
11,
4868,
62,
50,
58,
82,
4357,
4868,
62,
47,
58,
82,
4357,
22584,
669,
28,
30073,
13,
22089,
30073,
7,
22584,
669,
828,
1069,
529,
62,
8056,
28,
25101,
11,
270,
62,
9806,
28,
49388,
11,
8056,
62,
270,
62,
9806,
28,
49388,
11,
83,
349,
28,
83,
349,
11,
2435,
62,
8344,
28,
17821,
8,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
1058,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
19590,
18,
11,
22584,
669,
18,
11,
270,
18,
11,
18224,
18,
11,
2435,
18,
28,
20471,
62,
34,
4805,
6981,
7,
83,
11,
81,
11,
4868,
62,
50,
58,
82,
4357,
4868,
62,
47,
58,
82,
4357,
22584,
669,
28,
30073,
13,
22089,
30073,
7,
22584,
669,
828,
1069,
529,
62,
8056,
28,
25101,
11,
270,
62,
9806,
28,
49388,
11,
8056,
62,
270,
62,
9806,
28,
49388,
11,
83,
349,
28,
83,
349,
11,
2435,
62,
8344,
28,
17821,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
19590,
19,
11,
22584,
669,
19,
11,
270,
19,
11,
18224,
19,
11,
66,
457,
19,
11,
2435,
19,
28,
20471,
62,
372,
62,
34,
4805,
6981,
7,
83,
11,
81,
11,
4868,
62,
50,
58,
82,
4357,
4868,
62,
47,
58,
82,
4357,
22584,
669,
28,
30073,
13,
22089,
30073,
7,
22584,
669,
828,
1069,
529,
62,
8056,
28,
25101,
11,
270,
62,
9806,
28,
49388,
11,
8056,
62,
270,
62,
9806,
28,
49388,
11,
83,
349,
28,
83,
349,
11,
2435,
62,
8344,
28,
17821,
8,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
640,
62,
372,
66,
1050,
392,
58,
82,
60,
15853,
45941,
13,
66,
5700,
388,
7,
2435,
19,
38381,
270,
19,
12,
16,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
640,
62,
66,
1050,
392,
58,
82,
60,
796,
10,
45941,
13,
66,
5700,
388,
7,
2435,
18,
38381,
270,
18,
12,
16,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
628,
220,
220,
220,
220,
220,
220,
220,
3691,
874,
58,
72,
11,
47715,
796,
640,
62,
874,
1220,
4866,
13,
22089,
30073,
7,
2435,
62,
372,
66,
1050,
392,
8,
198,
220,
220,
220,
220,
220,
220,
220,
3691,
372,
874,
58,
72,
11,
47715,
796,
2435,
62,
372,
874,
14,
30073,
13,
22089,
30073,
7,
2435,
62,
372,
66,
1050,
392,
8,
198,
220,
220,
220,
220,
220,
220,
220,
410,
1416,
1050,
392,
58,
72,
11,
47715,
796,
30073,
13,
22089,
30073,
7,
2435,
62,
66,
1050,
392,
20679,
30073,
13,
22089,
30073,
7,
2435,
62,
372,
66,
1050,
392,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
628,
220,
220,
220,
1303,
7110,
198,
220,
220,
220,
458,
83,
13,
26875,
7,
15,
8,
198,
220,
220,
220,
329,
264,
287,
2837,
7,
11925,
7,
4868,
62,
50,
8,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
8177,
796,
366,
50,
796,
366,
1343,
965,
7,
4868,
62,
50,
58,
82,
12962,
1343,
1,
837,
350,
796,
366,
1343,
965,
7,
4868,
62,
47,
58,
82,
12962,
198,
220,
220,
220,
220,
220,
220,
220,
458,
83,
13,
29487,
7,
4868,
62,
45,
11,
3691,
874,
58,
45299,
82,
4357,
18242,
28,
1455,
437,
8,
198,
220,
220,
220,
458,
83,
13,
897,
71,
1370,
7,
88,
796,
352,
11,
220,
3124,
796,
705,
74,
3256,
2815,
10992,
796,
705,
438,
3256,
18242,
2625,
12287,
510,
796,
352,
4943,
198,
220,
220,
220,
458,
83,
13,
87,
18242,
10786,
45,
11537,
198,
220,
220,
220,
458,
83,
13,
2645,
9608,
10786,
22785,
510,
5766,
11537,
198,
220,
220,
220,
458,
83,
13,
1455,
437,
7,
17946,
11639,
13466,
11537,
198,
220,
220,
220,
458,
83,
13,
7839,
10786,
22785,
510,
3691,
435,
82,
11537,
198,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
458,
83,
13,
26875,
7,
16,
8,
198,
220,
220,
220,
329,
264,
287,
2837,
7,
11925,
7,
4868,
62,
50,
8,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
8177,
796,
366,
50,
796,
366,
1343,
965,
7,
4868,
62,
50,
58,
82,
12962,
1343,
1,
837,
350,
796,
366,
1343,
965,
7,
4868,
62,
47,
58,
82,
12962,
198,
220,
220,
220,
220,
220,
220,
220,
458,
83,
13,
29487,
7,
4868,
62,
45,
11,
14259,
372,
874,
58,
45299,
82,
4357,
18242,
28,
1455,
437,
8,
198,
220,
220,
220,
458,
83,
13,
897,
71,
1370,
7,
88,
796,
352,
11,
3124,
796,
705,
74,
3256,
2815,
10992,
796,
705,
438,
3256,
18242,
2625,
12287,
510,
796,
352,
4943,
198,
220,
220,
220,
458,
83,
13,
87,
18242,
10786,
45,
11537,
198,
220,
220,
220,
458,
83,
13,
2645,
9608,
10786,
22785,
510,
5766,
11537,
198,
220,
220,
220,
458,
83,
13,
1455,
437,
7,
17946,
11639,
13466,
11537,
198,
220,
220,
220,
458,
83,
13,
7839,
10786,
22785,
510,
3691,
607,
874,
11537,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
458,
83,
13,
26875,
7,
17,
8,
198,
220,
220,
220,
329,
264,
287,
2837,
7,
11925,
7,
4868,
62,
50,
8,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
8177,
796,
366,
50,
796,
366,
1343,
965,
7,
4868,
62,
50,
58,
82,
12962,
1343,
1,
837,
350,
796,
366,
1343,
965,
7,
4868,
62,
47,
58,
82,
12962,
198,
220,
220,
220,
220,
220,
220,
220,
458,
83,
13,
29487,
7,
4868,
62,
45,
11,
85,
1416,
1050,
392,
58,
45299,
82,
4357,
18242,
28,
1455,
437,
8,
198,
220,
220,
220,
458,
83,
13,
897,
71,
1370,
7,
88,
796,
352,
11,
3124,
796,
705,
74,
3256,
2815,
10992,
796,
705,
438,
3256,
18242,
2625,
12287,
510,
796,
352,
4943,
198,
220,
220,
220,
458,
83,
13,
87,
18242,
10786,
45,
11537,
198,
220,
220,
220,
458,
83,
13,
2645,
9608,
10786,
22785,
510,
5766,
11537,
198,
220,
220,
220,
458,
83,
13,
1455,
437,
7,
17946,
11639,
13466,
11537,
198,
220,
220,
220,
458,
83,
13,
7839,
10786,
22785,
510,
3691,
269,
1050,
392,
11537,
628,
628,
628
] | 1.954379 | 2,455 |
from datetime import date
from flask import abort, Flask, Response
import json
from pyliturgical import calendar
app = Flask(__name__)
@app.route('/reformed/<date_str>')
if __name__ == '__main__':
app.run(host='127.0.0.1', port=8080, debug=True)
| [
6738,
4818,
8079,
1330,
3128,
198,
6738,
42903,
1330,
15614,
11,
46947,
11,
18261,
198,
11748,
33918,
198,
6738,
279,
2645,
270,
31839,
1330,
11845,
198,
198,
1324,
796,
46947,
7,
834,
3672,
834,
8,
628,
198,
31,
1324,
13,
38629,
10786,
14,
260,
12214,
14,
27,
4475,
62,
2536,
29,
11537,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
598,
13,
5143,
7,
4774,
11639,
16799,
13,
15,
13,
15,
13,
16,
3256,
2493,
28,
1795,
1795,
11,
14257,
28,
17821,
8,
198
] | 2.771739 | 92 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import ipdb
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import *
from classLSTMCore import LSTMCore
| [
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
198,
6738,
11593,
37443,
834,
1330,
7297,
198,
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
198,
198,
11748,
28686,
198,
11748,
20966,
9945,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
28034,
198,
11748,
28034,
13,
20471,
355,
299,
77,
198,
11748,
28034,
13,
20471,
13,
45124,
355,
376,
198,
6738,
28034,
13,
2306,
519,
6335,
1330,
1635,
198,
198,
6738,
1398,
43,
2257,
9655,
382,
1330,
406,
2257,
9655,
382,
628
] | 3.506173 | 81 |
import unittest
from io import StringIO
from spacegraphcats.catlas.graph_io import read_from_gxt, write_to_gxt
from spacegraphcats.catlas.graph import Graph
if __name__ == "__main__":
unittest.main()
| [
11748,
555,
715,
395,
198,
6738,
33245,
1330,
10903,
9399,
198,
198,
6738,
2272,
34960,
24619,
13,
9246,
21921,
13,
34960,
62,
952,
1330,
1100,
62,
6738,
62,
70,
742,
11,
3551,
62,
1462,
62,
70,
742,
198,
6738,
2272,
34960,
24619,
13,
9246,
21921,
13,
34960,
1330,
29681,
628,
198,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
198,
220,
220,
220,
555,
715,
395,
13,
12417,
3419,
198
] | 2.849315 | 73 |
from models.base_model import BaseModel
import tensorflow as tf
import numpy as np
from label_storage import LabelStorage
from tqdm import tqdm
import time
from copy import deepcopy
# Three heads acting on the rnn output of size batchxlengthxoutput_size
# They predict IoU, whether the Gt exists, and the shift to GT bounding box
# IoU between two bounding boxes computation in TF
# such that IoU with GT could be optimized.
| [
6738,
4981,
13,
8692,
62,
19849,
1330,
7308,
17633,
198,
11748,
11192,
273,
11125,
355,
48700,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
6167,
62,
35350,
1330,
36052,
31425,
198,
6738,
256,
80,
36020,
1330,
256,
80,
36020,
198,
11748,
640,
198,
6738,
4866,
1330,
2769,
30073,
628,
198,
2,
7683,
6665,
7205,
319,
262,
374,
20471,
5072,
286,
2546,
15458,
87,
13664,
87,
22915,
62,
7857,
198,
2,
1119,
4331,
27853,
52,
11,
1771,
262,
402,
83,
7160,
11,
290,
262,
6482,
284,
7963,
5421,
278,
3091,
198,
198,
2,
27853,
52,
1022,
734,
5421,
278,
10559,
29964,
287,
24958,
198,
2,
884,
326,
27853,
52,
351,
7963,
714,
307,
23392,
13,
628
] | 3.730435 | 115 |
from django.urls import include, path
from .me import views as me_views
from .auth import views as auth_views
from .services import urls as services_urls
app_name = 'multauth'
urlpatterns = [
path('me/', me_views.MeView.as_view(), name='me'),
path('me/password/', me_views.MePasswordView.as_view(), name='me-password'),
path('me/passcode/', me_views.MePasscodeView.as_view(), name='me-passcode'),
path('signin/', auth_views.SigninView.as_view(), name='signin'),
path('signup/', auth_views.SignupView.as_view(), name='signup'),
path('signup/verification/', auth_views.SignupVerificationView.as_view(), name='signup-verification'),
path(r'^', include(services_urls)),
]
| [
6738,
42625,
14208,
13,
6371,
82,
1330,
2291,
11,
3108,
198,
198,
6738,
764,
1326,
1330,
5009,
355,
502,
62,
33571,
198,
6738,
764,
18439,
1330,
5009,
355,
6284,
62,
33571,
198,
6738,
764,
30416,
1330,
2956,
7278,
355,
2594,
62,
6371,
82,
628,
198,
1324,
62,
3672,
796,
705,
16680,
18439,
6,
198,
198,
6371,
33279,
82,
796,
685,
198,
220,
220,
220,
3108,
10786,
1326,
14,
3256,
502,
62,
33571,
13,
5308,
7680,
13,
292,
62,
1177,
22784,
1438,
11639,
1326,
33809,
198,
220,
220,
220,
3108,
10786,
1326,
14,
28712,
14,
3256,
502,
62,
33571,
13,
5308,
35215,
7680,
13,
292,
62,
1177,
22784,
1438,
11639,
1326,
12,
28712,
33809,
198,
220,
220,
220,
3108,
10786,
1326,
14,
6603,
8189,
14,
3256,
502,
62,
33571,
13,
5308,
14478,
8189,
7680,
13,
292,
62,
1177,
22784,
1438,
11639,
1326,
12,
6603,
8189,
33809,
198,
220,
220,
220,
3108,
10786,
12683,
259,
14,
3256,
6284,
62,
33571,
13,
11712,
259,
7680,
13,
292,
62,
1177,
22784,
1438,
11639,
12683,
259,
33809,
198,
220,
220,
220,
3108,
10786,
12683,
929,
14,
3256,
6284,
62,
33571,
13,
11712,
929,
7680,
13,
292,
62,
1177,
22784,
1438,
11639,
12683,
929,
33809,
198,
220,
220,
220,
3108,
10786,
12683,
929,
14,
332,
2649,
14,
3256,
6284,
62,
33571,
13,
11712,
929,
13414,
2649,
7680,
13,
292,
62,
1177,
22784,
1438,
11639,
12683,
929,
12,
332,
2649,
33809,
628,
220,
220,
220,
3108,
7,
81,
6,
61,
3256,
2291,
7,
30416,
62,
6371,
82,
36911,
198,
60,
198
] | 2.759843 | 254 |
import pytest
import packerlicious.post_processor as post_processor
| [
11748,
12972,
9288,
198,
198,
11748,
2353,
263,
677,
699,
13,
7353,
62,
41341,
355,
1281,
62,
41341,
628,
628,
198
] | 3.47619 | 21 |
# 2015 lab 1
print('Hello World')
| [
2,
1853,
2248,
352,
198,
198,
4798,
10786,
15496,
2159,
11537,
198
] | 2.916667 | 12 |
from odoo import models, fields, api, _
from odoo.exceptions import ValidationError, Warning
| [
6738,
16298,
2238,
1330,
4981,
11,
7032,
11,
40391,
11,
4808,
198,
6738,
16298,
2238,
13,
1069,
11755,
1330,
3254,
24765,
12331,
11,
15932,
628
] | 3.76 | 25 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from typing import List, Dict
import math
import random
import aiohttp
import asyncio
import discord
from discord.ext import commands, tasks
from contents.character.Investigator import Investigator
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
201,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
201,
198,
6738,
19720,
1330,
7343,
11,
360,
713,
201,
198,
11748,
10688,
201,
198,
11748,
4738,
201,
198,
11748,
257,
952,
4023,
201,
198,
11748,
30351,
952,
201,
198,
201,
198,
11748,
36446,
201,
198,
6738,
36446,
13,
2302,
1330,
9729,
11,
8861,
201,
198,
201,
198,
6738,
10154,
13,
22769,
13,
19070,
23823,
1330,
49499,
201,
198,
201,
198,
201,
198
] | 3.022989 | 87 |
# Copyright Aaron Stanek 2021
# See LICENSE for more details
import sys
if sys.version_info[0] != 3 or sys.version_info[1] < 6:
raise Exception("Python Password Utility requires Python 3.6 or later. Compatibility with any major versions after Python 3 is not guaranteed.")
import hashlib
import secrets
import time
from .chars import normalize_valid_chars, create_character_map
# try to use SHA-3 if possible
# default to SHA-2 if you have to
if "sha3_512" in hashlib.algorithms_available:
SHA512 = lambda x : hashlib.sha3_512(x).digest()
SHA512_number = 3
else:
SHA512 = lambda x : hashlib.sha512(x).digest()
SHA512_number = 2
# this class is used to guarantee
# that the input to every hash
# is different
| [
2,
15069,
12139,
7299,
988,
33448,
201,
198,
2,
4091,
38559,
24290,
329,
517,
3307,
201,
198,
201,
198,
11748,
25064,
201,
198,
201,
198,
361,
25064,
13,
9641,
62,
10951,
58,
15,
60,
14512,
513,
393,
25064,
13,
9641,
62,
10951,
58,
16,
60,
1279,
718,
25,
201,
198,
220,
220,
220,
5298,
35528,
7203,
37906,
30275,
34030,
4433,
11361,
513,
13,
21,
393,
1568,
13,
46021,
351,
597,
1688,
6300,
706,
11361,
513,
318,
407,
11462,
19570,
201,
198,
201,
198,
11748,
12234,
8019,
201,
198,
11748,
13141,
201,
198,
11748,
640,
201,
198,
6738,
764,
354,
945,
1330,
3487,
1096,
62,
12102,
62,
354,
945,
11,
2251,
62,
22769,
62,
8899,
201,
198,
201,
198,
2,
1949,
284,
779,
25630,
12,
18,
611,
1744,
201,
198,
2,
4277,
284,
25630,
12,
17,
611,
345,
423,
284,
201,
198,
201,
198,
361,
366,
26270,
18,
62,
25836,
1,
287,
12234,
8019,
13,
282,
7727,
907,
62,
15182,
25,
201,
198,
220,
220,
220,
25630,
25836,
796,
37456,
2124,
1058,
12234,
8019,
13,
26270,
18,
62,
25836,
7,
87,
737,
12894,
395,
3419,
201,
198,
220,
220,
220,
25630,
25836,
62,
17618,
796,
513,
201,
198,
17772,
25,
201,
198,
220,
220,
220,
25630,
25836,
796,
37456,
2124,
1058,
12234,
8019,
13,
26270,
25836,
7,
87,
737,
12894,
395,
3419,
201,
198,
220,
220,
220,
25630,
25836,
62,
17618,
796,
362,
201,
198,
220,
220,
220,
1303,
428,
1398,
318,
973,
284,
9149,
201,
198,
220,
220,
220,
1303,
326,
262,
5128,
284,
790,
12234,
201,
198,
220,
220,
220,
1303,
318,
1180,
201,
198
] | 2.894737 | 266 |
# ===========================================================================
# tfrecords_utils.py-------------------------------------------------------
# ===========================================================================
""" The following functions can be used to convert a value to a type compatible with tf.Example.
The tf.train.Feature message type can accept one of the following three types. Most other generic types can be coerced into one of these:
tf.train.BytesList : string / byte
tf.train.FloatList : float (float32) / double (float64)
tf.train.Int64List : bool / enum / int32 / uint32 / int64 / uint64
In order to convert a standard TensorFlow type to a tf.Example-compatible tf.train.Feature, you can use the shortcut functions below. Note that each function takes a scalar input value and returns a tf.train.Feature containing one of the three list types above.
"""
# import ------------------------------------------------------------------
# ---------------------------------------------------------------------------
from dl_multi.__init__ import _logger
import dl_multi.utils.general as glu
import dl_multi.utils.imgio
from dl_multi.utils import imgtools
import numpy as np
import pathlib
import tensorflow as tf
import tifffile
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def _bytes_feature(value, serialize=False):
"""Returns a bytes_list from a string / byte.
Parameters
----------
value : string / byte
Returns
-------
feature : bytes_list
Converted value compatible with tf.Example.
"""
if isinstance(value, type(tf.constant(0))):
value = value.numpy() # BytesList won't unpack a string from an EagerTensor.
feature = tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
return feature if not serialize else feature.SerializeToString()
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def _float_feature(value, serialize=False):
"""Returns a float_list from a float / double.
Parameters
----------
value : float / double
Returns
-------
feature : float_list
Converted value compatible with tf.Example.
"""
feature = tf.train.Feature(float_list=tf.train.FloatList(value=[value]))
return feature if not serialize else feature.SerializeToString()
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def _int64_feature(value, serialize=False):
"""Returns an int64_list from a bool / enum / int / uint.
Parameters
----------
value : double bool / enum / int / uint
Returns
-------
feature : int64_list
Converted value compatible with tf.Example.
"""
feature = tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
return feature if not serialize else feature.SerializeToString()
# Create a dictionary describing the features. The key of the dict should be the same with the key in writing function.
_feature_specs = {
"features" : {
"rows": tf.io.FixedLenFeature([], tf.int64),
"cols": tf.io.FixedLenFeature([], tf.int64),
"image": tf.io.FixedLenFeature([], tf.string),
"height": tf.io.FixedLenFeature([], tf.string),
"label": tf.io.FixedLenFeature([], tf.string)
},
"images" : [
{"spec": "image", "channels": 3, "type" : tf.uint8, "ext": ".tif"},
{"spec": "height", "channels": 1, "type" : tf.float32, "ext": ".tif"},
{"spec": "label", "channels": 1, "type" : tf.uint8, "ext": ".tif"}
]
}
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def write_old_tfrecord(files, param_specs, param_tfrecord, param_label=dict()):
"""Create a dictionary with features that may be relevant."""
_logger.debug("Start creation of tfrecords with settings:\nparam_tfrecord:\t{}\nparam_label:\t{}".format(param_tfrecord, param_label))
# settings ------------------------------------------------------------
# -----------------------------------------------------------------------
img_in = dl_multi.utils.imgio.get_data(files, param_specs, param_label=param_label)
tfrecord_file = glu.Folder().set_folder(**param_tfrecord["tfrecord"])
# execution -----------------------------------------------------------
# -----------------------------------------------------------------------
_logger.debug("[SAVE] '{}'".format(tfrecord_file))
with tf.io.TFRecordWriter(tfrecord_file) as writer:
for item in iter(img_in):
for item_spec in iter(item):
print(item_spec.path)
# img = item.spec("image").data
# tf_example = get_tfrecord_features(
# img.shape,
# img.tostring(),
# item.spec("height").data.tostring(),
# imgtools.labels_to_image(item.spec("label").data, param_label).tostring()
# )
# writer.write(tf_example.SerializeToString())
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def write_tfrecord(files, param_specs, param_tfrecord, param_label=dict()):
"""Create a dictionary with features that may be relevant."""
_logger.debug("Start creation of tfrecords with settings:\nparam_tfrecord:\t{}\nparam_label:\t{}".format(param_tfrecord, param_label))
# settings ------------------------------------------------------------
# -----------------------------------------------------------------------
img_in = dl_multi.utils.imgio.get_data(files, param_specs, param_label=param_label)
tfrecord_file = glu.Folder().set_folder(**param_tfrecord["tfrecord"])
# execution -----------------------------------------------------------
# -----------------------------------------------------------------------
_logger.debug("[SAVE] '{}'".format(tfrecord_file))
with tf.io.TFRecordWriter(tfrecord_file) as writer:
for data_set in iter(img_in):
# Create a dictionary describing the features. The key of the dict should be the same with the key in writing function.
shape = data_set.spec("image").data.shape
feature = {
"rows": _int64_feature(shape[0]),
"cols": _int64_feature(shape[1]),
}
for data_item in iter(data_set):
feature[data_item.spec] = _bytes_feature(data_item.data.tostring())
writer.write(tf.train.Example(
features=tf.train.Features(feature=feature)
).SerializeToString())
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def get_tfrecord_features(shape, image_string, height_string, mask_string):
"""Create a dictionary with features that may be relevant."""
# image_shape = tf.image.decode_jpeg(image_string).shape
# Create a dictionary describing the features. The key of the dict should be the same with the key in writing function.
feature = {
"rows": _int64_feature(shape[0]),
"cols": _int64_feature(shape[1]),
"image": _bytes_feature(image_string),
"height": _bytes_feature(height_string),
"label": _bytes_feature(mask_string),
}
return tf.train.Example(
features=tf.train.Features(
feature=feature)
)
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def read_tfrecord_attempt(tfrecord_queue):
"""Return image/annotation tensors that are created by reading tfrecord file.
The function accepts tfrecord filenames queue as an input which is usually
can be created using tf.train.string_input_producer() where filename
is specified with desired number of epochs. This function takes queue
produced by aforemention tf.train.string_input_producer() and defines
tensors converted from raw binary representations into
reshaped image/annotation tensors.
Parameters
----------
tfrecord_filenames_queue : tfrecord filename queue
String queue object from tf.train.string_input_producer()
Returns
-------
image, annotation : tuple of tf.int32 (image, annotation)
Tuple of image/annotation tensors
"""
reader = tf.TFRecordReader()
_, serialized_example = reader.read(tfrecord_queue)
# Create a dictionary describing the features. The key of the dict should be the same with the key in writing function.
features = tf.io.parse_single_example(
serialized_example,
features={
'height': tf.io.FixedLenFeature([], tf.int64),
'width': tf.io.FixedLenFeature([], tf.int64),
'data_raw': tf.io.FixedLenFeature([], tf.string),
'mask_raw': tf.io.FixedLenFeature([], tf.string)
}
)
image = tf.decode_raw(features['data_raw'], tf.float32)
annotation = tf.decode_raw(features['mask_raw'], tf.uint8)
height = tf.cast(features['height'], tf.int32)
width = tf.cast(features['width'], tf.int32)
image_shape = tf.stack([height, width, 4])
annotation_shape = tf.stack([height, width, 1])
image = tf.reshape(image, image_shape)
annotation = tf.reshape(annotation, annotation_shape)
return image, annotation | [
2,
38093,
2559,
855,
198,
2,
220,
220,
48700,
8344,
3669,
62,
26791,
13,
9078,
3880,
19351,
6329,
198,
2,
38093,
2559,
855,
198,
198,
37811,
383,
1708,
5499,
460,
307,
973,
284,
10385,
257,
1988,
284,
257,
2099,
11670,
351,
48700,
13,
16281,
13,
628,
220,
220,
220,
383,
48700,
13,
27432,
13,
38816,
3275,
2099,
460,
2453,
530,
286,
262,
1708,
1115,
3858,
13,
4042,
584,
14276,
3858,
460,
307,
48474,
656,
530,
286,
777,
25,
628,
220,
220,
220,
48700,
13,
27432,
13,
45992,
8053,
1058,
4731,
1220,
18022,
198,
220,
220,
220,
48700,
13,
27432,
13,
43879,
8053,
1058,
12178,
357,
22468,
2624,
8,
1220,
4274,
357,
22468,
2414,
8,
628,
220,
220,
220,
48700,
13,
27432,
13,
5317,
2414,
8053,
1058,
20512,
1220,
33829,
1220,
493,
2624,
1220,
20398,
2624,
1220,
493,
2414,
1220,
20398,
2414,
628,
220,
220,
220,
554,
1502,
284,
10385,
257,
3210,
309,
22854,
37535,
2099,
284,
257,
48700,
13,
16281,
12,
38532,
48700,
13,
27432,
13,
38816,
11,
345,
460,
779,
262,
29401,
5499,
2174,
13,
5740,
326,
1123,
2163,
2753,
257,
16578,
283,
5128,
1988,
290,
5860,
257,
48700,
13,
27432,
13,
38816,
7268,
530,
286,
262,
1115,
1351,
3858,
2029,
13,
198,
37811,
198,
198,
2,
220,
220,
1330,
16529,
438,
198,
2,
16529,
32284,
198,
6738,
220,
288,
75,
62,
41684,
13,
834,
15003,
834,
1330,
4808,
6404,
1362,
198,
11748,
288,
75,
62,
41684,
13,
26791,
13,
24622,
355,
1278,
84,
198,
11748,
288,
75,
62,
41684,
13,
26791,
13,
9600,
952,
198,
6738,
288,
75,
62,
41684,
13,
26791,
1330,
545,
13655,
10141,
198,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
3108,
8019,
198,
11748,
11192,
273,
11125,
355,
48700,
198,
11748,
256,
361,
487,
576,
198,
198,
2,
220,
220,
2163,
16529,
198,
2,
16529,
32284,
198,
4299,
4808,
33661,
62,
30053,
7,
8367,
11,
11389,
1096,
28,
25101,
2599,
198,
220,
220,
220,
37227,
35561,
257,
9881,
62,
4868,
422,
257,
4731,
1220,
18022,
13,
628,
220,
220,
220,
40117,
198,
220,
220,
220,
24200,
438,
198,
220,
220,
220,
1988,
1058,
4731,
1220,
18022,
628,
220,
220,
220,
16409,
198,
220,
220,
220,
35656,
198,
220,
220,
220,
3895,
1058,
9881,
62,
4868,
198,
220,
220,
220,
220,
220,
220,
220,
43433,
1988,
11670,
351,
48700,
13,
16281,
13,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
611,
318,
39098,
7,
8367,
11,
2099,
7,
27110,
13,
9979,
415,
7,
15,
4008,
2599,
198,
220,
220,
220,
220,
220,
1988,
796,
1988,
13,
77,
32152,
3419,
1303,
2750,
4879,
8053,
1839,
470,
555,
8002,
257,
4731,
422,
281,
412,
3536,
51,
22854,
13,
198,
220,
220,
220,
3895,
796,
48700,
13,
27432,
13,
38816,
7,
33661,
62,
4868,
28,
27110,
13,
27432,
13,
45992,
8053,
7,
8367,
41888,
8367,
60,
4008,
198,
220,
220,
220,
1441,
3895,
611,
407,
11389,
1096,
2073,
3895,
13,
32634,
1096,
2514,
10100,
3419,
198,
198,
2,
220,
220,
2163,
16529,
198,
2,
16529,
32284,
198,
4299,
4808,
22468,
62,
30053,
7,
8367,
11,
11389,
1096,
28,
25101,
2599,
198,
220,
220,
220,
37227,
35561,
257,
12178,
62,
4868,
422,
257,
12178,
1220,
4274,
13,
628,
220,
220,
220,
40117,
198,
220,
220,
220,
24200,
438,
198,
220,
220,
220,
1988,
1058,
12178,
1220,
4274,
628,
220,
220,
220,
16409,
198,
220,
220,
220,
35656,
198,
220,
220,
220,
3895,
1058,
12178,
62,
4868,
198,
220,
220,
220,
220,
220,
220,
220,
43433,
1988,
11670,
351,
48700,
13,
16281,
13,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
3895,
796,
48700,
13,
27432,
13,
38816,
7,
22468,
62,
4868,
28,
27110,
13,
27432,
13,
43879,
8053,
7,
8367,
41888,
8367,
60,
4008,
198,
220,
220,
220,
1441,
3895,
611,
407,
11389,
1096,
2073,
3895,
13,
32634,
1096,
2514,
10100,
3419,
198,
198,
2,
220,
220,
2163,
16529,
198,
2,
16529,
32284,
198,
4299,
4808,
600,
2414,
62,
30053,
7,
8367,
11,
11389,
1096,
28,
25101,
2599,
198,
220,
220,
220,
37227,
35561,
281,
493,
2414,
62,
4868,
422,
257,
20512,
1220,
33829,
1220,
493,
1220,
20398,
13,
628,
220,
220,
220,
40117,
198,
220,
220,
220,
24200,
438,
198,
220,
220,
220,
1988,
1058,
4274,
20512,
1220,
33829,
1220,
493,
1220,
20398,
628,
220,
220,
220,
16409,
198,
220,
220,
220,
35656,
198,
220,
220,
220,
3895,
1058,
493,
2414,
62,
4868,
198,
220,
220,
220,
220,
220,
220,
220,
43433,
1988,
11670,
351,
48700,
13,
16281,
13,
198,
220,
220,
220,
37227,
220,
220,
198,
220,
220,
220,
3895,
796,
48700,
13,
27432,
13,
38816,
7,
600,
2414,
62,
4868,
28,
27110,
13,
27432,
13,
5317,
2414,
8053,
7,
8367,
41888,
8367,
60,
4008,
198,
220,
220,
220,
1441,
3895,
611,
407,
11389,
1096,
2073,
3895,
13,
32634,
1096,
2514,
10100,
3419,
198,
198,
2,
13610,
257,
22155,
12059,
262,
3033,
13,
383,
1994,
286,
262,
8633,
815,
307,
262,
976,
351,
262,
1994,
287,
3597,
2163,
13,
198,
198,
62,
30053,
62,
4125,
6359,
796,
1391,
198,
220,
220,
220,
366,
40890,
1,
1058,
1391,
198,
220,
220,
220,
220,
220,
220,
220,
366,
8516,
1298,
48700,
13,
952,
13,
13715,
30659,
38816,
26933,
4357,
48700,
13,
600,
2414,
828,
198,
220,
220,
220,
220,
220,
220,
220,
366,
4033,
82,
1298,
48700,
13,
952,
13,
13715,
30659,
38816,
26933,
4357,
48700,
13,
600,
2414,
828,
198,
220,
220,
220,
220,
220,
220,
220,
366,
9060,
1298,
48700,
13,
952,
13,
13715,
30659,
38816,
26933,
4357,
48700,
13,
8841,
828,
198,
220,
220,
220,
220,
220,
220,
220,
366,
17015,
1298,
48700,
13,
952,
13,
13715,
30659,
38816,
26933,
4357,
48700,
13,
8841,
828,
198,
220,
220,
220,
220,
220,
220,
220,
366,
18242,
1298,
48700,
13,
952,
13,
13715,
30659,
38816,
26933,
4357,
48700,
13,
8841,
8,
198,
220,
220,
220,
8964,
220,
198,
220,
220,
220,
366,
17566,
1,
1058,
685,
198,
220,
220,
220,
220,
220,
220,
220,
19779,
16684,
1298,
366,
9060,
1600,
366,
354,
8961,
1298,
513,
11,
366,
4906,
1,
1058,
48700,
13,
28611,
23,
11,
366,
2302,
1298,
27071,
49929,
25719,
198,
220,
220,
220,
220,
220,
220,
220,
19779,
16684,
1298,
366,
17015,
1600,
366,
354,
8961,
1298,
352,
11,
366,
4906,
1,
1058,
48700,
13,
22468,
2624,
11,
366,
2302,
1298,
27071,
49929,
25719,
198,
220,
220,
220,
220,
220,
220,
220,
19779,
16684,
1298,
366,
18242,
1600,
366,
354,
8961,
1298,
352,
11,
366,
4906,
1,
1058,
48700,
13,
28611,
23,
11,
366,
2302,
1298,
27071,
49929,
20662,
220,
198,
220,
220,
220,
2361,
198,
92,
198,
198,
2,
220,
220,
2163,
16529,
198,
2,
16529,
32284,
198,
4299,
3551,
62,
727,
62,
27110,
22105,
7,
16624,
11,
5772,
62,
4125,
6359,
11,
5772,
62,
27110,
22105,
11,
5772,
62,
18242,
28,
11600,
3419,
2599,
198,
220,
220,
220,
37227,
16447,
257,
22155,
351,
3033,
326,
743,
307,
5981,
526,
15931,
628,
220,
220,
220,
4808,
6404,
1362,
13,
24442,
7203,
10434,
6282,
286,
48700,
8344,
3669,
351,
6460,
7479,
77,
17143,
62,
27110,
22105,
7479,
83,
90,
32239,
77,
17143,
62,
18242,
7479,
83,
90,
92,
1911,
18982,
7,
17143,
62,
27110,
22105,
11,
5772,
62,
18242,
4008,
220,
220,
220,
220,
220,
220,
628,
220,
220,
220,
1303,
220,
220,
6460,
20368,
1783,
10541,
198,
220,
220,
220,
1303,
16529,
26866,
198,
220,
220,
220,
33705,
62,
259,
796,
288,
75,
62,
41684,
13,
26791,
13,
9600,
952,
13,
1136,
62,
7890,
7,
16624,
11,
5772,
62,
4125,
6359,
11,
5772,
62,
18242,
28,
17143,
62,
18242,
8,
198,
220,
220,
220,
220,
198,
220,
220,
220,
48700,
22105,
62,
7753,
796,
1278,
84,
13,
41092,
22446,
2617,
62,
43551,
7,
1174,
17143,
62,
27110,
22105,
14692,
27110,
22105,
8973,
8,
198,
220,
220,
198,
220,
220,
220,
1303,
220,
220,
9706,
20368,
22369,
6329,
198,
220,
220,
220,
1303,
16529,
26866,
220,
220,
198,
220,
220,
220,
4808,
6404,
1362,
13,
24442,
7203,
58,
4090,
6089,
60,
705,
90,
92,
6,
1911,
18982,
7,
27110,
22105,
62,
7753,
4008,
198,
220,
220,
220,
351,
48700,
13,
952,
13,
10234,
23739,
34379,
7,
27110,
22105,
62,
7753,
8,
355,
6260,
25,
198,
220,
220,
220,
220,
220,
220,
220,
329,
2378,
287,
11629,
7,
9600,
62,
259,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
2378,
62,
16684,
287,
11629,
7,
9186,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3601,
7,
9186,
62,
16684,
13,
6978,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
33705,
796,
2378,
13,
16684,
7203,
9060,
11074,
7890,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
48700,
62,
20688,
796,
651,
62,
27110,
22105,
62,
40890,
7,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
220,
220,
220,
220,
33705,
13,
43358,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
220,
220,
220,
220,
33705,
13,
83,
455,
1806,
22784,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
220,
220,
220,
220,
2378,
13,
16684,
7203,
17015,
11074,
7890,
13,
83,
455,
1806,
22784,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
220,
220,
220,
220,
545,
13655,
10141,
13,
23912,
1424,
62,
1462,
62,
9060,
7,
9186,
13,
16684,
7203,
18242,
11074,
7890,
11,
5772,
62,
18242,
737,
83,
455,
1806,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
1267,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
6260,
13,
13564,
7,
27110,
62,
20688,
13,
32634,
1096,
2514,
10100,
28955,
198,
198,
2,
220,
220,
2163,
16529,
198,
2,
16529,
32284,
198,
4299,
3551,
62,
27110,
22105,
7,
16624,
11,
5772,
62,
4125,
6359,
11,
5772,
62,
27110,
22105,
11,
5772,
62,
18242,
28,
11600,
3419,
2599,
198,
220,
220,
220,
37227,
16447,
257,
22155,
351,
3033,
326,
743,
307,
5981,
526,
15931,
628,
220,
220,
220,
4808,
6404,
1362,
13,
24442,
7203,
10434,
6282,
286,
48700,
8344,
3669,
351,
6460,
7479,
77,
17143,
62,
27110,
22105,
7479,
83,
90,
32239,
77,
17143,
62,
18242,
7479,
83,
90,
92,
1911,
18982,
7,
17143,
62,
27110,
22105,
11,
5772,
62,
18242,
4008,
220,
220,
220,
220,
220,
220,
628,
220,
220,
220,
1303,
220,
220,
6460,
20368,
1783,
10541,
198,
220,
220,
220,
1303,
16529,
26866,
198,
220,
220,
220,
33705,
62,
259,
796,
288,
75,
62,
41684,
13,
26791,
13,
9600,
952,
13,
1136,
62,
7890,
7,
16624,
11,
5772,
62,
4125,
6359,
11,
5772,
62,
18242,
28,
17143,
62,
18242,
8,
198,
220,
220,
220,
220,
198,
220,
220,
220,
48700,
22105,
62,
7753,
796,
1278,
84,
13,
41092,
22446,
2617,
62,
43551,
7,
1174,
17143,
62,
27110,
22105,
14692,
27110,
22105,
8973,
8,
198,
220,
220,
198,
220,
220,
220,
1303,
220,
220,
9706,
20368,
22369,
6329,
198,
220,
220,
220,
1303,
16529,
26866,
220,
220,
198,
220,
220,
220,
4808,
6404,
1362,
13,
24442,
7203,
58,
4090,
6089,
60,
705,
90,
92,
6,
1911,
18982,
7,
27110,
22105,
62,
7753,
4008,
198,
220,
220,
220,
351,
48700,
13,
952,
13,
10234,
23739,
34379,
7,
27110,
22105,
62,
7753,
8,
355,
6260,
25,
198,
220,
220,
220,
220,
220,
220,
220,
329,
1366,
62,
2617,
287,
11629,
7,
9600,
62,
259,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
13610,
257,
22155,
12059,
262,
3033,
13,
383,
1994,
286,
262,
8633,
815,
307,
262,
976,
351,
262,
1994,
287,
3597,
2163,
13,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5485,
796,
1366,
62,
2617,
13,
16684,
7203,
9060,
11074,
7890,
13,
43358,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3895,
796,
1391,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
8516,
1298,
4808,
600,
2414,
62,
30053,
7,
43358,
58,
15,
46570,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
4033,
82,
1298,
4808,
600,
2414,
62,
30053,
7,
43358,
58,
16,
46570,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1782,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
1366,
62,
9186,
287,
11629,
7,
7890,
62,
2617,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3895,
58,
7890,
62,
9186,
13,
16684,
60,
796,
4808,
33661,
62,
30053,
7,
7890,
62,
9186,
13,
7890,
13,
83,
455,
1806,
28955,
220,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6260,
13,
13564,
7,
27110,
13,
27432,
13,
16281,
7,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3033,
28,
27110,
13,
27432,
13,
23595,
7,
30053,
28,
30053,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6739,
32634,
1096,
2514,
10100,
28955,
198,
198,
2,
220,
220,
2163,
16529,
198,
2,
16529,
32284,
198,
4299,
651,
62,
27110,
22105,
62,
40890,
7,
43358,
11,
2939,
62,
8841,
11,
6001,
62,
8841,
11,
9335,
62,
8841,
2599,
198,
220,
220,
220,
37227,
16447,
257,
22155,
351,
3033,
326,
743,
307,
5981,
526,
15931,
628,
220,
220,
220,
1303,
2939,
62,
43358,
796,
48700,
13,
9060,
13,
12501,
1098,
62,
73,
22071,
7,
9060,
62,
8841,
737,
43358,
220,
220,
220,
220,
198,
220,
220,
220,
220,
198,
220,
220,
220,
1303,
13610,
257,
22155,
12059,
262,
3033,
13,
383,
1994,
286,
262,
8633,
815,
307,
262,
976,
351,
262,
1994,
287,
3597,
2163,
13,
198,
220,
220,
220,
3895,
796,
1391,
198,
220,
220,
220,
220,
220,
220,
220,
366,
8516,
1298,
4808,
600,
2414,
62,
30053,
7,
43358,
58,
15,
46570,
198,
220,
220,
220,
220,
220,
220,
220,
366,
4033,
82,
1298,
4808,
600,
2414,
62,
30053,
7,
43358,
58,
16,
46570,
198,
220,
220,
220,
220,
220,
220,
220,
366,
9060,
1298,
4808,
33661,
62,
30053,
7,
9060,
62,
8841,
828,
198,
220,
220,
220,
220,
220,
220,
220,
366,
17015,
1298,
4808,
33661,
62,
30053,
7,
17015,
62,
8841,
828,
198,
220,
220,
220,
220,
220,
220,
220,
366,
18242,
1298,
4808,
33661,
62,
30053,
7,
27932,
62,
8841,
828,
198,
220,
220,
220,
1782,
628,
220,
220,
220,
1441,
48700,
13,
27432,
13,
16281,
7,
198,
220,
220,
220,
220,
220,
220,
220,
3033,
28,
27110,
13,
27432,
13,
23595,
7,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3895,
28,
30053,
8,
198,
220,
220,
220,
220,
220,
220,
220,
1267,
198,
198,
2,
220,
220,
2163,
16529,
198,
2,
16529,
32284,
198,
198,
2,
220,
220,
2163,
16529,
198,
2,
16529,
32284,
198,
198,
2,
220,
220,
2163,
16529,
198,
2,
16529,
32284,
198,
198,
2,
220,
220,
2163,
16529,
198,
2,
16529,
32284,
198,
198,
2,
220,
220,
2163,
16529,
198,
2,
16529,
32284,
198,
198,
2,
220,
220,
2163,
16529,
198,
2,
16529,
32284,
198,
4299,
1100,
62,
27110,
22105,
62,
1078,
1791,
7,
27110,
22105,
62,
36560,
2599,
198,
220,
220,
220,
37227,
13615,
2939,
14,
1236,
14221,
11192,
669,
326,
389,
2727,
416,
3555,
48700,
22105,
2393,
13,
628,
220,
220,
220,
383,
2163,
18178,
48700,
22105,
1226,
268,
1047,
16834,
355,
281,
5128,
543,
318,
3221,
198,
220,
220,
220,
460,
307,
2727,
1262,
48700,
13,
27432,
13,
8841,
62,
15414,
62,
18230,
2189,
3419,
810,
29472,
198,
220,
220,
220,
318,
7368,
351,
10348,
1271,
286,
36835,
82,
13,
770,
2163,
2753,
16834,
198,
220,
220,
220,
4635,
416,
18036,
434,
295,
48700,
13,
27432,
13,
8841,
62,
15414,
62,
18230,
2189,
3419,
290,
15738,
198,
220,
220,
220,
11192,
669,
11513,
422,
8246,
13934,
24612,
656,
198,
220,
220,
220,
27179,
5813,
2939,
14,
1236,
14221,
11192,
669,
13,
628,
220,
220,
220,
40117,
198,
220,
220,
220,
24200,
438,
198,
220,
220,
220,
48700,
22105,
62,
10379,
268,
1047,
62,
36560,
1058,
48700,
22105,
29472,
16834,
198,
220,
220,
220,
220,
220,
220,
220,
10903,
16834,
2134,
422,
48700,
13,
27432,
13,
8841,
62,
15414,
62,
18230,
2189,
3419,
198,
220,
220,
220,
220,
198,
220,
220,
220,
16409,
198,
220,
220,
220,
35656,
198,
220,
220,
220,
2939,
11,
23025,
1058,
46545,
286,
48700,
13,
600,
2624,
357,
9060,
11,
23025,
8,
198,
220,
220,
220,
220,
220,
220,
220,
309,
29291,
286,
2939,
14,
1236,
14221,
11192,
669,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
198,
220,
220,
220,
9173,
796,
48700,
13,
10234,
23739,
33634,
3419,
628,
220,
220,
220,
4808,
11,
11389,
1143,
62,
20688,
796,
9173,
13,
961,
7,
27110,
22105,
62,
36560,
8,
628,
220,
220,
220,
1303,
13610,
257,
22155,
12059,
262,
3033,
13,
383,
1994,
286,
262,
8633,
815,
307,
262,
976,
351,
262,
1994,
287,
3597,
2163,
13,
198,
220,
220,
220,
3033,
796,
48700,
13,
952,
13,
29572,
62,
29762,
62,
20688,
7,
198,
220,
220,
220,
220,
220,
11389,
1143,
62,
20688,
11,
198,
220,
220,
220,
220,
220,
3033,
34758,
198,
220,
220,
220,
220,
220,
220,
220,
705,
17015,
10354,
48700,
13,
952,
13,
13715,
30659,
38816,
26933,
4357,
48700,
13,
600,
2414,
828,
198,
220,
220,
220,
220,
220,
220,
220,
705,
10394,
10354,
48700,
13,
952,
13,
13715,
30659,
38816,
26933,
4357,
48700,
13,
600,
2414,
828,
198,
220,
220,
220,
220,
220,
220,
220,
705,
7890,
62,
1831,
10354,
48700,
13,
952,
13,
13715,
30659,
38816,
26933,
4357,
48700,
13,
8841,
828,
198,
220,
220,
220,
220,
220,
220,
220,
705,
27932,
62,
1831,
10354,
48700,
13,
952,
13,
13715,
30659,
38816,
26933,
4357,
48700,
13,
8841,
8,
198,
220,
220,
220,
220,
220,
220,
220,
1782,
198,
220,
220,
220,
1267,
628,
220,
220,
220,
2939,
796,
48700,
13,
12501,
1098,
62,
1831,
7,
40890,
17816,
7890,
62,
1831,
6,
4357,
48700,
13,
22468,
2624,
8,
198,
220,
220,
220,
23025,
796,
48700,
13,
12501,
1098,
62,
1831,
7,
40890,
17816,
27932,
62,
1831,
6,
4357,
48700,
13,
28611,
23,
8,
198,
220,
220,
220,
220,
198,
220,
220,
220,
6001,
796,
48700,
13,
2701,
7,
40890,
17816,
17015,
6,
4357,
48700,
13,
600,
2624,
8,
198,
220,
220,
220,
9647,
796,
48700,
13,
2701,
7,
40890,
17816,
10394,
6,
4357,
48700,
13,
600,
2624,
8,
198,
220,
220,
220,
220,
198,
220,
220,
220,
2939,
62,
43358,
796,
48700,
13,
25558,
26933,
17015,
11,
9647,
11,
604,
12962,
198,
220,
220,
220,
23025,
62,
43358,
796,
48700,
13,
25558,
26933,
17015,
11,
9647,
11,
352,
12962,
198,
220,
220,
220,
2939,
796,
48700,
13,
3447,
1758,
7,
9060,
11,
2939,
62,
43358,
8,
198,
220,
220,
220,
23025,
796,
48700,
13,
3447,
1758,
7,
1236,
14221,
11,
23025,
62,
43358,
8,
198,
220,
220,
220,
220,
198,
220,
220,
220,
1441,
2939,
11,
23025,
220,
220
] | 3.333749 | 3,209 |
# -*- coding: utf-8 -*-
"""
oreos.core
~~~~~~~~~~
The creamy white center.
"""
from .monkeys import SimpleCookie
def dict_from_string(s):
''''''
cookies = dict()
c = SimpleCookie()
c.load(s)
for k,v in c.items():
cookies.update({k: v.value})
return cookies | [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
37811,
198,
382,
418,
13,
7295,
198,
15116,
4907,
198,
198,
464,
27892,
2330,
3641,
13,
198,
37811,
198,
198,
6738,
764,
2144,
13083,
1330,
17427,
34,
18055,
628,
198,
4299,
8633,
62,
6738,
62,
8841,
7,
82,
2599,
198,
220,
220,
220,
705,
39115,
6,
628,
198,
220,
220,
220,
14746,
796,
8633,
3419,
628,
220,
220,
220,
269,
796,
17427,
34,
18055,
3419,
198,
220,
220,
220,
269,
13,
2220,
7,
82,
8,
628,
220,
220,
220,
329,
479,
11,
85,
287,
269,
13,
23814,
33529,
198,
220,
220,
220,
220,
220,
220,
220,
14746,
13,
19119,
15090,
74,
25,
410,
13,
8367,
30072,
628,
220,
220,
220,
1441,
14746
] | 2.346457 | 127 |
# -*- coding: utf-8 -*-
from bravado_core.spec import Spec
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
6738,
49025,
4533,
62,
7295,
13,
16684,
1330,
18291,
628,
628,
628
] | 2.461538 | 26 |
import sys
import string
import numpy
from numpy import *
import os.path
import pickle
import re
from types import FloatType
import getopt, sys
import copy
import gzip
from btk.common import *
from btk.stream import *
from btk.feature import *
from btk.matrix import *
from btk.utils import *
#from pygsl import *
from pygsl import multiminimize
from pygsl import sf
import pygsl.errors as errors
from btk import dbase
from btk.modulated import *
from btk.subbandBeamforming import *
from btk.beamformer import *
APPZERO = 1.0E-20
# @memo fun_MK() and dfun_MK() are call back functions for pygsl.
# You can easily implement a new MK beamformer by writing a new class derived from
# a class 'MKSubbandBeamformer' which have methods, normalizeWa( wa ),
# calcKurtosis( srcX, fbinX, wa ) and gradient( srcX, fbinX, wa ).
# @class maximum empirical kurtosis beamformer
# usage:
# 1. construct an object, mkBf = MKSubbandBeamformerGGDr( spectralSources )
# 2. calculate the fixed weights, mkBf.calcFixedWeights( sampleRate, delay )
# 3. accumulate input vectors, mkBf.accumObservations( sFrame, eFrame, R )
# 4. calculate the covariance matricies of the inputs, mkBf.calcCov()
# 5. estimate active weight vectors, mkBf.estimateActiveWeights( fbinX, startpoint )
# @class maximum empirical kurtosis beamformer.
# The entire weight is normalized at each step in the steepest gradient algorithm.
# usage:
# 1. construct an object, mkBf = MEKSubbandBeamformer_nrm( spectralSources )
# 2. calculate the fixed weights, mkBf.calcFixedWeights( sampleRate, delay )
# 3. accumulate input vectors, mkBf.accumObservations( sFrame, eFrame, R )
# 4. calculate the covariance matricies of the inputs, mkBf.calcCov()
# 5. estimate active weight vectors, mkBf.estimateActiveWeights( fbinX, startpoint )
| [
11748,
25064,
198,
11748,
4731,
198,
11748,
299,
32152,
198,
6738,
299,
32152,
1330,
1635,
198,
11748,
28686,
13,
6978,
198,
11748,
2298,
293,
198,
11748,
302,
198,
6738,
3858,
1330,
48436,
6030,
198,
11748,
651,
8738,
11,
25064,
198,
11748,
4866,
198,
11748,
308,
13344,
198,
198,
6738,
275,
30488,
13,
11321,
1330,
1635,
198,
6738,
275,
30488,
13,
5532,
1330,
1635,
198,
6738,
275,
30488,
13,
30053,
1330,
1635,
198,
6738,
275,
30488,
13,
6759,
8609,
1330,
1635,
198,
6738,
275,
30488,
13,
26791,
1330,
1635,
198,
198,
2,
6738,
12972,
70,
6649,
1330,
1635,
198,
6738,
12972,
70,
6649,
1330,
43104,
259,
48439,
198,
6738,
12972,
70,
6649,
1330,
264,
69,
198,
11748,
12972,
70,
6649,
13,
48277,
355,
8563,
198,
198,
6738,
275,
30488,
1330,
288,
8692,
198,
6738,
275,
30488,
13,
4666,
4817,
1330,
1635,
198,
6738,
275,
30488,
13,
7266,
3903,
3856,
321,
15464,
1330,
1635,
198,
6738,
275,
30488,
13,
40045,
16354,
1330,
1635,
198,
198,
24805,
57,
34812,
796,
352,
13,
15,
36,
12,
1238,
198,
198,
2,
2488,
11883,
78,
1257,
62,
33907,
3419,
290,
288,
12543,
62,
33907,
3419,
389,
869,
736,
5499,
329,
12972,
70,
6649,
13,
198,
2,
220,
220,
220,
220,
220,
220,
921,
460,
3538,
3494,
257,
649,
20553,
15584,
16354,
416,
3597,
257,
649,
1398,
10944,
422,
198,
2,
220,
220,
220,
220,
220,
220,
257,
1398,
705,
33907,
7004,
3903,
3856,
321,
16354,
6,
543,
423,
5050,
11,
3487,
1096,
33484,
7,
2082,
10612,
198,
2,
220,
220,
220,
220,
220,
220,
42302,
42,
3325,
5958,
7,
12351,
55,
11,
277,
8800,
55,
11,
2082,
1267,
290,
31312,
7,
12351,
55,
11,
277,
8800,
55,
11,
2082,
6739,
198,
198,
2,
2488,
4871,
5415,
21594,
479,
3325,
5958,
15584,
16354,
220,
198,
2,
8748,
25,
198,
2,
352,
13,
5678,
281,
2134,
11,
33480,
33,
69,
796,
20553,
7004,
3903,
3856,
321,
16354,
11190,
6187,
7,
37410,
21188,
220,
1267,
198,
2,
362,
13,
15284,
262,
5969,
19590,
11,
33480,
33,
69,
13,
9948,
66,
13715,
1135,
2337,
7,
6291,
32184,
11,
5711,
1267,
198,
2,
513,
13,
29915,
5128,
30104,
11,
33480,
33,
69,
13,
4134,
388,
31310,
712,
602,
7,
264,
19778,
11,
304,
19778,
11,
371,
1267,
198,
2,
604,
13,
15284,
262,
44829,
590,
2603,
1173,
444,
286,
262,
17311,
11,
33480,
33,
69,
13,
9948,
66,
34,
709,
3419,
198,
2,
642,
13,
8636,
4075,
3463,
30104,
11,
33480,
33,
69,
13,
395,
1920,
13739,
1135,
2337,
7,
277,
8800,
55,
11,
923,
4122,
1267,
198,
198,
2,
2488,
4871,
5415,
21594,
479,
3325,
5958,
15584,
16354,
13,
198,
2,
220,
220,
220,
220,
220,
220,
220,
383,
2104,
3463,
318,
39279,
379,
1123,
2239,
287,
262,
14559,
395,
31312,
11862,
13,
198,
2,
8748,
25,
198,
2,
352,
13,
5678,
281,
2134,
11,
33480,
33,
69,
796,
11948,
42,
7004,
3903,
3856,
321,
16354,
62,
77,
26224,
7,
37410,
21188,
220,
1267,
198,
2,
362,
13,
15284,
262,
5969,
19590,
11,
33480,
33,
69,
13,
9948,
66,
13715,
1135,
2337,
7,
6291,
32184,
11,
5711,
1267,
198,
2,
513,
13,
29915,
5128,
30104,
11,
33480,
33,
69,
13,
4134,
388,
31310,
712,
602,
7,
264,
19778,
11,
304,
19778,
11,
371,
1267,
198,
2,
604,
13,
15284,
262,
44829,
590,
2603,
1173,
444,
286,
262,
17311,
11,
33480,
33,
69,
13,
9948,
66,
34,
709,
3419,
198,
2,
642,
13,
8636,
4075,
3463,
30104,
11,
33480,
33,
69,
13,
395,
1920,
13739,
1135,
2337,
7,
277,
8800,
55,
11,
923,
4122,
1267,
628
] | 3.053782 | 595 |
# -*- coding: utf-8 -*-
from django import forms
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
6738,
42625,
14208,
1330,
5107,
628
] | 2.5 | 20 |
import os
target_names = ['-to-process.txt.subbed', '_to_process.txt.subbed', '_to-process.txt.subbed', '-to_process.txt.subbed', '-tp.txt.subbed', '_tp.txt.subbed']
target = "-Processing"
for dirname, dirs, files in os.walk('.'):
if target in dirname and 'tagged' not in dirname:
for filename in files:
if any(filename.endswith(ending) for ending in target_names):
inputname = "/Users/Torri/Documents/Grad stuff/Thesis stuff/Data - Novels/Processing/" + dirname + "/" + filename
inputfile = open(inputname, 'r')
for ending in target_names:
if filename.endswith(ending):
new_filename = filename.replace(ending, '_split.txt')
new_filename = new_filename.replace(' ', '_')
new_filename = new_filename.replace(',', '')
new_filename = new_filename.replace('!', '')
print dirname + new_filename
new_file = open("/Users/Torri/Documents/Grad stuff/Thesis stuff/Data - Novels/Processing/" + dirname + "/" + new_filename, 'w')
for line in inputfile:
for word in line.split():
#word = word.lower()
word = word.rstrip('-\n\r\'.')
word = word.lstrip("\'")
print >>new_file, word
inputfile.close() | [
11748,
28686,
201,
198,
201,
198,
16793,
62,
14933,
796,
685,
29001,
1462,
12,
14681,
13,
14116,
13,
7266,
3077,
3256,
705,
62,
1462,
62,
14681,
13,
14116,
13,
7266,
3077,
3256,
705,
62,
1462,
12,
14681,
13,
14116,
13,
7266,
3077,
3256,
705,
12,
1462,
62,
14681,
13,
14116,
13,
7266,
3077,
3256,
705,
12,
34788,
13,
14116,
13,
7266,
3077,
3256,
705,
62,
34788,
13,
14116,
13,
7266,
3077,
20520,
201,
198,
16793,
796,
27444,
18709,
278,
1,
201,
198,
201,
198,
1640,
26672,
3672,
11,
288,
17062,
11,
3696,
287,
28686,
13,
11152,
10786,
2637,
2599,
201,
198,
220,
220,
220,
611,
2496,
287,
26672,
3672,
290,
705,
12985,
2004,
6,
407,
287,
26672,
3672,
25,
220,
201,
198,
220,
220,
220,
220,
220,
220,
220,
329,
29472,
287,
3696,
25,
201,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
597,
7,
34345,
13,
437,
2032,
342,
7,
1571,
8,
329,
7464,
287,
2496,
62,
14933,
2599,
201,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5128,
3672,
796,
12813,
14490,
14,
15884,
380,
14,
38354,
14,
42731,
3404,
14,
464,
13429,
3404,
14,
6601,
532,
24467,
82,
14,
18709,
278,
30487,
1343,
26672,
3672,
1343,
12813,
1,
1343,
29472,
201,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5128,
7753,
796,
1280,
7,
15414,
3672,
11,
705,
81,
11537,
201,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
7464,
287,
2496,
62,
14933,
25,
201,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
29472,
13,
437,
2032,
342,
7,
1571,
2599,
201,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
649,
62,
34345,
796,
29472,
13,
33491,
7,
1571,
11,
705,
62,
35312,
13,
14116,
11537,
201,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
649,
62,
34345,
796,
649,
62,
34345,
13,
33491,
10786,
46083,
705,
62,
11537,
201,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
649,
62,
34345,
796,
649,
62,
34345,
13,
33491,
7,
3256,
3256,
10148,
8,
201,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
649,
62,
34345,
796,
649,
62,
34345,
13,
33491,
10786,
0,
3256,
10148,
8,
201,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3601,
26672,
3672,
1343,
649,
62,
34345,
201,
198,
201,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
649,
62,
7753,
796,
1280,
7203,
14,
14490,
14,
15884,
380,
14,
38354,
14,
42731,
3404,
14,
464,
13429,
3404,
14,
6601,
532,
24467,
82,
14,
18709,
278,
30487,
1343,
26672,
3672,
1343,
12813,
1,
1343,
649,
62,
34345,
11,
705,
86,
11537,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
201,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
1627,
287,
5128,
7753,
25,
201,
198,
197,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
1573,
287,
1627,
13,
35312,
33529,
201,
198,
201,
198,
197,
197,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
4775,
796,
1573,
13,
21037,
3419,
201,
198,
197,
197,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1573,
796,
1573,
13,
81,
36311,
10786,
12,
59,
77,
59,
81,
43054,
2637,
8,
201,
198,
197,
197,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1573,
796,
1573,
13,
75,
36311,
7203,
43054,
4943,
201,
198,
197,
197,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
3605,
62,
7753,
11,
1573,
201,
198,
201,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5128,
7753,
13,
19836,
3419
] | 2.035511 | 704 |
#!/usr/bin/env python3
###
# YoSon
# @treqtl/xinput.py
# produce xtreqtl input files by matching rs numbers from trait and iv summary statistics
###
import pandas as pd
import numpy as np
import os, sys
from sys import argv
from os import walk
from treqtl_input import read_dir
if __name__ == '__main__':
# if main, run test with input files
ewkdir = argv[1]
efile = argv[2]
gwkdir = argv[3]
outdf = xinput(ewkdir, efile, gwkdir)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
21017,
198,
2,
25455,
31056,
198,
2,
2488,
33945,
80,
28781,
14,
87,
15414,
13,
9078,
198,
2,
4439,
220,
742,
42180,
28781,
5128,
3696,
416,
12336,
44608,
3146,
422,
16708,
290,
21628,
10638,
7869,
198,
21017,
628,
198,
11748,
19798,
292,
355,
279,
67,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
28686,
11,
25064,
198,
6738,
25064,
1330,
1822,
85,
198,
6738,
28686,
1330,
2513,
198,
198,
6738,
2054,
80,
28781,
62,
15414,
1330,
1100,
62,
15908,
628,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
198,
220,
1303,
611,
1388,
11,
1057,
1332,
351,
5128,
3696,
198,
220,
304,
43021,
15908,
796,
1822,
85,
58,
16,
60,
198,
220,
304,
7753,
796,
1822,
85,
58,
17,
60,
198,
220,
308,
43021,
15908,
796,
1822,
85,
58,
18,
60,
198,
220,
220,
198,
220,
503,
7568,
796,
2124,
15414,
7,
413,
74,
15908,
11,
304,
7753,
11,
308,
43021,
15908,
8,
198,
220,
220,
198,
220,
220,
198,
220,
220
] | 2.561111 | 180 |
from aioli import Package
from .controller import HttpController
from .service import OpenApiService
from .config import ConfigSchema
export = Package(
controllers=[HttpController],
services=[OpenApiService],
config=ConfigSchema,
auto_meta=True
)
| [
6738,
257,
1669,
72,
1330,
15717,
198,
198,
6738,
764,
36500,
1330,
367,
29281,
22130,
198,
6738,
764,
15271,
1330,
4946,
32,
14415,
16177,
198,
6738,
764,
11250,
1330,
17056,
27054,
2611,
198,
198,
39344,
796,
15717,
7,
198,
220,
220,
220,
20624,
41888,
43481,
22130,
4357,
198,
220,
220,
220,
2594,
41888,
11505,
32,
14415,
16177,
4357,
198,
220,
220,
220,
4566,
28,
16934,
27054,
2611,
11,
198,
220,
220,
220,
8295,
62,
28961,
28,
17821,
198,
8,
198
] | 3.3125 | 80 |
import matplotlib.pyplot as plt
from sklearn.metrics import roc_auc_score
from sklearn.metrics import roc_curve, auc
import itertools
import numpy as np
import tensorflow as tf
from tensorflow.keras.preprocessing import sequence
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input
from tensorflow.keras.layers import Dense, Dropout, Activation
from tensorflow.keras.layers import Embedding, BatchNormalization
from tensorflow.keras.layers import Conv1D, GlobalMaxPooling1D
from sklearn.model_selection import train_test_split
from sklearn import metrics
import dataset
import evaluation
from dataset import Tokenizer
from tfutils import SaveBestModelOnMemory
# from tfx.layers.embeddings import WordEmbeddingInitializer
# classification 中 multi labels 文件
# 多分类绘制ROC、PRF等曲线的例子
# 用sigmoid进行多标签分类
# [0, 1, 1, 0, 1]
# 处理数据
X, y, categoricals = dataset.load_THUCNews_title_label()
X_train, X_test, y_train, y_test = train_test_split(
X, y, train_size=0.7, random_state=732)
num_classes = len(categoricals)
# 转化成字id
ctokenizer = Tokenizer()
# 严格的交叉验证,只在训练集上构建全局词表
ctokenizer.fit(X_train)
X_train = ctokenizer.transform(X_train)
X_test = ctokenizer.transform(X_test)
# maxlen = tokenizer.find_best_maxlen(X_train, mode="mean")
maxlen = 48
print("max length is", maxlen)
X_train = sequence.pad_sequences(
X_train,
maxlen=maxlen,
dtype="int32",
padding="post",
truncating="post",
value=0)
X_test = sequence.pad_sequences(
X_test,
maxlen=maxlen,
dtype="int32",
padding="post",
truncating="post",
value=0)
y_train = tf.keras.utils.to_categorical(y_train)
y_test = tf.keras.utils.to_categorical(y_test)
# 模型
input_dim = ctokenizer.vocab_size
# output_dim = tokenizer.find_embedding_dims(input_dim)
output_dim = 128
# wi = WordEmbeddingInitializer(wm.vocab, path="/home/zhiwen/workspace/dataset/word2vec_baike/word2vec_baike")
# input_dim, output_dim = wi.shape
inputs = Input(shape=(maxlen,)) # (batch_size, maxlen)
x = Embedding(input_dim, output_dim,
embeddings_initializer="glorot_normal",
input_length=maxlen,
trainable=True,
mask_zero=True)(inputs) # (batch_size, maxlen, output_dim)
x = Dropout(0.2)(x)
x = Conv1D(filters=200,
kernel_size=2,
padding="same",
activation="relu",
strides=1)(x)
x = Conv1D(filters=200,
kernel_size=3,
padding="same",
activation="relu",
strides=1)(x)
x = GlobalMaxPooling1D()(x)
x = Dense(100)(x)
x = Dropout(0.2)(x)
x = Activation("relu")(x)
outputs = Dense(num_classes, activation="softmax")(x)
model = Model(inputs, outputs)
model.compile(loss="categorical_crossentropy",
optimizer="adam",
metrics=["accuracy"])
# 训练
batch_size = 32
epochs = 8
callbacks = [SaveBestModelOnMemory()]
model.fit(X_train, y_train,
batch_size=batch_size,
epochs=epochs,
callbacks=callbacks,
validation_split=0.1)
model.summary()
y_pred = model.predict(X_test)
fpr = dict()
tpr = dict()
roc_auc = dict()
for i in range(num_classes):
fpr[i], tpr[i], _ = roc_curve(y_test[:, i], y_pred[:, i])
roc_auc[i] = auc(fpr[i], tpr[i])
fpr["micro"], tpr["micro"], _ = roc_curve(y_test.ravel(), y_pred.ravel())
roc_auc["micro"] = auc(fpr["micro"], tpr["micro"])
all_fpr = np.unique(np.concatenate([fpr[i] for i in range(num_classes)]))
mean_tpr = np.zeros_like(all_fpr)
for i in range(num_classes):
mean_tpr += np.interp(all_fpr, fpr[i], tpr[i])
mean_tpr /= num_classes
fpr["macro"] = all_fpr
tpr["macro"] = mean_tpr
roc_auc["macro"] = auc(fpr["macro"], tpr["macro"])
plt.figure()
plt.plot(fpr["micro"], tpr["micro"],
label='micro-average ROC curve (area = {0:0.2f})'
''.format(roc_auc["micro"]),
color='deeppink', linestyle=':', linewidth=4)
plt.plot(fpr["macro"], tpr["macro"],
label='macro-average ROC curve (area = {0:0.2f})'
''.format(roc_auc["macro"]),
color='navy', linestyle=':', linewidth=4)
lw = 1
colors = itertools.cycle(
['aqua', 'darkorange', 'cornflowerblue', 'blue', 'red'])
linestyles = itertools.cycle([''])
for i, color in zip(range(num_classes), colors):
plt.plot(fpr[i], tpr[i], color=color, lw=lw,
label='ROC curve of class {0} (area = {1:0.2f})'.format(i, roc_auc[i]))
plt.plot([0, 1], [0, 1], 'k--', lw=lw)
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('ROC to multi-class')
plt.legend(loc="lower right")
plt.show()
| [
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
6738,
1341,
35720,
13,
4164,
10466,
1330,
686,
66,
62,
14272,
62,
26675,
198,
6738,
1341,
35720,
13,
4164,
10466,
1330,
686,
66,
62,
22019,
303,
11,
257,
1229,
198,
11748,
340,
861,
10141,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
11192,
273,
11125,
355,
48700,
198,
6738,
11192,
273,
11125,
13,
6122,
292,
13,
3866,
36948,
1330,
8379,
198,
6738,
11192,
273,
11125,
13,
6122,
292,
13,
27530,
1330,
9104,
198,
6738,
11192,
273,
11125,
13,
6122,
292,
13,
75,
6962,
1330,
23412,
198,
6738,
11192,
273,
11125,
13,
6122,
292,
13,
75,
6962,
1330,
360,
1072,
11,
14258,
448,
11,
13144,
341,
198,
6738,
11192,
273,
11125,
13,
6122,
292,
13,
75,
6962,
1330,
13302,
6048,
278,
11,
347,
963,
26447,
1634,
198,
6738,
11192,
273,
11125,
13,
6122,
292,
13,
75,
6962,
1330,
34872,
16,
35,
11,
8060,
11518,
27201,
278,
16,
35,
198,
6738,
1341,
35720,
13,
19849,
62,
49283,
1330,
4512,
62,
9288,
62,
35312,
198,
6738,
1341,
35720,
1330,
20731,
198,
198,
11748,
27039,
198,
11748,
12660,
198,
6738,
27039,
1330,
29130,
7509,
198,
198,
6738,
48700,
26791,
1330,
12793,
13014,
17633,
2202,
30871,
198,
2,
422,
256,
21373,
13,
75,
6962,
13,
20521,
67,
654,
1330,
9678,
31567,
6048,
278,
24243,
7509,
198,
198,
2,
17923,
220,
40792,
5021,
14722,
10545,
244,
229,
20015,
114,
198,
2,
36469,
248,
26344,
228,
163,
109,
119,
163,
119,
246,
26344,
114,
49,
4503,
23513,
4805,
37,
163,
255,
231,
162,
249,
110,
163,
118,
123,
21410,
160,
122,
233,
36310,
198,
198,
2,
13328,
242,
101,
82,
17225,
1868,
32573,
249,
26193,
234,
13783,
248,
43718,
229,
163,
255,
122,
26344,
228,
163,
109,
119,
198,
2,
685,
15,
11,
352,
11,
352,
11,
657,
11,
352,
60,
198,
198,
2,
36469,
226,
49426,
228,
46763,
108,
162,
235,
106,
198,
55,
11,
331,
11,
4253,
12409,
82,
796,
27039,
13,
2220,
62,
4221,
9598,
9980,
62,
7839,
62,
18242,
3419,
198,
55,
62,
27432,
11,
1395,
62,
9288,
11,
331,
62,
27432,
11,
331,
62,
9288,
796,
4512,
62,
9288,
62,
35312,
7,
198,
220,
220,
220,
1395,
11,
331,
11,
4512,
62,
7857,
28,
15,
13,
22,
11,
4738,
62,
5219,
28,
22,
2624,
8,
198,
198,
22510,
62,
37724,
796,
18896,
7,
66,
2397,
12409,
82,
8,
198,
2,
5525,
121,
105,
44293,
244,
22755,
238,
27764,
245,
312,
198,
310,
4233,
7509,
796,
29130,
7509,
3419,
198,
2,
220,
10310,
98,
43718,
120,
21410,
12859,
97,
20998,
231,
165,
103,
234,
46237,
223,
171,
120,
234,
20998,
103,
28839,
101,
164,
106,
255,
163,
119,
225,
37239,
228,
41468,
162,
252,
226,
161,
119,
118,
17739,
101,
161,
109,
222,
46237,
235,
26193,
101,
198,
310,
4233,
7509,
13,
11147,
7,
55,
62,
27432,
8,
198,
55,
62,
27432,
796,
269,
30001,
7509,
13,
35636,
7,
55,
62,
27432,
8,
198,
55,
62,
9288,
796,
269,
30001,
7509,
13,
35636,
7,
55,
62,
9288,
8,
198,
198,
2,
3509,
11925,
796,
11241,
7509,
13,
19796,
62,
13466,
62,
9806,
11925,
7,
55,
62,
27432,
11,
4235,
2625,
32604,
4943,
198,
9806,
11925,
796,
4764,
198,
4798,
7203,
9806,
4129,
318,
1600,
3509,
11925,
8,
198,
55,
62,
27432,
796,
8379,
13,
15636,
62,
3107,
3007,
7,
198,
220,
220,
220,
1395,
62,
27432,
11,
198,
220,
220,
220,
3509,
11925,
28,
9806,
11925,
11,
198,
220,
220,
220,
288,
4906,
2625,
600,
2624,
1600,
198,
220,
220,
220,
24511,
2625,
7353,
1600,
198,
220,
220,
220,
40122,
803,
2625,
7353,
1600,
198,
220,
220,
220,
1988,
28,
15,
8,
198,
198,
55,
62,
9288,
796,
8379,
13,
15636,
62,
3107,
3007,
7,
198,
220,
220,
220,
1395,
62,
9288,
11,
198,
220,
220,
220,
3509,
11925,
28,
9806,
11925,
11,
198,
220,
220,
220,
288,
4906,
2625,
600,
2624,
1600,
198,
220,
220,
220,
24511,
2625,
7353,
1600,
198,
220,
220,
220,
40122,
803,
2625,
7353,
1600,
198,
220,
220,
220,
1988,
28,
15,
8,
198,
198,
88,
62,
27432,
796,
48700,
13,
6122,
292,
13,
26791,
13,
1462,
62,
66,
2397,
12409,
7,
88,
62,
27432,
8,
198,
88,
62,
9288,
796,
48700,
13,
6122,
292,
13,
26791,
13,
1462,
62,
66,
2397,
12409,
7,
88,
62,
9288,
8,
198,
198,
2,
10545,
101,
94,
161,
252,
233,
198,
15414,
62,
27740,
796,
269,
30001,
7509,
13,
18893,
397,
62,
7857,
198,
2,
5072,
62,
27740,
796,
11241,
7509,
13,
19796,
62,
20521,
12083,
62,
67,
12078,
7,
15414,
62,
27740,
8,
198,
22915,
62,
27740,
796,
13108,
198,
198,
2,
45967,
796,
9678,
31567,
6048,
278,
24243,
7509,
7,
26377,
13,
18893,
397,
11,
3108,
35922,
11195,
14,
89,
5303,
21006,
14,
5225,
10223,
14,
19608,
292,
316,
14,
4775,
17,
35138,
62,
7012,
522,
14,
4775,
17,
35138,
62,
7012,
522,
4943,
198,
2,
5128,
62,
27740,
11,
5072,
62,
27740,
796,
45967,
13,
43358,
198,
198,
15414,
82,
796,
23412,
7,
43358,
16193,
9806,
11925,
11,
4008,
220,
1303,
357,
43501,
62,
7857,
11,
3509,
11925,
8,
198,
87,
796,
13302,
6048,
278,
7,
15414,
62,
27740,
11,
5072,
62,
27740,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
11525,
67,
654,
62,
36733,
7509,
2625,
70,
4685,
313,
62,
11265,
1600,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5128,
62,
13664,
28,
9806,
11925,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4512,
540,
28,
17821,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
9335,
62,
22570,
28,
17821,
5769,
15414,
82,
8,
220,
1303,
357,
43501,
62,
7857,
11,
3509,
11925,
11,
5072,
62,
27740,
8,
198,
198,
87,
796,
14258,
448,
7,
15,
13,
17,
5769,
87,
8,
198,
87,
796,
34872,
16,
35,
7,
10379,
1010,
28,
2167,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
9720,
62,
7857,
28,
17,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
24511,
2625,
31642,
1600,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
14916,
2625,
260,
2290,
1600,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
35002,
28,
16,
5769,
87,
8,
198,
198,
87,
796,
34872,
16,
35,
7,
10379,
1010,
28,
2167,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
9720,
62,
7857,
28,
18,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
24511,
2625,
31642,
1600,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
14916,
2625,
260,
2290,
1600,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
35002,
28,
16,
5769,
87,
8,
198,
198,
87,
796,
8060,
11518,
27201,
278,
16,
35,
3419,
7,
87,
8,
198,
87,
796,
360,
1072,
7,
3064,
5769,
87,
8,
198,
87,
796,
14258,
448,
7,
15,
13,
17,
5769,
87,
8,
198,
87,
796,
13144,
341,
7203,
260,
2290,
4943,
7,
87,
8,
198,
22915,
82,
796,
360,
1072,
7,
22510,
62,
37724,
11,
14916,
2625,
4215,
9806,
4943,
7,
87,
8,
198,
198,
19849,
796,
9104,
7,
15414,
82,
11,
23862,
8,
198,
19849,
13,
5589,
576,
7,
22462,
2625,
66,
2397,
12409,
62,
19692,
298,
28338,
1600,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6436,
7509,
2625,
324,
321,
1600,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
20731,
28,
14692,
4134,
23843,
8973,
8,
198,
198,
2,
5525,
106,
255,
163,
119,
225,
198,
43501,
62,
7857,
796,
3933,
198,
538,
5374,
82,
796,
807,
198,
198,
13345,
10146,
796,
685,
16928,
13014,
17633,
2202,
30871,
3419,
60,
198,
19849,
13,
11147,
7,
55,
62,
27432,
11,
331,
62,
27432,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
15458,
62,
7857,
28,
43501,
62,
7857,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
36835,
82,
28,
538,
5374,
82,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
869,
10146,
28,
13345,
10146,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
21201,
62,
35312,
28,
15,
13,
16,
8,
198,
19849,
13,
49736,
3419,
198,
198,
88,
62,
28764,
796,
2746,
13,
79,
17407,
7,
55,
62,
9288,
8,
628,
198,
69,
1050,
796,
8633,
3419,
198,
83,
1050,
796,
8633,
3419,
198,
12204,
62,
14272,
796,
8633,
3419,
198,
198,
1640,
1312,
287,
2837,
7,
22510,
62,
37724,
2599,
198,
220,
220,
220,
277,
1050,
58,
72,
4357,
256,
1050,
58,
72,
4357,
4808,
796,
686,
66,
62,
22019,
303,
7,
88,
62,
9288,
58,
45299,
1312,
4357,
331,
62,
28764,
58,
45299,
1312,
12962,
198,
220,
220,
220,
686,
66,
62,
14272,
58,
72,
60,
796,
257,
1229,
7,
69,
1050,
58,
72,
4357,
256,
1050,
58,
72,
12962,
198,
198,
69,
1050,
14692,
24055,
33116,
256,
1050,
14692,
24055,
33116,
4808,
796,
686,
66,
62,
22019,
303,
7,
88,
62,
9288,
13,
25843,
22784,
331,
62,
28764,
13,
25843,
28955,
198,
12204,
62,
14272,
14692,
24055,
8973,
796,
257,
1229,
7,
69,
1050,
14692,
24055,
33116,
256,
1050,
14692,
24055,
8973,
8,
198,
198,
439,
62,
69,
1050,
796,
45941,
13,
34642,
7,
37659,
13,
1102,
9246,
268,
378,
26933,
69,
1050,
58,
72,
60,
329,
1312,
287,
2837,
7,
22510,
62,
37724,
15437,
4008,
198,
32604,
62,
83,
1050,
796,
45941,
13,
9107,
418,
62,
2339,
7,
439,
62,
69,
1050,
8,
198,
1640,
1312,
287,
2837,
7,
22510,
62,
37724,
2599,
198,
220,
220,
220,
1612,
62,
83,
1050,
15853,
45941,
13,
3849,
79,
7,
439,
62,
69,
1050,
11,
277,
1050,
58,
72,
4357,
256,
1050,
58,
72,
12962,
198,
32604,
62,
83,
1050,
1220,
28,
997,
62,
37724,
198,
198,
69,
1050,
14692,
20285,
305,
8973,
796,
477,
62,
69,
1050,
198,
83,
1050,
14692,
20285,
305,
8973,
796,
1612,
62,
83,
1050,
198,
12204,
62,
14272,
14692,
20285,
305,
8973,
796,
257,
1229,
7,
69,
1050,
14692,
20285,
305,
33116,
256,
1050,
14692,
20285,
305,
8973,
8,
198,
198,
489,
83,
13,
26875,
3419,
198,
489,
83,
13,
29487,
7,
69,
1050,
14692,
24055,
33116,
256,
1050,
14692,
24055,
33116,
198,
220,
220,
220,
220,
220,
220,
220,
220,
6167,
11639,
24055,
12,
23913,
371,
4503,
12133,
357,
20337,
796,
1391,
15,
25,
15,
13,
17,
69,
30072,
6,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
705,
4458,
18982,
7,
12204,
62,
14272,
14692,
24055,
8973,
828,
198,
220,
220,
220,
220,
220,
220,
220,
220,
3124,
11639,
67,
1453,
381,
676,
3256,
9493,
10992,
28,
10354,
3256,
9493,
413,
5649,
28,
19,
8,
198,
198,
489,
83,
13,
29487,
7,
69,
1050,
14692,
20285,
305,
33116,
256,
1050,
14692,
20285,
305,
33116,
198,
220,
220,
220,
220,
220,
220,
220,
220,
6167,
11639,
20285,
305,
12,
23913,
371,
4503,
12133,
357,
20337,
796,
1391,
15,
25,
15,
13,
17,
69,
30072,
6,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
705,
4458,
18982,
7,
12204,
62,
14272,
14692,
20285,
305,
8973,
828,
198,
220,
220,
220,
220,
220,
220,
220,
220,
3124,
11639,
77,
2830,
3256,
9493,
10992,
28,
10354,
3256,
9493,
413,
5649,
28,
19,
8,
198,
198,
75,
86,
796,
352,
198,
4033,
669,
796,
340,
861,
10141,
13,
13696,
7,
198,
220,
220,
220,
37250,
36129,
64,
3256,
705,
21953,
43745,
3256,
705,
20772,
25547,
17585,
3256,
705,
17585,
3256,
705,
445,
6,
12962,
198,
2815,
42530,
796,
340,
861,
10141,
13,
13696,
26933,
7061,
12962,
198,
1640,
1312,
11,
3124,
287,
19974,
7,
9521,
7,
22510,
62,
37724,
828,
7577,
2599,
198,
220,
220,
220,
458,
83,
13,
29487,
7,
69,
1050,
58,
72,
4357,
256,
1050,
58,
72,
4357,
3124,
28,
8043,
11,
300,
86,
28,
75,
86,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6167,
11639,
49,
4503,
12133,
286,
1398,
1391,
15,
92,
357,
20337,
796,
1391,
16,
25,
15,
13,
17,
69,
30072,
4458,
18982,
7,
72,
11,
686,
66,
62,
14272,
58,
72,
60,
4008,
198,
198,
489,
83,
13,
29487,
26933,
15,
11,
352,
4357,
685,
15,
11,
352,
4357,
705,
74,
438,
3256,
300,
86,
28,
75,
86,
8,
198,
489,
83,
13,
87,
2475,
26933,
15,
13,
15,
11,
352,
13,
15,
12962,
198,
489,
83,
13,
88,
2475,
26933,
15,
13,
15,
11,
352,
13,
2713,
12962,
198,
489,
83,
13,
87,
18242,
10786,
25101,
33733,
14806,
11537,
198,
489,
83,
13,
2645,
9608,
10786,
17821,
33733,
14806,
11537,
198,
489,
83,
13,
7839,
10786,
49,
4503,
284,
5021,
12,
4871,
11537,
198,
489,
83,
13,
1455,
437,
7,
17946,
2625,
21037,
826,
4943,
198,
489,
83,
13,
12860,
3419,
198
] | 2.137119 | 2,166 |
"""Special options for messages from bot."""
from pydantic import BaseModel
from botx.models.messages.sending.options import NotificationOptions
class ResultOptions(BaseModel):
"""Configuration for command result or notification that is send to BotX API."""
#: send message only when stealth mode is enabled.
stealth_mode: bool = False
#: use in-text mentions
raw_mentions: bool = False
#: message options for configuring notifications.
notification_opts: NotificationOptions = NotificationOptions()
| [
37811,
13409,
3689,
329,
6218,
422,
10214,
526,
15931,
198,
6738,
279,
5173,
5109,
1330,
7308,
17633,
198,
198,
6738,
10214,
87,
13,
27530,
13,
37348,
1095,
13,
82,
1571,
13,
25811,
1330,
42808,
29046,
628,
198,
4871,
25414,
29046,
7,
14881,
17633,
2599,
198,
220,
220,
220,
37227,
38149,
329,
3141,
1255,
393,
14483,
326,
318,
3758,
284,
18579,
55,
7824,
526,
15931,
628,
220,
220,
220,
1303,
25,
3758,
3275,
691,
618,
19159,
4235,
318,
9343,
13,
198,
220,
220,
220,
19159,
62,
14171,
25,
20512,
796,
10352,
628,
220,
220,
220,
1303,
25,
779,
287,
12,
5239,
15802,
198,
220,
220,
220,
8246,
62,
434,
507,
25,
20512,
796,
10352,
628,
220,
220,
220,
1303,
25,
3275,
3689,
329,
4566,
870,
19605,
13,
198,
220,
220,
220,
14483,
62,
404,
912,
25,
42808,
29046,
796,
42808,
29046,
3419,
198
] | 3.760563 | 142 |
# Import sqlite3 para tratar os erros
import _sqlite3
# Importado para formatar a data
from datetime import date, datetime
# Importa a função de relatório de pedidos
from source.db.tblOrder import selectAllOrderInformation, selectAllOrderBetweenDate
# Exibe todos os pedidos
# Exibe todos os pedidos de acordo com o periodo informado | [
2,
17267,
44161,
578,
18,
31215,
491,
9459,
28686,
1931,
4951,
198,
11748,
4808,
25410,
578,
18,
198,
198,
2,
17267,
4533,
31215,
5794,
283,
257,
1366,
198,
6738,
4818,
8079,
1330,
3128,
11,
4818,
8079,
198,
198,
2,
17267,
64,
257,
1257,
16175,
28749,
390,
48993,
10205,
27250,
390,
7190,
312,
418,
198,
6738,
2723,
13,
9945,
13,
83,
2436,
18743,
1330,
2922,
3237,
18743,
21918,
11,
2922,
3237,
18743,
25262,
10430,
198,
198,
2,
1475,
32438,
284,
37427,
28686,
7190,
312,
418,
198,
198,
2,
1475,
32438,
284,
37427,
28686,
7190,
312,
418,
390,
936,
585,
78,
401,
267,
2278,
78,
4175,
4533
] | 3.209524 | 105 |
from tinydb import Query, where
from pa import get_db
from pa.config import Config
| [
6738,
7009,
9945,
1330,
43301,
11,
810,
198,
6738,
14187,
1330,
651,
62,
9945,
198,
6738,
14187,
13,
11250,
1330,
17056,
628
] | 3.818182 | 22 |
#!python3
# coding: utf-8
# Consider all integer combinations of ab for 2 ≤ a ≤ 5 and 2 ≤ b ≤ 5:
#
# 22=4, 23=8, 24=16, 25=32
# 32=9, 33=27, 34=81, 35=243
# 42=16, 43=64, 44=256, 45=1024
# 52=25, 53=125, 54=625, 55=3125
# If they are then placed in numerical order, with any repeats removed, we get the following sequence of 15 distinct terms:
#
# 4, 8, 9, 16, 25, 27, 32, 64, 81, 125, 243, 256, 625, 1024, 3125
#
# How many distinct terms are in the sequence generated by ab for 2 ≤ a ≤ 100 and 2 ≤ b ≤ 100?
#https://projecteuler.net/problem=29
from time import perf_counter
import matplotlib.pyplot as plt
from math import log
yset = []
ylist = []
xline = []
i = 1
while i < 101:
start = perf_counter()
using_set(i)
end = perf_counter()
yset.append(end - start)
xline.append(i)
start = perf_counter()
using_list(i)
end = perf_counter()
ylist.append(end-start)
i += (i+int(log(i)))
print(i)
plt.plot(xline, yset, label="set")
plt.plot(xline, ylist, label="list")
plt.xlabel("number of items")
plt.ylabel("time (seconds)")
plt.title("Set vs List time performance")
plt.legend()
plt.show()
| [
2,
0,
29412,
18,
198,
2,
19617,
25,
3384,
69,
12,
23,
198,
2,
12642,
477,
18253,
17790,
286,
450,
329,
362,
41305,
257,
41305,
642,
290,
362,
41305,
275,
41305,
642,
25,
198,
2,
220,
198,
2,
2534,
28,
19,
11,
2242,
28,
23,
11,
1987,
28,
1433,
11,
1679,
28,
2624,
198,
2,
3933,
28,
24,
11,
4747,
28,
1983,
11,
4974,
28,
6659,
11,
3439,
28,
26660,
198,
2,
5433,
28,
1433,
11,
5946,
28,
2414,
11,
5846,
28,
11645,
11,
4153,
28,
35500,
198,
2,
6740,
28,
1495,
11,
7192,
28,
11623,
11,
7175,
28,
26704,
11,
5996,
28,
18,
11623,
198,
2,
1002,
484,
389,
788,
4624,
287,
29052,
1502,
11,
351,
597,
29819,
4615,
11,
356,
651,
262,
1708,
8379,
286,
1315,
7310,
2846,
25,
198,
2,
220,
198,
2,
604,
11,
807,
11,
860,
11,
1467,
11,
1679,
11,
2681,
11,
3933,
11,
5598,
11,
9773,
11,
13151,
11,
35989,
11,
17759,
11,
48868,
11,
28119,
11,
513,
11623,
198,
2,
220,
198,
2,
1374,
867,
7310,
2846,
389,
287,
262,
8379,
7560,
416,
450,
329,
362,
41305,
257,
41305,
1802,
290,
362,
41305,
275,
41305,
1802,
30,
198,
2,
5450,
1378,
16302,
68,
18173,
13,
3262,
14,
45573,
28,
1959,
198,
198,
6738,
640,
1330,
23035,
62,
24588,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
6738,
10688,
1330,
2604,
198,
198,
893,
316,
796,
17635,
198,
2645,
396,
796,
17635,
198,
87,
1370,
796,
17635,
198,
72,
796,
352,
198,
4514,
1312,
1279,
8949,
25,
198,
220,
220,
220,
923,
796,
23035,
62,
24588,
3419,
198,
220,
220,
220,
1262,
62,
2617,
7,
72,
8,
198,
220,
220,
220,
886,
796,
23035,
62,
24588,
3419,
198,
220,
220,
220,
331,
2617,
13,
33295,
7,
437,
532,
923,
8,
198,
220,
220,
220,
2124,
1370,
13,
33295,
7,
72,
8,
628,
220,
220,
220,
923,
796,
23035,
62,
24588,
3419,
198,
220,
220,
220,
1262,
62,
4868,
7,
72,
8,
198,
220,
220,
220,
886,
796,
23035,
62,
24588,
3419,
198,
220,
220,
220,
331,
4868,
13,
33295,
7,
437,
12,
9688,
8,
198,
220,
220,
220,
1312,
15853,
357,
72,
10,
600,
7,
6404,
7,
72,
22305,
198,
220,
220,
220,
3601,
7,
72,
8,
198,
489,
83,
13,
29487,
7,
87,
1370,
11,
331,
2617,
11,
6167,
2625,
2617,
4943,
198,
489,
83,
13,
29487,
7,
87,
1370,
11,
331,
4868,
11,
6167,
2625,
4868,
4943,
198,
489,
83,
13,
87,
18242,
7203,
17618,
286,
3709,
4943,
198,
489,
83,
13,
2645,
9608,
7203,
2435,
357,
43012,
8,
4943,
198,
489,
83,
13,
7839,
7203,
7248,
3691,
7343,
640,
2854,
4943,
198,
489,
83,
13,
1455,
437,
3419,
198,
489,
83,
13,
12860,
3419,
198
] | 2.481481 | 459 |
import gspread
import time
from oauth2client.service_account import ServiceAccountCredentials | [
11748,
308,
43639,
198,
11748,
640,
198,
6738,
267,
18439,
17,
16366,
13,
15271,
62,
23317,
1330,
220,
4809,
30116,
34,
445,
14817
] | 4.086957 | 23 |
import markdown
with open("index.md", 'r') as md:
output = markdown.markdown(md.read())
with open("public/index.html", 'w') as out:
out.write(output)
| [
11748,
1317,
2902,
198,
198,
4480,
1280,
7203,
9630,
13,
9132,
1600,
705,
81,
11537,
355,
45243,
25,
198,
220,
220,
220,
5072,
796,
1317,
2902,
13,
4102,
2902,
7,
9132,
13,
961,
28955,
198,
220,
220,
220,
351,
1280,
7203,
11377,
14,
9630,
13,
6494,
1600,
705,
86,
11537,
355,
503,
25,
198,
220,
220,
220,
220,
220,
220,
220,
503,
13,
13564,
7,
22915,
8,
198
] | 2.455882 | 68 |
from numpy.linalg import inv
import numpy as np
import pykov
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
from itertools import cycle
import matplotlib
from matplotlib.pyplot import *
import brewer2mpl
import seaborn as sns
from scipy.stats import ks_2samp
from scipy.stats import mode
import pandas as pd
files = ["3","4","5","6","7","8","9"]
Ptype=["nsf","no_nsf"]
data=[]
datafitnesses=[]
for fNumber in files:
iterations=0
if(fNumber=="9"):
iterations=100
else:
iterations=1000
for ptype in Ptype:
for i in range(iterations):
maxF=0
with open("local-search-july-2017/"+ptype+fNumber) as f:
lines = f.readlines()
#reading the files
for k in range(0, len(lines)):
line = lines[k]
if(str(i)+") - gen" in line):
k=k+3
line = lines[k]
linex = line.split(",")
fitnesses = []
#reading the search space
#1 - 0, 2 - 3, 3 - 0, 4 - 3, 5 - 3, 6 - 1, 7 - 0, 8 - 1
for item in linex:
itemx = item.split("-")
fitnesses.append(float(itemx[1]))
fdata=[]
fdata.append(fNumber)
fdata.append(ptype)
fdata.append(float(itemx[1]))
datafitnesses.append(fdata)
#calculation of good enough fitness
modeF=mode(fitnesses)
maxF=max(fitnesses)
minF=min(fitnesses)
vge=modeF[0]+(maxF-modeF[0])/2
#reading the transition probabilities
if("it("+str(i)+");" in line):
s1=line.split(" ")
mSize=int(s1[1])
P= np.array([]).reshape(0,mSize)
for j in range(mSize):
line = lines[k+j+1]
line=line.rstrip()
row = line.split(" ")
a = np.array([])
for item in row:
itt = float(item)
a = np.append(a, itt)
P = np.vstack([P,a])
lenP=len(P)
rm= []
nvge=[]
allRm=[]
listS=[]
#Find absorbing states and optima
for j in range(lenP):
flag=0
ff = 0
for s in range(lenP):
# if there are no outgoing probabilities, then this is a local/global optimum.
if(P[j,s]>0):
ff = 1
if(j not in listS and s not in listS):
# plateoux of two solutions
if(P[j,s]==1.0 and P[s,j]==1.0):
flag=1
listS.append(j)
# absorbing state
if(P[j,s]==1.0 and j==s):
flag=1
listS.append(j)
for k in range(lenP):
if(k not in listS):
# plateoux of three solutions
if(P[j,s]==1.0 and P[s,k]==1.0 and P[k,j]==1.0):
flag=1
listS.append(j)
# plateoux of four solutions
if(P[j,s]==1.0 and P[s,j]>0 and P[s,k]>0 and (P[s,j]+P[s,k])==1.0 and P[k,s]==1.0):
flag=1
listS.append(j)
if(P[j,s]==1.0 and P[s,j]>0 and P[s,k]>0 and (P[s,j]+P[s,k])==1.0 and P[k,j]==1.0):
flag=1
listS.append(j)
# list that keep track of absorbing states and local/global optima
if(flag==1 or ff==0):
rm.append(j)
allRm.append(j)
if(fitnesses[j]<vge):
nvge.append(j)
allRm.append(j)
keptFitnesses = []
removedFitnesses = []
nvgeFitnesses = []
keep=[]
for j in range(lenP):
if(j in nvge):
nvgeFitnesses.append(fitnesses[j])
if(j not in rm and j not in nvge):
keptFitnesses.append(fitnesses[j])
keep.append(j)
if(j in rm):
removedFitnesses.append(fitnesses[j])
R=np.zeros((len(keep),len(rm)), dtype='float')
#create a vector of 1s for calculating number of visits
mat1=[]
# canonical representation by removing absorbing states and local
for j in range(len(keep)):
mat1.append(1)
for s in range(len(rm)):
R[j,s]=P[keep[j],rm[s]]
#removing
P=np.delete(P, allRm, axis=1)
P=np.delete(P, allRm, axis=0)
sm=0.0
sb=0.0
try:
if(len(P)>0):
iM=np.identity(len(P))
mM=iM-P
# Fundamental matrix
N = inv(mM)
# probability of reaching an absorbing state from any point
M=np.dot(N,R)
# expected number of steps to absorbion from any state
B=np.dot(N,mat1)
colsM = M.shape[1]
nrows=N.shape[0]
# calculating the probability of reaching a global optima
globalC=0
for j in range(colsM):
# if the absorbing state or optimum is a global optimum
if(removedFitnesses[j]==maxF):
globalC=globalC+1
sumTemp=sum(row[j] for row in M)
avgTemp=sumTemp/nrows
sm=sm+avgTemp
sm=sm/globalC
'''
colsN = N.shape[1]
for j in range(colsN):
if(keptFitnesses[j]==max):
tempf=0
for s in range(colsM):
if(M[j,s]>0.0):
tempf=1
if(tempf==0):
sumTemp=sum(row[j] for row in N)
avgTemp=sumTemp/nrows
if(avgTemp>=1.0):
avgTemp=1.0
sm=sm+avgTemp
'''
else:
countO=0
colsR = R.shape[1]
for j in range(colsR):
# if the absorbing state or optimum is a global optimum
if(removedFitnesses[j]==maxF):
countO=countO+1
sm=countO/colsR
nrows=B.shape[0]
globalC=0
for j in range(nrows):
if(removedFitnesses[j]==maxF):
globalC=globalC+1
sb=sb+B[j]
sb=sb/globalC
recD=[]
recD.append(fNumber)
recD.append(ptype)
#probability reaching global optimum
recD.append(sm)
#number of steps
recD.append(sb)
recD.append(globalC)
data.append(recD)
except:
print("error"+fNumber)
# drawing the boxplots
df = pd.DataFrame(data, columns=["N","PType","Probability","Steps","NGlobal"])
df.to_csv("MCresults.csv")
df2 = pd.DataFrame(datafitnesses, columns=["N","PType","Fitness"])
df2.to_csv("MCfitnesses.csv")
| [
6738,
299,
32152,
13,
75,
1292,
70,
1330,
800,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
12972,
21862,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
2603,
29487,
8019,
13,
66,
2070,
355,
269,
2070,
198,
6738,
340,
861,
10141,
1330,
6772,
198,
11748,
2603,
29487,
8019,
198,
6738,
2603,
29487,
8019,
13,
9078,
29487,
1330,
1635,
198,
11748,
47695,
17,
76,
489,
198,
11748,
384,
397,
1211,
355,
3013,
82,
198,
6738,
629,
541,
88,
13,
34242,
1330,
479,
82,
62,
17,
82,
696,
198,
6738,
629,
541,
88,
13,
34242,
1330,
4235,
198,
11748,
19798,
292,
355,
279,
67,
198,
198,
16624,
796,
14631,
18,
2430,
19,
2430,
20,
2430,
21,
2430,
22,
2430,
23,
2430,
24,
8973,
198,
47,
4906,
28,
14692,
5907,
69,
2430,
3919,
62,
5907,
69,
8973,
198,
7890,
28,
21737,
198,
7890,
69,
3659,
274,
28,
21737,
198,
1640,
277,
15057,
287,
3696,
25,
198,
220,
220,
220,
34820,
28,
15,
198,
220,
220,
220,
611,
7,
69,
15057,
855,
1,
24,
1,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
34820,
28,
3064,
198,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
34820,
28,
12825,
198,
220,
220,
220,
329,
279,
4906,
287,
350,
4906,
25,
198,
220,
220,
220,
220,
220,
220,
220,
329,
1312,
287,
2837,
7,
2676,
602,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3509,
37,
28,
15,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
351,
1280,
7203,
12001,
12,
12947,
12,
73,
2062,
12,
5539,
30487,
10,
457,
2981,
10,
69,
15057,
8,
355,
277,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3951,
796,
277,
13,
961,
6615,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
25782,
262,
3696,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
479,
287,
2837,
7,
15,
11,
18896,
7,
6615,
8,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1627,
796,
3951,
58,
74,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
2536,
7,
72,
47762,
4943,
532,
2429,
1,
287,
1627,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
479,
28,
74,
10,
18,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1627,
796,
3951,
58,
74,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1627,
87,
796,
1627,
13,
35312,
7,
2430,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
13547,
274,
796,
17635,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
25782,
262,
2989,
2272,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
16,
532,
657,
11,
362,
532,
513,
11,
513,
532,
657,
11,
604,
532,
513,
11,
642,
532,
513,
11,
718,
532,
352,
11,
767,
532,
657,
11,
807,
532,
352,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
2378,
287,
1627,
87,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2378,
87,
796,
2378,
13,
35312,
7203,
12,
4943,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
13547,
274,
13,
33295,
7,
22468,
7,
9186,
87,
58,
16,
60,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
277,
7890,
28,
21737,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
277,
7890,
13,
33295,
7,
69,
15057,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
277,
7890,
13,
33295,
7,
457,
2981,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
277,
7890,
13,
33295,
7,
22468,
7,
9186,
87,
58,
16,
60,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1366,
69,
3659,
274,
13,
33295,
7,
69,
7890,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
9948,
14902,
286,
922,
1576,
13547,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4235,
37,
28,
14171,
7,
69,
3659,
274,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3509,
37,
28,
9806,
7,
69,
3659,
274,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
949,
37,
28,
1084,
7,
69,
3659,
274,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
410,
469,
28,
14171,
37,
58,
15,
60,
33747,
9806,
37,
12,
14171,
37,
58,
15,
12962,
14,
17,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
25782,
262,
6801,
39522,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7203,
270,
7203,
10,
2536,
7,
72,
47762,
15341,
1,
287,
1627,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
264,
16,
28,
1370,
13,
35312,
7203,
366,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
285,
10699,
28,
600,
7,
82,
16,
58,
16,
12962,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
350,
28,
45941,
13,
18747,
7,
21737,
737,
3447,
1758,
7,
15,
11,
76,
10699,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
474,
287,
2837,
7,
76,
10699,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1627,
796,
3951,
58,
74,
10,
73,
10,
16,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1627,
28,
1370,
13,
81,
36311,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5752,
796,
1627,
13,
35312,
7203,
366,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
257,
796,
45941,
13,
18747,
26933,
12962,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
2378,
287,
5752,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
340,
83,
796,
12178,
7,
9186,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
257,
796,
45941,
13,
33295,
7,
64,
11,
340,
83,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
350,
796,
45941,
13,
85,
25558,
26933,
47,
11,
64,
12962,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
18896,
47,
28,
11925,
7,
47,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
42721,
28,
17635,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
299,
85,
469,
28,
21737,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
477,
49,
76,
28,
21737,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1351,
50,
28,
21737,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
16742,
34418,
2585,
290,
6436,
64,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
474,
287,
2837,
7,
11925,
47,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6056,
28,
15,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
31246,
796,
657,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
264,
287,
2837,
7,
11925,
47,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
611,
612,
389,
645,
28181,
39522,
11,
788,
428,
318,
257,
1957,
14,
20541,
39475,
13,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
47,
58,
73,
11,
82,
60,
29,
15,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
31246,
796,
352,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
73,
407,
287,
1351,
50,
290,
264,
407,
287,
1351,
50,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
7480,
22193,
286,
734,
8136,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
47,
58,
73,
11,
82,
60,
855,
16,
13,
15,
290,
350,
58,
82,
11,
73,
60,
855,
16,
13,
15,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6056,
28,
16,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1351,
50,
13,
33295,
7,
73,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
34418,
1181,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
47,
58,
73,
11,
82,
60,
855,
16,
13,
15,
290,
474,
855,
82,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6056,
28,
16,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1351,
50,
13,
33295,
7,
73,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
479,
287,
2837,
7,
11925,
47,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
74,
407,
287,
1351,
50,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
7480,
22193,
286,
1115,
8136,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
47,
58,
73,
11,
82,
60,
855,
16,
13,
15,
290,
350,
58,
82,
11,
74,
60,
855,
16,
13,
15,
290,
350,
58,
74,
11,
73,
60,
855,
16,
13,
15,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6056,
28,
16,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1351,
50,
13,
33295,
7,
73,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
7480,
22193,
286,
1440,
8136,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
47,
58,
73,
11,
82,
60,
855,
16,
13,
15,
290,
350,
58,
82,
11,
73,
60,
29,
15,
290,
350,
58,
82,
11,
74,
60,
29,
15,
290,
357,
47,
58,
82,
11,
73,
48688,
47,
58,
82,
11,
74,
12962,
855,
16,
13,
15,
290,
350,
58,
74,
11,
82,
60,
855,
16,
13,
15,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6056,
28,
16,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1351,
50,
13,
33295,
7,
73,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
47,
58,
73,
11,
82,
60,
855,
16,
13,
15,
290,
350,
58,
82,
11,
73,
60,
29,
15,
290,
350,
58,
82,
11,
74,
60,
29,
15,
290,
357,
47,
58,
82,
11,
73,
48688,
47,
58,
82,
11,
74,
12962,
855,
16,
13,
15,
290,
350,
58,
74,
11,
73,
60,
855,
16,
13,
15,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6056,
28,
16,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1351,
50,
13,
33295,
7,
73,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
1351,
326,
1394,
2610,
286,
34418,
2585,
290,
1957,
14,
20541,
6436,
64,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
32109,
855,
16,
393,
31246,
855,
15,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
42721,
13,
33295,
7,
73,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
477,
49,
76,
13,
33295,
7,
73,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
69,
3659,
274,
58,
73,
60,
27,
85,
469,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
299,
85,
469,
13,
33295,
7,
73,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
477,
49,
76,
13,
33295,
7,
73,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4030,
37,
3659,
274,
796,
17635,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4615,
37,
3659,
274,
796,
17635,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
299,
85,
469,
37,
3659,
274,
796,
17635,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1394,
28,
21737,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
474,
287,
2837,
7,
11925,
47,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
73,
287,
299,
85,
469,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
299,
85,
469,
37,
3659,
274,
13,
33295,
7,
69,
3659,
274,
58,
73,
12962,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
73,
407,
287,
42721,
290,
474,
407,
287,
299,
85,
469,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4030,
37,
3659,
274,
13,
33295,
7,
69,
3659,
274,
58,
73,
12962,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1394,
13,
33295,
7,
73,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
73,
287,
42721,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4615,
37,
3659,
274,
13,
33295,
7,
69,
3659,
274,
58,
73,
12962,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
371,
28,
37659,
13,
9107,
418,
19510,
11925,
7,
14894,
828,
11925,
7,
26224,
36911,
288,
4906,
11639,
22468,
11537,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
17953,
257,
15879,
286,
352,
82,
329,
26019,
1271,
286,
11864,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2603,
16,
28,
21737,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
40091,
10552,
416,
10829,
34418,
2585,
290,
1957,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
474,
287,
2837,
7,
11925,
7,
14894,
8,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2603,
16,
13,
33295,
7,
16,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
264,
287,
2837,
7,
11925,
7,
26224,
8,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
371,
58,
73,
11,
82,
22241,
47,
58,
14894,
58,
73,
4357,
26224,
58,
82,
11907,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
2787,
5165,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
350,
28,
37659,
13,
33678,
7,
47,
11,
477,
49,
76,
11,
16488,
28,
16,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
350,
28,
37659,
13,
33678,
7,
47,
11,
477,
49,
76,
11,
16488,
28,
15,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
895,
28,
15,
13,
15,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
264,
65,
28,
15,
13,
15,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1949,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
11925,
7,
47,
8,
29,
15,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1312,
44,
28,
37659,
13,
738,
414,
7,
11925,
7,
47,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
47676,
28,
72,
44,
12,
47,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
49983,
17593,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
399,
796,
800,
7,
76,
44,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
12867,
286,
8978,
281,
34418,
1181,
422,
597,
966,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
337,
28,
37659,
13,
26518,
7,
45,
11,
49,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
2938,
1271,
286,
4831,
284,
17565,
295,
422,
597,
1181,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
347,
28,
37659,
13,
26518,
7,
45,
11,
6759,
16,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
951,
82,
44,
796,
337,
13,
43358,
58,
16,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
299,
8516,
28,
45,
13,
43358,
58,
15,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
26019,
262,
12867,
286,
8978,
257,
3298,
6436,
64,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3298,
34,
28,
15,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
474,
287,
2837,
7,
4033,
82,
44,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
611,
262,
34418,
1181,
393,
39475,
318,
257,
3298,
39475,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
2787,
2668,
37,
3659,
274,
58,
73,
60,
855,
9806,
37,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3298,
34,
28,
20541,
34,
10,
16,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2160,
30782,
28,
16345,
7,
808,
58,
73,
60,
329,
5752,
287,
337,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
42781,
30782,
28,
16345,
30782,
14,
77,
8516,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
895,
28,
5796,
10,
615,
70,
30782,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
895,
28,
5796,
14,
20541,
34,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
705,
7061,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
951,
82,
45,
796,
399,
13,
43358,
58,
16,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
474,
287,
2837,
7,
4033,
82,
45,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
45089,
37,
3659,
274,
58,
73,
60,
855,
9806,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
20218,
69,
28,
15,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
264,
287,
2837,
7,
4033,
82,
44,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
44,
58,
73,
11,
82,
60,
29,
15,
13,
15,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
20218,
69,
28,
16,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
29510,
69,
855,
15,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2160,
30782,
28,
16345,
7,
808,
58,
73,
60,
329,
5752,
287,
399,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
42781,
30782,
28,
16345,
30782,
14,
77,
8516,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
615,
70,
30782,
29,
28,
16,
13,
15,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
42781,
30782,
28,
16,
13,
15,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
895,
28,
5796,
10,
615,
70,
30782,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
705,
7061,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
954,
46,
28,
15,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
951,
82,
49,
796,
371,
13,
43358,
58,
16,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
474,
287,
2837,
7,
4033,
82,
49,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
611,
262,
34418,
1181,
393,
39475,
318,
257,
3298,
39475,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
2787,
2668,
37,
3659,
274,
58,
73,
60,
855,
9806,
37,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
954,
46,
28,
9127,
46,
10,
16,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
895,
28,
9127,
46,
14,
4033,
82,
49,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
299,
8516,
28,
33,
13,
43358,
58,
15,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3298,
34,
28,
15,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
474,
287,
2837,
7,
77,
8516,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
7,
2787,
2668,
37,
3659,
274,
58,
73,
60,
855,
9806,
37,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3298,
34,
28,
20541,
34,
10,
16,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
264,
65,
28,
36299,
10,
33,
58,
73,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
264,
65,
28,
36299,
14,
20541,
34,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
664,
35,
28,
21737,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
664,
35,
13,
33295,
7,
69,
15057,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
664,
35,
13,
33295,
7,
457,
2981,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
1676,
65,
1799,
8978,
3298,
39475,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
664,
35,
13,
33295,
7,
5796,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
17618,
286,
4831,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
664,
35,
13,
33295,
7,
36299,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
664,
35,
13,
33295,
7,
20541,
34,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1366,
13,
33295,
7,
8344,
35,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2845,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3601,
7203,
18224,
1,
10,
69,
15057,
8,
628,
220,
220,
220,
1303,
8263,
262,
3091,
489,
1747,
198,
220,
220,
220,
220,
198,
7568,
796,
279,
67,
13,
6601,
19778,
7,
7890,
11,
15180,
28,
14692,
45,
2430,
47,
6030,
2430,
2964,
65,
1799,
2430,
8600,
82,
2430,
45,
22289,
8973,
8,
198,
7568,
13,
1462,
62,
40664,
7203,
9655,
43420,
13,
40664,
4943,
198,
7568,
17,
796,
279,
67,
13,
6601,
19778,
7,
7890,
69,
3659,
274,
11,
15180,
28,
14692,
45,
2430,
47,
6030,
2430,
37,
3659,
8973,
8,
198,
7568,
17,
13,
1462,
62,
40664,
7203,
9655,
69,
3659,
274,
13,
40664,
4943,
198
] | 1.36962 | 7,551 |
#!/usr/bin/env python3
import argparse
import cv2
import depthai as dai
import socket
from pipelines import goal_edge_depth_detection
import logging
from common import target_finder
from common.mjpeg_stream import MjpegStream
from networktables.util import NetworkTables
from common.utils import FPSHandler
parser = argparse.ArgumentParser()
parser.add_argument('-d', dest='debug', action="store_true", default=False, help='Start in Debug Mode')
args = parser.parse_args()
log = logging.getLogger(__name__)
if __name__ == '__main__':
log.info("Starting goal-depth-detection-host")
if args.debug:
MainDebug().run()
else:
Main().run()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
11748,
1822,
29572,
198,
11748,
269,
85,
17,
198,
11748,
6795,
1872,
355,
288,
1872,
198,
11748,
17802,
198,
198,
6738,
31108,
1330,
3061,
62,
14907,
62,
18053,
62,
15255,
3213,
198,
11748,
18931,
198,
6738,
2219,
1330,
2496,
62,
22805,
198,
198,
6738,
2219,
13,
76,
73,
22071,
62,
5532,
1330,
337,
73,
22071,
12124,
198,
6738,
3127,
83,
2977,
13,
22602,
1330,
7311,
51,
2977,
198,
6738,
2219,
13,
26791,
1330,
22082,
25060,
198,
198,
48610,
796,
1822,
29572,
13,
28100,
1713,
46677,
3419,
198,
48610,
13,
2860,
62,
49140,
10786,
12,
67,
3256,
2244,
11639,
24442,
3256,
2223,
2625,
8095,
62,
7942,
1600,
4277,
28,
25101,
11,
1037,
11639,
10434,
287,
31687,
10363,
11537,
198,
22046,
796,
30751,
13,
29572,
62,
22046,
3419,
198,
198,
6404,
796,
18931,
13,
1136,
11187,
1362,
7,
834,
3672,
834,
8,
628,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
2604,
13,
10951,
7203,
22851,
3061,
12,
18053,
12,
15255,
3213,
12,
4774,
4943,
198,
220,
220,
220,
611,
26498,
13,
24442,
25,
198,
220,
220,
220,
220,
220,
220,
220,
8774,
27509,
22446,
5143,
3419,
198,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
8774,
22446,
5143,
3419,
198
] | 3.004484 | 223 |
from Crypto.Cipher import AES
import base64, hashlib, json
from app.services import payment
from app.models import Vault
from app.utils import further_processing, standardize_response
| [
6738,
36579,
13,
34,
10803,
1330,
34329,
198,
11748,
2779,
2414,
11,
12234,
8019,
11,
33918,
198,
6738,
598,
13,
30416,
1330,
6074,
198,
6738,
598,
13,
27530,
1330,
23450,
198,
6738,
598,
13,
26791,
1330,
2252,
62,
36948,
11,
3210,
1096,
62,
26209,
198
] | 4.088889 | 45 |
"""
This module contains unit tests, for the most important functions of
ruspy.estimation.estimation_cost_parameters. The values to compare the results with
are saved in resources/estimation_test. The setting of the test is documented in the
inputs section in test module.
"""
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal
from ruspy.config import TEST_RESOURCES_DIR
from ruspy.estimation.estimation_transitions import create_transition_matrix
from ruspy.model_code.choice_probabilities import choice_prob_gumbel
from ruspy.model_code.cost_functions import calc_obs_costs
from ruspy.model_code.cost_functions import lin_cost
from ruspy.model_code.fix_point_alg import calc_fixp
from ruspy.test.ranodm_init import random_init
@pytest.fixture
@pytest.fixture
| [
37811,
198,
1212,
8265,
4909,
4326,
5254,
11,
329,
262,
749,
1593,
5499,
286,
198,
14932,
9078,
13,
395,
18991,
13,
395,
18991,
62,
15805,
62,
17143,
7307,
13,
383,
3815,
284,
8996,
262,
2482,
351,
198,
533,
7448,
287,
4133,
14,
395,
18991,
62,
9288,
13,
383,
4634,
286,
262,
1332,
318,
12395,
287,
262,
198,
15414,
82,
2665,
287,
1332,
8265,
13,
198,
37811,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
12972,
9288,
198,
6738,
299,
32152,
13,
33407,
1330,
6818,
62,
18747,
62,
28177,
62,
40496,
198,
198,
6738,
374,
385,
9078,
13,
11250,
1330,
43001,
62,
19535,
2606,
7397,
1546,
62,
34720,
198,
6738,
374,
385,
9078,
13,
395,
18991,
13,
395,
18991,
62,
7645,
1756,
1330,
2251,
62,
7645,
653,
62,
6759,
8609,
198,
6738,
374,
385,
9078,
13,
19849,
62,
8189,
13,
25541,
62,
1676,
65,
5738,
1330,
3572,
62,
1676,
65,
62,
70,
2178,
417,
198,
6738,
374,
385,
9078,
13,
19849,
62,
8189,
13,
15805,
62,
12543,
2733,
1330,
42302,
62,
8158,
62,
15805,
82,
198,
6738,
374,
385,
9078,
13,
19849,
62,
8189,
13,
15805,
62,
12543,
2733,
1330,
9493,
62,
15805,
198,
6738,
374,
385,
9078,
13,
19849,
62,
8189,
13,
13049,
62,
4122,
62,
14016,
1330,
42302,
62,
13049,
79,
198,
6738,
374,
385,
9078,
13,
9288,
13,
2596,
375,
76,
62,
15003,
1330,
4738,
62,
15003,
628,
198,
31,
9078,
9288,
13,
69,
9602,
628,
198,
31,
9078,
9288,
13,
69,
9602,
628,
628,
628
] | 3.25 | 248 |
from itertools import product
import numpy as np
from typing import Tuple
from IMLearn.learners.classifiers import DecisionStump
from IMLearn.metalearners import AdaBoost
from utils import *
import plotly.graph_objects as go
from plotly.subplots import make_subplots
pio.renderers.default = "browser"
def generate_data(n: int, noise_ratio: float) -> Tuple[np.ndarray, np.ndarray]:
"""
Generate a dataset in R^2 of specified size
Parameters
----------
n: int
Number of samples to generate
noise_ratio: float
Ratio of labels to invert
Returns
-------
X: np.ndarray of shape (n_samples,2)
Design matrix of samples
y: np.ndarray of shape (n_samples,)
Labels of samples
"""
'''
generate samples X with shape: (num_samples, 2) and labels y with shape (num_samples).
num_samples: the number of samples to generate
noise_ratio: invert the label for this ratio of the samples
'''
X, y = np.random.rand(n, 2) * 2 - 1, np.ones(n)
y[np.sum(X ** 2, axis=1) < 0.5 ** 2] = -1
y[np.random.choice(n, int(noise_ratio * n))] *= -1
return X, y
def add_partial_decision_boundary(fig, X, y, t, learner, lims, row=None, col=None):
"""
Plot the decision boundary of ensemble with t estimators
"""
# symbols = np.array(["circle", "x"])[((y + 1) / 2).astype(int)]
predict = lambda X_: learner.partial_predict(X_, t)
accuracy = 1 - learner.partial_loss(X, y, t)
fig.add_trace(decision_surface(predict, lims[0], lims[1], showscale=False),
row=row, col=col)
class0 = y == -1
fig.add_trace(go.Scatter(x=X[class0][:, 0], y=X[class0][:, 1], mode="markers",
name="Class -1", legendgroup='Class -1', showlegend=False,
marker=dict(color="red", symbol="circle", line=dict(color="black", width=1))),
row=row, col=col)
class1 = y == 1
fig.add_trace(go.Scatter(x=X[class1][:, 0], y=X[class1][:, 1], mode="markers",
name="Class 1", legendgroup='Class 1', showlegend=False,
marker=dict(color="blue", symbol="x", line=dict(color="black", width=1))),
row=row, col=col)
fig.update_xaxes(title_text="x", row=row, col=col)
fig.update_yaxes(title_text="y", row=row, col=col)
if row is None:
fig.update_layout(title_text=f"Decision boundary of ensemble with {t} estimators, Accuracy: {accuracy:.3f}")
else:
fig.layout.annotations[2*(row-1)+col-1].update(text=f"Using {t} estimators, Accuracy: {accuracy: .2f}")
return fig
if __name__ == '__main__':
np.random.seed(0)
fit_and_evaluate_adaboost(0)
fit_and_evaluate_adaboost(0.4)
| [
6738,
340,
861,
10141,
1330,
1720,
198,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
19720,
1330,
309,
29291,
198,
6738,
314,
5805,
451,
77,
13,
35720,
364,
13,
4871,
13350,
1330,
26423,
1273,
931,
198,
6738,
314,
5805,
451,
77,
13,
28469,
451,
2741,
1330,
47395,
45686,
198,
6738,
3384,
4487,
1330,
1635,
198,
11748,
7110,
306,
13,
34960,
62,
48205,
355,
467,
198,
6738,
7110,
306,
13,
7266,
489,
1747,
1330,
787,
62,
7266,
489,
1747,
198,
198,
79,
952,
13,
10920,
19288,
13,
12286,
796,
366,
40259,
1,
628,
198,
4299,
7716,
62,
7890,
7,
77,
25,
493,
11,
7838,
62,
10366,
952,
25,
12178,
8,
4613,
309,
29291,
58,
37659,
13,
358,
18747,
11,
45941,
13,
358,
18747,
5974,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
2980,
378,
257,
27039,
287,
371,
61,
17,
286,
7368,
2546,
628,
220,
220,
220,
40117,
198,
220,
220,
220,
24200,
438,
198,
220,
220,
220,
299,
25,
493,
198,
220,
220,
220,
220,
220,
220,
220,
7913,
286,
8405,
284,
7716,
628,
220,
220,
220,
7838,
62,
10366,
952,
25,
12178,
198,
220,
220,
220,
220,
220,
220,
220,
33956,
286,
14722,
284,
287,
1851,
628,
220,
220,
220,
16409,
198,
220,
220,
220,
35656,
198,
220,
220,
220,
1395,
25,
45941,
13,
358,
18747,
286,
5485,
357,
77,
62,
82,
12629,
11,
17,
8,
198,
220,
220,
220,
220,
220,
220,
220,
8495,
17593,
286,
8405,
628,
220,
220,
220,
331,
25,
45941,
13,
358,
18747,
286,
5485,
357,
77,
62,
82,
12629,
35751,
198,
220,
220,
220,
220,
220,
220,
220,
3498,
1424,
286,
8405,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
705,
7061,
198,
220,
220,
220,
7716,
8405,
1395,
351,
5485,
25,
357,
22510,
62,
82,
12629,
11,
362,
8,
290,
14722,
331,
351,
5485,
357,
22510,
62,
82,
12629,
737,
198,
220,
220,
220,
997,
62,
82,
12629,
25,
262,
1271,
286,
8405,
284,
7716,
198,
220,
220,
220,
7838,
62,
10366,
952,
25,
287,
1851,
262,
6167,
329,
428,
8064,
286,
262,
8405,
198,
220,
220,
220,
705,
7061,
198,
220,
220,
220,
1395,
11,
331,
796,
45941,
13,
25120,
13,
25192,
7,
77,
11,
362,
8,
1635,
362,
532,
352,
11,
45941,
13,
1952,
7,
77,
8,
198,
220,
220,
220,
331,
58,
37659,
13,
16345,
7,
55,
12429,
362,
11,
16488,
28,
16,
8,
1279,
657,
13,
20,
12429,
362,
60,
796,
532,
16,
198,
220,
220,
220,
331,
58,
37659,
13,
25120,
13,
25541,
7,
77,
11,
493,
7,
3919,
786,
62,
10366,
952,
1635,
299,
4008,
60,
1635,
28,
532,
16,
198,
220,
220,
220,
1441,
1395,
11,
331,
628,
198,
4299,
751,
62,
47172,
62,
12501,
1166,
62,
7784,
560,
7,
5647,
11,
1395,
11,
331,
11,
256,
11,
22454,
1008,
11,
1761,
82,
11,
5752,
28,
14202,
11,
951,
28,
14202,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
28114,
262,
2551,
18645,
286,
34549,
351,
256,
3959,
2024,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
1303,
14354,
796,
45941,
13,
18747,
7,
14692,
45597,
1600,
366,
87,
8973,
38381,
19510,
88,
1343,
352,
8,
1220,
362,
737,
459,
2981,
7,
600,
15437,
198,
220,
220,
220,
4331,
796,
37456,
1395,
62,
25,
22454,
1008,
13,
47172,
62,
79,
17407,
7,
55,
62,
11,
256,
8,
198,
220,
220,
220,
9922,
796,
352,
532,
22454,
1008,
13,
47172,
62,
22462,
7,
55,
11,
331,
11,
256,
8,
628,
220,
220,
220,
2336,
13,
2860,
62,
40546,
7,
12501,
1166,
62,
42029,
7,
79,
17407,
11,
1761,
82,
58,
15,
4357,
1761,
82,
58,
16,
4357,
905,
9888,
28,
25101,
828,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5752,
28,
808,
11,
951,
28,
4033,
8,
628,
220,
220,
220,
1398,
15,
796,
331,
6624,
532,
16,
198,
220,
220,
220,
2336,
13,
2860,
62,
40546,
7,
2188,
13,
3351,
1436,
7,
87,
28,
55,
58,
4871,
15,
7131,
45299,
657,
4357,
331,
28,
55,
58,
4871,
15,
7131,
45299,
352,
4357,
4235,
2625,
4102,
364,
1600,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1438,
2625,
9487,
532,
16,
1600,
8177,
8094,
11639,
9487,
532,
16,
3256,
905,
1455,
437,
28,
25101,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
18364,
28,
11600,
7,
8043,
2625,
445,
1600,
6194,
2625,
45597,
1600,
1627,
28,
11600,
7,
8043,
2625,
13424,
1600,
9647,
28,
16,
4008,
828,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5752,
28,
808,
11,
951,
28,
4033,
8,
628,
220,
220,
220,
1398,
16,
796,
331,
6624,
352,
198,
220,
220,
220,
2336,
13,
2860,
62,
40546,
7,
2188,
13,
3351,
1436,
7,
87,
28,
55,
58,
4871,
16,
7131,
45299,
657,
4357,
331,
28,
55,
58,
4871,
16,
7131,
45299,
352,
4357,
4235,
2625,
4102,
364,
1600,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1438,
2625,
9487,
352,
1600,
8177,
8094,
11639,
9487,
352,
3256,
905,
1455,
437,
28,
25101,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
18364,
28,
11600,
7,
8043,
2625,
17585,
1600,
6194,
2625,
87,
1600,
1627,
28,
11600,
7,
8043,
2625,
13424,
1600,
9647,
28,
16,
4008,
828,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5752,
28,
808,
11,
951,
28,
4033,
8,
628,
220,
220,
220,
2336,
13,
19119,
62,
87,
897,
274,
7,
7839,
62,
5239,
2625,
87,
1600,
5752,
28,
808,
11,
951,
28,
4033,
8,
198,
220,
220,
220,
2336,
13,
19119,
62,
88,
897,
274,
7,
7839,
62,
5239,
2625,
88,
1600,
5752,
28,
808,
11,
951,
28,
4033,
8,
198,
220,
220,
220,
611,
5752,
318,
6045,
25,
198,
220,
220,
220,
220,
220,
220,
220,
2336,
13,
19119,
62,
39786,
7,
7839,
62,
5239,
28,
69,
1,
10707,
1166,
18645,
286,
34549,
351,
1391,
83,
92,
3959,
2024,
11,
33222,
25,
1391,
4134,
23843,
25,
13,
18,
69,
92,
4943,
198,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
2336,
13,
39786,
13,
34574,
602,
58,
17,
9,
7,
808,
12,
16,
47762,
4033,
12,
16,
4083,
19119,
7,
5239,
28,
69,
1,
12814,
1391,
83,
92,
3959,
2024,
11,
33222,
25,
1391,
4134,
23843,
25,
764,
17,
69,
92,
4943,
628,
220,
220,
220,
1441,
2336,
628,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
45941,
13,
25120,
13,
28826,
7,
15,
8,
198,
220,
220,
220,
4197,
62,
392,
62,
49786,
62,
324,
34748,
455,
7,
15,
8,
198,
220,
220,
220,
4197,
62,
392,
62,
49786,
62,
324,
34748,
455,
7,
15,
13,
19,
8,
198
] | 2.275041 | 1,218 |
import torch
import torch.nn.functional as F
import torch.optim as optim
from model import Model
from video_dataset import Dataset
from tensorboard_logger import log_value
import utils
import numpy as np
from torch.autograd import Variable
from classificationMAP import getClassificationMAP as cmAP
from detectionMAP import getDetectionMAP as dmAP
import scipy.io as sio
# torch.set_default_tensor_type('torch.FloatTensor')
| [
11748,
28034,
201,
198,
11748,
28034,
13,
20471,
13,
45124,
355,
376,
201,
198,
11748,
28034,
13,
40085,
355,
6436,
201,
198,
6738,
2746,
1330,
9104,
201,
198,
6738,
2008,
62,
19608,
292,
316,
1330,
16092,
292,
316,
201,
198,
6738,
11192,
273,
3526,
62,
6404,
1362,
1330,
2604,
62,
8367,
201,
198,
11748,
3384,
4487,
201,
198,
11748,
299,
32152,
355,
45941,
201,
198,
6738,
28034,
13,
2306,
519,
6335,
1330,
35748,
201,
198,
6738,
17923,
33767,
1330,
651,
9487,
2649,
33767,
355,
12067,
2969,
201,
198,
6738,
13326,
33767,
1330,
651,
11242,
3213,
33767,
355,
288,
76,
2969,
201,
198,
11748,
629,
541,
88,
13,
952,
355,
264,
952,
201,
198,
201,
198,
2,
28034,
13,
2617,
62,
12286,
62,
83,
22854,
62,
4906,
10786,
13165,
354,
13,
43879,
51,
22854,
11537,
201,
198,
201,
198
] | 3.195652 | 138 |
'''
BSD 3-Clause License
Copyright (c) 2019, Donald N. Bockoven III
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
from __future__ import division
import math as m
import Tkinter as tk
import tkMessageBox
import ttk
import tkFont
import tkFileDialog
import bolt_group_istantaneous_center as bolt_ic
if __name__ == '__main__':
main()
| [
7061,
6,
201,
198,
21800,
513,
12,
2601,
682,
13789,
201,
198,
15269,
357,
66,
8,
13130,
11,
3759,
399,
13,
347,
735,
16206,
6711,
201,
198,
3237,
2489,
10395,
13,
201,
198,
7738,
396,
3890,
290,
779,
287,
2723,
290,
13934,
5107,
11,
351,
393,
1231,
201,
198,
4666,
2649,
11,
389,
10431,
2810,
326,
262,
1708,
3403,
389,
1138,
25,
201,
198,
9,
2297,
396,
2455,
507,
286,
2723,
2438,
1276,
12377,
262,
2029,
6634,
4003,
11,
428,
201,
198,
220,
1351,
286,
3403,
290,
262,
1708,
37592,
13,
201,
198,
9,
2297,
396,
2455,
507,
287,
13934,
1296,
1276,
22919,
262,
2029,
6634,
4003,
11,
201,
198,
220,
428,
1351,
286,
3403,
290,
262,
1708,
37592,
287,
262,
10314,
201,
198,
220,
290,
14,
273,
584,
5696,
2810,
351,
262,
6082,
13,
201,
198,
9,
16126,
262,
1438,
286,
262,
6634,
15762,
4249,
262,
3891,
286,
663,
201,
198,
220,
20420,
743,
307,
973,
284,
11438,
393,
7719,
3186,
10944,
422,
201,
198,
220,
428,
3788,
1231,
2176,
3161,
3194,
7170,
13,
201,
198,
43559,
47466,
3180,
36592,
2389,
1961,
11050,
3336,
27975,
38162,
9947,
367,
15173,
4877,
5357,
27342,
9865,
3843,
20673,
366,
1921,
3180,
1,
201,
198,
6981,
15529,
7788,
32761,
6375,
8959,
49094,
34764,
11015,
11,
47783,
2751,
11,
21728,
5626,
40880,
5390,
11,
3336,
201,
198,
3955,
49094,
34764,
11015,
3963,
34482,
3398,
1565,
5603,
25382,
5357,
376,
46144,
7473,
317,
16652,
2149,
37232,
33079,
48933,
15986,
201,
198,
26288,
48778,
1961,
13,
3268,
8005,
49261,
50163,
3336,
27975,
38162,
9947,
49707,
14418,
6375,
27342,
9865,
3843,
20673,
9348,
43031,
19146,
201,
198,
13775,
15529,
42242,
11,
3268,
17931,
23988,
11,
19387,
25256,
1847,
11,
38846,
11,
7788,
3620,
6489,
13153,
11,
6375,
7102,
5188,
10917,
3525,
12576,
201,
198,
35,
2390,
25552,
357,
1268,
39149,
2751,
11,
21728,
5626,
40880,
5390,
11,
41755,
11335,
10979,
3963,
28932,
2257,
2043,
37780,
21090,
50,
6375,
201,
198,
35009,
53,
34444,
26,
406,
18420,
3963,
23210,
11,
42865,
11,
6375,
4810,
19238,
29722,
26,
6375,
43949,
44180,
23255,
49,
8577,
24131,
8,
29630,
36,
5959,
201,
198,
8141,
2937,
1961,
5357,
6177,
15529,
3336,
15513,
3963,
43031,
25382,
11,
7655,
2767,
16879,
3268,
27342,
10659,
11,
19269,
18379,
43031,
25382,
11,
201,
198,
1581,
309,
9863,
357,
1268,
39149,
2751,
399,
7156,
43,
3528,
18310,
6375,
25401,
54,
24352,
8,
5923,
1797,
2751,
3268,
15529,
34882,
16289,
3963,
3336,
23210,
201,
198,
19238,
12680,
47466,
11,
45886,
16876,
5984,
29817,
1961,
3963,
3336,
28069,
11584,
25382,
3963,
13558,
3398,
29506,
11879,
13,
201,
198,
7061,
6,
201,
198,
201,
198,
6738,
11593,
37443,
834,
1330,
7297,
201,
198,
11748,
10688,
355,
285,
201,
198,
11748,
309,
74,
3849,
355,
256,
74,
201,
198,
11748,
256,
74,
12837,
14253,
201,
198,
11748,
256,
30488,
201,
198,
11748,
256,
74,
23252,
201,
198,
11748,
256,
74,
8979,
44204,
201,
198,
11748,
18100,
62,
8094,
62,
10167,
11655,
62,
16159,
355,
18100,
62,
291,
201,
198,
201,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
201,
198,
220,
220,
220,
1388,
3419,
201,
198
] | 3.439306 | 519 |
#-----------------------------------------------------------------------------
# This file is part of 'SLAC Firmware Standard Library'.
# It is subject to the license terms in the LICENSE.txt file found in the
# top-level directory of this distribution and at:
# https://confluence.slac.stanford.edu/display/ppareg/LICENSE.html.
# No part of 'SLAC Firmware Standard Library', including this file,
# may be copied, modified, propagated, or distributed except according to
# the terms contained in the LICENSE.txt file.
#-----------------------------------------------------------------------------
import pyrogue as pr
import surf.devices.silabs as silabs
import csv
import click
import fnmatch
| [
2,
10097,
32501,
198,
2,
770,
2393,
318,
636,
286,
705,
8634,
2246,
31623,
1574,
8997,
10074,
4458,
198,
2,
632,
318,
2426,
284,
262,
5964,
2846,
287,
262,
38559,
24290,
13,
14116,
2393,
1043,
287,
262,
198,
2,
1353,
12,
5715,
8619,
286,
428,
6082,
290,
379,
25,
198,
2,
220,
220,
220,
3740,
1378,
10414,
23079,
13,
6649,
330,
13,
14192,
3841,
13,
15532,
14,
13812,
14,
381,
533,
70,
14,
43,
2149,
24290,
13,
6494,
13,
198,
2,
1400,
636,
286,
705,
8634,
2246,
31623,
1574,
8997,
10074,
3256,
1390,
428,
2393,
11,
198,
2,
743,
307,
18984,
11,
9518,
11,
8928,
515,
11,
393,
9387,
2845,
1864,
284,
198,
2,
262,
2846,
7763,
287,
262,
38559,
24290,
13,
14116,
2393,
13,
198,
2,
10097,
32501,
198,
198,
11748,
12972,
3828,
518,
355,
778,
198,
11748,
9053,
13,
42034,
13,
18217,
8937,
355,
3313,
8937,
198,
11748,
269,
21370,
198,
11748,
3904,
198,
11748,
24714,
15699,
198
] | 4.3625 | 160 |
# def hypotenuse(x, y):
# return 0.0
#
# print(hypotenuse(3, 4))
#
# def hypotenuse(x, y):
# square_x = x**2
# square_y = y**2
# print('square_x is', square_x)
# print('square_y is', square_y)
# return 0.0
#
# print(hypotenuse(3, 4))
#
# def hypotenuse(x, y):
# from math import sqrt
# square_x = x**2
# square_y = y**2
# h_square = square_x + square_y
# print('hypotenuse square is', h_square)
# result = sqrt(h_square)
# return result
#
# print(hypotenuse(3, 4))
print(hypotenuse(3, 4))
| [
2,
825,
8813,
268,
1904,
7,
87,
11,
331,
2599,
198,
2,
220,
220,
220,
220,
1441,
657,
13,
15,
198,
2,
198,
2,
3601,
7,
36362,
313,
268,
1904,
7,
18,
11,
604,
4008,
198,
2,
198,
2,
825,
8813,
268,
1904,
7,
87,
11,
331,
2599,
198,
2,
220,
220,
220,
220,
6616,
62,
87,
796,
2124,
1174,
17,
198,
2,
220,
220,
220,
220,
6616,
62,
88,
796,
331,
1174,
17,
198,
2,
220,
220,
220,
220,
3601,
10786,
23415,
62,
87,
318,
3256,
6616,
62,
87,
8,
198,
2,
220,
220,
220,
220,
3601,
10786,
23415,
62,
88,
318,
3256,
6616,
62,
88,
8,
198,
2,
220,
220,
220,
220,
1441,
657,
13,
15,
198,
2,
198,
2,
3601,
7,
36362,
313,
268,
1904,
7,
18,
11,
604,
4008,
198,
2,
198,
2,
825,
8813,
268,
1904,
7,
87,
11,
331,
2599,
198,
2,
220,
220,
220,
220,
422,
10688,
1330,
19862,
17034,
198,
2,
220,
220,
220,
220,
6616,
62,
87,
796,
2124,
1174,
17,
198,
2,
220,
220,
220,
220,
6616,
62,
88,
796,
331,
1174,
17,
198,
2,
220,
220,
220,
220,
289,
62,
23415,
796,
6616,
62,
87,
1343,
6616,
62,
88,
198,
2,
220,
220,
220,
220,
3601,
10786,
36362,
313,
268,
1904,
6616,
318,
3256,
289,
62,
23415,
8,
198,
2,
220,
220,
220,
220,
1255,
796,
19862,
17034,
7,
71,
62,
23415,
8,
198,
2,
220,
220,
220,
220,
1441,
1255,
198,
2,
198,
2,
3601,
7,
36362,
313,
268,
1904,
7,
18,
11,
604,
4008,
198,
198,
4798,
7,
36362,
313,
268,
1904,
7,
18,
11,
604,
4008,
198
] | 2 | 271 |
#!/usr/bin/env python
# Plots stargazers of repositories.
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
from sklearn.neighbors import KernelDensity
# Based on: https://jakevdp.github.io/blog/2013/12/01/kernel-density-estimation/
def kde_sklearn(x, x_grid, bandwidth=0.2, **kwargs):
"""Kernel Density Estimation with Scikit-learn"""
kde_skl = KernelDensity(bandwidth=bandwidth, **kwargs)
kde_skl.fit(x[:, np.newaxis])
# score_samples() returns the log-likelihood of the samples
log_pdf = kde_skl.score_samples(x_grid[:, np.newaxis])
return np.exp(log_pdf)
# read CSV with base image count:
df = pd.read_csv('./data/stargazers.csv').sort_values('stargazers', ascending=True)
plot_data = [df['stargazers']]
grid = np.linspace(1, 40000, 5000)
fig, ax = plt.subplots()
for data in plot_data:
ax.plot(grid, kde_sklearn(data, grid, bandwidth=50), alpha=0.8)
ax.legend(labels=['Overall', 'Top 1000', 'Top 100'])
ax.legend(loc='upper left')
ax.set_xlabel('Project stargazers')
# ax.set_yscale('log')
# ax.set_ylim(-0.5, 5)
plt.show() | [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
2,
1345,
1747,
336,
853,
1031,
364,
286,
38072,
13,
198,
198,
11748,
19798,
292,
355,
279,
67,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
1341,
35720,
13,
710,
394,
32289,
1330,
32169,
35,
6377,
198,
198,
2,
13403,
319,
25,
3740,
1378,
73,
539,
20306,
79,
13,
12567,
13,
952,
14,
14036,
14,
6390,
14,
1065,
14,
486,
14,
33885,
12,
43337,
12,
395,
18991,
14,
198,
4299,
479,
2934,
62,
8135,
35720,
7,
87,
11,
2124,
62,
25928,
11,
19484,
28,
15,
13,
17,
11,
12429,
46265,
22046,
2599,
198,
220,
37227,
42,
7948,
360,
6377,
10062,
18991,
351,
10286,
15813,
12,
35720,
37811,
198,
220,
479,
2934,
62,
8135,
75,
796,
32169,
35,
6377,
7,
3903,
10394,
28,
3903,
10394,
11,
12429,
46265,
22046,
8,
198,
220,
479,
2934,
62,
8135,
75,
13,
11147,
7,
87,
58,
45299,
45941,
13,
3605,
22704,
12962,
198,
220,
1303,
4776,
62,
82,
12629,
3419,
5860,
262,
2604,
12,
2339,
11935,
286,
262,
8405,
198,
220,
2604,
62,
12315,
796,
479,
2934,
62,
8135,
75,
13,
26675,
62,
82,
12629,
7,
87,
62,
25928,
58,
45299,
45941,
13,
3605,
22704,
12962,
198,
220,
1441,
45941,
13,
11201,
7,
6404,
62,
12315,
8,
628,
198,
2,
1100,
44189,
351,
2779,
2939,
954,
25,
198,
7568,
796,
279,
67,
13,
961,
62,
40664,
7,
4458,
14,
7890,
14,
301,
853,
1031,
364,
13,
40664,
27691,
30619,
62,
27160,
10786,
301,
853,
1031,
364,
3256,
41988,
28,
17821,
8,
198,
198,
29487,
62,
7890,
796,
685,
7568,
17816,
301,
853,
1031,
364,
6,
11907,
628,
198,
25928,
796,
45941,
13,
21602,
10223,
7,
16,
11,
604,
2388,
11,
23336,
8,
198,
5647,
11,
7877,
796,
458,
83,
13,
7266,
489,
1747,
3419,
198,
1640,
1366,
287,
7110,
62,
7890,
25,
198,
220,
7877,
13,
29487,
7,
25928,
11,
479,
2934,
62,
8135,
35720,
7,
7890,
11,
10706,
11,
19484,
28,
1120,
828,
17130,
28,
15,
13,
23,
8,
198,
897,
13,
1455,
437,
7,
23912,
1424,
28,
17816,
16350,
3256,
705,
9126,
8576,
3256,
705,
9126,
1802,
6,
12962,
198,
897,
13,
1455,
437,
7,
17946,
11639,
45828,
1364,
11537,
198,
897,
13,
2617,
62,
87,
18242,
10786,
16775,
336,
853,
1031,
364,
11537,
198,
198,
2,
7877,
13,
2617,
62,
28349,
1000,
10786,
6404,
11537,
198,
198,
2,
7877,
13,
2617,
62,
88,
2475,
32590,
15,
13,
20,
11,
642,
8,
198,
198,
489,
83,
13,
12860,
3419
] | 2.515222 | 427 |
#
# Copyright (c) 2015 Juniper Networks, Inc. All rights reserved.
#
from analytic_client import AnalyticApiClient
import time, socket, os
from topology_uve import LinkUve
import gevent
from gevent.lock import Semaphore
from opserver.consistent_schdlr import ConsistentScheduler
from topology_config_handler import TopologyConfigHandler
import traceback
import ConfigParser
import signal
import random
import hashlib
from sandesh.topology_info.ttypes import TopologyInfo, TopologyUVE
from sandesh.link.ttypes import RemoteType, RemoteIfInfo, VRouterL2IfInfo,\
VRouterL2IfUVE
| [
2,
198,
2,
15069,
357,
66,
8,
1853,
7653,
9346,
27862,
11,
3457,
13,
1439,
2489,
10395,
13,
198,
2,
198,
6738,
49166,
62,
16366,
1330,
16213,
13370,
32,
14415,
11792,
198,
11748,
640,
11,
17802,
11,
28686,
198,
6738,
1353,
1435,
62,
45177,
1330,
7502,
52,
303,
198,
11748,
4903,
1151,
198,
6738,
4903,
1151,
13,
5354,
1330,
12449,
6570,
382,
198,
6738,
39628,
18497,
13,
5936,
7609,
62,
20601,
67,
14050,
1330,
3515,
7609,
50,
1740,
18173,
198,
6738,
1353,
1435,
62,
11250,
62,
30281,
1330,
5849,
1435,
16934,
25060,
198,
11748,
12854,
1891,
198,
11748,
17056,
46677,
198,
11748,
6737,
198,
11748,
4738,
198,
11748,
12234,
8019,
198,
6738,
6450,
5069,
13,
4852,
1435,
62,
10951,
13,
83,
19199,
1330,
5849,
1435,
12360,
11,
5849,
1435,
52,
6089,
198,
6738,
6450,
5069,
13,
8726,
13,
83,
19199,
1330,
21520,
6030,
11,
21520,
1532,
12360,
11,
6453,
39605,
43,
17,
1532,
12360,
11,
59,
198,
220,
220,
220,
6453,
39605,
43,
17,
1532,
52,
6089,
628
] | 3.452381 | 168 |
# conda install scikit-learn
# conda install -c conda-forge scikit-optimize
# conda install -c conda-forge rdkit
import pandas as pd
# from Tools.Clustering.butina import cluster_molecules
from molml.Datastructures.molecule import Dataset
from molml.Data import read_csv
from molml.Representations.descriptors import ecfp
from molml.Representations.strings import smiles_one_hot
from sklearn.ensemble import GradientBoostingRegressor
from molml.Tools.optimize import BayesianOpt
from molml.Tools.metrics import rmse
import numpy as np
molecules = read_csv(f"example_data/CHEMBL2047_EC50.csv", smiles_col='smiles', label_col='exp_mean [nM]')
data = Dataset(molecules[:50], name='CHEMBL2047', transform=smiles_one_hot, target_transform=minlog)
data.process()
data.show(10)
from molml.Tools.cluster import spectral
from molml.Viz.multivariate import TSNE, PCA
import seaborn as sns
clusters = spectral(molecules, k=10)
tsne = TSNE(n_components=2, perplexity=50, n_iter=500)
tsne.fit(molecules, use_n_principal_components=50)
tsne.show(color_by=clusters, palette=sns.color_palette("hls", 10))
pca = PCA(n_components=2)
pca.fit(molecules)
pca.show(color_by=clusters, palette=sns.color_palette("hls", 10))
from molml.Tools.splitting import stratified_split_molecules
train, test, val = stratified_split_molecules(molecules, labels=clusters)
data = Dataset(molecules, name='CHEMBL2047', transform=ecfp, target_transform=minlog)
data.process()
data.show(13)
hpm = {"learning_rate": [0.1, 0.01],
"max_depth": [1, 2, 3, 4, 5, 6, 7, 8],
"n_estimators": [5, 10, 20, 100, 200, 300]}
model = GradientBoostingRegressor
opt = BayesianOpt(model, data)
opt.opt(hpm, rmse, cv=5, n_calls=20)
opt.show()
# def fold_split_knn(dataset, k: int = 10, random_state: int = 42):
# from sklearn.cluster import KMeans
#
# clust = KMeans(n_clusters=10)
# clust.fit(x)
history = [(1,0.7201,0.7201),(2,0.6329,0.6329),(3,0.6305,0.6305),(4,0.6323,0.6305),(5,0.7195,0.6305),(6,0.6137,0.6137),
(7,0.6201,0.6137),(8,0.6239,0.6137),(9,0.6404,0.6137),(10,0.6264,0.6137),(11,0.6718,0.6137),(12,0.6368,0.6137),
(13,0.6337,0.6137),(14,0.6502,0.6137),(15,0.6235,0.6137),(16,0.6303,0.6137),(17,0.6171,0.6137),(18,0.6268,0.6137),
(19,0.6117,0.6117),(20,0.6170,0.6117)]
history = pd.DataFrame( columns=['Iteration', 'Score', 'Best Score'])
history['Score'].tolist()[-1]
len(history['Score'])
pd.DataFrame({'Iteration': [21], 'Score': [0.544], 'Best Score': [0.544]})
## TODO active learning
# split data train test -> make TSNE
# optimize model on train
# train model
# predict on test
# find most uncertain compounds
#
# python setup.py bdist_wheel
# python -m pip install dist/MoleculeACE-1.0.5-py3-none-any.whl
#
# twine upload dist/*
| [
2,
1779,
64,
2721,
629,
1134,
270,
12,
35720,
198,
2,
1779,
64,
2721,
532,
66,
1779,
64,
12,
30293,
629,
1134,
270,
12,
40085,
1096,
198,
2,
1779,
64,
2721,
532,
66,
1779,
64,
12,
30293,
374,
67,
15813,
198,
11748,
19798,
292,
355,
279,
67,
198,
198,
2,
422,
20003,
13,
2601,
436,
1586,
13,
4360,
1437,
1330,
13946,
62,
76,
2305,
13930,
198,
6738,
18605,
4029,
13,
27354,
459,
1356,
942,
13,
76,
2305,
23172,
1330,
16092,
292,
316,
198,
6738,
18605,
4029,
13,
6601,
1330,
1100,
62,
40664,
198,
6738,
18605,
4029,
13,
40171,
602,
13,
20147,
1968,
669,
1330,
9940,
46428,
198,
6738,
18605,
4029,
13,
40171,
602,
13,
37336,
1330,
21845,
62,
505,
62,
8940,
198,
6738,
1341,
35720,
13,
1072,
11306,
1330,
17701,
1153,
45686,
278,
8081,
44292,
198,
6738,
18605,
4029,
13,
33637,
13,
40085,
1096,
1330,
4696,
35610,
27871,
198,
6738,
18605,
4029,
13,
33637,
13,
4164,
10466,
1330,
42721,
325,
198,
11748,
299,
32152,
355,
45941,
198,
198,
76,
2305,
13930,
796,
1100,
62,
40664,
7,
69,
1,
20688,
62,
7890,
14,
3398,
3620,
9148,
1238,
2857,
62,
2943,
1120,
13,
40664,
1600,
21845,
62,
4033,
11639,
5796,
2915,
3256,
6167,
62,
4033,
11639,
11201,
62,
32604,
685,
77,
44,
60,
11537,
198,
198,
7890,
796,
16092,
292,
316,
7,
76,
2305,
13930,
58,
25,
1120,
4357,
1438,
11639,
3398,
3620,
9148,
1238,
2857,
3256,
6121,
28,
5796,
2915,
62,
505,
62,
8940,
11,
2496,
62,
35636,
28,
1084,
6404,
8,
198,
7890,
13,
14681,
3419,
198,
198,
7890,
13,
12860,
7,
940,
8,
198,
198,
6738,
18605,
4029,
13,
33637,
13,
565,
5819,
1330,
37410,
198,
6738,
18605,
4029,
13,
53,
528,
13,
16680,
42524,
1330,
26136,
12161,
11,
4217,
32,
198,
11748,
384,
397,
1211,
355,
3013,
82,
198,
198,
565,
13654,
796,
37410,
7,
76,
2305,
13930,
11,
479,
28,
940,
8,
628,
198,
912,
710,
796,
26136,
12161,
7,
77,
62,
5589,
3906,
28,
17,
11,
35682,
414,
28,
1120,
11,
299,
62,
2676,
28,
4059,
8,
198,
912,
710,
13,
11147,
7,
76,
2305,
13930,
11,
779,
62,
77,
62,
1050,
1939,
8521,
62,
5589,
3906,
28,
1120,
8,
198,
912,
710,
13,
12860,
7,
8043,
62,
1525,
28,
565,
13654,
11,
27043,
28,
82,
5907,
13,
8043,
62,
18596,
5857,
7203,
71,
7278,
1600,
838,
4008,
198,
198,
79,
6888,
796,
4217,
32,
7,
77,
62,
5589,
3906,
28,
17,
8,
198,
79,
6888,
13,
11147,
7,
76,
2305,
13930,
8,
198,
79,
6888,
13,
12860,
7,
8043,
62,
1525,
28,
565,
13654,
11,
27043,
28,
82,
5907,
13,
8043,
62,
18596,
5857,
7203,
71,
7278,
1600,
838,
4008,
628,
198,
198,
6738,
18605,
4029,
13,
33637,
13,
22018,
2535,
1330,
25369,
1431,
62,
35312,
62,
76,
2305,
13930,
198,
198,
27432,
11,
1332,
11,
1188,
796,
25369,
1431,
62,
35312,
62,
76,
2305,
13930,
7,
76,
2305,
13930,
11,
14722,
28,
565,
13654,
8,
628,
198,
7890,
796,
16092,
292,
316,
7,
76,
2305,
13930,
11,
1438,
11639,
3398,
3620,
9148,
1238,
2857,
3256,
6121,
28,
721,
46428,
11,
2496,
62,
35636,
28,
1084,
6404,
8,
198,
7890,
13,
14681,
3419,
198,
198,
7890,
13,
12860,
7,
1485,
8,
198,
198,
71,
4426,
796,
19779,
40684,
62,
4873,
1298,
685,
15,
13,
16,
11,
657,
13,
486,
4357,
198,
220,
220,
220,
220,
220,
220,
366,
9806,
62,
18053,
1298,
685,
16,
11,
362,
11,
513,
11,
604,
11,
642,
11,
718,
11,
767,
11,
807,
4357,
198,
220,
220,
220,
220,
220,
220,
366,
77,
62,
395,
320,
2024,
1298,
685,
20,
11,
838,
11,
1160,
11,
1802,
11,
939,
11,
5867,
48999,
198,
198,
19849,
796,
17701,
1153,
45686,
278,
8081,
44292,
198,
198,
8738,
796,
4696,
35610,
27871,
7,
19849,
11,
1366,
8,
198,
8738,
13,
8738,
7,
71,
4426,
11,
42721,
325,
11,
269,
85,
28,
20,
11,
299,
62,
66,
5691,
28,
1238,
8,
198,
8738,
13,
12860,
3419,
628,
198,
2,
825,
5591,
62,
35312,
62,
15418,
77,
7,
19608,
292,
316,
11,
479,
25,
493,
796,
838,
11,
4738,
62,
5219,
25,
493,
796,
5433,
2599,
198,
2,
220,
220,
220,
220,
422,
1341,
35720,
13,
565,
5819,
1330,
509,
5308,
504,
198,
2,
198,
2,
220,
220,
220,
220,
32966,
796,
509,
5308,
504,
7,
77,
62,
565,
13654,
28,
940,
8,
198,
2,
220,
220,
220,
220,
32966,
13,
11147,
7,
87,
8,
628,
198,
23569,
796,
47527,
16,
11,
15,
13,
22,
1264,
11,
15,
13,
22,
1264,
828,
7,
17,
11,
15,
13,
5066,
1959,
11,
15,
13,
5066,
1959,
828,
7,
18,
11,
15,
13,
21,
22515,
11,
15,
13,
21,
22515,
828,
7,
19,
11,
15,
13,
5066,
1954,
11,
15,
13,
21,
22515,
828,
7,
20,
11,
15,
13,
22,
22186,
11,
15,
13,
21,
22515,
828,
7,
21,
11,
15,
13,
21,
19708,
11,
15,
13,
21,
19708,
828,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
357,
22,
11,
15,
13,
21,
1264,
11,
15,
13,
21,
19708,
828,
7,
23,
11,
15,
13,
21,
23516,
11,
15,
13,
21,
19708,
828,
7,
24,
11,
15,
13,
21,
26429,
11,
15,
13,
21,
19708,
828,
7,
940,
11,
15,
13,
21,
18897,
11,
15,
13,
21,
19708,
828,
7,
1157,
11,
15,
13,
3134,
1507,
11,
15,
13,
21,
19708,
828,
7,
1065,
11,
15,
13,
21,
27412,
11,
15,
13,
21,
19708,
828,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
357,
1485,
11,
15,
13,
21,
31496,
11,
15,
13,
21,
19708,
828,
7,
1415,
11,
15,
13,
17544,
17,
11,
15,
13,
21,
19708,
828,
7,
1314,
11,
15,
13,
21,
22370,
11,
15,
13,
21,
19708,
828,
7,
1433,
11,
15,
13,
21,
22572,
11,
15,
13,
21,
19708,
828,
7,
1558,
11,
15,
13,
21,
27192,
11,
15,
13,
21,
19708,
828,
7,
1507,
11,
15,
13,
21,
25022,
11,
15,
13,
21,
19708,
828,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
357,
1129,
11,
15,
13,
21,
17657,
11,
15,
13,
21,
17657,
828,
7,
1238,
11,
15,
13,
21,
17279,
11,
15,
13,
21,
17657,
15437,
628,
198,
23569,
796,
279,
67,
13,
6601,
19778,
7,
15180,
28,
17816,
29993,
341,
3256,
705,
26595,
3256,
705,
13014,
15178,
6,
12962,
198,
198,
23569,
17816,
26595,
6,
4083,
83,
349,
396,
3419,
58,
12,
16,
60,
198,
11925,
7,
23569,
17816,
26595,
6,
12962,
198,
30094,
13,
6601,
19778,
15090,
6,
29993,
341,
10354,
685,
2481,
4357,
705,
26595,
10354,
685,
15,
13,
47576,
4357,
705,
13014,
15178,
10354,
685,
15,
13,
47576,
60,
30072,
628,
198,
2235,
16926,
46,
4075,
4673,
198,
2,
6626,
1366,
4512,
1332,
4613,
787,
26136,
12161,
198,
2,
27183,
2746,
319,
4512,
198,
2,
4512,
2746,
198,
2,
4331,
319,
1332,
198,
2,
1064,
749,
8627,
16439,
198,
2,
198,
198,
2,
21015,
9058,
13,
9078,
275,
17080,
62,
22001,
198,
2,
21015,
532,
76,
7347,
2721,
1233,
14,
44,
2305,
23172,
11598,
12,
16,
13,
15,
13,
20,
12,
9078,
18,
12,
23108,
12,
1092,
13,
1929,
75,
198,
2,
198,
2,
665,
500,
9516,
1233,
15211,
628,
198
] | 2.307884 | 1,205 |
alphabet = "0123456789."
code = input()
grid = []
variables = []
loops = 10
for i in range(100):
grid.append(00)
while code[0] != "3" or code[1] != "." or code[-1] != "4":
code = input("Code invalid. ")
code += "000000"
i = 2
while i < len(code) - 6:
variables = []
variables.append(int(code[i+1] + code[i+2]))
variables.append(int(code[i+3] + code[i+4]))
variables.append(int(code[i+5] + code[i+6]))
if code[i] == "0":
grid[variables[0]] = grid[variables[1]] + grid[variables[2]]
i += 7
elif code[i] == "1":
grid[variables[0]] = grid[variables[1]] - grid[variables[2]]
i += 7
elif code[i] == "2":
grid[variables[0]] = grid[variables[1]] * grid[variables[2]]
i += 7
elif code[i] == "3":
grid[variables[0]] = grid[variables[1]] / grid[variables[2]]
i += 7
elif code[i] == "4":
i = len(code)
elif code[i] == "5":
print(chr(grid[variables[0]]),end='')
i += 3
elif code[i] == "6":
grid[variables[0]] = variables[1]
i += 5
elif code[i] == "7":
grid[variables[0]] = ord(input())
i += 3
elif code[i] == "8":
if grid[variables[0]] == 0:
found = False
nests = 0
while found == False:
i += 1
if code[i] == "8":
nests += 1
elif code[i] == "9":
if nests == 0:
i += 1
found = True
else:
nests -= 1
elif grid[variables[0]] != 0:
i += 1
found = True
elif code[i] == "9":
storei = i
nests = 0
returned = False
while returned == False:
i -= 1
if code[i] == "9":
nests += 1
elif code[i] == "8":
if nests == 0:
if grid[int(str(code[i+1]) + str(code[i+2]))] == 0:
i = storei
returned = True
else:
returned = True
else:
print("Error found with character " + code[i])
| [
17307,
8380,
796,
366,
486,
1954,
2231,
3134,
4531,
526,
198,
8189,
796,
5128,
3419,
198,
25928,
796,
17635,
198,
25641,
2977,
796,
17635,
198,
5439,
2840,
796,
838,
198,
198,
1640,
1312,
287,
2837,
7,
3064,
2599,
198,
220,
220,
220,
10706,
13,
33295,
7,
405,
8,
198,
198,
4514,
2438,
58,
15,
60,
14512,
366,
18,
1,
393,
2438,
58,
16,
60,
14512,
366,
526,
393,
2438,
58,
12,
16,
60,
14512,
366,
19,
1298,
198,
220,
220,
220,
2438,
796,
5128,
7203,
10669,
12515,
13,
366,
8,
198,
198,
8189,
15853,
366,
10535,
1,
198,
198,
72,
796,
362,
198,
198,
4514,
1312,
1279,
18896,
7,
8189,
8,
532,
718,
25,
198,
220,
220,
220,
9633,
796,
17635,
198,
220,
220,
220,
9633,
13,
33295,
7,
600,
7,
8189,
58,
72,
10,
16,
60,
1343,
2438,
58,
72,
10,
17,
60,
4008,
198,
220,
220,
220,
9633,
13,
33295,
7,
600,
7,
8189,
58,
72,
10,
18,
60,
1343,
2438,
58,
72,
10,
19,
60,
4008,
198,
220,
220,
220,
9633,
13,
33295,
7,
600,
7,
8189,
58,
72,
10,
20,
60,
1343,
2438,
58,
72,
10,
21,
60,
4008,
198,
220,
220,
220,
611,
2438,
58,
72,
60,
6624,
366,
15,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
10706,
58,
25641,
2977,
58,
15,
11907,
796,
10706,
58,
25641,
2977,
58,
16,
11907,
1343,
10706,
58,
25641,
2977,
58,
17,
11907,
198,
220,
220,
220,
220,
220,
220,
220,
1312,
15853,
767,
198,
220,
220,
220,
1288,
361,
2438,
58,
72,
60,
6624,
366,
16,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
10706,
58,
25641,
2977,
58,
15,
11907,
796,
10706,
58,
25641,
2977,
58,
16,
11907,
532,
10706,
58,
25641,
2977,
58,
17,
11907,
198,
220,
220,
220,
220,
220,
220,
220,
1312,
15853,
767,
198,
220,
220,
220,
1288,
361,
2438,
58,
72,
60,
6624,
366,
17,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
10706,
58,
25641,
2977,
58,
15,
11907,
796,
10706,
58,
25641,
2977,
58,
16,
11907,
1635,
10706,
58,
25641,
2977,
58,
17,
11907,
198,
220,
220,
220,
220,
220,
220,
220,
1312,
15853,
767,
198,
220,
220,
220,
1288,
361,
2438,
58,
72,
60,
6624,
366,
18,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
10706,
58,
25641,
2977,
58,
15,
11907,
796,
10706,
58,
25641,
2977,
58,
16,
11907,
1220,
10706,
58,
25641,
2977,
58,
17,
11907,
198,
220,
220,
220,
220,
220,
220,
220,
1312,
15853,
767,
198,
220,
220,
220,
1288,
361,
2438,
58,
72,
60,
6624,
366,
19,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
1312,
796,
18896,
7,
8189,
8,
198,
220,
220,
220,
1288,
361,
2438,
58,
72,
60,
6624,
366,
20,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
7,
354,
81,
7,
25928,
58,
25641,
2977,
58,
15,
11907,
828,
437,
28,
7061,
8,
198,
220,
220,
220,
220,
220,
220,
220,
1312,
15853,
513,
198,
220,
220,
220,
1288,
361,
2438,
58,
72,
60,
6624,
366,
21,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
10706,
58,
25641,
2977,
58,
15,
11907,
796,
9633,
58,
16,
60,
198,
220,
220,
220,
220,
220,
220,
220,
1312,
15853,
642,
198,
220,
220,
220,
1288,
361,
2438,
58,
72,
60,
6624,
366,
22,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
10706,
58,
25641,
2977,
58,
15,
11907,
796,
2760,
7,
15414,
28955,
198,
220,
220,
220,
220,
220,
220,
220,
1312,
15853,
513,
198,
220,
220,
220,
1288,
361,
2438,
58,
72,
60,
6624,
366,
23,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
611,
10706,
58,
25641,
2977,
58,
15,
11907,
6624,
657,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1043,
796,
10352,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
44382,
796,
657,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
981,
1043,
6624,
10352,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1312,
15853,
352,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
2438,
58,
72,
60,
6624,
366,
23,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
44382,
15853,
352,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1288,
361,
2438,
58,
72,
60,
6624,
366,
24,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
44382,
6624,
657,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1312,
15853,
352,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1043,
796,
6407,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
44382,
48185,
352,
198,
220,
220,
220,
220,
220,
220,
220,
1288,
361,
10706,
58,
25641,
2977,
58,
15,
11907,
14512,
657,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1312,
15853,
352,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1043,
796,
6407,
198,
220,
220,
220,
1288,
361,
2438,
58,
72,
60,
6624,
366,
24,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
3650,
72,
796,
1312,
198,
220,
220,
220,
220,
220,
220,
220,
44382,
796,
657,
198,
220,
220,
220,
220,
220,
220,
220,
4504,
796,
10352,
198,
220,
220,
220,
220,
220,
220,
220,
981,
4504,
6624,
10352,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1312,
48185,
352,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
2438,
58,
72,
60,
6624,
366,
24,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
44382,
15853,
352,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1288,
361,
2438,
58,
72,
60,
6624,
366,
23,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
44382,
6624,
657,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
10706,
58,
600,
7,
2536,
7,
8189,
58,
72,
10,
16,
12962,
1343,
965,
7,
8189,
58,
72,
10,
17,
60,
4008,
60,
6624,
657,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1312,
796,
3650,
72,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4504,
796,
6407,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4504,
796,
6407,
198,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3601,
7203,
12331,
1043,
351,
2095,
366,
1343,
2438,
58,
72,
12962,
198
] | 1.740188 | 1,274 |
# coding: utf-8
"""
Paasta API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from paasta_tools.paastaapi.configuration import Configuration
class MarathonAutoscalingInfo(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'current_instances': 'int',
'current_utilization': 'float',
'max_instances': 'int',
'min_instances': 'int',
'target_instances': 'int'
}
attribute_map = {
'current_instances': 'current_instances',
'current_utilization': 'current_utilization',
'max_instances': 'max_instances',
'min_instances': 'min_instances',
'target_instances': 'target_instances'
}
def __init__(self, current_instances=None, current_utilization=None, max_instances=None, min_instances=None, target_instances=None, local_vars_configuration=None): # noqa: E501
"""MarathonAutoscalingInfo - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._current_instances = None
self._current_utilization = None
self._max_instances = None
self._min_instances = None
self._target_instances = None
self.discriminator = None
if current_instances is not None:
self.current_instances = current_instances
if current_utilization is not None:
self.current_utilization = current_utilization
if max_instances is not None:
self.max_instances = max_instances
if min_instances is not None:
self.min_instances = min_instances
if target_instances is not None:
self.target_instances = target_instances
@property
def current_instances(self):
"""Gets the current_instances of this MarathonAutoscalingInfo. # noqa: E501
The number of instances of the service currently running # noqa: E501
:return: The current_instances of this MarathonAutoscalingInfo. # noqa: E501
:rtype: int
"""
return self._current_instances
@current_instances.setter
def current_instances(self, current_instances):
"""Sets the current_instances of this MarathonAutoscalingInfo.
The number of instances of the service currently running # noqa: E501
:param current_instances: The current_instances of this MarathonAutoscalingInfo. # noqa: E501
:type current_instances: int
"""
self._current_instances = current_instances
@property
def current_utilization(self):
"""Gets the current_utilization of this MarathonAutoscalingInfo. # noqa: E501
The current utilization of the instances' allocated resources # noqa: E501
:return: The current_utilization of this MarathonAutoscalingInfo. # noqa: E501
:rtype: float
"""
return self._current_utilization
@current_utilization.setter
def current_utilization(self, current_utilization):
"""Sets the current_utilization of this MarathonAutoscalingInfo.
The current utilization of the instances' allocated resources # noqa: E501
:param current_utilization: The current_utilization of this MarathonAutoscalingInfo. # noqa: E501
:type current_utilization: float
"""
self._current_utilization = current_utilization
@property
def max_instances(self):
"""Gets the max_instances of this MarathonAutoscalingInfo. # noqa: E501
The maximum number of instances that the autoscaler will scale to # noqa: E501
:return: The max_instances of this MarathonAutoscalingInfo. # noqa: E501
:rtype: int
"""
return self._max_instances
@max_instances.setter
def max_instances(self, max_instances):
"""Sets the max_instances of this MarathonAutoscalingInfo.
The maximum number of instances that the autoscaler will scale to # noqa: E501
:param max_instances: The max_instances of this MarathonAutoscalingInfo. # noqa: E501
:type max_instances: int
"""
self._max_instances = max_instances
@property
def min_instances(self):
"""Gets the min_instances of this MarathonAutoscalingInfo. # noqa: E501
The minimum number of instances that the autoscaler will scale to # noqa: E501
:return: The min_instances of this MarathonAutoscalingInfo. # noqa: E501
:rtype: int
"""
return self._min_instances
@min_instances.setter
def min_instances(self, min_instances):
"""Sets the min_instances of this MarathonAutoscalingInfo.
The minimum number of instances that the autoscaler will scale to # noqa: E501
:param min_instances: The min_instances of this MarathonAutoscalingInfo. # noqa: E501
:type min_instances: int
"""
self._min_instances = min_instances
@property
def target_instances(self):
"""Gets the target_instances of this MarathonAutoscalingInfo. # noqa: E501
The autoscaler's current target number of instances of this service to run # noqa: E501
:return: The target_instances of this MarathonAutoscalingInfo. # noqa: E501
:rtype: int
"""
return self._target_instances
@target_instances.setter
def target_instances(self, target_instances):
"""Sets the target_instances of this MarathonAutoscalingInfo.
The autoscaler's current target number of instances of this service to run # noqa: E501
:param target_instances: The target_instances of this MarathonAutoscalingInfo. # noqa: E501
:type target_instances: int
"""
self._target_instances = target_instances
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, MarathonAutoscalingInfo):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, MarathonAutoscalingInfo):
return True
return self.to_dict() != other.to_dict()
| [
2,
19617,
25,
3384,
69,
12,
23,
198,
198,
37811,
198,
220,
220,
220,
11243,
40197,
7824,
628,
220,
220,
220,
1400,
6764,
2810,
357,
27568,
416,
4946,
15042,
35986,
3740,
1378,
12567,
13,
785,
14,
9654,
499,
270,
10141,
14,
9654,
15042,
12,
8612,
1352,
8,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
383,
2196,
286,
262,
4946,
17614,
3188,
25,
352,
13,
15,
13,
15,
198,
220,
220,
220,
2980,
515,
416,
25,
3740,
1378,
9654,
15042,
12,
8612,
1352,
13,
13670,
198,
37811,
628,
198,
11748,
279,
4798,
198,
11748,
302,
220,
1303,
645,
20402,
25,
376,
21844,
198,
198,
11748,
2237,
198,
198,
6738,
14187,
40197,
62,
31391,
13,
8957,
40197,
15042,
13,
11250,
3924,
1330,
28373,
628,
198,
4871,
24828,
16541,
17500,
4272,
12360,
7,
15252,
2599,
198,
220,
220,
220,
37227,
16580,
25,
770,
1398,
318,
8295,
7560,
416,
4946,
17614,
35986,
13,
198,
220,
220,
220,
6524,
25,
3740,
1378,
9654,
15042,
12,
8612,
1352,
13,
13670,
628,
220,
220,
220,
2141,
407,
4370,
262,
1398,
14500,
13,
198,
220,
220,
220,
37227,
628,
220,
220,
220,
37227,
198,
220,
220,
220,
49213,
25,
198,
220,
220,
220,
220,
220,
1280,
15042,
62,
19199,
357,
11600,
2599,
383,
1994,
318,
11688,
1438,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
290,
262,
1988,
318,
11688,
2099,
13,
198,
220,
220,
220,
220,
220,
11688,
62,
8899,
357,
11600,
2599,
383,
1994,
318,
11688,
1438,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
290,
262,
1988,
318,
33918,
1994,
287,
6770,
13,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
1280,
15042,
62,
19199,
796,
1391,
198,
220,
220,
220,
220,
220,
220,
220,
705,
14421,
62,
8625,
1817,
10354,
705,
600,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
705,
14421,
62,
22602,
1634,
10354,
705,
22468,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
705,
9806,
62,
8625,
1817,
10354,
705,
600,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
705,
1084,
62,
8625,
1817,
10354,
705,
600,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
705,
16793,
62,
8625,
1817,
10354,
705,
600,
6,
198,
220,
220,
220,
1782,
628,
220,
220,
220,
11688,
62,
8899,
796,
1391,
198,
220,
220,
220,
220,
220,
220,
220,
705,
14421,
62,
8625,
1817,
10354,
705,
14421,
62,
8625,
1817,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
705,
14421,
62,
22602,
1634,
10354,
705,
14421,
62,
22602,
1634,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
705,
9806,
62,
8625,
1817,
10354,
705,
9806,
62,
8625,
1817,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
705,
1084,
62,
8625,
1817,
10354,
705,
1084,
62,
8625,
1817,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
705,
16793,
62,
8625,
1817,
10354,
705,
16793,
62,
8625,
1817,
6,
198,
220,
220,
220,
1782,
628,
220,
220,
220,
825,
11593,
15003,
834,
7,
944,
11,
1459,
62,
8625,
1817,
28,
14202,
11,
1459,
62,
22602,
1634,
28,
14202,
11,
3509,
62,
8625,
1817,
28,
14202,
11,
949,
62,
8625,
1817,
28,
14202,
11,
2496,
62,
8625,
1817,
28,
14202,
11,
1957,
62,
85,
945,
62,
11250,
3924,
28,
14202,
2599,
220,
1303,
645,
20402,
25,
412,
33548,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
7676,
12938,
16541,
17500,
4272,
12360,
532,
257,
2746,
5447,
287,
4946,
17614,
37811,
220,
1303,
645,
20402,
25,
412,
33548,
198,
220,
220,
220,
220,
220,
220,
220,
611,
1957,
62,
85,
945,
62,
11250,
3924,
318,
6045,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1957,
62,
85,
945,
62,
11250,
3924,
796,
28373,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
12001,
62,
85,
945,
62,
11250,
3924,
796,
1957,
62,
85,
945,
62,
11250,
3924,
628,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
14421,
62,
8625,
1817,
796,
6045,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
14421,
62,
22602,
1634,
796,
6045,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
9806,
62,
8625,
1817,
796,
6045,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
1084,
62,
8625,
1817,
796,
6045,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
16793,
62,
8625,
1817,
796,
6045,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
15410,
3036,
20900,
796,
6045,
628,
220,
220,
220,
220,
220,
220,
220,
611,
1459,
62,
8625,
1817,
318,
407,
6045,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
14421,
62,
8625,
1817,
796,
1459,
62,
8625,
1817,
198,
220,
220,
220,
220,
220,
220,
220,
611,
1459,
62,
22602,
1634,
318,
407,
6045,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
14421,
62,
22602,
1634,
796,
1459,
62,
22602,
1634,
198,
220,
220,
220,
220,
220,
220,
220,
611,
3509,
62,
8625,
1817,
318,
407,
6045,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
9806,
62,
8625,
1817,
796,
3509,
62,
8625,
1817,
198,
220,
220,
220,
220,
220,
220,
220,
611,
949,
62,
8625,
1817,
318,
407,
6045,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
1084,
62,
8625,
1817,
796,
949,
62,
8625,
1817,
198,
220,
220,
220,
220,
220,
220,
220,
611,
2496,
62,
8625,
1817,
318,
407,
6045,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
16793,
62,
8625,
1817,
796,
2496,
62,
8625,
1817,
628,
220,
220,
220,
2488,
26745,
198,
220,
220,
220,
825,
1459,
62,
8625,
1817,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
38,
1039,
262,
1459,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
220,
220,
220,
220,
383,
1271,
286,
10245,
286,
262,
2139,
3058,
2491,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
7783,
25,
383,
1459,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
220,
1303,
645,
20402,
25,
412,
33548,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
81,
4906,
25,
493,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
2116,
13557,
14421,
62,
8625,
1817,
628,
220,
220,
220,
2488,
14421,
62,
8625,
1817,
13,
2617,
353,
198,
220,
220,
220,
825,
1459,
62,
8625,
1817,
7,
944,
11,
1459,
62,
8625,
1817,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
50,
1039,
262,
1459,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
628,
220,
220,
220,
220,
220,
220,
220,
383,
1271,
286,
10245,
286,
262,
2139,
3058,
2491,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
17143,
1459,
62,
8625,
1817,
25,
383,
1459,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
220,
1303,
645,
20402,
25,
412,
33548,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
4906,
1459,
62,
8625,
1817,
25,
493,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
628,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
14421,
62,
8625,
1817,
796,
1459,
62,
8625,
1817,
628,
220,
220,
220,
2488,
26745,
198,
220,
220,
220,
825,
1459,
62,
22602,
1634,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
38,
1039,
262,
1459,
62,
22602,
1634,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
220,
220,
220,
220,
383,
1459,
32121,
286,
262,
10245,
6,
19171,
4133,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
7783,
25,
383,
1459,
62,
22602,
1634,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
220,
1303,
645,
20402,
25,
412,
33548,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
81,
4906,
25,
12178,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
2116,
13557,
14421,
62,
22602,
1634,
628,
220,
220,
220,
2488,
14421,
62,
22602,
1634,
13,
2617,
353,
198,
220,
220,
220,
825,
1459,
62,
22602,
1634,
7,
944,
11,
1459,
62,
22602,
1634,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
50,
1039,
262,
1459,
62,
22602,
1634,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
628,
220,
220,
220,
220,
220,
220,
220,
383,
1459,
32121,
286,
262,
10245,
6,
19171,
4133,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
17143,
1459,
62,
22602,
1634,
25,
383,
1459,
62,
22602,
1634,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
220,
1303,
645,
20402,
25,
412,
33548,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
4906,
1459,
62,
22602,
1634,
25,
12178,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
628,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
14421,
62,
22602,
1634,
796,
1459,
62,
22602,
1634,
628,
220,
220,
220,
2488,
26745,
198,
220,
220,
220,
825,
3509,
62,
8625,
1817,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
38,
1039,
262,
3509,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
220,
220,
220,
220,
383,
5415,
1271,
286,
10245,
326,
262,
44619,
9948,
263,
481,
5046,
284,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
7783,
25,
383,
3509,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
220,
1303,
645,
20402,
25,
412,
33548,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
81,
4906,
25,
493,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
2116,
13557,
9806,
62,
8625,
1817,
628,
220,
220,
220,
2488,
9806,
62,
8625,
1817,
13,
2617,
353,
198,
220,
220,
220,
825,
3509,
62,
8625,
1817,
7,
944,
11,
3509,
62,
8625,
1817,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
50,
1039,
262,
3509,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
628,
220,
220,
220,
220,
220,
220,
220,
383,
5415,
1271,
286,
10245,
326,
262,
44619,
9948,
263,
481,
5046,
284,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
17143,
3509,
62,
8625,
1817,
25,
383,
3509,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
220,
1303,
645,
20402,
25,
412,
33548,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
4906,
3509,
62,
8625,
1817,
25,
493,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
628,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
9806,
62,
8625,
1817,
796,
3509,
62,
8625,
1817,
628,
220,
220,
220,
2488,
26745,
198,
220,
220,
220,
825,
949,
62,
8625,
1817,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
38,
1039,
262,
949,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
220,
220,
220,
220,
383,
5288,
1271,
286,
10245,
326,
262,
44619,
9948,
263,
481,
5046,
284,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
7783,
25,
383,
949,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
220,
1303,
645,
20402,
25,
412,
33548,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
81,
4906,
25,
493,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
2116,
13557,
1084,
62,
8625,
1817,
628,
220,
220,
220,
2488,
1084,
62,
8625,
1817,
13,
2617,
353,
198,
220,
220,
220,
825,
949,
62,
8625,
1817,
7,
944,
11,
949,
62,
8625,
1817,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
50,
1039,
262,
949,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
628,
220,
220,
220,
220,
220,
220,
220,
383,
5288,
1271,
286,
10245,
326,
262,
44619,
9948,
263,
481,
5046,
284,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
17143,
949,
62,
8625,
1817,
25,
383,
949,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
220,
1303,
645,
20402,
25,
412,
33548,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
4906,
949,
62,
8625,
1817,
25,
493,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
628,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
1084,
62,
8625,
1817,
796,
949,
62,
8625,
1817,
628,
220,
220,
220,
2488,
26745,
198,
220,
220,
220,
825,
2496,
62,
8625,
1817,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
38,
1039,
262,
2496,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
220,
220,
220,
220,
383,
44619,
9948,
263,
338,
1459,
2496,
1271,
286,
10245,
286,
428,
2139,
284,
1057,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
7783,
25,
383,
2496,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
220,
1303,
645,
20402,
25,
412,
33548,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
81,
4906,
25,
493,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
2116,
13557,
16793,
62,
8625,
1817,
628,
220,
220,
220,
2488,
16793,
62,
8625,
1817,
13,
2617,
353,
198,
220,
220,
220,
825,
2496,
62,
8625,
1817,
7,
944,
11,
2496,
62,
8625,
1817,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
50,
1039,
262,
2496,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
628,
220,
220,
220,
220,
220,
220,
220,
383,
44619,
9948,
263,
338,
1459,
2496,
1271,
286,
10245,
286,
428,
2139,
284,
1057,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
17143,
2496,
62,
8625,
1817,
25,
383,
2496,
62,
8625,
1817,
286,
428,
24828,
16541,
17500,
4272,
12360,
13,
220,
1303,
645,
20402,
25,
412,
33548,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
4906,
2496,
62,
8625,
1817,
25,
493,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
628,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
16793,
62,
8625,
1817,
796,
2496,
62,
8625,
1817,
628,
220,
220,
220,
825,
284,
62,
11600,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
35561,
262,
2746,
6608,
355,
257,
8633,
37811,
198,
220,
220,
220,
220,
220,
220,
220,
1255,
796,
23884,
628,
220,
220,
220,
220,
220,
220,
220,
329,
708,
81,
11,
4808,
287,
2237,
13,
2676,
23814,
7,
944,
13,
9654,
15042,
62,
19199,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1988,
796,
651,
35226,
7,
944,
11,
708,
81,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
318,
39098,
7,
8367,
11,
1351,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1255,
58,
35226,
60,
796,
1351,
7,
8899,
7,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
37456,
2124,
25,
2124,
13,
1462,
62,
11600,
3419,
611,
468,
35226,
7,
87,
11,
366,
1462,
62,
11600,
4943,
2073,
2124,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1988,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
15306,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1288,
361,
468,
35226,
7,
8367,
11,
366,
1462,
62,
11600,
1,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1255,
58,
35226,
60,
796,
1988,
13,
1462,
62,
11600,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1288,
361,
318,
39098,
7,
8367,
11,
8633,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1255,
58,
35226,
60,
796,
8633,
7,
8899,
7,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
37456,
2378,
25,
357,
9186,
58,
15,
4357,
2378,
58,
16,
4083,
1462,
62,
11600,
28955,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
468,
35226,
7,
9186,
58,
16,
4357,
366,
1462,
62,
11600,
4943,
2073,
2378,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1988,
13,
23814,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
15306,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1255,
58,
35226,
60,
796,
1988,
628,
220,
220,
220,
220,
220,
220,
220,
1441,
1255,
628,
220,
220,
220,
825,
284,
62,
2536,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
35561,
262,
4731,
10552,
286,
262,
2746,
37811,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
279,
4798,
13,
79,
18982,
7,
944,
13,
1462,
62,
11600,
28955,
628,
220,
220,
220,
825,
11593,
260,
1050,
834,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
1890,
4600,
4798,
63,
290,
4600,
381,
22272,
63,
37811,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
2116,
13,
1462,
62,
2536,
3419,
628,
220,
220,
220,
825,
11593,
27363,
834,
7,
944,
11,
584,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
35561,
2081,
611,
1111,
5563,
389,
4961,
37811,
198,
220,
220,
220,
220,
220,
220,
220,
611,
407,
318,
39098,
7,
847,
11,
24828,
16541,
17500,
4272,
12360,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1441,
10352,
628,
220,
220,
220,
220,
220,
220,
220,
1441,
2116,
13,
1462,
62,
11600,
3419,
6624,
584,
13,
1462,
62,
11600,
3419,
628,
220,
220,
220,
825,
11593,
710,
834,
7,
944,
11,
584,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
35561,
2081,
611,
1111,
5563,
389,
407,
4961,
37811,
198,
220,
220,
220,
220,
220,
220,
220,
611,
407,
318,
39098,
7,
847,
11,
24828,
16541,
17500,
4272,
12360,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1441,
6407,
628,
220,
220,
220,
220,
220,
220,
220,
1441,
2116,
13,
1462,
62,
11600,
3419,
14512,
584,
13,
1462,
62,
11600,
3419,
198
] | 2.46485 | 3,229 |
#!/usr/bin/env python
import numpy as np
from pymvg.test.utils import _build_points_3d, make_M
import os
from pymvg.util import normalize
from pymvg.camera_model import CameraModel
DRAW=int(os.environ.get('DRAW','0'))
if DRAW:
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from pymvg.plot_utils import plot_camera
if __name__=='__main__':
test_simple_projection()
test_lookat()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
279,
4948,
45119,
13,
9288,
13,
26791,
1330,
4808,
11249,
62,
13033,
62,
18,
67,
11,
787,
62,
44,
198,
11748,
28686,
198,
198,
6738,
279,
4948,
45119,
13,
22602,
1330,
3487,
1096,
198,
6738,
279,
4948,
45119,
13,
25695,
62,
19849,
1330,
20432,
17633,
198,
198,
35,
20530,
28,
600,
7,
418,
13,
268,
2268,
13,
1136,
10786,
35,
20530,
41707,
15,
6,
4008,
198,
361,
360,
20530,
25,
198,
220,
220,
220,
1330,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
220,
220,
220,
422,
285,
489,
62,
25981,
74,
896,
13,
76,
29487,
18,
67,
1330,
12176,
274,
18,
35,
198,
220,
220,
220,
422,
279,
4948,
45119,
13,
29487,
62,
26791,
1330,
7110,
62,
25695,
198,
198,
361,
11593,
3672,
834,
855,
6,
834,
12417,
834,
10354,
198,
220,
220,
220,
1332,
62,
36439,
62,
16302,
295,
3419,
198,
220,
220,
220,
1332,
62,
5460,
265,
3419,
198
] | 2.502924 | 171 |
name = "colored_graph"
| [
3672,
796,
366,
25717,
62,
34960,
1,
198
] | 2.875 | 8 |
import random
import math
from common import (
ROW_COUNT,
COLUMN_COUNT,
MINIMAX,
MONTE_CARLO,
RANDOM,
RANDOM_IMPR,
Observer,
)
YELLOW_PLAYER = 1
RED_PLAYER = -1
PLAYERS = {1: "Yellow", -1: "Red"}
class Bot(Observer):
"""
This class handles the different bots that were used.
It includes a Random Bot, an Improved Random Bot, the MCTS bot,
and the MiniMax bot.
"""
def __init__(
self, game, bot_type=None, depth=None, iteration=None, pruning=True
):
"""
Constructor of the Bot class.
:param game: corresponding Connect4Game instance
:param bot_type: specifies the bot (MCTS, MiniMax, Random, ...)
:param depth: depth used in the Minimax algorithm if the Minimax bot is used
:param iteration: number of iterations used in the MCTS algorithm in case the MCTS bot is used
:param pruning: boolean used for the pruning in the Minimax algorithm if the Minimax bot is used
"""
self._game = game
# Bot type determines how the bot picks his moves
self._type = bot_type
if self._type == MINIMAX:
self._depth = depth
self._pruning = pruning
elif self._type == MONTE_CARLO:
self._iteration = iteration
def make_move(self):
"""
Picks the column in which the bot should place the next disc.
The considered moving options depend on the bot type.
:return: the column number where the bot should play the next move
"""
# print(PLAYERS[self._game._turn] + " is about to play :")
column = None
# In case the bot type is RANDOM, the bot checks for winning moves, and if there aren't,
# then picks a valid random move.
if self._type == RANDOM:
win_col = self.get_winning_move()
if win_col is not None:
column = win_col
else:
column = self.get_random_move()
# In case the bot type is RANDOM IMPROVED, the bot checks for winning moves, and if there aren't,
# then checks if there is any move that blocks a direct winning move for the opponent.
# If there is no such move, it picks a valid random move.
elif self._type == RANDOM_IMPR:
win_col = self.get_winning_move()
if win_col is not None:
# print("Winning column :", win_col)
column = win_col
else:
def_move = self.get_defensive_move()
if def_move is not None:
# print("Defensive column :", def_move)
column = def_move
else:
column = self.get_random_move()
# print("Random move", column)
elif self._type == MINIMAX:
column, minimax_score = self.minimax(
self._game._board,
self._depth,
-math.inf,
math.inf,
True,
self._pruning,
)
# print(column)
elif self._type == MONTE_CARLO:
o = Node(self._game.copy_state())
column = self.monte_carlo_tree_search(self._iteration, o, 2.0)
else:
column = 0
# print("-------------------------")
self._game.place(column)
def get_winning_move(self):
"""
Checks whether there is a winning column available for the next
move of the bot.
:return: winning column
"""
column = None
for c_win in range(self._game._cols):
for r in range(self._game._rows):
if self._game._board[c_win][r] == 0:
self._game._board[c_win][r] = self._game._turn
is_winner = self._game.check_win((c_win, r))
self._game._board[c_win][r] = 0
if is_winner:
column = c_win
return column
break
return column
def get_valid_locations(self, board):
"""
Returns all the valid columns where the player can play, aka the columns
that are not full
:param board: actual state of the game, board of the game
:return: list of all valid column indices
"""
free_cols = []
for i in range(COLUMN_COUNT):
if board[i][ROW_COUNT - 1] == 0:
free_cols.append(i)
# print()
if len(free_cols) == 0:
return None
return free_cols
def get_random_move(self):
"""
Picks a valid random column where the bot can play his next move.
:return: valid random column
"""
free_cols = self.get_valid_locations(self._game._board)
column = random.choice(free_cols)
return column
def get_defensive_move(self):
"""
Checks whether the bot could play a move that blocks a direct winning
move from the opponent.
:return: column to be played to avoid losing immediatly
"""
column = None
for c_win in range(self._game._cols):
for r in range(self._game._rows):
if self._game._board[c_win][r] == 0:
self._game._board[c_win][r] = -1 * self._game._turn
is_winner = self._game.check_win((c_win, r))
self._game._board[c_win][r] = 0
if is_winner:
column = c_win
return column
break
return column
class Node:
"""
This class is used to represent nodes of the tree of boards used during
Monte-Carlo Tree Search.
"""
def add_child(self, child_state, move):
"""
Add a child to the current node.
:param child_state: state of the child to add
:param move: move to do to get to the newly added child
"""
child = Node(child_state, parent=self)
self.children.append(child)
self.children_moves.append(move)
def update(self, reward):
"""
Update the node's reward (indicates how good a certain node is
according to the MCTS algorithm)
:param reward: reward to be added to the node
"""
self.reward += reward
self.visits += 1
def fully_explored(self):
"""
Checks if the node is fully explored (which means we can not add
any more children to this node)
:return: True of False depending on if it is fully epxlored or not
"""
if len(self.children) == len(self.state.get_valid_locations()):
return True
return False
| [
11748,
4738,
198,
11748,
10688,
198,
6738,
2219,
1330,
357,
198,
220,
220,
220,
371,
3913,
62,
34,
28270,
11,
198,
220,
220,
220,
20444,
5883,
45,
62,
34,
28270,
11,
198,
220,
220,
220,
20625,
3955,
25922,
11,
198,
220,
220,
220,
25000,
9328,
62,
20034,
21982,
11,
198,
220,
220,
220,
46920,
2662,
11,
198,
220,
220,
220,
46920,
2662,
62,
3955,
4805,
11,
198,
220,
220,
220,
27058,
11,
198,
8,
198,
198,
56,
23304,
3913,
62,
31519,
1137,
796,
352,
198,
22083,
62,
31519,
1137,
796,
532,
16,
198,
198,
31519,
4877,
796,
1391,
16,
25,
366,
39499,
1600,
532,
16,
25,
366,
7738,
20662,
628,
198,
4871,
18579,
7,
31310,
18497,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
770,
1398,
17105,
262,
1180,
29641,
326,
547,
973,
13,
198,
220,
220,
220,
632,
3407,
257,
14534,
18579,
11,
281,
24125,
14534,
18579,
11,
262,
337,
4177,
50,
10214,
11,
198,
220,
220,
220,
290,
262,
12558,
11518,
10214,
13,
198,
220,
220,
220,
37227,
628,
220,
220,
220,
825,
11593,
15003,
834,
7,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
11,
983,
11,
10214,
62,
4906,
28,
14202,
11,
6795,
28,
14202,
11,
24415,
28,
14202,
11,
778,
46493,
28,
17821,
198,
220,
220,
220,
15179,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
28407,
273,
286,
262,
18579,
1398,
13,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
17143,
983,
25,
11188,
8113,
19,
8777,
4554,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
17143,
10214,
62,
4906,
25,
26052,
262,
10214,
357,
44,
4177,
50,
11,
12558,
11518,
11,
14534,
11,
2644,
8,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
17143,
6795,
25,
6795,
973,
287,
262,
1855,
320,
897,
11862,
611,
262,
1855,
320,
897,
10214,
318,
973,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
17143,
24415,
25,
1271,
286,
34820,
973,
287,
262,
337,
4177,
50,
11862,
287,
1339,
262,
337,
4177,
50,
10214,
318,
973,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
17143,
778,
46493,
25,
25131,
973,
329,
262,
778,
46493,
287,
262,
1855,
320,
897,
11862,
611,
262,
1855,
320,
897,
10214,
318,
973,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
6057,
796,
983,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
18579,
2099,
15947,
703,
262,
10214,
11103,
465,
6100,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
4906,
796,
10214,
62,
4906,
198,
220,
220,
220,
220,
220,
220,
220,
611,
2116,
13557,
4906,
6624,
20625,
3955,
25922,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
18053,
796,
6795,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
1050,
46493,
796,
778,
46493,
198,
220,
220,
220,
220,
220,
220,
220,
1288,
361,
2116,
13557,
4906,
6624,
25000,
9328,
62,
20034,
21982,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
2676,
341,
796,
24415,
628,
220,
220,
220,
825,
787,
62,
21084,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
33957,
262,
5721,
287,
543,
262,
10214,
815,
1295,
262,
1306,
1221,
13,
198,
220,
220,
220,
220,
220,
220,
220,
383,
3177,
3867,
3689,
4745,
319,
262,
10214,
2099,
13,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
7783,
25,
262,
5721,
1271,
810,
262,
10214,
815,
711,
262,
1306,
1445,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
3601,
7,
31519,
4877,
58,
944,
13557,
6057,
13557,
15344,
60,
1343,
366,
318,
546,
284,
711,
1058,
4943,
198,
220,
220,
220,
220,
220,
220,
220,
5721,
796,
6045,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
554,
1339,
262,
10214,
2099,
318,
46920,
2662,
11,
262,
10214,
8794,
329,
5442,
6100,
11,
290,
611,
612,
3588,
470,
11,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
788,
11103,
257,
4938,
4738,
1445,
13,
198,
220,
220,
220,
220,
220,
220,
220,
611,
2116,
13557,
4906,
6624,
46920,
2662,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1592,
62,
4033,
796,
2116,
13,
1136,
62,
14463,
62,
21084,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
1592,
62,
4033,
318,
407,
6045,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5721,
796,
1592,
62,
4033,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5721,
796,
2116,
13,
1136,
62,
25120,
62,
21084,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
554,
1339,
262,
10214,
2099,
318,
46920,
2662,
8959,
41283,
1961,
11,
262,
10214,
8794,
329,
5442,
6100,
11,
290,
611,
612,
3588,
470,
11,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
788,
8794,
611,
612,
318,
597,
1445,
326,
7021,
257,
1277,
5442,
1445,
329,
262,
6125,
13,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
1002,
612,
318,
645,
884,
1445,
11,
340,
11103,
257,
4938,
4738,
1445,
13,
198,
220,
220,
220,
220,
220,
220,
220,
1288,
361,
2116,
13557,
4906,
6624,
46920,
2662,
62,
3955,
4805,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1592,
62,
4033,
796,
2116,
13,
1136,
62,
14463,
62,
21084,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
1592,
62,
4033,
318,
407,
6045,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
3601,
7203,
16643,
768,
5721,
1058,
1600,
1592,
62,
4033,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5721,
796,
1592,
62,
4033,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
825,
62,
21084,
796,
2116,
13,
1136,
62,
4299,
2021,
62,
21084,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
825,
62,
21084,
318,
407,
6045,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
3601,
7203,
7469,
2021,
5721,
1058,
1600,
825,
62,
21084,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5721,
796,
825,
62,
21084,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5721,
796,
2116,
13,
1136,
62,
25120,
62,
21084,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
3601,
7203,
29531,
1445,
1600,
5721,
8,
198,
220,
220,
220,
220,
220,
220,
220,
1288,
361,
2116,
13557,
4906,
6624,
20625,
3955,
25922,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5721,
11,
10356,
897,
62,
26675,
796,
2116,
13,
1084,
320,
897,
7,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
6057,
13557,
3526,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
18053,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
532,
11018,
13,
10745,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
10688,
13,
10745,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6407,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
1050,
46493,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1267,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
3601,
7,
28665,
8,
198,
220,
220,
220,
220,
220,
220,
220,
1288,
361,
2116,
13557,
4906,
6624,
25000,
9328,
62,
20034,
21982,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
267,
796,
19081,
7,
944,
13557,
6057,
13,
30073,
62,
5219,
28955,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5721,
796,
2116,
13,
2144,
660,
62,
7718,
5439,
62,
21048,
62,
12947,
7,
944,
13557,
2676,
341,
11,
267,
11,
362,
13,
15,
8,
198,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5721,
796,
657,
628,
220,
220,
220,
220,
220,
220,
220,
1303,
3601,
7203,
22369,
12,
4943,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
6057,
13,
5372,
7,
28665,
8,
628,
220,
220,
220,
825,
651,
62,
14463,
62,
21084,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
47719,
1771,
612,
318,
257,
5442,
5721,
1695,
329,
262,
1306,
198,
220,
220,
220,
220,
220,
220,
220,
1445,
286,
262,
10214,
13,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
7783,
25,
5442,
5721,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
5721,
796,
6045,
198,
220,
220,
220,
220,
220,
220,
220,
329,
269,
62,
5404,
287,
2837,
7,
944,
13557,
6057,
13557,
4033,
82,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
374,
287,
2837,
7,
944,
13557,
6057,
13557,
8516,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
2116,
13557,
6057,
13557,
3526,
58,
66,
62,
5404,
7131,
81,
60,
6624,
657,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
6057,
13557,
3526,
58,
66,
62,
5404,
7131,
81,
60,
796,
2116,
13557,
6057,
13557,
15344,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
318,
62,
39791,
796,
2116,
13557,
6057,
13,
9122,
62,
5404,
19510,
66,
62,
5404,
11,
374,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
6057,
13557,
3526,
58,
66,
62,
5404,
7131,
81,
60,
796,
657,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
318,
62,
39791,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5721,
796,
269,
62,
5404,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1441,
5721,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2270,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
5721,
628,
220,
220,
220,
825,
651,
62,
12102,
62,
17946,
602,
7,
944,
11,
3096,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
16409,
477,
262,
4938,
15180,
810,
262,
2137,
460,
711,
11,
22430,
262,
15180,
198,
220,
220,
220,
220,
220,
220,
220,
326,
389,
407,
1336,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
17143,
3096,
25,
4036,
1181,
286,
262,
983,
11,
3096,
286,
262,
983,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
7783,
25,
1351,
286,
477,
4938,
5721,
36525,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
1479,
62,
4033,
82,
796,
17635,
198,
220,
220,
220,
220,
220,
220,
220,
329,
1312,
287,
2837,
7,
25154,
5883,
45,
62,
34,
28270,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
3096,
58,
72,
7131,
49,
3913,
62,
34,
28270,
532,
352,
60,
6624,
657,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1479,
62,
4033,
82,
13,
33295,
7,
72,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
3601,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
611,
18896,
7,
5787,
62,
4033,
82,
8,
6624,
657,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1441,
6045,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
1479,
62,
4033,
82,
628,
220,
220,
220,
825,
651,
62,
25120,
62,
21084,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
33957,
257,
4938,
4738,
5721,
810,
262,
10214,
460,
711,
465,
1306,
1445,
13,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
7783,
25,
4938,
4738,
5721,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
1479,
62,
4033,
82,
796,
2116,
13,
1136,
62,
12102,
62,
17946,
602,
7,
944,
13557,
6057,
13557,
3526,
8,
198,
220,
220,
220,
220,
220,
220,
220,
5721,
796,
4738,
13,
25541,
7,
5787,
62,
4033,
82,
8,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
5721,
628,
220,
220,
220,
825,
651,
62,
4299,
2021,
62,
21084,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
47719,
1771,
262,
10214,
714,
711,
257,
1445,
326,
7021,
257,
1277,
5442,
198,
220,
220,
220,
220,
220,
220,
220,
1445,
422,
262,
6125,
13,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
7783,
25,
5721,
284,
307,
2826,
284,
3368,
6078,
2729,
265,
306,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
5721,
796,
6045,
198,
220,
220,
220,
220,
220,
220,
220,
329,
269,
62,
5404,
287,
2837,
7,
944,
13557,
6057,
13557,
4033,
82,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
374,
287,
2837,
7,
944,
13557,
6057,
13557,
8516,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
2116,
13557,
6057,
13557,
3526,
58,
66,
62,
5404,
7131,
81,
60,
6624,
657,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
6057,
13557,
3526,
58,
66,
62,
5404,
7131,
81,
60,
796,
532,
16,
1635,
2116,
13557,
6057,
13557,
15344,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
318,
62,
39791,
796,
2116,
13557,
6057,
13,
9122,
62,
5404,
19510,
66,
62,
5404,
11,
374,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
6057,
13557,
3526,
58,
66,
62,
5404,
7131,
81,
60,
796,
657,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
318,
62,
39791,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5721,
796,
269,
62,
5404,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1441,
5721,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2270,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
5721,
628,
198,
4871,
19081,
25,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
770,
1398,
318,
973,
284,
2380,
13760,
286,
262,
5509,
286,
11490,
973,
1141,
198,
220,
220,
220,
22489,
12,
9914,
5439,
12200,
11140,
13,
198,
220,
220,
220,
37227,
628,
220,
220,
220,
825,
751,
62,
9410,
7,
944,
11,
1200,
62,
5219,
11,
1445,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
3060,
257,
1200,
284,
262,
1459,
10139,
13,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
17143,
1200,
62,
5219,
25,
1181,
286,
262,
1200,
284,
751,
198,
220,
220,
220,
220,
220,
220,
220,
1058,
17143,
1445,
25,
1445,
284,
466,
284,
651,
284,
262,
8308,
2087,
1200,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
1200,
796,
19081,
7,
9410,
62,
5219,
11,
2560,
28,
944,
8,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
17197,
13,
33295,
7,
9410,
8,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
17197,
62,
76,
5241,
13,
33295,
7,
21084,
8,
628,
220,
220,
220,
825,
4296,
7,
944,
11,
6721,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
10133,
262,
10139,
338,
6721,
357,
521,
16856,
703,
922,
257,
1728,
10139,
318,
198,
220,
220,
220,
220,
220,
220,
220,
1864,
284,
262,
337,
4177,
50,
11862,
8,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
17143,
6721,
25,
6721,
284,
307,
2087,
284,
262,
10139,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
260,
904,
15853,
6721,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
4703,
896,
15853,
352,
628,
220,
220,
220,
825,
3938,
62,
20676,
1850,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
47719,
611,
262,
10139,
318,
3938,
18782,
357,
4758,
1724,
356,
460,
407,
751,
198,
220,
220,
220,
220,
220,
220,
220,
597,
517,
1751,
284,
428,
10139,
8,
628,
220,
220,
220,
220,
220,
220,
220,
1058,
7783,
25,
6407,
286,
10352,
6906,
319,
611,
340,
318,
3938,
2462,
87,
75,
1850,
393,
407,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
611,
18896,
7,
944,
13,
17197,
8,
6624,
18896,
7,
944,
13,
5219,
13,
1136,
62,
12102,
62,
17946,
602,
3419,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1441,
6407,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
10352,
198
] | 2.143263 | 3,169 |
from revoscalepy import rx_lin_mod, rx_serialize_model, rx_summary
import pandas as pd
import pyodbc
import os
conn_str = 'Driver=SQL Server;Server=<Server Name>;Database=MLDB;Uid=<User Name>;Pwd=<Password>;'
cnxn = pyodbc.connect(conn_str)
cnxn.setencoding("utf-8")
inputsql = 'select "RentalCount", "Year", "Month", "Day", "WeekDay", "Snow", "Holiday", "FWeekDay" from dbo.rental_data where Year < 2015'
rental_train_data = pd.read_sql(inputsql, cnxn)
rental_train_data["Holiday"] = rental_train_data["Holiday"].astype("category")
rental_train_data["Snow"] = rental_train_data["Snow"].astype("category")
rental_train_data["WeekDay"] = rental_train_data["WeekDay"].astype("category")
linmod_model = rx_lin_mod("RentalCount ~ Month + Day + WeekDay + Snow + Holiday", data = rental_train_data)
trained_model = rx_serialize_model(linmod_model, realtime_scoring_only = True)
print(rx_summary("RentalCount ~ Month + Day + WeekDay + Snow + Holiday", rental_train_data))
# Dump learned model to file
with open(r'c:\model\trained_model.pickle', mode='wb') as f:
f.write(trained_model)
# Dump learned model to Table
cursor=cnxn.cursor()
cursor.execute(\
'''
MERGE rental_models AS target
USING (SELECT ? as model_name) AS source
ON(target.model_name = source.model_name)
WHEN MATCHED THEN UPDATE SET native_model = ?
WHEN NOT MATCHED BY TARGET THEN INSERT (model_name, lang, native_model) VALUES(?,?,?);
''', \
("linear_model", trained_model, "linear_model", "Python", trained_model))
cnxn.commit()
| [
6738,
2710,
418,
9948,
538,
88,
1330,
374,
87,
62,
2815,
62,
4666,
11,
374,
87,
62,
46911,
1096,
62,
19849,
11,
374,
87,
62,
49736,
198,
11748,
19798,
292,
355,
279,
67,
198,
11748,
12972,
375,
15630,
198,
11748,
28686,
198,
198,
37043,
62,
2536,
796,
705,
32103,
28,
17861,
9652,
26,
10697,
28,
27,
10697,
6530,
29,
26,
38105,
28,
5805,
11012,
26,
52,
312,
28,
27,
12982,
6530,
29,
26,
47,
16993,
28,
27,
35215,
29,
26,
6,
198,
31522,
87,
77,
796,
12972,
375,
15630,
13,
8443,
7,
37043,
62,
2536,
8,
198,
31522,
87,
77,
13,
2617,
12685,
7656,
7203,
40477,
12,
23,
4943,
198,
15414,
25410,
796,
705,
19738,
366,
49,
2470,
12332,
1600,
366,
17688,
1600,
366,
31948,
1600,
366,
12393,
1600,
366,
20916,
12393,
1600,
366,
28974,
1600,
366,
28115,
2567,
1600,
366,
37,
20916,
12393,
1,
422,
288,
2127,
13,
1156,
282,
62,
7890,
810,
6280,
1279,
1853,
6,
198,
1156,
282,
62,
27432,
62,
7890,
796,
279,
67,
13,
961,
62,
25410,
7,
15414,
25410,
11,
269,
77,
87,
77,
8,
198,
198,
1156,
282,
62,
27432,
62,
7890,
14692,
28115,
2567,
8973,
796,
14447,
62,
27432,
62,
7890,
14692,
28115,
2567,
1,
4083,
459,
2981,
7203,
22872,
4943,
198,
1156,
282,
62,
27432,
62,
7890,
14692,
28974,
8973,
796,
14447,
62,
27432,
62,
7890,
14692,
28974,
1,
4083,
459,
2981,
7203,
22872,
4943,
198,
1156,
282,
62,
27432,
62,
7890,
14692,
20916,
12393,
8973,
796,
14447,
62,
27432,
62,
7890,
14692,
20916,
12393,
1,
4083,
459,
2981,
7203,
22872,
4943,
198,
198,
2815,
4666,
62,
19849,
796,
374,
87,
62,
2815,
62,
4666,
7203,
49,
2470,
12332,
5299,
16061,
1343,
3596,
1343,
6119,
12393,
1343,
7967,
1343,
22770,
1600,
1366,
796,
14447,
62,
27432,
62,
7890,
8,
198,
35311,
62,
19849,
796,
374,
87,
62,
46911,
1096,
62,
19849,
7,
2815,
4666,
62,
19849,
11,
1103,
2435,
62,
46536,
62,
8807,
796,
6407,
8,
198,
198,
4798,
7,
40914,
62,
49736,
7203,
49,
2470,
12332,
5299,
16061,
1343,
3596,
1343,
6119,
12393,
1343,
7967,
1343,
22770,
1600,
14447,
62,
27432,
62,
7890,
4008,
198,
198,
2,
360,
931,
4499,
2746,
284,
2393,
198,
4480,
1280,
7,
81,
6,
66,
7479,
19849,
59,
35311,
62,
19849,
13,
27729,
293,
3256,
4235,
11639,
39346,
11537,
355,
277,
25,
198,
220,
220,
220,
277,
13,
13564,
7,
35311,
62,
19849,
8,
198,
198,
2,
360,
931,
4499,
2746,
284,
8655,
198,
66,
21471,
28,
31522,
87,
77,
13,
66,
21471,
3419,
198,
66,
21471,
13,
41049,
38016,
198,
7061,
6,
198,
29296,
8264,
14447,
62,
27530,
7054,
2496,
198,
2937,
2751,
357,
46506,
5633,
355,
2746,
62,
3672,
8,
7054,
2723,
198,
1340,
7,
16793,
13,
19849,
62,
3672,
796,
2723,
13,
19849,
62,
3672,
8,
198,
12418,
1677,
337,
11417,
1961,
42243,
35717,
25823,
6868,
62,
19849,
796,
5633,
198,
12418,
1677,
5626,
337,
11417,
1961,
11050,
309,
46095,
42243,
29194,
17395,
357,
19849,
62,
3672,
11,
42392,
11,
6868,
62,
19849,
8,
26173,
35409,
7,
21747,
21747,
30,
1776,
198,
7061,
3256,
3467,
198,
7203,
29127,
62,
19849,
1600,
8776,
62,
19849,
11,
366,
29127,
62,
19849,
1600,
366,
37906,
1600,
8776,
62,
19849,
4008,
198,
31522,
87,
77,
13,
41509,
3419,
198
] | 2.764273 | 543 |
import numpy as np
def uniform_grid(n_centers, low, high):
"""
This function is used to create the parameters of uniformly spaced radial
basis functions with 25% of overlap. It creates a uniformly spaced grid of
``n_centers[i]`` points in each ``ranges[i]``. Also returns a vector
containing the appropriate scales of the radial basis functions.
Args:
n_centers (list): number of centers of each dimension;
low (np.ndarray): lowest value for each dimension;
high (np.ndarray): highest value for each dimension.
Returns:
The uniformly spaced grid and the scale vector.
"""
n_features = len(low)
b = np.zeros(n_features)
c = list()
tot_points = 1
for i, n in enumerate(n_centers):
start = low[i]
end = high[i]
b[i] = (end - start) ** 2 / n ** 3
m = abs(start - end) / n
if n == 1:
c_i = (start + end) / 2.
c.append(np.array([c_i]))
else:
c_i = np.linspace(start - m * .1, end + m * .1, n)
c.append(c_i)
tot_points *= n
n_rows = 1
n_cols = 0
grid = np.zeros((tot_points, n_features))
for discrete_values in c:
i1 = 0
dim = len(discrete_values)
for i in range(dim):
for r in range(n_rows):
idx_r = r + i * n_rows
for c in range(n_cols):
grid[idx_r, c] = grid[r, c]
grid[idx_r, n_cols] = discrete_values[i1]
i1 += 1
n_cols += 1
n_rows *= len(discrete_values)
return grid, b
| [
11748,
299,
32152,
355,
45941,
628,
198,
4299,
8187,
62,
25928,
7,
77,
62,
1087,
364,
11,
1877,
11,
1029,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
770,
2163,
318,
973,
284,
2251,
262,
10007,
286,
42096,
38980,
44503,
198,
220,
220,
220,
4308,
5499,
351,
1679,
4,
286,
21721,
13,
632,
8075,
257,
42096,
38980,
10706,
286,
198,
220,
220,
220,
7559,
77,
62,
1087,
364,
58,
72,
60,
15506,
2173,
287,
1123,
7559,
81,
6231,
58,
72,
60,
15506,
13,
4418,
5860,
257,
15879,
198,
220,
220,
220,
7268,
262,
5035,
16252,
286,
262,
44503,
4308,
5499,
13,
628,
220,
220,
220,
943,
14542,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
299,
62,
1087,
364,
357,
4868,
2599,
1271,
286,
10399,
286,
1123,
15793,
26,
198,
220,
220,
220,
220,
220,
220,
220,
220,
1877,
357,
37659,
13,
358,
18747,
2599,
9016,
1988,
329,
1123,
15793,
26,
198,
220,
220,
220,
220,
220,
220,
220,
220,
1029,
357,
37659,
13,
358,
18747,
2599,
4511,
1988,
329,
1123,
15793,
13,
628,
220,
220,
220,
16409,
25,
198,
220,
220,
220,
220,
220,
220,
220,
383,
42096,
38980,
10706,
290,
262,
5046,
15879,
13,
628,
220,
220,
220,
37227,
198,
220,
220,
220,
299,
62,
40890,
796,
18896,
7,
9319,
8,
198,
220,
220,
220,
275,
796,
45941,
13,
9107,
418,
7,
77,
62,
40890,
8,
198,
220,
220,
220,
269,
796,
1351,
3419,
198,
220,
220,
220,
2006,
62,
13033,
796,
352,
198,
220,
220,
220,
329,
1312,
11,
299,
287,
27056,
378,
7,
77,
62,
1087,
364,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
923,
796,
1877,
58,
72,
60,
198,
220,
220,
220,
220,
220,
220,
220,
886,
796,
1029,
58,
72,
60,
628,
220,
220,
220,
220,
220,
220,
220,
275,
58,
72,
60,
796,
357,
437,
532,
923,
8,
12429,
362,
1220,
299,
12429,
513,
198,
220,
220,
220,
220,
220,
220,
220,
285,
796,
2352,
7,
9688,
532,
886,
8,
1220,
299,
198,
220,
220,
220,
220,
220,
220,
220,
611,
299,
6624,
352,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
269,
62,
72,
796,
357,
9688,
1343,
886,
8,
1220,
362,
13,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
269,
13,
33295,
7,
37659,
13,
18747,
26933,
66,
62,
72,
60,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
269,
62,
72,
796,
45941,
13,
21602,
10223,
7,
9688,
532,
285,
1635,
764,
16,
11,
886,
1343,
285,
1635,
764,
16,
11,
299,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
269,
13,
33295,
7,
66,
62,
72,
8,
198,
220,
220,
220,
220,
220,
220,
220,
2006,
62,
13033,
1635,
28,
299,
628,
220,
220,
220,
299,
62,
8516,
796,
352,
198,
220,
220,
220,
299,
62,
4033,
82,
796,
657,
628,
220,
220,
220,
10706,
796,
45941,
13,
9107,
418,
19510,
83,
313,
62,
13033,
11,
299,
62,
40890,
4008,
628,
220,
220,
220,
329,
28810,
62,
27160,
287,
269,
25,
198,
220,
220,
220,
220,
220,
220,
220,
1312,
16,
796,
657,
198,
220,
220,
220,
220,
220,
220,
220,
5391,
796,
18896,
7,
15410,
8374,
62,
27160,
8,
628,
220,
220,
220,
220,
220,
220,
220,
329,
1312,
287,
2837,
7,
27740,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
374,
287,
2837,
7,
77,
62,
8516,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4686,
87,
62,
81,
796,
374,
1343,
1312,
1635,
299,
62,
8516,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
269,
287,
2837,
7,
77,
62,
4033,
82,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
10706,
58,
312,
87,
62,
81,
11,
269,
60,
796,
10706,
58,
81,
11,
269,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
10706,
58,
312,
87,
62,
81,
11,
299,
62,
4033,
82,
60,
796,
28810,
62,
27160,
58,
72,
16,
60,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1312,
16,
15853,
352,
628,
220,
220,
220,
220,
220,
220,
220,
299,
62,
4033,
82,
15853,
352,
198,
220,
220,
220,
220,
220,
220,
220,
299,
62,
8516,
1635,
28,
18896,
7,
15410,
8374,
62,
27160,
8,
628,
220,
220,
220,
1441,
10706,
11,
275,
198
] | 2.086008 | 779 |
from PyQt5.QtChart import *
import PyQt5.QtCore as QtCore
import PyQt5.QtGui as QtGui
import PyQt5.QtWidgets as QtWidgets
import config
import nav
import yfinance as yf
| [
6738,
9485,
48,
83,
20,
13,
48,
83,
45488,
1330,
1635,
198,
11748,
9485,
48,
83,
20,
13,
48,
83,
14055,
355,
33734,
14055,
198,
11748,
9485,
48,
83,
20,
13,
48,
83,
8205,
72,
355,
33734,
8205,
72,
198,
11748,
9485,
48,
83,
20,
13,
48,
83,
54,
312,
11407,
355,
33734,
54,
312,
11407,
198,
198,
11748,
4566,
198,
11748,
6812,
198,
11748,
331,
69,
14149,
355,
331,
69,
628,
628,
628,
628,
628,
628,
628,
198
] | 2.329114 | 79 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Maximum flow by Dinic
# jill-jênn vie et christoph dürr - 2015-2018
from collections import deque
from sys import setrecursionlimit
from tryalgo.graph import add_reverse_arcs
setrecursionlimit(5010) # necessary for big graphs
# snip{
def dinic(graph, capacity, source, target):
"""Maximum flow by Dinic
:param graph: directed graph in listlist or listdict format
:param capacity: in matrix format or same listdict graph
:param int source: vertex
:param int target: vertex
:returns: skew symmetric flow matrix, flow value
:complexity: :math:`O(|V|^2 |E|)`
"""
assert source != target
add_reverse_arcs(graph, capacity)
Q = deque()
total = 0
n = len(graph)
flow = [[0] * n for u in range(n)] # flow initially empty
while True: # repeat while we can increase
Q.appendleft(source)
lev = [None] * n # build levels, None = inaccessible
lev[source] = 0 # by BFS
while Q:
u = Q.pop()
for v in graph[u]:
if lev[v] is None and capacity[u][v] > flow[u][v]:
lev[v] = lev[u] + 1
Q.appendleft(v)
if lev[target] is None: # stop if sink is not reachable
return flow, total
up_bound = sum(capacity[source][v] for v in graph[source]) - total
total += _dinic_step(graph, capacity, lev, flow, source, target,
up_bound)
def _dinic_step(graph, capacity, lev, flow, u, target, limit):
""" tenter de pousser le plus de flot de u à target, sans dépasser limit
"""
if limit <= 0:
return 0
if u == target:
return limit
val = 0
for v in graph[u]:
residual = capacity[u][v] - flow[u][v]
if lev[v] == lev[u] + 1 and residual > 0:
z = min(limit, residual)
aug = _dinic_step(graph, capacity, lev, flow, v, target, z)
flow[u][v] += aug
flow[v][u] -= aug
val += aug
limit -= aug
if val == 0:
lev[u] = None # remove unreachable node
return val
# snip}
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
22246,
5202,
416,
23448,
291,
198,
2,
474,
359,
12,
73,
25792,
20471,
410,
494,
2123,
33826,
2522,
288,
9116,
21062,
532,
1853,
12,
7908,
628,
198,
6738,
17268,
1330,
390,
4188,
198,
6738,
25064,
1330,
900,
8344,
24197,
32374,
198,
6738,
1949,
282,
2188,
13,
34960,
1330,
751,
62,
50188,
62,
5605,
82,
628,
198,
2617,
8344,
24197,
32374,
7,
20,
20943,
8,
220,
1303,
3306,
329,
1263,
28770,
628,
198,
2,
3013,
541,
90,
198,
4299,
16278,
291,
7,
34960,
11,
5339,
11,
2723,
11,
2496,
2599,
198,
220,
220,
220,
37227,
40541,
5202,
416,
23448,
291,
628,
220,
220,
220,
1058,
17143,
4823,
25,
7924,
4823,
287,
1351,
4868,
393,
1351,
11600,
5794,
198,
220,
220,
220,
1058,
17143,
5339,
25,
287,
17593,
5794,
393,
976,
1351,
11600,
4823,
198,
220,
220,
220,
1058,
17143,
493,
2723,
25,
37423,
198,
220,
220,
220,
1058,
17143,
493,
2496,
25,
37423,
198,
220,
220,
220,
1058,
7783,
82,
25,
43370,
23606,
19482,
5202,
17593,
11,
5202,
1988,
198,
220,
220,
220,
1058,
41887,
414,
25,
1058,
11018,
25,
63,
46,
7,
91,
53,
91,
61,
17,
930,
36,
91,
8,
63,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
6818,
2723,
14512,
2496,
198,
220,
220,
220,
751,
62,
50188,
62,
5605,
82,
7,
34960,
11,
5339,
8,
198,
220,
220,
220,
1195,
796,
390,
4188,
3419,
198,
220,
220,
220,
2472,
796,
657,
198,
220,
220,
220,
299,
796,
18896,
7,
34960,
8,
198,
220,
220,
220,
5202,
796,
16410,
15,
60,
1635,
299,
329,
334,
287,
2837,
7,
77,
15437,
220,
220,
1303,
5202,
7317,
6565,
198,
220,
220,
220,
981,
6407,
25,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
9585,
981,
356,
460,
2620,
198,
220,
220,
220,
220,
220,
220,
220,
1195,
13,
33295,
9464,
7,
10459,
8,
198,
220,
220,
220,
220,
220,
220,
220,
23145,
796,
685,
14202,
60,
1635,
299,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
1382,
2974,
11,
6045,
796,
40604,
198,
220,
220,
220,
220,
220,
220,
220,
23145,
58,
10459,
60,
796,
657,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
416,
347,
10652,
198,
220,
220,
220,
220,
220,
220,
220,
981,
1195,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
334,
796,
1195,
13,
12924,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
410,
287,
4823,
58,
84,
5974,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
23145,
58,
85,
60,
318,
6045,
290,
5339,
58,
84,
7131,
85,
60,
1875,
5202,
58,
84,
7131,
85,
5974,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
23145,
58,
85,
60,
796,
23145,
58,
84,
60,
1343,
352,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1195,
13,
33295,
9464,
7,
85,
8,
628,
220,
220,
220,
220,
220,
220,
220,
611,
23145,
58,
16793,
60,
318,
6045,
25,
220,
220,
1303,
2245,
611,
14595,
318,
407,
3151,
540,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1441,
5202,
11,
2472,
198,
220,
220,
220,
220,
220,
220,
220,
510,
62,
7784,
796,
2160,
7,
42404,
58,
10459,
7131,
85,
60,
329,
410,
287,
4823,
58,
10459,
12962,
532,
2472,
198,
220,
220,
220,
220,
220,
220,
220,
2472,
15853,
4808,
25194,
291,
62,
9662,
7,
34960,
11,
5339,
11,
23145,
11,
5202,
11,
2723,
11,
2496,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
510,
62,
7784,
8,
628,
198,
4299,
4808,
25194,
291,
62,
9662,
7,
34960,
11,
5339,
11,
23145,
11,
5202,
11,
334,
11,
2496,
11,
4179,
2599,
198,
220,
220,
220,
37227,
256,
9255,
390,
279,
516,
2655,
443,
5556,
390,
781,
313,
390,
334,
28141,
2496,
11,
38078,
39073,
6603,
263,
4179,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
611,
4179,
19841,
657,
25,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
657,
198,
220,
220,
220,
611,
334,
6624,
2496,
25,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
4179,
198,
220,
220,
220,
1188,
796,
657,
198,
220,
220,
220,
329,
410,
287,
4823,
58,
84,
5974,
198,
220,
220,
220,
220,
220,
220,
220,
29598,
796,
5339,
58,
84,
7131,
85,
60,
532,
5202,
58,
84,
7131,
85,
60,
198,
220,
220,
220,
220,
220,
220,
220,
611,
23145,
58,
85,
60,
6624,
23145,
58,
84,
60,
1343,
352,
290,
29598,
1875,
657,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1976,
796,
949,
7,
32374,
11,
29598,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
16339,
796,
4808,
25194,
291,
62,
9662,
7,
34960,
11,
5339,
11,
23145,
11,
5202,
11,
410,
11,
2496,
11,
1976,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5202,
58,
84,
7131,
85,
60,
15853,
16339,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5202,
58,
85,
7131,
84,
60,
48185,
16339,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1188,
15853,
16339,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4179,
48185,
16339,
198,
220,
220,
220,
611,
1188,
6624,
657,
25,
198,
220,
220,
220,
220,
220,
220,
220,
23145,
58,
84,
60,
796,
6045,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
4781,
14880,
34446,
10139,
198,
220,
220,
220,
1441,
1188,
198,
2,
3013,
541,
92,
198
] | 2.195025 | 1,005 |
#!/usr/bin/env python
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Finetuning multi-lingual models on XNLI (e.g. Bert, DistilBERT, XLM).
Adapted from `examples/text-classification/run_glue.py`"""
import logging
import os
import random
import sys
from dataclasses import dataclass, field
from typing import Optional
import datasets
import numpy as np
from datasets import load_dataset, load_metric
import transformers
from transformers import (
AutoConfig,
AutoModelForSequenceClassification,
AutoTokenizer,
DataCollatorWithPadding,
EvalPrediction,
HfArgumentParser,
Trainer,
TrainingArguments,
default_data_collator,
set_seed,
)
from transformers.trainer_utils import get_last_checkpoint
from transformers.utils import check_min_version
from transformers.utils.versions import require_version
# Will error if the minimal version of Transformers is not installed. Remove at your own risks.
check_min_version("4.20.0.dev0")
require_version("datasets>=1.8.0", "To fix: pip install -r examples/pytorch/text-classification/requirements.txt")
logger = logging.getLogger(__name__)
@dataclass
class DataTrainingArguments:
"""
Arguments pertaining to what data we are going to input our model for training and eval.
Using `HfArgumentParser` we can turn this class
into argparse arguments to be able to specify them on
the command line.
"""
max_seq_length: Optional[int] = field(
default=128,
metadata={
"help": (
"The maximum total input sequence length after tokenization. Sequences longer "
"than this will be truncated, sequences shorter will be padded."
)
},
)
overwrite_cache: bool = field(
default=False, metadata={"help": "Overwrite the cached preprocessed datasets or not."}
)
pad_to_max_length: bool = field(
default=True,
metadata={
"help": (
"Whether to pad all samples to `max_seq_length`. "
"If False, will pad the samples dynamically when batching to the maximum length in the batch."
)
},
)
max_train_samples: Optional[int] = field(
default=None,
metadata={
"help": (
"For debugging purposes or quicker training, truncate the number of training examples to this "
"value if set."
)
},
)
max_eval_samples: Optional[int] = field(
default=None,
metadata={
"help": (
"For debugging purposes or quicker training, truncate the number of evaluation examples to this "
"value if set."
)
},
)
max_predict_samples: Optional[int] = field(
default=None,
metadata={
"help": (
"For debugging purposes or quicker training, truncate the number of prediction examples to this "
"value if set."
)
},
)
server_ip: Optional[str] = field(default=None, metadata={"help": "For distant debugging."})
server_port: Optional[str] = field(default=None, metadata={"help": "For distant debugging."})
@dataclass
class ModelArguments:
"""
Arguments pertaining to which model/config/tokenizer we are going to fine-tune from.
"""
model_name_or_path: str = field(
default=None, metadata={"help": "Path to pretrained model or model identifier from huggingface.co/models"}
)
language: str = field(
default=None, metadata={"help": "Evaluation language. Also train language if `train_language` is set to None."}
)
train_language: Optional[str] = field(
default=None, metadata={"help": "Train language if it is different from the evaluation language."}
)
config_name: Optional[str] = field(
default=None, metadata={"help": "Pretrained config name or path if not the same as model_name"}
)
tokenizer_name: Optional[str] = field(
default=None, metadata={"help": "Pretrained tokenizer name or path if not the same as model_name"}
)
cache_dir: Optional[str] = field(
default=None,
metadata={"help": "Where do you want to store the pretrained models downloaded from huggingface.co"},
)
do_lower_case: Optional[bool] = field(
default=False,
metadata={"help": "arg to indicate if tokenizer should do lower case in AutoTokenizer.from_pretrained()"},
)
use_fast_tokenizer: bool = field(
default=True,
metadata={"help": "Whether to use one of the fast tokenizer (backed by the tokenizers library) or not."},
)
model_revision: str = field(
default="main",
metadata={"help": "The specific model version to use (can be a branch name, tag name or commit id)."},
)
use_auth_token: bool = field(
default=False,
metadata={
"help": (
"Will use the token generated when running `transformers-cli login` (necessary to use this script "
"with private models)."
)
},
)
ignore_mismatched_sizes: bool = field(
default=False,
metadata={"help": "Will enable to load a pretrained model whose head dimensions are different."},
)
if __name__ == "__main__":
main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
19617,
28,
40477,
12,
23,
198,
2,
15069,
2864,
383,
3012,
9552,
15417,
4816,
46665,
290,
383,
12905,
2667,
32388,
3457,
13,
1074,
13,
198,
2,
15069,
357,
66,
8,
2864,
11,
15127,
23929,
44680,
6234,
13,
220,
1439,
2489,
10395,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
198,
2,
921,
743,
7330,
257,
4866,
286,
262,
13789,
379,
198,
2,
198,
2,
220,
220,
220,
220,
2638,
1378,
2503,
13,
43073,
13,
2398,
14,
677,
4541,
14,
43,
2149,
24290,
12,
17,
13,
15,
198,
2,
198,
2,
17486,
2672,
416,
9723,
1099,
393,
4987,
284,
287,
3597,
11,
3788,
198,
2,
9387,
739,
262,
13789,
318,
9387,
319,
281,
366,
1921,
3180,
1,
29809,
1797,
11,
198,
2,
42881,
34764,
11015,
6375,
7102,
49828,
11053,
3963,
15529,
509,
12115,
11,
2035,
4911,
393,
17142,
13,
198,
2,
4091,
262,
13789,
329,
262,
2176,
3303,
15030,
21627,
290,
198,
2,
11247,
739,
262,
13789,
13,
198,
37811,
4463,
316,
46493,
5021,
12,
1359,
723,
4981,
319,
1395,
45,
31271,
357,
68,
13,
70,
13,
22108,
11,
4307,
346,
13246,
51,
11,
16276,
44,
737,
198,
220,
220,
220,
30019,
276,
422,
4600,
1069,
12629,
14,
5239,
12,
4871,
2649,
14,
5143,
62,
4743,
518,
13,
9078,
63,
37811,
198,
198,
11748,
18931,
198,
11748,
28686,
198,
11748,
4738,
198,
11748,
25064,
198,
6738,
4818,
330,
28958,
1330,
4818,
330,
31172,
11,
2214,
198,
6738,
19720,
1330,
32233,
198,
198,
11748,
40522,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
40522,
1330,
3440,
62,
19608,
292,
316,
11,
3440,
62,
4164,
1173,
198,
198,
11748,
6121,
364,
198,
6738,
6121,
364,
1330,
357,
198,
220,
220,
220,
11160,
16934,
11,
198,
220,
220,
220,
11160,
17633,
1890,
44015,
594,
9487,
2649,
11,
198,
220,
220,
220,
11160,
30642,
7509,
11,
198,
220,
220,
220,
6060,
22667,
1352,
3152,
47,
26872,
11,
198,
220,
220,
220,
26439,
39156,
2867,
11,
198,
220,
220,
220,
367,
69,
28100,
1713,
46677,
11,
198,
220,
220,
220,
31924,
11,
198,
220,
220,
220,
13614,
28100,
2886,
11,
198,
220,
220,
220,
4277,
62,
7890,
62,
26000,
1352,
11,
198,
220,
220,
220,
900,
62,
28826,
11,
198,
8,
198,
6738,
6121,
364,
13,
2213,
10613,
62,
26791,
1330,
651,
62,
12957,
62,
9122,
4122,
198,
6738,
6121,
364,
13,
26791,
1330,
2198,
62,
1084,
62,
9641,
198,
6738,
6121,
364,
13,
26791,
13,
47178,
1330,
2421,
62,
9641,
628,
198,
2,
2561,
4049,
611,
262,
10926,
2196,
286,
39185,
318,
407,
6589,
13,
17220,
379,
534,
898,
7476,
13,
198,
9122,
62,
1084,
62,
9641,
7203,
19,
13,
1238,
13,
15,
13,
7959,
15,
4943,
198,
198,
46115,
62,
9641,
7203,
19608,
292,
1039,
29,
28,
16,
13,
23,
13,
15,
1600,
366,
2514,
4259,
25,
7347,
2721,
532,
81,
6096,
14,
9078,
13165,
354,
14,
5239,
12,
4871,
2649,
14,
8897,
18883,
13,
14116,
4943,
198,
198,
6404,
1362,
796,
18931,
13,
1136,
11187,
1362,
7,
834,
3672,
834,
8,
628,
198,
31,
19608,
330,
31172,
198,
4871,
6060,
44357,
28100,
2886,
25,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
20559,
2886,
27113,
284,
644,
1366,
356,
389,
1016,
284,
5128,
674,
2746,
329,
3047,
290,
5418,
13,
628,
220,
220,
220,
8554,
4600,
39,
69,
28100,
1713,
46677,
63,
356,
460,
1210,
428,
1398,
198,
220,
220,
220,
656,
1822,
29572,
7159,
284,
307,
1498,
284,
11986,
606,
319,
198,
220,
220,
220,
262,
3141,
1627,
13,
198,
220,
220,
220,
37227,
628,
220,
220,
220,
3509,
62,
41068,
62,
13664,
25,
32233,
58,
600,
60,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
12762,
11,
198,
220,
220,
220,
220,
220,
220,
220,
20150,
34758,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
16794,
1298,
357,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
464,
5415,
2472,
5128,
8379,
4129,
706,
11241,
1634,
13,
24604,
3007,
2392,
366,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
14813,
428,
481,
307,
40122,
515,
11,
16311,
12238,
481,
307,
44582,
526,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1267,
198,
220,
220,
220,
220,
220,
220,
220,
8964,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
49312,
62,
23870,
25,
20512,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
25101,
11,
20150,
28,
4895,
16794,
1298,
366,
5886,
13564,
262,
39986,
662,
14681,
276,
40522,
393,
407,
526,
92,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
14841,
62,
1462,
62,
9806,
62,
13664,
25,
20512,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
17821,
11,
198,
220,
220,
220,
220,
220,
220,
220,
20150,
34758,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
16794,
1298,
357,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
15354,
284,
14841,
477,
8405,
284,
4600,
9806,
62,
41068,
62,
13664,
44646,
366,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
1532,
10352,
11,
481,
14841,
262,
8405,
32366,
618,
15458,
278,
284,
262,
5415,
4129,
287,
262,
15458,
526,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1267,
198,
220,
220,
220,
220,
220,
220,
220,
8964,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
3509,
62,
27432,
62,
82,
12629,
25,
32233,
58,
600,
60,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
14202,
11,
198,
220,
220,
220,
220,
220,
220,
220,
20150,
34758,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
16794,
1298,
357,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
1890,
28769,
4959,
393,
20061,
3047,
11,
40122,
378,
262,
1271,
286,
3047,
6096,
284,
428,
366,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
8367,
611,
900,
526,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1267,
198,
220,
220,
220,
220,
220,
220,
220,
8964,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
3509,
62,
18206,
62,
82,
12629,
25,
32233,
58,
600,
60,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
14202,
11,
198,
220,
220,
220,
220,
220,
220,
220,
20150,
34758,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
16794,
1298,
357,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
1890,
28769,
4959,
393,
20061,
3047,
11,
40122,
378,
262,
1271,
286,
12660,
6096,
284,
428,
366,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
8367,
611,
900,
526,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1267,
198,
220,
220,
220,
220,
220,
220,
220,
8964,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
3509,
62,
79,
17407,
62,
82,
12629,
25,
32233,
58,
600,
60,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
14202,
11,
198,
220,
220,
220,
220,
220,
220,
220,
20150,
34758,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
16794,
1298,
357,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
1890,
28769,
4959,
393,
20061,
3047,
11,
40122,
378,
262,
1271,
286,
17724,
6096,
284,
428,
366,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
8367,
611,
900,
526,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1267,
198,
220,
220,
220,
220,
220,
220,
220,
8964,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
4382,
62,
541,
25,
32233,
58,
2536,
60,
796,
2214,
7,
12286,
28,
14202,
11,
20150,
28,
4895,
16794,
1298,
366,
1890,
12899,
28769,
526,
30072,
198,
220,
220,
220,
4382,
62,
634,
25,
32233,
58,
2536,
60,
796,
2214,
7,
12286,
28,
14202,
11,
20150,
28,
4895,
16794,
1298,
366,
1890,
12899,
28769,
526,
30072,
628,
198,
31,
19608,
330,
31172,
198,
4871,
9104,
28100,
2886,
25,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
20559,
2886,
27113,
284,
543,
2746,
14,
11250,
14,
30001,
7509,
356,
389,
1016,
284,
3734,
12,
83,
1726,
422,
13,
198,
220,
220,
220,
37227,
628,
220,
220,
220,
2746,
62,
3672,
62,
273,
62,
6978,
25,
965,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
14202,
11,
20150,
28,
4895,
16794,
1298,
366,
15235,
284,
2181,
13363,
2746,
393,
2746,
27421,
422,
46292,
2550,
13,
1073,
14,
27530,
20662,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
3303,
25,
965,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
14202,
11,
20150,
28,
4895,
16794,
1298,
366,
36,
2100,
2288,
3303,
13,
4418,
4512,
3303,
611,
4600,
27432,
62,
16129,
63,
318,
900,
284,
6045,
526,
92,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
4512,
62,
16129,
25,
32233,
58,
2536,
60,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
14202,
11,
20150,
28,
4895,
16794,
1298,
366,
44077,
3303,
611,
340,
318,
1180,
422,
262,
12660,
3303,
526,
92,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
4566,
62,
3672,
25,
32233,
58,
2536,
60,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
14202,
11,
20150,
28,
4895,
16794,
1298,
366,
47,
1186,
13363,
4566,
1438,
393,
3108,
611,
407,
262,
976,
355,
2746,
62,
3672,
20662,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
11241,
7509,
62,
3672,
25,
32233,
58,
2536,
60,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
14202,
11,
20150,
28,
4895,
16794,
1298,
366,
47,
1186,
13363,
11241,
7509,
1438,
393,
3108,
611,
407,
262,
976,
355,
2746,
62,
3672,
20662,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
12940,
62,
15908,
25,
32233,
58,
2536,
60,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
14202,
11,
198,
220,
220,
220,
220,
220,
220,
220,
20150,
28,
4895,
16794,
1298,
366,
8496,
466,
345,
765,
284,
3650,
262,
2181,
13363,
4981,
15680,
422,
46292,
2550,
13,
1073,
25719,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
466,
62,
21037,
62,
7442,
25,
32233,
58,
30388,
60,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
25101,
11,
198,
220,
220,
220,
220,
220,
220,
220,
20150,
28,
4895,
16794,
1298,
366,
853,
284,
7603,
611,
11241,
7509,
815,
466,
2793,
1339,
287,
11160,
30642,
7509,
13,
6738,
62,
5310,
13363,
3419,
25719,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
779,
62,
7217,
62,
30001,
7509,
25,
20512,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
17821,
11,
198,
220,
220,
220,
220,
220,
220,
220,
20150,
28,
4895,
16794,
1298,
366,
15354,
284,
779,
530,
286,
262,
3049,
11241,
7509,
357,
17078,
416,
262,
11241,
11341,
5888,
8,
393,
407,
526,
5512,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
2746,
62,
260,
10178,
25,
965,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
2625,
12417,
1600,
198,
220,
220,
220,
220,
220,
220,
220,
20150,
28,
4895,
16794,
1298,
366,
464,
2176,
2746,
2196,
284,
779,
357,
5171,
307,
257,
8478,
1438,
11,
7621,
1438,
393,
4589,
4686,
21387,
5512,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
779,
62,
18439,
62,
30001,
25,
20512,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
25101,
11,
198,
220,
220,
220,
220,
220,
220,
220,
20150,
34758,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
16794,
1298,
357,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
8743,
779,
262,
11241,
7560,
618,
2491,
4600,
35636,
364,
12,
44506,
17594,
63,
357,
49986,
284,
779,
428,
4226,
366,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
366,
4480,
2839,
4981,
21387,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1267,
198,
220,
220,
220,
220,
220,
220,
220,
8964,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
8856,
62,
76,
1042,
14265,
62,
82,
4340,
25,
20512,
796,
2214,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
25101,
11,
198,
220,
220,
220,
220,
220,
220,
220,
20150,
28,
4895,
16794,
1298,
366,
8743,
7139,
284,
3440,
257,
2181,
13363,
2746,
3025,
1182,
15225,
389,
1180,
526,
5512,
198,
220,
220,
220,
1267,
628,
198,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
198,
220,
220,
220,
1388,
3419,
198
] | 2.698029 | 2,232 |
from swsscommon import swsscommon
import time
import re
import json
| [
6738,
1509,
824,
11321,
1330,
1509,
824,
11321,
198,
11748,
640,
198,
11748,
302,
198,
11748,
33918,
198
] | 3.777778 | 18 |
# *****************************************************************
# Copyright 2015 MIT Lincoln Laboratory
# Project: SPAR
# Authors: JCH
# Description: Various classes to inform user of progress
#
# Modifications:
# Date Name Modification
# ---- ---- ------------
# 19 Oct 2012 jch Original file
# *****************************************************************
"""
This module holds various progress-informers: classes which will keep track
of various forms of progress (file-processing, row-generating, etc) and
keep the user appropriately informed of progress.
"""
import os
import sys
this_dir = os.path.dirname(os.path.abspath(__file__))
base_dir = os.path.join(this_dir, '..', '..')
sys.path.append(base_dir)
import datetime
| [
2,
41906,
17174,
9,
198,
2,
220,
15069,
1853,
17168,
12406,
18643,
220,
220,
198,
2,
220,
4935,
25,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6226,
1503,
198,
2,
220,
46665,
25,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
449,
3398,
198,
2,
220,
12489,
25,
220,
220,
220,
220,
220,
220,
220,
26386,
6097,
284,
4175,
2836,
286,
4371,
198,
2,
220,
198,
2,
220,
3401,
6637,
25,
198,
2,
220,
7536,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6530,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3401,
2649,
198,
2,
220,
13498,
220,
220,
220,
220,
220,
220,
220,
220,
220,
13498,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
10541,
198,
2,
220,
678,
2556,
2321,
220,
474,
354,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
13745,
2393,
198,
2,
41906,
17174,
9,
198,
198,
37811,
198,
1212,
8265,
6622,
2972,
4371,
12,
259,
687,
364,
25,
6097,
543,
481,
1394,
2610,
198,
1659,
2972,
5107,
286,
4371,
357,
7753,
12,
36948,
11,
5752,
12,
8612,
803,
11,
3503,
8,
290,
198,
14894,
262,
2836,
20431,
7981,
286,
4371,
13,
198,
198,
37811,
198,
198,
11748,
28686,
198,
11748,
25064,
198,
5661,
62,
15908,
796,
28686,
13,
6978,
13,
15908,
3672,
7,
418,
13,
6978,
13,
397,
2777,
776,
7,
834,
7753,
834,
4008,
198,
8692,
62,
15908,
796,
28686,
13,
6978,
13,
22179,
7,
5661,
62,
15908,
11,
705,
492,
3256,
705,
492,
11537,
198,
17597,
13,
6978,
13,
33295,
7,
8692,
62,
15908,
8,
198,
198,
11748,
4818,
8079,
198,
220,
220,
220,
220,
220,
220,
220,
220,
628,
220,
220,
220,
220,
198,
220,
220,
220,
220,
198,
220,
220,
220,
220,
198,
220,
198
] | 2.834437 | 302 |
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models for next-sentence prediction task on ROCStories.
"""
import collections
from absl import logging
import gin
import gin.tf
import tensorflow.compat.v2 as tf
gfile = tf.io.gfile
@gin.configurable
class LinearModel(tf.keras.Model):
"""Multi-layer perceptron with embedding matrix at end."""
def __init__(
self,
num_input_sentences=None,
embedding_matrix=None,
embedding_dim=None):
"""Creates a small MLP, then multiplies outputs by embedding matrix.
Either an embedding matrix or an embedding dimension should be specified.
If the former, predictions are made by multiplying the NN outputs by this
embedding matrix. If only an embedding dimension is provided, call()
outputs an embedding, but no predictions.
Args:
num_input_sentences: Integer number of input sentences.
embedding_matrix: Matrix of size [embedding_dim * num_last_ouputs]
embedding_dim: Matrix of size [embedding_dim * num_last_ouputs]
"""
super(LinearModel, self).__init__()
assert (embedding_matrix is None) ^ (embedding_dim is None)
self._loss_object = tf.keras.losses.SparseCategoricalCrossentropy(
from_logits=True)
self._num_input_sentences = num_input_sentences
self.embedding_matrix = embedding_matrix
if self.embedding_matrix is not None:
self._embedding_dim = self.embedding_matrix.shape[1]
else:
self._embedding_dim = embedding_dim
x_input, x_output = self._build_network()
super(LinearModel, self).__init__(
inputs=x_input, outputs=x_output, name='model')
@gin.configurable('LinearModel.hparams')
def _build_network(self,
relu_layers=(2048, 1024),
dropout_amount=0.5,
normalize_embeddings=False,
final_dropout=True,
small_context_loss_weight=0.0,
max_num_distractors=-1):
"""Builds the network.
Args:
relu_layers: Dimensions of linear+RELU layers to add to MLP. These do not
need to include the final projection down to embedding_dim.
dropout_amount: If training, how much dropout to use in each layer.
normalize_embeddings: If True, normalize sentence embeddings (both
input and predicted) to mean 0, unit variance.
final_dropout: If True, adds dropout to the final embedding layer.
small_context_loss_weight: If >0, in addition to the loss with many
distractors, add another loss where the only distractors are the
sentences of the context.
max_num_distractors: If non-negative, randomly pick a window of this many
distractors around the true 5th sentence.
Returns:
A Keras model.
"""
self.small_context_loss_weight = small_context_loss_weight
self._max_num_distractors = max_num_distractors
# x starts off with dimension [batch_size x num_sentences x emb_size].
# Convert it to [batch_size x (num_sentences*emb_size)].
x_input = tf.keras.Input(
shape=[self._num_input_sentences, self._embedding_dim])
flattened_shape = [-1, self._num_input_sentences * self._embedding_dim]
x = tf.reshape(x_input, flattened_shape)
mlp = tf.keras.Sequential()
if normalize_embeddings:
mlp.add(tf.keras.layers.LayerNormalization(axis=1))
for layer_output_dim in relu_layers:
mlp.add(
tf.keras.layers.Dense(layer_output_dim, activation='relu'))
mlp.add(tf.keras.layers.Dropout(dropout_amount))
# Final layer bring us back to embedding dimension.
mlp.add(tf.keras.layers.Dense(self._embedding_dim, activation='linear'))
if final_dropout:
mlp.add(tf.keras.layers.Dropout(dropout_amount))
if normalize_embeddings:
mlp.add(tf.keras.layers.LayerNormalization(axis=1))
return x_input, mlp(x)
def create_metrics(self):
"""Outputs a dictionary containing all the metrics we want to log."""
metrics = [
tf.keras.metrics.Mean(name='train_loss'),
tf.keras.metrics.SparseCategoricalAccuracy(name='train_acc'),
tf.keras.metrics.Accuracy(name='valid_nolabel_acc'),
tf.keras.metrics.Accuracy(name='train_subset_acc'),
tf.keras.metrics.Accuracy(name='valid_spring2016_acc'),
tf.keras.metrics.Accuracy(name='valid_winter2018_acc')]
if self.small_context_loss_weight > 0.0:
metrics.append(tf.keras.metrics.Mean(name='main_loss'))
metrics.append(tf.keras.metrics.Mean(name='small_context_loss'))
metrics = collections.OrderedDict((m.name, m) for m in metrics)
return metrics
@gin.configurable
class ResidualModel(LinearModel):
"""Residual multi-layer perceptron with embedding matrix at end."""
@gin.configurable('ResidualModel.hparams')
def _build_network(self,
residual_layer_size=1024,
num_residual_layers=2,
dropout_amount=0.5,
small_context_loss_weight=0.0,
max_num_distractors=-1):
"""Builds an MLP with residual connections.
Args:
residual_layer_size: Dimension for linear layer to add to MLP.
num_residual_layers: Number of residual layer.
dropout_amount: If training, how much dropout to use in each layer.
small_context_loss_weight: If >0, in addition to the loss with many
distractors, add another loss where the only distractors are the
sentences of the context.
max_num_distractors: The maximum number of distractors provided at each
train step.
Returns:
The input and output tensors for the network, with the input being a
placeholder variable.
"""
self.small_context_loss_weight = small_context_loss_weight
self._max_num_distractors = max_num_distractors
# x starts off with dimension [batch_size x num_sentences x emb_size].
# Convert it to [batch_size x (num_sentences*emb_size)].
x_input = tf.keras.Input(
shape=[self._num_input_sentences, self._embedding_dim])
flattened_shape = [-1, self._num_input_sentences * self._embedding_dim]
x = tf.reshape(x_input, flattened_shape)
x = tf.keras.layers.LayerNormalization(axis=1)(x)
# First bring dimension down to desired.
x = tf.keras.layers.Dense(residual_layer_size)(x)
# Add specified number of residual layers.
for _ in range(num_residual_layers):
x = block(x, residual_layer_size)
# Go back up to desired dimension.
x = tf.keras.layers.Dense(self._embedding_dim, activation='linear')(x)
x = tf.keras.layers.LayerNormalization(axis=1)(x)
return x_input, x
@gin.configurable(allowlist=['network_class'])
def build_model(num_input_sentences,
embedding_matrix=None,
embedding_dim=None,
network_class=None):
"""Creates the model object and returns it."""
if network_class is None:
# Default to the fully connected model.
model = LinearModel(num_input_sentences, embedding_matrix, embedding_dim)
else:
model = network_class(num_input_sentences, embedding_matrix, embedding_dim)
return model
| [
2,
19617,
28,
40477,
12,
23,
198,
2,
15069,
33160,
383,
3012,
4992,
46665,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
198,
2,
921,
743,
7330,
257,
4866,
286,
262,
13789,
379,
198,
2,
198,
2,
220,
220,
220,
220,
2638,
1378,
2503,
13,
43073,
13,
2398,
14,
677,
4541,
14,
43,
2149,
24290,
12,
17,
13,
15,
198,
2,
198,
2,
17486,
2672,
416,
9723,
1099,
393,
4987,
284,
287,
3597,
11,
3788,
198,
2,
9387,
739,
262,
13789,
318,
9387,
319,
281,
366,
1921,
3180,
1,
29809,
1797,
11,
198,
2,
42881,
34764,
11015,
6375,
7102,
49828,
11053,
3963,
15529,
509,
12115,
11,
2035,
4911,
393,
17142,
13,
198,
2,
4091,
262,
13789,
329,
262,
2176,
3303,
15030,
21627,
290,
198,
2,
11247,
739,
262,
13789,
13,
198,
198,
37811,
5841,
1424,
329,
1306,
12,
34086,
594,
17724,
4876,
319,
371,
4503,
1273,
1749,
13,
198,
37811,
198,
198,
11748,
17268,
198,
198,
6738,
2352,
75,
1330,
18931,
198,
11748,
39733,
198,
11748,
39733,
13,
27110,
198,
11748,
11192,
273,
11125,
13,
5589,
265,
13,
85,
17,
355,
48700,
198,
198,
70,
7753,
796,
48700,
13,
952,
13,
70,
7753,
628,
198,
31,
1655,
13,
11250,
11970,
198,
4871,
44800,
17633,
7,
27110,
13,
6122,
292,
13,
17633,
2599,
198,
220,
37227,
29800,
12,
29289,
34953,
1313,
351,
11525,
12083,
17593,
379,
886,
526,
15931,
628,
220,
825,
11593,
15003,
834,
7,
198,
220,
220,
220,
220,
220,
2116,
11,
198,
220,
220,
220,
220,
220,
997,
62,
15414,
62,
34086,
3007,
28,
14202,
11,
198,
220,
220,
220,
220,
220,
11525,
12083,
62,
6759,
8609,
28,
14202,
11,
198,
220,
220,
220,
220,
220,
11525,
12083,
62,
27740,
28,
14202,
2599,
198,
220,
220,
220,
37227,
16719,
274,
257,
1402,
10373,
47,
11,
788,
15082,
444,
23862,
416,
11525,
12083,
17593,
13,
628,
220,
220,
220,
15467,
281,
11525,
12083,
17593,
393,
281,
11525,
12083,
15793,
815,
307,
7368,
13,
198,
220,
220,
220,
1002,
262,
1966,
11,
16277,
389,
925,
416,
48816,
262,
399,
45,
23862,
416,
428,
198,
220,
220,
220,
11525,
12083,
17593,
13,
1002,
691,
281,
11525,
12083,
15793,
318,
2810,
11,
869,
3419,
198,
220,
220,
220,
23862,
281,
11525,
12083,
11,
475,
645,
16277,
13,
628,
220,
220,
220,
943,
14542,
25,
198,
220,
220,
220,
220,
220,
997,
62,
15414,
62,
34086,
3007,
25,
34142,
1271,
286,
5128,
13439,
13,
198,
220,
220,
220,
220,
220,
11525,
12083,
62,
6759,
8609,
25,
24936,
286,
2546,
685,
20521,
12083,
62,
27740,
1635,
997,
62,
12957,
62,
280,
1996,
82,
60,
198,
220,
220,
220,
220,
220,
11525,
12083,
62,
27740,
25,
24936,
286,
2546,
685,
20521,
12083,
62,
27740,
1635,
997,
62,
12957,
62,
280,
1996,
82,
60,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
2208,
7,
14993,
451,
17633,
11,
2116,
737,
834,
15003,
834,
3419,
198,
220,
220,
220,
6818,
357,
20521,
12083,
62,
6759,
8609,
318,
6045,
8,
10563,
357,
20521,
12083,
62,
27740,
318,
6045,
8,
628,
220,
220,
220,
2116,
13557,
22462,
62,
15252,
796,
48700,
13,
6122,
292,
13,
22462,
274,
13,
50,
29572,
34,
2397,
12409,
21544,
298,
28338,
7,
198,
220,
220,
220,
220,
220,
220,
220,
422,
62,
6404,
896,
28,
17821,
8,
628,
220,
220,
220,
2116,
13557,
22510,
62,
15414,
62,
34086,
3007,
796,
997,
62,
15414,
62,
34086,
3007,
198,
220,
220,
220,
2116,
13,
20521,
12083,
62,
6759,
8609,
796,
11525,
12083,
62,
6759,
8609,
628,
220,
220,
220,
611,
2116,
13,
20521,
12083,
62,
6759,
8609,
318,
407,
6045,
25,
198,
220,
220,
220,
220,
220,
2116,
13557,
20521,
12083,
62,
27740,
796,
2116,
13,
20521,
12083,
62,
6759,
8609,
13,
43358,
58,
16,
60,
198,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
2116,
13557,
20521,
12083,
62,
27740,
796,
11525,
12083,
62,
27740,
628,
220,
220,
220,
2124,
62,
15414,
11,
2124,
62,
22915,
796,
2116,
13557,
11249,
62,
27349,
3419,
198,
220,
220,
220,
2208,
7,
14993,
451,
17633,
11,
2116,
737,
834,
15003,
834,
7,
198,
220,
220,
220,
220,
220,
220,
220,
17311,
28,
87,
62,
15414,
11,
23862,
28,
87,
62,
22915,
11,
1438,
11639,
19849,
11537,
628,
220,
2488,
1655,
13,
11250,
11970,
10786,
14993,
451,
17633,
13,
71,
37266,
11537,
198,
220,
825,
4808,
11249,
62,
27349,
7,
944,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
823,
84,
62,
75,
6962,
16193,
1238,
2780,
11,
28119,
828,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4268,
448,
62,
17287,
28,
15,
13,
20,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3487,
1096,
62,
20521,
67,
654,
28,
25101,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2457,
62,
14781,
448,
28,
17821,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1402,
62,
22866,
62,
22462,
62,
6551,
28,
15,
13,
15,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3509,
62,
22510,
62,
17080,
974,
669,
10779,
16,
2599,
198,
220,
220,
220,
37227,
15580,
82,
262,
3127,
13,
628,
220,
220,
220,
943,
14542,
25,
198,
220,
220,
220,
220,
220,
823,
84,
62,
75,
6962,
25,
41265,
286,
14174,
10,
16448,
52,
11685,
284,
751,
284,
10373,
47,
13,
2312,
466,
407,
198,
220,
220,
220,
220,
220,
220,
220,
761,
284,
2291,
262,
2457,
20128,
866,
284,
11525,
12083,
62,
27740,
13,
198,
220,
220,
220,
220,
220,
4268,
448,
62,
17287,
25,
1002,
3047,
11,
703,
881,
4268,
448,
284,
779,
287,
1123,
7679,
13,
198,
220,
220,
220,
220,
220,
3487,
1096,
62,
20521,
67,
654,
25,
1002,
6407,
11,
3487,
1096,
6827,
11525,
67,
654,
357,
16885,
198,
220,
220,
220,
220,
220,
220,
220,
5128,
290,
11001,
8,
284,
1612,
657,
11,
4326,
24198,
13,
198,
220,
220,
220,
220,
220,
2457,
62,
14781,
448,
25,
1002,
6407,
11,
6673,
4268,
448,
284,
262,
2457,
11525,
12083,
7679,
13,
198,
220,
220,
220,
220,
220,
1402,
62,
22866,
62,
22462,
62,
6551,
25,
1002,
1875,
15,
11,
287,
3090,
284,
262,
2994,
351,
867,
198,
220,
220,
220,
220,
220,
220,
220,
11786,
669,
11,
751,
1194,
2994,
810,
262,
691,
11786,
669,
389,
262,
198,
220,
220,
220,
220,
220,
220,
220,
13439,
286,
262,
4732,
13,
198,
220,
220,
220,
220,
220,
3509,
62,
22510,
62,
17080,
974,
669,
25,
1002,
1729,
12,
31591,
11,
15456,
2298,
257,
4324,
286,
428,
867,
198,
220,
220,
220,
220,
220,
220,
220,
11786,
669,
1088,
262,
2081,
642,
400,
6827,
13,
628,
220,
220,
220,
16409,
25,
198,
220,
220,
220,
220,
220,
220,
220,
317,
17337,
292,
2746,
13,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
2116,
13,
17470,
62,
22866,
62,
22462,
62,
6551,
796,
1402,
62,
22866,
62,
22462,
62,
6551,
198,
220,
220,
220,
2116,
13557,
9806,
62,
22510,
62,
17080,
974,
669,
796,
3509,
62,
22510,
62,
17080,
974,
669,
628,
220,
220,
220,
1303,
2124,
4940,
572,
351,
15793,
685,
43501,
62,
7857,
2124,
997,
62,
34086,
3007,
2124,
4072,
62,
7857,
4083,
198,
220,
220,
220,
1303,
38240,
340,
284,
685,
43501,
62,
7857,
2124,
357,
22510,
62,
34086,
3007,
9,
24419,
62,
7857,
25295,
198,
220,
220,
220,
2124,
62,
15414,
796,
48700,
13,
6122,
292,
13,
20560,
7,
198,
220,
220,
220,
220,
220,
220,
220,
5485,
41888,
944,
13557,
22510,
62,
15414,
62,
34086,
3007,
11,
2116,
13557,
20521,
12083,
62,
27740,
12962,
198,
220,
220,
220,
45096,
62,
43358,
796,
25915,
16,
11,
2116,
13557,
22510,
62,
15414,
62,
34086,
3007,
1635,
2116,
13557,
20521,
12083,
62,
27740,
60,
198,
220,
220,
220,
2124,
796,
48700,
13,
3447,
1758,
7,
87,
62,
15414,
11,
45096,
62,
43358,
8,
628,
220,
220,
220,
25962,
79,
796,
48700,
13,
6122,
292,
13,
44015,
1843,
3419,
198,
220,
220,
220,
611,
3487,
1096,
62,
20521,
67,
654,
25,
198,
220,
220,
220,
220,
220,
25962,
79,
13,
2860,
7,
27110,
13,
6122,
292,
13,
75,
6962,
13,
49925,
26447,
1634,
7,
22704,
28,
16,
4008,
198,
220,
220,
220,
329,
7679,
62,
22915,
62,
27740,
287,
823,
84,
62,
75,
6962,
25,
198,
220,
220,
220,
220,
220,
25962,
79,
13,
2860,
7,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
48700,
13,
6122,
292,
13,
75,
6962,
13,
35,
1072,
7,
29289,
62,
22915,
62,
27740,
11,
14916,
11639,
260,
2290,
6,
4008,
198,
220,
220,
220,
220,
220,
25962,
79,
13,
2860,
7,
27110,
13,
6122,
292,
13,
75,
6962,
13,
26932,
448,
7,
14781,
448,
62,
17287,
4008,
628,
220,
220,
220,
1303,
8125,
7679,
2222,
514,
736,
284,
11525,
12083,
15793,
13,
198,
220,
220,
220,
25962,
79,
13,
2860,
7,
27110,
13,
6122,
292,
13,
75,
6962,
13,
35,
1072,
7,
944,
13557,
20521,
12083,
62,
27740,
11,
14916,
11639,
29127,
6,
4008,
198,
220,
220,
220,
611,
2457,
62,
14781,
448,
25,
198,
220,
220,
220,
220,
220,
25962,
79,
13,
2860,
7,
27110,
13,
6122,
292,
13,
75,
6962,
13,
26932,
448,
7,
14781,
448,
62,
17287,
4008,
198,
220,
220,
220,
611,
3487,
1096,
62,
20521,
67,
654,
25,
198,
220,
220,
220,
220,
220,
25962,
79,
13,
2860,
7,
27110,
13,
6122,
292,
13,
75,
6962,
13,
49925,
26447,
1634,
7,
22704,
28,
16,
4008,
198,
220,
220,
220,
1441,
2124,
62,
15414,
11,
25962,
79,
7,
87,
8,
628,
220,
825,
2251,
62,
4164,
10466,
7,
944,
2599,
198,
220,
220,
220,
37227,
26410,
82,
257,
22155,
7268,
477,
262,
20731,
356,
765,
284,
2604,
526,
15931,
628,
220,
220,
220,
20731,
796,
685,
198,
220,
220,
220,
220,
220,
220,
220,
48700,
13,
6122,
292,
13,
4164,
10466,
13,
5308,
272,
7,
3672,
11639,
27432,
62,
22462,
33809,
198,
220,
220,
220,
220,
220,
220,
220,
48700,
13,
6122,
292,
13,
4164,
10466,
13,
50,
29572,
34,
2397,
12409,
17320,
23843,
7,
3672,
11639,
27432,
62,
4134,
33809,
198,
220,
220,
220,
220,
220,
220,
220,
48700,
13,
6122,
292,
13,
4164,
10466,
13,
17320,
23843,
7,
3672,
11639,
12102,
62,
77,
349,
9608,
62,
4134,
33809,
198,
220,
220,
220,
220,
220,
220,
220,
48700,
13,
6122,
292,
13,
4164,
10466,
13,
17320,
23843,
7,
3672,
11639,
27432,
62,
7266,
2617,
62,
4134,
33809,
198,
220,
220,
220,
220,
220,
220,
220,
48700,
13,
6122,
292,
13,
4164,
10466,
13,
17320,
23843,
7,
3672,
11639,
12102,
62,
16469,
5304,
62,
4134,
33809,
198,
220,
220,
220,
220,
220,
220,
220,
48700,
13,
6122,
292,
13,
4164,
10466,
13,
17320,
23843,
7,
3672,
11639,
12102,
62,
40078,
7908,
62,
4134,
11537,
60,
628,
220,
220,
220,
611,
2116,
13,
17470,
62,
22866,
62,
22462,
62,
6551,
1875,
657,
13,
15,
25,
198,
220,
220,
220,
220,
220,
20731,
13,
33295,
7,
27110,
13,
6122,
292,
13,
4164,
10466,
13,
5308,
272,
7,
3672,
11639,
12417,
62,
22462,
6,
4008,
198,
220,
220,
220,
220,
220,
20731,
13,
33295,
7,
27110,
13,
6122,
292,
13,
4164,
10466,
13,
5308,
272,
7,
3672,
11639,
17470,
62,
22866,
62,
22462,
6,
4008,
628,
220,
220,
220,
20731,
796,
17268,
13,
35422,
1068,
35,
713,
19510,
76,
13,
3672,
11,
285,
8,
329,
285,
287,
20731,
8,
198,
220,
220,
220,
1441,
20731,
628,
198,
31,
1655,
13,
11250,
11970,
198,
4871,
1874,
312,
723,
17633,
7,
14993,
451,
17633,
2599,
198,
220,
37227,
4965,
312,
723,
5021,
12,
29289,
34953,
1313,
351,
11525,
12083,
17593,
379,
886,
526,
15931,
628,
220,
2488,
1655,
13,
11250,
11970,
10786,
4965,
312,
723,
17633,
13,
71,
37266,
11537,
198,
220,
825,
4808,
11249,
62,
27349,
7,
944,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
29598,
62,
29289,
62,
7857,
28,
35500,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
997,
62,
411,
312,
723,
62,
75,
6962,
28,
17,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4268,
448,
62,
17287,
28,
15,
13,
20,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1402,
62,
22866,
62,
22462,
62,
6551,
28,
15,
13,
15,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3509,
62,
22510,
62,
17080,
974,
669,
10779,
16,
2599,
198,
220,
220,
220,
37227,
15580,
82,
281,
10373,
47,
351,
29598,
8787,
13,
628,
220,
220,
220,
943,
14542,
25,
198,
220,
220,
220,
220,
220,
29598,
62,
29289,
62,
7857,
25,
34024,
329,
14174,
7679,
284,
751,
284,
10373,
47,
13,
198,
220,
220,
220,
220,
220,
997,
62,
411,
312,
723,
62,
75,
6962,
25,
7913,
286,
29598,
7679,
13,
198,
220,
220,
220,
220,
220,
4268,
448,
62,
17287,
25,
1002,
3047,
11,
703,
881,
4268,
448,
284,
779,
287,
1123,
7679,
13,
198,
220,
220,
220,
220,
220,
1402,
62,
22866,
62,
22462,
62,
6551,
25,
1002,
1875,
15,
11,
287,
3090,
284,
262,
2994,
351,
867,
198,
220,
220,
220,
220,
220,
220,
220,
11786,
669,
11,
751,
1194,
2994,
810,
262,
691,
11786,
669,
389,
262,
198,
220,
220,
220,
220,
220,
220,
220,
13439,
286,
262,
4732,
13,
198,
220,
220,
220,
220,
220,
3509,
62,
22510,
62,
17080,
974,
669,
25,
383,
5415,
1271,
286,
11786,
669,
2810,
379,
1123,
198,
220,
220,
220,
220,
220,
220,
220,
4512,
2239,
13,
628,
220,
220,
220,
16409,
25,
198,
220,
220,
220,
220,
220,
383,
5128,
290,
5072,
11192,
669,
329,
262,
3127,
11,
351,
262,
5128,
852,
257,
198,
220,
220,
220,
220,
220,
46076,
7885,
13,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
2116,
13,
17470,
62,
22866,
62,
22462,
62,
6551,
796,
1402,
62,
22866,
62,
22462,
62,
6551,
198,
220,
220,
220,
2116,
13557,
9806,
62,
22510,
62,
17080,
974,
669,
796,
3509,
62,
22510,
62,
17080,
974,
669,
628,
220,
220,
220,
1303,
2124,
4940,
572,
351,
15793,
685,
43501,
62,
7857,
2124,
997,
62,
34086,
3007,
2124,
4072,
62,
7857,
4083,
198,
220,
220,
220,
1303,
38240,
340,
284,
685,
43501,
62,
7857,
2124,
357,
22510,
62,
34086,
3007,
9,
24419,
62,
7857,
25295,
198,
220,
220,
220,
2124,
62,
15414,
796,
48700,
13,
6122,
292,
13,
20560,
7,
198,
220,
220,
220,
220,
220,
220,
220,
5485,
41888,
944,
13557,
22510,
62,
15414,
62,
34086,
3007,
11,
2116,
13557,
20521,
12083,
62,
27740,
12962,
198,
220,
220,
220,
45096,
62,
43358,
796,
25915,
16,
11,
2116,
13557,
22510,
62,
15414,
62,
34086,
3007,
1635,
2116,
13557,
20521,
12083,
62,
27740,
60,
198,
220,
220,
220,
2124,
796,
48700,
13,
3447,
1758,
7,
87,
62,
15414,
11,
45096,
62,
43358,
8,
628,
220,
220,
220,
2124,
796,
48700,
13,
6122,
292,
13,
75,
6962,
13,
49925,
26447,
1634,
7,
22704,
28,
16,
5769,
87,
8,
628,
220,
220,
220,
1303,
3274,
2222,
15793,
866,
284,
10348,
13,
198,
220,
220,
220,
2124,
796,
48700,
13,
6122,
292,
13,
75,
6962,
13,
35,
1072,
7,
411,
312,
723,
62,
29289,
62,
7857,
5769,
87,
8,
628,
220,
220,
220,
1303,
3060,
7368,
1271,
286,
29598,
11685,
13,
198,
220,
220,
220,
329,
4808,
287,
2837,
7,
22510,
62,
411,
312,
723,
62,
75,
6962,
2599,
198,
220,
220,
220,
220,
220,
2124,
796,
2512,
7,
87,
11,
29598,
62,
29289,
62,
7857,
8,
628,
220,
220,
220,
1303,
1514,
736,
510,
284,
10348,
15793,
13,
198,
220,
220,
220,
2124,
796,
48700,
13,
6122,
292,
13,
75,
6962,
13,
35,
1072,
7,
944,
13557,
20521,
12083,
62,
27740,
11,
14916,
11639,
29127,
6,
5769,
87,
8,
198,
220,
220,
220,
2124,
796,
48700,
13,
6122,
292,
13,
75,
6962,
13,
49925,
26447,
1634,
7,
22704,
28,
16,
5769,
87,
8,
198,
220,
220,
220,
1441,
2124,
62,
15414,
11,
2124,
628,
198,
31,
1655,
13,
11250,
11970,
7,
12154,
4868,
28,
17816,
27349,
62,
4871,
6,
12962,
198,
4299,
1382,
62,
19849,
7,
22510,
62,
15414,
62,
34086,
3007,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
11525,
12083,
62,
6759,
8609,
28,
14202,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
11525,
12083,
62,
27740,
28,
14202,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3127,
62,
4871,
28,
14202,
2599,
198,
220,
37227,
16719,
274,
262,
2746,
2134,
290,
5860,
340,
526,
15931,
198,
220,
611,
3127,
62,
4871,
318,
6045,
25,
198,
220,
220,
220,
1303,
15161,
284,
262,
3938,
5884,
2746,
13,
198,
220,
220,
220,
2746,
796,
44800,
17633,
7,
22510,
62,
15414,
62,
34086,
3007,
11,
11525,
12083,
62,
6759,
8609,
11,
11525,
12083,
62,
27740,
8,
198,
220,
2073,
25,
198,
220,
220,
220,
2746,
796,
3127,
62,
4871,
7,
22510,
62,
15414,
62,
34086,
3007,
11,
11525,
12083,
62,
6759,
8609,
11,
11525,
12083,
62,
27740,
8,
198,
220,
1441,
2746,
198
] | 2.590361 | 2,988 |
import abc
# Good old composite pattern
# This is used when we want to create a hierachy of instances that contain other instances,
# but we want to operate on all instances somewhat equally
# Here the composite instances can contain other composites or leafs
# All implement the operation method, where the composite will be sure to
# call the same method on all its childred
# Note that some methods are not implemented on Leaf as that does not make sense.
# They throw errors for the sake of safety, but they kinda need to be there
# so that Composites and Leafs can be treated in a similar way
c1 = Composite()
c1.add(Leaf())
c1.add(Leaf())
c2 = Composite()
c2.add(Leaf())
c2.add(c1)
print(c2.operation()) | [
11748,
450,
66,
198,
2,
4599,
1468,
24185,
3912,
198,
2,
770,
318,
973,
618,
356,
765,
284,
2251,
257,
13550,
35586,
286,
10245,
326,
3994,
584,
10245,
11,
198,
2,
475,
356,
765,
284,
8076,
319,
477,
10245,
6454,
8603,
198,
198,
2,
3423,
262,
24185,
10245,
460,
3994,
584,
18882,
2737,
393,
12835,
82,
198,
2,
1439,
3494,
262,
4905,
2446,
11,
810,
262,
24185,
481,
307,
1654,
284,
198,
2,
869,
262,
976,
2446,
319,
477,
663,
1200,
445,
198,
198,
2,
5740,
326,
617,
5050,
389,
407,
9177,
319,
14697,
355,
326,
857,
407,
787,
2565,
13,
198,
2,
1119,
3714,
8563,
329,
262,
11060,
286,
3747,
11,
475,
484,
17855,
761,
284,
307,
612,
220,
198,
2,
523,
326,
29936,
2737,
290,
25479,
460,
307,
5716,
287,
257,
2092,
835,
198,
198,
66,
16,
796,
49355,
3419,
198,
66,
16,
13,
2860,
7,
3123,
1878,
28955,
198,
66,
16,
13,
2860,
7,
3123,
1878,
28955,
198,
198,
66,
17,
796,
49355,
3419,
198,
66,
17,
13,
2860,
7,
3123,
1878,
28955,
198,
66,
17,
13,
2860,
7,
66,
16,
8,
198,
198,
4798,
7,
66,
17,
13,
27184,
28955
] | 3.685567 | 194 |
from unittest import TestCase
from itertools import product
from genki.http.url.parse import parse_url, url_parse_result
from genki.http.request import RequestBuilder
from genki.http.constants import Scheme
from genki.http.url.exceptions import InvalidURL
| [
6738,
555,
715,
395,
1330,
6208,
20448,
198,
6738,
340,
861,
10141,
1330,
1720,
198,
198,
6738,
2429,
4106,
13,
4023,
13,
6371,
13,
29572,
1330,
21136,
62,
6371,
11,
19016,
62,
29572,
62,
20274,
198,
6738,
2429,
4106,
13,
4023,
13,
25927,
1330,
19390,
32875,
198,
6738,
2429,
4106,
13,
4023,
13,
9979,
1187,
1330,
32448,
198,
6738,
2429,
4106,
13,
4023,
13,
6371,
13,
1069,
11755,
1330,
17665,
21886,
628,
198
] | 3.547945 | 73 |
from textwrap import dedent
from typing import List
import pytest
import gen
from gen.tests.utils import make_arguments, true_false_msg, validate_error
class TestAdminRouterTLSConfig:
"""
Tests for the Admin Router TLS Config creation.
"""
def test_default(self):
"""
By default, the configuration specifies certain TLS settings.
This test is a sanity check for the configuration template logic
rather than a particularly useful feature test.
"""
config_path = '/etc/adminrouter-tls.conf'
arguments = make_arguments(new_arguments={})
generated = gen.generate(arguments=arguments)
package = generated.templates['dcos-config.yaml']['package']
[config] = [item for item in package if item['path'] == config_path]
expected_configuration = dedent(
"""\
# Ref: https://github.com/cloudflare/sslconfig/blob/master/conf
# Modulo ChaCha20 cipher.
ssl_ciphers EECDH+AES128:RSA+AES128:EECDH+AES256:RSA+AES256:EECDH+3DES:RSA+3DES:!MD5;
ssl_prefer_server_ciphers on;
# To manually test which TLS versions are enabled on a node, use
# `openssl` commands.
#
# See comments on https://jira.mesosphere.com/browse/DCOS-13437 for more
# details.
ssl_protocols TLSv1.1 TLSv1.2;
"""
)
assert config['content'] == expected_configuration
class TestToggleTLS1:
"""
Tests for toggling TLS 1.0.
To manually test that this is, in fact, a working toggle for TLS 1.0, use
`openssl` commands.
See comments on https://jira.mesosphere.com/browse/DCOS-13437 for more
details.
"""
def supported_ssl_protocols(self, new_config_arguments) -> List[str]:
"""
This finds a line which looks like the following:
ssl protocols TLSv1, TLSv1.1;
in the Admin Router TLS configuration.
It then returns the listed protocols.
Args:
new_config_arguments: Arguments which are added to the 'standard'
set of arguments before generating configuration files.
Returns:
A ``list`` of supported SSL protocols.
"""
arguments = make_arguments(new_arguments=new_config_arguments)
generated = gen.generate(arguments=arguments)
package = generated.templates['dcos-config.yaml']['package']
config_path = '/etc/adminrouter-tls.conf'
[config] = [item for item in package if item['path'] == config_path]
[ssl_protocols_line] = [
line for line in config['content'].split('\n') if
# We strip whitespace from the beginning of the line as NGINX
# configuration lines can start with whitespace.
line.lstrip().startswith('ssl_protocols ')
]
ssl_protocols_line = ssl_protocols_line.strip(';')
protocols = ssl_protocols_line.split()[1:]
return protocols
def test_validation(self):
"""
The config variable `tls_1_0_enabled` must be 'true' or 'false'.
"""
validate_error(
new_arguments={'adminrouter_tls_1_0_enabled': 'foo'},
key='adminrouter_tls_1_0_enabled',
message=true_false_msg,
)
@pytest.mark.parametrize(
'new_arguments', [{}, {'adminrouter_tls_1_0_enabled': 'false'}]
)
def test_default(self, new_arguments):
"""
By default TLS 1.0 is disabled, and therefore by default the config
variable is set to 'false'.
This test is parametrized to demonstrate that having no configuration
produces the same results as setting the config variable to `'false'`.
"""
protocols = self.supported_ssl_protocols(
new_config_arguments=new_arguments,
)
assert protocols == ['TLSv1.1', 'TLSv1.2']
def test_enable(self):
"""
Setting the config variable to 'true' enables TLS 1.0.
"""
new_arguments = {'adminrouter_tls_1_0_enabled': 'true'}
protocols = self.supported_ssl_protocols(
new_config_arguments=new_arguments,
)
assert protocols == ['TLSv1', 'TLSv1.1', 'TLSv1.2']
| [
6738,
2420,
37150,
1330,
4648,
298,
198,
6738,
19720,
1330,
7343,
198,
198,
11748,
12972,
9288,
198,
198,
11748,
2429,
198,
6738,
2429,
13,
41989,
13,
26791,
1330,
787,
62,
853,
2886,
11,
2081,
62,
9562,
62,
19662,
11,
26571,
62,
18224,
628,
198,
4871,
6208,
46787,
49,
39605,
51,
6561,
16934,
25,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
30307,
329,
262,
32053,
48538,
33855,
17056,
6282,
13,
198,
220,
220,
220,
37227,
628,
220,
220,
220,
825,
1332,
62,
12286,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
2750,
4277,
11,
262,
8398,
26052,
1728,
33855,
6460,
13,
628,
220,
220,
220,
220,
220,
220,
220,
770,
1332,
318,
257,
34182,
2198,
329,
262,
8398,
11055,
9156,
198,
220,
220,
220,
220,
220,
220,
220,
2138,
621,
257,
3573,
4465,
3895,
1332,
13,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
4566,
62,
6978,
796,
31051,
14784,
14,
28482,
472,
353,
12,
83,
7278,
13,
10414,
6,
198,
220,
220,
220,
220,
220,
220,
220,
7159,
796,
787,
62,
853,
2886,
7,
3605,
62,
853,
2886,
34758,
30072,
198,
220,
220,
220,
220,
220,
220,
220,
7560,
796,
2429,
13,
8612,
378,
7,
853,
2886,
28,
853,
2886,
8,
198,
220,
220,
220,
220,
220,
220,
220,
5301,
796,
7560,
13,
11498,
17041,
17816,
67,
6966,
12,
11250,
13,
88,
43695,
6,
7131,
6,
26495,
20520,
198,
220,
220,
220,
220,
220,
220,
220,
685,
11250,
60,
796,
685,
9186,
329,
2378,
287,
5301,
611,
2378,
17816,
6978,
20520,
6624,
4566,
62,
6978,
60,
628,
220,
220,
220,
220,
220,
220,
220,
2938,
62,
11250,
3924,
796,
4648,
298,
7,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
37227,
59,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
6524,
25,
3740,
1378,
12567,
13,
785,
14,
17721,
2704,
533,
14,
45163,
11250,
14,
2436,
672,
14,
9866,
14,
10414,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
3401,
43348,
20703,
1925,
64,
1238,
38012,
13,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
264,
6649,
62,
66,
541,
7084,
412,
27295,
39,
10,
32,
1546,
12762,
25,
49,
4090,
10,
32,
1546,
12762,
25,
36,
27295,
39,
10,
32,
1546,
11645,
25,
49,
4090,
10,
32,
1546,
11645,
25,
36,
27295,
39,
10,
18,
30910,
25,
49,
4090,
10,
18,
30910,
25,
0,
12740,
20,
26,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
264,
6649,
62,
3866,
2232,
62,
15388,
62,
66,
541,
7084,
319,
26,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
1675,
14500,
1332,
543,
33855,
6300,
389,
9343,
319,
257,
10139,
11,
779,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
4600,
44813,
6649,
63,
9729,
13,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
4091,
3651,
319,
3740,
1378,
73,
8704,
13,
6880,
22829,
13,
785,
14,
25367,
325,
14,
9697,
2640,
12,
19880,
2718,
329,
517,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
3307,
13,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
264,
6649,
62,
11235,
4668,
82,
33855,
85,
16,
13,
16,
33855,
85,
16,
13,
17,
26,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
1267,
198,
220,
220,
220,
220,
220,
220,
220,
6818,
4566,
17816,
11299,
20520,
6624,
2938,
62,
11250,
3924,
628,
198,
4871,
6208,
51,
20258,
51,
6561,
16,
25,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
30307,
329,
284,
1130,
1359,
33855,
352,
13,
15,
13,
628,
220,
220,
220,
1675,
14500,
1332,
326,
428,
318,
11,
287,
1109,
11,
257,
1762,
19846,
329,
33855,
352,
13,
15,
11,
779,
198,
220,
220,
220,
4600,
44813,
6649,
63,
9729,
13,
628,
220,
220,
220,
4091,
3651,
319,
3740,
1378,
73,
8704,
13,
6880,
22829,
13,
785,
14,
25367,
325,
14,
9697,
2640,
12,
19880,
2718,
329,
517,
198,
220,
220,
220,
3307,
13,
198,
220,
220,
220,
37227,
628,
220,
220,
220,
825,
4855,
62,
45163,
62,
11235,
4668,
82,
7,
944,
11,
649,
62,
11250,
62,
853,
2886,
8,
4613,
7343,
58,
2536,
5974,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
770,
7228,
257,
1627,
543,
3073,
588,
262,
1708,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
264,
6649,
19565,
33855,
85,
16,
11,
33855,
85,
16,
13,
16,
26,
198,
220,
220,
220,
220,
220,
220,
220,
287,
262,
32053,
48538,
33855,
8398,
13,
198,
220,
220,
220,
220,
220,
220,
220,
632,
788,
5860,
262,
5610,
19565,
13,
628,
220,
220,
220,
220,
220,
220,
220,
943,
14542,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
649,
62,
11250,
62,
853,
2886,
25,
20559,
2886,
543,
389,
2087,
284,
262,
705,
20307,
6,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
900,
286,
7159,
878,
15453,
8398,
3696,
13,
628,
220,
220,
220,
220,
220,
220,
220,
16409,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
317,
7559,
4868,
15506,
286,
4855,
25952,
19565,
13,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
7159,
796,
787,
62,
853,
2886,
7,
3605,
62,
853,
2886,
28,
3605,
62,
11250,
62,
853,
2886,
8,
198,
220,
220,
220,
220,
220,
220,
220,
7560,
796,
2429,
13,
8612,
378,
7,
853,
2886,
28,
853,
2886,
8,
198,
220,
220,
220,
220,
220,
220,
220,
5301,
796,
7560,
13,
11498,
17041,
17816,
67,
6966,
12,
11250,
13,
88,
43695,
6,
7131,
6,
26495,
20520,
198,
220,
220,
220,
220,
220,
220,
220,
4566,
62,
6978,
796,
31051,
14784,
14,
28482,
472,
353,
12,
83,
7278,
13,
10414,
6,
198,
220,
220,
220,
220,
220,
220,
220,
685,
11250,
60,
796,
685,
9186,
329,
2378,
287,
5301,
611,
2378,
17816,
6978,
20520,
6624,
4566,
62,
6978,
60,
198,
220,
220,
220,
220,
220,
220,
220,
685,
45163,
62,
11235,
4668,
82,
62,
1370,
60,
796,
685,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1627,
329,
1627,
287,
4566,
17816,
11299,
6,
4083,
35312,
10786,
59,
77,
11537,
611,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
775,
10283,
13216,
10223,
422,
262,
3726,
286,
262,
1627,
355,
39058,
1268,
55,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
8398,
3951,
460,
923,
351,
13216,
10223,
13,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1627,
13,
75,
36311,
22446,
9688,
2032,
342,
10786,
45163,
62,
11235,
4668,
82,
705,
8,
198,
220,
220,
220,
220,
220,
220,
220,
2361,
198,
220,
220,
220,
220,
220,
220,
220,
264,
6649,
62,
11235,
4668,
82,
62,
1370,
796,
264,
6649,
62,
11235,
4668,
82,
62,
1370,
13,
36311,
10786,
26,
11537,
198,
220,
220,
220,
220,
220,
220,
220,
19565,
796,
264,
6649,
62,
11235,
4668,
82,
62,
1370,
13,
35312,
3419,
58,
16,
47715,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
19565,
628,
220,
220,
220,
825,
1332,
62,
12102,
341,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
383,
4566,
7885,
4600,
83,
7278,
62,
16,
62,
15,
62,
25616,
63,
1276,
307,
705,
7942,
6,
393,
705,
9562,
4458,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
26571,
62,
18224,
7,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
649,
62,
853,
2886,
34758,
6,
28482,
472,
353,
62,
83,
7278,
62,
16,
62,
15,
62,
25616,
10354,
705,
21943,
6,
5512,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1994,
11639,
28482,
472,
353,
62,
83,
7278,
62,
16,
62,
15,
62,
25616,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3275,
28,
7942,
62,
9562,
62,
19662,
11,
198,
220,
220,
220,
220,
220,
220,
220,
1267,
628,
220,
220,
220,
2488,
9078,
9288,
13,
4102,
13,
17143,
316,
380,
2736,
7,
198,
220,
220,
220,
220,
220,
220,
220,
705,
3605,
62,
853,
2886,
3256,
685,
90,
5512,
1391,
6,
28482,
472,
353,
62,
83,
7278,
62,
16,
62,
15,
62,
25616,
10354,
705,
9562,
6,
92,
60,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
825,
1332,
62,
12286,
7,
944,
11,
649,
62,
853,
2886,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
2750,
4277,
33855,
352,
13,
15,
318,
10058,
11,
290,
4361,
416,
4277,
262,
4566,
198,
220,
220,
220,
220,
220,
220,
220,
7885,
318,
900,
284,
705,
9562,
4458,
628,
220,
220,
220,
220,
220,
220,
220,
770,
1332,
318,
5772,
316,
380,
8863,
284,
10176,
326,
1719,
645,
8398,
198,
220,
220,
220,
220,
220,
220,
220,
11073,
262,
976,
2482,
355,
4634,
262,
4566,
7885,
284,
4600,
6,
9562,
6,
44646,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
19565,
796,
2116,
13,
15999,
62,
45163,
62,
11235,
4668,
82,
7,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
649,
62,
11250,
62,
853,
2886,
28,
3605,
62,
853,
2886,
11,
198,
220,
220,
220,
220,
220,
220,
220,
1267,
198,
220,
220,
220,
220,
220,
220,
220,
6818,
19565,
6624,
37250,
51,
6561,
85,
16,
13,
16,
3256,
705,
51,
6561,
85,
16,
13,
17,
20520,
628,
220,
220,
220,
825,
1332,
62,
21633,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
25700,
262,
4566,
7885,
284,
705,
7942,
6,
13536,
33855,
352,
13,
15,
13,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
649,
62,
853,
2886,
796,
1391,
6,
28482,
472,
353,
62,
83,
7278,
62,
16,
62,
15,
62,
25616,
10354,
705,
7942,
6,
92,
198,
220,
220,
220,
220,
220,
220,
220,
19565,
796,
2116,
13,
15999,
62,
45163,
62,
11235,
4668,
82,
7,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
649,
62,
11250,
62,
853,
2886,
28,
3605,
62,
853,
2886,
11,
198,
220,
220,
220,
220,
220,
220,
220,
1267,
198,
220,
220,
220,
220,
220,
220,
220,
6818,
19565,
6624,
37250,
51,
6561,
85,
16,
3256,
705,
51,
6561,
85,
16,
13,
16,
3256,
705,
51,
6561,
85,
16,
13,
17,
20520,
198
] | 2.314995 | 1,854 |
#!/usr/bin/env python
engine = 'innodb'
host = 'localhost'
db_name = ''
user = ''
passwd = ''
skip_tables = ()
import MySQLdb
db = MySQLdb.connect(user=user, passwd=passwd, db=db_name, host=host)
c = db.cursor()
c.execute("show tables")
row = c.fetchone()
while row:
table = row[0]
print 'Converting Table: %s' % table
e = db.cursor()
e.execute("SHOW TABLE STATUS from `%s` LIKE '%s'" % (db_name, table))
info = e.fetchone()
if table in skip_tables or info[1] == engine:
print 'Skipping'
row = c.fetchone()
continue
e.execute('ALTER TABLE `%s` ENGINE = %s, tablespace ts_1 storage disk' % (MySQLdb.escape_string(table), engine))
row = c.fetchone()
print 'Done'
c.close()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
18392,
796,
705,
3732,
375,
65,
6,
198,
4774,
796,
705,
36750,
6,
198,
9945,
62,
3672,
796,
10148,
198,
7220,
796,
10148,
198,
6603,
16993,
796,
10148,
198,
48267,
62,
83,
2977,
796,
7499,
198,
198,
11748,
33476,
9945,
198,
198,
9945,
796,
33476,
9945,
13,
8443,
7,
7220,
28,
7220,
11,
1208,
16993,
28,
6603,
16993,
11,
20613,
28,
9945,
62,
3672,
11,
2583,
28,
4774,
8,
198,
198,
66,
796,
20613,
13,
66,
21471,
3419,
198,
66,
13,
41049,
7203,
12860,
8893,
4943,
198,
198,
808,
796,
269,
13,
69,
7569,
505,
3419,
198,
4514,
5752,
25,
198,
220,
220,
220,
3084,
796,
5752,
58,
15,
60,
198,
220,
220,
220,
3601,
705,
3103,
48820,
8655,
25,
4064,
82,
6,
4064,
3084,
198,
220,
220,
220,
304,
796,
20613,
13,
66,
21471,
3419,
198,
220,
220,
220,
304,
13,
41049,
7203,
9693,
3913,
43679,
15486,
2937,
422,
4600,
4,
82,
63,
34178,
705,
4,
82,
29653,
4064,
357,
9945,
62,
3672,
11,
3084,
4008,
198,
220,
220,
220,
7508,
796,
304,
13,
69,
7569,
505,
3419,
198,
220,
220,
220,
611,
3084,
287,
14267,
62,
83,
2977,
393,
7508,
58,
16,
60,
6624,
3113,
25,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
705,
50,
4106,
2105,
6,
198,
220,
220,
220,
220,
220,
220,
220,
5752,
796,
269,
13,
69,
7569,
505,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
2555,
198,
220,
220,
220,
304,
13,
41049,
10786,
1847,
5781,
43679,
4600,
4,
82,
63,
36924,
8881,
796,
4064,
82,
11,
8893,
10223,
40379,
62,
16,
6143,
11898,
6,
4064,
357,
3666,
17861,
9945,
13,
41915,
62,
8841,
7,
11487,
828,
3113,
4008,
198,
220,
220,
220,
5752,
796,
269,
13,
69,
7569,
505,
3419,
198,
220,
220,
220,
3601,
705,
45677,
6,
198,
66,
13,
19836,
3419,
198
] | 2.333333 | 315 |
from .bsdict import bsdict, memoizer
| [
6738,
764,
1443,
11600,
1330,
275,
82,
11600,
11,
16155,
7509,
198
] | 3.083333 | 12 |
t = int(input())
while(t>0):
a=list(map(int,input().split(' ')))
D=a[0]
d=a[1]
p=a[2]
q=a[3]
remainder=D%d
n=D//d
value=(n*p*d) + (d*q*(n*(n-1)//2))+(p*remainder+(remainder*q*n))
print(value,"\n")
t=t-1
| [
83,
796,
493,
7,
15414,
28955,
198,
4514,
7,
83,
29,
15,
2599,
198,
220,
220,
220,
257,
28,
4868,
7,
8899,
7,
600,
11,
15414,
22446,
35312,
10786,
705,
22305,
198,
220,
220,
220,
360,
28,
64,
58,
15,
60,
198,
220,
220,
220,
288,
28,
64,
58,
16,
60,
198,
220,
220,
220,
279,
28,
64,
58,
17,
60,
198,
220,
220,
220,
10662,
28,
64,
58,
18,
60,
198,
220,
220,
220,
17675,
28,
35,
4,
67,
198,
220,
220,
220,
299,
28,
35,
1003,
67,
198,
220,
220,
220,
1988,
16193,
77,
9,
79,
9,
67,
8,
1343,
357,
67,
9,
80,
9,
7,
77,
9,
7,
77,
12,
16,
8,
1003,
17,
4008,
33747,
79,
9,
2787,
391,
1082,
33747,
2787,
391,
1082,
9,
80,
9,
77,
4008,
198,
220,
220,
220,
3601,
7,
8367,
553,
59,
77,
4943,
198,
220,
220,
220,
256,
28,
83,
12,
16,
198,
220,
220,
220,
220
] | 1.563291 | 158 |
from django.db import models
from cajas.users.models.user import User
from cajas.office.models.officeCountry import OfficeCountry
class BoxDailySquare(models.Model):
"""Modelo para la caja de un cuadre diario
"""
user = models.ForeignKey(
User,
verbose_name='Usuario',
on_delete=models.SET_NULL,
blank=True, null=True,
related_name='related_daily_box'
)
office = models.ForeignKey(
OfficeCountry,
verbose_name='Oficina',
related_name='related_daily_square_boxes',
blank=True, null=True,
on_delete=models.SET_NULL
)
balance = models.IntegerField(
"Saldo de la caja",
default=0
)
is_active = models.BooleanField(
"Caja Activa?",
default=True
)
last_movement_id = models.IntegerField(
'id último movimiento',
default=0
)
is_closed = models.BooleanField(
"Caja cerrada?",
default=False
)
| [
6738,
42625,
14208,
13,
9945,
1330,
4981,
198,
198,
6738,
269,
1228,
292,
13,
18417,
13,
27530,
13,
7220,
1330,
11787,
198,
6738,
269,
1228,
292,
13,
31810,
13,
27530,
13,
31810,
33921,
1330,
4452,
33921,
628,
198,
4871,
8315,
28545,
48011,
7,
27530,
13,
17633,
2599,
198,
220,
220,
220,
37227,
17633,
78,
31215,
8591,
269,
27792,
390,
555,
18912,
324,
260,
2566,
4982,
198,
220,
220,
220,
37227,
628,
220,
220,
220,
2836,
796,
4981,
13,
33616,
9218,
7,
198,
220,
220,
220,
220,
220,
220,
220,
11787,
11,
198,
220,
220,
220,
220,
220,
220,
220,
15942,
577,
62,
3672,
11639,
52,
2385,
4982,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
319,
62,
33678,
28,
27530,
13,
28480,
62,
33991,
11,
198,
220,
220,
220,
220,
220,
220,
220,
9178,
28,
17821,
11,
9242,
28,
17821,
11,
198,
220,
220,
220,
220,
220,
220,
220,
3519,
62,
3672,
11639,
5363,
62,
29468,
62,
3524,
6,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
2607,
796,
4981,
13,
33616,
9218,
7,
198,
220,
220,
220,
220,
220,
220,
220,
4452,
33921,
11,
198,
220,
220,
220,
220,
220,
220,
220,
15942,
577,
62,
3672,
11639,
5189,
291,
1437,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
3519,
62,
3672,
11639,
5363,
62,
29468,
62,
23415,
62,
29305,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
9178,
28,
17821,
11,
9242,
28,
17821,
11,
198,
220,
220,
220,
220,
220,
220,
220,
319,
62,
33678,
28,
27530,
13,
28480,
62,
33991,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
5236,
796,
4981,
13,
46541,
15878,
7,
198,
220,
220,
220,
220,
220,
220,
220,
366,
50,
41476,
390,
8591,
269,
27792,
1600,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
15,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
318,
62,
5275,
796,
4981,
13,
46120,
13087,
15878,
7,
198,
220,
220,
220,
220,
220,
220,
220,
366,
34,
27792,
2191,
12151,
35379,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
17821,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
938,
62,
21084,
434,
62,
312,
796,
4981,
13,
46541,
15878,
7,
198,
220,
220,
220,
220,
220,
220,
220,
705,
312,
6184,
118,
2528,
25147,
1409,
320,
1153,
78,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
15,
198,
220,
220,
220,
1267,
198,
220,
220,
220,
318,
62,
20225,
796,
4981,
13,
46120,
13087,
15878,
7,
198,
220,
220,
220,
220,
220,
220,
220,
366,
34,
27792,
269,
8056,
4763,
35379,
198,
220,
220,
220,
220,
220,
220,
220,
4277,
28,
25101,
198,
220,
220,
220,
1267,
198
] | 2.215247 | 446 |
from monster import Monster
| [
6738,
9234,
1330,
12635,
628
] | 5.8 | 5 |
import dump_instance
| [
198,
198,
11748,
10285,
62,
39098,
628
] | 3.428571 | 7 |
"""twoject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.conf.urls import url, include
from twapp import views as twapp_views
from django.contrib.auth import views as auth_views
from rest_framework import routers
from rest_framework_simplejwt.views import TokenRefreshView
from knox import views as knox_views
from rest_framework.authtoken.views import obtain_auth_token
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('twapp.urls')),
path('auth/login/', twapp_views.LoginView.as_view(), name="login"),
path('auth/login/refresh/', TokenRefreshView.as_view(), name='login_refresh'),
path('auth/register/', twapp_views.RegisterView.as_view(), name='register'),
path('auth/logout/', knox_views.LogoutView.as_view(), name="logout"),
path('auth/logoutall/', knox_views.LogoutAllView.as_view(), name="logoutall"),
]
| [
37811,
11545,
752,
10289,
28373,
198,
198,
464,
4600,
6371,
33279,
82,
63,
1351,
11926,
32336,
284,
5009,
13,
1114,
517,
1321,
3387,
766,
25,
198,
220,
220,
220,
3740,
1378,
31628,
13,
28241,
648,
404,
305,
752,
13,
785,
14,
268,
14,
18,
13,
17,
14,
4852,
873,
14,
4023,
14,
6371,
82,
14,
198,
27730,
25,
198,
22203,
5009,
198,
220,
220,
220,
352,
13,
3060,
281,
1330,
25,
220,
422,
616,
62,
1324,
1330,
5009,
198,
220,
220,
220,
362,
13,
3060,
257,
10289,
284,
19016,
33279,
82,
25,
220,
3108,
10786,
3256,
5009,
13,
11195,
11,
1438,
11639,
11195,
11537,
198,
9487,
12,
3106,
5009,
198,
220,
220,
220,
352,
13,
3060,
281,
1330,
25,
220,
422,
584,
62,
1324,
13,
33571,
1330,
5995,
198,
220,
220,
220,
362,
13,
3060,
257,
10289,
284,
19016,
33279,
82,
25,
220,
3108,
10786,
3256,
5995,
13,
292,
62,
1177,
22784,
1438,
11639,
11195,
11537,
198,
818,
6360,
1194,
10289,
10414,
198,
220,
220,
220,
352,
13,
17267,
262,
2291,
3419,
2163,
25,
422,
42625,
14208,
13,
6371,
82,
1330,
2291,
11,
3108,
198,
220,
220,
220,
362,
13,
3060,
257,
10289,
284,
19016,
33279,
82,
25,
220,
3108,
10786,
14036,
14,
3256,
2291,
10786,
14036,
13,
6371,
82,
6,
4008,
198,
37811,
198,
6738,
42625,
14208,
13,
3642,
822,
1330,
13169,
198,
6738,
42625,
14208,
13,
6371,
82,
1330,
3108,
198,
6738,
42625,
14208,
13,
10414,
13,
6371,
82,
1330,
19016,
11,
2291,
198,
6738,
665,
1324,
1330,
5009,
355,
665,
1324,
62,
33571,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
1330,
5009,
355,
6284,
62,
33571,
198,
6738,
1334,
62,
30604,
1330,
41144,
198,
6738,
1334,
62,
30604,
62,
36439,
73,
46569,
13,
33571,
1330,
29130,
8134,
3447,
7680,
198,
6738,
638,
1140,
1330,
5009,
355,
638,
1140,
62,
33571,
198,
6738,
1334,
62,
30604,
13,
18439,
30001,
13,
33571,
1330,
7330,
62,
18439,
62,
30001,
628,
198,
6371,
33279,
82,
796,
685,
198,
220,
220,
220,
3108,
10786,
28482,
14,
3256,
13169,
13,
15654,
13,
6371,
82,
828,
198,
220,
220,
220,
3108,
10786,
3256,
2291,
10786,
4246,
1324,
13,
6371,
82,
11537,
828,
198,
220,
220,
220,
3108,
10786,
18439,
14,
38235,
14,
3256,
665,
1324,
62,
33571,
13,
47790,
7680,
13,
292,
62,
1177,
22784,
1438,
2625,
38235,
12340,
198,
220,
220,
220,
3108,
10786,
18439,
14,
38235,
14,
5420,
3447,
14,
3256,
29130,
8134,
3447,
7680,
13,
292,
62,
1177,
22784,
1438,
11639,
38235,
62,
5420,
3447,
33809,
198,
220,
220,
220,
3108,
10786,
18439,
14,
30238,
14,
3256,
665,
1324,
62,
33571,
13,
38804,
7680,
13,
292,
62,
1177,
22784,
1438,
11639,
30238,
33809,
198,
220,
220,
220,
3108,
10786,
18439,
14,
6404,
448,
14,
3256,
638,
1140,
62,
33571,
13,
11187,
448,
7680,
13,
292,
62,
1177,
22784,
1438,
2625,
6404,
448,
12340,
198,
220,
220,
220,
3108,
10786,
18439,
14,
6404,
448,
439,
14,
3256,
638,
1140,
62,
33571,
13,
11187,
448,
3237,
7680,
13,
292,
62,
1177,
22784,
1438,
2625,
6404,
448,
439,
12340,
198,
60,
198
] | 2.952941 | 510 |
'''
Date: 2022-01-11 16:05:39
LastEditors: Waterking
LastEditTime: 2022-01-12 18:21:49
FilePath: /stocknet-code/src/stat_logger.py
'''
#!/usr/local/bin/python
import metrics as metrics
from ConfigLoader import logger
| [
7061,
6,
198,
10430,
25,
33160,
12,
486,
12,
1157,
1467,
25,
2713,
25,
2670,
198,
5956,
18378,
669,
25,
5638,
3364,
198,
5956,
18378,
7575,
25,
33160,
12,
486,
12,
1065,
1248,
25,
2481,
25,
2920,
198,
8979,
15235,
25,
1220,
13578,
3262,
12,
8189,
14,
10677,
14,
14269,
62,
6404,
1362,
13,
9078,
198,
7061,
6,
198,
2,
48443,
14629,
14,
12001,
14,
8800,
14,
29412,
198,
11748,
20731,
355,
20731,
198,
6738,
17056,
17401,
1330,
49706,
628,
628
] | 2.716049 | 81 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# Copyright 2009 Red Hat, Inc -
# written by seth vidal skvidal at fedoraproject.org
import os
import sys
import fnmatch
import time
import yumbased
import shutil
from bz2 import BZ2File
from urlgrabber import grabber
import tempfile
import stat
import fcntl
import subprocess
from select import select
from yum import misc, Errors
from yum.repoMDObject import RepoMD, RepoData
from yum.sqlutils import executeSQL
from yum.packageSack import MetaSack
from yum.packages import YumAvailablePackage
import rpmUtils.transaction
from utils import _, errorprint, MDError, lzma, _available_compression
import readMetadata
try:
import sqlite3 as sqlite
except ImportError:
import sqlite
try:
import sqlitecachec
except ImportError:
pass
from utils import _gzipOpen, compressFile, compressOpen, checkAndMakeDir, GzipFile, \
checksum_and_rename, split_list_into_equal_chunks
from utils import num_cpus_online
import deltarpms
__version__ = '0.9.9'
class SplitMetaDataGenerator(MetaDataGenerator):
"""takes a series of dirs and creates repodata for all of them
most commonly used with -u media:// - if no outputdir is specified
it will create the repodata in the first dir in the list of dirs
"""
def doPkgMetadata(self):
"""all the heavy lifting for the package metadata"""
if len(self.conf.directories) == 1:
MetaDataGenerator.doPkgMetadata(self)
return
if self.conf.update:
self._setup_old_metadata_lookup()
filematrix = {}
for mydir in self.conf.directories:
if os.path.isabs(mydir):
thisdir = mydir
else:
if mydir.startswith('../'):
thisdir = os.path.realpath(mydir)
else:
thisdir = os.path.join(self.conf.basedir, mydir)
filematrix[mydir] = self.getFileList(thisdir, '.rpm')
# pkglist is a bit different for split media, as we have to know
# which dir. it belongs to. So we walk the dir. and then filter.
# We could be faster by not walking the dir. ... but meh.
if self.conf.pkglist:
pkglist = set(self.conf.pkglist)
pkgs = []
for fname in filematrix[mydir]:
if fname not in pkglist:
continue
pkgs.append(fname)
filematrix[mydir] = pkgs
self.trimRpms(filematrix[mydir])
self.pkgcount += len(filematrix[mydir])
mediano = 1
self.current_pkg = 0
self.conf.baseurl = self._getFragmentUrl(self.conf.baseurl, mediano)
try:
self.openMetadataDocs()
for mydir in self.conf.directories:
self.conf.baseurl = self._getFragmentUrl(self.conf.baseurl, mediano)
self.writeMetadataDocs(filematrix[mydir], mydir)
mediano += 1
self.conf.baseurl = self._getFragmentUrl(self.conf.baseurl, 1)
self.closeMetadataDocs()
except (IOError, OSError) as e:
raise MDError(_('Cannot access/write repodata files: %s') % e)
| [
2,
770,
1430,
318,
1479,
3788,
26,
345,
460,
17678,
4163,
340,
290,
14,
273,
13096,
198,
2,
340,
739,
262,
2846,
286,
262,
22961,
3611,
5094,
13789,
355,
3199,
416,
198,
2,
262,
3232,
10442,
5693,
26,
2035,
2196,
362,
286,
262,
13789,
11,
393,
198,
2,
357,
265,
534,
3038,
8,
597,
1568,
2196,
13,
198,
2,
198,
2,
770,
1430,
318,
9387,
287,
262,
2911,
326,
340,
481,
307,
4465,
11,
198,
2,
475,
42881,
15529,
34764,
56,
26,
1231,
772,
262,
17142,
18215,
286,
198,
2,
34482,
3398,
1565,
5603,
25382,
393,
376,
46144,
7473,
317,
16652,
2149,
37232,
33079,
48933,
13,
220,
4091,
262,
198,
2,
22961,
10074,
3611,
5094,
13789,
329,
517,
3307,
13,
198,
2,
198,
2,
921,
815,
423,
2722,
257,
4866,
286,
262,
22961,
3611,
5094,
13789,
198,
2,
1863,
351,
428,
1430,
26,
611,
407,
11,
3551,
284,
262,
3232,
10442,
198,
2,
5693,
11,
3457,
1539,
7863,
10857,
8474,
532,
26264,
25508,
11,
6182,
11,
8779,
7816,
16243,
12,
12952,
22,
11,
4916,
13,
198,
2,
15069,
3717,
220,
2297,
10983,
11,
3457,
532,
198,
2,
3194,
416,
900,
71,
410,
11624,
1341,
85,
11624,
379,
11672,
273,
499,
305,
752,
13,
2398,
198,
198,
11748,
28686,
198,
11748,
25064,
198,
11748,
24714,
15699,
198,
11748,
640,
198,
11748,
331,
2178,
839,
198,
11748,
4423,
346,
198,
6738,
220,
275,
89,
17,
1330,
347,
57,
17,
8979,
198,
6738,
19016,
32393,
527,
1330,
5552,
527,
198,
11748,
20218,
7753,
198,
11748,
1185,
198,
11748,
277,
66,
429,
75,
198,
11748,
850,
14681,
198,
6738,
2922,
1330,
2922,
198,
198,
6738,
331,
388,
1330,
12747,
11,
44225,
198,
6738,
331,
388,
13,
260,
7501,
12740,
10267,
1330,
1432,
78,
12740,
11,
1432,
78,
6601,
198,
6738,
331,
388,
13,
25410,
26791,
1330,
12260,
17861,
198,
6738,
331,
388,
13,
26495,
50,
441,
1330,
30277,
50,
441,
198,
6738,
331,
388,
13,
43789,
1330,
575,
388,
10493,
27813,
198,
198,
11748,
37542,
18274,
4487,
13,
7645,
2673,
198,
6738,
3384,
4487,
1330,
4808,
11,
4049,
4798,
11,
337,
7206,
81,
1472,
11,
300,
89,
2611,
11,
4808,
15182,
62,
5589,
2234,
198,
11748,
1100,
9171,
14706,
198,
28311,
25,
198,
220,
220,
220,
1330,
44161,
578,
18,
355,
44161,
578,
198,
16341,
17267,
12331,
25,
198,
220,
220,
220,
1330,
44161,
578,
198,
198,
28311,
25,
198,
220,
220,
220,
1330,
44161,
578,
23870,
66,
198,
16341,
17267,
12331,
25,
198,
220,
220,
220,
1208,
198,
198,
6738,
3384,
4487,
1330,
4808,
70,
13344,
11505,
11,
27413,
8979,
11,
27413,
11505,
11,
2198,
1870,
12050,
35277,
11,
402,
13344,
8979,
11,
3467,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
8794,
388,
62,
392,
62,
918,
480,
11,
6626,
62,
4868,
62,
20424,
62,
40496,
62,
354,
14125,
198,
6738,
3384,
4487,
1330,
997,
62,
13155,
385,
62,
25119,
198,
11748,
1619,
83,
5117,
907,
198,
198,
834,
9641,
834,
796,
705,
15,
13,
24,
13,
24,
6,
628,
628,
628,
198,
4871,
27758,
48526,
6601,
8645,
1352,
7,
48526,
6601,
8645,
1352,
2599,
198,
220,
220,
220,
37227,
83,
1124,
257,
2168,
286,
288,
17062,
290,
8075,
1128,
375,
1045,
329,
477,
286,
606,
198,
220,
220,
220,
220,
220,
220,
749,
8811,
973,
351,
532,
84,
2056,
1378,
532,
611,
645,
5072,
15908,
318,
7368,
198,
220,
220,
220,
220,
220,
220,
340,
481,
2251,
262,
1128,
375,
1045,
287,
262,
717,
26672,
287,
262,
1351,
286,
288,
17062,
198,
220,
220,
220,
220,
220,
220,
37227,
628,
220,
220,
220,
825,
466,
47,
10025,
9171,
14706,
7,
944,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
439,
262,
4334,
16842,
329,
262,
5301,
20150,
37811,
198,
220,
220,
220,
220,
220,
220,
220,
611,
18896,
7,
944,
13,
10414,
13,
12942,
1749,
8,
6624,
352,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
30277,
6601,
8645,
1352,
13,
4598,
47,
10025,
9171,
14706,
7,
944,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1441,
628,
220,
220,
220,
220,
220,
220,
220,
611,
2116,
13,
10414,
13,
19119,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13557,
40406,
62,
727,
62,
38993,
62,
5460,
929,
3419,
628,
220,
220,
220,
220,
220,
220,
220,
2393,
6759,
8609,
796,
23884,
198,
220,
220,
220,
220,
220,
220,
220,
329,
616,
15908,
287,
2116,
13,
10414,
13,
12942,
1749,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
28686,
13,
6978,
13,
271,
8937,
7,
1820,
15908,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
428,
15908,
796,
616,
15908,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
616,
15908,
13,
9688,
2032,
342,
10786,
40720,
6,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
428,
15908,
796,
28686,
13,
6978,
13,
5305,
6978,
7,
1820,
15908,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
428,
15908,
796,
28686,
13,
6978,
13,
22179,
7,
944,
13,
10414,
13,
3106,
343,
11,
616,
15908,
8,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2393,
6759,
8609,
58,
1820,
15908,
60,
796,
2116,
13,
1136,
8979,
8053,
7,
5661,
15908,
11,
45302,
48235,
11537,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
220,
279,
74,
4743,
396,
318,
257,
1643,
1180,
329,
6626,
2056,
11,
355,
356,
423,
284,
760,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
543,
26672,
13,
340,
14448,
284,
13,
1406,
356,
2513,
262,
26672,
13,
290,
788,
8106,
13,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
775,
714,
307,
5443,
416,
407,
6155,
262,
26672,
13,
2644,
475,
502,
71,
13,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
2116,
13,
10414,
13,
79,
74,
4743,
396,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
279,
74,
4743,
396,
796,
900,
7,
944,
13,
10414,
13,
79,
74,
4743,
396,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
279,
10025,
82,
796,
17635,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
277,
3672,
287,
2393,
6759,
8609,
58,
1820,
15908,
5974,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
277,
3672,
407,
287,
279,
74,
4743,
396,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2555,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
279,
10025,
82,
13,
33295,
7,
69,
3672,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2393,
6759,
8609,
58,
1820,
15908,
60,
796,
279,
10025,
82,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
2213,
320,
49,
79,
907,
7,
7753,
6759,
8609,
58,
1820,
15908,
12962,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
35339,
9127,
15853,
18896,
7,
7753,
6759,
8609,
58,
1820,
15908,
12962,
628,
220,
220,
220,
220,
220,
220,
220,
1117,
10115,
796,
352,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
14421,
62,
35339,
796,
657,
198,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
10414,
13,
8692,
6371,
796,
2116,
13557,
1136,
42974,
434,
28165,
7,
944,
13,
10414,
13,
8692,
6371,
11,
1117,
10115,
8,
198,
220,
220,
220,
220,
220,
220,
220,
1949,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
9654,
9171,
14706,
23579,
82,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
616,
15908,
287,
2116,
13,
10414,
13,
12942,
1749,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
10414,
13,
8692,
6371,
796,
2116,
13557,
1136,
42974,
434,
28165,
7,
944,
13,
10414,
13,
8692,
6371,
11,
1117,
10115,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
13564,
9171,
14706,
23579,
82,
7,
7753,
6759,
8609,
58,
1820,
15908,
4357,
616,
15908,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1117,
10115,
15853,
352,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
10414,
13,
8692,
6371,
796,
2116,
13557,
1136,
42974,
434,
28165,
7,
944,
13,
10414,
13,
8692,
6371,
11,
352,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
19836,
9171,
14706,
23579,
82,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
2845,
357,
9399,
12331,
11,
440,
5188,
81,
1472,
8,
355,
304,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5298,
337,
7206,
81,
1472,
28264,
10786,
34,
34574,
1895,
14,
13564,
1128,
375,
1045,
3696,
25,
4064,
82,
11537,
4064,
304,
8,
628,
198
] | 2.398414 | 1,639 |
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import Dataset, DataLoader
import json
def create_pretrain_mask(tokens, mask_cnt, vocab_list):
"""
masking subwords(15% of entire subwords)
- mask_cnt: len(subwords) * 0.15
- [MASK]: 80% of masking candidate token
- original token: 10% of masking candidate token
- another token: 10% of masking candidate token
"""
candidate_idx = []
## subwords in the same list augment a sementic word
## eg. [[0], [1], [2], [4, 5]] -> token_idx 4 + 5 is semantic word
# A list represent a sementic word
for i, token in enumerate(tokens):
if token == '[CLS]' or token == '[SEP]':
continue
if 0 < len(candidate_idx) and token.find(u'\u2581') < 0: # LOWER ONE EIGHTH BLOCK
# if 0 < len(candidate_idx) and token.find('_') < 0: # test code
candidate_idx[-1].append(i)
else:
candidate_idx.append([i])
np.random.shuffle(candidate_idx)
mask_lms = []
for idx_set in candidate_idx:
# check if len(mask_lms) exceeds threshold
if len(mask_lms) >= mask_cnt:
break
if len(mask_lms) + len(idx_set) > mask_cnt:
continue
## masking subwords with 15% probability
## mask_cnt is len(subwords) * 0.15
# iter subwords idx
for sub_idx in idx_set:
masked_token = None
### assign value to masked token: [MASK], original token, random token
# 80% of masking candidate are replaced with '[MASK]' token
if np.random.uniform() < 0.8:
masked_token = '[MASK]'
# remainng 20% of masking candidate
else:
# 10% of remaining preserve original token
if np.random.uniform() < 0.5:
masked_token = tokens[sub_idx]
# 10% of ones are replaced with rnadom token
else:
masked_token = np.random.choice(vocab_list)
### replace subword with masked_token value
mask_lms.append({'idx': sub_idx, 'label':tokens[sub_idx]})
tokens[sub_idx] = masked_token
mask_lms = sorted(mask_lms, key=lambda x: x['idx'])
mask_idx = [mask_dict['idx'] for mask_dict in mask_lms]
mask_label = [mask_dict['label'] for mask_dict in mask_lms]
# print(candidate_idx)
# print(mask_lms)
print(mask_idx, mask_label)
return tokens, mask_idx, k_label
def truncate_token(tokenA, tokenB, max_seq):
"""
truncate long sequence
"""
while True:
total_len = len(tokenA) + len(tokenB)
print('max token {}\ntotal_len {} = {} + {}'.format(max_seq, total_len, len(tokenA), len(tokenB)))
if total_len <= max_seq:
break
if len(tokenA) > len(tokenB):
tokenA.pop()
else:
tokenB.pop()
def create_pretrain_instances(paragraph_ls, paragraph_idx, paragraph, n_seq, mask_prob, vocab_list):
"""
create NSP train set
"""
# 3 special token: [CLS], [SEP] for sent A, [SEP] for sent B
max_seq_len = n_seq - 2 - 1
target_seq_len = max_seq_len # [CLS], segmentA, segmentA, ..., [SEP], segmentB, segmentB, ...
instances = []
temp_sentence = []
temp_sent_seq_length = 0 # num of tokens
max_num_tokens = 256
target_seq_len = np.random.randint(2, max_num_tokens) # min len of tokens
for i, sent in enumerate(paragraph):
## A. not the last sentence of the paragraph
temp_sentence.append(sent)
temp_sent_seq_length += len(sent)
## B. check if it is the last sentence of the paragraph
## or temp_sent_seq_length is longer than or equal to target_seq_len
if i == len(paragraph) - 1 or temp_sent_seq_length >= target_seq_len:
if temp_sentence:
## A. sentence A segment: from 0 to a_end
a_end = 1
if len(temp_sentence) != 1:
a_end = np.random.randint(1, len(temp_sentence))
# append the sentences to tokenA
# from the front to the back
tokenA = []
for _, s in enumerate(temp_sentence[:a_end]):
tokenA.extend(s)
## B. sentence B segment
tokenB = []
# A. Actual next
# is_next will be the label for NSP pretrain
if len(temp_sentence) > 1 and np.random.uniform() >= 0.5:
is_next = True
for j in range(a_end, len(temp_sentence)):
tokenB.extend(temp_sentence[j])
# B. random next
else:
is_next = False
tokenB_len = target_seq_len - len(tokenA)
random_para_idx = para_idx
while para_idx == random_para_idx:
random_para_idx = np.random.randint(0, len(paragraph_ls))
random_para = paragraph[random_para_idx]
random_start = np.random.randint(0, len(random_para))
for j in range(random_start, len(random_para)):
tokenB.extend(random_para[j])
truncate_token(tokenA, tokenB, max_seq)
assert 0 < len(tokenA)
assert 0 < len(tokenB)
tokens = ["[CLS]"] + tokenA + ["[SEP]"] + tokenB + ["[SEP]"]
segment = [0]*(len(tokenA) + 2) + [1]*(len(tokenB) + 1)
tokens, mask_idx, mask_label = \
create_pretrain_mask(tokens, int((len(tokens)-3)*mask_prob), vocab_list)
instance = {
'tokens': tokens,
'segment': segment,
'is_next': is_next,
'mask_idx': mask_idx,
'mask_label': mask_label
}
instances.append(instance)
# reset segment candidate
temp_sentence = []
temp_sent_seq_length = 0
return instances
def make_pretrain_data(vocab, in_file, out_file, count, n_seq, mask_prob):
"""
read text and return train data set format
"""
vocab_list = []
for id_ in range(vocab.get_piece_size()):
if not vocab.is_unknown(id_):
vocab_list.append(vocab.id_to_piece(id_))
paragraph_ls = []
with open(in_file, 'r') as in_f:
paragraph = []
for i, sent in enumerate(in_f):
sent = sent.strip()
## blank means end of the paragraph
if sent == '':
# if not the beggining of the paragraph
# it is the end of the paragraph
if 0 < len(paragraph):
paragraph_ls.append(paragraph)
paragraph = [] # generate new paragraph list
# check if exceeding 100 thaousand paragraphs
if 1e+5 < len(paragraph_ls):
break
## subwords in list is part of semantic token
# eg. ['▁지','미','▁카','터']
else:
pieces = vocab.encode_as_pieces(sent)
if 0 < len(pieces):
paragraph.append(pieces)
if paragraph:
paragraph_ls.append(paragraph)
# masking def: create_pretrain_mask
for index in range(count):
output = out_file.format(index)
# if os.path.isfile(output):
# continue
with open(output, 'w') as out_f:
for i, paragraph in enumerate(paragraph_ls):
masking_info = create_pretrain_instances(paragraph_ls, i, paragraph, n_seq, mask_prob, vocab_list)
for elem in masking_info:
out_f.write(json.dumps(elem))
out_f.write('\n')
class PretrainDataset(Dataset):
"""
eg. instance
{tokens:
['[CLS]', '▁지', ', '대학교', '를', '▁졸업', '하였다', '.', '▁그', '▁후', ...],
segment:
[0, 0, 0, 0, 0, 0, ..., 1, 1, 1],
is_next: True,
mask_idx:
[16, 21, ..., 41],
mask_label:
['▁192', '▁1', '일', '▁~', '는', ..., '▁조지', '법을']}
"""
def pretrain_collate_fn(inputs):
"""
padding batch
"""
labels_cls, labels_lm, inputs, segments = list(zip(*inputs))
labels_lm = torch.nn.utils.rnn.pad_sequence(labels_lm, batch_first=True, padding_value=-1)
inputs = torch.nn.utils.rnn.pad_sequence(inputs, batch_first=True, padding_value=0)
segments = torch.nn.utils.rnn.pad_sequence(segments, batch_first=True, padding_value=0)
batch = [
torch.stack(labels_cls, dim=0),
labels_lm,
inputs,
segments,
]
return batch | [
11748,
299,
32152,
355,
45941,
198,
11748,
28034,
198,
11748,
28034,
13,
20471,
355,
299,
77,
198,
11748,
28034,
13,
20471,
13,
45124,
355,
376,
198,
6738,
28034,
13,
26791,
13,
7890,
1330,
16092,
292,
316,
11,
6060,
17401,
198,
11748,
33918,
198,
198,
4299,
2251,
62,
5310,
3201,
62,
27932,
7,
83,
482,
641,
11,
9335,
62,
66,
429,
11,
12776,
397,
62,
4868,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
9335,
278,
850,
10879,
7,
1314,
4,
286,
2104,
850,
10879,
8,
198,
220,
220,
220,
532,
9335,
62,
66,
429,
25,
18896,
7,
7266,
10879,
8,
1635,
657,
13,
1314,
198,
220,
220,
220,
532,
685,
31180,
42,
5974,
4019,
4,
286,
9335,
278,
4540,
11241,
198,
220,
220,
220,
532,
2656,
11241,
25,
838,
4,
286,
9335,
278,
4540,
11241,
198,
220,
220,
220,
532,
1194,
11241,
25,
838,
4,
286,
9335,
278,
4540,
11241,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
4540,
62,
312,
87,
796,
17635,
628,
220,
220,
220,
22492,
850,
10879,
287,
262,
976,
1351,
35016,
257,
264,
972,
291,
1573,
220,
198,
220,
220,
220,
22492,
29206,
13,
16410,
15,
4357,
685,
16,
4357,
685,
17,
4357,
685,
19,
11,
642,
11907,
4613,
11241,
62,
312,
87,
604,
1343,
642,
318,
37865,
1573,
198,
220,
220,
220,
1303,
317,
1351,
2380,
257,
264,
972,
291,
1573,
198,
220,
220,
220,
329,
1312,
11,
11241,
287,
27056,
378,
7,
83,
482,
641,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
611,
11241,
6624,
44438,
5097,
50,
49946,
393,
11241,
6624,
44438,
5188,
47,
60,
10354,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2555,
198,
220,
220,
220,
220,
220,
220,
220,
611,
657,
1279,
18896,
7,
46188,
20540,
62,
312,
87,
8,
290,
11241,
13,
19796,
7,
84,
6,
59,
84,
1495,
6659,
11537,
1279,
657,
25,
1303,
220,
406,
36048,
16329,
412,
18060,
4221,
9878,
11290,
198,
2,
220,
220,
220,
220,
220,
220,
220,
611,
657,
1279,
18896,
7,
46188,
20540,
62,
312,
87,
8,
290,
11241,
13,
19796,
10786,
62,
11537,
1279,
657,
25,
1303,
220,
1332,
2438,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4540,
62,
312,
87,
58,
12,
16,
4083,
33295,
7,
72,
8,
198,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4540,
62,
312,
87,
13,
33295,
26933,
72,
12962,
198,
220,
220,
220,
45941,
13,
25120,
13,
1477,
18137,
7,
46188,
20540,
62,
312,
87,
8,
628,
220,
220,
220,
9335,
62,
75,
907,
796,
17635,
198,
220,
220,
220,
329,
4686,
87,
62,
2617,
287,
4540,
62,
312,
87,
25,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
2198,
611,
18896,
7,
27932,
62,
75,
907,
8,
21695,
11387,
198,
220,
220,
220,
220,
220,
220,
220,
611,
18896,
7,
27932,
62,
75,
907,
8,
18189,
9335,
62,
66,
429,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2270,
198,
220,
220,
220,
220,
220,
220,
220,
611,
18896,
7,
27932,
62,
75,
907,
8,
1343,
18896,
7,
312,
87,
62,
2617,
8,
1875,
9335,
62,
66,
429,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2555,
628,
220,
220,
220,
220,
220,
220,
220,
22492,
9335,
278,
850,
10879,
351,
1315,
4,
12867,
198,
220,
220,
220,
220,
220,
220,
220,
22492,
9335,
62,
66,
429,
318,
18896,
7,
7266,
10879,
8,
1635,
657,
13,
1314,
220,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
11629,
850,
10879,
4686,
87,
198,
220,
220,
220,
220,
220,
220,
220,
329,
850,
62,
312,
87,
287,
4686,
87,
62,
2617,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
29229,
62,
30001,
796,
6045,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
44386,
8333,
1988,
284,
29229,
11241,
25,
685,
31180,
42,
4357,
2656,
11241,
11,
4738,
11241,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
4019,
4,
286,
9335,
278,
4540,
389,
6928,
351,
44438,
31180,
42,
49946,
11241,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
45941,
13,
25120,
13,
403,
6933,
3419,
1279,
657,
13,
23,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
29229,
62,
30001,
796,
44438,
31180,
42,
49946,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
3520,
782,
1160,
4,
286,
9335,
278,
4540,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
838,
4,
286,
5637,
12201,
2656,
11241,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
45941,
13,
25120,
13,
403,
6933,
3419,
1279,
657,
13,
20,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
29229,
62,
30001,
796,
16326,
58,
7266,
62,
312,
87,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
838,
4,
286,
3392,
389,
6928,
351,
374,
77,
324,
296,
11241,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
29229,
62,
30001,
796,
45941,
13,
25120,
13,
25541,
7,
18893,
397,
62,
4868,
8,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
44386,
6330,
850,
4775,
351,
29229,
62,
30001,
1988,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
9335,
62,
75,
907,
13,
33295,
15090,
6,
312,
87,
10354,
850,
62,
312,
87,
11,
705,
18242,
10354,
83,
482,
641,
58,
7266,
62,
312,
87,
60,
30072,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
16326,
58,
7266,
62,
312,
87,
60,
796,
29229,
62,
30001,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
9335,
62,
75,
907,
796,
23243,
7,
27932,
62,
75,
907,
11,
1994,
28,
50033,
2124,
25,
2124,
17816,
312,
87,
6,
12962,
198,
220,
220,
220,
9335,
62,
312,
87,
796,
685,
27932,
62,
11600,
17816,
312,
87,
20520,
329,
9335,
62,
11600,
287,
9335,
62,
75,
907,
60,
198,
220,
220,
220,
9335,
62,
18242,
796,
685,
27932,
62,
11600,
17816,
18242,
20520,
329,
9335,
62,
11600,
287,
9335,
62,
75,
907,
60,
198,
2,
220,
220,
220,
220,
3601,
7,
46188,
20540,
62,
312,
87,
8,
198,
2,
220,
220,
220,
220,
3601,
7,
27932,
62,
75,
907,
8,
198,
220,
220,
220,
3601,
7,
27932,
62,
312,
87,
11,
9335,
62,
18242,
8,
198,
220,
220,
220,
1441,
16326,
11,
9335,
62,
312,
87,
11,
479,
62,
18242,
198,
198,
4299,
40122,
378,
62,
30001,
7,
30001,
32,
11,
11241,
33,
11,
3509,
62,
41068,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
40122,
378,
890,
8379,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
981,
6407,
25,
198,
220,
220,
220,
220,
220,
220,
220,
2472,
62,
11925,
796,
18896,
7,
30001,
32,
8,
1343,
18896,
7,
30001,
33,
8,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
10786,
9806,
11241,
23884,
59,
429,
4997,
62,
11925,
23884,
796,
23884,
1343,
23884,
4458,
18982,
7,
9806,
62,
41068,
11,
2472,
62,
11925,
11,
18896,
7,
30001,
32,
828,
18896,
7,
30001,
33,
22305,
198,
220,
220,
220,
220,
220,
220,
220,
611,
2472,
62,
11925,
19841,
3509,
62,
41068,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2270,
198,
220,
220,
220,
220,
220,
220,
220,
611,
18896,
7,
30001,
32,
8,
1875,
18896,
7,
30001,
33,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
11241,
32,
13,
12924,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
11241,
33,
13,
12924,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
4299,
2251,
62,
5310,
3201,
62,
8625,
1817,
7,
20360,
62,
7278,
11,
7322,
62,
312,
87,
11,
7322,
11,
299,
62,
41068,
11,
9335,
62,
1676,
65,
11,
12776,
397,
62,
4868,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
2251,
399,
4303,
4512,
900,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
1303,
513,
2041,
11241,
25,
685,
5097,
50,
4357,
685,
5188,
47,
60,
329,
1908,
317,
11,
685,
5188,
47,
60,
329,
1908,
347,
198,
220,
220,
220,
3509,
62,
41068,
62,
11925,
796,
299,
62,
41068,
532,
362,
532,
352,
198,
220,
220,
220,
2496,
62,
41068,
62,
11925,
796,
3509,
62,
41068,
62,
11925,
1303,
685,
5097,
50,
4357,
10618,
32,
11,
10618,
32,
11,
2644,
11,
685,
5188,
47,
4357,
10618,
33,
11,
10618,
33,
11,
2644,
628,
220,
220,
220,
10245,
796,
17635,
198,
220,
220,
220,
20218,
62,
34086,
594,
796,
17635,
198,
220,
220,
220,
20218,
62,
34086,
62,
41068,
62,
13664,
796,
657,
1303,
997,
286,
16326,
628,
220,
220,
220,
3509,
62,
22510,
62,
83,
482,
641,
796,
17759,
198,
220,
220,
220,
2496,
62,
41068,
62,
11925,
796,
45941,
13,
25120,
13,
25192,
600,
7,
17,
11,
3509,
62,
22510,
62,
83,
482,
641,
8,
1303,
949,
18896,
286,
16326,
198,
220,
220,
220,
329,
1312,
11,
1908,
287,
27056,
378,
7,
20360,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
22492,
317,
13,
407,
262,
938,
6827,
286,
262,
7322,
198,
220,
220,
220,
220,
220,
220,
220,
20218,
62,
34086,
594,
13,
33295,
7,
34086,
8,
198,
220,
220,
220,
220,
220,
220,
220,
20218,
62,
34086,
62,
41068,
62,
13664,
15853,
18896,
7,
34086,
8,
628,
220,
220,
220,
220,
220,
220,
220,
22492,
347,
13,
2198,
611,
340,
318,
262,
938,
6827,
286,
262,
7322,
198,
220,
220,
220,
220,
220,
220,
220,
22492,
393,
20218,
62,
34086,
62,
41068,
62,
13664,
318,
2392,
621,
393,
4961,
284,
2496,
62,
41068,
62,
11925,
198,
220,
220,
220,
220,
220,
220,
220,
611,
1312,
6624,
18896,
7,
20360,
8,
532,
352,
393,
20218,
62,
34086,
62,
41068,
62,
13664,
18189,
2496,
62,
41068,
62,
11925,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
20218,
62,
34086,
594,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
22492,
317,
13,
6827,
317,
10618,
25,
422,
657,
284,
257,
62,
437,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
257,
62,
437,
796,
352,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
18896,
7,
29510,
62,
34086,
594,
8,
14512,
352,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
257,
62,
437,
796,
45941,
13,
25120,
13,
25192,
600,
7,
16,
11,
18896,
7,
29510,
62,
34086,
594,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
24443,
262,
13439,
284,
11241,
32,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
422,
262,
2166,
284,
262,
736,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
11241,
32,
796,
17635,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
4808,
11,
264,
287,
27056,
378,
7,
29510,
62,
34086,
594,
58,
25,
64,
62,
437,
60,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
11241,
32,
13,
2302,
437,
7,
82,
8,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
22492,
347,
13,
6827,
347,
10618,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
11241,
33,
796,
17635,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
317,
13,
33520,
1306,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
318,
62,
19545,
481,
307,
262,
6167,
329,
399,
4303,
2181,
3201,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
18896,
7,
29510,
62,
34086,
594,
8,
1875,
352,
290,
45941,
13,
25120,
13,
403,
6933,
3419,
18189,
657,
13,
20,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
318,
62,
19545,
796,
6407,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
474,
287,
2837,
7,
64,
62,
437,
11,
18896,
7,
29510,
62,
34086,
594,
8,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
11241,
33,
13,
2302,
437,
7,
29510,
62,
34086,
594,
58,
73,
12962,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
347,
13,
4738,
1306,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
318,
62,
19545,
796,
10352,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
11241,
33,
62,
11925,
796,
2496,
62,
41068,
62,
11925,
532,
18896,
7,
30001,
32,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4738,
62,
1845,
64,
62,
312,
87,
796,
31215,
62,
312,
87,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
981,
31215,
62,
312,
87,
6624,
4738,
62,
1845,
64,
62,
312,
87,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4738,
62,
1845,
64,
62,
312,
87,
796,
45941,
13,
25120,
13,
25192,
600,
7,
15,
11,
18896,
7,
20360,
62,
7278,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4738,
62,
1845,
64,
796,
7322,
58,
25120,
62,
1845,
64,
62,
312,
87,
60,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4738,
62,
9688,
796,
45941,
13,
25120,
13,
25192,
600,
7,
15,
11,
18896,
7,
25120,
62,
1845,
64,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
474,
287,
2837,
7,
25120,
62,
9688,
11,
18896,
7,
25120,
62,
1845,
64,
8,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
11241,
33,
13,
2302,
437,
7,
25120,
62,
1845,
64,
58,
73,
12962,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
40122,
378,
62,
30001,
7,
30001,
32,
11,
11241,
33,
11,
3509,
62,
41068,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6818,
657,
1279,
18896,
7,
30001,
32,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6818,
657,
1279,
18896,
7,
30001,
33,
8,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
16326,
796,
14631,
58,
5097,
50,
60,
8973,
1343,
11241,
32,
1343,
14631,
58,
5188,
47,
60,
8973,
1343,
11241,
33,
1343,
14631,
58,
5188,
47,
60,
8973,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
10618,
796,
685,
15,
60,
9,
7,
11925,
7,
30001,
32,
8,
220,
1343,
362,
8,
1343,
685,
16,
60,
9,
7,
11925,
7,
30001,
33,
8,
1343,
352,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
16326,
11,
9335,
62,
312,
87,
11,
9335,
62,
18242,
796,
3467,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2251,
62,
5310,
3201,
62,
27932,
7,
83,
482,
641,
11,
493,
19510,
11925,
7,
83,
482,
641,
13219,
18,
27493,
27932,
62,
1676,
65,
828,
12776,
397,
62,
4868,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4554,
796,
1391,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
705,
83,
482,
641,
10354,
16326,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
705,
325,
5154,
10354,
10618,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
705,
271,
62,
19545,
10354,
318,
62,
19545,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
705,
27932,
62,
312,
87,
10354,
9335,
62,
312,
87,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
705,
27932,
62,
18242,
10354,
9335,
62,
18242,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1782,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
10245,
13,
33295,
7,
39098,
8,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
13259,
10618,
4540,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
20218,
62,
34086,
594,
796,
17635,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
20218,
62,
34086,
62,
41068,
62,
13664,
796,
657,
198,
220,
220,
220,
220,
198,
220,
220,
220,
1441,
10245,
198,
198,
4299,
787,
62,
5310,
3201,
62,
7890,
7,
18893,
397,
11,
287,
62,
7753,
11,
503,
62,
7753,
11,
954,
11,
299,
62,
41068,
11,
9335,
62,
1676,
65,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
1100,
2420,
290,
1441,
4512,
1366,
900,
5794,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
12776,
397,
62,
4868,
796,
17635,
198,
220,
220,
220,
329,
4686,
62,
287,
2837,
7,
18893,
397,
13,
1136,
62,
12239,
62,
7857,
3419,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
611,
407,
12776,
397,
13,
271,
62,
34680,
7,
312,
62,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
12776,
397,
62,
4868,
13,
33295,
7,
18893,
397,
13,
312,
62,
1462,
62,
12239,
7,
312,
62,
4008,
198,
220,
220,
220,
220,
198,
220,
220,
220,
7322,
62,
7278,
796,
17635,
198,
220,
220,
220,
351,
1280,
7,
259,
62,
7753,
11,
705,
81,
11537,
355,
287,
62,
69,
25,
198,
220,
220,
220,
220,
220,
220,
220,
7322,
796,
17635,
198,
220,
220,
220,
220,
220,
220,
220,
329,
1312,
11,
1908,
287,
27056,
378,
7,
259,
62,
69,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1908,
796,
1908,
13,
36311,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
22492,
9178,
1724,
886,
286,
262,
7322,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
1908,
6624,
10148,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
611,
407,
262,
44887,
3191,
286,
262,
7322,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
340,
318,
262,
886,
286,
262,
7322,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
657,
1279,
18896,
7,
20360,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
7322,
62,
7278,
13,
33295,
7,
20360,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
7322,
796,
17635,
1303,
7716,
649,
7322,
1351,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
2198,
611,
23353,
1802,
28110,
29910,
23549,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
352,
68,
10,
20,
1279,
18896,
7,
20360,
62,
7278,
2599,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2270,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
22492,
850,
10879,
287,
1351,
318,
636,
286,
37865,
11241,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
29206,
13,
37250,
5008,
223,
168,
100,
222,
41707,
167,
107,
116,
41707,
5008,
223,
168,
117,
112,
41707,
169,
226,
108,
20520,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
5207,
796,
12776,
397,
13,
268,
8189,
62,
292,
62,
34154,
7,
34086,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
657,
1279,
18896,
7,
34154,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
7322,
13,
33295,
7,
34154,
8,
198,
220,
220,
220,
220,
220,
220,
220,
611,
7322,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
7322,
62,
7278,
13,
33295,
7,
20360,
8,
198,
220,
220,
220,
1303,
9335,
278,
825,
25,
2251,
62,
5310,
3201,
62,
27932,
198,
220,
220,
220,
329,
6376,
287,
2837,
7,
9127,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
5072,
796,
503,
62,
7753,
13,
18982,
7,
9630,
8,
198,
2,
220,
220,
220,
220,
220,
220,
220,
220,
611,
28686,
13,
6978,
13,
4468,
576,
7,
22915,
2599,
198,
2,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2555,
198,
220,
220,
220,
220,
220,
220,
220,
351,
1280,
7,
22915,
11,
705,
86,
11537,
355,
503,
62,
69,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
1312,
11,
7322,
287,
27056,
378,
7,
20360,
62,
7278,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
9335,
278,
62,
10951,
796,
2251,
62,
5310,
3201,
62,
8625,
1817,
7,
20360,
62,
7278,
11,
1312,
11,
7322,
11,
299,
62,
41068,
11,
9335,
62,
1676,
65,
11,
12776,
397,
62,
4868,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
9766,
76,
287,
9335,
278,
62,
10951,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
503,
62,
69,
13,
13564,
7,
17752,
13,
67,
8142,
7,
68,
10671,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
503,
62,
69,
13,
13564,
10786,
59,
77,
11537,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
198,
4871,
37123,
3201,
27354,
292,
316,
7,
27354,
292,
316,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
29206,
13,
4554,
198,
220,
220,
220,
1391,
83,
482,
641,
25,
198,
220,
220,
220,
220,
220,
220,
220,
37250,
58,
5097,
50,
60,
3256,
705,
5008,
223,
168,
100,
222,
3256,
46083,
705,
167,
234,
222,
47991,
247,
166,
113,
238,
3256,
705,
167,
98,
120,
3256,
705,
5008,
223,
168,
94,
116,
168,
245,
227,
3256,
705,
47991,
246,
168,
246,
222,
46695,
97,
3256,
705,
2637,
11,
705,
5008,
223,
166,
115,
116,
3256,
705,
5008,
223,
169,
249,
226,
3256,
2644,
4357,
198,
220,
220,
220,
10618,
25,
198,
220,
220,
220,
220,
220,
220,
220,
685,
15,
11,
657,
11,
657,
11,
657,
11,
657,
11,
657,
11,
2644,
11,
352,
11,
352,
11,
352,
4357,
198,
220,
220,
220,
318,
62,
19545,
25,
6407,
11,
198,
220,
220,
220,
9335,
62,
312,
87,
25,
220,
198,
220,
220,
220,
220,
220,
220,
220,
685,
1433,
11,
2310,
11,
2644,
11,
6073,
4357,
198,
220,
220,
220,
9335,
62,
18242,
25,
198,
220,
220,
220,
220,
220,
220,
220,
37250,
5008,
223,
17477,
3256,
705,
5008,
223,
16,
3256,
705,
35975,
120,
3256,
705,
5008,
223,
93,
3256,
705,
167,
232,
242,
3256,
2644,
11,
705,
5008,
223,
168,
94,
108,
168,
100,
222,
3256,
705,
167,
110,
243,
35975,
226,
20520,
92,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
198,
4299,
2181,
3201,
62,
26000,
378,
62,
22184,
7,
15414,
82,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
24511,
15458,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
14722,
62,
565,
82,
11,
14722,
62,
75,
76,
11,
17311,
11,
17894,
796,
1351,
7,
13344,
46491,
15414,
82,
4008,
198,
220,
220,
220,
14722,
62,
75,
76,
796,
28034,
13,
20471,
13,
26791,
13,
81,
20471,
13,
15636,
62,
43167,
7,
23912,
1424,
62,
75,
76,
11,
15458,
62,
11085,
28,
17821,
11,
24511,
62,
8367,
10779,
16,
8,
198,
220,
220,
220,
17311,
796,
28034,
13,
20471,
13,
26791,
13,
81,
20471,
13,
15636,
62,
43167,
7,
15414,
82,
11,
15458,
62,
11085,
28,
17821,
11,
24511,
62,
8367,
28,
15,
8,
198,
220,
220,
220,
17894,
796,
28034,
13,
20471,
13,
26791,
13,
81,
20471,
13,
15636,
62,
43167,
7,
325,
11726,
11,
15458,
62,
11085,
28,
17821,
11,
24511,
62,
8367,
28,
15,
8,
198,
220,
220,
220,
220,
198,
220,
220,
220,
15458,
796,
685,
198,
220,
220,
220,
220,
220,
220,
220,
28034,
13,
25558,
7,
23912,
1424,
62,
565,
82,
11,
5391,
28,
15,
828,
198,
220,
220,
220,
220,
220,
220,
220,
14722,
62,
75,
76,
11,
198,
220,
220,
220,
220,
220,
220,
220,
17311,
11,
198,
220,
220,
220,
220,
220,
220,
220,
17894,
11,
198,
220,
220,
220,
2361,
198,
220,
220,
220,
1441,
15458
] | 1.926819 | 4,646 |
from matscholar import Rester
import bson
import tqdm
import os
import pymongo
client = pymongo.MongoClient('mongodb+srv://%s:%[email protected]/test:27017' %
(os.getenv('ATLAS_USER_RW'), os.getenv('ATLAS_USER_PASSWORD_RW')), authSource='admin')
db = client['matstract_db']
c = db.MRS_abstracts
LIMIT = 0
rester = Rester()
print(c.count_documents({}, limit=5))
for d in tqdm.tqdm(c.find({}, limit=LIMIT)):
id = bson.ObjectId(d["_id"])
suggestions = rester.get_journal_suggestion(abstract=d["abstract"])
# print(d)
c.update({"_id": id}, {"$set": {"journal_suggestions": suggestions}})
# print(d["abstract"])
# print(suggestions)
# print("-----------\n\n\n\n")
| [
6738,
46054,
354,
6192,
1330,
1874,
353,
198,
11748,
275,
1559,
198,
11748,
256,
80,
36020,
198,
11748,
28686,
198,
11748,
279,
4948,
25162,
198,
198,
16366,
796,
279,
4948,
25162,
13,
44,
25162,
11792,
10786,
31059,
375,
65,
10,
27891,
85,
1378,
4,
82,
25,
4,
82,
31,
6759,
8709,
12,
74,
303,
3901,
13,
31059,
375,
65,
13,
3262,
14,
9288,
25,
1983,
29326,
6,
4064,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
357,
418,
13,
1136,
24330,
10786,
1404,
43,
1921,
62,
29904,
62,
46747,
33809,
28686,
13,
1136,
24330,
10786,
1404,
43,
1921,
62,
29904,
62,
47924,
54,
12532,
62,
46747,
11537,
828,
6284,
7416,
11639,
28482,
11537,
198,
9945,
796,
5456,
17816,
6759,
8709,
62,
9945,
20520,
198,
66,
796,
20613,
13,
44,
6998,
62,
397,
8709,
82,
198,
198,
43,
3955,
2043,
796,
657,
198,
260,
1706,
796,
1874,
353,
3419,
198,
198,
4798,
7,
66,
13,
9127,
62,
15390,
2886,
15090,
5512,
4179,
28,
20,
4008,
198,
198,
1640,
288,
287,
256,
80,
36020,
13,
83,
80,
36020,
7,
66,
13,
19796,
15090,
5512,
4179,
28,
43,
3955,
2043,
8,
2599,
198,
220,
220,
220,
4686,
796,
275,
1559,
13,
10267,
7390,
7,
67,
14692,
62,
312,
8973,
8,
198,
220,
220,
220,
11776,
796,
1334,
263,
13,
1136,
62,
24891,
62,
47811,
295,
7,
397,
8709,
28,
67,
14692,
397,
8709,
8973,
8,
198,
220,
220,
220,
1303,
3601,
7,
67,
8,
198,
220,
220,
220,
269,
13,
19119,
7,
4895,
62,
312,
1298,
4686,
5512,
19779,
3,
2617,
1298,
19779,
24891,
62,
47811,
507,
1298,
11776,
11709,
8,
628,
220,
220,
220,
1303,
3601,
7,
67,
14692,
397,
8709,
8973,
8,
198,
220,
220,
220,
1303,
3601,
7,
47811,
507,
8,
198,
220,
220,
220,
1303,
3601,
7203,
32284,
59,
77,
59,
77,
59,
77,
59,
77,
4943,
628,
198
] | 2.220544 | 331 |
#!/usr/local/bin/python
# -*-: coding utf-8 -*-
""" Snips core and nlu server. """
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from rasa_core.agent import Agent
import os
import os.path
import re
from rasa_core.domain import TemplateDomain
from rasa_core.featurizers import Featurizer
from rasa_core.interpreter import NaturalLanguageInterpreter
from rasa_core.policies.ensemble import PolicyEnsemble
from rasa_core.utils import read_yaml_file
from rasa_core.policies.keras_policy import KerasPolicy
from rasa_core.policies.memoization import MemoizationPolicy
from rasa_nlu.utils.md_to_json import MarkdownToJson
from rasa_nlu.utils.md_to_json import comment_regex,synonym_regex,intent_regex,INTENT_PARSING_STATE,SYNONYM_PARSING_STATE
# Customised Agent class to use custom SnipsDomain and pass core server through to the Domain for scope access
# Customised Domain to allow reference to core server for access to sessionId and other server scope.
| [
2,
48443,
14629,
14,
12001,
14,
8800,
14,
29412,
198,
2,
532,
9,
12,
25,
19617,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
5489,
2419,
4755,
290,
299,
2290,
4382,
13,
37227,
198,
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
198,
6738,
11593,
37443,
834,
1330,
7297,
198,
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
198,
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
198,
6738,
374,
15462,
62,
7295,
13,
25781,
1330,
15906,
198,
11748,
28686,
198,
11748,
28686,
13,
6978,
198,
11748,
302,
198,
198,
6738,
374,
15462,
62,
7295,
13,
27830,
1330,
37350,
43961,
198,
6738,
374,
15462,
62,
7295,
13,
5036,
2541,
11341,
1330,
5452,
2541,
7509,
198,
6738,
374,
15462,
62,
7295,
13,
3849,
3866,
353,
1330,
12068,
32065,
9492,
3866,
353,
198,
6738,
374,
15462,
62,
7295,
13,
79,
4160,
444,
13,
1072,
11306,
1330,
220,
7820,
4834,
15140,
198,
6738,
374,
15462,
62,
7295,
13,
26791,
1330,
1100,
62,
88,
43695,
62,
7753,
198,
6738,
374,
15462,
62,
7295,
13,
79,
4160,
444,
13,
6122,
292,
62,
30586,
1330,
17337,
292,
36727,
198,
6738,
374,
15462,
62,
7295,
13,
79,
4160,
444,
13,
11883,
78,
1634,
1330,
4942,
78,
1634,
36727,
198,
198,
6738,
374,
15462,
62,
77,
2290,
13,
26791,
13,
9132,
62,
1462,
62,
17752,
1330,
2940,
2902,
2514,
41,
1559,
198,
6738,
374,
15462,
62,
77,
2290,
13,
26791,
13,
9132,
62,
1462,
62,
17752,
1330,
2912,
62,
260,
25636,
11,
28869,
5177,
62,
260,
25636,
11,
48536,
62,
260,
25636,
11,
12394,
3525,
62,
27082,
50,
2751,
62,
44724,
11,
23060,
45,
40508,
44,
62,
27082,
50,
2751,
62,
44724,
628,
198,
2,
8562,
1417,
15906,
1398,
284,
779,
2183,
5489,
2419,
43961,
290,
1208,
4755,
4382,
832,
284,
262,
20021,
329,
8354,
1895,
198,
198,
2,
8562,
1417,
20021,
284,
1249,
4941,
284,
4755,
4382,
329,
1895,
284,
6246,
7390,
290,
584,
4382,
8354,
13,
220,
220,
220,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
198
] | 3.177515 | 338 |
from getratings.models.ratings import Ratings
| [
6738,
651,
10366,
654,
13,
27530,
13,
10366,
654,
1330,
36826,
201,
198,
201,
198
] | 3.266667 | 15 |
# https://machinelearningmastery.com/predict-sentiment-movie-reviews-using-deep-learning/
import os
import numpy as np
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.utils import to_categorical
from keras.callbacks import EarlyStopping, TensorBoard, ModelCheckpoint
from keras.layers import Embedding, Conv1D, MaxPooling1D, Flatten, Dense, Input, Dropout
from keras.models import Model
import matplotlib.pyplot as plt
from keras.layers import LSTM, Bidirectional
import pickle
do_early_stopping = True
# top words to be considered in Tokenizer
NUM_WORDS = 20000
# Length of phrases for padding if shorter or cropping if longer
MAX_SEQUENCE_LENGTH = 500
EMBEDDING_DIM = 300
# preparing train-set from text data
train_text = np.load('Res/train_text.npy')
train_label = np.load('Res/train_label.npy')
print('TrainSet is composed of %s texts.' % len(train_text))
# preparing test-set from text data
test_text = np.load('Res/test_text.npy')
test_label = np.load('Res/test_label.npy')
print('TestSet is composed of %s texts.' % len(test_text))
# Formatting text samples and labels in tensors.
with open('Res/tokenizer.pickle', 'rb') as handle:
tokenizer = pickle.load(handle)
train_sequences = tokenizer.texts_to_sequences(train_text) # Splits words by space (split=” “), Filters out punctuation, Converts text to lowercase. For each text returns a list of integers (same words a codified by same integer)
test_sequences = tokenizer.texts_to_sequences(test_text)
word_index = tokenizer.word_index # dictionary mapping words (str) to their index starting from 0 (int)
print('Found %s unique tokens.' % len(word_index))
train_data = pad_sequences(train_sequences, maxlen=MAX_SEQUENCE_LENGTH) # each element of sequences is cropped or padded to reach maxlen
test_data = pad_sequences(test_sequences, maxlen=MAX_SEQUENCE_LENGTH)
train_label = np.asarray(train_label)
test_label = np.asarray(test_label)
print('Shape of data tensor:', train_data.shape)
#shuffle dataset
indices = np.arange(train_data.shape[0])
np.random.shuffle(indices)
train_data = train_data[indices]
train_label = train_label[indices]
# split the data into a training set and a validation set
num_validation_samples = int(0.1 * train_data.shape[0])
x_train = train_data[:-num_validation_samples]
y_train = train_label[:-num_validation_samples]
x_val = train_data[-num_validation_samples:]
y_val = train_label[-num_validation_samples:]
x_test = test_data
y_test = test_label
embedding_matrix = np.load('Res/embedding_matrix.npy')
#All that the Embedding layer does is to map the integer inputs to the vectors found at the corresponding index in the embedding matrix, i.e. the sequence [1, 2] would be converted to [embeddings[1], embeddings[2]]. This means that the output of the Embedding layer will be a 3D tensor of shape (samples, sequence_length, embedding_dim).
sequence_input = Input(shape=(MAX_SEQUENCE_LENGTH,), dtype='int32')
embedding_layer = Embedding(len(word_index)+1, EMBEDDING_DIM, weights=[embedding_matrix], input_length=MAX_SEQUENCE_LENGTH, trainable=False)
x = embedding_layer(sequence_input)
x = Dropout(0.3)(x)
x = Bidirectional(LSTM(100))(x)
x = Dropout(0.3)(x)
prob = Dense(1, activation='sigmoid')(x)
model = Model(sequence_input, prob)
model.compile(loss='binary_crossentropy',optimizer='adam', metrics=['accuracy'])
tensorboard = TensorBoard(log_dir='./GraphLSTM', histogram_freq=0, write_graph=True)
print('model compiled')
print(model.summary())
early_stopping = EarlyStopping(monitor='val_loss', patience = 2, mode = 'min')
cp = ModelCheckpoint('ModelBLSTM.h5', monitor='val_acc', save_best_only=True, mode='max')
if do_early_stopping:
print('using early stopping strategy')
history = model.fit(x_train, y_train, validation_data=(x_val, y_val), epochs=4, batch_size=128, callbacks = [early_stopping, tensorboard])
else:
history = model.fit(x_train, y_train, validation_data=(x_val, y_val), epochs=8, batch_size=128)
loss, acc = model.evaluate(x_test, y_test)
print("loss: "+str(loss))
print("accuracy: "+str(acc))
model.save('my_model3.h5')
plotting(history)
| [
198,
2,
3740,
1378,
30243,
40684,
9866,
88,
13,
785,
14,
79,
17407,
12,
34086,
3681,
12,
41364,
12,
19023,
82,
12,
3500,
12,
22089,
12,
40684,
14,
198,
11748,
28686,
198,
11748,
299,
32152,
355,
45941,
198,
198,
6738,
41927,
292,
13,
3866,
36948,
13,
5239,
1330,
29130,
7509,
198,
6738,
41927,
292,
13,
3866,
36948,
13,
43167,
1330,
14841,
62,
3107,
3007,
198,
6738,
41927,
292,
13,
26791,
1330,
284,
62,
66,
2397,
12409,
198,
6738,
41927,
292,
13,
13345,
10146,
1330,
12556,
1273,
33307,
11,
309,
22854,
29828,
11,
9104,
9787,
4122,
198,
198,
6738,
41927,
292,
13,
75,
6962,
1330,
13302,
6048,
278,
11,
34872,
16,
35,
11,
5436,
27201,
278,
16,
35,
11,
1610,
41769,
11,
360,
1072,
11,
23412,
11,
14258,
448,
198,
6738,
41927,
292,
13,
27530,
1330,
9104,
198,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
198,
6738,
41927,
292,
13,
75,
6962,
1330,
406,
2257,
44,
11,
43484,
4154,
282,
628,
198,
11748,
2298,
293,
628,
198,
4598,
62,
11458,
62,
301,
33307,
796,
6407,
198,
2,
1353,
2456,
284,
307,
3177,
287,
29130,
7509,
198,
41359,
62,
45359,
5258,
796,
939,
405,
198,
198,
2,
22313,
286,
20144,
329,
24511,
611,
12238,
393,
6763,
2105,
611,
2392,
198,
22921,
62,
5188,
10917,
18310,
62,
43,
49494,
796,
5323,
198,
198,
3620,
33,
1961,
35,
2751,
62,
35,
3955,
796,
5867,
198,
198,
2,
10629,
4512,
12,
2617,
422,
2420,
1366,
198,
27432,
62,
5239,
796,
45941,
13,
2220,
10786,
4965,
14,
27432,
62,
5239,
13,
77,
9078,
11537,
198,
27432,
62,
18242,
796,
45941,
13,
2220,
10786,
4965,
14,
27432,
62,
18242,
13,
77,
9078,
11537,
198,
198,
4798,
10786,
44077,
7248,
318,
13160,
286,
4064,
82,
13399,
2637,
4064,
18896,
7,
27432,
62,
5239,
4008,
198,
198,
2,
10629,
1332,
12,
2617,
422,
2420,
1366,
198,
9288,
62,
5239,
796,
45941,
13,
2220,
10786,
4965,
14,
9288,
62,
5239,
13,
77,
9078,
11537,
198,
9288,
62,
18242,
796,
45941,
13,
2220,
10786,
4965,
14,
9288,
62,
18242,
13,
77,
9078,
11537,
198,
198,
4798,
10786,
14402,
7248,
318,
13160,
286,
4064,
82,
13399,
2637,
4064,
18896,
7,
9288,
62,
5239,
4008,
198,
198,
2,
18980,
889,
2420,
8405,
290,
14722,
287,
11192,
669,
13,
198,
4480,
1280,
10786,
4965,
14,
30001,
7509,
13,
27729,
293,
3256,
705,
26145,
11537,
355,
5412,
25,
198,
220,
220,
220,
11241,
7509,
796,
2298,
293,
13,
2220,
7,
28144,
8,
198,
198,
27432,
62,
3107,
3007,
796,
11241,
7509,
13,
5239,
82,
62,
1462,
62,
3107,
3007,
7,
27432,
62,
5239,
8,
1303,
13341,
896,
2456,
416,
2272,
357,
35312,
28,
447,
251,
564,
250,
828,
7066,
1010,
503,
21025,
2288,
11,
1482,
24040,
2420,
284,
2793,
7442,
13,
1114,
1123,
2420,
5860,
257,
1351,
286,
37014,
357,
31642,
2456,
257,
14873,
1431,
416,
976,
18253,
8,
198,
198,
9288,
62,
3107,
3007,
796,
11241,
7509,
13,
5239,
82,
62,
1462,
62,
3107,
3007,
7,
9288,
62,
5239,
8,
198,
4775,
62,
9630,
796,
11241,
7509,
13,
4775,
62,
9630,
1303,
22155,
16855,
2456,
357,
2536,
8,
284,
511,
6376,
3599,
422,
657,
357,
600,
8,
198,
4798,
10786,
21077,
4064,
82,
3748,
16326,
2637,
4064,
18896,
7,
4775,
62,
9630,
4008,
198,
198,
27432,
62,
7890,
796,
14841,
62,
3107,
3007,
7,
27432,
62,
3107,
3007,
11,
3509,
11925,
28,
22921,
62,
5188,
10917,
18310,
62,
43,
49494,
8,
1303,
1123,
5002,
286,
16311,
318,
48998,
393,
44582,
284,
3151,
3509,
11925,
1849,
198,
9288,
62,
7890,
796,
14841,
62,
3107,
3007,
7,
9288,
62,
3107,
3007,
11,
3509,
11925,
28,
22921,
62,
5188,
10917,
18310,
62,
43,
49494,
8,
198,
198,
27432,
62,
18242,
796,
45941,
13,
292,
18747,
7,
27432,
62,
18242,
8,
198,
9288,
62,
18242,
796,
45941,
13,
292,
18747,
7,
9288,
62,
18242,
8,
198,
4798,
10786,
33383,
286,
1366,
11192,
273,
25,
3256,
4512,
62,
7890,
13,
43358,
8,
198,
198,
2,
1477,
18137,
27039,
198,
521,
1063,
796,
45941,
13,
283,
858,
7,
27432,
62,
7890,
13,
43358,
58,
15,
12962,
198,
37659,
13,
25120,
13,
1477,
18137,
7,
521,
1063,
8,
198,
27432,
62,
7890,
796,
4512,
62,
7890,
58,
521,
1063,
60,
198,
27432,
62,
18242,
796,
4512,
62,
18242,
58,
521,
1063,
60,
198,
198,
2,
6626,
262,
1366,
656,
257,
3047,
900,
290,
257,
21201,
900,
198,
198,
22510,
62,
12102,
341,
62,
82,
12629,
796,
493,
7,
15,
13,
16,
1635,
4512,
62,
7890,
13,
43358,
58,
15,
12962,
198,
198,
87,
62,
27432,
796,
4512,
62,
7890,
58,
21912,
22510,
62,
12102,
341,
62,
82,
12629,
60,
198,
88,
62,
27432,
796,
4512,
62,
18242,
58,
21912,
22510,
62,
12102,
341,
62,
82,
12629,
60,
198,
198,
87,
62,
2100,
796,
4512,
62,
7890,
58,
12,
22510,
62,
12102,
341,
62,
82,
12629,
47715,
198,
88,
62,
2100,
796,
4512,
62,
18242,
58,
12,
22510,
62,
12102,
341,
62,
82,
12629,
47715,
198,
198,
87,
62,
9288,
796,
1332,
62,
7890,
198,
88,
62,
9288,
796,
1332,
62,
18242,
628,
198,
20521,
12083,
62,
6759,
8609,
796,
45941,
13,
2220,
10786,
4965,
14,
20521,
12083,
62,
6759,
8609,
13,
77,
9078,
11537,
628,
198,
2,
3237,
326,
262,
13302,
6048,
278,
7679,
857,
318,
284,
3975,
262,
18253,
17311,
284,
262,
30104,
1043,
379,
262,
11188,
6376,
287,
262,
11525,
12083,
17593,
11,
1312,
13,
68,
13,
262,
8379,
685,
16,
11,
362,
60,
561,
307,
11513,
284,
685,
20521,
67,
654,
58,
16,
4357,
11525,
67,
654,
58,
17,
60,
4083,
770,
1724,
326,
262,
5072,
286,
262,
13302,
6048,
278,
7679,
481,
307,
257,
513,
35,
11192,
273,
286,
5485,
357,
82,
12629,
11,
8379,
62,
13664,
11,
11525,
12083,
62,
27740,
737,
198,
198,
43167,
62,
15414,
796,
23412,
7,
43358,
16193,
22921,
62,
5188,
10917,
18310,
62,
43,
49494,
11,
828,
288,
4906,
11639,
600,
2624,
11537,
198,
198,
20521,
12083,
62,
29289,
796,
13302,
6048,
278,
7,
11925,
7,
4775,
62,
9630,
47762,
16,
11,
412,
10744,
1961,
35,
2751,
62,
35,
3955,
11,
19590,
41888,
20521,
12083,
62,
6759,
8609,
4357,
5128,
62,
13664,
28,
22921,
62,
5188,
10917,
18310,
62,
43,
49494,
11,
4512,
540,
28,
25101,
8,
198,
198,
87,
796,
11525,
12083,
62,
29289,
7,
43167,
62,
15414,
8,
198,
87,
796,
14258,
448,
7,
15,
13,
18,
5769,
87,
8,
198,
87,
796,
43484,
4154,
282,
7,
43,
2257,
44,
7,
3064,
4008,
7,
87,
8,
198,
87,
796,
14258,
448,
7,
15,
13,
18,
5769,
87,
8,
198,
1676,
65,
796,
360,
1072,
7,
16,
11,
14916,
11639,
82,
17225,
1868,
6,
5769,
87,
8,
198,
198,
19849,
796,
9104,
7,
43167,
62,
15414,
11,
1861,
8,
198,
198,
19849,
13,
5589,
576,
7,
22462,
11639,
39491,
62,
19692,
298,
28338,
3256,
40085,
7509,
11639,
324,
321,
3256,
20731,
28,
17816,
4134,
23843,
6,
12962,
198,
198,
83,
22854,
3526,
796,
309,
22854,
29828,
7,
6404,
62,
15908,
28,
4458,
14,
37065,
43,
2257,
44,
3256,
1554,
21857,
62,
19503,
80,
28,
15,
11,
3551,
62,
34960,
28,
17821,
8,
198,
198,
4798,
10786,
19849,
14102,
11537,
198,
198,
4798,
7,
19849,
13,
49736,
28955,
220,
198,
198,
11458,
62,
301,
33307,
796,
12556,
1273,
33307,
7,
41143,
11639,
2100,
62,
22462,
3256,
16336,
796,
362,
11,
4235,
796,
705,
1084,
11537,
198,
13155,
796,
9104,
9787,
4122,
10786,
17633,
9148,
2257,
44,
13,
71,
20,
3256,
5671,
11639,
2100,
62,
4134,
3256,
3613,
62,
13466,
62,
8807,
28,
17821,
11,
4235,
11639,
9806,
11537,
628,
198,
361,
466,
62,
11458,
62,
301,
33307,
25,
198,
220,
220,
220,
3601,
10786,
3500,
1903,
12225,
4811,
11537,
198,
220,
220,
220,
2106,
796,
2746,
13,
11147,
7,
87,
62,
27432,
11,
331,
62,
27432,
11,
21201,
62,
7890,
16193,
87,
62,
2100,
11,
331,
62,
2100,
828,
36835,
82,
28,
19,
11,
15458,
62,
7857,
28,
12762,
11,
869,
10146,
796,
685,
11458,
62,
301,
33307,
11,
11192,
273,
3526,
12962,
198,
17772,
25,
198,
220,
220,
220,
2106,
796,
2746,
13,
11147,
7,
87,
62,
27432,
11,
331,
62,
27432,
11,
21201,
62,
7890,
16193,
87,
62,
2100,
11,
331,
62,
2100,
828,
36835,
82,
28,
23,
11,
15458,
62,
7857,
28,
12762,
8,
628,
198,
22462,
11,
697,
796,
2746,
13,
49786,
7,
87,
62,
9288,
11,
331,
62,
9288,
8,
198,
198,
4798,
7203,
22462,
25,
43825,
2536,
7,
22462,
4008,
198,
4798,
7203,
4134,
23843,
25,
43825,
2536,
7,
4134,
4008,
220,
198,
198,
19849,
13,
21928,
10786,
1820,
62,
19849,
18,
13,
71,
20,
11537,
198,
198,
29487,
889,
7,
23569,
8,
628,
198
] | 2.893426 | 1,445 |
import mbuild as mb
class NH(mb.Compound):
"""A nitrogen with a hydrogen and two open ports. """
if __name__ == '__main__':
nh = NH()
| [
11748,
285,
11249,
355,
285,
65,
628,
198,
4871,
24451,
7,
2022,
13,
7293,
633,
2599,
198,
220,
220,
220,
37227,
32,
23417,
351,
257,
17669,
290,
734,
1280,
14090,
13,
37227,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
299,
71,
796,
24451,
3419,
198
] | 2.685185 | 54 |
from Jumpscale import j
import os
__version__ = "0.0.1"
| [
6738,
449,
8142,
38765,
1330,
474,
198,
11748,
28686,
198,
198,
834,
9641,
834,
796,
366,
15,
13,
15,
13,
16,
1,
628
] | 2.521739 | 23 |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from instrosetta.interfaces.optomechanics import filter_wheel_pb2 as instrosetta_dot_interfaces_dot_optomechanics_dot_filter__wheel__pb2
| [
2,
2980,
515,
416,
262,
308,
49,
5662,
11361,
8435,
17050,
13877,
13,
8410,
5626,
48483,
0,
198,
11748,
1036,
14751,
198,
198,
6738,
916,
305,
2617,
8326,
13,
3849,
32186,
13,
8738,
462,
3147,
873,
1330,
8106,
62,
22001,
62,
40842,
17,
355,
916,
305,
2617,
8326,
62,
26518,
62,
3849,
32186,
62,
26518,
62,
8738,
462,
3147,
873,
62,
26518,
62,
24455,
834,
22001,
834,
40842,
17,
628,
628
] | 3.140845 | 71 |
print('hello')
"""
Compare the number of operations and the time needed
to compute Fibonacci numbers recursively versus that
needed to compute them iteratively
"""
# recursive work
# Python program to display the Fibonacci sequence
import time
recursive_data = Data_tracker()
number_of_terms = 40
recursive_data.start_time = time.time()
# check if the number of terms is valid
if number_of_terms <= 0:
print("Plese enter a positive integer")
else:
print(f"Fibonacci number for {number_of_terms} terms:")
print(recur_fibo((number_of_terms - 1), recursive_data))
recursive_data.stop_time = time.time()
print('\n\nRECUSIVE DATA')
recursive_data.print_function_data()
# iterative work
# https://www.programiz.com/python-programming/examples/fibonacci-sequence
# Program to display the Fibonacci sequence up to n-th term
iterative_data = Data_tracker()
# first two terms
n1, n2 = 0, 1
count = 0
# check if the number of terms is valid
if number_of_terms <= 0:
print("Please enter a positive integer")
# if there is only one term, return n1
elif number_of_terms == 1:
print("Fibonacci sequence upto",number_of_terms,":")
print(n1)
# generate fibonacci sequence
else:
print("Fibonacci sequence:")
iterative_data.start_time = time.time()
while count < number_of_terms:
iterative_data.increment_if_count()
#print(n1)
iterative_data.increment_add_count()
nth = n1 + n2
# update values
iterative_data.increment_assignment_count()
n1 = n2
iterative_data.increment_assignment_count()
n2 = nth
iterative_data.increment_assignment_count()
count += 1
iterative_data.stop_time = time.time()
print('\n\nITERATIVE DATA')
iterative_data.print_function_data()
| [
4798,
10786,
31373,
11537,
198,
198,
37811,
198,
41488,
262,
1271,
286,
4560,
290,
262,
640,
2622,
198,
1462,
24061,
41566,
261,
44456,
3146,
664,
1834,
2280,
9051,
326,
198,
27938,
284,
24061,
606,
11629,
9404,
198,
37811,
628,
198,
2,
45115,
670,
198,
2,
11361,
1430,
284,
3359,
262,
41566,
261,
44456,
8379,
198,
198,
11748,
640,
198,
198,
8344,
30753,
62,
7890,
796,
6060,
62,
2213,
10735,
3419,
198,
198,
17618,
62,
1659,
62,
38707,
796,
2319,
198,
198,
8344,
30753,
62,
7890,
13,
9688,
62,
2435,
796,
640,
13,
2435,
3419,
198,
198,
2,
2198,
611,
262,
1271,
286,
2846,
318,
4938,
198,
361,
1271,
62,
1659,
62,
38707,
19841,
657,
25,
198,
220,
220,
3601,
7203,
47,
829,
68,
3802,
257,
3967,
18253,
4943,
198,
17772,
25,
198,
220,
220,
3601,
7,
69,
1,
37,
571,
261,
44456,
1271,
329,
1391,
17618,
62,
1659,
62,
38707,
92,
2846,
25,
4943,
198,
220,
220,
3601,
7,
8344,
333,
62,
69,
26762,
19510,
17618,
62,
1659,
62,
38707,
532,
352,
828,
45115,
62,
7890,
4008,
198,
198,
8344,
30753,
62,
7890,
13,
11338,
62,
2435,
796,
640,
13,
2435,
3419,
198,
198,
4798,
10786,
59,
77,
59,
77,
38827,
2937,
9306,
42865,
11537,
198,
8344,
30753,
62,
7890,
13,
4798,
62,
8818,
62,
7890,
3419,
198,
198,
2,
11629,
876,
670,
628,
198,
2,
3740,
1378,
2503,
13,
23065,
528,
13,
785,
14,
29412,
12,
23065,
2229,
14,
1069,
12629,
14,
69,
571,
261,
44456,
12,
43167,
198,
198,
2,
6118,
284,
3359,
262,
41566,
261,
44456,
8379,
510,
284,
299,
12,
400,
3381,
198,
198,
2676,
876,
62,
7890,
796,
6060,
62,
2213,
10735,
3419,
198,
198,
2,
717,
734,
2846,
198,
77,
16,
11,
299,
17,
796,
657,
11,
352,
198,
9127,
796,
657,
198,
198,
2,
2198,
611,
262,
1271,
286,
2846,
318,
4938,
198,
361,
1271,
62,
1659,
62,
38707,
19841,
657,
25,
198,
220,
220,
3601,
7203,
5492,
3802,
257,
3967,
18253,
4943,
198,
2,
611,
612,
318,
691,
530,
3381,
11,
1441,
299,
16,
198,
417,
361,
1271,
62,
1659,
62,
38707,
6624,
352,
25,
198,
220,
220,
3601,
7203,
37,
571,
261,
44456,
8379,
18529,
78,
1600,
17618,
62,
1659,
62,
38707,
553,
25,
4943,
198,
220,
220,
3601,
7,
77,
16,
8,
198,
2,
7716,
12900,
261,
44456,
8379,
198,
17772,
25,
198,
220,
220,
3601,
7203,
37,
571,
261,
44456,
8379,
25,
4943,
198,
220,
220,
11629,
876,
62,
7890,
13,
9688,
62,
2435,
796,
640,
13,
2435,
3419,
198,
220,
220,
981,
954,
1279,
1271,
62,
1659,
62,
38707,
25,
198,
220,
220,
220,
220,
220,
220,
11629,
876,
62,
7890,
13,
24988,
434,
62,
361,
62,
9127,
3419,
198,
220,
220,
220,
220,
220,
220,
1303,
4798,
7,
77,
16,
8,
198,
220,
220,
220,
220,
220,
220,
11629,
876,
62,
7890,
13,
24988,
434,
62,
2860,
62,
9127,
3419,
198,
220,
220,
220,
220,
220,
220,
299,
400,
796,
299,
16,
1343,
299,
17,
198,
220,
220,
220,
220,
220,
220,
1303,
4296,
3815,
198,
220,
220,
220,
220,
220,
220,
11629,
876,
62,
7890,
13,
24988,
434,
62,
562,
16747,
62,
9127,
3419,
198,
220,
220,
220,
220,
220,
220,
299,
16,
796,
299,
17,
198,
220,
220,
220,
220,
220,
220,
11629,
876,
62,
7890,
13,
24988,
434,
62,
562,
16747,
62,
9127,
3419,
198,
220,
220,
220,
220,
220,
220,
299,
17,
796,
299,
400,
198,
220,
220,
220,
220,
220,
220,
11629,
876,
62,
7890,
13,
24988,
434,
62,
562,
16747,
62,
9127,
3419,
198,
220,
220,
220,
220,
220,
220,
954,
15853,
352,
198,
198,
2676,
876,
62,
7890,
13,
11338,
62,
2435,
796,
640,
13,
2435,
3419,
198,
4798,
10786,
59,
77,
59,
77,
2043,
1137,
37045,
42865,
11537,
198,
2676,
876,
62,
7890,
13,
4798,
62,
8818,
62,
7890,
3419,
198
] | 2.742991 | 642 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2011 Openstack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
arch linux network helper module
"""
# Arch has two different kinds of network configuration. More recently,
# there's 'netcfg' and previously (for lack of a better term) 'legacy'.
#
# legacy uses:
# - 1 shell-script-style global configuration (/etc/rc.conf)
# - one IP per interface
# - routes are per interface
# - gateways are global
# - DNS is per interface
#
# netcfg uses:
# - multiple shell-script-style network configurations, 1 per interface
# - one IP per configuration
# - routes are per interface
# - gateways are per interface
# - DNS is global (/etc/resolv.conf)
#
# netcfg is designed for one IP per configuration, but it's not tolerant
# of the older style colon interfaces for IP aliasing. So we have to use
# a hack to get IP aliasing working:
# https://bbs.archlinux.org/viewtopic.php?pid=951573#p951573
#
# Arch is a rolling release, meaning new features and updated packages
# roll out on a unpredictable schedule. It also means there is no such
# thing as v1.0 or v2.0. We check if the netcfg package is installed to
# determine which format should be used.
import os
import re
import time
import subprocess
import logging
from cStringIO import StringIO
import commands.network
CONF_FILE = "/etc/rc.conf"
NETWORK_DIR = "/etc/network.d"
NETCTL_DIR = "/etc/netctl/"
def get_hostname():
"""
Just required to check /etc/rc.conf for SysVInit based Archlinux images.
All updated SystemD supporting images have it at default /etc/hostname
Will fetch current hostname of VM if any and return.
Looks at /etc/rc.conf config for Archlinux server using SysVInit.
"""
try:
with open(CONF_FILE) as hostname_fyl:
for line in hostname_fyl.readlines():
hn = re.search('HOSTNAME="(.*)"', line)
if hn:
return hn.group(1)
return None
except Exception, e:
logging.info("Init support Arch hostname enquiry failed: %s." % str(e))
return None
def get_hostname_file(infile, hostname):
"""
Update hostname on system
"""
outfile = StringIO()
found = False
for line in infile:
line = line.strip()
if '=' in line:
k, v = line.split('=', 1)
k = k.strip()
if k == "HOSTNAME":
print >> outfile, 'HOSTNAME="%s"' % hostname
found = True
else:
print >> outfile, line
else:
print >> outfile, line
if not found:
print >> outfile, 'HOSTNAME="%s"' % hostname
outfile.seek(0)
return outfile.read()
def _update_rc_conf_legacy(infile, interfaces):
"""
Return data for (sub-)interfaces and routes
"""
# Updating this file happens in two phases since it's non-trivial to
# update. The INTERFACES and ROUTES variables the key lines, but they
# will in turn reference other variables, which may be before or after.
# As a result, we need to load the entire file, find the main variables
# and then remove the reference variables. When that is done, we add
# the lines for the new config.
# First generate new config
ifaces = []
routes = []
gateway4, gateway6 = commands.network.get_gateways(interfaces)
ifnames = interfaces.keys()
ifnames.sort()
for ifname_prefix in ifnames:
interface = interfaces[ifname_prefix]
ip4s = interface['ip4s']
ip6s = interface['ip6s']
ifname_suffix_num = 0
for ip4, ip6 in map(None, ip4s, ip6s):
if ifname_suffix_num:
ifname = "%s:%d" % (ifname_prefix, ifname_suffix_num)
else:
ifname = ifname_prefix
line = [ifname]
if ip4:
line.append('%(address)s netmask %(netmask)s' % ip4)
if ip6:
line.append('add %(address)s/%(prefixlen)s' % ip6)
ifname_suffix_num += 1
ifaces.append((ifname.replace(':', '_'), ' '.join(line)))
for i, route in enumerate(interface['routes']):
if route['network'] == '0.0.0.0' and \
route['netmask'] == '0.0.0.0' and \
route['gateway'] == gateway4:
continue
line = "-net %(network)s netmask %(netmask)s gw %(gateway)s" % \
route
routes.append(('%s_route%d' % (ifname_prefix, i), line))
if gateway4:
routes.append(('gateway', 'default gw %s' % gateway4))
if gateway6:
routes.append(('gateway6', 'default gw %s' % gateway6))
# Then load old file
lines, variables = _parse_config(infile)
# Update INTERFACES
lineno = variables.get('INTERFACES')
if lineno is not None:
# Remove old lines
for name in _parse_variable(lines[lineno], strip_bang=True):
if name in variables:
lines[variables[name]] = None
else:
lines.append('')
lineno = len(lines) - 1
config = []
names = []
for name, line in ifaces:
config.append('%s="%s"' % (name, line))
names.append(name)
config.append('INTERFACES=(%s)' % ' '.join(names))
lines[lineno] = '\n'.join(config)
# Update ROUTES
lineno = variables.get('ROUTES')
if lineno is not None:
# Remove old lines
for name in _parse_variable(lines[lineno], strip_bang=True):
if name in variables:
lines[variables[name]] = None
else:
lines.append('')
lineno = len(lines) - 1
config = []
names = []
for name, line in routes:
config.append('%s="%s"' % (name, line))
names.append(name)
config.append('ROUTES=(%s)' % ' '.join(names))
lines[lineno] = '\n'.join(config)
# (Possibly) comment out NETWORKS
lineno = variables.get('NETWORKS')
if lineno is not None:
for name in _parse_variable(lines[lineno], strip_bang=True):
nlineno = variables.get(name)
if nlineno is not None:
lines[nlineno] = '#' + lines[lineno]
lines[lineno] = '#' + lines[lineno]
# (Possibly) update DAEMONS
lineno = variables.get('DAEMONS')
if lineno is not None:
daemons = _parse_variable(lines[lineno])
try:
network = daemons.index('!network')
daemons[network] = 'network'
if '@net-profiles' in daemons:
daemons.remove('@net-profiles')
lines[lineno] = 'DAEMONS=(%s)' % ' '.join(daemons)
except ValueError:
pass
# Filter out any removed lines
lines = filter(lambda l: l is not None, lines)
# Serialize into new file
outfile = StringIO()
for line in lines:
print >> outfile, line
outfile.seek(0)
return outfile.read()
def _get_file_data_netcfg(ifname, interface):
"""
Return data for (sub-)interfaces
"""
ifaces = []
label = interface['label']
ip4s = interface['ip4s']
ip6s = interface['ip6s']
gateway4 = interface['gateway4']
gateway6 = interface['gateway6']
dns = interface['dns']
outfile = StringIO()
if label:
print >>outfile, "# Label %s" % label
print >>outfile, 'CONNECTION="ethernet"'
print >>outfile, 'INTERFACE=%s' % ifname
if ip4s:
ip4 = ip4s.pop(0)
print >>outfile, 'IP="static"'
print >>outfile, 'ADDR="%(address)s"' % ip4
print >>outfile, 'NETMASK="%(netmask)s"' % ip4
if gateway4:
print >>outfile, 'GATEWAY="%s"' % gateway4
if ip6s:
ip6 = ip6s.pop(0)
print >>outfile, 'IP6="static"'
print >>outfile, 'ADDR6="%(address)s/%(prefixlen)s"' % ip6
if gateway6:
print >>outfile, 'GATEWAY6="%s"' % gateway6
routes = ['"%(network)s/%(netmask)s via %(gateway)s"' % route
for route in interface['routes'] if not
route['network'] == '0.0.0.0' and not
route['netmask'] == '0.0.0.0' and not
route['gateway'] == gateway4]
if routes:
print >>outfile, 'ROUTES=(%s)' % ' '.join(routes)
if dns:
print >>outfile, 'DNS=(%s)' % ' '.join(dns)
# Finally add remaining aliases. This is kind of hacky, see comment at
# top for explanation
aliases = ['%(address)s/%(netmask)s' % ip4 for ip4 in ip4s] + \
['%(address)s/%(prefixlen)s' % ip6 for ip6 in ip6s]
if aliases:
commands = '; '.join(['ip addr add %s dev %s' % (a, ifname)
for a in aliases])
print >>outfile, 'POST_UP="%s"' % commands
aliases.reverse()
commands = '; '.join(['ip addr del %s dev %s' % (a, ifname)
for a in aliases])
print >>outfile, 'PRE_DOWN="%s"' % commands
outfile.seek(0)
return outfile.read()
def process_interface_files_legacy(update_files, interfaces):
"""Generate changeset for interface configuration"""
infile = StringIO(update_files.get(CONF_FILE, ''))
data = _update_rc_conf_legacy(infile, interfaces)
update_files[CONF_FILE] = data
def process_interface_files_netctl(update_files, interfaces):
"""Generate changeset for interface configuration"""
# Enumerate all of the existing network files
remove_files = set()
for filename in os.listdir(NETCTL_DIR):
filepath = os.path.join(NETCTL_DIR, filename)
if not filename.endswith('~') and not os.path.isdir(filepath):
remove_files.add(filepath)
netnames = []
for ifname, interface in interfaces.iteritems():
data = _get_file_data_netctl(ifname, interface)
filepath = os.path.join(NETCTL_DIR, ifname)
update_files[filepath] = data
if filepath in remove_files:
remove_files.remove(filepath)
netnames.append(ifname)
return remove_files, netnames
def process_interface_files_netcfg(update_files, interfaces):
"""Generate changeset for interface configuration"""
# Enumerate all of the existing network files
remove_files = set()
for filename in os.listdir(NETWORK_DIR):
filepath = os.path.join(NETWORK_DIR, filename)
if not filename.endswith('~') and not os.path.isdir(filepath):
remove_files.add(filepath)
netnames = []
for ifname, interface in interfaces.iteritems():
data = _get_file_data_netcfg(ifname, interface)
filepath = os.path.join(NETWORK_DIR, ifname)
update_files[filepath] = data
if filepath in remove_files:
remove_files.remove(filepath)
netnames.append(ifname)
infile = StringIO(update_files.get(CONF_FILE, ''))
data = _update_rc_conf_netcfg(infile, netnames)
update_files[CONF_FILE] = data
return remove_files, netnames
| [
2,
43907,
25,
7400,
11338,
28,
19,
6482,
10394,
28,
19,
2705,
8658,
11338,
28,
19,
198,
2,
198,
2,
220,
15069,
357,
66,
8,
2813,
4946,
25558,
11,
11419,
13,
198,
2,
220,
1439,
6923,
33876,
13,
198,
2,
198,
2,
220,
220,
220,
220,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
345,
743,
198,
2,
220,
220,
220,
220,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
921,
743,
7330,
198,
2,
220,
220,
220,
220,
257,
4866,
286,
262,
13789,
379,
198,
2,
198,
2,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2638,
1378,
2503,
13,
43073,
13,
2398,
14,
677,
4541,
14,
43,
2149,
24290,
12,
17,
13,
15,
198,
2,
198,
2,
220,
220,
220,
220,
17486,
2672,
416,
9723,
1099,
393,
4987,
284,
287,
3597,
11,
3788,
198,
2,
220,
220,
220,
220,
9387,
739,
262,
13789,
318,
9387,
319,
281,
366,
1921,
3180,
1,
29809,
1797,
11,
42881,
198,
2,
220,
220,
220,
220,
34764,
11015,
6375,
7102,
49828,
11053,
3963,
15529,
509,
12115,
11,
2035,
4911,
393,
17142,
13,
4091,
262,
198,
2,
220,
220,
220,
220,
13789,
329,
262,
2176,
3303,
15030,
21627,
290,
11247,
198,
2,
220,
220,
220,
220,
739,
262,
13789,
13,
198,
2,
198,
198,
37811,
198,
998,
32639,
3127,
31904,
8265,
198,
37811,
198,
198,
2,
5579,
468,
734,
1180,
6982,
286,
3127,
8398,
13,
3125,
2904,
11,
198,
2,
612,
338,
705,
3262,
37581,
6,
290,
4271,
357,
1640,
3092,
286,
257,
1365,
3381,
8,
705,
1455,
1590,
4458,
198,
2,
198,
2,
10655,
3544,
25,
198,
2,
532,
352,
7582,
12,
12048,
12,
7635,
3298,
8398,
50247,
14784,
14,
6015,
13,
10414,
8,
198,
2,
532,
530,
6101,
583,
7071,
198,
2,
532,
11926,
389,
583,
7071,
198,
2,
532,
8946,
1322,
389,
3298,
198,
2,
532,
18538,
318,
583,
7071,
198,
2,
198,
2,
2010,
37581,
3544,
25,
198,
2,
532,
3294,
7582,
12,
12048,
12,
7635,
3127,
25412,
11,
352,
583,
7071,
198,
2,
532,
530,
6101,
583,
8398,
198,
2,
532,
11926,
389,
583,
7071,
198,
2,
532,
8946,
1322,
389,
583,
7071,
198,
2,
532,
18538,
318,
3298,
50247,
14784,
14,
411,
349,
85,
13,
10414,
8,
198,
2,
198,
2,
2010,
37581,
318,
3562,
329,
530,
6101,
583,
8398,
11,
475,
340,
338,
407,
33435,
198,
2,
286,
262,
4697,
3918,
7633,
20314,
329,
6101,
34965,
2313,
13,
1406,
356,
423,
284,
779,
198,
2,
257,
8156,
284,
651,
6101,
34965,
2313,
1762,
25,
198,
2,
3740,
1378,
65,
1443,
13,
998,
23289,
13,
2398,
14,
1177,
26652,
13,
10121,
30,
35317,
28,
3865,
1314,
4790,
2,
79,
3865,
1314,
4790,
198,
2,
198,
2,
5579,
318,
257,
10708,
2650,
11,
3616,
649,
3033,
290,
6153,
10392,
198,
2,
4836,
503,
319,
257,
22900,
7269,
13,
632,
635,
1724,
612,
318,
645,
884,
198,
2,
1517,
355,
410,
16,
13,
15,
393,
410,
17,
13,
15,
13,
775,
2198,
611,
262,
2010,
37581,
5301,
318,
6589,
284,
198,
2,
5004,
543,
5794,
815,
307,
973,
13,
198,
198,
11748,
28686,
198,
11748,
302,
198,
11748,
640,
198,
11748,
850,
14681,
198,
11748,
18931,
198,
6738,
269,
10100,
9399,
1330,
10903,
9399,
198,
198,
11748,
9729,
13,
27349,
198,
198,
10943,
37,
62,
25664,
796,
12813,
14784,
14,
6015,
13,
10414,
1,
198,
12884,
33249,
62,
34720,
796,
12813,
14784,
14,
27349,
13,
67,
1,
198,
12884,
4177,
43,
62,
34720,
796,
12813,
14784,
14,
3262,
34168,
30487,
628,
628,
198,
4299,
651,
62,
4774,
3672,
33529,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
2329,
2672,
284,
2198,
1220,
14784,
14,
6015,
13,
10414,
329,
311,
893,
53,
31768,
1912,
5579,
23289,
4263,
13,
198,
220,
220,
220,
1439,
6153,
4482,
35,
6493,
4263,
423,
340,
379,
4277,
1220,
14784,
14,
4774,
3672,
198,
220,
220,
220,
2561,
21207,
1459,
2583,
3672,
286,
16990,
611,
597,
290,
1441,
13,
198,
220,
220,
220,
29403,
379,
1220,
14784,
14,
6015,
13,
10414,
4566,
329,
5579,
23289,
4382,
1262,
311,
893,
53,
31768,
13,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
1949,
25,
198,
220,
220,
220,
220,
220,
220,
220,
351,
1280,
7,
10943,
37,
62,
25664,
8,
355,
2583,
3672,
62,
69,
2645,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
1627,
287,
2583,
3672,
62,
69,
2645,
13,
961,
6615,
33529,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
289,
77,
796,
302,
13,
12947,
10786,
39,
10892,
20608,
2625,
7,
15885,
16725,
3256,
1627,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
289,
77,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1441,
289,
77,
13,
8094,
7,
16,
8,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
6045,
628,
220,
220,
220,
2845,
35528,
11,
304,
25,
198,
220,
220,
220,
220,
220,
220,
220,
18931,
13,
10951,
7203,
31768,
1104,
5579,
2583,
3672,
34593,
9045,
4054,
25,
4064,
82,
526,
4064,
965,
7,
68,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
6045,
628,
198,
198,
4299,
651,
62,
4774,
3672,
62,
7753,
7,
259,
7753,
11,
2583,
3672,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
10133,
2583,
3672,
319,
1080,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
503,
7753,
796,
10903,
9399,
3419,
198,
220,
220,
220,
1043,
796,
10352,
198,
220,
220,
220,
329,
1627,
287,
1167,
576,
25,
198,
220,
220,
220,
220,
220,
220,
220,
1627,
796,
1627,
13,
36311,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
611,
705,
11639,
287,
1627,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
479,
11,
410,
796,
1627,
13,
35312,
10786,
28,
3256,
352,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
479,
796,
479,
13,
36311,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
479,
6624,
366,
39,
10892,
20608,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
503,
7753,
11,
705,
39,
10892,
20608,
2625,
4,
82,
30543,
4064,
2583,
3672,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1043,
796,
6407,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
503,
7753,
11,
1627,
198,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
503,
7753,
11,
1627,
628,
220,
220,
220,
611,
407,
1043,
25,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
503,
7753,
11,
705,
39,
10892,
20608,
2625,
4,
82,
30543,
4064,
2583,
3672,
628,
220,
220,
220,
503,
7753,
13,
36163,
7,
15,
8,
198,
220,
220,
220,
1441,
503,
7753,
13,
961,
3419,
628,
628,
198,
4299,
4808,
19119,
62,
6015,
62,
10414,
62,
1455,
1590,
7,
259,
7753,
11,
20314,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
8229,
1366,
329,
357,
7266,
25106,
3849,
32186,
290,
11926,
198,
220,
220,
220,
37227,
628,
220,
220,
220,
1303,
3205,
38734,
428,
2393,
4325,
287,
734,
21164,
1201,
340,
338,
1729,
12,
83,
15104,
498,
284,
198,
220,
220,
220,
1303,
4296,
13,
383,
23255,
37,
2246,
1546,
290,
371,
12425,
1546,
9633,
262,
1994,
3951,
11,
475,
484,
198,
220,
220,
220,
1303,
481,
287,
1210,
4941,
584,
9633,
11,
543,
743,
307,
878,
393,
706,
13,
198,
220,
220,
220,
1303,
1081,
257,
1255,
11,
356,
761,
284,
3440,
262,
2104,
2393,
11,
1064,
262,
1388,
9633,
198,
220,
220,
220,
1303,
290,
788,
4781,
262,
4941,
9633,
13,
1649,
326,
318,
1760,
11,
356,
751,
198,
220,
220,
220,
1303,
262,
3951,
329,
262,
649,
4566,
13,
628,
220,
220,
220,
1303,
3274,
7716,
649,
4566,
198,
220,
220,
220,
611,
2114,
796,
17635,
198,
220,
220,
220,
11926,
796,
17635,
628,
220,
220,
220,
24308,
19,
11,
24308,
21,
796,
9729,
13,
27349,
13,
1136,
62,
10494,
1322,
7,
3849,
32186,
8,
628,
220,
220,
220,
611,
14933,
796,
20314,
13,
13083,
3419,
198,
220,
220,
220,
611,
14933,
13,
30619,
3419,
628,
220,
220,
220,
329,
611,
3672,
62,
40290,
287,
611,
14933,
25,
198,
220,
220,
220,
220,
220,
220,
220,
7071,
796,
20314,
58,
361,
3672,
62,
40290,
60,
628,
220,
220,
220,
220,
220,
220,
220,
20966,
19,
82,
796,
7071,
17816,
541,
19,
82,
20520,
198,
220,
220,
220,
220,
220,
220,
220,
20966,
21,
82,
796,
7071,
17816,
541,
21,
82,
20520,
628,
220,
220,
220,
220,
220,
220,
220,
611,
3672,
62,
37333,
844,
62,
22510,
796,
657,
628,
220,
220,
220,
220,
220,
220,
220,
329,
20966,
19,
11,
20966,
21,
287,
3975,
7,
14202,
11,
20966,
19,
82,
11,
20966,
21,
82,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
611,
3672,
62,
37333,
844,
62,
22510,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
3672,
796,
36521,
82,
25,
4,
67,
1,
4064,
357,
361,
3672,
62,
40290,
11,
611,
3672,
62,
37333,
844,
62,
22510,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
3672,
796,
611,
3672,
62,
40290,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1627,
796,
685,
361,
3672,
60,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
20966,
19,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1627,
13,
33295,
10786,
4,
7,
21975,
8,
82,
2010,
27932,
4064,
7,
3262,
27932,
8,
82,
6,
4064,
20966,
19,
8,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
20966,
21,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1627,
13,
33295,
10786,
2860,
4064,
7,
21975,
8,
82,
14,
4,
7,
40290,
11925,
8,
82,
6,
4064,
20966,
21,
8,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
3672,
62,
37333,
844,
62,
22510,
15853,
352,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
2114,
13,
33295,
19510,
361,
3672,
13,
33491,
7,
10354,
3256,
705,
62,
33809,
705,
45302,
22179,
7,
1370,
22305,
628,
220,
220,
220,
220,
220,
220,
220,
329,
1312,
11,
6339,
287,
27056,
378,
7,
39994,
17816,
81,
448,
274,
20520,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
6339,
17816,
27349,
20520,
6624,
705,
15,
13,
15,
13,
15,
13,
15,
6,
290,
3467,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6339,
17816,
3262,
27932,
20520,
6624,
705,
15,
13,
15,
13,
15,
13,
15,
6,
290,
3467,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6339,
17816,
10494,
1014,
20520,
6624,
24308,
19,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2555,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1627,
796,
27444,
3262,
4064,
7,
27349,
8,
82,
2010,
27932,
4064,
7,
3262,
27932,
8,
82,
308,
86,
4064,
7,
10494,
1014,
8,
82,
1,
4064,
3467,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6339,
628,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
11926,
13,
33295,
7,
10786,
4,
82,
62,
38629,
4,
67,
6,
4064,
357,
361,
3672,
62,
40290,
11,
1312,
828,
1627,
4008,
628,
220,
220,
220,
611,
24308,
19,
25,
198,
220,
220,
220,
220,
220,
220,
220,
11926,
13,
33295,
7,
10786,
10494,
1014,
3256,
705,
12286,
308,
86,
4064,
82,
6,
4064,
24308,
19,
4008,
198,
220,
220,
220,
611,
24308,
21,
25,
198,
220,
220,
220,
220,
220,
220,
220,
11926,
13,
33295,
7,
10786,
10494,
1014,
21,
3256,
705,
12286,
308,
86,
4064,
82,
6,
4064,
24308,
21,
4008,
628,
220,
220,
220,
1303,
3244,
3440,
1468,
2393,
198,
220,
220,
220,
3951,
11,
9633,
796,
4808,
29572,
62,
11250,
7,
259,
7753,
8,
628,
220,
220,
220,
1303,
10133,
23255,
37,
2246,
1546,
198,
220,
220,
220,
9493,
23397,
796,
9633,
13,
1136,
10786,
41358,
37,
2246,
1546,
11537,
198,
220,
220,
220,
611,
9493,
23397,
318,
407,
6045,
25,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
17220,
1468,
3951,
198,
220,
220,
220,
220,
220,
220,
220,
329,
1438,
287,
4808,
29572,
62,
45286,
7,
6615,
58,
2815,
23397,
4357,
10283,
62,
36668,
28,
17821,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
1438,
287,
9633,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3951,
58,
25641,
2977,
58,
3672,
11907,
796,
6045,
198,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
3951,
13,
33295,
7,
7061,
8,
198,
220,
220,
220,
220,
220,
220,
220,
9493,
23397,
796,
18896,
7,
6615,
8,
532,
352,
628,
220,
220,
220,
4566,
796,
17635,
198,
220,
220,
220,
3891,
796,
17635,
198,
220,
220,
220,
329,
1438,
11,
1627,
287,
611,
2114,
25,
198,
220,
220,
220,
220,
220,
220,
220,
4566,
13,
33295,
10786,
4,
82,
2625,
4,
82,
30543,
4064,
357,
3672,
11,
1627,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
3891,
13,
33295,
7,
3672,
8,
628,
220,
220,
220,
4566,
13,
33295,
10786,
41358,
37,
2246,
1546,
16193,
4,
82,
33047,
4064,
705,
45302,
22179,
7,
14933,
4008,
198,
220,
220,
220,
3951,
58,
2815,
23397,
60,
796,
705,
59,
77,
4458,
22179,
7,
11250,
8,
628,
220,
220,
220,
1303,
10133,
371,
12425,
1546,
198,
220,
220,
220,
9493,
23397,
796,
9633,
13,
1136,
10786,
49,
12425,
1546,
11537,
198,
220,
220,
220,
611,
9493,
23397,
318,
407,
6045,
25,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
17220,
1468,
3951,
198,
220,
220,
220,
220,
220,
220,
220,
329,
1438,
287,
4808,
29572,
62,
45286,
7,
6615,
58,
2815,
23397,
4357,
10283,
62,
36668,
28,
17821,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
1438,
287,
9633,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3951,
58,
25641,
2977,
58,
3672,
11907,
796,
6045,
198,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
3951,
13,
33295,
7,
7061,
8,
198,
220,
220,
220,
220,
220,
220,
220,
9493,
23397,
796,
18896,
7,
6615,
8,
532,
352,
628,
220,
220,
220,
4566,
796,
17635,
198,
220,
220,
220,
3891,
796,
17635,
198,
220,
220,
220,
329,
1438,
11,
1627,
287,
11926,
25,
198,
220,
220,
220,
220,
220,
220,
220,
4566,
13,
33295,
10786,
4,
82,
2625,
4,
82,
30543,
4064,
357,
3672,
11,
1627,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
3891,
13,
33295,
7,
3672,
8,
628,
220,
220,
220,
4566,
13,
33295,
10786,
49,
12425,
1546,
16193,
4,
82,
33047,
4064,
705,
45302,
22179,
7,
14933,
4008,
198,
220,
220,
220,
3951,
58,
2815,
23397,
60,
796,
705,
59,
77,
4458,
22179,
7,
11250,
8,
628,
220,
220,
220,
1303,
357,
47,
20846,
8,
2912,
503,
49791,
50,
198,
220,
220,
220,
9493,
23397,
796,
9633,
13,
1136,
10786,
12884,
33249,
50,
11537,
198,
220,
220,
220,
611,
9493,
23397,
318,
407,
6045,
25,
198,
220,
220,
220,
220,
220,
220,
220,
329,
1438,
287,
4808,
29572,
62,
45286,
7,
6615,
58,
2815,
23397,
4357,
10283,
62,
36668,
28,
17821,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
299,
2815,
23397,
796,
9633,
13,
1136,
7,
3672,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
299,
2815,
23397,
318,
407,
6045,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3951,
58,
77,
2815,
23397,
60,
796,
705,
2,
6,
1343,
3951,
58,
2815,
23397,
60,
628,
220,
220,
220,
220,
220,
220,
220,
3951,
58,
2815,
23397,
60,
796,
705,
2,
6,
1343,
3951,
58,
2815,
23397,
60,
628,
220,
220,
220,
1303,
357,
47,
20846,
8,
4296,
17051,
3620,
19213,
198,
220,
220,
220,
9493,
23397,
796,
9633,
13,
1136,
10786,
5631,
3620,
19213,
11537,
198,
220,
220,
220,
611,
9493,
23397,
318,
407,
6045,
25,
198,
220,
220,
220,
220,
220,
220,
220,
12379,
368,
684,
796,
4808,
29572,
62,
45286,
7,
6615,
58,
2815,
23397,
12962,
198,
220,
220,
220,
220,
220,
220,
220,
1949,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3127,
796,
12379,
368,
684,
13,
9630,
10786,
0,
27349,
11537,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
12379,
368,
684,
58,
27349,
60,
796,
705,
27349,
6,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
611,
705,
31,
3262,
12,
5577,
2915,
6,
287,
12379,
368,
684,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
12379,
368,
684,
13,
28956,
10786,
31,
3262,
12,
5577,
2915,
11537,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3951,
58,
2815,
23397,
60,
796,
705,
5631,
3620,
19213,
16193,
4,
82,
33047,
4064,
705,
45302,
22179,
7,
6814,
368,
684,
8,
198,
220,
220,
220,
220,
220,
220,
220,
2845,
11052,
12331,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1208,
628,
220,
220,
220,
1303,
25853,
503,
597,
4615,
3951,
198,
220,
220,
220,
3951,
796,
8106,
7,
50033,
300,
25,
300,
318,
407,
6045,
11,
3951,
8,
628,
220,
220,
220,
1303,
23283,
1096,
656,
649,
2393,
198,
220,
220,
220,
503,
7753,
796,
10903,
9399,
3419,
198,
220,
220,
220,
329,
1627,
287,
3951,
25,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
503,
7753,
11,
1627,
628,
220,
220,
220,
503,
7753,
13,
36163,
7,
15,
8,
198,
220,
220,
220,
1441,
503,
7753,
13,
961,
3419,
198,
198,
4299,
4808,
1136,
62,
7753,
62,
7890,
62,
3262,
37581,
7,
361,
3672,
11,
7071,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
8229,
1366,
329,
357,
7266,
25106,
3849,
32186,
198,
220,
220,
220,
37227,
628,
220,
220,
220,
611,
2114,
796,
17635,
628,
220,
220,
220,
6167,
796,
7071,
17816,
18242,
20520,
628,
220,
220,
220,
20966,
19,
82,
796,
7071,
17816,
541,
19,
82,
20520,
198,
220,
220,
220,
20966,
21,
82,
796,
7071,
17816,
541,
21,
82,
20520,
628,
220,
220,
220,
24308,
19,
796,
7071,
17816,
10494,
1014,
19,
20520,
198,
220,
220,
220,
24308,
21,
796,
7071,
17816,
10494,
1014,
21,
20520,
628,
220,
220,
220,
288,
5907,
796,
7071,
17816,
67,
5907,
20520,
628,
220,
220,
220,
503,
7753,
796,
10903,
9399,
3419,
628,
220,
220,
220,
611,
6167,
25,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
448,
7753,
11,
25113,
36052,
4064,
82,
1,
4064,
6167,
198,
220,
220,
220,
3601,
9609,
448,
7753,
11,
705,
10943,
45,
24565,
2625,
316,
2881,
316,
30543,
198,
220,
220,
220,
3601,
9609,
448,
7753,
11,
705,
41358,
49836,
28,
4,
82,
6,
4064,
611,
3672,
628,
220,
220,
220,
611,
20966,
19,
82,
25,
198,
220,
220,
220,
220,
220,
220,
220,
20966,
19,
796,
20966,
19,
82,
13,
12924,
7,
15,
8,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
448,
7753,
11,
705,
4061,
2625,
12708,
30543,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
448,
7753,
11,
705,
2885,
7707,
2625,
4,
7,
21975,
8,
82,
30543,
4064,
20966,
19,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
448,
7753,
11,
705,
12884,
31180,
42,
2625,
4,
7,
3262,
27932,
8,
82,
30543,
4064,
20966,
19,
628,
220,
220,
220,
220,
220,
220,
220,
611,
24308,
19,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
448,
7753,
11,
705,
38,
6158,
27285,
2625,
4,
82,
30543,
4064,
24308,
19,
628,
220,
220,
220,
611,
20966,
21,
82,
25,
198,
220,
220,
220,
220,
220,
220,
220,
20966,
21,
796,
20966,
21,
82,
13,
12924,
7,
15,
8,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
448,
7753,
11,
705,
4061,
21,
2625,
12708,
30543,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
448,
7753,
11,
705,
2885,
7707,
21,
2625,
4,
7,
21975,
8,
82,
14,
4,
7,
40290,
11925,
8,
82,
30543,
4064,
20966,
21,
628,
220,
220,
220,
220,
220,
220,
220,
611,
24308,
21,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
448,
7753,
11,
705,
38,
6158,
27285,
21,
2625,
4,
82,
30543,
4064,
24308,
21,
628,
220,
220,
220,
11926,
796,
685,
29653,
4,
7,
27349,
8,
82,
14,
4,
7,
3262,
27932,
8,
82,
2884,
4064,
7,
10494,
1014,
8,
82,
30543,
4064,
6339,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
6339,
287,
7071,
17816,
81,
448,
274,
20520,
611,
407,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6339,
17816,
27349,
20520,
6624,
705,
15,
13,
15,
13,
15,
13,
15,
6,
290,
407,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6339,
17816,
3262,
27932,
20520,
6624,
705,
15,
13,
15,
13,
15,
13,
15,
6,
290,
407,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6339,
17816,
10494,
1014,
20520,
6624,
24308,
19,
60,
628,
220,
220,
220,
611,
11926,
25,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
448,
7753,
11,
705,
49,
12425,
1546,
16193,
4,
82,
33047,
4064,
705,
45302,
22179,
7,
81,
448,
274,
8,
628,
220,
220,
220,
611,
288,
5907,
25,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
448,
7753,
11,
705,
35,
8035,
16193,
4,
82,
33047,
4064,
705,
45302,
22179,
7,
67,
5907,
8,
628,
220,
220,
220,
1303,
9461,
751,
5637,
47217,
13,
770,
318,
1611,
286,
8156,
88,
11,
766,
2912,
379,
198,
220,
220,
220,
1303,
1353,
329,
7468,
198,
220,
220,
220,
47217,
796,
37250,
4,
7,
21975,
8,
82,
14,
4,
7,
3262,
27932,
8,
82,
6,
4064,
20966,
19,
329,
20966,
19,
287,
20966,
19,
82,
60,
1343,
3467,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
37250,
4,
7,
21975,
8,
82,
14,
4,
7,
40290,
11925,
8,
82,
6,
4064,
20966,
21,
329,
20966,
21,
287,
20966,
21,
82,
60,
628,
220,
220,
220,
611,
47217,
25,
198,
220,
220,
220,
220,
220,
220,
220,
9729,
796,
705,
26,
45302,
22179,
7,
17816,
541,
37817,
751,
4064,
82,
1614,
4064,
82,
6,
4064,
357,
64,
11,
611,
3672,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
257,
287,
47217,
12962,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
448,
7753,
11,
705,
32782,
62,
8577,
2625,
4,
82,
30543,
4064,
9729,
628,
220,
220,
220,
220,
220,
220,
220,
47217,
13,
50188,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
9729,
796,
705,
26,
45302,
22179,
7,
17816,
541,
37817,
1619,
4064,
82,
1614,
4064,
82,
6,
4064,
357,
64,
11,
611,
3672,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
329,
257,
287,
47217,
12962,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
9609,
448,
7753,
11,
705,
46437,
62,
41925,
2625,
4,
82,
30543,
4064,
9729,
628,
220,
220,
220,
503,
7753,
13,
36163,
7,
15,
8,
198,
220,
220,
220,
1441,
503,
7753,
13,
961,
3419,
628,
628,
198,
4299,
1429,
62,
39994,
62,
16624,
62,
1455,
1590,
7,
19119,
62,
16624,
11,
20314,
2599,
198,
220,
220,
220,
37227,
8645,
378,
2458,
316,
329,
7071,
8398,
37811,
628,
220,
220,
220,
1167,
576,
796,
10903,
9399,
7,
19119,
62,
16624,
13,
1136,
7,
10943,
37,
62,
25664,
11,
10148,
4008,
198,
220,
220,
220,
1366,
796,
4808,
19119,
62,
6015,
62,
10414,
62,
1455,
1590,
7,
259,
7753,
11,
20314,
8,
198,
220,
220,
220,
4296,
62,
16624,
58,
10943,
37,
62,
25664,
60,
796,
1366,
628,
198,
4299,
1429,
62,
39994,
62,
16624,
62,
3262,
34168,
7,
19119,
62,
16624,
11,
20314,
2599,
198,
220,
220,
220,
37227,
8645,
378,
2458,
316,
329,
7071,
8398,
37811,
628,
220,
220,
220,
1303,
2039,
6975,
378,
477,
286,
262,
4683,
3127,
3696,
198,
220,
220,
220,
4781,
62,
16624,
796,
900,
3419,
198,
220,
220,
220,
329,
29472,
287,
28686,
13,
4868,
15908,
7,
12884,
4177,
43,
62,
34720,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
2393,
6978,
796,
28686,
13,
6978,
13,
22179,
7,
12884,
4177,
43,
62,
34720,
11,
29472,
8,
198,
220,
220,
220,
220,
220,
220,
220,
611,
407,
29472,
13,
437,
2032,
342,
10786,
93,
11537,
290,
407,
28686,
13,
6978,
13,
9409,
343,
7,
7753,
6978,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4781,
62,
16624,
13,
2860,
7,
7753,
6978,
8,
628,
220,
220,
220,
2010,
14933,
796,
17635,
198,
220,
220,
220,
329,
611,
3672,
11,
7071,
287,
20314,
13,
2676,
23814,
33529,
198,
220,
220,
220,
220,
220,
220,
220,
1366,
796,
4808,
1136,
62,
7753,
62,
7890,
62,
3262,
34168,
7,
361,
3672,
11,
7071,
8,
628,
220,
220,
220,
220,
220,
220,
220,
2393,
6978,
796,
28686,
13,
6978,
13,
22179,
7,
12884,
4177,
43,
62,
34720,
11,
611,
3672,
8,
198,
220,
220,
220,
220,
220,
220,
220,
4296,
62,
16624,
58,
7753,
6978,
60,
796,
1366,
198,
220,
220,
220,
220,
220,
220,
220,
611,
2393,
6978,
287,
4781,
62,
16624,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4781,
62,
16624,
13,
28956,
7,
7753,
6978,
8,
628,
220,
220,
220,
220,
220,
220,
220,
2010,
14933,
13,
33295,
7,
361,
3672,
8,
628,
220,
220,
220,
1441,
4781,
62,
16624,
11,
2010,
14933,
198,
4299,
1429,
62,
39994,
62,
16624,
62,
3262,
37581,
7,
19119,
62,
16624,
11,
20314,
2599,
198,
220,
220,
220,
37227,
8645,
378,
2458,
316,
329,
7071,
8398,
37811,
628,
220,
220,
220,
1303,
2039,
6975,
378,
477,
286,
262,
4683,
3127,
3696,
198,
220,
220,
220,
4781,
62,
16624,
796,
900,
3419,
198,
220,
220,
220,
329,
29472,
287,
28686,
13,
4868,
15908,
7,
12884,
33249,
62,
34720,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
2393,
6978,
796,
28686,
13,
6978,
13,
22179,
7,
12884,
33249,
62,
34720,
11,
29472,
8,
198,
220,
220,
220,
220,
220,
220,
220,
611,
407,
29472,
13,
437,
2032,
342,
10786,
93,
11537,
290,
407,
28686,
13,
6978,
13,
9409,
343,
7,
7753,
6978,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4781,
62,
16624,
13,
2860,
7,
7753,
6978,
8,
628,
220,
220,
220,
2010,
14933,
796,
17635,
198,
220,
220,
220,
329,
611,
3672,
11,
7071,
287,
20314,
13,
2676,
23814,
33529,
198,
220,
220,
220,
220,
220,
220,
220,
1366,
796,
4808,
1136,
62,
7753,
62,
7890,
62,
3262,
37581,
7,
361,
3672,
11,
7071,
8,
628,
220,
220,
220,
220,
220,
220,
220,
2393,
6978,
796,
28686,
13,
6978,
13,
22179,
7,
12884,
33249,
62,
34720,
11,
611,
3672,
8,
198,
220,
220,
220,
220,
220,
220,
220,
4296,
62,
16624,
58,
7753,
6978,
60,
796,
1366,
198,
220,
220,
220,
220,
220,
220,
220,
611,
2393,
6978,
287,
4781,
62,
16624,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
4781,
62,
16624,
13,
28956,
7,
7753,
6978,
8,
628,
220,
220,
220,
220,
220,
220,
220,
2010,
14933,
13,
33295,
7,
361,
3672,
8,
628,
220,
220,
220,
1167,
576,
796,
10903,
9399,
7,
19119,
62,
16624,
13,
1136,
7,
10943,
37,
62,
25664,
11,
10148,
4008,
198,
220,
220,
220,
1366,
796,
4808,
19119,
62,
6015,
62,
10414,
62,
3262,
37581,
7,
259,
7753,
11,
2010,
14933,
8,
198,
220,
220,
220,
4296,
62,
16624,
58,
10943,
37,
62,
25664,
60,
796,
1366,
628,
220,
220,
220,
1441,
4781,
62,
16624,
11,
2010,
14933,
198
] | 2.353698 | 4,894 |
file = open('./input')
w = 25
h = 6
ppl = 25 * 6
line = file.readline()
layers = []
for start in range(0, len(line), ppl):
layer = line[start:start+ppl]
layers.append([int(pixel) for pixel in layer])
img = []
for i in range(ppl):
for layer in layers:
if layer[i] != 2:
img.append(layer[i])
break
for row in range(h):
print(img[row * w:(row + 1) * w])
| [
7753,
796,
1280,
7,
4458,
14,
15414,
11537,
198,
198,
86,
796,
1679,
198,
71,
796,
718,
198,
381,
75,
796,
1679,
1635,
718,
198,
198,
1370,
796,
2393,
13,
961,
1370,
3419,
198,
198,
75,
6962,
796,
17635,
198,
198,
1640,
923,
287,
2837,
7,
15,
11,
18896,
7,
1370,
828,
279,
489,
2599,
198,
220,
220,
220,
7679,
796,
1627,
58,
9688,
25,
9688,
10,
381,
75,
60,
198,
220,
220,
220,
11685,
13,
33295,
26933,
600,
7,
32515,
8,
329,
17465,
287,
7679,
12962,
198,
198,
9600,
796,
17635,
198,
1640,
1312,
287,
2837,
7,
381,
75,
2599,
198,
220,
220,
220,
329,
7679,
287,
11685,
25,
198,
220,
220,
220,
220,
220,
220,
220,
611,
7679,
58,
72,
60,
14512,
362,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
33705,
13,
33295,
7,
29289,
58,
72,
12962,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2270,
198,
198,
1640,
5752,
287,
2837,
7,
71,
2599,
198,
220,
220,
220,
3601,
7,
9600,
58,
808,
1635,
266,
37498,
808,
1343,
352,
8,
1635,
266,
12962,
198
] | 2.154255 | 188 |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['MediaGraph']
| [
2,
19617,
28,
40477,
12,
23,
198,
2,
17202,
39410,
25,
428,
2393,
373,
7560,
416,
262,
21624,
12994,
26144,
35986,
13,
17202,
198,
2,
17202,
2141,
407,
4370,
416,
1021,
4556,
345,
821,
1728,
345,
760,
644,
345,
389,
1804,
0,
17202,
198,
198,
11748,
14601,
198,
11748,
17472,
12994,
198,
11748,
17472,
12994,
13,
43282,
198,
6738,
19720,
1330,
4377,
11,
337,
5912,
11,
32233,
11,
45835,
11,
4479,
198,
6738,
2644,
1330,
4808,
315,
2410,
11,
4808,
83,
2977,
198,
6738,
764,
1330,
23862,
198,
6738,
47540,
268,
5700,
1330,
1635,
198,
6738,
47540,
15414,
82,
1330,
1635,
198,
198,
834,
439,
834,
796,
37250,
13152,
37065,
20520,
628,
198
] | 3.637168 | 113 |
# Time: O(b^(d/2)), b is the branch factor of bfs, d is the result depth
# Space: O(w * l), w is the number of words, l is the max length of words
from collections import defaultdict
from string import ascii_lowercase
# Time: O(b^d), b is the branch factor of bfs, d is the result depth
# Space: O(w * l), w is the number of words, l is the max length of words
| [
2,
3862,
25,
220,
440,
7,
65,
61,
7,
67,
14,
17,
36911,
275,
318,
262,
8478,
5766,
286,
275,
9501,
11,
288,
318,
262,
1255,
6795,
198,
2,
4687,
25,
440,
7,
86,
1635,
300,
828,
266,
318,
262,
1271,
286,
2456,
11,
300,
318,
262,
3509,
4129,
286,
2456,
198,
198,
6738,
17268,
1330,
4277,
11600,
198,
6738,
4731,
1330,
355,
979,
72,
62,
21037,
7442,
628,
198,
198,
2,
3862,
25,
220,
440,
7,
65,
61,
67,
828,
275,
318,
262,
8478,
5766,
286,
275,
9501,
11,
288,
318,
262,
1255,
6795,
198,
2,
4687,
25,
440,
7,
86,
1635,
300,
828,
266,
318,
262,
1271,
286,
2456,
11,
300,
318,
262,
3509,
4129,
286,
2456,
198
] | 3.058333 | 120 |
from abc import ABCMeta
class Writer(metaclass=ABCMeta):
"""
動画書き込みの、抽象基底クラス
"""
def open(self, **kwargs):
"""
書き込み機能を開く
"""
pass
def write(self, **kwargs):
"""
出力する
"""
pass
def close(self, **kwargs):
"""
処理を終了する
"""
pass
| [
6738,
450,
66,
1330,
9738,
48526,
628,
198,
4871,
26606,
7,
4164,
330,
31172,
28,
24694,
48526,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
10263,
233,
243,
18796,
119,
162,
249,
116,
33778,
164,
122,
120,
2515,
123,
5641,
23513,
162,
232,
121,
164,
109,
94,
161,
253,
118,
41753,
243,
14099,
9263,
8943,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
825,
1280,
7,
944,
11,
12429,
46265,
22046,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
10545,
249,
116,
33778,
164,
122,
120,
2515,
123,
49960,
47797,
121,
31758,
38461,
233,
31917,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
1208,
628,
220,
220,
220,
825,
3551,
7,
944,
11,
12429,
46265,
22046,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
10263,
229,
118,
27950,
249,
33623,
25748,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
1208,
628,
220,
220,
220,
825,
1969,
7,
944,
11,
12429,
46265,
22046,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
10263,
229,
99,
49426,
228,
31758,
163,
113,
224,
12859,
228,
33623,
25748,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
1208,
198
] | 1.427984 | 243 |
from contextlib import contextmanager, ExitStack
from pathlib import Path
from typing import Iterator
from npipes.utils.typeshed import pathlike
@contextmanager
def autoDeleteFile(path:pathlike) -> Iterator[pathlike]:
"""Context manager that deletes a single file when the context ends
"""
try:
yield path
finally:
if Path(path).is_file():
Path(path).unlink()
class AutoDeleter(ExitStack):
"""Stack manager for auto-deleting files; allows files to be added incrementally.
Useful for working with temporary files on disk that should be
removed at the end of a computation.
Ex:
with AutoDeleter() as deleter:
deleter.add(file_1)
# ...
deleter.add(file_2)
# ...
file_3 = deleter.add("some_file.txt")
# file_1, file_2, and file_3 are deleted here automatically
"""
def add(self, path:pathlike) -> pathlike:
"""Returns path after adding it to the auto-deletion context.
"""
return self.enter_context(autoDeleteFile(path))
| [
6738,
4732,
8019,
1330,
4732,
37153,
11,
29739,
25896,
198,
6738,
3108,
8019,
1330,
10644,
198,
6738,
19720,
1330,
40806,
1352,
198,
198,
6738,
45941,
18636,
13,
26791,
13,
19199,
704,
1330,
3108,
2339,
198,
198,
31,
22866,
37153,
198,
4299,
8295,
38727,
8979,
7,
6978,
25,
6978,
2339,
8,
4613,
40806,
1352,
58,
6978,
2339,
5974,
198,
220,
220,
220,
37227,
21947,
4706,
326,
28128,
274,
257,
2060,
2393,
618,
262,
4732,
5645,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
1949,
25,
198,
220,
220,
220,
220,
220,
220,
220,
7800,
3108,
198,
220,
220,
220,
3443,
25,
198,
220,
220,
220,
220,
220,
220,
220,
611,
10644,
7,
6978,
737,
271,
62,
7753,
33529,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
10644,
7,
6978,
737,
403,
8726,
3419,
628,
198,
4871,
11160,
5005,
293,
353,
7,
30337,
25896,
2599,
198,
220,
220,
220,
37227,
25896,
4706,
329,
8295,
12,
2934,
293,
889,
3696,
26,
3578,
3696,
284,
307,
2087,
18703,
453,
13,
628,
220,
220,
220,
49511,
329,
1762,
351,
8584,
3696,
319,
11898,
326,
815,
307,
198,
220,
220,
220,
4615,
379,
262,
886,
286,
257,
29964,
13,
628,
220,
220,
220,
1475,
25,
198,
220,
220,
220,
351,
11160,
5005,
293,
353,
3419,
355,
10881,
353,
25,
198,
220,
220,
220,
220,
220,
220,
220,
10881,
353,
13,
2860,
7,
7753,
62,
16,
8,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
2644,
198,
220,
220,
220,
220,
220,
220,
220,
10881,
353,
13,
2860,
7,
7753,
62,
17,
8,
198,
220,
220,
220,
220,
220,
220,
220,
1303,
2644,
198,
220,
220,
220,
220,
220,
220,
220,
2393,
62,
18,
796,
10881,
353,
13,
2860,
7203,
11246,
62,
7753,
13,
14116,
4943,
628,
220,
220,
220,
1303,
2393,
62,
16,
11,
2393,
62,
17,
11,
290,
2393,
62,
18,
389,
13140,
994,
6338,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
825,
751,
7,
944,
11,
3108,
25,
6978,
2339,
8,
4613,
3108,
2339,
25,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
35561,
3108,
706,
4375,
340,
284,
262,
8295,
12,
2934,
1616,
295,
4732,
13,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
198,
220,
220,
220,
220,
220,
220,
220,
1441,
2116,
13,
9255,
62,
22866,
7,
23736,
38727,
8979,
7,
6978,
4008,
198
] | 2.707379 | 393 |
from bottle import run,post,request,response,route
import os
import urllib
@post('/test')
@route('/path',method="post")
if __name__ == '__main__':
port_config = int(os.getenv('PORT', 5000))
run(host='0.0.0.0', port=port_config)
| [
6738,
9294,
1330,
1057,
11,
7353,
11,
25927,
11,
26209,
11,
38629,
198,
11748,
28686,
198,
11748,
2956,
297,
571,
198,
198,
31,
7353,
10786,
14,
9288,
11537,
198,
198,
31,
38629,
10786,
14,
6978,
3256,
24396,
2625,
7353,
4943,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
2493,
62,
11250,
796,
493,
7,
418,
13,
1136,
24330,
10786,
15490,
3256,
23336,
4008,
198,
220,
220,
220,
1057,
7,
4774,
11639,
15,
13,
15,
13,
15,
13,
15,
3256,
2493,
28,
634,
62,
11250,
8,
198
] | 2.553191 | 94 |
import time as time_lib
import numpy as np
import sounddevice as sd
duration = 50 # in seconds
warmup_time = 2 # in seconds
max_pop_time = 3 # in seconds time
pop_threshold = 15 # in volume units
min_pop_time = 512 # in milliseconds
pop_times = []
if __name__ == '__main__':
main()
| [
11748,
640,
355,
640,
62,
8019,
198,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
2128,
25202,
355,
45647,
198,
198,
32257,
796,
2026,
220,
1303,
287,
4201,
198,
31975,
929,
62,
2435,
796,
362,
220,
1303,
287,
4201,
198,
9806,
62,
12924,
62,
2435,
796,
513,
220,
1303,
287,
4201,
640,
198,
12924,
62,
400,
10126,
796,
1315,
220,
1303,
287,
6115,
4991,
198,
1084,
62,
12924,
62,
2435,
796,
22243,
220,
1303,
287,
38694,
198,
12924,
62,
22355,
796,
17635,
628,
628,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
1388,
3419,
198
] | 2.893204 | 103 |
# Functions specific to restricted boltzmann machines
# Adapted from MFP/Functions.py
import numpy as np
# BASIS FUNCTIONS: Regression
# Diagonalize first dimension of an n-dimensional array
tau = 1 # Sigmoid threshold unit
basis_logistic = Function('basis', 'logistic', # Commonly known as 'Sigmoid'
[lambda x: tau * (1 + np.exp(-x/tau))**-1, # S
lambda x: np.diag(np.exp(x / tau) / (np.exp(x / tau) + 1) ** 2)]) # S * (1 - S)
# BASIS FUNCTIONS: Classification
basis_softmax = Function('basis', 'SMax',
[softmax,
lambda x: diag(softmax(x)) - softmax(x) @ softmax(x).T])
# ANNEALING FUNCTIONS (learning rate)
anneal_fixed = Function('learn', 'fixed',
[lambda t, d, lim: 1])
anneal_linear = Function('learn', 'linear',
[lambda t, d, lim: 1 - t/lim])
anneal_inverse = Function('learn', 'inverse',
[lambda t, d, lim: 1 / (d * t)])
anneal_power = Function('learn', 'power',
[lambda t, d, lim: d**t])
anneal_exp = Function('learn', 'exp',
[lambda t, d, lim: np.exp(-t / l)])
# DISTRIBUTION FUNCTIONS
dist_uniform = Function('dist', 'uniform',
[lambda *args: np.random.uniform(low=-1, high=1, size=[*args])])
dist_normal = Function('dist', 'normal',
[lambda *args: np.random.normal(loc=0, scale=1, size=[*args])])
| [
2,
40480,
2176,
284,
10770,
18100,
89,
9038,
8217,
198,
2,
30019,
276,
422,
337,
5837,
14,
24629,
2733,
13,
9078,
198,
198,
11748,
299,
32152,
355,
45941,
628,
198,
198,
2,
29809,
1797,
29397,
4177,
11053,
25,
3310,
2234,
198,
2,
6031,
27923,
1096,
717,
15793,
286,
281,
299,
12,
19577,
7177,
198,
198,
83,
559,
796,
352,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
311,
17225,
1868,
11387,
4326,
198,
12093,
271,
62,
6404,
2569,
796,
15553,
10786,
12093,
271,
3256,
705,
6404,
2569,
3256,
220,
1303,
8070,
306,
1900,
355,
705,
50,
17225,
1868,
6,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
685,
50033,
2124,
25,
256,
559,
1635,
357,
16,
1343,
45941,
13,
11201,
32590,
87,
14,
83,
559,
4008,
1174,
12,
16,
11,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
311,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
37456,
2124,
25,
45941,
13,
10989,
363,
7,
37659,
13,
11201,
7,
87,
1220,
256,
559,
8,
1220,
357,
37659,
13,
11201,
7,
87,
1220,
256,
559,
8,
1343,
352,
8,
12429,
362,
8,
12962,
220,
1303,
311,
1635,
357,
16,
532,
311,
8,
628,
198,
2,
29809,
1797,
29397,
4177,
11053,
25,
40984,
198,
198,
12093,
271,
62,
4215,
9806,
796,
15553,
10786,
12093,
271,
3256,
705,
50,
11518,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
685,
4215,
9806,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
37456,
2124,
25,
2566,
363,
7,
4215,
9806,
7,
87,
4008,
532,
2705,
9806,
7,
87,
8,
2488,
2705,
9806,
7,
87,
737,
51,
12962,
628,
198,
2,
3537,
12161,
1847,
2751,
29397,
4177,
11053,
357,
40684,
2494,
8,
198,
21952,
282,
62,
34021,
220,
220,
796,
15553,
10786,
35720,
3256,
705,
34021,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
685,
50033,
256,
11,
288,
11,
1761,
25,
352,
12962,
198,
198,
21952,
282,
62,
29127,
220,
796,
15553,
10786,
35720,
3256,
705,
29127,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
685,
50033,
256,
11,
288,
11,
1761,
25,
352,
532,
256,
14,
2475,
12962,
198,
198,
21952,
282,
62,
259,
4399,
796,
15553,
10786,
35720,
3256,
705,
259,
4399,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
685,
50033,
256,
11,
288,
11,
1761,
25,
352,
1220,
357,
67,
1635,
256,
8,
12962,
198,
198,
21952,
282,
62,
6477,
220,
220,
796,
15553,
10786,
35720,
3256,
705,
6477,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
685,
50033,
256,
11,
288,
11,
1761,
25,
288,
1174,
83,
12962,
198,
198,
21952,
282,
62,
11201,
220,
220,
220,
220,
796,
15553,
10786,
35720,
3256,
705,
11201,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
685,
50033,
256,
11,
288,
11,
1761,
25,
45941,
13,
11201,
32590,
83,
1220,
300,
8,
12962,
628,
198,
2,
34957,
9865,
35354,
29397,
4177,
11053,
198,
17080,
62,
403,
6933,
796,
15553,
10786,
17080,
3256,
705,
403,
6933,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
685,
50033,
1635,
22046,
25,
45941,
13,
25120,
13,
403,
6933,
7,
9319,
10779,
16,
11,
1029,
28,
16,
11,
2546,
41888,
9,
22046,
12962,
12962,
198,
198,
17080,
62,
11265,
220,
796,
15553,
10786,
17080,
3256,
705,
11265,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
685,
50033,
1635,
22046,
25,
45941,
13,
25120,
13,
11265,
7,
17946,
28,
15,
11,
5046,
28,
16,
11,
2546,
41888,
9,
22046,
12962,
12962,
198
] | 1.984615 | 780 |
# encoding: utf-8
from manet.utils import read_image
import os
| [
2,
21004,
25,
3384,
69,
12,
23,
198,
6738,
582,
316,
13,
26791,
1330,
1100,
62,
9060,
198,
11748,
28686,
628,
198
] | 2.954545 | 22 |
from .models import Menu
| [
6738,
764,
27530,
1330,
21860,
628
] | 4.333333 | 6 |
from .dialogs import OpenView
from src.utils import EmbedFactory
from disnake.ext import commands
class Ide(commands.Cog):
"""Ide cog"""
@commands.command()
@commands.max_concurrency(1, commands.BucketType.channel)
def setup(bot: commands.Bot) -> None:
"""Setup Ide cog"""
bot.add_cog(Ide(bot))
| [
6738,
764,
38969,
18463,
1330,
4946,
7680,
198,
6738,
12351,
13,
26791,
1330,
13302,
276,
22810,
198,
6738,
595,
77,
539,
13,
2302,
1330,
9729,
628,
198,
4871,
16714,
7,
9503,
1746,
13,
34,
519,
2599,
198,
220,
220,
220,
37227,
41452,
43072,
37811,
628,
220,
220,
220,
2488,
9503,
1746,
13,
21812,
3419,
198,
220,
220,
220,
2488,
9503,
1746,
13,
9806,
62,
1102,
34415,
7,
16,
11,
9729,
13,
33,
38811,
6030,
13,
17620,
8,
628,
198,
4299,
9058,
7,
13645,
25,
9729,
13,
20630,
8,
4613,
6045,
25,
198,
220,
220,
220,
37227,
40786,
16714,
43072,
37811,
628,
220,
220,
220,
10214,
13,
2860,
62,
66,
519,
7,
41452,
7,
13645,
4008,
198
] | 2.767241 | 116 |
# Creates C data structures for binary lookup table of entities,
# using python's html5 entity data.
# Usage: python3 tools/make_entities_inc.py > src/entities.inc
import html
entities5 = html.entities.html5
# remove keys without semicolons. For some reason the list
# has duplicates of a few things, like auml, one with and one
# without a semicolon.
entities = sorted([(k[:-1], entities5[k].encode('utf-8')) for k in entities5.keys() if k[-1] == ';'])
# Print out the header:
print("""/* Autogenerated by tools/make_headers_inc.py */
struct cmark_entity_node {
unsigned char *entity;
unsigned char bytes[8];
};
#define CMARK_ENTITY_MIN_LENGTH 2
#define CMARK_ENTITY_MAX_LENGTH 31""")
print("#define CMARK_NUM_ENTITIES " + str(len(entities)));
print("\nstatic const struct cmark_entity_node cmark_entities[] = {");
for (ent, bs) in entities:
print('{(unsigned char*)"' + ent + '", {' + ', '.join(map(str, bs)) + ', 0}},')
print("};")
| [
2,
7921,
274,
327,
1366,
8573,
329,
13934,
35847,
3084,
286,
12066,
11,
198,
2,
1262,
21015,
338,
27711,
20,
9312,
1366,
13,
198,
2,
29566,
25,
21015,
18,
4899,
14,
15883,
62,
298,
871,
62,
1939,
13,
9078,
1875,
12351,
14,
298,
871,
13,
1939,
198,
198,
11748,
27711,
198,
198,
298,
871,
20,
796,
27711,
13,
298,
871,
13,
6494,
20,
198,
198,
2,
4781,
8251,
1231,
5026,
27045,
684,
13,
220,
1114,
617,
1738,
262,
1351,
198,
2,
468,
14184,
16856,
286,
257,
1178,
1243,
11,
588,
257,
388,
75,
11,
530,
351,
290,
530,
198,
2,
1231,
257,
5026,
27045,
261,
13,
198,
298,
871,
796,
23243,
26933,
7,
74,
58,
21912,
16,
4357,
12066,
20,
58,
74,
4083,
268,
8189,
10786,
40477,
12,
23,
6,
4008,
329,
479,
287,
12066,
20,
13,
13083,
3419,
611,
479,
58,
12,
16,
60,
6624,
705,
26,
6,
12962,
198,
198,
2,
12578,
503,
262,
13639,
25,
198,
4798,
7203,
15931,
15211,
5231,
519,
877,
515,
416,
4899,
14,
15883,
62,
50145,
62,
1939,
13,
9078,
9466,
198,
198,
7249,
269,
4102,
62,
26858,
62,
17440,
1391,
198,
197,
43375,
1149,
1635,
26858,
26,
198,
220,
220,
220,
220,
220,
220,
220,
22165,
1149,
9881,
58,
23,
11208,
198,
19629,
198,
198,
2,
13086,
16477,
14175,
62,
3525,
9050,
62,
23678,
62,
43,
49494,
362,
198,
2,
13086,
16477,
14175,
62,
3525,
9050,
62,
22921,
62,
43,
49494,
3261,
15931,
4943,
198,
198,
4798,
7203,
2,
13086,
16477,
14175,
62,
41359,
62,
3525,
30383,
366,
1343,
965,
7,
11925,
7,
298,
871,
4008,
1776,
198,
198,
4798,
7203,
59,
77,
12708,
1500,
2878,
269,
4102,
62,
26858,
62,
17440,
269,
4102,
62,
298,
871,
21737,
796,
1391,
15341,
198,
198,
1640,
357,
298,
11,
275,
82,
8,
287,
12066,
25,
198,
220,
3601,
10786,
90,
7,
43375,
1149,
9,
16725,
6,
1343,
920,
1343,
705,
1600,
1391,
6,
1343,
46083,
45302,
22179,
7,
8899,
7,
2536,
11,
275,
82,
4008,
1343,
46083,
657,
11709,
4032,
8,
198,
198,
4798,
7203,
19629,
4943,
198
] | 2.781977 | 344 |
print "You enter a dark room with two doors. Do you go thorugh door #1 or door # 2"
door = raw_input(">" )
if door == "1":
print "Theres a giant bear here earting a cheescake. What do you do?"
print "Option '1'. Take the cake"
print "Option '2'. Scream at the bear."
bear = raw_input("> ")
if bear == "1":
print "The bears eats your face off. Loser face! "
elif bear == "2":
print "The bear eats your legs off. Good job Legless face! "
else: #haha error in the indentiuon in the book.
print "Well, doing $s is pribably better. Bear runs way " % bear
elif door == "2":
print "You stare into the endless abyss at Cthulhu's retina. "
print "1. Blueberries."
print "2. Yellow Hacket clothespins."
print "3. Understanding revolvers yelling melodies. "
insanity = raw_input("> ")
if insanity == "1" or insanity == "2":
print "Your body survives powered by a mind of hjello. Greatness!"
else:
print "The insanity rots your eyes int a pool of muck. great!"
else:
print "You stumble around and fall on a knife and die. You suck!"
| [
4798,
366,
1639,
3802,
257,
3223,
2119,
351,
734,
8215,
13,
2141,
345,
467,
41899,
6724,
3420,
1303,
16,
393,
3420,
1303,
362,
1,
198,
198,
9424,
796,
8246,
62,
15414,
7,
5320,
1,
1267,
198,
198,
361,
3420,
6624,
366,
16,
1298,
198,
220,
220,
220,
3601,
366,
464,
411,
257,
6175,
6842,
994,
304,
433,
278,
257,
1125,
3798,
539,
13,
1867,
466,
345,
466,
1701,
198,
220,
220,
220,
3601,
366,
19722,
705,
16,
4458,
7214,
262,
12187,
1,
198,
220,
220,
220,
3601,
366,
19722,
705,
17,
4458,
36306,
379,
262,
6842,
526,
628,
220,
220,
220,
6842,
796,
8246,
62,
15414,
7,
5320,
366,
8,
628,
220,
220,
220,
611,
6842,
6624,
366,
16,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
366,
464,
13062,
25365,
534,
1986,
572,
13,
5401,
263,
1986,
0,
366,
198,
220,
220,
220,
1288,
361,
6842,
6624,
366,
17,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
366,
464,
6842,
25365,
534,
7405,
572,
13,
4599,
1693,
3564,
1203,
1986,
0,
366,
198,
220,
220,
220,
2073,
25,
1303,
71,
12236,
4049,
287,
262,
33793,
16115,
261,
287,
262,
1492,
13,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
366,
5779,
11,
1804,
720,
82,
318,
279,
822,
1346,
1365,
13,
14732,
4539,
835,
366,
4064,
6842,
198,
417,
361,
3420,
6624,
366,
17,
1298,
198,
220,
220,
220,
3601,
366,
1639,
24170,
656,
262,
13079,
37678,
379,
39316,
338,
45804,
13,
366,
198,
220,
220,
220,
3601,
366,
16,
13,
4518,
20853,
526,
198,
220,
220,
220,
3601,
366,
17,
13,
12550,
367,
8317,
8242,
49556,
526,
198,
220,
220,
220,
3601,
366,
18,
13,
28491,
35891,
690,
22187,
47077,
13,
366,
628,
220,
220,
220,
30949,
796,
8246,
62,
15414,
7,
5320,
366,
8,
628,
220,
220,
220,
611,
30949,
6624,
366,
16,
1,
393,
30949,
6624,
366,
17,
1298,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
366,
7120,
1767,
36417,
13232,
416,
257,
2000,
286,
289,
73,
11109,
13,
3878,
1108,
2474,
198,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
3601,
366,
464,
30949,
686,
912,
534,
2951,
493,
257,
5933,
286,
285,
1347,
13,
1049,
2474,
198,
198,
17772,
25,
198,
220,
220,
220,
3601,
366,
1639,
35174,
1088,
290,
2121,
319,
257,
9845,
290,
4656,
13,
921,
10110,
2474,
198
] | 2.827068 | 399 |
line0.timing_system.channels.hsc.delay = 4.97e-06
line0.Phase [s] = 5.4527e-06
line0.ChopX = 36.78
line0.ChopY = 30.11
line0.description = 'S-1t'
line0.updated = '2019-05-30 14:18:48'
line1.timing_system.channels.hsc.delay = 0.0
line1.ChopX = 36.78
line1.ChopY = 31.136
line1.description = 'S-1'
line1.updated = '2019-05-30 14:25:48'
line2.timing_system.channels.hsc.delay = 8.232e-09
line2.ChopX = 36.78
line2.ChopY = 31.0579
line2.description = 'S-3'
line2.updated = '2019-05-30 14:28:12'
line3.timing_system.channels.hsc.delay = 1.372e-08
line3.ChopX = 36.78
line3.ChopY = 30.982499999999998
line3.description = 'S-5'
line3.updated = '2019-05-30 14:28:12'
line4.timing_system.channels.hsc.delay = 3.0184e-08
line4.ChopX = 36.78
line4.ChopY = 30.7563
line4.description = 'S-11'
line4.updated = '2019-05-30 14:28:12'
line5.timing_system.channels.hsc.delay = 6.86e-08
line5.ChopX = 36.78
line5.ChopY = 30.2285
line5.description = 'S-25'
line5.updated = '2019-05-30 14:28:12'
line6.timing_system.channels.hsc.delay = 0.0
line6.ChopX = 36.78
line6.ChopY = 30.555
line6.description = 'H-1'
line6.updated = '2019-05-30 14:19:34'
line7.timing_system.channels.hsc.delay = 0.0
line7.ChopX = 36.78
line7.ChopY = 30.555
line7.description = 'H-56'
line7.updated = '2019-05-30 14:17:51'
line8.timing_system.channels.hsc.delay = 0.0
line8.ChopX = 27.67
line8.ChopY = 30.925
line8.description = 'Bypass'
line8.updated = '2019-05-30 14:17:51'
motor_names = ['ChopX', 'ChopY', 'timing_system.channels.hsc.delay', 'timing_system.p0_shift']
motor_labels = ['X', 'Y', 'Phase', 'P0 Shift']
nrows = 12
formats = ['%+6.4f', '%+6.4f', 'time', 'time']
title = 'High-Speed Julich Chopper Modes'
line9.description = 'S-15'
line9.updated = '2019-05-30 14:28:12'
line9.ChopX = 36.78
line9.ChopY = 30.6055
line9.timing_system.channels.hsc.delay = 4.116e-08
line10.description = 'S-19'
line10.updated = '2019-05-30 14:28:12'
line10.ChopX = 36.78
line10.ChopY = 30.4547
line10.timing_system.channels.hsc.delay = 5.2136e-08
tolerance = [0.002, 0.002, 2.8e-09, 2.8e-09]
command_row = 9
widths = [100, 100, 100]
show_in_list = True
show_stop_button = True
command_rows = [11]
row_height = 21
names = ['X', 'Y', 'phase', 'p0_shift']
line7.timing_system.p0_shift = -1.84e-06
line8.timing_system.p0_shift = 0.0
line9.timing_system.p0_shift = -2.7871134923018455e-13
line6.timing_system.p0_shift = 0.0
line5.timing_system.p0_shift = 0.0
line4.timing_system.p0_shift = 0.0
line3.timing_system.p0_shift = -2.7871134923018455e-13
line2.timing_system.p0_shift = 0.0
line1.timing_system.p0_shift = -2.7871134923018455e-13
line0.timing_system.p0_shift = 0.0
line10.timing_system.p0_shift = 0.0
line11.ChopX = 36.78
line11.updated = '2019-06-01 08:36:18'
line11.ChopY = 30.9071
line11.timing_system.channels.hsc.delay = 1.9170000000000002e-08
line11.timing_system.p0_shift = -2.7871134923018455e-13
line11.description = 'S-7' | [
1370,
15,
13,
16514,
278,
62,
10057,
13,
354,
8961,
13,
71,
1416,
13,
40850,
796,
604,
13,
5607,
68,
12,
3312,
198,
1370,
15,
13,
35645,
685,
82,
60,
796,
642,
13,
2231,
1983,
68,
12,
3312,
198,
1370,
15,
13,
1925,
404,
55,
796,
4570,
13,
3695,
198,
1370,
15,
13,
1925,
404,
56,
796,
1542,
13,
1157,
198,
1370,
15,
13,
11213,
796,
705,
50,
12,
16,
83,
6,
198,
1370,
15,
13,
43162,
796,
705,
23344,
12,
2713,
12,
1270,
1478,
25,
1507,
25,
2780,
6,
198,
1370,
16,
13,
16514,
278,
62,
10057,
13,
354,
8961,
13,
71,
1416,
13,
40850,
796,
657,
13,
15,
198,
1370,
16,
13,
1925,
404,
55,
796,
4570,
13,
3695,
198,
1370,
16,
13,
1925,
404,
56,
796,
3261,
13,
20809,
198,
1370,
16,
13,
11213,
796,
705,
50,
12,
16,
6,
198,
1370,
16,
13,
43162,
796,
705,
23344,
12,
2713,
12,
1270,
1478,
25,
1495,
25,
2780,
6,
198,
1370,
17,
13,
16514,
278,
62,
10057,
13,
354,
8961,
13,
71,
1416,
13,
40850,
796,
807,
13,
24339,
68,
12,
2931,
198,
1370,
17,
13,
1925,
404,
55,
796,
4570,
13,
3695,
198,
1370,
17,
13,
1925,
404,
56,
796,
3261,
13,
2713,
3720,
198,
1370,
17,
13,
11213,
796,
705,
50,
12,
18,
6,
198,
1370,
17,
13,
43162,
796,
705,
23344,
12,
2713,
12,
1270,
1478,
25,
2078,
25,
1065,
6,
198,
1370,
18,
13,
16514,
278,
62,
10057,
13,
354,
8961,
13,
71,
1416,
13,
40850,
796,
352,
13,
36720,
68,
12,
2919,
198,
1370,
18,
13,
1925,
404,
55,
796,
4570,
13,
3695,
198,
1370,
18,
13,
1925,
404,
56,
796,
1542,
13,
4089,
1731,
24214,
24214,
34808,
198,
1370,
18,
13,
11213,
796,
705,
50,
12,
20,
6,
198,
1370,
18,
13,
43162,
796,
705,
23344,
12,
2713,
12,
1270,
1478,
25,
2078,
25,
1065,
6,
198,
1370,
19,
13,
16514,
278,
62,
10057,
13,
354,
8961,
13,
71,
1416,
13,
40850,
796,
513,
13,
486,
5705,
68,
12,
2919,
198,
1370,
19,
13,
1925,
404,
55,
796,
4570,
13,
3695,
198,
1370,
19,
13,
1925,
404,
56,
796,
1542,
13,
2425,
5066,
198,
1370,
19,
13,
11213,
796,
705,
50,
12,
1157,
6,
198,
1370,
19,
13,
43162,
796,
705,
23344,
12,
2713,
12,
1270,
1478,
25,
2078,
25,
1065,
6,
198,
1370,
20,
13,
16514,
278,
62,
10057,
13,
354,
8961,
13,
71,
1416,
13,
40850,
796,
718,
13,
4521,
68,
12,
2919,
198,
1370,
20,
13,
1925,
404,
55,
796,
4570,
13,
3695,
198,
1370,
20,
13,
1925,
404,
56,
796,
1542,
13,
1828,
5332,
198,
1370,
20,
13,
11213,
796,
705,
50,
12,
1495,
6,
198,
1370,
20,
13,
43162,
796,
705,
23344,
12,
2713,
12,
1270,
1478,
25,
2078,
25,
1065,
6,
198,
1370,
21,
13,
16514,
278,
62,
10057,
13,
354,
8961,
13,
71,
1416,
13,
40850,
796,
657,
13,
15,
198,
1370,
21,
13,
1925,
404,
55,
796,
4570,
13,
3695,
198,
1370,
21,
13,
1925,
404,
56,
796,
1542,
13,
31046,
198,
1370,
21,
13,
11213,
796,
705,
39,
12,
16,
6,
198,
1370,
21,
13,
43162,
796,
705,
23344,
12,
2713,
12,
1270,
1478,
25,
1129,
25,
2682,
6,
198,
1370,
22,
13,
16514,
278,
62,
10057,
13,
354,
8961,
13,
71,
1416,
13,
40850,
796,
657,
13,
15,
198,
1370,
22,
13,
1925,
404,
55,
796,
4570,
13,
3695,
198,
1370,
22,
13,
1925,
404,
56,
796,
1542,
13,
31046,
198,
1370,
22,
13,
11213,
796,
705,
39,
12,
3980,
6,
198,
1370,
22,
13,
43162,
796,
705,
23344,
12,
2713,
12,
1270,
1478,
25,
1558,
25,
4349,
6,
198,
1370,
23,
13,
16514,
278,
62,
10057,
13,
354,
8961,
13,
71,
1416,
13,
40850,
796,
657,
13,
15,
198,
1370,
23,
13,
1925,
404,
55,
796,
2681,
13,
3134,
198,
1370,
23,
13,
1925,
404,
56,
796,
1542,
13,
46351,
198,
1370,
23,
13,
11213,
796,
705,
3886,
6603,
6,
198,
1370,
23,
13,
43162,
796,
705,
23344,
12,
2713,
12,
1270,
1478,
25,
1558,
25,
4349,
6,
198,
76,
20965,
62,
14933,
796,
37250,
1925,
404,
55,
3256,
705,
1925,
404,
56,
3256,
705,
16514,
278,
62,
10057,
13,
354,
8961,
13,
71,
1416,
13,
40850,
3256,
705,
16514,
278,
62,
10057,
13,
79,
15,
62,
30846,
20520,
198,
76,
20965,
62,
23912,
1424,
796,
37250,
55,
3256,
705,
56,
3256,
705,
35645,
3256,
705,
47,
15,
15576,
20520,
198,
77,
8516,
796,
1105,
198,
687,
1381,
796,
37250,
4,
10,
21,
13,
19,
69,
3256,
705,
4,
10,
21,
13,
19,
69,
3256,
705,
2435,
3256,
705,
2435,
20520,
198,
7839,
796,
705,
11922,
12,
22785,
5979,
488,
10031,
2848,
42082,
6,
198,
1370,
24,
13,
11213,
796,
705,
50,
12,
1314,
6,
198,
1370,
24,
13,
43162,
796,
705,
23344,
12,
2713,
12,
1270,
1478,
25,
2078,
25,
1065,
6,
198,
1370,
24,
13,
1925,
404,
55,
796,
4570,
13,
3695,
198,
1370,
24,
13,
1925,
404,
56,
796,
1542,
13,
1899,
2816,
198,
1370,
24,
13,
16514,
278,
62,
10057,
13,
354,
8961,
13,
71,
1416,
13,
40850,
796,
604,
13,
18298,
68,
12,
2919,
198,
1370,
940,
13,
11213,
796,
705,
50,
12,
1129,
6,
198,
1370,
940,
13,
43162,
796,
705,
23344,
12,
2713,
12,
1270,
1478,
25,
2078,
25,
1065,
6,
198,
1370,
940,
13,
1925,
404,
55,
796,
4570,
13,
3695,
198,
1370,
940,
13,
1925,
404,
56,
796,
1542,
13,
2231,
2857,
198,
1370,
940,
13,
16514,
278,
62,
10057,
13,
354,
8961,
13,
71,
1416,
13,
40850,
796,
642,
13,
17,
20809,
68,
12,
2919,
198,
83,
37668,
796,
685,
15,
13,
21601,
11,
657,
13,
21601,
11,
362,
13,
23,
68,
12,
2931,
11,
362,
13,
23,
68,
12,
2931,
60,
198,
21812,
62,
808,
796,
860,
198,
10394,
82,
796,
685,
3064,
11,
1802,
11,
1802,
60,
198,
12860,
62,
259,
62,
4868,
796,
6407,
198,
12860,
62,
11338,
62,
16539,
796,
6407,
198,
21812,
62,
8516,
796,
685,
1157,
60,
198,
808,
62,
17015,
796,
2310,
198,
14933,
796,
37250,
55,
3256,
705,
56,
3256,
705,
40715,
3256,
705,
79,
15,
62,
30846,
20520,
198,
1370,
22,
13,
16514,
278,
62,
10057,
13,
79,
15,
62,
30846,
796,
532,
16,
13,
5705,
68,
12,
3312,
198,
1370,
23,
13,
16514,
278,
62,
10057,
13,
79,
15,
62,
30846,
796,
657,
13,
15,
198,
1370,
24,
13,
16514,
278,
62,
10057,
13,
79,
15,
62,
30846,
796,
532,
17,
13,
41019,
1157,
27371,
1954,
29159,
30505,
68,
12,
1485,
198,
1370,
21,
13,
16514,
278,
62,
10057,
13,
79,
15,
62,
30846,
796,
657,
13,
15,
198,
1370,
20,
13,
16514,
278,
62,
10057,
13,
79,
15,
62,
30846,
796,
657,
13,
15,
198,
1370,
19,
13,
16514,
278,
62,
10057,
13,
79,
15,
62,
30846,
796,
657,
13,
15,
198,
1370,
18,
13,
16514,
278,
62,
10057,
13,
79,
15,
62,
30846,
796,
532,
17,
13,
41019,
1157,
27371,
1954,
29159,
30505,
68,
12,
1485,
198,
1370,
17,
13,
16514,
278,
62,
10057,
13,
79,
15,
62,
30846,
796,
657,
13,
15,
198,
1370,
16,
13,
16514,
278,
62,
10057,
13,
79,
15,
62,
30846,
796,
532,
17,
13,
41019,
1157,
27371,
1954,
29159,
30505,
68,
12,
1485,
198,
1370,
15,
13,
16514,
278,
62,
10057,
13,
79,
15,
62,
30846,
796,
657,
13,
15,
198,
1370,
940,
13,
16514,
278,
62,
10057,
13,
79,
15,
62,
30846,
796,
657,
13,
15,
198,
1370,
1157,
13,
1925,
404,
55,
796,
4570,
13,
3695,
198,
1370,
1157,
13,
43162,
796,
705,
23344,
12,
3312,
12,
486,
8487,
25,
2623,
25,
1507,
6,
198,
1370,
1157,
13,
1925,
404,
56,
796,
1542,
13,
24,
2998,
16,
198,
1370,
1157,
13,
16514,
278,
62,
10057,
13,
354,
8961,
13,
71,
1416,
13,
40850,
796,
352,
13,
24,
1558,
8269,
2388,
17,
68,
12,
2919,
198,
1370,
1157,
13,
16514,
278,
62,
10057,
13,
79,
15,
62,
30846,
796,
532,
17,
13,
41019,
1157,
27371,
1954,
29159,
30505,
68,
12,
1485,
198,
1370,
1157,
13,
11213,
796,
705,
50,
12,
22,
6
] | 2.134074 | 1,350 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
"""
Using Classifier and String Features
========================================
This is a famous `shogun` classifier example that predicts family name
of Shogun from his first name.
"""
from jubakit.classifier import Classifier, Schema, Dataset, Config
from jubakit.loader.csv import CSVLoader
# Load the shogun dataset.
train_loader = CSVLoader('shogun.train.csv')
test_loader = CSVLoader('shogun.test.csv')
# Define a Schema that defines types for each columns of the CSV file.
schema = Schema({
'family_name': Schema.LABEL,
'first_name': Schema.STRING,
})
# Create a Dataset.
train_dataset = Dataset(train_loader, schema).shuffle()
test_dataset = Dataset(test_loader, schema)
# Create a Classifier Service.
cfg = Config(
method = 'PA',
converter = {
'string_rules': [{'key': 'first_name', 'type': 'unigram', 'sample_weight': 'bin', 'global_weight': 'bin'}]
}
)
classifier = Classifier.run(cfg)
# Train the classifier.
for _ in classifier.train(train_dataset): pass
# Classify using the classifier.
for (idx, label, result) in classifier.classify(test_dataset):
true_family_name = label
pred_family_name = result[0][0]
first_name = test_dataset.get(idx)['first_name']
print("{0} {1} ({2})".format(
pred_family_name,
first_name,
'correct!' if pred_family_name == true_family_name else 'incorrect'
))
# Stop the classifier.
classifier.stop()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
11,
7297,
11,
3601,
62,
8818,
11,
28000,
1098,
62,
17201,
874,
198,
198,
37811,
198,
12814,
5016,
7483,
290,
10903,
17571,
198,
10052,
2559,
198,
198,
1212,
318,
257,
5863,
4600,
1477,
39918,
63,
1398,
7483,
1672,
326,
26334,
1641,
1438,
198,
1659,
46775,
422,
465,
717,
1438,
13,
198,
37811,
198,
198,
6738,
474,
549,
461,
270,
13,
4871,
7483,
1330,
5016,
7483,
11,
10011,
2611,
11,
16092,
292,
316,
11,
17056,
198,
6738,
474,
549,
461,
270,
13,
29356,
13,
40664,
1330,
44189,
17401,
198,
198,
2,
8778,
262,
427,
39918,
27039,
13,
198,
27432,
62,
29356,
796,
44189,
17401,
10786,
1477,
39918,
13,
27432,
13,
40664,
11537,
198,
9288,
62,
29356,
796,
44189,
17401,
10786,
1477,
39918,
13,
9288,
13,
40664,
11537,
198,
198,
2,
2896,
500,
257,
10011,
2611,
326,
15738,
3858,
329,
1123,
15180,
286,
262,
44189,
2393,
13,
198,
15952,
2611,
796,
10011,
2611,
15090,
198,
220,
705,
17989,
62,
3672,
10354,
10011,
2611,
13,
48780,
3698,
11,
198,
220,
705,
11085,
62,
3672,
10354,
10011,
2611,
13,
18601,
2751,
11,
198,
30072,
198,
198,
2,
13610,
257,
16092,
292,
316,
13,
198,
27432,
62,
19608,
292,
316,
796,
16092,
292,
316,
7,
27432,
62,
29356,
11,
32815,
737,
1477,
18137,
3419,
198,
9288,
62,
19608,
292,
316,
796,
16092,
292,
316,
7,
9288,
62,
29356,
11,
32815,
8,
198,
198,
2,
13610,
257,
5016,
7483,
4809,
13,
198,
37581,
796,
17056,
7,
198,
220,
2446,
796,
705,
4537,
3256,
198,
220,
38394,
796,
1391,
198,
220,
220,
220,
705,
8841,
62,
38785,
10354,
685,
90,
6,
2539,
10354,
705,
11085,
62,
3672,
3256,
705,
4906,
10354,
705,
403,
328,
859,
3256,
705,
39873,
62,
6551,
10354,
705,
8800,
3256,
705,
20541,
62,
6551,
10354,
705,
8800,
6,
92,
60,
198,
220,
1782,
198,
8,
198,
4871,
7483,
796,
5016,
7483,
13,
5143,
7,
37581,
8,
198,
198,
2,
16835,
262,
1398,
7483,
13,
198,
1640,
4808,
287,
1398,
7483,
13,
27432,
7,
27432,
62,
19608,
292,
316,
2599,
1208,
198,
198,
2,
5016,
1958,
1262,
262,
1398,
7483,
13,
198,
1640,
357,
312,
87,
11,
6167,
11,
1255,
8,
287,
1398,
7483,
13,
4871,
1958,
7,
9288,
62,
19608,
292,
316,
2599,
198,
220,
2081,
62,
17989,
62,
3672,
796,
6167,
198,
220,
2747,
62,
17989,
62,
3672,
796,
1255,
58,
15,
7131,
15,
60,
198,
220,
717,
62,
3672,
796,
1332,
62,
19608,
292,
316,
13,
1136,
7,
312,
87,
8,
17816,
11085,
62,
3672,
20520,
198,
220,
3601,
7203,
90,
15,
92,
1391,
16,
92,
37913,
17,
30072,
1911,
18982,
7,
198,
220,
220,
220,
2747,
62,
17989,
62,
3672,
11,
198,
220,
220,
220,
717,
62,
3672,
11,
198,
220,
220,
220,
705,
30283,
13679,
611,
2747,
62,
17989,
62,
3672,
6624,
2081,
62,
17989,
62,
3672,
2073,
705,
1939,
47315,
6,
198,
220,
15306,
198,
198,
2,
13707,
262,
1398,
7483,
13,
198,
4871,
7483,
13,
11338,
3419,
198
] | 2.90631 | 523 |
from PIL import Image
import matplotlib.pyplot as plt
import numpy as np
from PIL import ImageOps
'''def turnWhite(imageName, newName):
img = Image.open(imageName+'.png')
img = img.convert("RGBA")
datas = img.getdata()
newData = []
for item in datas:
if item[3]!=0:
newData.append((255, 255, 255, 255))
else:
newData.append(item)
img.putdata(newData)
img.save(newName+".png", "PNG") '''
img = Image.open("shoe1.jpg")
img = ImageOps.grayscale(img)
np_im = np.array(img)
print(np_im.shape)
np_im = (np_im - np.min(np_im))/np.ptp(np_im)
#print(np_im.shape)
#datas=img.getdata()
#print(datas)
#newData = []
#for item in datas:
#newData.append((item[0]/255,item[1]/255,item[2]/255,item[3]))
#img.putdata(newData)
plt.imshow(np_im)
plt.show()
#img.save("new"+".jpg", "JPEG")
#new_im = Image.fromarray(np_im)
#new_im.save("new.jpg")
img.close()
#np_im = np.array(im)
#print(np_im)
#new_arr = ((np_im + 0) * (1/1) * 255).astype('uint8')
#print(new_arr) | [
6738,
350,
4146,
1330,
7412,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
350,
4146,
1330,
7412,
41472,
198,
198,
7061,
6,
4299,
1210,
12256,
7,
9060,
5376,
11,
649,
5376,
2599,
198,
220,
220,
220,
33705,
796,
7412,
13,
9654,
7,
9060,
5376,
10,
4458,
11134,
11537,
198,
220,
220,
220,
33705,
796,
33705,
13,
1102,
1851,
7203,
48192,
4339,
4943,
198,
220,
220,
220,
19395,
796,
33705,
13,
1136,
7890,
3419,
628,
220,
220,
220,
649,
6601,
796,
17635,
198,
220,
220,
220,
329,
2378,
287,
19395,
25,
198,
220,
220,
220,
220,
220,
220,
220,
611,
2378,
58,
18,
60,
0,
28,
15,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
649,
6601,
13,
33295,
19510,
13381,
11,
14280,
11,
14280,
11,
14280,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
2073,
25,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
649,
6601,
13,
33295,
7,
9186,
8,
628,
220,
220,
220,
33705,
13,
1996,
7890,
7,
3605,
6601,
8,
198,
220,
220,
220,
33705,
13,
21928,
7,
3605,
5376,
10,
1911,
11134,
1600,
366,
47,
10503,
4943,
705,
7061,
198,
198,
9600,
796,
7412,
13,
9654,
7203,
1477,
2577,
16,
13,
9479,
4943,
198,
9600,
796,
7412,
41472,
13,
2164,
592,
38765,
7,
9600,
8,
198,
198,
37659,
62,
320,
796,
45941,
13,
18747,
7,
9600,
8,
198,
4798,
7,
37659,
62,
320,
13,
43358,
8,
198,
37659,
62,
320,
796,
357,
37659,
62,
320,
532,
45941,
13,
1084,
7,
37659,
62,
320,
4008,
14,
37659,
13,
457,
79,
7,
37659,
62,
320,
8,
198,
198,
2,
4798,
7,
37659,
62,
320,
13,
43358,
8,
198,
2,
19608,
292,
28,
9600,
13,
1136,
7890,
3419,
198,
2,
4798,
7,
19608,
292,
8,
198,
2,
3605,
6601,
796,
17635,
198,
198,
2,
1640,
2378,
287,
19395,
25,
198,
220,
220,
220,
1303,
3605,
6601,
13,
33295,
19510,
9186,
58,
15,
60,
14,
13381,
11,
9186,
58,
16,
60,
14,
13381,
11,
9186,
58,
17,
60,
14,
13381,
11,
9186,
58,
18,
60,
4008,
198,
2,
9600,
13,
1996,
7890,
7,
3605,
6601,
8,
198,
489,
83,
13,
320,
12860,
7,
37659,
62,
320,
8,
198,
489,
83,
13,
12860,
3419,
198,
2,
9600,
13,
21928,
7203,
3605,
1,
10,
1911,
9479,
1600,
366,
12889,
7156,
4943,
198,
2,
3605,
62,
320,
796,
7412,
13,
6738,
18747,
7,
37659,
62,
320,
8,
198,
2,
3605,
62,
320,
13,
21928,
7203,
3605,
13,
9479,
4943,
198,
9600,
13,
19836,
3419,
198,
2,
37659,
62,
320,
796,
45941,
13,
18747,
7,
320,
8,
198,
198,
2,
4798,
7,
37659,
62,
320,
8,
198,
2,
3605,
62,
3258,
796,
14808,
37659,
62,
320,
1343,
657,
8,
1635,
357,
16,
14,
16,
8,
1635,
14280,
737,
459,
2981,
10786,
28611,
23,
11537,
198,
2,
4798,
7,
3605,
62,
3258,
8
] | 2.09407 | 489 |
import os
import requests
from base64 import b64encode
from flask import render_template
BASE_URL = os.getenv("NSO_URL", "http://localhost:8080")
API_ROOT = BASE_URL + '/api/running'
NSO_USERNAME = os.getenv("NSO_USERNAME", "admin")
NSO_PASSWORD = os.getenv("NSO_PASSWORD", "admin")
HEADERS = {
'Content-Type': "application/vnd.yang.data+json",
'authorization': "Basic {}".format(b64encode(b':'.join((NSO_USERNAME,
NSO_PASSWORD)
)
).strip()
),
'accept': "application/vnd.yang.collection+json"
}
def send_post(url):
"""
used to pass through NSO requests
"""
HEADERS['accept'] = 'application/vnd.yang.data+json'
if not url.startswith('/'):
url = "/{}".format(url)
url = BASE_URL + url
resp = requests.post(url, headers=HEADERS)
return resp
| [
11748,
28686,
198,
11748,
7007,
198,
6738,
2779,
2414,
1330,
275,
2414,
268,
8189,
198,
6738,
42903,
1330,
8543,
62,
28243,
198,
198,
33,
11159,
62,
21886,
796,
28686,
13,
1136,
24330,
7203,
8035,
46,
62,
21886,
1600,
366,
4023,
1378,
36750,
25,
1795,
1795,
4943,
198,
17614,
62,
13252,
2394,
796,
49688,
62,
21886,
1343,
31051,
15042,
14,
20270,
6,
198,
198,
8035,
46,
62,
29904,
20608,
796,
28686,
13,
1136,
24330,
7203,
8035,
46,
62,
29904,
20608,
1600,
366,
28482,
4943,
198,
8035,
46,
62,
47924,
54,
12532,
796,
28686,
13,
1136,
24330,
7203,
8035,
46,
62,
47924,
54,
12532,
1600,
366,
28482,
4943,
198,
198,
37682,
4877,
796,
1391,
198,
220,
220,
220,
705,
19746,
12,
6030,
10354,
366,
31438,
14,
85,
358,
13,
17859,
13,
7890,
10,
17752,
1600,
198,
220,
220,
220,
705,
9800,
1634,
10354,
366,
26416,
23884,
1911,
18982,
7,
65,
2414,
268,
8189,
7,
65,
10354,
4458,
22179,
19510,
8035,
46,
62,
29904,
20608,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
10896,
46,
62,
47924,
54,
12532,
8,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1267,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
6739,
36311,
3419,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
10612,
198,
220,
220,
220,
705,
13635,
10354,
366,
31438,
14,
85,
358,
13,
17859,
13,
43681,
10,
17752,
1,
198,
220,
220,
220,
1782,
198,
198,
4299,
3758,
62,
7353,
7,
6371,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
973,
284,
1208,
832,
10896,
46,
7007,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
39837,
4877,
17816,
13635,
20520,
796,
705,
31438,
14,
85,
358,
13,
17859,
13,
7890,
10,
17752,
6,
198,
220,
220,
220,
611,
407,
19016,
13,
9688,
2032,
342,
10786,
14,
6,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
19016,
796,
12813,
90,
92,
1911,
18982,
7,
6371,
8,
198,
220,
220,
220,
19016,
796,
49688,
62,
21886,
1343,
19016,
198,
220,
220,
220,
1217,
796,
7007,
13,
7353,
7,
6371,
11,
24697,
28,
37682,
4877,
8,
198,
220,
220,
220,
1441,
1217,
628
] | 1.916031 | 524 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.