Columns (one record per notebook file; name: type, observed range):
hexsha: stringlengths (40 to 40)
size: int64 (6 to 14.9M)
ext: stringclasses (1 value)
lang: stringclasses (1 value)
max_stars_repo_path: stringlengths (6 to 260)
max_stars_repo_name: stringlengths (6 to 119)
max_stars_repo_head_hexsha: stringlengths (40 to 41)
max_stars_repo_licenses: sequence
max_stars_count: int64 (1 to 191k)
max_stars_repo_stars_event_min_datetime: stringlengths (24 to 24)
max_stars_repo_stars_event_max_datetime: stringlengths (24 to 24)
max_issues_repo_path: stringlengths (6 to 260)
max_issues_repo_name: stringlengths (6 to 119)
max_issues_repo_head_hexsha: stringlengths (40 to 41)
max_issues_repo_licenses: sequence
max_issues_count: int64 (1 to 67k)
max_issues_repo_issues_event_min_datetime: stringlengths (24 to 24)
max_issues_repo_issues_event_max_datetime: stringlengths (24 to 24)
max_forks_repo_path: stringlengths (6 to 260)
max_forks_repo_name: stringlengths (6 to 119)
max_forks_repo_head_hexsha: stringlengths (40 to 41)
max_forks_repo_licenses: sequence
max_forks_count: int64 (1 to 105k)
max_forks_repo_forks_event_min_datetime: stringlengths (24 to 24)
max_forks_repo_forks_event_max_datetime: stringlengths (24 to 24)
avg_line_length: float64 (2 to 1.04M)
max_line_length: int64 (2 to 11.2M)
alphanum_fraction: float64 (0 to 1)
cells: sequence
cell_types: sequence
cell_type_groups: sequence
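The listing above is the header of a flattened table: each record that follows supplies one value per column, in the same order, for a single .ipynb file. As a minimal sketch of how such records could be loaded and queried (assuming they are available locally, for example as a Parquet export; the file name "notebook_records.parquet" is a placeholder and not part of this dump), pandas is sufficient:

import pandas as pd

# Placeholder path; this dump does not say where the records are stored.
df = pd.read_parquet("notebook_records.parquet")

# The frame carries the columns listed above: file metadata, repository
# star/issue/fork statistics, per-file text statistics, and the raw cells.
print(df[["hexsha", "size", "max_stars_repo_name", "max_stars_count"]].head())

# Example query: most-starred MIT-licensed notebooks, by descending stars and size.
mit = df[df["max_stars_repo_licenses"].apply(lambda lic: "MIT" in list(lic))]
top = mit.sort_values(["max_stars_count", "size"], ascending=False)
print(top[["max_stars_repo_path", "max_stars_repo_name", "avg_line_length"]].head(10))

In the records below, the count and event-datetime columns can be null, so queries over them should expect missing values.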
hexsha: d0a2ba040cadf071e99cbb835a9d5a10ab04151d
size: 10,388
ext: ipynb
lang: Jupyter Notebook
max_stars_repo_path: regression/temperature_forcasting_with_RNN.ipynb
max_stars_repo_name: Chiebukar/Deep-Learning
max_stars_repo_head_hexsha: 6a8ef8a949f074195f591858a114196df9701291
max_stars_repo_licenses: [ "MIT" ]
max_stars_count: 2
max_stars_repo_stars_event_min_datetime: 2021-05-12T04:37:10.000Z
max_stars_repo_stars_event_max_datetime: 2021-05-12T04:37:24.000Z
max_issues_repo_path: regression/temperature_forcasting_with_RNN.ipynb
max_issues_repo_name: Chiebukar/Deep-Learning
max_issues_repo_head_hexsha: 6a8ef8a949f074195f591858a114196df9701291
max_issues_repo_licenses: [ "MIT" ]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: regression/temperature_forcasting_with_RNN.ipynb
max_forks_repo_name: Chiebukar/Deep-Learning
max_forks_repo_head_hexsha: 6a8ef8a949f074195f591858a114196df9701291
max_forks_repo_licenses: [ "MIT" ]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
avg_line_length: 26.981818
max_line_length: 262
alphanum_fraction: 0.460628
cells, cell_types, cell_type_groups:
[ [ [ "<a href=\"https://colab.research.google.com/github/Chiebukar/Deep-Learning/blob/main/regression/temperature_forcasting_with_RNN.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "## Temperature Forcasting with Jena climate dataset", "_____no_output_____" ] ], [ [ "from google.colab import files\nfiles.upload()", "_____no_output_____" ], [ "!mkdir -p ~/.kaggle\n!cp kaggle.json ~/.kaggle/\n!chmod 600 ~/.kaggle/kaggle.json", "_____no_output_____" ], [ "!kaggle datasets download -d kusuri/jena-climate", "_____no_output_____" ], [ "!ls -d $PWD/*", "_____no_output_____" ], [ "!unzip \\*.zip && rm *.zip", "_____no_output_____" ], [ "!ls -d $PWD/*", "_____no_output_____" ], [ "file_dir = '/content/jena_climate_2009_2016.csv'", "_____no_output_____" ], [ "import numpy as np\nimport pandas as pd", "_____no_output_____" ], [ "jena_df = pd.read_csv(file_dir)\njena_df.head()", "_____no_output_____" ], [ "jena_df.shape", "_____no_output_____" ], [ "jena_df.columns", "_____no_output_____" ], [ "jena_arr = np.array(jena_df.iloc[:, 1:])\njena_arr[:2]", "_____no_output_____" ], [ "# standardize data\nlen_train = 200000\nmean = jena_arr[:len_train].mean(axis=0)\nstd = jena_arr[:len_train].std(axis=0)\njena_arr = (jena_arr-mean)/std", "_____no_output_____" ], [ "# generator to yield batches of data from the recent past and future target \ndef generator(data, min_index, max_index , lookback= 1440, delay=144, step= 6, batch_size=18, shuffle=False):\n\n \"\"\"\n yield batches of data from the recent past and future target\n\n data = original input data\n min_index = minimum index of data to draw from\n max_index maximum index of sata to draw from\n lookback= Number of timestamps back for input data per target\n delay = Number of timestamp in the future for target per lookback\n steps = period in timestamps to sample data\n batch_size = number of samples per batch\n shuffle = To shuffle the samples or not\n\n \"\"\"\n\n if max_index == None:\n max_index = len(data) - delay - 1\n i = min_index + lookback\n\n while 1:\n if shuffle:\n rows = np.random.randint(min_index + lookback, max_index, size= batch_size)\n else:\n if i + batch_size >= max_index:\n i = min_index + lookback\n rows = np.arange(i, min(i + batch_size, max_index))\n i += len(rows)\n \n samples = np.zeros((len(rows), lookback //step, data.shape[-1]))\n targets = np.zeros((len(rows),))\n\n for j, row in enumerate(rows):\n indices = range(rows[j] - lookback, rows[j], step)\n samples[j] = data[indices]\n targets[j] = data[rows[j] + delay][1]\n yield samples, targets", "_____no_output_____" ], [ "train_gen = generator(data= jena_arr,\n min_index= 0,\n max_index= 200000,\n shuffle= True)\n\nvalid_gen = generator(data= jena_arr,\n min_index= 200001,\n max_index = 300000,\n shuffle = True)\n\ntest_gen = generator(data = jena_arr,\n min_index = 300001,\n max_index = None,\n shuffle= True)\n", "_____no_output_____" ], [ "# get validation and test steps\nlookback = 1440\nval_steps = (300000 - 200001 - lookback)\ntest_steps = (len(jena_arr) - 300001 - lookback)", "_____no_output_____" ], [ "# establish baseline\ndef evaluate_naive_method():\n batch_maes = []\n for step in range(val_steps):\n samples, targets = next(valid_gen)\n preds = samples[:, -1, 1]\n mae = np.mean(np.abs(preds - targets))\n batch_maes.append(mae)\n return (np.mean(batch_maes))", "_____no_output_____" ], [ "# get baseline evaluation\nmae = evaluate_naive_method()\ncelsius_mae = mae 
* std[1]\ncelsius_mae", "_____no_output_____" ], [ "\nfrom tensorflow import keras\nfrom keras.models import Sequential\nfrom keras.layers import Dense, LSTM, Dropout\nfrom keras.callbacks import ModelCheckpoint", "_____no_output_____" ], [ "# build model\ndef build_model():\n model = Sequential()\n model.add(LSTM(32, dropout= 0.1, recurrent_dropout= 0.25,\n return_sequences=True, input_shape = (None, jena_arr.shape[-1])))\n model.add(LSTM(64, activation='tanh', dropout=0.5))\n model.add(Dense(8, activation= 'relu'))\n model.add(Dropout(0.1))\n model.add(Dense(1))\n\n model.compile(loss = 'mae', optimizer = 'rmsprop')\n return model", "_____no_output_____" ], [ "file_path= 'a_weights.best.hdf5'\ncheckpoint = ModelCheckpoint(file_path, monitor= 'val_loss', save_best_only= True, verbose= 1, mode= 'min')", "_____no_output_____" ], [ "model = build_model()\nhistory = model.fit(train_gen, steps_per_epoch = 500, epochs= 25, validation_data= valid_gen, \n validation_steps = 500, callbacks= checkpoint)", "_____no_output_____" ], [ "history_df = pd.DataFrame(history.history)\nhistory_df[['mae', 'val_mae']].plot()", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
hexsha: d0a2c4dab30c6e919ebc9eb59d56ff04ade45827
size: 8,239
ext: ipynb
lang: Jupyter Notebook
max_stars_repo_path: documentation/source/usersGuide/usersGuide_46_streams3.ipynb
max_stars_repo_name: cuthbertLab/music21
max_stars_repo_head_hexsha: 1be16c255460107c10d7b4bc8eb77f0d115b5eac
max_stars_repo_licenses: [ "MIT" ]
max_stars_count: 1,449
max_stars_repo_stars_event_min_datetime: 2015-01-09T15:53:56.000Z
max_stars_repo_stars_event_max_datetime: 2022-03-31T18:24:46.000Z
max_issues_repo_path: documentation/source/usersGuide/usersGuide_46_streams3.ipynb
max_issues_repo_name: cuthbertLab/music21
max_issues_repo_head_hexsha: 1be16c255460107c10d7b4bc8eb77f0d115b5eac
max_issues_repo_licenses: [ "MIT" ]
max_issues_count: 1,179
max_issues_repo_issues_event_min_datetime: 2015-01-07T17:07:54.000Z
max_issues_repo_issues_event_max_datetime: 2022-03-31T16:46:02.000Z
max_forks_repo_path: documentation/source/usersGuide/usersGuide_46_streams3.ipynb
max_forks_repo_name: cuthbertLab/music21
max_forks_repo_head_hexsha: 1be16c255460107c10d7b4bc8eb77f0d115b5eac
max_forks_repo_licenses: [ "MIT" ]
max_forks_count: 393
max_forks_repo_forks_event_min_datetime: 2015-01-03T20:38:16.000Z
max_forks_repo_forks_event_max_datetime: 2022-03-25T16:51:22.000Z
avg_line_length: 40.586207
max_line_length: 106
alphanum_fraction: 0.510256
cells, cell_types, cell_type_groups:
[ [ [ "# User's Guide, Chapter 46: Streams 3: Manipulation, inPlace, and deepcopy", "_____no_output_____" ], [ "Just getting started here...", "_____no_output_____" ], [ "## Replace all consecutive notes of a Stream with intervals while maintaining the hierarchy", "_____no_output_____" ] ], [ [ "import copy\n\ndef intervalStream(inputStream):\n out = copy.deepcopy(inputStream)\n for n in out[note.Note]:\n nNext = n.next(note.GeneralNote)\n if nNext is None or not nNext.isNote:\n continue # Rests, Chords, etc.\n n.activeSite.replace(n, interval.Interval(n, nNext))\n for other in out.recurse().getElementsNotOfClass([interval.Interval,\n stream.Stream]):\n other.activeSite.remove(other) # get rid of everything that is not an Interval or Stream\n return out", "_____no_output_____" ], [ "b = corpus.parse('bwv66.6')\nints = intervalStream(b.measures(0, 4))\nints.show('text')", "{0.0} <music21.stream.Part Soprano>\n {0.0} <music21.stream.Measure 0 offset=0.0>\n {0.0} <music21.interval.Interval M-2>\n {0.5} <music21.interval.Interval M-2>\n {1.0} <music21.stream.Measure 1 offset=1.0>\n {0.0} <music21.interval.Interval M2>\n {1.0} <music21.interval.Interval M2>\n {2.0} <music21.interval.Interval m3>\n {3.0} <music21.interval.Interval m-3>\n {5.0} <music21.stream.Measure 2 offset=5.0>\n {0.0} <music21.interval.Interval M-2>\n {1.0} <music21.interval.Interval M-2>\n {2.0} <music21.interval.Interval M3>\n {3.0} <music21.interval.Interval M-3>\n {9.0} <music21.stream.Measure 3 offset=9.0>\n {0.0} <music21.interval.Interval M2>\n {0.5} <music21.interval.Interval m-3>\n {1.0} <music21.interval.Interval M-2>\n {2.0} <music21.interval.Interval m3>\n {3.0} <music21.interval.Interval M2>\n {13.0} <music21.stream.Measure 4 offset=13.0>\n {0.0} <music21.interval.Interval P1>\n {1.0} <music21.interval.Interval P-4>\n {2.0} <music21.interval.Interval M-2>\n {3.0} <music21.interval.Interval P4>\n{0.0} <music21.stream.Part Alto>\n {0.0} <music21.stream.Measure 0 offset=0.0>\n {0.0} <music21.interval.Interval M2>\n {1.0} <music21.stream.Measure 1 offset=1.0>\n {0.0} <music21.interval.Interval M-2>\n {1.0} <music21.interval.Interval P1>\n {2.0} <music21.interval.Interval P1>\n {3.0} <music21.interval.Interval P1>\n {5.0} <music21.stream.Measure 2 offset=5.0>\n {0.0} <music21.interval.Interval P4>\n {0.5} <music21.interval.Interval m-2>\n {1.0} <music21.interval.Interval M-3>\n {2.0} <music21.interval.Interval M3>\n {3.0} <music21.interval.Interval M-2>\n {9.0} <music21.stream.Measure 3 offset=9.0>\n {0.0} <music21.interval.Interval M2>\n {0.5} <music21.interval.Interval m-3>\n {1.0} <music21.interval.Interval M-3>\n {2.0} <music21.interval.Interval P4>\n {3.0} <music21.interval.Interval P1>\n {13.0} <music21.stream.Measure 4 offset=13.0>\n {0.0} <music21.interval.Interval M-2>\n {1.0} <music21.interval.Interval m-2>\n {2.0} <music21.interval.Interval M-2>\n {3.0} <music21.interval.Interval P1>\n{0.0} <music21.stream.Part Tenor>\n {0.0} <music21.stream.Measure 0 offset=0.0>\n {0.0} <music21.interval.Interval M2>\n {0.5} <music21.interval.Interval M2>\n {1.0} <music21.stream.Measure 1 offset=1.0>\n {0.0} <music21.interval.Interval M-2>\n {1.0} <music21.interval.Interval M-2>\n {2.0} <music21.interval.Interval M2>\n {3.0} <music21.interval.Interval M-2>\n {5.0} <music21.stream.Measure 2 offset=5.0>\n {0.0} <music21.interval.Interval P5>\n {0.5} <music21.interval.Interval P1>\n {1.0} <music21.interval.Interval M-2>\n {1.5} <music21.interval.Interval m-2>\n {2.0} <music21.interval.Interval P1>\n {3.0} 
<music21.interval.Interval P1>\n {9.0} <music21.stream.Measure 3 offset=9.0>\n {0.0} <music21.interval.Interval m2>\n {0.5} <music21.interval.Interval m-2>\n {1.0} <music21.interval.Interval M-2>\n {1.5} <music21.interval.Interval M-2>\n {2.0} <music21.interval.Interval M3>\n {3.0} <music21.interval.Interval M-2>\n {13.0} <music21.stream.Measure 4 offset=13.0>\n {0.0} <music21.interval.Interval P1>\n {1.0} <music21.interval.Interval P1>\n {2.0} <music21.interval.Interval M-2>\n {2.5} <music21.interval.Interval m-2>\n {3.0} <music21.interval.Interval M-2>\n{0.0} <music21.stream.Part Bass>\n {0.0} <music21.stream.Measure 0 offset=0.0>\n {0.0} <music21.interval.Interval m-2>\n {0.5} <music21.interval.Interval M-2>\n {1.0} <music21.stream.Measure 1 offset=1.0>\n {0.0} <music21.interval.Interval M2>\n {1.0} <music21.interval.Interval m2>\n {2.0} <music21.interval.Interval m-2>\n {3.0} <music21.interval.Interval m2>\n {5.0} <music21.stream.Measure 2 offset=5.0>\n {0.0} <music21.interval.Interval m-6>\n {0.5} <music21.interval.Interval m3>\n {1.0} <music21.interval.Interval P-5>\n {2.0} <music21.interval.Interval A5>\n {3.0} <music21.interval.Interval m2>\n {9.0} <music21.stream.Measure 3 offset=9.0>\n {0.0} <music21.interval.Interval P-5>\n {0.5} <music21.interval.Interval M2>\n {1.0} <music21.interval.Interval P-5>\n {2.0} <music21.interval.Interval P8>\n {3.0} <music21.interval.Interval M2>\n {13.0} <music21.stream.Measure 4 offset=13.0>\n {0.0} <music21.interval.Interval M-2>\n {0.5} <music21.interval.Interval M2>\n {1.0} <music21.interval.Interval m2>\n {1.5} <music21.interval.Interval M2>\n {2.0} <music21.interval.Interval P-8>\n {2.5} <music21.interval.Interval M2>\n {3.0} <music21.interval.Interval P4>\n" ] ] ]
[ "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code", "code" ] ]
hexsha: d0a2d1076a404357924b8feed0afb4839fcd7182
size: 46,851
ext: ipynb
lang: Jupyter Notebook
max_stars_repo_path: algorithm_implement/PPO2/Proximal_Policy_Optimization(PPO2)_continuous.ipynb
max_stars_repo_name: sunnyswag/RL_notes_and_codes
max_stars_repo_head_hexsha: dcfd35a69daa5a42e79a6c78e2ca73c2da38a67b
max_stars_repo_licenses: [ "Apache-2.0" ]
max_stars_count: 10
max_stars_repo_stars_event_min_datetime: 2019-06-05T13:20:08.000Z
max_stars_repo_stars_event_max_datetime: 2021-12-22T09:56:41.000Z
max_issues_repo_path: algorithm_implement/PPO2/Proximal_Policy_Optimization(PPO2)_continuous.ipynb
max_issues_repo_name: sunnyswag/RL_notes_and_codes
max_issues_repo_head_hexsha: dcfd35a69daa5a42e79a6c78e2ca73c2da38a67b
max_issues_repo_licenses: [ "Apache-2.0" ]
max_issues_count: 1
max_issues_repo_issues_event_min_datetime: 2021-06-19T03:23:18.000Z
max_issues_repo_issues_event_max_datetime: 2021-06-19T03:23:18.000Z
max_forks_repo_path: algorithm_implement/PPO2/Proximal_Policy_Optimization(PPO2)_continuous.ipynb
max_forks_repo_name: sunnyswag/RL_notes_and_codes
max_forks_repo_head_hexsha: dcfd35a69daa5a42e79a6c78e2ca73c2da38a67b
max_forks_repo_licenses: [ "Apache-2.0" ]
max_forks_count: 2
max_forks_repo_forks_event_min_datetime: 2021-12-22T09:56:28.000Z
max_forks_repo_forks_event_max_datetime: 2022-01-16T06:21:19.000Z
avg_line_length: 74.841853
max_line_length: 23,852
alphanum_fraction: 0.749845
cells, cell_types, cell_type_groups:
[ [ [ "### PPO, Actor-Critic Style\n_______________________\n&nbsp;&nbsp;**for** iteration=1,2,... do<br>\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;**for** actor=1,2,...,N do<br>\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Run policy $\\pi_{\\theta_{old}}$ in environment for T timesteps<br>\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Compute advantage estimates $\\hat{A}_1,\\dots,\\hat{A}_T$<br>\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;**end for**<br>\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Optimize surrogate(代理人) L wrt $\\theta$,with $K$ epochs and minibatch size $M \\leq NT$<br>\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;$\\theta_{old} \\leftarrow \\theta$<br>\n&nbsp;&nbsp;**end for**\n_______________________", "_____no_output_____" ], [ "### Loss Function L的数学公式为:\n$$\nL_t^{CLIP+VF+S}(\\theta)=\\hat{\\mathbb{E}_t}[L_t^{CLIP}(\\theta)-c_1L^{VF}_t(\\theta)+c_2S[\\pi_\\theta](s_t)]\n$$\n其中,$L^{CLIP}(\\theta)=\\hat{\\mathbb{E}_t}\\big[min(r_t(\\theta)\\hat{A}_t,clip(r_t(\\theta), 1-\\epsilon,1+\\epsilon)\\hat{A}_t)\\big]$, $r_t(\\theta)=\\frac{\\pi_\\theta(a_t|s_t)}{\\pi_{\\theta_{old}}(a_t|s_t)}$<br>\n$L^{VF}_t=(V_\\theta(s_t)-V_t^{targ})^2$ **critic loss**<br>\nS 为奖励熵,保证足够多的探索(写A2C的时候已经OK)<br>\n$c_1, c_2$为参数\n#### $L^{CLIP}和r的关系如下(为了保证\\pi_\\theta和\\pi_{\\theta_{old}}的差值不会很大,满足TRPO中两者方差变化不大的要求)$:\n<img src=\"../assets/PPO_CLIP.png\">", "_____no_output_____" ], [ "### GAE(high-dimensional continuous control using Generalized Advantage Estimation)\nWe address the first challenge by using value functions to substantially reduce the variance of policy gradient estimates at the cost of some bias, with an exponentially-weighted estimator of the advantage function that is analogous to TD(λ). <br>\n\n改进了advantage function的计算方式。将advantage function进行类似于TD(λ)的处理<br>\n\n#### 推导过程\n1. 原始的advantage function : $\\delta^V_t=r_t+\\gamma V(s_{t+1})−V(s_t)$\n2. $在位置t时,其后k个 \\delta 折扣相加$ : \n$$\n\\begin{aligned}\n\\hat{A}^{(1)}_t&:=\\delta^V_t&&=-V(s_t)+r_t+\\gamma V(s_{t+1}) \\\\\n\\hat{A}^{(2)}_t&:=\\delta^V_t+\\gamma \\delta^V_{t+1}&&=-V(s_t)+r_t+\\gamma r_{t+1}+\\gamma ^2 V(s_{t+2}) \\\\\n\\hat{A}^{(3)}_t&:=\\delta^V_t+\\gamma \\delta^V_{t+1}+\\gamma^2 \\delta^V_{t+2}&&=-V(s_t)+r_t+\\gamma r_{t+1}+\\gamma^2 r_{t+2}+\\gamma ^3 V(s_{t+3}) \\\\\n\\hat{A}_t^{(k)}&:=\\sum_{l=0}^{k=1}\\gamma^l\\delta_{t+l}^V&&=-V(s_t)+r_t+\\gamma r_{t+1}+\\dots+\\gamma^{k-1}r_{t+k-1}+\\gamma^kV(s_{t+k})\n\\end{aligned}\n$$\n\n\n3. $k \\to \\infty, \\gamma^kV(s_{t+k})$会变得非常非常非常小,So :\n$$\n\\hat{A}_t^{(\\infty)}=\\sum^\\infty_{l=0}\\gamma^l\\delta_{t+l}^V=-V(s_t)+\\sum^\\infty_{l=0}\\gamma^lr_{t+l}\n$$\n4. 
所以,$t$ 时刻的GAE可推导为 :\n$$\n\\begin{aligned}\n\\hat{A}_t^{GAE(\\gamma, \\lambda)}&:=(1-\\lambda)\\big(\\hat{A}_t^{(1)}+\\lambda\\hat{A}_t^{(2)}+\\lambda^2\\hat{A}_t^{(3)}+\\dots\\big)\\\\\n&=(1-\\lambda)\\big(\\delta_t^V+\\lambda(\\delta_t^V+\\gamma\\delta_{t+1}^V)+\\lambda^2(\\delta_t^V+\\gamma\\delta_{t+1}^V+\\gamma^2\\delta_{t+2}^V)+\\dots\\big)\\\\\n&=(1-\\lambda)\\big(\\delta^V_t(1+\\lambda+\\lambda^2+\\dots)+\\gamma\\delta^V_{t+1}(\\lambda+\\lambda^2+\\lambda^3+\\dots)+\\gamma^2\\delta^V_{t+2}(\\lambda^2+\\lambda^3+\\lambda^4+\\dots)+\\dots\\big)\\\\\n&=(1-\\lambda)\\big(\\delta^V_t\\big(\\frac{1}{1-\\lambda}\\big)+\\gamma\\delta^V_{t+1}\\big(\\frac{\\lambda}{1-\\lambda}\\big)+\\gamma^2\\delta^V_{t+2}\\big(\\frac{\\lambda^2}{1-\\lambda}\\big)+\\dots\\big)\\\\\n&=\\underbrace{\\delta^V_t+\\gamma\\lambda\\delta^V_{t+1}+(\\gamma\\lambda)^2\\delta^V_{t+2}+\\dots}_{此处计算时使用这个公式(迭代计算)}\\\\\n&=\\sum_{l=0}^\\infty(\\gamma\\lambda)^l\\delta^V_{t+l}\n\\end{aligned}\n$$", "_____no_output_____" ], [ "### 使用高斯分布(正态分布)来实现随机性策略控制连续动作空间\n1. 高斯分布有两个重要的变量一个是均值 $\\mu$ ,另一个是方差 $\\sigma$ 。$\\mu$ 为高斯函数的对称轴,$\\frac{1}{\\sqrt{2\\pi}\\sigma}$ 为高斯函数的最高点。高斯函数的积分为1。所以我们可以使用它来进行连续动作的sample。方差 $\\sigma$ 越大,分布越分散,方差 $\\sigma$ 越小,分布越集中。\n2. $\\mu$ 的选择很好把控,经过tanh处理之后+简单的数学变换,使nn输出的 $\\mu$ 在env规定的动作空间内就可以\n3. $\\sigma$ 的选择,使用softplus函数对sigma进行处理。softplus 公式为$f(x)=\\frac{1}{\\beta}log(1+exp(\\beta x))$, softplus 是 ReLU 的平滑近似值版本\n4. 高斯分布公式:\n$$\nf(x)=\\frac{1}{\\sqrt{2\\pi}\\sigma}exp\\bigg(-\\frac{(x-\\mu)^2}{2\\sigma^2}\\bigg)\n$$\n5. 和确定性策略相比,需要考虑每个state采取每个动作的概率,计算量确实比较大。", "_____no_output_____" ], [ "### TRPO\n简单理解为一次on-policy到off-policy的转换<br>\n但是为了保证old_policy和new_policy之间方差相差不会太大<br>\n$$\n\\begin{aligned}\nE_{X \\sim p}[f(x)] & \\approx \\frac{1}{N}\\sum^N_{i=1}f(x^i)\\\\\n&= \\int f(x)p(x)dx=\\int f(x)\\frac{p(x)}{q(x)}q(x)dx=E_{x \\sim q}[f(x)\\frac{p(x)}{q(x)}]\n\\end{aligned}\n$$\n由此,在两者期望相同的情况下,论证方差是否相同\n$$\n\\begin{aligned}\n两者期望:\\quad&\\because E_{X \\sim p}[f(x)]=E_{x \\sim q}[f(x)\\frac{p(x)}{q(x)}]\\\\\n方差公式:\\quad&\\because VAR[X]=E[X^2]-(E[X])^2\\\\\nx \\sim p 方差:\\quad&\\therefore Var_{x \\sim p}[f(x)]=\\color{red}{E_{x\\sim p}[f(x)^2]}-(E_{x\\sim p}[f(x)])^2\\\\\nx \\sim q 方差:\\quad&\\therefore Var_{x \\sim q}[f(x)\\frac{p(x)}{q(x)}]=E_{x \\sim q}\\big[\\big([f(x)\\frac{p(x)}{q(x)}\\big)^2\\big]-\\big(E_{x\\sim q}\\big[f(x)\\frac{p(x)}{q(x)}\\big]\\big)^2\\\\\n&=\\color{red}{E_{x \\sim q}\\big[f(x)^2\\frac{p(x)}{q(x)}\\big]}-(E_{x \\sim p}[f(x)])^2\n\\end{aligned}\n$$", "_____no_output_____" ], [ "两者方差公式的差别在标红的位置,也就是说我们如果使两者$E_{x\\sim p}[f(x)^2]$和$E_{x \\sim q}\\big[f(x)^2\\frac{p(x)}{q(x)}\\big]$的差值较小,那么我们所做的off-policy就是可行的<br>\n由此,可直观的看出,我们要使p(x)和q(x)的相差较小。因此就有了PPO1中的所使用的$\\beta KL(\\theta,\\theta')$和PPO2中的clip这些都是为了限制两者的范围在一个可接受的合适空间", "_____no_output_____" ] ], [ [ "import gym\nimport torch\nimport torch.nn as nn\nimport torch.optim as optim\nimport torch.nn.functional as F\nfrom torch.distributions import Normal\nfrom torch.distributions import Categorical\nimport torch.multiprocessing as mp\n# from torch.utils.tensorboard import SummaryWriter\nimport numpy as np\nfrom IPython.display import clear_output\nimport matplotlib.pyplot as plt\n%matplotlib inline\nimport math\nimport random\nfrom statistics import mean\nimport pdb\ndevice = torch.device('cuda' if torch.cuda.is_available() else 'cpu')", "_____no_output_____" ], [ "def plot_function():\n x = np.arange(-10,10,0.05)\n plt.figure(figsize=(9,3.6))\n\n plt.subplot(121)\n plt.title(\"Gaussian distribution\")\n mu, sigma = 0, 10\n y = lambda x 
: np.exp(-((x-mu)**2)/(2*sigma**2))/(sigma*np.sqrt(2*np.pi))\n plt.plot(x, y(x))\n\n plt.subplot(122)\n plt.title(\"Softplus\")\n y = np.log(1+np.exp(x))\n plt.plot(x, y)\n plt.show()\n\nplot_function()", "_____no_output_____" ] ], [ [ "多线程又双叒叕来了", "_____no_output_____" ] ], [ [ "def worker(worker_id, master_end, worker_end, env_name):\n master_end.close()\n env = gym.make(env_name)\n env.seed(worker_id)\n \n while True:\n cmd, data = worker_end.recv()\n if cmd == 'step':\n state, reward, done, info = env.step(data)\n if done:\n state = env.reset()\n worker_end.send((state, reward, done, info))\n elif cmd == 'reset':\n state = env.reset()\n worker_end.send(state)\n elif cmd == 'reset_task':\n state = env.reset_task()\n worker_end.send(state)\n elif cmd == 'close':\n worker_end.close()\n break\n elif cmd == 'get_spaces':\n worker_end.send((env.observation_space.shape[0], env.action_space.shape[0]))\n else:\n raise NotImplementedError", "_____no_output_____" ], [ "class ParallelEnv:\n def __init__(self, n_train_processes, env_name):\n self.nenvs = n_train_processes\n self.waiting = False\n self.closed = False\n self.workers = []\n self.env_name = env_name\n \n self.master_ends, self.worker_ends = zip(*[mp.Pipe() for _ in range(self.nenvs)])\n \n for worker_id, (master_end, worker_end) in enumerate(zip(self.master_ends, self.worker_ends)):\n p = mp.Process(target=worker, args=(worker_id, master_end, worker_end, self.env_name))\n p.daemon = False\n p.start()\n self.workers.append(p)\n for worker_end in self.worker_ends:\n worker_end.close()\n \n self.master_ends[0].send(('get_spaces', None))\n self.observation_space, self.action_space = self.master_ends[0].recv()\n \n def step_async(self, actions):\n for master_end, action in zip(self.master_ends, actions):\n master_end.send(('step', action))\n self.waiting = True\n \n def step_wait(self):\n results = [master_end.recv() for master_end in self.master_ends]\n self.waiting = False\n states, rewards, dones, infos = zip(*results)\n return np.stack(states), np.stack(rewards), np.stack(dones), infos\n \n def reset(self):\n for master_end in self.master_ends:\n master_end.send(('reset', None))\n return np.stack([master_end.recv() for master_end in self.master_ends])\n \n def step(self, actions):\n self.step_async(actions)\n return self.step_wait()\n \n def close(self):\n if self.closed:\n return\n if self.waiting:\n [master_end.recv() for master_end in self.master_ends]\n for master_end in self.master_ends:\n master_end.send(('close', None))\n del self.workers[:]\n self.closed = True", "_____no_output_____" ] ], [ [ "定义网络", "_____no_output_____" ] ], [ [ "class Actor_critic(nn.Module):\n def __init__(self, in_dim, out_dim):\n super(Actor_critic, self).__init__()\n self.actor_linear1 = nn.Linear(in_dim, 64)\n self.critic_linear1 = nn.Linear(in_dim, 64)\n self.linear2 = nn.Linear(64, 32)\n self.actor_linear3 = nn.Linear(32, out_dim)\n self.critic_linear3 = nn.Linear(32, 1)\n self.sigma_linear = nn.Linear(32, out_dim)\n \n def forward(self, x):\n value_hidden = F.relu(self.linear2(F.relu(self.critic_linear1(x))))\n value = self.critic_linear3(value_hidden)\n actor_hidden = F.relu(self.linear2(F.relu(self.actor_linear1(x))))\n mu = torch.tanh(self.actor_linear3(actor_hidden)) * 2\n sigma = F.softplus(self.sigma_linear(actor_hidden))\n dist = Normal(mu, sigma)\n return dist, value", "_____no_output_____" ] ], [ [ "画图", "_____no_output_____" ] ], [ [ "def smooth_plot(factor, item, plot_decay):\n item_x = np.arange(len(item))\n item_smooth = 
[np.mean(item[i:i+factor]) if i > factor else np.mean(item[0:i+1])\n for i in range(len(item))]\n for i in range(len(item)// plot_decay):\n item_x = item_x[::2]\n item_smooth = item_smooth[::2]\n return item_x, item_smooth\n \ndef plot(episode, rewards, losses):\n clear_output(True)\n rewards_x, rewards_smooth = smooth_plot(10, rewards, 500)\n losses_x, losses_smooth = smooth_plot(10, losses, 100000)\n \n plt.figure(figsize=(18, 10))\n plt.subplot(211)\n plt.title('episode %s. reward: %s'%(episode, rewards_smooth[-1]))\n plt.plot(rewards, label=\"Rewards\", color='lightsteelblue', linewidth='1')\n plt.plot(rewards_x, rewards_smooth, label='Smothed_Rewards', color='darkorange', linewidth='3')\n plt.legend(loc='best')\n \n plt.subplot(212)\n plt.title('Losses')\n plt.plot(losses,label=\"Losses\",color='lightsteelblue',linewidth='1')\n plt.plot(losses_x, losses_smooth, \n label=\"Smoothed_Losses\",color='darkorange',linewidth='3')\n plt.legend(loc='best')\n \n plt.show()\n \ndef test_env():\n state = env.reset()\n done = False\n total_reward = 0\n while not done:\n state = torch.FloatTensor(state).reshape(-1, 3).to(device)\n log_prob, _ = model(state)\n next_state, reward, done, _ = env.step(log_prob.sample().cpu().numpy())\n state = next_state\n total_reward += reward\n return total_reward", "_____no_output_____" ], [ "def gae_compute(next_value, rewards, masks, values, gamma=0.99, tau=0.95):\n td_target = next_value\n td_target_list = []\n advantage = 0\n advantage_list = []\n for idx in reversed(range(len(values))):\n td_target = td_target * gamma * masks[idx] + rewards[idx]\n td_target_list.insert(0, td_target)\n advantage = td_target - values[idx] + advantage * gamma * tau\n advantage_list.insert(0, advantage)\n return advantage_list, td_target_list", "_____no_output_____" ] ], [ [ "PPO训练更新", "_____no_output_____" ] ], [ [ "import pdb\ndef ppo_iter(states, actions, log_probs, advantages, td_target_list):\n batch_size = actions.size(0)\n for _ in range(batch_size // mini_batch_size):\n ids = np.random.choice(batch_size, mini_batch_size, replace=False)\n yield states[ids, :], actions[ids, :], log_probs[ids, :], advantages[ids, :], td_target_list[ids, :]\n\ndef ppo_train(states, actions, log_probs, advantages, td_target_list, clip_param=0.2):\n losses = []\n for _ in range(ppo_epochs):\n for state, action, old_log_probs, advantage, td_target in ppo_iter(states, actions, log_probs,\n advantages, td_target_list):\n dist, value = model(state)\n entropy = dist.entropy().mean()\n new_log_probs = dist.log_prob(action)\n \n ratio = (new_log_probs - old_log_probs).exp() \n sub1 = ratio * advantage\n \n sub2 = torch.clamp(ratio, 1.0-clip_param, 1.0+clip_param) * advantage\n actor_loss = - torch.min(sub1, sub2).mean()\n critic_loss = (td_target - value).pow(2).mean()\n \n loss = 0.5 * critic_loss + actor_loss - 0.001 * entropy\n losses.append(loss.item())\n \n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n \n old_model.load_state_dict(model.state_dict()) \n return round(mean(losses),2)", "_____no_output_____" ], [ "## hyperparameters ##\n\nnum_envs = 16\nenv_name = \"Pendulum-v0\"\nppo_epochs = 30\nmini_batch_size = 256\nmax_epoch = 10000\nnum_timesteps = 128\n\n## hyperparameters ##\n\nenvs = ParallelEnv(num_envs, env_name)\nstate_space = envs.observation_space\naction_space = envs.action_space\n\nenv = gym.make(env_name)\n\nmodel = Actor_critic(state_space, action_space).to(device)\noptimizer = optim.Adam(model.parameters(), lr=1e-3)\nold_model = Actor_critic(state_space, 
action_space).to(device)", "_____no_output_____" ], [ "test_rewards = []\nloss_list = []\nstate = envs.reset()\n\nfor epoch in range(max_epoch):\n states, actions, rewards, masks, log_probs, values = [], [], [], [], [], []\n \n for _ in range(num_timesteps):\n dist, value = old_model(torch.FloatTensor(state).to(device))\n action = dist.sample()\n \n next_state, reward, done, _ = envs.step(action.cpu().numpy())\n \n states.append(torch.FloatTensor(state).to(device))\n actions.append(action)\n rewards.append(torch.FloatTensor(reward).unsqueeze(1).to(device))\n masks.append(torch.FloatTensor(1 - done).unsqueeze(1).to(device))\n log_probs.append(dist.log_prob(action))\n values.append(value)\n \n state = next_state\n \n _, next_value = model(torch.FloatTensor(next_state).to(device))\n advantages, td_target_list = gae_compute(next_value, rewards, masks, values)\n loss = ppo_train(torch.cat(states),torch.cat(actions), torch.cat(log_probs).detach(), \n torch.cat(advantages).detach(), torch.cat(td_target_list).detach())\n loss_list.append(loss)\n \n if epoch % 1 == 0:\n test_reward = np.mean([test_env() for _ in range(10)])\n test_rewards.append(test_reward)\n plot(epoch + 1, test_rewards, loss_list)\n# soft = lambda loss : np.mean(loss[-100:]) if len(loss)>=100 else np.mean(loss)\n# writer.add_scalar(\"Test_Rewards\", np.array(soft(test_rewards)), epoch)\n# writer.add_scalar(\"Value_Losses\", np.array(soft(loss_list)), epoch)", "_____no_output_____" ], [ "from IPython import display\n\nenv = gym.make(env_name)\nstate_1 = env.reset()\nimg = plt.imshow(env.render(mode='rgb_array')) # only call this once\nfor _ in range(1000):\n img.set_data(env.render(mode='rgb_array')) # just update the data\n display.display(plt.gcf())\n display.clear_output(wait=True)\n \n prob, value = old_model(torch.FloatTensor(state_1).reshape(1,-1).to(device))\n action = prob.sample().cpu().numpy()\n next_state, _, done, _ = env.step(action)\n if done: \n state_1 = env.reset()\n state_1 = next_state", "_____no_output_____" ] ], [ [ "## PPO Baselines:\n<img src=\"../assets/PPO_baseline.png\"></img>\n### Test_Rewards:\n<img src=\"../assets/PPO_Test_Rewards.png\" width=100%></img>\n### Value_Losses:\n<img src=\"../assets/PPO_Value_Losses.png\"></img>", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ] ]
hexsha: d0a2d5327df1c552c1b5984a9d2edd1d4b8ee7bb
size: 1,028
ext: ipynb
lang: Jupyter Notebook
max_stars_repo_path: scripts/diff-example.ipynb
max_stars_repo_name: LinaNouh/nbdiff
max_stars_repo_head_hexsha: d40ee81c74bfcbab34261fb385d6277caf9969c1
max_stars_repo_licenses: [ "MIT" ]
max_stars_count: 1
max_stars_repo_stars_event_min_datetime: 2017-09-22T13:01:27.000Z
max_stars_repo_stars_event_max_datetime: 2017-09-22T13:01:27.000Z
max_issues_repo_path: scripts/diff-example.ipynb
max_issues_repo_name: LinaNouh/nbdiff
max_issues_repo_head_hexsha: d40ee81c74bfcbab34261fb385d6277caf9969c1
max_issues_repo_licenses: [ "MIT" ]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: scripts/diff-example.ipynb
max_forks_repo_name: LinaNouh/nbdiff
max_forks_repo_head_hexsha: d40ee81c74bfcbab34261fb385d6277caf9969c1
max_forks_repo_licenses: [ "MIT" ]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
avg_line_length: 16.31746
max_line_length: 52
alphanum_fraction: 0.353113
cells, cell_types, cell_type_groups:
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
hexsha: d0a2e8726d5bfaab68ba324e1714a5d8ed06521a
size: 17,263
ext: ipynb
lang: Jupyter Notebook
max_stars_repo_path: week2/3_tensorflow_dnn.ipynb
max_stars_repo_name: isacmoura/machine-learning-tf-google-cloud
max_stars_repo_head_hexsha: eba64d7962415e76ebd63d6b51ae07604c4c0529
max_stars_repo_licenses: [ "MIT" ]
max_stars_count: 3
max_stars_repo_stars_event_min_datetime: 2020-05-06T05:18:29.000Z
max_stars_repo_stars_event_max_datetime: 2020-05-18T19:52:45.000Z
max_issues_repo_path: week2/3_tensorflow_dnn.ipynb
max_issues_repo_name: isacmoura/machine-learning-tf-google-cloud
max_issues_repo_head_hexsha: eba64d7962415e76ebd63d6b51ae07604c4c0529
max_issues_repo_licenses: [ "MIT" ]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: week2/3_tensorflow_dnn.ipynb
max_forks_repo_name: isacmoura/machine-learning-tf-google-cloud
max_forks_repo_head_hexsha: eba64d7962415e76ebd63d6b51ae07604c4c0529
max_forks_repo_licenses: [ "MIT" ]
max_forks_count: 3
max_forks_repo_forks_event_min_datetime: 2020-07-23T06:44:33.000Z
max_forks_repo_forks_event_max_datetime: 2020-12-17T01:03:48.000Z
avg_line_length: 39.145125
max_line_length: 634
alphanum_fraction: 0.602966
cells, cell_types, cell_type_groups:
[ [ [ "<h1> Create TensorFlow model </h1>\n\nThis notebook illustrates:\n<ol>\n<li> Creating a model using the high-level Estimator API \n</ol>", "_____no_output_____" ] ], [ [ "# change these to try this notebook out\nBUCKET = 'qwiklabs-gcp-37b9fafbd24bf385'\nPROJECT = 'qwiklabs-gcp-37b9fafbd24bf385'\nREGION = 'us-central1'", "_____no_output_____" ], [ "import os\nos.environ['BUCKET'] = BUCKET\nos.environ['PROJECT'] = PROJECT\nos.environ['REGION'] = REGION", "_____no_output_____" ], [ "%%bash\nif ! gsutil ls | grep -q gs://${BUCKET}/; then\n gsutil mb -l ${REGION} gs://${BUCKET}\nfi", "_____no_output_____" ] ], [ [ "<h2> Create TensorFlow model using TensorFlow's Estimator API </h2>\n<p>\nFirst, write an input_fn to read the data.\n<p>\n\n## Lab Task 1\nVerify that the headers match your CSV output", "_____no_output_____" ] ], [ [ "import shutil\nimport numpy as np\nimport tensorflow as tf", "/usr/local/envs/py3env/lib/python3.5/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n from ._conv import register_converters as _register_converters\n" ], [ "# Determine CSV, label, and key columns\nCSV_COLUMNS = 'weight_pounds,is_male,mother_age,plurality,gestation_weeks,key'.split(',')\nLABEL_COLUMN = 'weight_pounds'\nKEY_COLUMN = 'key'\n\n# Set default values for each CSV column\nDEFAULTS = [[0.0], ['null'], [0.0], ['null'], [0.0], ['nokey']]\nTRAIN_STEPS = 1000", "_____no_output_____" ] ], [ [ "## Lab Task 2\n\nFill out the details of the input function below", "_____no_output_____" ] ], [ [ "# Create an input function reading a file using the Dataset API\n# Then provide the results to the Estimator API\ndef read_dataset(filename_pattern, mode, batch_size = 512):\n def _input_fn():\n def decode_csv(line_of_text):\n # TODO #1: Use tf.decode_csv to parse the provided line\n columns = tf.decode_csv(line_of_text, record_defaults=DEFAULTS)\n \n # TODO #2: Make a Python dict. The keys are the column names, the values are from the parsed data\n features = dict(zip(CSV_COLUMNS, columns))\n \n # TODO #3: Return a tuple of features, label where features is a Python dict and label a float\n label = features.pop(LABEL_COLUMN)\n return features, label\n \n # TODO #4: Use tf.gfile.Glob to create list of files that match pattern\n file_list = tf.gfile.Glob(filename_pattern)\n\n # Create dataset from file list\n dataset = (tf.data.TextLineDataset(file_list) # Read text file\n .map(decode_csv)) # Transform each elem by applying decode_csv fn\n \n # TODO #5: In training mode, shuffle the dataset and repeat indefinitely\n # (Look at the API for tf.data.dataset shuffle)\n # The mode input variable will be tf.estimator.ModeKeys.TRAIN if in training mode\n # Tell the dataset to provide data in batches of batch_size \n if mode == tf.estimator.ModeKeys.TRAIN:\n epochs = None # Repeat indefinitely\n dataset = dataset.shuffle(buffer_size = 10 * batch_size)\n else:\n epochs = 1\n \n dataset = dataset.repeat(epochs).batch(batch_size)\n \n # This will now return batches of features, label\n return dataset\n return _input_fn", "_____no_output_____" ] ], [ [ "## Lab Task 3\n\nUse the TensorFlow feature column API to define appropriate feature columns for your raw features that come from the CSV.\n\n<b> Bonus: </b> Separate your columns into wide columns (categorical, discrete, etc.) 
and deep columns (numeric, embedding, etc.)", "_____no_output_____" ] ], [ [ "# Define feature columns\n# Define feature columns\ndef get_categorical(name, values):\n return tf.feature_column.indicator_column(\n tf.feature_column.categorical_column_with_vocabulary_list(name, values))\n\ndef get_cols():\n # Define column types\n return [\\\n get_categorical('is_male', ['True', 'False', 'Unknown']),\n tf.feature_column.numeric_column('mother_age'),\n get_categorical('plurality',\n ['Single(1)', 'Twins(2)', 'Triplets(3)',\n 'Quadruplets(4)', 'Quintuplets(5)','Multiple(2+)']),\n tf.feature_column.numeric_column('gestation_weeks')\n ]", "_____no_output_____" ] ], [ [ "## Lab Task 4\n\nTo predict with the TensorFlow model, we also need a serving input function (we'll use this in a later lab). We will want all the inputs from our user.\n\nVerify and change the column names and types here as appropriate. These should match your CSV_COLUMNS", "_____no_output_____" ] ], [ [ "# Create serving input function to be able to serve predictions later using provided inputs\ndef serving_input_fn():\n feature_placeholders = {\n 'is_male': tf.placeholder(tf.string, [None]),\n 'mother_age': tf.placeholder(tf.float32, [None]),\n 'plurality': tf.placeholder(tf.string, [None]),\n 'gestation_weeks': tf.placeholder(tf.float32, [None])\n }\n features = {\n key: tf.expand_dims(tensor, -1)\n for key, tensor in feature_placeholders.items()\n }\n return tf.estimator.export.ServingInputReceiver(features, feature_placeholders)", "_____no_output_____" ] ], [ [ "## Lab Task 5\n\nComplete the TODOs in this code:", "_____no_output_____" ] ], [ [ "# Create estimator to train and evaluate\ndef train_and_evaluate(output_dir):\n EVAL_INTERVAL = 300\n run_config = tf.estimator.RunConfig(save_checkpoints_secs = EVAL_INTERVAL,\n keep_checkpoint_max = 3)\n # TODO #1: Create your estimator\n estimator = tf.estimator.DNNRegressor(\n model_dir = output_dir,\n feature_columns = get_cols(),\n hidden_units = [64, 32],\n config = run_config)\n train_spec = tf.estimator.TrainSpec(\n # TODO #2: Call read_dataset passing in the training CSV file and the appropriate mode\n input_fn = read_dataset('train.csv', mode = tf.estimator.ModeKeys.TRAIN),\n max_steps = TRAIN_STEPS)\n exporter = tf.estimator.LatestExporter('exporter', serving_input_fn)\n eval_spec = tf.estimator.EvalSpec(\n # TODO #3: Call read_dataset passing in the evaluation CSV file and the appropriate mode\n input_fn = read_dataset('eval.csv', mode = tf.estimator.ModeKeys.EVAL),\n steps = None,\n start_delay_secs = 60, # start evaluating after N seconds\n throttle_secs = EVAL_INTERVAL, # evaluate every N seconds\n exporters = exporter)\n tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)", "_____no_output_____" ] ], [ [ "Finally, train!", "_____no_output_____" ] ], [ [ "# Run the model\nshutil.rmtree('babyweight_trained', ignore_errors = True) # start fresh each time\ntrain_and_evaluate('babyweight_trained')", "INFO:tensorflow:Using config: {'_num_worker_replicas': 1, '_save_summary_steps': 100, '_model_dir': 'babyweight_trained', '_keep_checkpoint_every_n_hours': 10000, '_is_chief': True, '_evaluation_master': '', '_keep_checkpoint_max': 3, '_task_id': 0, '_global_id_in_cluster': 0, '_train_distribute': None, '_task_type': 'worker', '_session_config': None, '_tf_random_seed': None, '_log_step_count_steps': 100, '_service': None, '_cluster_spec': <tensorflow.python.training.server_lib.ClusterSpec object at 0x7f2680a1c710>, '_save_checkpoints_steps': None, 
'_save_checkpoints_secs': 300, '_master': '', '_num_ps_replicas': 0}\nINFO:tensorflow:Running training and evaluation locally (non-distributed).\nINFO:tensorflow:Start train and evaluate loop. The evaluate will happen after 300 secs (eval_spec.throttle_secs) or training is finished.\nINFO:tensorflow:Calling model_fn.\nINFO:tensorflow:Done calling model_fn.\nINFO:tensorflow:Create CheckpointSaverHook.\nINFO:tensorflow:Graph was finalized.\nINFO:tensorflow:Running local_init_op.\nINFO:tensorflow:Done running local_init_op.\nINFO:tensorflow:Saving checkpoints for 1 into babyweight_trained/model.ckpt.\nINFO:tensorflow:loss = 32576.717, step = 1\nINFO:tensorflow:global_step/sec: 50.5871\nINFO:tensorflow:loss = 638.1501, step = 101 (1.983 sec)\nINFO:tensorflow:global_step/sec: 48.5499\nINFO:tensorflow:loss = 654.87915, step = 201 (2.060 sec)\nINFO:tensorflow:global_step/sec: 49.545\nINFO:tensorflow:loss = 610.86847, step = 301 (2.019 sec)\nINFO:tensorflow:global_step/sec: 51.2294\nINFO:tensorflow:loss = 571.76263, step = 401 (1.951 sec)\nINFO:tensorflow:global_step/sec: 51.0356\nINFO:tensorflow:loss = 565.4655, step = 501 (1.962 sec)\nINFO:tensorflow:global_step/sec: 50.5303\nINFO:tensorflow:loss = 575.91785, step = 601 (1.976 sec)\nINFO:tensorflow:global_step/sec: 51.2073\nINFO:tensorflow:loss = 586.4931, step = 701 (1.954 sec)\nINFO:tensorflow:global_step/sec: 50.9067\nINFO:tensorflow:loss = 511.8872, step = 801 (1.964 sec)\nINFO:tensorflow:global_step/sec: 50.5373\nINFO:tensorflow:loss = 582.94336, step = 901 (1.979 sec)\nINFO:tensorflow:Saving checkpoints for 1000 into babyweight_trained/model.ckpt.\nINFO:tensorflow:Loss for final step: 558.4249.\nINFO:tensorflow:Calling model_fn.\nINFO:tensorflow:Done calling model_fn.\nINFO:tensorflow:Starting evaluation at 2019-02-05-23:19:59\nINFO:tensorflow:Graph was finalized.\nINFO:tensorflow:Restoring parameters from babyweight_trained/model.ckpt-1000\nINFO:tensorflow:Running local_init_op.\nINFO:tensorflow:Done running local_init_op.\nINFO:tensorflow:Finished evaluation at 2019-02-05-23:19:59\nINFO:tensorflow:Saving dict for global step 1000: average_loss = 1.1584599, global_step = 1000, loss = 579.9428\nINFO:tensorflow:Calling model_fn.\nINFO:tensorflow:Done calling model_fn.\nINFO:tensorflow:Signatures INCLUDED in export for Predict: ['predict']\nINFO:tensorflow:Signatures INCLUDED in export for Regress: None\nINFO:tensorflow:Signatures INCLUDED in export for Classify: None\nINFO:tensorflow:Signatures EXCLUDED from export because they cannot be be served via TensorFlow Serving APIs:\nINFO:tensorflow:'regression' : Regression input must be a single string Tensor; got {'gestation_weeks': <tf.Tensor 'Placeholder_3:0' shape=(?,) dtype=float32>, 'mother_age': <tf.Tensor 'Placeholder_1:0' shape=(?,) dtype=float32>, 'is_male': <tf.Tensor 'Placeholder:0' shape=(?,) dtype=string>, 'plurality': <tf.Tensor 'Placeholder_2:0' shape=(?,) dtype=string>}\nINFO:tensorflow:'serving_default' : Regression input must be a single string Tensor; got {'gestation_weeks': <tf.Tensor 'Placeholder_3:0' shape=(?,) dtype=float32>, 'mother_age': <tf.Tensor 'Placeholder_1:0' shape=(?,) dtype=float32>, 'is_male': <tf.Tensor 'Placeholder:0' shape=(?,) dtype=string>, 'plurality': <tf.Tensor 'Placeholder_2:0' shape=(?,) dtype=string>}\nWARNING:tensorflow:Export includes no default signature!\nINFO:tensorflow:Restoring parameters from babyweight_trained/model.ckpt-1000\nINFO:tensorflow:Assets added to graph.\nINFO:tensorflow:No assets to write.\nINFO:tensorflow:SavedModel written to: 
b\"babyweight_trained/export/exporter/temp-b'1549408800'/saved_model.pb\"\n" ] ], [ [ "When I ran it, the final lines of the output (above) were:\n<pre>\nINFO:tensorflow:Saving dict for global step 1000: average_loss = 1.2693067, global_step = 1000, loss = 635.9226\nINFO:tensorflow:Restoring parameters from babyweight_trained/model.ckpt-1000\nINFO:tensorflow:Assets added to graph.\nINFO:tensorflow:No assets to write.\nINFO:tensorflow:SavedModel written to: babyweight_trained/export/exporter/temp-1517899936/saved_model.pb\n</pre>\nThe exporter directory contains the final model and the final RMSE (the average_loss) is 1.2693067", "_____no_output_____" ], [ "<h2> Monitor and experiment with training </h2>", "_____no_output_____" ] ], [ [ "from google.datalab.ml import TensorBoard\nTensorBoard().start('./babyweight_trained')", "_____no_output_____" ], [ "for pid in TensorBoard.list()['pid']:\n TensorBoard().stop(pid)\n print('Stopped TensorBoard with pid {}'.format(pid))", "Stopped TensorBoard with pid 3769\n" ] ], [ [ "Copyright 2017-2018 Google Inc. Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ] ]
hexsha: d0a2f0ec6c1cc47b93826de5fd82898a19f0c991
size: 70,937
ext: ipynb
lang: Jupyter Notebook
max_stars_repo_path: bin/biolog_processing.ipynb
max_stars_repo_name: gregmedlock/ssl_ensembles
max_stars_repo_head_hexsha: f6fd7ca334483d8b1edce6b3d685448b03c0749d
max_stars_repo_licenses: [ "MIT" ]
max_stars_count: 3
max_stars_repo_stars_event_min_datetime: 2020-01-08T19:12:01.000Z
max_stars_repo_stars_event_max_datetime: 2021-04-18T10:31:02.000Z
max_issues_repo_path: bin/biolog_processing.ipynb
max_issues_repo_name: gregmedlock/ssl_ensembles
max_issues_repo_head_hexsha: f6fd7ca334483d8b1edce6b3d685448b03c0749d
max_issues_repo_licenses: [ "MIT" ]
max_issues_count: 2
max_issues_repo_issues_event_min_datetime: 2019-04-09T19:35:28.000Z
max_issues_repo_issues_event_max_datetime: 2019-11-18T14:06:20.000Z
max_forks_repo_path: bin/biolog_processing.ipynb
max_forks_repo_name: gregmedlock/ssl_ensembles
max_forks_repo_head_hexsha: f6fd7ca334483d8b1edce6b3d685448b03c0749d
max_forks_repo_licenses: [ "MIT" ]
max_forks_count: 3
max_forks_repo_forks_event_min_datetime: 2018-11-26T13:18:22.000Z
max_forks_repo_forks_event_max_datetime: 2020-11-03T01:47:30.000Z
avg_line_length: 47.736878
max_line_length: 182
alphanum_fraction: 0.342825
cells, cell_types, cell_type_groups:
[ [ [ "import pandas as pd\nimport json\nimport cobra\nimport numpy as np", "/home/greg/Envs/ssl_ensembles/lib/python3.5/importlib/_bootstrap.py:222: RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility. Expected 96, got 88\n return f(*args, **kwds)\n/home/greg/Envs/ssl_ensembles/lib/python3.5/importlib/_bootstrap.py:222: RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility. Expected 96, got 88\n return f(*args, **kwds)\n/home/greg/Envs/ssl_ensembles/lib/python3.5/importlib/_bootstrap.py:222: RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility. Expected 96, got 88\n return f(*args, **kwds)\n/home/greg/Envs/ssl_ensembles/lib/python3.5/importlib/_bootstrap.py:222: RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility. Expected 96, got 88\n return f(*args, **kwds)\n/home/greg/Envs/ssl_ensembles/lib/python3.5/importlib/_bootstrap.py:222: RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility. Expected 96, got 88\n return f(*args, **kwds)\n" ], [ "# read biolog base composition file, original plata table, and mapping file for Plata names -> seed IDs\nbiolog_names_to_seed = pd.read_csv('../data/biolog_names_to_seed.csv',sep='\\t')\nplata_biolog_data = pd.read_csv('../data/plata_biolog_raw.csv',sep=',',index_col=0)", "_____no_output_____" ], [ "# replace plata metabolite names with seed IDs\nplata_biolog_data = plata_biolog_data.rename(dict(zip(biolog_names_to_seed['plata_name'],biolog_names_to_seed['seed_id'])),axis='columns')\n# remove the NA columns (did not map due to criteria listed in manuscript)\nplata_biolog_data = plata_biolog_data.loc[:,~plata_biolog_data.columns.isnull()]\nplata_biolog_data.columns = plata_biolog_data.columns + '_e'", "_____no_output_____" ], [ "# filter organisms by having growth in at least 20 conditions total\ngrowth_threshold = 10 # as in Plata paper. These are relative colorimetric units. See paper for details.\nmin_growth_conditions = 10\nplata_biolog_data = plata_biolog_data.loc[(plata_biolog_data > growth_threshold).sum(axis=1) > min_growth_conditions,]", "_____no_output_____" ], [ "plata_biolog_data = plata_biolog_data > 10\nplata_biolog_data", "_____no_output_____" ], [ "plata_biolog_data.to_csv('../data/plata_thresholded.csv',sep='\\t')", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code" ] ]
hexsha: d0a2f39aecc683cab82efcc5cefdb4f381825c7d
size: 861,116
ext: ipynb
lang: Jupyter Notebook
max_stars_repo_path: TEMA-3/Clase22_ValuacionOpciones.ipynb
max_stars_repo_name: AndresHdzJmz/SPF-2021-I
max_stars_repo_head_hexsha: 2e2b25b0bfb9e3716ceea4253741a6c364f2a579
max_stars_repo_licenses: [ "MIT" ]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: TEMA-3/Clase22_ValuacionOpciones.ipynb
max_issues_repo_name: AndresHdzJmz/SPF-2021-I
max_issues_repo_head_hexsha: 2e2b25b0bfb9e3716ceea4253741a6c364f2a579
max_issues_repo_licenses: [ "MIT" ]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: TEMA-3/Clase22_ValuacionOpciones.ipynb
max_forks_repo_name: AndresHdzJmz/SPF-2021-I
max_forks_repo_head_hexsha: 2e2b25b0bfb9e3716ceea4253741a6c364f2a579
max_forks_repo_licenses: [ "MIT" ]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
avg_line_length: 218.501903
max_line_length: 160,220
alphanum_fraction: 0.890757
cells:
[ [ [ "<img style=\"float: right; margin: 0px 0px 15px 15px;\" src=\"https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcSQt6eQo8JPYzYO4p6WmxLtccdtJ4X8WR6GzVVKbsMjyGvUDEn1mg\" width=\"300px\" height=\"100px\" />\n\n# Trabajando con opciones", "_____no_output_____" ], [ "Una opción puede negociarse en el mercado secundario por lo que es importante determinar su valor $V_t$ para cada tiempo $t\\in [0, T]$. La ganancia que obtiene quién adquiere la opción se llama función de pago o \"payoff\" y claramente depende del valor del subyacente. \n\nHay una gran variedad de opciones en el mercado y éstas se clasiflcan según su función de pago y la forma en que pueden ejercerse. Las opciones que tienen como función de pago a\n$$ P(S(t),t)=max\\{S(T)-K,0\\} \\rightarrow \\text{En el caso de Call}$$ \n$$ P(S(t),t)=max\\{K-S(T),0\\} \\rightarrow \\text{En el caso de Put}$$ \nse llaman opciones **Vainilla**, con $h:[0,\\infty) \\to [0,\\infty)$.\n\nLa opción se llama **europea** si puede ejercerse sólo en la fecha de vencimiento.\n\nSe dice que una opción es **americana** si puede ejercerse en cualquier momento antes o en la fecha de vencimiento.\n\nUna opción compleja popular son las llamadas **opciones asiáticas** cuyos pagos dependen de todas las trayectorias del precio de los activos subyacentes. Las opciones cuyos pagos dependen de las trayectorias de los precios de los activos subyacentes se denominan opciones dependientes de la ruta.\n\nPrincipalmente, se puede resumir que las dos razones con más peso de importancia para utilizar opciones son el **aseguramiento** y la **especulación**.\n\n## Opciones Plan Vainilla: opción de compra y opción de venta europea\n\nUna opción vainilla o estándar es una opción normal de compra o venta que no tiene características especiales o inusuales. Puede ser para tamaños y vencimientos estandarizados, y negociarse en un intercambio.\nEn comparación con otras estructuras de opciones, las opciones de vanilla no son sofisticadas o complicadas.\n", "_____no_output_____" ], [ "## 1. ¿Cómo descargar datos de opciones?", "_____no_output_____" ] ], [ [ "#importar los paquetes que se van a usar\nimport pandas as pd\nimport pandas_datareader.data as web\nimport numpy as np\nimport datetime\nimport matplotlib.pyplot as plt\nimport scipy.stats as st\nimport seaborn as sns\n%matplotlib inline\n#algunas opciones para Pandas\npd.set_option('display.notebook_repr_html', True)\npd.set_option('display.max_columns', 6)\npd.set_option('display.max_rows', 10)\npd.set_option('display.width', 78)\npd.set_option('precision', 3)", "_____no_output_____" ] ], [ [ "Usando el paquete `pandas_datareader` también podemos descargar datos de opciones. 
Por ejemplo, descarguemos los datos de las opciones cuyo activo subyacente son las acciones de Apple", "_____no_output_____" ] ], [ [ "aapl = web.YahooOptions('AAPL')\naapl_opt = aapl.get_all_data().reset_index()\naapl_opt.set_index('Expiry')\n# aapl", "_____no_output_____" ] ], [ [ "Precio del activo subyacente", "_____no_output_____" ] ], [ [ "aapl_opt.Underlying_Price[0]", "_____no_output_____" ] ], [ [ "Datos de la opción ", "_____no_output_____" ] ], [ [ "aapl_opt.loc[0, 'JSON']", "_____no_output_____" ] ], [ [ "### Conceptos claves\n- El precio de la oferta ('bid') se refiere al precio más alto que un comprador pagará por un activo.\n- El precio de venta ('ask') se refiere al precio más bajo que un vendedor aceptará por un activo.\n- La diferencia entre estos dos precios se conoce como 'spread'; cuanto menor es el spread, mayor es la liquidez de la garantía dada.\n- Liquidez: facilidad de convertir cierta opción en efectivo.\n- La volatilidad implícita es el pronóstico del mercado de un probable movimiento en el precio de un valor.\n- La volatilidad implícita aumenta en los mercados bajistas y disminuye cuando el mercado es alcista.\n- El último precio ('lastprice') representa el precio al que ocurrió la última operación, de una opción dada.", "_____no_output_____" ], [ "Una vez tenemos la información, podemos consultar de qué tipo son las opciones", "_____no_output_____" ] ], [ [ "aapl_opt.loc[:, 'Type']", "_____no_output_____" ] ], [ [ "o en que fecha expiran", "_____no_output_____" ] ], [ [ "pd.set_option('display.max_rows', 10)\naapl_opt.loc[:, 'Expiry']", "_____no_output_____" ] ], [ [ "Por otra parte, podríamos querer consultar todas las opciones de compra (call) que expiran en cierta fecha (2020-06-19)", "_____no_output_____" ] ], [ [ "fecha1 = '2021-06-18'\nfecha2 = '2022-09-16'\ncall06_f1 = aapl_opt.loc[(aapl_opt.Expiry== fecha1) & (aapl_opt.Type=='call')]\ncall06_f2 = aapl_opt.loc[(aapl_opt.Expiry== fecha2) & (aapl_opt.Type=='call')]\ncall06_f1\n", "_____no_output_____" ] ], [ [ "## 2. ¿Qué es la volatilidad implícita?", "_____no_output_____" ], [ "**Volatilidad:** desviación estándar de los rendimientos.\n- ¿Cómo se calcula?\n- ¿Para qué calcular la volatilidad?", "_____no_output_____" ], [ "- **Para valuar derivados**, por ejemplo **opciones**.\n- Método de valuación de riesgo neutral (se supone que el precio del activo $S_t$ no se ve afectado por el riesgo de mercado).\n\nRecorderis de cuantitativas:\n1. Ecuación de Black-Scholes\n$$ dS(t) = \\mu S(t) + \\sigma S(t)dW_t$$\n2. Solución de la ecuación\n\nEl valor de una opción Europea de vainilla $V_t$ puede obtenerse por:\n$$V_t = F(t,S_t)$$ donde\n![imagen.png](attachment:imagen.png)\n3. Opción de compra europea, suponiendo que los precios del activo son lognormales\n4. Opción de venta europea, suponiendo que los precios del activo son lognormales", "_____no_output_____" ], [ "Entonces, ¿qué es la **volatilidad implícita**?\n\nLa volatilidad es una medida de la incertidumbre sobre el comportamiento futuro de un activo, que se mide habitualmente como la desviación típica de la rentabilidad de dicho activo. 
\n\nUna volatilidad implícita es aquella que cuando se sustituye en la ecuación de Black-Scholes o en sus ampliaciones,proporciona el precio de mercado de la opción.", "_____no_output_____" ], [ "## Volatility smile \n- Cuando las opciones con la misma fecha de vencimiento y el mismo activo subyacente, pero diferentes precios de ejercicio, se grafican por la volatilidad implícita, la tendencia es que ese gráfico muestre una sonrisa.\n- La sonrisa muestra que las opciones más alejadas 'in- or out-of-the-money' tienen la mayor volatilidad implícita.\n- No todas las opciones tendrán una sonrisa de volatilidad implícita. Las opciones de acciones a corto plazo y las opciones relacionadas con la moneda tienen más probabilidades de tener una sonrisa de volatilidad\n\n![imagen.png](attachment:imagen.png)\n\n> Fuente: https://www.investopedia.com/terms/v/volatilitysmile.asp", "_____no_output_____" ], [ "> ### Validar para la `fecha = 2020-06-19` y para la fecha `fecha = '2021-01-15'`", "_____no_output_____" ] ], [ [ "# para los call de la fecha 1\nax = call06_f1.set_index('Strike').loc[:, 'IV'].plot(figsize=(8,6))\nax.axvline(call06_f1.Underlying_Price.iloc[0], color='g');", "_____no_output_____" ], [ "# para los call de la fecha 2\nax = call06_f2.set_index('Strike').loc[:, 'IV'].plot(figsize=(8,6))\nax.axvline(call06_f2.Underlying_Price.iloc[0], color='g');", "_____no_output_____" ] ], [ [ "Analicemos ahora datos de los `put`", "_____no_output_____" ] ], [ [ "put06_f1 = aapl_opt.loc[(aapl_opt.Expiry==fecha1) & (aapl_opt.Type=='put')]\nput06_f1", "_____no_output_____" ] ], [ [ "Para los `put` de la `fecha 1`", "_____no_output_____" ] ], [ [ "ax = put06_f1.set_index('Strike').loc[:, 'IV'].plot(figsize=(8,6))\nax.axvline(put06_f1.Underlying_Price.iloc[0], color='g')", "_____no_output_____" ] ], [ [ "Con lo que hemos aprendido, deberíamos ser capaces de crear una función que nos devuelva un `DataFrame` de `pandas` con los precios de cierre ajustados de ciertas compañías en ciertas fechas:\n- Escribir la función a continuación", "_____no_output_____" ] ], [ [ "# Función para descargar precios de cierre ajustados:\ndef get_adj_closes(tickers, start_date=None, end_date=None):\n # Fecha inicio por defecto (start_date='2010-01-01') y fecha fin por defecto (end_date=today)\n # Descargamos DataFrame con todos los datos\n closes = web.DataReader(name=tickers, data_source='yahoo', start=start_date, end=end_date)\n # Solo necesitamos los precios ajustados en el cierre\n closes = closes['Adj Close']\n # Se ordenan los índices de manera ascendente\n closes.sort_index(inplace=True)\n return closes", "_____no_output_____" ] ], [ [ "- Obtener como ejemplo los precios de cierre de Apple del año pasado hasta la fecha. 
Graficar...", "_____no_output_____" ] ], [ [ "ticker = ['AAPL']\nstart_date = '2017-01-01'\n\ncloses_aapl = get_adj_closes(ticker, start_date)\ncloses_aapl.plot(figsize=(8,5));\nplt.legend(ticker);", "_____no_output_____" ] ], [ [ "- Escribir una función que pasándole el histórico de precios devuelva los rendimientos logarítmicos:", "_____no_output_____" ] ], [ [ "def calc_daily_ret(closes):\n return np.log(closes/closes.shift(1)).iloc[1:]", "_____no_output_____" ] ], [ [ "- Graficar...", "_____no_output_____" ] ], [ [ "ret_aapl = calc_daily_ret(closes_aapl)\nret_aapl.plot(figsize=(8,6));", "_____no_output_____" ] ], [ [ "También, descargar datos de opciones de Apple:", "_____no_output_____" ] ], [ [ "aapl = web.YahooOptions('AAPL')\naapl_opt = aapl.get_all_data().reset_index()\naapl_opt.set_index('Expiry').sort_index()", "_____no_output_____" ], [ "aapl_opt.Underlying_Price[0]", "_____no_output_____" ], [ "K = 135 # strike price\nindice_opt = aapl_opt.loc[(aapl_opt.Type=='call') & (aapl_opt.Strike==K) & (aapl_opt.Expiry=='2023-06-16')]\nindice_opt", "_____no_output_____" ], [ "i_opt= indice_opt.index\nopcion_valuar = aapl_opt.loc[i_opt[0]]\nopcion_valuar['JSON']", "_____no_output_____" ], [ "print('Precio del activo subyacente actual = ',opcion_valuar.Underlying_Price)", "Precio del activo subyacente actual = 133.5\n" ] ], [ [ "# Simulación de precios usando rendimiento simple y logarítmico ", "_____no_output_____" ], [ "* Comenzaremos por suponer que los rendimientos son un p.e. estacionario que distribuyen $\\mathcal{N}(\\mu,\\sigma)$.", "_____no_output_____" ], [ "## Rendimiento Simple", "_____no_output_____" ] ], [ [ "# Obtenemos el rendimiento simple\nRi = closes_aapl.pct_change(1).iloc[1:]\n# Obtenemos su media y desviación estándar de los rendimientos\nmu_R = Ri.mean()[0]\nsigma_R = Ri.std()[0]\nRi", "_____no_output_____" ], [ "today = pd.to_datetime(date.today())\n\n# Obtener fecha de cierre de la opción a valuar\nexpiry = opcion_valuar.Expiry\n\nlen(pd.date_range(today, expiry, freq='B'))", "_____no_output_____" ], [ "from datetime import date\n\n# Encontrar la fecha de hoy en fomato timestamp\ntoday = pd.to_datetime(date.today())\n\n# Obtener fecha de cierre de la opción a valuar\nexpiry = opcion_valuar.Expiry\n\nnscen = 10000\n\n# Generar rangos de fechas de días hábiles\ndates = pd.date_range(today, expiry, freq='B')\nndays = len(dates)", "_____no_output_____" ] ], [ [ "## Mostrar como simular precios usando los rendimientos\n\n### 1. Usando rendimiento simple", "_____no_output_____" ] ], [ [ "# Simular los rendimientos\n\n# Rendimiento diario \ndt = 1\n# Z ~ N(0,1) normal estándar (ndays, nscen)\nZ = np.random.randn(ndays, nscen)\n\n# Simulación normal de los rendimientos\nRi_dt = pd.DataFrame(Z * sigma_R * np.sqrt(dt) + mu_R * dt, index=dates)\nRi_dt", "_____no_output_____" ] ], [ [ "**Simulación de precios usando el rendimiento simple**: Como demostramos en clases pasadas la fórmula de simular precios usando el rendimiento simple es la siguiente:\n$$\nS_T = S_0 \\prod_{i=0}^{T-1} (R_i + 1)\n$$", "_____no_output_____" ] ], [ [ "S0 = opcion_valuar.Underlying_Price\nS_T = S0*(Ri_dt + 1).cumprod()", "_____no_output_____" ], [ "# Simulación del precio\nS_0 = opcion_valuar.Underlying_Price\nS_T = S_0*(1+Ri_dt).cumprod()\nS_T.iloc[0,:] = S_0\n\n# Se muestran los precios simulados con los precios descargados\npd.concat([closes_aapl, S_T.iloc[:, :10]]).plot(figsize=(8,6));\nplt.title('Simulación de precios usando rendimiento simple');", "_____no_output_____" ] ], [ [ "### 2. 
Rendimiento Logarítmico", "_____no_output_____" ], [ "**Simulación de precios usando el rendimiento logarítmico**: Como demostramos en clases pasadas la fórmula de simular precios usando el rendimiento simple es la siguiente:\n$$\nS_T = S_0 \\cdot e^{\\sum_{i=1}^{T} r_i }\n$$", "_____no_output_____" ] ], [ [ "Z.shape, len(dates)", "_____no_output_____" ], [ "# Calcular rendimiento logarítmico\nri = calc_daily_ret(closes_aapl)\n\n# Usando la media y desviación estándar de los rendimientos logarítmicos\nmu_r = ri.mean()[0]\nsigma_r = ri.std()[0]\n\n# Simulación del rendimiento\ndt = 1\nZ = np.random.randn(ndays, nscen)\nsim_ret_ri = pd.DataFrame(mu_r * dt + Z * sigma_r * np.sqrt(dt), index=dates )\n\n# Simulación del precio\nS_0 = closes_aapl.iloc[-1,0]\nS_T2 = S_0*np.exp(sim_ret_ri.cumsum())\n\n# Se muestran los precios simulados con los precios descargados\n# pd.concat([closes_aapl,S_T2]).plot(figsize=(8,6));\n# plt.title('Simulación de precios usando rendimiento logarítmico');\n\n# from sklearn.metrics import mean_absolute_error\ne1 = np.abs(S_T-S_T2).mean().mean()\ne1", "_____no_output_____" ], [ "print('Las std usando rendimientos logarítmicos y simples son similares')\nsigma_R,sigma_r", "Las std usando rendimientos logarítmicos y simples son similares\n" ] ], [ [ "Con los precios simulados debemos de encontrar el valor de la opción según la función de pago correspondiente. Para este caso es:\n$$\nmax(S_T - K,0)\n$$", "_____no_output_____" ] ], [ [ "opcion_valuar['JSON']", "_____no_output_____" ] ], [ [ "## Valuación usando el modelo de Black and Scholes\nLos supuestos que hicieron Black y Scholes cuando dedujeron su fórmula para la valoración de opciones fueron los siguientes:\n1. El comportamiento del precio de la acción corresponde al modelo logarítmico normal, con $\\mu$ y $\\sigma$\nconstantes.\n2. No hay costos de transición ni impuestos. Todos los títulos son perfectamente divisibles.\n3. No hay dividendos sobre la acción durante la vida de la opción.\n4. No hay oportunidades de arbitraje libres de riesgo.\n5. La negociación de valores es continua.\n6. Los inversionistas pueden adquirir u otorgar préstamos a la misma tasa de interés libre de riesgo.\n7. La tasa de interés libre de riesgo a corto plazo, r, es constante.\n\nBajo los supuestos anteriores podemos presentar las **fórmulas de Black-Scholes** para calcular los precios de compra y de venta europeas sobre acciones que no pagan dividendos:\n$$\n\\text{Valor actual de la opción} = V(S_0, T) = S_0 N(d_1) - K e^{-r*T} N(d_2)\n$$\n\ndonde:\n- $S_0$ = precio de la acción en el momento actual.\n- $K$ = precio \"de ejercicio\" de la opción.\n- $r$ = tasa de interés libre de riesgo.\n- $T$ = tiempo que le resta de vida a la opción.\n- $N(d)$ = función de distribución de la variable aleatoria normal con media nula y desviación típica unitaria\n(probabilidad de que dicha variable sea menor o igual que d). Función de distribución de probabilidad acumulada.\n- $\\sigma$ = varianza por período de la tasa o tipo de rendimiento de la opción.\n\n$$\nd_1 = \\frac{\\ln{\\frac{S_0}{K}} + (r + \\sigma^2 / 2) T}{\\sigma \\sqrt{T}}, \\quad d_2 = \\frac{\\ln{\\frac{S_0}{K}} + (r - \\sigma^2 / 2) T}{\\sigma \\sqrt{T}} \n$$\n\n**Nota**: observe que el __rendimiento esperado__ sobre la acción no se incluye en la ecuación de Black-Scholes. 
Hay un principio general conocido como valoración neutral al riesgo, el cual establece que cualquier título que depende de otros títulos negociados puede valorarse bajo el supuesto de que el mundo es neutral al riesgo. El resultado demuestra ser muy útil en la práctica. *En un mundo neutral al riesgo, el rendimiento esperado de todos los títulos es la tasa de interés libre de riesgo*, y la tasa de descuento correcta para los flujos de efectivo esperados también es la tasa de interés libre de riesgo.\n\nEl equivalente a la función de Black-Scholes (valuación de la opción) se puede demostrar que es:\n$$\n\\text{Valor actual de la opción} = V(S_0, T) = E^*(e^{-rT} f(S_T)) = e^{-rT} E^*(f(S_T))\n$$\n\ndonde \n$f(S_T)$ representa la función de pago de la opción, que para el caso de un call europeo sería $f(S_T) = \\max({S_T - K})$.\n\n> Referencia: http://diposit.ub.edu/dspace/bitstream/2445/32883/1/Benito_el_modelo_de_Black_Sholes.pdf (página 20)\n\n> Referencia 2: http://www.cmat.edu.uy/~mordecki/courses/upae/upae-curso.pdf (página 24)", "_____no_output_____" ], [ "- Hallar media y desviación estándar muestral de los rendimientos logarítmicos", "_____no_output_____" ] ], [ [ "mu = ret_aapl.mean()[0]\nsigma = ret_aapl.std()[0]\nmu, sigma", "_____no_output_____" ] ], [ [ "No se toma la media sino la tasa libre de riesgo\n> Referencia: https://www.treasury.gov/resource-center/data-chart-center/interest-rates/Pages/TextView.aspx?data=yield", "_____no_output_____" ] ], [ [ "# Tasa de bonos de 1 yr de fecha 21/04/2021 -> 7%\nr = 0.007/360 # Tasa diaria", "_____no_output_____" ] ], [ [ "- Simularemos el tiempo de contrato desde `HOY` hasta la fecha de `Expiry`, 10 escenarios:\n \n - Generar fechas", "_____no_output_____" ] ], [ [ "from datetime import date\n\ntoday = pd.Timestamp(date.today())\nexpiry = opcion_valuar.Expiry\n\ndates = pd.date_range(start=today, end=expiry, freq='B')\n\nndays = len(dates)\nnscen = 10\ndates", "_____no_output_____" ] ], [ [ "- Generamos 10 escenarios de rendimientos simulados y guardamos en un dataframe", "_____no_output_____" ] ], [ [ "sim_ret = pd.DataFrame(sigma*np.random.randn(ndays,nscen)+r, index=dates)\nsim_ret.cumsum()\n# Las columnas son los escenarios y las filas son las días de contrato", "_____no_output_____" ] ], [ [ "- Con los rendimientos simulados, calcular los escenarios de precios respectivos:", "_____no_output_____" ] ], [ [ "S0 = closes_aapl.iloc[-1,0] # Condición inicial del precio a simular\nsim_closes = S0*np.exp(sim_ret.cumsum())\nsim_closes.iloc[0, :] = S0\nsim_closes", "_____no_output_____" ] ], [ [ "- Graficar:", "_____no_output_____" ] ], [ [ "sim_closes.plot(figsize=(8,6));", "_____no_output_____" ], [ "# Se muestran los precios simulados con los precios descargados\npd.concat([closes_aapl,sim_closes]).plot(figsize=(8,6));", "_____no_output_____" ], [ "opcion_valuar['JSON']", "_____no_output_____" ], [ "opcion_valuar", "_____no_output_____" ], [ "from datetime import date\nHoy = date.today()\n\n# strike price de la opción\nK = opcion_valuar['JSON']['strike'] \n\n# Fechas a simular\ndates = pd.date_range(start= Hoy, periods = ndays, freq='B')\n\n# Escenarios y número de días\nndays = len(dates)\nnscen = 100000\n\n# Condición inicial del precio a simular\nS0 = closes_aapl.iloc[-1,0] \n\n# simular rendimientos\nsim_ret = pd.DataFrame(sigma*np.random.randn(ndays,nscen)+r,index=dates)\n\n# Simular precios\nsim_closes = S0*np.exp(sim_ret.cumsum())\n", "_____no_output_____" ], [ "# Valor del call europeo\ncall = 
pd.DataFrame({'Prima':np.exp(-r*ndays) \\\n *np.fmax(sim_closes-K, 0).mean(axis=1)}, index=dates)\ncall.plot();\n", "_____no_output_____" ] ], [ [ "La valuación de la opción es:", "_____no_output_____" ] ], [ [ "call.iloc[-1]", "_____no_output_____" ] ], [ [ "Intervalo de confianza del 99%", "_____no_output_____" ] ], [ [ "confianza = 0.99\nsigma_est = sim_closes.iloc[-1].sem()\nmean_est = call.iloc[-1].Prima\ni2 = st.norm.interval(confianza, loc=mean_est, scale=sigma_est)\nprint(i2)\n", "(34.383953470513504, 35.621289698015715)\n" ], [ "opcion_valuar['JSON']", "_____no_output_____" ] ], [ [ "## Precios simulados usando técnicas de reducción de varianza", "_____no_output_____" ] ], [ [ "# Usando muestreo estratificado----> #estratros = nscen\nU = (np.arange(0,nscen)+np.random.rand(ndays,nscen))/nscen\nZ = st.norm.ppf(U)\n\nsim_ret2 = pd.DataFrame(sigma*Z+r,index=dates)\nsim_closes2 = S0*np.exp(sim_ret.cumsum())\n\n# Función de pago\nstrike = pd.DataFrame(K*np.ones([ndays,nscen]), index=dates)\ncall = pd.DataFrame({'Prima':np.exp(-r*ndays) \\\n *np.fmax(sim_closes2-strike,np.zeros([ndays,nscen])).T.mean()}, index=dates)\ncall.plot();", "_____no_output_____" ] ], [ [ "La valuación de la opción es:", "_____no_output_____" ] ], [ [ "call.iloc[-1]", "_____no_output_____" ] ], [ [ "Intervalo de confianza del 99%", "_____no_output_____" ] ], [ [ "confianza = 0.99\nsigma_est = sim_closes2.iloc[-1].sem()\nmean_est = call.iloc[-1].Prima\ni2 = st.norm.interval(confianza, loc=mean_est, scale=sigma_est)\nprint(i2)", "(34.383953470513504, 35.621289698015715)\n" ] ], [ [ "### Análisis de la distribución de los rendimientos", "_____no_output_____" ], [ "### Ajustando norm", "_____no_output_____" ] ], [ [ "ren = calc_daily_ret(closes_aapl) # rendimientos \ny,x,_ = plt.hist(ren['AAPL'],bins=50,density=True,label='Histograma rendimientos')\n\nmu_fit,sd_fit = st.norm.fit(ren) # Se ajustan los parámetros de una normal\n# Valores máximo y mínimo de los rendiemientos a generar\nren_max = max(x);ren_min = min(x)\n# Vector de rendimientos generados\nren_gen = np.arange(ren_min,ren_max,0.001)\n# Generación de la normal ajustado con los parámetros encontrados\ncurve_fit = st.norm.pdf(ren_gen,loc=mu_fit,scale=sd_fit)\nplt.plot(ren_gen,curve_fit,label='Distribución ajustada')\nplt.legend()\nplt.show()\n", "_____no_output_____" ] ], [ [ "### Ajustando t", "_____no_output_____" ] ], [ [ "# rendimientos \nren = calc_daily_ret(closes_aapl) \n\n# Histograma de los rendimientos\ny, x, _ = plt.hist(ren['AAPL'], bins=50, density=True, label='Histograma rendimientos')\n\n# Se ajustan los parámetros de una distribución\ndist = 't'\nparams = getattr(st, dist).fit(ren.values) \n\n# Generación de la pdf de la distribución ajustado con los parámetros encontrados\ncurve_fit = getattr(st, dist).pdf(x, *params)\nplt.plot(x, curve_fit, label='Distribución ajustada')\nplt.legend()\nplt.show()\n\n# Q-Q\nst.probplot(ren['AAPL'], sparams=params[:-2], dist=dist, plot=plt);\n", "_____no_output_____" ] ], [ [ "## 3. Valuación usando simulación: uso del histograma de rendimientos\n\nTodo el análisis anterior se mantiene. 
Solo cambia la forma de generar los números aleatorios para la simulación montecarlo.\n\nAhora, generemos un histograma de los rendimientos diarios para generar valores aleatorios de los rendimientos simulados.", "_____no_output_____" ], [ "- Primero, cantidad de días y número de escenarios de simulación", "_____no_output_____" ] ], [ [ "ndays = len(dates) \nnscen = 10", "_____no_output_____" ] ], [ [ "- Del histograma anterior, ya conocemos las probabilidades de ocurrencia, lo que se llamó como variable `y`", "_____no_output_____" ] ], [ [ "prob", "_____no_output_____" ], [ "prob = y/np.sum(y)\nvalues = x[1:]\nprob.sum()", "_____no_output_____" ] ], [ [ "- Con esto, generamos los números aleatorios correspondientes a los rendimientos (tantos como días por número de escenarios).", "_____no_output_____" ] ], [ [ "# Rendimientos simulados\nret = np.random.choice(values, ndays*nscen, p=prob)\n# Fechas\ndates = pd.date_range(start=Hoy,periods=ndays)\n# Rendimien en Data Frame\nsim_ret_hist = pd.DataFrame(ret.reshape((ndays,nscen)),index=dates)\nsim_ret_hist", "_____no_output_____" ], [ "sim_closes_hist = (closes_aapl.iloc[-1,0])*np.exp(sim_ret_hist.cumsum())\nsim_closes_hist", "_____no_output_____" ], [ "sim_closes_hist.plot(figsize=(8,6),legend=False);", "_____no_output_____" ], [ "pd.concat([closes_aapl,sim_closes_hist]).plot(figsize=(8,6),legend=False);\nplt.title('Simulación usando el histograma de los rendimientos')", "_____no_output_____" ], [ "K = opcion_valuar['JSON']['strike']\nndays = len(dates)\nnscen = 100000\n\n# Histograma tomando la tasa libre de riesgo\nfreq, values = np.histogram(ret_aapl+r-mu, bins=2000)\nprob = freq/np.sum(freq)\n\n# Simulación de los rendimientos\nret = np.random.choice(values[1:], ndays*nscen, p=prob)\n\n# Simulación de precios\nsim_ret_hist = pd.DataFrame(ret.reshape((ndays,nscen)),index=dates)\nsim_closes_hist = (closes_aapl.iloc[-1,0]) * np.exp(sim_ret_hist.cumsum())", "_____no_output_____" ], [ "strike = pd.DataFrame(K*np.ones(ndays*nscen).reshape((ndays,nscen)), index=dates)\ncall_hist = pd.DataFrame({'Prima':np.exp(-r*ndays) \\\n *np.fmax(sim_closes_hist-strike,np.zeros(ndays*nscen).reshape((ndays,nscen))).T.mean()}, index=dates)\ncall_hist.plot();", "_____no_output_____" ], [ "call_hist.iloc[-1]", "_____no_output_____" ], [ "opcion_valuar['JSON']", "_____no_output_____" ] ], [ [ "Intervalo de confianza del 95%", "_____no_output_____" ] ], [ [ "confianza = 0.95\nsigma_est = sim_closes_hist.iloc[-1].sem()\nmean_est = call_hist.iloc[-1].Prima\ni1 = st.t.interval(confianza,nscen-1, loc=mean_est, scale=sigma_est)\ni2 = st.norm.interval(confianza, loc=mean_est, scale=sigma_est)\nprint(i1)\nprint(i1)\n", "(38.04373480098675, 39.00930377235172)\n(38.04373480098675, 39.00930377235172)\n" ] ], [ [ "# <font color = 'red'> Tarea: </font>\n\nReplicar el procedimiento anterior para valoración de opciones 'call', pero en este caso para opciones tipo 'put' y compararlo con el valor teórico de la ecuación de Black-Scholes", "_____no_output_____" ], [ "<script>\n $(document).ready(function(){\n $('div.prompt').hide();\n $('div.back-to-top').hide();\n $('nav#menubar').hide();\n $('.breadcrumb').hide();\n $('.hidden-print').hide();\n });\n</script>\n\n<footer id=\"attribution\" style=\"float:right; color:#808080; background:#fff;\">\nCreated with Jupyter by Esteban Jiménez Rodríguez and modified by Oscar Jaramillo Z.\n</footer>", "_____no_output_____" ] ] ]
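The notebook above prices the European call by Monte Carlo under the risk-neutral drift; for reference, the closed-form Black–Scholes price implied by the d1/d2 formulas quoted there can be computed directly. This is only a minimal sketch: it assumes `r` and `sigma` are the per-day rate and volatility defined in the notebook, `T` is the horizon in days (`ndays`), and `S0`/`K` are the spot and strike used there.

```python
# Minimal sketch: closed-form Black-Scholes price for a European call,
# matching the d1/d2 formulas quoted in the notebook above. Assumes r and
# sigma are per-day quantities and T is the horizon in days, consistent
# with the daily simulation (S0, K, r, sigma, ndays come from the notebook).
import numpy as np
from scipy.stats import norm

def black_scholes_call(S0, K, r, sigma, T):
    d1 = (np.log(S0 / K) + (r + 0.5 * sigma**2) * T) / (sigma * np.sqrt(T))
    d2 = d1 - sigma * np.sqrt(T)
    return S0 * norm.cdf(d1) - K * np.exp(-r * T) * norm.cdf(d2)

# Example usage with the notebook's variables (call left commented out):
# bs_price = black_scholes_call(S0, K, r, sigma, ndays)
# print(bs_price)  # compare against call.iloc[-1] from the Monte Carlo estimate
```

With the same risk-free drift in the simulation, the Monte Carlo estimate should approach this closed-form value as the number of scenarios grows.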
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ] ]
d0a2f5978d1ae8f3c156ae70c4ad1d43baa2e354
13,850
ipynb
Jupyter Notebook
uebung_python1/5_Comprehension.ipynb
AlexaZiRo/share
7473c646070c8066c18a476511ef91a4c4eca2d8
[ "MIT" ]
null
null
null
uebung_python1/5_Comprehension.ipynb
AlexaZiRo/share
7473c646070c8066c18a476511ef91a4c4eca2d8
[ "MIT" ]
null
null
null
uebung_python1/5_Comprehension.ipynb
AlexaZiRo/share
7473c646070c8066c18a476511ef91a4c4eca2d8
[ "MIT" ]
null
null
null
33.616505
622
0.592924
[ [ [ "# List and Dictionary Comprehensions\n\nComprehension is a different way to construct lists and dictionaries. Up to now, every time that we have built up a list or dictionary, we began by initializing it. We then took advantage of their mutability inherent to build them up one element or key-value pair at a time. However, there is a more succinct way to accomplish the vast majority of your list and dictionary construction tasks.\n\n## Objectives\n\nAt the end of this notebook you should be able to:\n\n- use list comprehension\n- use dictionary comprehension\n- use tuple comprehension\n\n## List Comprehensions\n\nBefore we dive into the specifics about how this new tool (list comprehensions) works, let's look at an example question where we build a list. We can then show how to perform the same task with our new tool and learn how it works.\n\nLet's imagine that we have the list `[1, 5, 9, 33]` stored in the variable `my_list`. Now, let's assume that we want to make a new list of the squares of all the values in `my_list` and call it `my_squares`. With the tools we have covered so far, you might write:", "_____no_output_____" ] ], [ [ "my_list = [1, 5, 9, 33]\nmy_squares = []\nfor num in my_list:\n my_squares.append(num ** 2)\nprint(my_squares)", "[1, 25, 81, 1089]\n" ] ], [ [ "Now, `my_squares` will hold the list `[1, 25, 81, 1089]`. To get this, we were simply specifying a bunch of stuff that we wanted to add on to the end of the `my_squares` list, with a starting point at `my_list`. So, from a high level, we can write the framework of creating a list in code as:\n\n```python\nlist_were_building = []\nfor thing in iterable:\n list_were_building.append(transform(thing))\n```\n\nWith this structure in mind, we can use the following syntax to perform the same task of building up a list in a single line! Check it out, along with how it would look for the construction of `my_squares`.\n\n```python\nlist_were_building = [transform(thing) for thing in iterable]\n```\n\nThis last line of code does the exact same thing as the three lines above! In this line, the thing that we would pass to the `append()` method, `transform(thing)`, comes at the beginning of the statement in the `[]`. These `[]` allow for the final product to be defined as a list. Then, the `for` loop statement that we had written is at the end. This is the basic idea behind the [list comprehension](https://en.wikipedia.org/wiki/List_comprehension).\n\nSimilarly, we can build our `my_squares` list using a list comprehension:", "_____no_output_____" ] ], [ [ "my_squares2 = [num ** 2 for num in my_list]\nprint(my_squares2)", "[1, 25, 81, 1089]\n" ] ], [ [ "But wait! There's more! Remember in all the examples where we were getting evens, we had a condition to decide when to append a value to a list? We can also use conditions to determine what \"transformed things\" get added in a list comprehension! 
Let's look at the evens list builder to hammer this home.\n\n```python\n# Old way of constructing list of evens\nevens = []\nfor num in range(10):\n if num % 2 == 0:\n evens.append(num)\n\n# Old way at high level\nlist_were_building = []\nfor thing in iterable:\n if condition:\n list_were_building.append(transform(thing))\n\n# List comprehension way of constructing list of evens\nevens = [num for num in range(10) if num % 2 == 0]\n\n# List comprehension way at high level\nlist_we_are_building = [transform(thing) for thing in iterable if condition]\n```\nThe way `transform()` was called in the above examples, as though it were a function, is an option when writing list comps. For example, the `my_squares` example could be accomplished in the same way with:\n\n```python\ndef square(num):\n return num ** 2\n\nmy_squares = [square(num) for num in my_list]\n```\n\nThis might seem silly, since we could just write `num ** 2` directly in the list comp as we did above. However, this calling of a function in the list comp becomes a powerful idea when you want to transform the values being iterated over in a complex way.\n\n## Dictionary Comprehensions\n\nJust as list comprehensions are a more succinct way of constructing a list, we have the same ability for dictionaries. Dictionary comprehensions operate in the same way as their list counterparts, except for one fundamental difference. Recall that dictionaries have no `append()` method, and that a new key-value pair is added to the dictionary with the syntax: `my_dict[new_key] = new_value`. In this way, it makes sense that we need syntax to pass both the key and value to the dictionary comprehension.\n\nLuckily, Python gives a simple way to pass a key and value pair, and it is already very familiar to you! You just separate the key and value that you want to enter into the dictionary with a colon, like we did when we were hardcoding the contents in the `{}` dictionary constructor, i.e. `my_dict = {1: 1, 2: 4}`. Let's look at an example where we make a dictionary with the keys as the numbers 1 - 5, and the values as the squares of the keys. We'll do this with both the old way of constructing a dictionary, and then with a dictionary comprehension so that we can see the similarities.", "_____no_output_____" ] ], [ [ "# Standard way. \nsquares_dict = {}\nfor num in range(1, 6): \n squares_dict[num] = num ** 2\nprint(squares_dict)", "{1: 1, 2: 4, 3: 9, 4: 16, 5: 25}\n" ], [ "# Dictionary Comprehension way. \nsquares_dict2 = {num: num ** 2 for num in range(1, 6)}\nprint(squares_dict2)", "{1: 1, 2: 4, 3: 9, 4: 16, 5: 25}\n" ] ], [ [ "We can see that in both cases, we're going through the numbers 1 - 5 with `range(1, 6)` and those `num`s are being assigned as keys. The values assigned to those keys are the squares of the keys, assigned with `squares_dict[num] = num ** 2` and `num: num ** 2`, respectively. Just as with list comprehensions, dictionary comprehensions read as the first thing being the `key: value` pair being added to the dictionary. Then, left to right (top down in the old way), we have what the loop definition would look like. And, just as with list comps, we can add a condition to filter what gets put into the dictionary.\n\nSay that we want a dictionary with a random integer between 1 and 10, associated with each of the values in the list of words: `['cow', 'chicken', 'horse', 'moose']`. Let's look at how we'd do that with a dictionary comprehension. (We're importing from the Python library `random` to get our random integers. 
We'll talk more about importing later in the course.)", "_____no_output_____" ] ], [ [ "from random import randint\nanimals_list = ['cow', 'chicken', 'horse', 'moose']", "_____no_output_____" ], [ "animals_dict = {animal: randint(1, 10) for animal in animals_list}\nprint(animals_dict)", "{'cow': 5, 'chicken': 8, 'horse': 5, 'moose': 8}\n" ] ], [ [ "## Other Comprehensions\n\nYou can actually use the syntax from the list comprehensions to construct a tuple in what seems like a dynamic way. Take the example.\n", "_____no_output_____" ] ], [ [ "my_tuple = tuple(num for num in range(10) if num % 2 == 0)\nprint(my_tuple)", "(0, 2, 4, 6, 8)\n" ] ], [ [ "All we are doing here is passing `num for num in range(10) if num % 2 == 0` to the tuple constructor, `()`. Since the tuple constructor takes any iterable, which that statement produces, it makes a tuple out of the contents. Note that it would be impossible to make a tuple with statements like this the \"old way\", since tuples don't support appending or mutation of any kind!\n\nFor this reason, in addition to their readability, comprehensions of all types are considered the most Pythonic way of constructing new data structures.\n\n## Check your understanding\n\n1. Take the following for loop, and translate it into a list. comprehensions: \n\n odds = []\n for num in range(10): \n if num % 2 != 0: \n odds.append(num)\n \n \n1. Take the following for loop, and translate it into a dictionary comprehensions: \n\n \n cubes = {}\n for num in range(1, 6): \n cubes[num] = num ** 3\n ", "_____no_output_____" ] ], [ [ "odds = [num for num in range(10) if num % 2 != 0]\nodds", "_____no_output_____" ], [ "cubes = {num: num **3 for num in range(1,6)}\ncubes", "_____no_output_____" ] ] ]
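As a small extra sketch in the same spirit as the notebook above (these examples are illustrative additions, not part of the original), the comprehension syntax also extends to set comprehensions, generator expressions, and nested comprehensions:

```python
# Hypothetical extra examples extending the comprehension patterns above.
words = ['cow', 'chicken', 'horse', 'moose', 'cow']

# Set comprehension: curly braces without a key-value pair build a set,
# so duplicates are removed automatically.
first_letters = {w[0] for w in words}           # {'c', 'h', 'm'}

# Generator expression: like a list comp with (), but values are produced
# lazily, which is handy when feeding another function such as sum().
total_length = sum(len(w) for w in words)       # 23

# Nested comprehension: flatten a list of lists.
pairs = [[1, 2], [3, 4], [5, 6]]
flat = [num for pair in pairs for num in pair]  # [1, 2, 3, 4, 5, 6]
```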
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ] ]
d0a3063148e92383e1243315f1ef111275d1ca08
90,839
ipynb
Jupyter Notebook
Credit Risk Evaluator.ipynb
memeguerrini/supervised-learning-challenge
775c931d0b420a8d0e68c5d0986e30519765edaa
[ "ADSL" ]
null
null
null
Credit Risk Evaluator.ipynb
memeguerrini/supervised-learning-challenge
775c931d0b420a8d0e68c5d0986e30519765edaa
[ "ADSL" ]
null
null
null
Credit Risk Evaluator.ipynb
memeguerrini/supervised-learning-challenge
775c931d0b420a8d0e68c5d0986e30519765edaa
[ "ADSL" ]
null
null
null
36.452247
211
0.292418
[ [ [ "import numpy as np\nimport pandas as pd\nfrom pathlib import Path\nfrom matplotlib import pyplot as plt\nfrom sklearn.preprocessing import LabelEncoder", "_____no_output_____" ], [ "train_df = pd.read_csv(Path('Resources/2019loans.csv'))\ntest_df = pd.read_csv(Path('Resources/2020Q1loans.csv'))", "_____no_output_____" ], [ "train_df", "_____no_output_____" ], [ "test_df", "_____no_output_____" ], [ "# Convert categorical data to numeric and separate target feature for training data\nx = train_df.drop('loan_status',axis = 1)\nx_train = pd.get_dummies(x)\nx_train = x_train.drop('Unnamed: 0',axis = 1)\nx_train", "_____no_output_____" ], [ "y = pd.get_dummies(train_df['loan_status'])\ny_train = y.drop('high_risk',axis =1)\ny_train = y_train.rename(columns = {'low_risk':'loan_status'})\ny_train = y_train.values.ravel()\ny_train", "_____no_output_____" ], [ "# Convert categorical data to numeric and separate target feature for testing data\nx_td = test_df.drop('loan_status',axis = 1)\nx_test = pd.get_dummies(x_td)\nx_test = x_test.drop('Unnamed: 0',axis = 1)\nx_test", "_____no_output_____" ], [ "y_td = pd.get_dummies(test_df['loan_status'])\ny_test= y_td.drop('high_risk',axis =1)\ny_test = y_test.rename(columns={'low_risk':'loan_status'})\ny_test", "_____no_output_____" ], [ "# add missing dummy variables to testing set\nmissing_cols = set(x_train.columns) - set(x_test.columns)\nmissing_cols", "_____no_output_____" ], [ "for c in missing_cols:\n x_test['debt_settlement_flag_Y'] = 0\n# Ensure the order of column in the test set is in the same order than in train set\nx_test = x_test[x_train.columns]\nx_test", "_____no_output_____" ] ], [ [ "# LOGISTIC REGRESSION MODEL", "_____no_output_____" ] ], [ [ "#import dependencies\nimport matplotlib.pyplot as plt\nfrom sklearn.linear_model import LogisticRegression", "_____no_output_____" ], [ "# Train the Logistic Regression model on the unscaled data and print the model score\nclassifier = LogisticRegression()\n\n# Fit our model using the training data\nclassifier.fit(x_train, y_train)\nprint(f\"Training Data Score: {classifier.score(x_train, y_train)}\")\nprint(f\"Testing Data Score: {classifier.score(x_test, y_test)}\")", "Training Data Score: 0.6564860426929392\nTesting Data Score: 0.5199914929817099\n" ] ], [ [ "# RandomForest Classifier Model", "_____no_output_____" ] ], [ [ "#import dependencies\nfrom sklearn.ensemble import RandomForestClassifier\nfrom sklearn.metrics import classification_report, roc_curve", "_____no_output_____" ], [ "# Train a Random Forest Classifier model and print the model score\nrf_clf = RandomForestClassifier(random_state=1)\nrf_clf.fit(x_train, y_train)\n#classification reports\ny_pred = rf_clf.predict(x_test)\nprint(classification_report(y_test, y_pred))", " precision recall f1-score support\n\n 0 0.75 0.50 0.60 2351\n 1 0.63 0.83 0.71 2351\n\n accuracy 0.67 4702\n macro avg 0.69 0.67 0.66 4702\nweighted avg 0.69 0.67 0.66 4702\n\n" ], [ "#model score\nprint(f\"Training Data Score: {rf_clf.score(x_train, y_train)}\")\nprint(f\"Testing Data Score: {rf_clf.score(x_test, y_test)}\")", "Training Data Score: 1.0\nTesting Data Score: 0.6671629094002552\n" ] ], [ [ "# Scaling the Data", "_____no_output_____" ] ], [ [ "#import dependencies\nfrom sklearn.preprocessing import StandardScaler", "_____no_output_____" ], [ "# Scale the data and all features\nscaler = StandardScaler().fit(x_train)\nx_train_scaled = scaler.transform(x_train)\nx_test_scaled = scaler.transform(x_test)\n\n", "_____no_output_____" ], [ "# Train the 
Logistic Regression model on the scaled data and print the model score\n# Create a logistic regression model\nclassifier_scaled = LogisticRegression()\n\n# Fit our model using the training data\nclassifier_scaled.fit(x_train_scaled, y_train)\nprint(f\"Training Data Score: {classifier.score(x_train_scaled, y_train)}\")\nprint(f\"Testing Data Score: {classifier.score(x_test_scaled, y_test)}\")", "Training Data Score: 0.6361247947454844\nTesting Data Score: 0.506380263717567\n" ], [ "# Train a Random Forest Classifier model on the scaled data and print the model score\n# Train a Random Forest Classifier model and print the model score\nrf_clf_scaled = RandomForestClassifier(random_state=42)\nrf_clf_scaled.fit(x_train_scaled, y_train)\n#classification reports\ny_pred = rf_clf_scaled.predict(x_test_scaled)\nprint(classification_report(y_test, y_pred))", " precision recall f1-score support\n\n 0 0.73 0.48 0.58 2351\n 1 0.62 0.82 0.70 2351\n\n accuracy 0.65 4702\n macro avg 0.67 0.65 0.64 4702\nweighted avg 0.67 0.65 0.64 4702\n\n" ], [ "#model score\nprint(f\"Training Data Score: {rf_clf_scaled.score(x_train_scaled, y_train)}\")\nprint(f\"Testing Data Score: {rf_clf_scaled.score(x_test_scaled, y_test)}\")", "Training Data Score: 1.0\nTesting Data Score: 0.6546150574223735\n" ] ], [ [ "# Analysis\nThe Randomforest model even though it seems to be overfitting, is showing much better results with the training data set. The logistic_Regression model results are not as good as the Randomforest model.\nA second obeservation is both the Randomforest and the Logistic_Regression models both show unfavourable results with the test data set.", "_____no_output_____" ] ] ]
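The analysis in the notebook above notes that the random forest scores 1.0 on the training data, a sign of overfitting. A minimal, hedged sketch of two common follow-ups — cross-validation on the training set and inspecting feature importances — assuming `x_train`, `y_train` and the fitted `rf_clf` from the notebook are available:

```python
# Sketch only; assumes x_train, y_train and rf_clf exist as defined above.
from sklearn.model_selection import cross_val_score
import pandas as pd

# 5-fold cross-validation gives a less optimistic accuracy estimate than
# the perfect training score.
cv_scores = cross_val_score(rf_clf, x_train, y_train, cv=5)
print(cv_scores.mean(), cv_scores.std())

# Feature importances highlight which columns drive the predictions.
importances = pd.Series(rf_clf.feature_importances_, index=x_train.columns)
print(importances.sort_values(ascending=False).head(10))
```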
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ] ]
d0a309a3c92aa2396384478392bced5a65b10262
10,925
ipynb
Jupyter Notebook
Lectures/Week07-Segmentation/notebooks/2-segmentation-of-cells-using-opencv-cxx.ipynb
effepivi/ICE-3111-Computer_Vision
d8cf5c8e9e35b107573eeba6912a05db43d0dcd5
[ "BSD-3-Clause" ]
null
null
null
Lectures/Week07-Segmentation/notebooks/2-segmentation-of-cells-using-opencv-cxx.ipynb
effepivi/ICE-3111-Computer_Vision
d8cf5c8e9e35b107573eeba6912a05db43d0dcd5
[ "BSD-3-Clause" ]
null
null
null
Lectures/Week07-Segmentation/notebooks/2-segmentation-of-cells-using-opencv-cxx.ipynb
effepivi/ICE-3111-Computer_Vision
d8cf5c8e9e35b107573eeba6912a05db43d0dcd5
[ "BSD-3-Clause" ]
1
2021-12-03T22:08:49.000Z
2021-12-03T22:08:49.000Z
25
574
0.543982
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
d0a3298e7a1affaea03494fe7cee7853c7c8cbcb
2,690
ipynb
Jupyter Notebook
JupyterNotebooks/Labs/Lab 7.ipynb
WolfyVST/CMPT-220L-203-22S
200cc519c0d177fc71d6c945328e35f6ce907c47
[ "MIT" ]
null
null
null
JupyterNotebooks/Labs/Lab 7.ipynb
WolfyVST/CMPT-220L-203-22S
200cc519c0d177fc71d6c945328e35f6ce907c47
[ "MIT" ]
null
null
null
JupyterNotebooks/Labs/Lab 7.ipynb
WolfyVST/CMPT-220L-203-22S
200cc519c0d177fc71d6c945328e35f6ce907c47
[ "MIT" ]
30
2022-01-21T00:05:12.000Z
2022-02-24T19:41:48.000Z
34.050633
156
0.593309
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
d0a331b46418476716a37fe18cf30d8575e53093
166,463
ipynb
Jupyter Notebook
ML_AI/PyTorch/Denoising_Autoencoder_MNIST.ipynb
PranavHegde99/OpenOctober
1018f92d9aedee168539004cd15f098d927aeeca
[ "Apache-2.0" ]
32
2020-10-17T09:58:41.000Z
2021-10-13T04:43:35.000Z
ML_AI/PyTorch/Denoising_Autoencoder_MNIST.ipynb
vasu-1/OpenOctober
0cfd89ea6e0343e2d89c4d10b544c1a8e55f083a
[ "Apache-2.0" ]
380
2020-10-18T15:35:49.000Z
2021-12-25T05:03:50.000Z
ML_AI/PyTorch/Denoising_Autoencoder_MNIST.ipynb
vasu-1/OpenOctober
0cfd89ea6e0343e2d89c4d10b544c1a8e55f083a
[ "Apache-2.0" ]
68
2020-10-17T17:29:54.000Z
2021-10-13T04:43:35.000Z
65.100899
63,898
0.694977
[ [ [ "## Denoising Autoencoder on MNIST dataset\n* This notebook will give you a very good understanding abou denoising autoencoders\n* For more information: visit [here](https://lilianweng.github.io/lil-log/2018/08/12/from-autoencoder-to-beta-vae.html)\n* The entire notebook is in PyTorch", "_____no_output_____" ] ], [ [ "# Importing packages that will be necessary for the project\nimport numpy as np\nfrom keras.datasets import mnist\nimport matplotlib.pyplot as plt\nfrom tqdm import tqdm\nfrom torchvision import transforms\nimport torch.nn as nn\nfrom torch.utils.data import DataLoader,Dataset\nimport torch\nimport torch.optim as optim\nfrom torch.autograd import Variable", "_____no_output_____" ], [ "# Mounting the google drive to fetch data from it\nfrom google.colab import drive\ndrive.mount('/content/gdrive')", "Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3aietf%3awg%3aoauth%3a2.0%3aoob&response_type=code&scope=email%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdocs.test%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive.photos.readonly%20https%3a%2f%2fwww.googleapis.com%2fauth%2fpeopleapi.readonly\n\nEnter your authorization code:\n··········\nMounted at /content/gdrive\n" ], [ "#loading the mnist data\n(x_train,y_train),(x_test,y_test)=mnist.load_data()\nprint(\"No of train datapoints:{}\\nNo of test datapoints:{}\".format(len(x_train),len(x_test)))", "Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/mnist.npz\n11493376/11490434 [==============================] - 0s 0us/step\nNo of train datapoints:60000\nNo of test datapoints:10000\n" ], [ "print(y_train[1]) # Checking labels", "0\n" ], [ "#we add the noise \n\"\"\"\n 'gauss' Gaussian-distributed additive noise.\n 'speckle' out = image + n*image,where\n n is uniform noise with specified mean & variance. 
\n\"\"\"\n\ndef add_noise(img,noise_type=\"gaussian\"):#input includes the type of the noise to be added and the input image\n \n row,col=28,28\n img=img.astype(np.float32)\n \n if noise_type==\"gaussian\":\n noise=np.random.normal(-5.9,5.9,img.shape) #input includes : mean, deviation, shape of the image and the function picks up a normal distribuition.\n noise=noise.reshape(row,col) # reshaping the noise \n img=img+noise #adding the noise\n return img\n\n if noise_type==\"speckle\":\n noise=np.random.randn(row,col)\n noise=noise.reshape(row,col)\n img=img+img*noise\n return img", "_____no_output_____" ], [ "#Now dividing the dataset into two parts and adding gaussian to one and speckle to another.\nnoises=[\"gaussian\",\"speckle\"]\nnoise_ct=0\nnoise_id=0 #id represnts which noise is being added, its 0 = gaussian and 1 = speckle\ntraindata=np.zeros((60000,28,28)) #revised training data\n\n\n\nfor idx in tqdm(range(len(x_train))): #for the first half we are using gaussian noise & for the second half speckle noise\n \n if noise_ct<(len(x_train)/2):\n noise_ct+=1\n traindata[idx]=add_noise(x_train[idx],noise_type=noises[noise_id])\n \n else:\n print(\"\\n{} noise addition completed to images\".format(noises[noise_id]))\n noise_id+=1\n noise_ct=0\n\n\nprint(\"\\n{} noise addition completed to images\".format(noises[noise_id])) \n\n\n\n\nnoise_ct=0\nnoise_id=0\ntestdata=np.zeros((10000,28,28))\n\nfor idx in tqdm(range(len(x_test))): # Doing the same for the test set.\n \n if noise_ct<(len(x_test)/2):\n noise_ct+=1\n x=add_noise(x_test[idx],noise_type=noises[noise_id])\n testdata[idx]=x\n \n else:\n print(\"\\n{} noise addition completed to images\".format(noises[noise_id]))\n noise_id+=1\n noise_ct=0\n\n\nprint(\"\\n{} noise addition completed to images\".format(noises[noise_id]))", " 53%|█████▎ | 32057/60000 [00:01<00:01, 19218.50it/s]" ], [ "f, axes=plt.subplots(2,2) #setting up 4 figures\n\n#showing images with gaussian noise\naxes[0,0].imshow(x_train[0],cmap=\"gray\")#the original data\naxes[0,0].set_title(\"Original Image\")\naxes[1,0].imshow(traindata[0],cmap='gray')#noised data\naxes[1,0].set_title(\"Noised Image\")\n\n#showing images with speckle noise\naxes[0,1].imshow(x_train[25000],cmap='gray')#original data\naxes[0,1].set_title(\"Original Image\")\naxes[1,1].imshow(traindata[25000],cmap=\"gray\")#noised data\naxes[1,1].set_title(\"Noised Image\")", "_____no_output_____" ], [ "#creating a dataset builder i.e dataloaders\nclass noisedDataset(Dataset):\n \n def __init__(self,datasetnoised,datasetclean,labels,transform):\n self.noise=datasetnoised\n self.clean=datasetclean\n self.labels=labels\n self.transform=transform\n \n def __len__(self):\n return len(self.noise)\n \n def __getitem__(self,idx):\n xNoise=self.noise[idx]\n xClean=self.clean[idx]\n y=self.labels[idx]\n \n if self.transform != None:#just for using the totensor transform\n xNoise=self.transform(xNoise)\n xClean=self.transform(xClean)\n \n \n return (xNoise,xClean,y)", "_____no_output_____" ], [ "#defining the totensor transforms\ntsfms=transforms.Compose([\n transforms.ToTensor()\n])\n\ntrainset=noisedDataset(traindata,x_train,y_train,tsfms)# the labels should not be corrupted because the model has to learn uniques features and denoise it.\ntestset=noisedDataset(testdata,x_test,y_test,tsfms)", "_____no_output_____" ], [ "batch_size=32\n\n\n#creating the dataloader\ntrainloader=DataLoader(trainset,batch_size=32,shuffle=True)\ntestloader=DataLoader(testset,batch_size=1,shuffle=True)", "_____no_output_____" ], [ 
"#building our ae model:\nclass denoising_model(nn.Module):\n def __init__(self):\n super(denoising_model,self).__init__()\n self.encoder=nn.Sequential(\n nn.Linear(28*28,256),#decreasing the features in the encoder\n nn.ReLU(True),\n nn.Linear(256,128),\n nn.ReLU(True),\n nn.Linear(128,64),\n nn.ReLU(True)\n \n )\n \n self.decoder=nn.Sequential(\n nn.Linear(64,128),#increasing the number of features\n nn.ReLU(True),\n nn.Linear(128,256),\n nn.ReLU(True),\n nn.Linear(256,28*28),\n nn.Sigmoid(),\n )\n \n \n def forward(self,x):\n x=self.encoder(x)#first the encoder\n x=self.decoder(x)#then the decoder to reconstruct the original input.\n \n return x", "_____no_output_____" ], [ "#this is the training code, can be modified according to requirements\n#setting the device\nif torch.cuda.is_available()==True:\n device=\"cuda:0\"\nelse:\n device =\"cpu\"\n\n \nmodel=denoising_model().to(device)\ncriterion=nn.MSELoss()\noptimizer=optim.SGD(model.parameters(),lr=0.01,weight_decay=1e-5)\n\n#setting the number of epochs\nepochs=120\nl=len(trainloader)\nlosslist=list()\nepochloss=0\nrunning_loss=0\nfor epoch in range(epochs):\n \n print(\"Entering Epoch: \",epoch)\n for dirty,clean,label in tqdm((trainloader)):\n \n \n dirty=dirty.view(dirty.size(0),-1).type(torch.FloatTensor)\n clean=clean.view(clean.size(0),-1).type(torch.FloatTensor)\n dirty,clean=dirty.to(device),clean.to(device)\n \n \n \n #-----------------Forward Pass----------------------\n output=model(dirty)\n loss=criterion(output,clean)\n #-----------------Backward Pass---------------------\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n \n running_loss+=loss.item()\n epochloss+=loss.item()\n #-----------------Log-------------------------------\n losslist.append(running_loss/l)\n running_loss=0\n print(\"======> epoch: {}/{}, Loss:{}\".format(epoch,epochs,loss.item()))", "\r 0%| | 0/1875 [00:00<?, ?it/s]" ], [ "#plotting the loss curve\nplt.plot(range(len(losslist)),losslist)", "_____no_output_____" ], [ "\"\"\"Here, we try to visualize some of the results.\n We randomly generate 6 numbers in between 1 and 10k , run them through the model,\n and show the results with comparisons\n \n \"\"\"\n\nf,axes= plt.subplots(6,3,figsize=(20,20))\naxes[0,0].set_title(\"Original Image\")\naxes[0,1].set_title(\"Dirty Image\")\naxes[0,2].set_title(\"Cleaned Image\")\n\ntest_imgs=np.random.randint(0,10000,size=6)\nfor idx in range((6)):\n dirty=testset[test_imgs[idx]][0]\n clean=testset[test_imgs[idx]][1]\n label=testset[test_imgs[idx]][2]\n dirty=dirty.view(dirty.size(0),-1).type(torch.FloatTensor)\n dirty=dirty.to(device)\n output=model(dirty)\n \n output=output.view(1,28,28)\n output=output.permute(1,2,0).squeeze(2)\n output=output.detach().cpu().numpy()\n \n dirty=dirty.view(1,28,28)\n dirty=dirty.permute(1,2,0).squeeze(2)\n dirty=dirty.detach().cpu().numpy()\n \n clean=clean.permute(1,2,0).squeeze(2)\n clean=clean.detach().cpu().numpy()\n \n axes[idx,0].imshow(clean,cmap=\"gray\")\n axes[idx,1].imshow(dirty,cmap=\"gray\")\n axes[idx,2].imshow(output,cmap=\"gray\")", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a331fdf7e580ed137908ed2bb01f2d60e998fb
16,307
ipynb
Jupyter Notebook
projects/crwa/data/sampling_datasets/bhushan-prototype.ipynb
TylerTsang/safe-water
8e3da323d587f6ea3640635d09628eaa9bca2d39
[ "MIT" ]
43
2018-10-02T23:08:21.000Z
2022-02-09T15:00:17.000Z
projects/crwa/data/sampling_datasets/bhushan-prototype.ipynb
TylerTsang/safe-water
8e3da323d587f6ea3640635d09628eaa9bca2d39
[ "MIT" ]
64
2019-02-06T00:33:32.000Z
2021-02-24T17:13:07.000Z
projects/crwa/data/sampling_datasets/bhushan-prototype.ipynb
TylerTsang/safe-water
8e3da323d587f6ea3640635d09628eaa9bca2d39
[ "MIT" ]
84
2018-09-28T12:43:02.000Z
2022-03-08T16:13:42.000Z
31.787524
158
0.452566
[ [ [ "# Cleaning the data to build the prototype for crwa\n\n### This data cleans the original sql output and performs cleaning tasks. Also checking validity of the results against original report found at\n### https://www.crwa.org/uploads/1/2/6/7/126781580/crwa_ecoli_web_2017_updated.xlsx", "_____no_output_____" ] ], [ [ "import pandas as pd\npd.options.display.max_rows = 999\nimport numpy as np\nimport matplotlib.pyplot as plt", "_____no_output_____" ], [ "df = pd.read_csv(\"data_for_prototype.csv\")", "_____no_output_____" ], [ "# There are 2 rows with Date = Null so droping those rows\ndf = df.dropna(subset=['Date_Collected'])\n", "_____no_output_____" ], [ "df.isna().sum()", "_____no_output_____" ], [ "# There are following types of invalids in Site_ID \ninvalids = [\"N/A\",\"NULL\",\"ND\"]", "_____no_output_____" ], [ "#Removing these invalid Site_IDs \n\ndf[\"Site_Name\"] = df[\"Site_Name\"].map(lambda x: np.nan if x in invalids else x)\ndf[\"Site_Name\"].fillna(\"ABCD\", inplace=True)", "_____no_output_____" ], [ "#Removing these invalid Town Names \n\ndf[\"Town\"] = df[\"Town\"].map(lambda x: np.nan if x in invalids else x)\ndf[\"Town\"].fillna(\"ABCD\", inplace=True)", "_____no_output_____" ], [ "df[\"River_Mile_Headwaters\"].describe", "_____no_output_____" ], [ "#Removing invalid Miles and selecting only numeric values for miles \n\n\ndf[\"River_Mile_Headwaters\"] = df[\"River_Mile_Headwaters\"].map(lambda x: np.nan if x in invalids else x)\ndf[\"River_Mile_Headwaters\"].fillna(\"00.0 MI\", inplace=True)\ndf[\"Mile\"] = pd.to_numeric(df[\"River_Mile_Headwaters\"].str[0:4])", "_____no_output_____" ], [ "#Removing invalid entrees and selecting only numeric values \n\n\ndf[\"Latitude_DD\"] = df[\"Latitude_DD\"].map(lambda x: np.nan if x in invalids else x)\ndf[\"Latitude_DD\"].fillna(\"00.0 MI\", inplace=True)\ndf[\"Longitude_DD\"] = df[\"Longitude_DD\"].map(lambda x: np.nan if x in invalids else x)\ndf[\"Longitude_DD\"].fillna(\"00.0 MI\", inplace=True)", "_____no_output_____" ], [ "#Removing invalid entrees and selecting only numeric values \n\n\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].map(lambda x: np.nan if x in invalids else x)\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].str.lstrip('>')\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].str.rstrip('>')\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].str.lstrip('<')\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].str.rstrip('<')\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].str.lstrip('*')\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].str.rstrip('*')\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].str.replace(',','')\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].str.replace('%','')\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].str.replace(' ','')\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].str.replace('ND','')\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].str.lstrip('.')\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].str.rstrip('.')\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].str.replace('6..25','6.25')\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].str.replace('480.81546.25291','480.81546')\ndf[\"Actual_Result\"] = df[\"Actual_Result\"].str.replace('379\\r\\n379',\"379\")\n\n", "_____no_output_____" ], [ "#Functiont to check if string can be converted to numeric\n\n#Input --> string\n#Output --> 1 if convertable else 0\n\ndef isInt_try(v):\n try: i = float(v)\n except: return False\n return True", "_____no_output_____" ], [ "# Applying above function to check any odd strings in Actual_Result Column\n\nfor i in 
df[\"Actual_Result\"]:\n if isInt_try(i) == 0:\n print(i)\n", "_____no_output_____" ], [ "# Checking any odd strings in Actual_Result Column\n\nfor i in df[\"Actual_Result\"]:\n if str(i).count('.') >= 2:\n print(i)\n", "_____no_output_____" ], [ "# Converting Actual_Result to numeric and Date_Collected to datetime data type\n\ndf[\"Actual_Result\"] = pd.to_numeric(df[\"Actual_Result\"])\ndf[\"Date_Collected\"] = pd.to_datetime(df[\"Date_Collected\"])", "_____no_output_____" ], [ "\"Slicing for E.coli\"\n\ndf_ecoli = df[df[\"Component_Name\"] == \"Escherichia coli\"]", "_____no_output_____" ], [ "df_ecoli.head()", "_____no_output_____" ], [ "# Validating against the original report\n\nresult = df_ecoli.loc[(df_ecoli.Town == \"Milford\") & (df_ecoli.Date_Collected == pd.to_datetime(\"2017-11-21 00:00:00-05:00\"))][\"Actual_Result\"]\nresult", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a353913ee67cf43c6c0ce18cea9db3c6c20929
928,804
ipynb
Jupyter Notebook
content/lectures/lecture01/notebook/cs109b_smoothing.ipynb
wfseaton/2020-CS109B
7b11a2c270144e4fed455b9c9e628222fa2f1f9a
[ "MIT" ]
null
null
null
content/lectures/lecture01/notebook/cs109b_smoothing.ipynb
wfseaton/2020-CS109B
7b11a2c270144e4fed455b9c9e628222fa2f1f9a
[ "MIT" ]
null
null
null
content/lectures/lecture01/notebook/cs109b_smoothing.ipynb
wfseaton/2020-CS109B
7b11a2c270144e4fed455b9c9e628222fa2f1f9a
[ "MIT" ]
null
null
null
471.474112
39,008
0.935908
[ [ [ "import pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nimport statsmodels.formula.api as sm\n\n%matplotlib inline ", "_____no_output_____" ], [ "diab = pd.read_csv(\"../data/diabetes.csv\")\nprint(\"\"\"\n# Variables are\n# subject: subject ID number\n# age: age diagnosed with diabetes\n# acidity: a measure of acidity called base deficit\n# y: natural log of serum C-peptide concentration\n# Original source is Sockett et al. (1987)\n# mentioned in Hastie and Tibshirani's book \n# \"Generalized Additive Models\".\n\"\"\"\n)", "\n# Variables are\n# subject: subject ID number\n# age: age diagnosed with diabetes\n# acidity: a measure of acidity called base deficit\n# y: natural log of serum C-peptide concentration\n# Original source is Sockett et al. (1987)\n# mentioned in Hastie and Tibshirani's book \n# \"Generalized Additive Models\".\n\n" ], [ "xpred = pd.DataFrame({\"age\":np.arange(0,16.1,0.1)})", "_____no_output_____" ], [ "ax0 = diab.plot.scatter(x='age',y='y',c='Red',title=\"Diabetes data\")\n_ = ax0.set_xlabel(\"Age at Diagnosis\")\n_ = ax0.set_ylabel(\"Log C-Peptide Concentration\")", "_____no_output_____" ] ], [ [ "## Linear Regression", "_____no_output_____" ] ], [ [ "model1 = sm.ols('y~age',data=diab)\nfit1_lm = model1.fit()\npred1 = fit1_lm.predict(xpred)\nprediction_output = fit1_lm.get_prediction(xpred).summary_frame()", "_____no_output_____" ], [ "ax1 = diab.plot.scatter(x='age',y='y',c='Red',title=\"Diabetes data with least-squares linear fit\")\nax1.set_xlabel(\"Age at Diagnosis\")\nax1.set_ylabel(\"Log C-Peptide Concentration\")\n\n\nax1.plot(xpred.age, prediction_output['mean'],color=\"green\")\n\nax1.plot(xpred.age, prediction_output['mean_ci_lower'], color=\"blue\",linestyle=\"dashed\")\nax1.plot(xpred.age, prediction_output['mean_ci_upper'], color=\"blue\",linestyle=\"dashed\");\n\nax1.plot(xpred.age, prediction_output['obs_ci_lower'], color=\"skyblue\",linestyle=\"dashed\")\nax1.plot(xpred.age, prediction_output['obs_ci_upper'], color=\"skyblue\",linestyle=\"dashed\");", "_____no_output_____" ] ], [ [ "`vander` is for Vandermonde. It's a matrix where the first column is $x^0$, the second is $x^1$, the third is $x^2$ and so on.\n\n` np.vander([6,3,5], 4, increasing=True) = \n array([[ 1, 6, 36, 216], \n [ 1, 3, 9, 27], \n [ 1, 5, 25, 125]])\n` \nIt's therefore similar to sklearn's `polynomial_features`, but because `vander` is numpy it can be used directly in the formula.\n\nSince we have a constant column in the matrix, we put a -1 in the formula to drop the additional constant term statsmodels would otherwise insert\n\nNote that this is **not** an _orthogonal_ polynomial basis. 
Our estimated coeffecients will be more sensitive to the data than they need to be.", "_____no_output_____" ] ], [ [ "fit2_lm = sm.ols(formula=\"y ~ np.vander(age, 4, increasing=True) -1\",data=diab).fit()\nfit2_lm = sm.ols(formula=\"y ~ age + np.power(age, 2) + np.power(age, 3)\",data=diab).fit()\n\npoly_predictions = fit2_lm.get_prediction(xpred).summary_frame()\npoly_predictions.head()", "_____no_output_____" ], [ "ax2 = diab.plot.scatter(x='age',y='y',c='Red',title=\"Diabetes data with least-squares cubic fit\")\nax2.set_xlabel(\"Age at Diagnosis\")\nax2.set_ylabel(\"Log C-Peptide Concentration\")\n\nax2.plot(xpred.age, poly_predictions['mean'],color=\"green\")\nax2.plot(xpred.age, poly_predictions['mean_ci_lower'], color=\"blue\",linestyle=\"dashed\")\nax2.plot(xpred.age, poly_predictions['mean_ci_upper'], color=\"blue\",linestyle=\"dashed\");\n\n#ax2.plot(xpred.age, poly_predictions['obs_ci_lower'], color=\"skyblue\",linestyle=\"dashed\")\n#ax2.plot(xpred.age, poly_predictions['obs_ci_upper'], color=\"skyblue\",linestyle=\"dashed\");", "_____no_output_____" ] ], [ [ "## Logistic Regression", "_____no_output_____" ] ], [ [ "diab['y_bin'] = 1*(diab['y'] > 4) # multiply by 1 because statsmodels wants 1s and 0s instead of true and false", "_____no_output_____" ], [ "logit_model = sm.logit(\"y_bin ~ age \", data = diab).fit()\nlogit_prediction = logit_model.predict(xpred)", "Optimization terminated successfully.\n Current function value: 0.353988\n Iterations 7\n" ], [ "from scipy.special import expit\nimport re\ndef get_logit_prediction_intervals(model, new_data_df):\n if type(new_data_df) != pd.DataFrame:\n raise TypeError('new_data_df must be a DataFrame')\n \n # transform the raw data according to the formula\n new_data_dict = {}\n for x in model.params.index:\n # only presently supports Intercept, a named column, and polynmoials created via np.vander\n # the trick is finding the correct base column in the raw data\n if x == \"Intercept\":\n new_data_dict[x] = np.ones(new_data_df.shape[0])\n elif x.startswith(\"np.vander(\"):\n try:\n will = re.match(r\"np.vander\\((.*), ?(.*)\\)\\[(.*)\\]\", x)\n column, power, index = will.groups()\n except e:\n raise ValueError(\"Couldn't parse formula-derived feature {}\".format(x))\n new_data_dict[x] = np.vander(new_data_df.loc[:,column], int(power))[:,int(index)]\n else:\n new_data_dict[x] = new_data_df.loc[:,x]\n new_data = pd.DataFrame(new_data_dict)\n \n variance_mat = model.cov_params()\n standard_devs = np.sqrt(np.sum(new_data.dot(variance_mat) * new_data, axis=1))\n \n linear_predictions = new_data.dot(model.params)\n output = pd.DataFrame({\"lower\": expit(linear_predictions - 1.96*standard_devs),\n \"predicted\": expit(linear_predictions),\n \"upper\": expit(linear_predictions + 1.96*standard_devs)\n })\n return output\n\nlogit_prediction_intervals = get_logit_prediction_intervals(logit_model, xpred)\nlogit_prediction_intervals", "_____no_output_____" ], [ "ax = diab.plot.scatter(x='age',y='y_bin',c='Red',title=\"Diabetes data with least-squares cubic fit\")\nax.set_xlabel(\"Age at Diagnosis\")\nax.set_ylabel(\"Log C-Peptide Concentration\")\n\nax.plot(xpred.age, logit_prediction_intervals[\"predicted\"],color=\"green\")\nax.plot(xpred.age, logit_prediction_intervals[\"lower\"], color=\"blue\",linestyle=\"dashed\")\nax.plot(xpred.age, logit_prediction_intervals[\"upper\"], color=\"blue\",linestyle=\"dashed\");\nplt.show()", "_____no_output_____" ], [ "logit_poly_model = sm.logit(\"y_bin ~ np.vander(age, 4) - 1\", data = 
diab).fit()\nlogit_poly_prediction = logit_poly_model.predict(xpred)", "Optimization terminated successfully.\n Current function value: 0.194005\n Iterations 10\n" ], [ "ax = diab.plot.scatter(x='age',y='y_bin',c='Red',title=\"Diabetes data with least-squares cubic fit\")\nax.set_xlabel(\"Age at Diagnosis\")\nax.set_ylabel(\"Log C-Peptide Concentration\")\n\nlogit_poly_prediction_intervals = get_logit_prediction_intervals(logit_poly_model, xpred)\n\nax.plot(xpred.age, logit_poly_prediction_intervals[\"predicted\"],color=\"green\")\nax.plot(xpred.age, logit_poly_prediction_intervals[\"lower\"], color=\"blue\",linestyle=\"dashed\")\nax.plot(xpred.age, logit_poly_prediction_intervals[\"upper\"], color=\"blue\",linestyle=\"dashed\");\nplt.show()", "_____no_output_____" ] ], [ [ "## Lo(w)ess", "_____no_output_____" ] ], [ [ "from statsmodels.nonparametric.smoothers_lowess import lowess as lowess\n\nlowess_models = {}\nfor cur_frac in [.15,.25,.7, 1]:\n lowess_models[cur_frac] = lowess(diab['y'],diab['age'],frac=cur_frac)", "_____no_output_____" ] ], [ [ "**Note** Python's lowess implementation does not have any tool to predict on new data; it only returns the fitted function's value at the training points. We're making up for that by drawing a straight line between consecutive fitted values. (There are other more sophisticated interpolation techniques, but the ideal approach would be to predict on new points using lowess itself. This is a limitation of the Python implementation, not lowess itself)", "_____no_output_____" ] ], [ [ "from scipy.interpolate import interp1d\nfor cur_frac, cur_model in lowess_models.items():\n ax = diab.plot.scatter(x='age',y='y',c='Red',title=\"Lowess Fit, Fraction = {}\".format(cur_frac))\n ax.set_xlabel(\"Age at Diagnosis\")\n ax.set_ylabel(\"Log C-Peptide Concentration\")\n lowess_interpolation = interp1d(cur_model[:,0], cur_model[:,1], bounds_error=False)\n ax.plot(xpred, lowess_interpolation(xpred), color=\"Blue\")\n plt.show()", "_____no_output_____" ], [ "ax = diab.plot.scatter(x='age',y='y',c='Red',title=\"Large variance, low bias smoother\")\nax.set_xlabel(\"Age at Diagnosis\")\nax.set_ylabel(\"Log C-Peptide Concentration\")\nlowess_interpolation = interp1d(lowess_models[.15][:,0], lowess_models[.15][:,1], bounds_error=False)\nax.plot(xpred, lowess_interpolation(xpred), color=\"lightgreen\")\nplt.show()\n\nax = diab.plot.scatter(x='age',y='y',c='Red',title=\"Low variance, large bias smoother\")\nax.set_xlabel(\"Age at Diagnosis\")\nax.set_ylabel(\"Log C-Peptide Concentration\")\nlowess_interpolation = interp1d(lowess_models[1][:,0], lowess_models[1][:,1], bounds_error=False)\nax.plot(xpred, lowess_interpolation(xpred), color=\"lightgreen\")\nplt.show()", "_____no_output_____" ] ], [ [ "## Splines (via knots)", "_____no_output_____" ], [ "Define a Relu/Truncated cubic function", "_____no_output_____" ] ], [ [ "def h(x, knot, exponent):\n output = np.power(x-knot, exponent)\n output[x<=knot] = 0\n \n return output", "_____no_output_____" ] ], [ [ "Transforming the x values [0,10] with a knot at 4, power 1", "_____no_output_____" ] ], [ [ "xvals = np.arange(0,10.1,0.1)\n\nplt.plot(xvals, h(xvals,4,1), color=\"red\")\nplt.title(\"Truncated linear basis function with knot at x=4\")\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$(x-4)_+$\") #note the use of TeX in the label\nplt.show()", "_____no_output_____" ] ], [ [ "Transforming the x values [0,10] with a knot at 4, power 3", "_____no_output_____" ] ], [ [ 
"plt.plot(xvals,h(xvals,4,3),color=\"red\")\nplt.title(\"Truncated cubic basis function with knot at x=4\")\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$(x-4)_+^3$\")\nplt.show()", "_____no_output_____" ] ], [ [ "The sum of three RELUs with different knots and different coeffecients", "_____no_output_____" ] ], [ [ "plt.plot(xvals, 3*h(xvals,2,1) - 4*h(xvals,5,1) + 0.5*h(xvals,8,1), color=\"red\")\nplt.title(\"Piecewise linear spline with knots at x=2, 5, and 8\")\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\nplt.show()", "_____no_output_____" ] ], [ [ "Above, but with a starting slope and intercept", "_____no_output_____" ] ], [ [ "plt.plot(xvals, 2 + xvals + 3*h(xvals,2,1) - 4*h(xvals,5,1) + 0.5*h(xvals,8,1), color=\"red\")\nplt.title(\"Piecewise linear spline with knots at x=2, 5, and 8\\n plus a starting slope and intercept\")\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\nplt.show()", "_____no_output_____" ] ], [ [ "Using OLS, we can find optimal coeffecients for RELUs with pre-specified knots, just like we can find optimal coeffecients for $x^2$ and $x^3$", "_____no_output_____" ] ], [ [ "# generate some fake data to fit\n\nx = np.arange(0.1,10,9.9/100) \nfrom scipy.stats import norm\ny = norm.ppf(x/10) + np.random.normal(0,0.4,100)", "_____no_output_____" ], [ "fitted_spline_model = sm.ols('y~x+h(x,2,1)+h(x,5,1)+h(x,8,1)',data={'x':x,'y':y}).fit()\n\nplt.scatter(x,y,facecolors='none', edgecolors='black')\nplt.title(\"3 knots\")\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\nplt.plot(x, fitted_spline_model.predict(),color=\"darkblue\", linewidth=2, label=\"Spline with knots at 2,5,8\")\nplt.plot(x, norm.ppf(x/10), color=\"red\", label=\"Truth\")\nplt.legend()\nplt.show()", "_____no_output_____" ] ], [ [ "More knots", "_____no_output_____" ] ], [ [ "fitted_spline_model = sm.ols('y~x+h(x,1,1)+h(x,2,1)+h(x,3.5,1)+h(x,5,1)+h(x,6.5,1)+h(x,8,1)',data={'x':x,'y':y}).fit()\n\nplt.scatter(x,y,facecolors='none', edgecolors='black')\nplt.title(\"6 knots\")\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\nplt.plot(x, fitted_spline_model.predict(),color=\"darkblue\", label=\"Linear Spline with knots at\\n1, 2, 3.5, 5, 6.5, 8\")\nplt.plot(x, norm.ppf(x/10), color=\"red\", label=\"Truth\")\nplt.legend()\nplt.show()", "_____no_output_____" ] ], [ [ "More knots", "_____no_output_____" ] ], [ [ "fitted_spline_model = sm.ols('y~x+h(x,1,1)+h(x,2,1)+h(x,3,1)+h(x,4,1)+h(x,5,1)+h(x,6,1)+h(x,7,1)+h(x,8,1)+h(x,9,1)',\n data={'x':x,'y':y}).fit()\n\nplt.scatter(x,y,facecolors='none', edgecolors='black')\nplt.title(\"9 knots\")\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\nplt.plot(x, fitted_spline_model.predict(),color=\"darkblue\", label=\"Linear Spline with 9 knots\")\nplt.plot(x, norm.ppf(x/10), color=\"red\", label=\"Truth\")\nplt.legend()\nplt.show()", "_____no_output_____" ] ], [ [ "Using code to write out the formula this time", "_____no_output_____" ] ], [ [ "n_knots = 25\ncomponents = ['h(x,{},1)'.format(x) for x in np.linspace(0,10,n_knots)]\nformula = ' + '.join(components)\nfinal_formula = 'y ~ x + ' + formula\n\nfinal_formula", "_____no_output_____" ], [ "fitted_spline_model = sm.ols(final_formula,data={'x':x,'y':y}).fit()\n\nplt.scatter(x,y,facecolors='none', edgecolors='black')\nplt.title(\"25 knots\")\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\nplt.plot(x, fitted_spline_model.predict(),color=\"darkblue\", label=\"Linear Spline with 25 knots\")\nplt.plot(x, norm.ppf(x/10), color=\"red\", label=\"Truth\")\nplt.legend()\nplt.show()", "_____no_output_____" ] ], [ [ "Cubic splines, instead of linear. 
Still using code to write the formula", "_____no_output_____" ] ], [ [ "components = ['h(x,{},3)'.format(x) for x in [2,5,8]]\nformula = ' + '.join(components)\nfinal_formula = 'y~x + np.power(x,2) + np.power(x,3) + ' + formula\n\nfitted_spline_model = sm.ols(final_formula,data={'x':x,'y':y}).fit()\n\nplt.scatter(x,y,facecolors='none', edgecolors='black')\nplt.title(\"3 knots\")\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\nplt.plot(x, fitted_spline_model.predict(),color=\"darkblue\", label=\"Cubic Spline with 3 knots\")\nplt.plot(x, norm.ppf(x/10), color=\"red\", label=\"Truth\")\nplt.legend()\nplt.show()", "_____no_output_____" ], [ "components = ['h(x,{},3)'.format(x) for x in [1,2,3.5,5,6.5,8]]\nformula = ' + '.join(components)\nfinal_formula = 'y~x + np.power(x,2) + np.power(x,3) + ' + formula\n\nfitted_spline_model = sm.ols(final_formula,data={'x':x,'y':y}).fit()\n\nplt.scatter(x,y,facecolors='none', edgecolors='black')\nplt.title(\"6 knots\")\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\nplt.plot(x, fitted_spline_model.predict(),color=\"darkblue\", label=\"Cubic Spline with 6 knots\")\nplt.plot(x, norm.ppf(x/10), color=\"red\", label=\"Truth\")\nplt.legend()\nplt.show()", "_____no_output_____" ], [ "n_knots = 9\ncomponents = ['h(x,{},3)'.format(x) for x in np.linspace(0,10,n_knots)]\nformula = ' + '.join(components)\nfinal_formula = 'y~x + np.power(x,2) + np.power(x,3) + ' + formula\n\nfitted_spline_model = sm.ols(final_formula,data={'x':x,'y':y}).fit()\n\nplt.scatter(x,y,facecolors='none', edgecolors='black')\nplt.title(\"9 knots\")\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\nplt.plot(x, fitted_spline_model.predict(),color=\"darkblue\", label=\"Cubic Spline with 9 knots\")\nplt.plot(x, norm.ppf(x/10), color=\"red\", label=\"Truth\")\nplt.legend()\nplt.show()", "_____no_output_____" ], [ "n_knots = 25\ncomponents = ['h(x,{},3)'.format(x) for x in np.linspace(0,10,n_knots)]\nformula = ' + '.join(components)\nfinal_formula = 'y~x + np.power(x,2) + np.power(x,3) + ' + formula\n\nfitted_spline_model = sm.ols(final_formula,data={'x':x,'y':y}).fit()\n\nplt.scatter(x,y,facecolors='none', edgecolors='black')\nplt.title(\"25 knots\")\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\nplt.plot(x, fitted_spline_model.predict(),color=\"darkblue\", label=\"Cubic Spline with 25 knots\")\nplt.plot(x, norm.ppf(x/10), color=\"red\", label=\"Truth\")\nplt.legend()\nplt.show()", "_____no_output_____" ] ], [ [ "## Smoothing splines", "_____no_output_____" ] ], [ [ "from scipy.interpolate import UnivariateSpline", "_____no_output_____" ] ], [ [ "This method won't allow tied values, and wants its values in sorted order. 
Add a tiny bit of noise to the x values and sort them.", "_____no_output_____" ] ], [ [ "diab['noisy_age'] = diab['age'] + np.random.normal(0,.001, len(diab))\nsorted_noisy_diab = diab.sort_values([\"noisy_age\"])", "_____no_output_____" ], [ "smoothing_spline_dict = {}\nfor cur_smoothing in [0, 5, 10, 20, np.inf]:\n \n cur_spline = UnivariateSpline(sorted_noisy_diab['noisy_age'],sorted_noisy_diab['y'], s=cur_smoothing)\n ax = diab.plot.scatter(x='age',y='y',c='Red',title=\"Smoothing Spline with s={}\".format(cur_smoothing))\n ax.plot(xpred, cur_spline(xpred))\n # the jaggedness in the original came from plot's implicit linear interpolation\n# lowess_interpolation = interp1d(diab['age'], cur_spline(diab['age']), bounds_error=False)\n# ax.plot(xpred, lowess_interpolation(xpred), color=\"Blue\")\n ax.set_ylim(2,7)", "_____no_output_____" ] ], [ [ "Even when we request infinite smoothness, we don't end up with a horizontal line (no derivatives at all)- we end up with a single cubic curve. That's because we chose a cubic spline.\n\nIn general, this implementation's smoothness regularization penalizes any derivatives _beyond_ what our basic spline needs. So with k=3, as above, the penalty is on the 4th derivative, and with k=2 the penalty is on the 3rd derivative and the \"infinitely smooth\" result will be a parabola. (i.e. a curve with 3rd derivative=0 everywhere)", "_____no_output_____" ], [ "Selecting smoothing level by CV", "_____no_output_____" ] ], [ [ "from sklearn.model_selection import KFold\nfrom sklearn.metrics import r2_score\n\ncandidate_smoothings = [0, 5, 10, 20, 200]\n\nkf = KFold(n_splits=5, random_state=47, shuffle=True)\nscores = np.zeros((5,len(candidate_smoothings)))\n\nfor i, (train_index, test_index) in enumerate(kf.split(sorted_noisy_diab)):\n train_df = sorted_noisy_diab.iloc[train_index,:]\n test_df = sorted_noisy_diab.iloc[test_index,:]\n for j,cur_smoothing in enumerate(candidate_smoothings):\n cur_model = UnivariateSpline(train_df['noisy_age'],train_df['y'], s=cur_smoothing)\n \n scores[i,j] = r2_score(test_df['y'], cur_model(test_df['noisy_age']))\n \nnp.mean(scores, axis=0)", "_____no_output_____" ], [ "best_s = candidate_smoothings[np.argmax(np.mean(scores, axis=0))]\n\nax = diab.plot.scatter(x='age',y='y',c='Red',title=\"smoothing spline with s={}, chosen by cross-validation\".format(best_s))\nbest_model = UnivariateSpline(sorted_noisy_diab['noisy_age'],sorted_noisy_diab['y'], s=best_s)\nax.plot(xpred, best_model(xpred), color=\"darkgreen\")\nplt.show()", "_____no_output_____" ], [ "#We will now work with a new dataset, called GAGurine.\n#The dataset description (from the R package MASS) is below:\nprint(\"\"\"\n# Data were collected on the concentration of a chemical GAG \n# in the urine of 314 children aged from zero to seventeen years. \n# The aim of the study was to produce a chart to help a paediatrican\n# to assess if a child's GAG concentration is ‘normal’.\n\n# The variables are:\n# Age: age of child in years.\n# GAG: concentration of GAG (the units have been lost).\n\"\"\")", "\n# Data were collected on the concentration of a chemical GAG \n# in the urine of 314 children aged from zero to seventeen years. 
\n# The aim of the study was to produce a chart to help a paediatrican\n# to assess if a child's GAG concentration is ‘normal’.\n\n# The variables are:\n# Age: age of child in years.\n# GAG: concentration of GAG (the units have been lost).\n\n" ], [ "GAGurine = pd.read_csv(\"../data/GAGurine.csv\")\nGAGurine['Age'] = GAGurine['Age']+np.random.normal(0,0.001, len(GAGurine))\nGAGurine = GAGurine.sort_values(['Age'])\nax = GAGurine.plot.scatter(x='Age',y='GAG',c='black',title=\"GAG in urine of children\")\nax.set_xlabel(\"Age\")\nax.set_ylabel(\"GAG\")\nplt.show()", "_____no_output_____" ] ], [ [ "Get quartiles", "_____no_output_____" ] ], [ [ "quarts = GAGurine['Age'].quantile([0.25, 0.5, 0.75]).values.reshape(-1)", "_____no_output_____" ] ], [ [ "Build a Bspline model. Call `splrep` (spline representation) to find the knots and coeffecients that smooth the given data, then call BSpline to build something that can predict on given values.", "_____no_output_____" ] ], [ [ "from scipy.interpolate import splrep\nfrom scipy.interpolate import BSpline\n\nt,c,k = splrep(GAGurine['Age'].values, GAGurine['GAG'].values, t=quarts)\nb_spline_model = BSpline(t,c,k)\nb_spline_model(7)", "_____no_output_____" ] ], [ [ "`LSQUnivariateSpline` fits splines to data, using user-specified knots", "_____no_output_____" ] ], [ [ "from scipy.interpolate import LSQUnivariateSpline\n\nnatural_spline_model = LSQUnivariateSpline(GAGurine['Age'].values, GAGurine['GAG'].values, quarts)", "_____no_output_____" ], [ "ax = GAGurine.plot.scatter(x='Age',y='GAG',c='grey',title=\"GAG in urine of children\")\nax.plot(GAGurine['Age'], b_spline_model(GAGurine['Age']), label=\"B-spline, knots at quartiles\")\nplt.legend()\nplt.show()", "_____no_output_____" ] ], [ [ "## GAMs", "_____no_output_____" ] ], [ [ "kyphosis = pd.read_csv(\"../data/kyphosis.csv\")\nkyphosis[\"outcome\"] = 1*(kyphosis[\"Kyphosis\"] == \"present\")\nkyphosis.describe()", "_____no_output_____" ], [ "from pygam import LogisticGAM, s\n\n\nX = kyphosis[[\"Age\",\"Number\",\"Start\"]]\ny = kyphosis[\"outcome\"]\nkyph_gam = LogisticGAM(s(0)+s(1)+s(2)).fit(X,y)", "_____no_output_____" ] ], [ [ "GAMs provide plots of the effect of increasing each variable (conditional on / adjusted for the other variables)", "_____no_output_____" ] ], [ [ "res = kyph_gam.deviance_residuals(X,y)\nfor i, term in enumerate(kyph_gam.terms):\n if term.isintercept:\n continue\n\n XX = kyph_gam.generate_X_grid(term=i)\n pdep, confi = kyph_gam.partial_dependence(term=i, X=XX, width=0.95)\n pdep2, _ = kyph_gam.partial_dependence(term=i, X=X, width=0.95)\n plt.figure()\n plt.scatter(X.iloc[:,term.feature], pdep2 + res)\n plt.plot(XX[:, term.feature], pdep)\n plt.plot(XX[:, term.feature], confi, c='r', ls='--')\n plt.title(X.columns.values[term.feature])\n plt.show()", "_____no_output_____" ] ], [ [ "AIC is a measure of model quality, estimating performance on a test set (without actually needing a test set). It can be used to compare and select two models.", "_____no_output_____" ] ], [ [ "kyph_gam.summary()", "LogisticGAM \n=============================================== ==========================================================\nDistribution: BinomialDist Effective DoF: 14.2822\nLink Function: LogitLink Log Likelihood: -18.4906\nNumber of Samples: 81 AIC: 65.5456\n AICc: 73.2352\n UBRE: 2.9503\n Scale: 1.0\n Pseudo R-Squared: 0.5557\n==========================================================================================================\nFeature Function Lambda Rank EDoF P > x Sig. 
Code \n================================= ==================== ============ ============ ============ ============\ns(0) [0.6] 20 7.5 5.57e-01 \ns(1) [0.6] 20 4.9 1.15e-01 \ns(2) [0.6] 20 1.9 1.01e-01 \nintercept 1 0.0 8.17e-03 ** \n==========================================================================================================\nSignificance codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1\n\nWARNING: Fitting splines and a linear function to a feature introduces a model identifiability problem\n which can cause p-values to appear significant when they are not.\n\nWARNING: p-values calculated in this manner behave correctly for un-penalized models or models with\n known smoothing parameters, but when smoothing parameters have been estimated, the p-values\n are typically lower than they should be, meaning that the tests reject the null too readily.\n" ], [ "X = kyphosis[[\"Age\",\"Number\",\"Start\"]]\ny = kyphosis[\"outcome\"]\nsmall_kyph_gam = LogisticGAM(s(0)+s(2)).fit(X,y)\n\n\nres = small_kyph_gam.deviance_residuals(X,y)\nfor i, term in enumerate(small_kyph_gam.terms):\n if term.isintercept:\n continue\n\n XX = small_kyph_gam.generate_X_grid(term=i)\n pdep, confi = small_kyph_gam.partial_dependence(term=i, X=XX, width=0.95)\n pdep2, _ = small_kyph_gam.partial_dependence(term=i, X=X, width=0.95)\n plt.figure()\n plt.scatter(X.iloc[:,term.feature], pdep2 + res)\n plt.plot(XX[:, term.feature], pdep)\n plt.plot(XX[:, term.feature], confi, c='r', ls='--')\n plt.title(X.columns.values[term.feature])\n plt.show()", "_____no_output_____" ] ], [ [ "The original model's AIC was lower, so we prefer that model- it is expected to do better on out-of-sample data.", "_____no_output_____" ] ], [ [ "small_kyph_gam.summary()", "LogisticGAM \n=============================================== ==========================================================\nDistribution: BinomialDist Effective DoF: 10.478\nLink Function: LogitLink Log Likelihood: -23.3388\nNumber of Samples: 81 AIC: 67.6337\n AICc: 71.8141\n UBRE: 2.9385\n Scale: 1.0\n Pseudo R-Squared: 0.4392\n==========================================================================================================\nFeature Function Lambda Rank EDoF P > x Sig. Code \n================================= ==================== ============ ============ ============ ============\ns(0) [0.6] 20 6.8 5.27e-01 \ns(2) [0.6] 20 3.7 6.86e-02 . \nintercept 1 0.0 6.57e-03 ** \n==========================================================================================================\nSignificance codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1\n\nWARNING: Fitting splines and a linear function to a feature introduces a model identifiability problem\n which can cause p-values to appear significant when they are not.\n\nWARNING: p-values calculated in this manner behave correctly for un-penalized models or models with\n known smoothing parameters, but when smoothing parameters have been estimated, the p-values\n are typically lower than they should be, meaning that the tests reject the null too readily.\n" ] ] ]
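As a quick numeric cross-check of the AIC comparison above — a sketch I am adding, assuming the usual definition AIC = 2k − 2·log-likelihood with pygam's reported "Effective DoF" standing in for the parameter count k — the printed AIC values can be reproduced by hand:

```python
# Cross-check of the AIC values printed above, assuming AIC = 2*k - 2*log_likelihood
# with pygam's "Effective DoF" standing in for the parameter count k (my assumption).
aic_three_term = 2 * 14.2822 - 2 * (-18.4906)   # ~65.55, matches the first summary
aic_two_term   = 2 * 10.478  - 2 * (-23.3388)   # ~67.63, matches the second summary
print(aic_three_term, aic_two_term)             # the smaller AIC (three-term model) is preferred
```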
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ] ]
d0a35533f80ca6a79f811876d8d8446b3daa22a6
72992
ipynb
Jupyter Notebook
07-time-series/07-time-series.ipynb
uwsd/2018-datascience-lectures
4ddaa512d3f618e3fcd36b1f8151687e9ea1facf
[ "MIT" ]
36
2018-01-09T18:25:45.000Z
2021-08-08T02:15:43.000Z
07-time-series/07-time-series.ipynb
uwsd/2018-datascience-lectures
4ddaa512d3f618e3fcd36b1f8151687e9ea1facf
[ "MIT" ]
1
2018-11-16T10:49:32.000Z
2018-11-17T12:35:17.000Z
07-time-series/07-time-series.ipynb
uwsd/2018-datascience-lectures
4ddaa512d3f618e3fcd36b1f8151687e9ea1facf
[ "MIT" ]
41
2018-01-10T20:40:25.000Z
2020-08-20T03:25:15.000Z
54.228826
1521
0.614972
[ [ [ "# MATH 4100: Temporal data analysis and applications to stock analysis\n*Curtis Miller*\n\n## Introduction\n\nThis is a lecture for [MATH 4100/CS 5160: Introduction to Data Science](http://datasciencecourse.net/), offered at the University of Utah, introducing time series data analysis applied to finance.\n\nAdvanced mathematics and statistics have been present in finance for some time. Prior to the 1980s, banking and finance were well-known for being \"boring\"; investment banking was distinct from commercial banking and the primary role of the industry was handling \"simple\" (at least in comparison to today) financial instruments, such as loans. Deregulation under the Regan administration, coupled with an influx of mathematical talent, transformed the industry from the \"boring\" business of banking to what it is today, and since then, finance has joined the other sciences as a motivation for mathematical research and advancement. For example one of the biggest recent achievements of mathematics was the derivation of the [Black-Scholes formula](https://en.wikipedia.org/wiki/Black%E2%80%93Scholes_model), which facilitated the pricing of stock options (a contract giving the holder the right to purchase or sell a stock at a particular price to the issuer of the option). That said, [bad statistical models, including the Black-Scholes formula, hold part of the blame for the 2008 financial crisis](https://www.theguardian.com/science/2012/feb/12/black-scholes-equation-credit-crunch).\n\nIn recent years, computer science has joined advanced mathematics in revolutionizing finance and **trading**, the practice of buying and selling of financial assets for the purpose of making a profit. In recent years, trading has become dominated by computers; algorithms are responsible for making rapid split-second trading decisions faster than humans could make (so rapidly, [the speed at which light travels is a limitation when designing systems](http://www.nature.com/news/physics-in-finance-trading-at-the-speed-of-light-1.16872)). Additionally, [machine learning and data mining techniques are growing in popularity](http://www.ft.com/cms/s/0/9278d1b6-1e02-11e6-b286-cddde55ca122.html#axzz4G8daZxcl) in the financial sector, and likely will continue to do so. For example, **high-frequency trading (HFT)** is a branch of algorithmic trading where computers make thousands of trades in short periods of time, engaging in complex strategies such as statistical arbitrage and market making. While algorithms may outperform humans, the technology is still new and playing an increasing role in a famously turbulent, high-stakes arena. HFT was responsible for phenomena such as the [2010 flash crash](https://en.wikipedia.org/wiki/2010_Flash_Crash) and a [2013 flash crash](http://money.cnn.com/2013/04/24/investing/twitter-flash-crash/) prompted by a hacked [Associated Press tweet](http://money.cnn.com/2013/04/23/technology/security/ap-twitter-hacked/index.html?iid=EL) about an attack on the White House.\n\nThis lecture, however, will not be about how to crash the stock market with bad mathematical models or trading algorithms. Instead, I intend to provide you with basic tools for handling and analyzing stock market data with Python. We will be using stock data as a first exposure to **time series data**, which is data considered dependent on the time it was observed (other examples of time series include temperature data, demand for energy on a power grid, Internet server load, and many, many others). 
I will also discuss moving averages, how to construct trading strategies using moving averages, how to formulate exit strategies upon entering a position, and how to evaluate a strategy with backtesting.\n\n**DISCLAIMER: THIS IS NOT FINANCIAL ADVICE!!! Furthermore, I have ZERO experience as a trader (a lot of this knowledge comes from a one-semester course on stock trading I took at Salt Lake Community College)! This is purely introductory knowledge, not enough to make a living trading stocks. People can and do lose money trading stocks, and you do so at your own risk!**\n\n## Preliminaries\n\nI will be using two packages, **quandl** and **pandas_datareader**, which are not installed with [Anaconda](https://www.anaconda.com/) if you are using it. To install these packages, run the following at the appropriate command prompt:\n\n conda install quandl\n conda install pandas-datareader\n\n## Getting and Visualizing Stock Data\n\n### Getting Data from Quandl\n\nBefore we analyze stock data, we need to get it into some workable format. Stock data can be obtained from [Yahoo! Finance](http://finance.yahoo.com), [Google Finance](http://finance.google.com), or a number of other sources. These days I recommend getting data from [Quandl](https://www.quandl.com/), a provider of community-maintained financial and economic data. (Yahoo! Finance used to be the go-to source for good quality stock data, but the API was discontinued in 2017 and reliable data can no longer be obtained: see [this question/answer on StackExchange](https://quant.stackexchange.com/questions/35019/is-yahoo-finance-data-good-or-bad-now) for more details.)\n\nBy default the `get()` function in **quandl** will return a **pandas** `DataFrame` containing the fetched data.", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport quandl\nimport datetime\n\n# We will look at stock prices over the past year, starting at January 1, 2016\nstart = datetime.datetime(2016,1,1)\nend = datetime.date.today()\n\n# Let's get Apple stock data; Apple's ticker symbol is AAPL\n# First argument is the series we want, second is the source (\"yahoo\" for Yahoo! Finance), third is the start date, fourth is the end date\ns = \"AAPL\"\napple = quandl.get(\"WIKI/\" + s, start_date=start, end_date=end)\n\ntype(apple)", "_____no_output_____" ], [ "apple.head()", "_____no_output_____" ] ], [ [ "Let's briefly discuss this. **Open** is the price of the stock at the beginning of the trading day (it need not be the closing price of the previous trading day), **high** is the highest price of the stock on that trading day, **low** the lowest price of the stock on that trading day, and **close** the price of the stock at closing time. **Volume** indicates how many stocks were traded. **Adjusted** prices (such as the adjusted close) is the price of the stock that adjusts the price for corporate actions. While stock prices are considered to be set mostly by traders, **stock splits** (when the company makes each extant stock worth two and halves the price) and **dividends** (payout of company profits per share) also affect the price of a stock and should be accounted for.\n\n### Visualizing Stock Data\n\nNow that we have stock data we would like to visualize it. I first demonstrate how to do so using the **matplotlib** package. 
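Before moving on to plotting, a small aside on the adjusted prices mentioned above. The numbers below are a toy illustration I am adding (they are hypothetical, not from the data pull): with a 2-for-1 split, prices recorded before the split are typically divided by 2 so the series stays comparable over time.

```python
import pandas as pd

# Hypothetical prices with a 2-for-1 split taking effect on the third day.
raw = pd.Series([100.0, 102.0, 51.5, 52.0],
                index=pd.to_datetime(["2016-01-04", "2016-01-05", "2016-01-06", "2016-01-07"]),
                name="Close")
split_factor = pd.Series([0.5, 0.5, 1.0, 1.0], index=raw.index)  # pre-split prices get halved
adjusted = raw * split_factor
print(adjusted)  # 50.0, 51.0, 51.5, 52.0 -- the artificial 50% "drop" disappears
```

Dividends are handled in a similar multiplicative way, which is why the adjusted close, rather than the raw close, is the right series for return calculations.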
Notice that the `apple` `DataFrame` object has a convenience method, `plot()`, which makes creating plots easier.", "_____no_output_____" ] ], [ [ "import matplotlib.pyplot as plt # Import matplotlib\n# This line is necessary for the plot to appear in a Jupyter notebook\n%matplotlib inline\n# Control the default size of figures in this Jupyter notebook\n%pylab inline\npylab.rcParams['figure.figsize'] = (15, 9) # Change the size of plots\n\napple[\"Adj. Close\"].plot(grid = True) # Plot the adjusted closing price of AAPL", "_____no_output_____" ] ], [ [ "A linechart is fine, but there are at least four variables involved for each date (open, high, low, and close), and we would like to have some visual way to see all four variables that does not require plotting four separate lines. Financial data is often plotted with a **Japanese candlestick plot**, so named because it was first created by 18th century Japanese rice traders. Such a chart can be created with **matplotlib**, though it requires considerable effort.\n\nI have made a function you are welcome to use to more easily create candlestick charts from **pandas** data frames, and use it to plot our stock data. (Code is based off [this example](http://matplotlib.org/examples/pylab_examples/finance_demo.html), and you can read the documentation for the functions involved [here](http://matplotlib.org/api/finance_api.html).)", "_____no_output_____" ] ], [ [ "from matplotlib.dates import DateFormatter, WeekdayLocator,\\\n DayLocator, MONDAY\nfrom matplotlib.finance import candlestick_ohlc\n\ndef pandas_candlestick_ohlc(dat, stick = \"day\", adj = False, otherseries = None):\n \"\"\"\n :param dat: pandas DataFrame object with datetime64 index, and float columns \"Open\", \"High\", \"Low\", and \"Close\", likely created via DataReader from \"yahoo\"\n :param stick: A string or number indicating the period of time covered by a single candlestick. Valid string inputs include \"day\", \"week\", \"month\", and \"year\", (\"day\" default), and any numeric input indicates the number of trading days included in a period\n :param adj: A boolean indicating whether to use adjusted prices\n :param otherseries: An iterable that will be coerced into a list, containing the columns of dat that hold other series to be plotted as lines\n \n This will show a Japanese candlestick plot for stock data stored in dat, also plotting other series if passed.\n \"\"\"\n mondays = WeekdayLocator(MONDAY) # major ticks on the mondays\n alldays = DayLocator() # minor ticks on the days\n dayFormatter = DateFormatter('%d') # e.g., 12\n \n # Create a new DataFrame which includes OHLC data for each period specified by stick input\n fields = [\"Open\", \"High\", \"Low\", \"Close\"]\n if adj:\n fields = [\"Adj. 
\" + s for s in fields]\n transdat = dat.loc[:,fields]\n transdat.columns = pd.Index([\"Open\", \"High\", \"Low\", \"Close\"])\n if (type(stick) == str):\n if stick == \"day\":\n plotdat = transdat\n stick = 1 # Used for plotting\n elif stick in [\"week\", \"month\", \"year\"]:\n if stick == \"week\":\n transdat[\"week\"] = pd.to_datetime(transdat.index).map(lambda x: x.isocalendar()[1]) # Identify weeks\n elif stick == \"month\":\n transdat[\"month\"] = pd.to_datetime(transdat.index).map(lambda x: x.month) # Identify months\n transdat[\"year\"] = pd.to_datetime(transdat.index).map(lambda x: x.isocalendar()[0]) # Identify years\n grouped = transdat.groupby(list(set([\"year\",stick]))) # Group by year and other appropriate variable\n plotdat = pd.DataFrame({\"Open\": [], \"High\": [], \"Low\": [], \"Close\": []}) # Create empty data frame containing what will be plotted\n for name, group in grouped:\n plotdat = plotdat.append(pd.DataFrame({\"Open\": group.iloc[0,0],\n \"High\": max(group.High),\n \"Low\": min(group.Low),\n \"Close\": group.iloc[-1,3]},\n index = [group.index[0]]))\n if stick == \"week\": stick = 5\n elif stick == \"month\": stick = 30\n elif stick == \"year\": stick = 365\n \n elif (type(stick) == int and stick >= 1):\n transdat[\"stick\"] = [np.floor(i / stick) for i in range(len(transdat.index))]\n grouped = transdat.groupby(\"stick\")\n plotdat = pd.DataFrame({\"Open\": [], \"High\": [], \"Low\": [], \"Close\": []}) # Create empty data frame containing what will be plotted\n for name, group in grouped:\n plotdat = plotdat.append(pd.DataFrame({\"Open\": group.iloc[0,0],\n \"High\": max(group.High),\n \"Low\": min(group.Low),\n \"Close\": group.iloc[-1,3]},\n index = [group.index[0]]))\n \n else:\n raise ValueError('Valid inputs to argument \"stick\" include the strings \"day\", \"week\", \"month\", \"year\", or a positive integer')\n\n\n # Set plot parameters, including the axis object ax used for plotting\n fig, ax = plt.subplots()\n fig.subplots_adjust(bottom=0.2)\n if plotdat.index[-1] - plotdat.index[0] < pd.Timedelta('730 days'):\n weekFormatter = DateFormatter('%b %d') # e.g., Jan 12\n ax.xaxis.set_major_locator(mondays)\n ax.xaxis.set_minor_locator(alldays)\n else:\n weekFormatter = DateFormatter('%b %d, %Y')\n ax.xaxis.set_major_formatter(weekFormatter)\n \n ax.grid(True)\n \n # Create the candelstick chart\n candlestick_ohlc(ax, list(zip(list(date2num(plotdat.index.tolist())), plotdat[\"Open\"].tolist(), plotdat[\"High\"].tolist(),\n plotdat[\"Low\"].tolist(), plotdat[\"Close\"].tolist())),\n colorup = \"black\", colordown = \"red\", width = stick * .4)\n \n # Plot other series (such as moving averages) as lines\n if otherseries != None:\n if type(otherseries) != list:\n otherseries = [otherseries]\n dat.loc[:,otherseries].plot(ax = ax, lw = 1.3, grid = True)\n\n ax.xaxis_date()\n ax.autoscale_view()\n plt.setp(plt.gca().get_xticklabels(), rotation=45, horizontalalignment='right')\n\n plt.show()\n\npandas_candlestick_ohlc(apple, adj=True)", "_____no_output_____" ] ], [ [ "With a candlestick chart, a black candlestick indicates a day where the closing price was higher than the open (a gain), while a red candlestick indicates a day where the open was higher than the close (a loss). The wicks indicate the high and the low, and the body the open and close (hue is used to determine which end of the body is the open and which the close). 
Candlestick charts are popular in finance and some strategies in [technical analysis](https://en.wikipedia.org/wiki/Technical_analysis) use them to make trading decisions, depending on the shape, color, and position of the candles. I will not cover such strategies today.\n\nWe may wish to plot multiple financial instruments together; we may want to compare stocks, compare them to the market, or look at other securities such as [exchange-traded funds (ETFs)](https://en.wikipedia.org/wiki/Exchange-traded_fund). Later, we will also want to see how to plot a financial instrument against some indicator, like a moving average. For this you would rather use a line chart than a candlestick chart. (How would you plot multiple candlestick charts on top of one another without cluttering the chart?)\n\nBelow, I get stock data for some other tech companies and plot their adjusted close together.", "_____no_output_____" ] ], [ [ "microsoft, google = (quandl.get(\"WIKI/\" + s, start_date=start, end_date=end) for s in [\"MSFT\", \"GOOG\"])\n\n# Below I create a DataFrame consisting of the adjusted closing price of these stocks, first by making a list of these objects and using the join method\nstocks = pd.DataFrame({\"AAPL\": apple[\"Adj. Close\"],\n \"MSFT\": microsoft[\"Adj. Close\"],\n \"GOOG\": google[\"Adj. Close\"]})\n\nstocks.head()", "_____no_output_____" ], [ "stocks.plot(grid = True)", "_____no_output_____" ] ], [ [ "What's wrong with this chart? While absolute price is important (pricy stocks are difficult to purchase, which affects not only their volatility but *your* ability to trade that stock), when trading, we are more concerned about the relative change of an asset rather than its absolute price. Google's stocks are much more expensive than Apple's or Microsoft's, and this difference makes Apple's and Microsoft's stocks appear much less volatile than they truly are (that is, their price appears to not deviate much).\n\nOne solution would be to use two different scales when plotting the data; one scale will be used by Apple and Microsoft stocks, and the other by Google.", "_____no_output_____" ] ], [ [ "stocks.plot(secondary_y = [\"AAPL\", \"MSFT\"], grid = True)", "_____no_output_____" ] ], [ [ "A \"better\" solution, though, would be to plot the information we actually want: the stock's returns. This involves transforming the data into something more useful for our purposes. There are multiple transformations we could apply.\n\nOne transformation would be to consider the stock's return since the beginning of the period of interest. In other words, we plot:\n\n\\begin{equation*}\n\\text{return}_{t,0} = \\frac{\\text{price}_t}{\\text{price}_0}\n\\end{equation*}\n\nThis will require transforming the data in the `stocks` object, which I do next. Notice that I am using a **lambda function**, which allows me to pass a small function defined quickly as a parameter to another function or method (you can read more about lambda functions [here](https://docs.python.org/3/reference/expressions.html#lambda)).", "_____no_output_____" ] ], [ [ "# df.apply(arg) will apply the function arg to each column in df, and return a DataFrame with the result\n# Recall that lambda x is an anonymous function accepting parameter x; in this case, x will be a pandas Series object\nstock_return = stocks.apply(lambda x: x / x[0])\nstock_return.head()", "_____no_output_____" ], [ "stock_return.plot(grid = True).axhline(y = 1, color = \"black\", lw = 2)", "_____no_output_____" ] ], [ [ "This is a much more useful plot. 
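As an aside I am adding here: the same "return since the start of the period" transform can be written without `apply` and a lambda, because dividing the frame by its first row broadcasts column-wise. This is a sketch assuming the `stocks` frame defined above:

```python
# Equivalent to stocks.apply(lambda x: x / x[0]): divide every column by its own first value.
stock_return_alt = stocks.div(stocks.iloc[0])
stock_return_alt.head()
```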
We can now see how profitable each stock was since the beginning of the period. Furthermore, we see that these stocks are highly correlated; they generally move in the same direction, a fact that was difficult to see in the other charts.\n\nAlternatively, we could plot the change of each stock per day. One way to do so would be to plot the percentage increase of a stock when comparing day $t$ to day $t + 1$, with the formula:\n\n\\begin{equation*}\n\\text{growth}_t = \\frac{\\text{price}_{t + 1} - \\text{price}_t}{\\text{price}_t}\n\\end{equation*}\n\nBut change could be thought of differently as:\n\n\\begin{equation*}\n\\text{increase}_t = \\frac{\\text{price}_{t} - \\text{price}_{t-1}}{\\text{price}_t}\n\\end{equation*}\n\nThese formulas are not the same and can lead to differing conclusions, but there is another way to model the growth of a stock: with log differences.\n\n\\begin{equation*}\n\\text{change}_t = \\log(\\text{price}_{t}) - \\log(\\text{price}_{t - 1})\n\\end{equation*}\n\n(Here, $\\log$ is the natural log, and our definition does not depend as strongly on whether we use $\\log(\\text{price}_{t}) - \\log(\\text{price}_{t - 1})$ or $\\log(\\text{price}_{t+1}) - \\log(\\text{price}_{t})$.) The advantage of using log differences is that this difference can be interpreted as the percentage change in a stock but does not depend on the denominator of a fraction. Additionally, log differences have a desirable property: the sum of the log differences can be interpreted as the total change (as a percentage) over the period summed (which is not a property of the other formulations; they will overestimate growth). Log differences also more cleanly correspond to how stock prices are modeled in continuous time.\n\nWe can obtain and plot the log differences of the data in `stocks` as follows:", "_____no_output_____" ] ], [ [ "# Let's use NumPy's log function, though math's log function would work just as well\nimport numpy as np\n\nstock_change = stocks.apply(lambda x: np.log(x) - np.log(x.shift(1))) # shift moves dates back by 1.\nstock_change.head()", "_____no_output_____" ], [ "stock_change.plot(grid = True).axhline(y = 0, color = \"black\", lw = 2)", "_____no_output_____" ] ], [ [ "Which transformation do you prefer? Looking at returns since the beginning of the period make the overall trend of the securities in question much more apparent. Changes between days, though, are what more advanced methods actually consider when modelling the behavior of a stock. so they should not be ignored.\n\nWe often want to compare the performance of stocks to the performance of the overall market. [SPY](https://finance.yahoo.com/quote/SPY/), which is the ticker symbol for the SPDR S&P 500 exchange-traded mutual fund (ETF), is a fund that attempts only to imitate the composition of the [S&P 500 stock index](https://finance.yahoo.com/quote/%5EGSPC?p=^GSPC), and thus represents the value in \"the market.\"\n\nSPY data is not available for free from Quandl, so I will get this data from Yahoo! Finance. (I don't have a choice.)\n\nBelow I get data for SPY and compare its performance to the performance of our stocks.", "_____no_output_____" ] ], [ [ "#import pandas_datareader.data as web # Going to get SPY from Yahoo! 
(I know I said you shouldn't but I didn't have a choice)\n#spyder = web.DataReader(\"SPY\", \"yahoo\", start, end) # Didn't work\n#spyder = web.DataReader(\"SPY\", \"google\", start, end) # Didn't work either\n# If all else fails, read from a file, obtained from here: http://www.nasdaq.com/symbol/spy/historical\nspyderdat = pd.read_csv(\"/home/curtis/Downloads/HistoricalQuotes.csv\") # Obviously specific to my system; set to\n # location on your machine\nspyderdat = pd.DataFrame(spyderdat.loc[:, [\"open\", \"high\", \"low\", \"close\", \"close\"]].iloc[1:].as_matrix(),\n index=pd.DatetimeIndex(spyderdat.iloc[1:, 0]),\n columns=[\"Open\", \"High\", \"Low\", \"Close\", \"Adj Close\"]).sort_index()\n\nspyder = spyderdat.loc[start:end]\n\nstocks = stocks.join(spyder.loc[:, \"Adj Close\"]).rename(columns={\"Adj Close\": \"SPY\"})\nstocks.head()", "_____no_output_____" ], [ "stock_return = stocks.apply(lambda x: x / x[0])\nstock_return.plot(grid = True).axhline(y = 1, color = \"black\", lw = 2)", "_____no_output_____" ], [ "stock_change = stocks.apply(lambda x: np.log(x) - np.log(x.shift(1)))\nstock_change.plot(grid=True).axhline(y = 0, color = \"black\", lw = 2)", "_____no_output_____" ] ], [ [ "## Classical Risk Metrics\n\nFrom what we have so far we can already compute informative metrics for our stocks, which can be considered some measure of risk.\n\nFirst, we will want to **annualize** our returns, thus computing the **annual percentage rate (APR)**. This helps us keep returns on a common time scale.", "_____no_output_____" ] ], [ [ "stock_change_apr = stock_change * 252 * 100 # There are 252 trading days in a year; the 100 converts to percentages\nstock_change_apr.tail()", "_____no_output_____" ] ], [ [ "Some of these numbers look initially like nonsense, but that's okay for now.\n\nThe metrics I want are:\n* The average return\n* Volatility (the standard deviation of returns)\n* $\\alpha$ and $\\beta$\n* The Sharpe ratio\n\nThe first two metrics are largely self-explanatory, but the latter two need explaining.\n\nFirst, the **risk-free rate**, which I denote by $r_{RF}$, is the rate of return on a risk-free financial asset. This asset exists only in theory but often yields on low-risk instruments like 3-month U.S. Treasury Bills can be viewed as being virtually risk-free and thus their yields can be used to approximate the risk-free rate. I get the data for these instruments below.", "_____no_output_____" ] ], [ [ "tbill = quandl.get(\"FRED/TB3MS\", start_date=start, end_date=end)\ntbill.tail()", "_____no_output_____" ], [ "tbill.plot()", "_____no_output_____" ], [ "rrf = tbill.iloc[-1, 0] # Get the most recent Treasury Bill rate\nrrf", "_____no_output_____" ] ], [ [ "Now, a **linear regression model** is a model of the following form:\n\n$$y_i = \\alpha + \\beta x_i + \\epsilon_i$$\n\n$\\epsilon_i$ is an error process. Another way to think of this process model is:\n\n$$\\hat{y}_i = \\alpha + \\beta x_i$$\n\n$\\hat{y}_i$ is the **predicted value** of $y_i$ given $x_i$. In other words, a linear regression model tells you how $x_i$ and $y_i$ are related, and how values of $x_i$ can be used to predict values of $y_i$. $\\alpha$ is the **intercept** of the model and $\\beta$ is the **slope**. 
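To make the intercept/slope interpretation concrete before continuing, here is a tiny synthetic example — my own sketch, not part of the original analysis — recovering a known α and β with NumPy:

```python
import numpy as np

rng = np.random.RandomState(42)
x = rng.normal(size=500)
y = 1.5 + 2.0 * x + rng.normal(scale=0.5, size=500)   # true intercept 1.5, true slope 2.0

slope, intercept = np.polyfit(x, y, 1)   # degree-1 fit returns (slope, intercept)
print(intercept, slope)                  # estimates land near 1.5 and 2.0
```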
In particular, $\\alpha$ would be the predicted value of $y$ if $x$ were zero, and $\\beta$ gives how much $y$ changes when $x$ changes by one unit.\n\nThere is an easy way to compute $\\alpha$ and $\\beta$ given the sample means $\\bar{x}$ and $\\bar{y}$ and sample standard deviations $s_x$ and $s_y$ and the correlation between $x$ and $y$, denoted with $r$:\n\n$$\\beta = r \\frac{s_y}{s_x}$$\n$$\\alpha = \\bar{y} - \\beta \\bar{x}$$\n\nIn finance, we use $\\alpha$ and $\\beta$ like so:\n\n$$R_t - r_{RF} = \\alpha + \\beta (R_{Mt} - r_{RF}) + \\epsilon_t$$\n\n$R_t$ is the return of a financial asset (a stock) and $R_t - r_{RF}$ is the **excess return**, or return exceeding the risk-free rate of return. $R_{Mt}$ is the return of the *market* at time $t$. Then $\\alpha$ and $\\beta can be interpreted like so:\n\n* $\\alpha$ is average excess return over the market.\n* $\\beta$ is how much a stock moves in relation to the market. If $\\beta > 0$ then the stock generally moves in the same direction as the market, while when $\\beta < 0$ the stock generally moves in the opposite direction. If $|\\beta| > 1$ the stock moves strongly in response to the market $|\\beta| < 1$ the stock is less responsive to the market.\n\nBelow I get a **pandas** `Series` that contains how much each stock is correlated with SPY (our approximation of the market).", "_____no_output_____" ] ], [ [ "smcorr = stock_change_apr.drop(\"SPY\", 1).corrwith(stock_change_apr.SPY) # Since RRF is constant it doesn't change the\n # correlation so we can ignore it in our\n # calculation\nsmcorr", "_____no_output_____" ] ], [ [ "Then I compute $\\alpha$ and $\\beta$.", "_____no_output_____" ] ], [ [ "sy = stock_change_apr.drop(\"SPY\", 1).std()\nsx = stock_change_apr.SPY.std()\nsy", "_____no_output_____" ], [ "sx", "_____no_output_____" ], [ "ybar = stock_change_apr.drop(\"SPY\", 1).mean() - rrf\nxbar = stock_change_apr.SPY.mean() - rrf\nybar", "_____no_output_____" ], [ "xbar", "_____no_output_____" ], [ "beta = smcorr * sy / sx\nalpha = ybar - beta * xbar\nbeta", "_____no_output_____" ], [ "alpha", "_____no_output_____" ] ], [ [ "The **Sharpe ratio** is another popular risk metric, defined below:\n\n$$\\text{Sharpe ratio} = \\frac{\\bar{R_t} - r_{RF}}{s}$$\n\nHere $s$ is the volatility of the stock. We want the sharpe ratio to be large. A large Sharpe ratio indicates that the stock's excess returns are large relative to the stock's volatilitly. Additionally, the Sharpe ratio is tied to a statistical test (the $t$-test) to determine if a stock earns more on average than the risk-free rate; the larger this ratio, the more likely this is to be the case.\n\nYour challenge now is to compute the Sharpe ratio for each stock listed here, and interpret it. Which stock seems to be the better investment according to the Sharpe ratio?", "_____no_output_____" ] ], [ [ "# Your code here", "_____no_output_____" ] ], [ [ "## Moving Averages\n\nCharts are very useful. In fact, some traders base their strategies almost entirely off charts (these are the \"technicians\", since trading strategies based off finding patterns in charts is a part of the trading doctrine known as **technical analysis**). 
Let's now consider how we can find trends in stocks.\n\nA **$q$-day moving average** is, for a series $x_t$ and a point in time $t$, the average of the past $q$ days: that is, if $MA^q_t$ denotes a moving average process, then:\n\n\\begin{equation*}\nMA^q_t = \\frac{1}{q} \\sum_{i = 0}^{q-1} x_{t - i}\n\\end{equation*}\n\nMoving averages smooth a series and helps identify trends. The larger $q$ is, the less responsive a moving average process is to short-term fluctuations in the series $x_t$. The idea is that moving average processes help identify trends from \"noise\". **Fast** moving averages have smaller $q$ and more closely follow the stock, while **slow** moving averages have larger $q$, resulting in them responding less to the fluctuations of the stock and being more stable.\n\n**pandas** provides functionality for easily computing moving averages. I demonstrate its use by creating a 20-day (one month) moving average for the Apple data, and plotting it alongside the stock.", "_____no_output_____" ] ], [ [ "apple[\"20d\"] = np.round(apple[\"Adj. Close\"].rolling(window = 20, center = False).mean(), 2)\npandas_candlestick_ohlc(apple.loc['2016-01-04':'2016-12-31',:], otherseries = \"20d\", adj=True)", "_____no_output_____" ] ], [ [ "Notice how late the rolling average begins. It cannot be computed until 20 days have passed. This limitation becomes more severe for longer moving averages. Because I would like to be able to compute 200-day moving averages, I'm going to extend out how much AAPL data we have. That said, we will still largely focus on 2016.", "_____no_output_____" ] ], [ [ "start = datetime.datetime(2010,1,1)\napple = quandl.get(\"WIKI/AAPL\", start_date=start, end_date=end)\napple[\"20d\"] = np.round(apple[\"Adj. Close\"].rolling(window = 20, center = False).mean(), 2)\n\npandas_candlestick_ohlc(apple.loc['2016-01-04':'2016-12-31',:], otherseries = \"20d\", adj=True)", "_____no_output_____" ] ], [ [ "You will notice that a moving average is much smoother than the actua stock data. Additionally, it's a stubborn indicator; a stock needs to be above or below the moving average line in order for the line to change direction. Thus, crossing a moving average signals a possible change in trend, and should draw attention.\n\nTraders are usually interested in multiple moving averages, such as the 20-day, 50-day, and 200-day moving averages. It's easy to examine multiple moving averages at once.", "_____no_output_____" ] ], [ [ "apple[\"50d\"] = np.round(apple[\"Adj. Close\"].rolling(window = 50, center = False).mean(), 2)\napple[\"200d\"] = np.round(apple[\"Adj. Close\"].rolling(window = 200, center = False).mean(), 2)\n\npandas_candlestick_ohlc(apple.loc['2016-01-04':'2016-12-31',:], otherseries = [\"20d\", \"50d\", \"200d\"], adj=True)", "_____no_output_____" ] ], [ [ "The 20-day moving average is the most sensitive to local changes, and the 200-day moving average the least. Here, the 200-day moving average indicates an overall **bearish** trend: the stock is trending downward over time. The 20-day moving average is at times bearish and at other times **bullish**, where a positive swing is expected. You can also see that the crossing of moving average lines indicate changes in trend. 
These crossings are what we can use as **trading signals**, or indications that a financial security is changind direction and a profitable trade might be made.\n\n## Trading Strategy\n\nOur concern now is to design and evaluate trading strategies.\n\nAny trader must have a set of rules that determine how much of her money she is willing to bet on any single trade. For example, a trader may decide that under no circumstances will she risk more than 10% of her portfolio on a trade. Additionally, in any trade, a trader must have an **exit strategy**, a set of conditions determining when she will exit the position, for either profit or loss. A trader may set a **target**, which is the minimum profit that will induce the trader to leave the position. Likewise, a trader may have a maximum loss she is willing to tolerate; if potential losses go beyond this amount, the trader will exit the position in order to prevent any further loss. We will suppose that the amount of money in the portfolio involved in any particular trade is a fixed proportion; 10% seems like a good number.\n\nHere, I will be demonstrating a [moving average crossover strategy](http://www.investopedia.com/university/movingaverage/movingaverages4.asp). We will use two moving averages, one we consider \"fast\", and the other \"slow\". The strategy is:\n\n* Trade the asset when the fast moving average crosses over the slow moving average.\n* Exit the trade when the fast moving average crosses over the slow moving average again.\n\nA trade will be prompted when the fast moving average crosses from below to above the slow moving average, and the trade will be exited when the fast moving average crosses below the slow moving average later.\n\nWe now have a complete strategy. But before we decide we want to use it, we should try to evaluate the quality of the strategy first. The usual means for doing so is **backtesting**, which is looking at how profitable the strategy is on historical data. For example, looking at the above chart's performance on Apple stock, if the 20-day moving average is the fast moving average and the 50-day moving average the slow, this strategy does not appear to be very profitable, at least not if you are always taking long positions.\n\nLet's see if we can automate the backtesting task. We first identify when the 20-day average is below the 50-day average, and vice versa.", "_____no_output_____" ] ], [ [ "apple['20d-50d'] = apple['20d'] - apple['50d']\napple.tail()", "_____no_output_____" ] ], [ [ "We will refer to the sign of this difference as the **regime**; that is, if the fast moving average is above the slow moving average, this is a bullish regime (the bulls rule), and a bearish regime (the bears rule) holds when the fast moving average is below the slow moving average. I identify regimes with the following code.", "_____no_output_____" ] ], [ [ "# np.where() is a vectorized if-else function, where a condition is checked for each component of a vector, and the first argument passed is used when the condition holds, and the other passed if it does not\napple[\"Regime\"] = np.where(apple['20d-50d'] > 0, 1, 0)\n# We have 1's for bullish regimes and 0's for everything else. 
Below I replace bearish regimes's values with -1, and to maintain the rest of the vector, the second argument is apple[\"Regime\"]\napple[\"Regime\"] = np.where(apple['20d-50d'] < 0, -1, apple[\"Regime\"])\napple.loc['2016-01-04':'2016-12-31',\"Regime\"].plot(ylim = (-2,2)).axhline(y = 0, color = \"black\", lw = 2)", "_____no_output_____" ], [ "apple[\"Regime\"].plot(ylim = (-2,2)).axhline(y = 0, color = \"black\", lw = 2)", "_____no_output_____" ], [ "apple[\"Regime\"].value_counts()", "_____no_output_____" ] ], [ [ "The last line above indicates that for 1005 days the market was bearish on Apple, while for 600 days the market was bullish, and it was neutral for 54 days.\n\nTrading signals appear at regime changes. When a bullish regime begins, a buy signal is triggered, and when it ends, a sell signal is triggered. Likewise, when a bearish regime begins, a sell signal is triggered, and when the regime ends, a buy signal is triggered (this is of interest only if you ever will short the stock, or use some derivative like a stock option to bet against the market).\n\nIt's simple to obtain signals. Let $r_t$ indicate the regime at time $t$, and $s_t$ the signal at time $t$. Then:\n\n\\begin{equation*}\ns_t = \\text{sign}(r_t - r_{t - 1})\n\\end{equation*}\n\n$s_t \\in \\{-1, 0, 1\\}$, with $-1$ indicating \"sell\", $1$ indicating \"buy\", and $0$ no action. We can obtain signals like so:", "_____no_output_____" ] ], [ [ "# To ensure that all trades close out, I temporarily change the regime of the last row to 0\nregime_orig = apple.loc[:, \"Regime\"].iloc[-1]\napple.loc[:, \"Regime\"].iloc[-1] = 0\napple[\"Signal\"] = np.sign(apple[\"Regime\"] - apple[\"Regime\"].shift(1))\n# Restore original regime data\napple.loc[:, \"Regime\"].iloc[-1] = regime_orig\napple.tail()", "_____no_output_____" ], [ "apple[\"Signal\"].plot(ylim = (-2, 2))", "_____no_output_____" ], [ "apple[\"Signal\"].value_counts()", "_____no_output_____" ] ], [ [ "We would buy Apple stock 23 times and sell Apple stock 23 times. If we only go long on Apple stock, only 23 trades will be engaged in over the 6-year period, while if we pivot from a long to a short position every time a long position is terminated, we would engage in 23 trades total. (Bear in mind that trading more frequently isn't necessarily good; trades are never free.)\n\nYou may notice that the system as it currently stands isn't very robust, since even a fleeting moment when the fast moving average is above the slow moving average triggers a trade, resulting in trades that end immediately (which is bad if not simply because realistically every trade is accompanied by a fee that can quickly erode earnings). Additionally, every bullish regime immediately transitions into a bearish regime, and if you were constructing trading systems that allow both bullish and bearish bets, this would lead to the end of one trade immediately triggering a new trade that bets on the market in the opposite direction, which again seems finnicky. A better system would require more evidence that the market is moving in some particular direction. 
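One way to demand "more evidence" before acting, as suggested above, is to require a new regime to persist for a few days before it is trusted. The helper below is my own illustrative sketch (the function name, the new column name, and the `n=3` threshold are arbitrary choices, not from the original notebook):

```python
def confirm_regime(regime, n=3):
    """Only adopt a new regime value after it has held for n consecutive days."""
    changed = regime != regime.shift(1)                 # True on the day the regime switches
    run_id = changed.cumsum()                           # label each run of identical values
    run_length = regime.groupby(run_id).cumcount() + 1  # how long the current run has lasted
    confirmed = regime.where(run_length >= n)           # ignore runs younger than n days
    return confirmed.ffill().fillna(0)                  # otherwise keep the last confirmed regime

# Example: a debounced regime column; signals could then be derived from it exactly as before.
apple["Confirmed Regime"] = confirm_regime(apple["Regime"], n=3)
```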
But we will not concern ourselves with these details for now.\n\nLet's now try to identify what the prices of the stock is at every buy and every sell.", "_____no_output_____" ] ], [ [ "apple.loc[apple[\"Signal\"] == 1, \"Close\"]", "_____no_output_____" ], [ "apple.loc[apple[\"Signal\"] == -1, \"Close\"]", "_____no_output_____" ], [ "# Create a DataFrame with trades, including the price at the trade and the regime under which the trade is made.\napple_signals = pd.concat([\n pd.DataFrame({\"Price\": apple.loc[apple[\"Signal\"] == 1, \"Adj. Close\"],\n \"Regime\": apple.loc[apple[\"Signal\"] == 1, \"Regime\"],\n \"Signal\": \"Buy\"}),\n pd.DataFrame({\"Price\": apple.loc[apple[\"Signal\"] == -1, \"Adj. Close\"],\n \"Regime\": apple.loc[apple[\"Signal\"] == -1, \"Regime\"],\n \"Signal\": \"Sell\"}),\n ])\napple_signals.sort_index(inplace = True)\napple_signals", "_____no_output_____" ], [ "# Let's see the profitability of long trades\napple_long_profits = pd.DataFrame({\n \"Price\": apple_signals.loc[(apple_signals[\"Signal\"] == \"Buy\") &\n apple_signals[\"Regime\"] == 1, \"Price\"],\n \"Profit\": pd.Series(apple_signals[\"Price\"] - apple_signals[\"Price\"].shift(1)).loc[\n apple_signals.loc[(apple_signals[\"Signal\"].shift(1) == \"Buy\") & (apple_signals[\"Regime\"].shift(1) == 1)].index\n ].tolist(),\n \"End Date\": apple_signals[\"Price\"].loc[\n apple_signals.loc[(apple_signals[\"Signal\"].shift(1) == \"Buy\") & (apple_signals[\"Regime\"].shift(1) == 1)].index\n ].index\n })\napple_long_profits", "_____no_output_____" ] ], [ [ "Let's now create a simulated portfolio of $1,000,000, and see how it would behave, according to the rules we have established. This includes:\n\n* Investing only 10% of the portfolio in any trade\n* Exiting the position if losses exceed 20% of the value of the trade.\n\nWhen simulating, bear in mind that:\n\n* Trades are done in batches of 100 stocks.\n* Our stop-loss rule involves placing an order to sell the stock the moment the price drops below the specified level. Thus we need to check whether the lows during this period ever go low enough to trigger the stop-loss. Realistically, unless we buy a put option, we cannot guarantee that we will sell the stock at the price we set at the stop-loss, but we will use this as the selling price anyway for the sake of simplicity.\n* Every trade is accompanied by a commission to the broker, which should be accounted for. I do not do so here.\n\nHere's how a backtest may look:", "_____no_output_____" ] ], [ [ "# We need to get the low of the price during each trade.\ntradeperiods = pd.DataFrame({\"Start\": apple_long_profits.index,\n \"End\": apple_long_profits[\"End Date\"]})\napple_long_profits[\"Low\"] = tradeperiods.apply(lambda x: min(apple.loc[x[\"Start\"]:x[\"End\"], \"Adj. Low\"]), axis = 1)\napple_long_profits", "_____no_output_____" ], [ "# Now we have all the information needed to simulate this strategy in apple_adj_long_profits\ncash = 1000000\napple_backtest = pd.DataFrame({\"Start Port. Value\": [],\n \"End Port. 
Value\": [],\n \"End Date\": [],\n \"Shares\": [],\n \"Share Price\": [],\n \"Trade Value\": [],\n \"Profit per Share\": [],\n \"Total Profit\": [],\n \"Stop-Loss Triggered\": []})\nport_value = .1 # Max proportion of portfolio bet on any trade\nbatch = 100 # Number of shares bought per batch\nstoploss = .2 # % of trade loss that would trigger a stoploss\nfor index, row in apple_long_profits.iterrows():\n batches = np.floor(cash * port_value) // np.ceil(batch * row[\"Price\"]) # Maximum number of batches of stocks invested in\n trade_val = batches * batch * row[\"Price\"] # How much money is put on the line with each trade\n if row[\"Low\"] < (1 - stoploss) * row[\"Price\"]: # Account for the stop-loss\n share_profit = np.round((1 - stoploss) * row[\"Price\"], 2)\n stop_trig = True\n else:\n share_profit = row[\"Profit\"]\n stop_trig = False\n profit = share_profit * batches * batch # Compute profits\n # Add a row to the backtest data frame containing the results of the trade\n apple_backtest = apple_backtest.append(pd.DataFrame({\n \"Start Port. Value\": cash,\n \"End Port. Value\": cash + profit,\n \"End Date\": row[\"End Date\"],\n \"Shares\": batch * batches,\n \"Share Price\": row[\"Price\"],\n \"Trade Value\": trade_val,\n \"Profit per Share\": share_profit,\n \"Total Profit\": profit,\n \"Stop-Loss Triggered\": stop_trig\n }, index = [index]))\n cash = max(0, cash + profit)\n\napple_backtest", "_____no_output_____" ], [ "apple_backtest[\"End Port. Value\"].plot()", "_____no_output_____" ] ], [ [ "Our portfolio's value grew by 13% in about six years. Considering that only 10% of the portfolio was ever involved in any single trade, this is not bad performance.\n\nNotice that this strategy never lead to our rule of never allowing losses to exceed 20% of the trade's value being invoked. For the sake of simplicity, we will ignore this rule in backtesting.\n\nA more realistic portfolio would not be betting 10% of its value on only one stock. A more realistic one would consider investing in multiple stocks. Multiple trades may be ongoing at any given time involving multiple companies, and most of the portfolio will be in stocks, not cash. Now that we will be investing in multiple stops and exiting only when moving averages cross (not because of a stop-loss), we will need to change our approach to backtesting. For example, we will be using one **pandas** `DataFrame` to contain all buy and sell orders for all stocks being considered, and our loop above will have to track more information.\n\nI have written functions for creating order data for multiple stocks, and a function for performing the backtesting.", "_____no_output_____" ] ], [ [ "def ma_crossover_orders(stocks, fast, slow):\n \"\"\"\n :param stocks: A list of tuples, the first argument in each tuple being a string containing the ticker symbol of each stock (or however you want the stock represented, so long as it's unique), and the second being a pandas DataFrame containing the stocks, with a \"Close\" column and indexing by date (like the data frames returned by the Yahoo! 
Finance API)\n :param fast: Integer for the number of days used in the fast moving average\n :param slow: Integer for the number of days used in the slow moving average\n \n :return: pandas DataFrame containing stock orders\n \n This function takes a list of stocks and determines when each stock would be bought or sold depending on a moving average crossover strategy, returning a data frame with information about when the stocks in the portfolio are bought or sold according to the strategy\n \"\"\"\n fast_str = str(fast) + 'd'\n slow_str = str(slow) + 'd'\n ma_diff_str = fast_str + '-' + slow_str\n \n trades = pd.DataFrame({\"Price\": [], \"Regime\": [], \"Signal\": []})\n for s in stocks:\n # Get the moving averages, both fast and slow, along with the difference in the moving averages\n s[1][fast_str] = np.round(s[1][\"Close\"].rolling(window = fast, center = False).mean(), 2)\n s[1][slow_str] = np.round(s[1][\"Close\"].rolling(window = slow, center = False).mean(), 2)\n s[1][ma_diff_str] = s[1][fast_str] - s[1][slow_str]\n \n # np.where() is a vectorized if-else function, where a condition is checked for each component of a vector, and the first argument passed is used when the condition holds, and the other passed if it does not\n s[1][\"Regime\"] = np.where(s[1][ma_diff_str] > 0, 1, 0)\n # We have 1's for bullish regimes and 0's for everything else. Below I replace bearish regimes's values with -1, and to maintain the rest of the vector, the second argument is apple[\"Regime\"]\n s[1][\"Regime\"] = np.where(s[1][ma_diff_str] < 0, -1, s[1][\"Regime\"])\n # To ensure that all trades close out, I temporarily change the regime of the last row to 0\n regime_orig = s[1].loc[:, \"Regime\"].iloc[-1]\n s[1].loc[:, \"Regime\"].iloc[-1] = 0\n s[1][\"Signal\"] = np.sign(s[1][\"Regime\"] - s[1][\"Regime\"].shift(1))\n # Restore original regime data\n s[1].loc[:, \"Regime\"].iloc[-1] = regime_orig\n \n # Get signals\n signals = pd.concat([\n pd.DataFrame({\"Price\": s[1].loc[s[1][\"Signal\"] == 1, \"Adj. Close\"],\n \"Regime\": s[1].loc[s[1][\"Signal\"] == 1, \"Regime\"],\n \"Signal\": \"Buy\"}),\n pd.DataFrame({\"Price\": s[1].loc[s[1][\"Signal\"] == -1, \"Adj. Close\"],\n \"Regime\": s[1].loc[s[1][\"Signal\"] == -1, \"Regime\"],\n \"Signal\": \"Sell\"}),\n ])\n signals.index = pd.MultiIndex.from_product([signals.index, [s[0]]], names = [\"Date\", \"Symbol\"])\n trades = trades.append(signals)\n \n trades.sort_index(inplace = True)\n trades.index = pd.MultiIndex.from_tuples(trades.index, names = [\"Date\", \"Symbol\"])\n \n return trades\n\n\ndef backtest(signals, cash, port_value = .1, batch = 100):\n \"\"\"\n :param signals: pandas DataFrame containing buy and sell signals with stock prices and symbols, like that returned by ma_crossover_orders\n :param cash: integer for starting cash value\n :param port_value: maximum proportion of portfolio to risk on any single trade\n :param batch: Trading batch sizes\n \n :return: pandas DataFrame with backtesting results\n \n This function backtests strategies, with the signals generated by the strategies being passed in the signals DataFrame. 
A fictitious portfolio is simulated and the returns generated by this portfolio are reported.\n \"\"\"\n \n SYMBOL = 1 # Constant for which element in index represents symbol\n portfolio = dict() # Will contain how many stocks are in the portfolio for a given symbol\n port_prices = dict() # Tracks old trade prices for determining profits\n # Dataframe that will contain backtesting report\n results = pd.DataFrame({\"Start Cash\": [],\n \"End Cash\": [],\n \"Portfolio Value\": [],\n \"Type\": [],\n \"Shares\": [],\n \"Share Price\": [],\n \"Trade Value\": [],\n \"Profit per Share\": [],\n \"Total Profit\": []})\n \n for index, row in signals.iterrows():\n # These first few lines are done for any trade\n shares = portfolio.setdefault(index[SYMBOL], 0)\n trade_val = 0\n batches = 0\n cash_change = row[\"Price\"] * shares # Shares could potentially be a positive or negative number (cash_change will be added in the end; negative shares indicate a short)\n portfolio[index[SYMBOL]] = 0 # For a given symbol, a position is effectively cleared\n \n old_price = port_prices.setdefault(index[SYMBOL], row[\"Price\"])\n portfolio_val = 0\n for key, val in portfolio.items():\n portfolio_val += val * port_prices[key]\n \n if row[\"Signal\"] == \"Buy\" and row[\"Regime\"] == 1: # Entering a long position\n batches = np.floor((portfolio_val + cash) * port_value) // np.ceil(batch * row[\"Price\"]) # Maximum number of batches of stocks invested in\n trade_val = batches * batch * row[\"Price\"] # How much money is put on the line with each trade\n cash_change -= trade_val # We are buying shares so cash will go down\n portfolio[index[SYMBOL]] = batches * batch # Recording how many shares are currently invested in the stock\n port_prices[index[SYMBOL]] = row[\"Price\"] # Record price\n old_price = row[\"Price\"]\n elif row[\"Signal\"] == \"Sell\" and row[\"Regime\"] == -1: # Entering a short\n pass\n # Do nothing; can we provide a method for shorting the market?\n #else:\n #raise ValueError(\"I don't know what to do with signal \" + row[\"Signal\"])\n \n pprofit = row[\"Price\"] - old_price # Compute profit per share; old_price is set in such a way that entering a position results in a profit of zero\n \n # Update report\n results = results.append(pd.DataFrame({\n \"Start Cash\": cash,\n \"End Cash\": cash + cash_change,\n \"Portfolio Value\": cash + cash_change + portfolio_val + trade_val,\n \"Type\": row[\"Signal\"],\n \"Shares\": batch * batches,\n \"Share Price\": row[\"Price\"],\n \"Trade Value\": abs(cash_change),\n \"Profit per Share\": pprofit,\n \"Total Profit\": batches * batch * pprofit\n }, index = [index]))\n cash += cash_change # Final change to cash balance\n \n results.sort_index(inplace = True)\n results.index = pd.MultiIndex.from_tuples(results.index, names = [\"Date\", \"Symbol\"])\n \n return results\n\n# Get more stocks\n(microsoft, google, facebook, twitter, netflix,\namazon, yahoo, ge, qualcomm, ibm, hp) = (quandl.get(\"WIKI/\" + s, start_date=start,\n end_date=end) for s in [\"MSFT\", \"GOOG\", \"FB\", \"TWTR\",\n \"NFLX\", \"AMZN\", \"YHOO\", \"GE\",\n \"QCOM\", \"IBM\", \"HPQ\"])", "_____no_output_____" ], [ "signals = ma_crossover_orders([(\"AAPL\", apple),\n (\"MSFT\", microsoft),\n (\"GOOG\", google),\n (\"FB\", facebook),\n (\"TWTR\", twitter),\n (\"NFLX\", netflix),\n (\"AMZN\", amazon),\n (\"YHOO\", yahoo),\n (\"GE\", ge),\n (\"QCOM\", qualcomm),\n (\"IBM\", ibm),\n (\"HPQ\", hp)],\n fast = 20, slow = 50)\nsignals", "_____no_output_____" ], [ "bk = backtest(signals, 1000000)\nbk", 
"_____no_output_____" ], [ "bk[\"Portfolio Value\"].groupby(level = 0).apply(lambda x: x[-1]).plot()", "_____no_output_____" ] ], [ [ "A more realistic portfolio that can invest in any in a list of twelve (tech) stocks has a final growth of about 100%. How good is this? While on the surface not bad, we will see we could have done better.\n\n## Benchmarking\n\nBacktesting is only part of evaluating the efficacy of a trading strategy. We would like to **benchmark** the strategy, or compare it to other available (usually well-known) strategies in order to determine how well we have done.\n\nWhenever you evaluate a trading system, there is one strategy that you should always check, one that beats all but a handful of managed mutual funds and investment managers: buy and hold [SPY](https://finance.yahoo.com/quote/SPY). The **efficient market hypothesis** claims that it is all but impossible for anyone to beat the market. Thus, one should always buy an index fund that merely reflects the composition of the market.By buying and holding SPY, we are effectively trying to match our returns with the market rather than beat it.\n\nI look at the profits for simply buying and holding SPY.", "_____no_output_____" ] ], [ [ "#spyder = web.DataReader(\"SPY\", \"yahoo\", start, end)\nspyder = spyderdat.loc[start:end]\nspyder.iloc[[0,-1],:]", "_____no_output_____" ], [ "batches = 1000000 // np.ceil(100 * spyder.loc[:,\"Adj Close\"].iloc[0]) # Maximum number of batches of stocks invested in\ntrade_val = batches * batch * spyder.loc[:,\"Adj Close\"].iloc[0] # How much money is used to buy SPY\nfinal_val = batches * batch * spyder.loc[:,\"Adj Close\"].iloc[-1] + (1000000 - trade_val) # Final value of the portfolio\nfinal_val", "_____no_output_____" ], [ "# We see that the buy-and-hold strategy beats the strategy we developed earlier. I would also like to see a plot.\nax_bench = (spyder[\"Adj Close\"] / spyder.loc[:, \"Adj Close\"].iloc[0]).plot(label = \"SPY\")\nax_bench = (bk[\"Portfolio Value\"].groupby(level = 0).apply(lambda x: x[-1]) / 1000000).plot(ax = ax_bench, label = \"Portfolio\")\nax_bench.legend(ax_bench.get_lines(), [l.get_label() for l in ax_bench.get_lines()], loc = 'best')\nax_bench", "_____no_output_____" ] ], [ [ "Buying and holding SPY beats our trading system, at least how we currently set it up, and we haven't even accounted for how expensive our more complex strategy is in terms of fees. Given both the opportunity cost and the expense associated with the active strategy, we should not use it.\n\nWhat could we do to improve the performance of our system? For starters, we could try diversifying. All the stocks we considered were tech companies, which means that if the tech industry is doing poorly, our portfolio will reflect that. We could try developing a system that can also short stocks or bet bearishly, so we can take advantage of movement in any direction. We could seek means for forecasting how high we expect a stock to move. Whatever we do, though, must beat this benchmark; otherwise there is an opportunity cost associated with our trading system.\n\nOther benchmark strategies exist, and if our trading system beat the \"buy and hold SPY\" strategy, we may check against them. Some such strategies include:\n\n* Buy SPY when its closing monthly price is aboves its ten-month moving average.\n* Buy SPY when its ten-month momentum is positive. 
(**Momentum** is the first difference of a moving average process, or $MO^q_t = MA^q_t - MA^q_{t - 1}$.)\n\n(I first read of these strategies [here](https://www.r-bloggers.com/are-r2s-useful-in-finance-hypothesis-driven-development-in-reverse/?utm_source=feedburner&utm_medium=email&utm_campaign=Feed%3A+RBloggers+%28R+bloggers%29).) A rough pandas sketch of these two benchmark rules appears at the end of this notebook. The general lesson still holds: *don't use a complex trading system with lots of active trading when a simple strategy involving an index fund without frequent trading beats it.* [This is actually a very difficult requirement to meet.](http://www.nytimes.com/2015/03/15/your-money/how-many-mutual-funds-routinely-rout-the-market-zero.html?_r=0)\n\nAs a final note, suppose that your trading system *did* manage to beat any baseline strategy thrown at it in backtesting. Does backtesting predict future performance? Not at all. [Backtesting has a propensity for overfitting](http://papers.ssrn.com/sol3/papers.cfm?abstract_id=2745220), so just because backtesting predicts high growth doesn't mean that growth will hold in the future. There are strategies for combatting overfitting, such as [walk-forward analysis](https://ntguardian.wordpress.com/2017/06/19/walk-forward-analysis-demonstration-backtrader/) and holding out a portion of a dataset (likely the most recent part) as a final test set to determine if a strategy is profitable, followed by \"sitting on\" a strategy that managed to survive these two filters and seeing if it remains profitable in current markets.\n\n## Conclusion\n\nWhile this lecture ends on a depressing note, keep in mind that [the efficient market hypothesis has many critics.](http://www.nytimes.com/2009/06/06/business/06nocera.html) My own opinion is that as trading becomes more algorithmic, beating the market will become more difficult. That said, it may be possible to beat the market, even though mutual funds seem incapable of doing so (bear in mind, though, that part of the reason mutual funds perform so poorly is because of fees, which is not a concern for index funds).\n\nThis lecture is very brief, covering only one type of strategy: strategies based on moving averages. Many other trading signals exist and are employed. Additionally, we never discussed in depth shorting stocks, currency trading, or stock options. Stock options, in particular, are a rich subject that offers many different ways to bet on the direction of a stock. You can read more about derivatives (including stock options and other derivatives) in the book *Derivatives Analytics with Python: Data Analysis, Models, Simulation, Calibration and Hedging*, [which is available from the University of Utah library.](http://proquest.safaribooksonline.com.ezproxy.lib.utah.edu/9781119037996)\n\nAnother resource (which I used as a reference while writing this lecture) is the O'Reilly book *Python for Finance*, [also available from the University of Utah library.](http://proquest.safaribooksonline.com.ezproxy.lib.utah.edu/book/programming/python/9781491945360)\n\nIf you were interested in investigating algorithmic trading, where would you go from here? I would not recommend using the code I wrote above for backtesting; there are better packages for this task. Python has some libraries for algorithmic trading, such as [**pyfolio**](https://quantopian.github.io/pyfolio/) (for analytics), [**zipline**](http://www.zipline.io/beginner-tutorial.html) (for backtesting and algorithmic trading), and [**backtrader**](https://www.backtrader.com/) (also for backtesting and trading).
**zipline** seems to be popular likely because it is used and developed by [**quantopian**](https://www.quantopian.com/), a \"crowd-sourced hedge fund\" that allows users to use their data for backtesting and will even license profitable strategies from their authors, giving them a cut of the profits. However, I prefer **backtrader** and have written [blog posts](https://ntguardian.wordpress.com/tag/backtrader/) on using it. It is likely the more complicated of the two, but that's the cost of greater power. I am a fan of its design. I would also suggest learning [R](https://www.r-project.org/), since it has many packages for analyzing financial data (more so than Python) and it's surprisingly easy to use R functions in Python (as I demonstrate in [this post](https://ntguardian.wordpress.com/2017/06/28/stock-trading-analytics-and-optimization-in-python-with-pyfolio-rs-performanceanalytics-and-backtrader/)).\n\nYou can read more about using R and Python for finance on [my blog](https://ntguardian.wordpress.com).\n\nRemember that it is possible (if not common) to lose money in the stock market. It's also true, though, that it's difficult to find returns like those found in stocks, and any strategy for investing in the stock market should be taken seriously. This lecture is intended to provide a starting point for evaluating stock trading and investments, and, more generally, analyzing temporal data, and I hope you continue to explore these ideas.", "_____no_output_____" ] ] ]
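The two SPY benchmark rules referenced above — hold when the monthly close is above its ten-month moving average, or when ten-month momentum is positive — can be sketched in a few lines of pandas. This is an illustrative sketch rather than part of the original lecture: it assumes the `spyder` DataFrame with an `Adj Close` column and a date index used earlier, and the month-end resampling and one-month signal lag are assumptions.

```python
# Hedged sketch of the two SPY benchmark rules described above.
# Assumes `spyder` is a daily price DataFrame indexed by date with an
# "Adj Close" column, as used earlier in this lecture.
monthly = spyder["Adj Close"].resample("M").last()   # month-end closes
ma10 = monthly.rolling(window=10).mean()             # ten-month moving average
momentum = ma10.diff()                               # MO_t = MA_t - MA_{t-1}

signal_ma = (monthly > ma10).astype(int)   # rule 1: close above ten-month MA
signal_mo = (momentum > 0).astype(int)     # rule 2: positive ten-month momentum

# Returns earned while each rule says "hold"; signals are lagged one month so
# a signal observed at a month-end is only acted on during the following month.
monthly_ret = monthly.pct_change()
bench_ma = (signal_ma.shift(1) * monthly_ret).fillna(0)
bench_mo = (signal_mo.shift(1) * monthly_ret).fillna(0)
print("10-month MA rule growth: ", (1 + bench_ma).prod() - 1)
print("10-month momentum growth:", (1 + bench_mo).prod() - 1)
```

Comparing these figures against the buy-and-hold SPY growth computed above gives a fuller set of baselines for the moving average crossover system.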
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ] ]
d0a3607bd4d0c56a49d82ee552be8ad248dceeb8
214,659
ipynb
Jupyter Notebook
.ipynb_checkpoints/data_explore-checkpoint.ipynb
niuguy/suanming
acc6e5b46b80563cebc5eba43f2a0c0753c0ac0f
[ "BSD-3-Clause" ]
null
null
null
.ipynb_checkpoints/data_explore-checkpoint.ipynb
niuguy/suanming
acc6e5b46b80563cebc5eba43f2a0c0753c0ac0f
[ "BSD-3-Clause" ]
null
null
null
.ipynb_checkpoints/data_explore-checkpoint.ipynb
niuguy/suanming
acc6e5b46b80563cebc5eba43f2a0c0753c0ac0f
[ "BSD-3-Clause" ]
null
null
null
51.292473
15,300
0.470938
[ [ [ "import pandas as pd\nimport _pickle as pickle\nimport numpy as np\nimport matplotlib.pyplot as plt", "_____no_output_____" ], [ "def load_data(filename='dataset/dtoc.pkl'):\n pd_dtoc = pickle.load(open(filename, 'rb'))\n return pd_dtoc", "_____no_output_____" ], [ "df = load_data(filename='dataset/dtoc_proc.pkl') ", "_____no_output_____" ], [ "dtoc = df[df['is_dtoc']==1]", "_____no_output_____" ], [ "pd.get_dummies(dtoc, columns = ['diag1', 'diag2'])", "_____no_output_____" ], [ "def one_hot(df, cols):\n df_new = pd.DataFrame()\n for each in cols:\n dummies = pd.get_dummies(df[each], prefix=each, drop_first=False)\n df_new = pd.concat([df_new, dummies], axis=1)\n return df_new", "_____no_output_____" ], [ "one_hot(dtoc, ['diag2','diag3','diag4','diag5','diag6', \n 'diag7', 'diag8', 'diag9','diag10', 'diag11', 'diag12'])", "_____no_output_____" ], [ "dtoc[['diag1','diag2','diag3','diag4','diag5','diag6', \n 'diag7', 'diag8', 'diag9','diag10', 'diag11', 'diag12']].head(10)", "_____no_output_____" ], [ "dtoc.columns", "_____no_output_____" ], [ "dtoc.diag1.value_counts().keys()", "_____no_output_____" ], [ "diag1_counts = dtoc.diag1.value_counts().head(20)\nax = diag1_counts.plot.bar(title='ICD code distribution of delayed transfer')\nax.set_xlabel('ICD')\nax.set_ylabel('Count')", "_____no_output_____" ], [ "plt.style.use('seaborn-deep')\n\nbins = np.linspace(0, 100, 100)\ny1 = dtoc.age\ny2 = df.age\n# pyplot.hist(dtoc.age, bins, alpha=0.5, label='x')\n# pyplot.hist(df.age, bins, alpha=0.5, label='y')\n# pyplot.hist([x, y], bins, label=['x', 'y'], color = ['yellow', 'red'])\n# pyplot.legend(loc='upper right')\n# pyplot.show()\n\n\ncolors = ['b','g']\n\n#plots the histogram\nfig, (ax1, ax2) = plt.subplots(2, 1)\nax1.hist(y1 , color = 'brown')\nax1.set_xlabel('Age')\nax1.set_ylabel('Count')\nax1.set_title('Age distribution of delayed transfer')\nax2.hist(y2 , color = 'green')\nax2.set_title('Age distribution of all')\nax2.set_xlabel('Age')\nax2.set_ylabel('Count')\nfig.tight_layout()\nplt.show()\n# dtoc.age.plot.hist(figsize=(12,8),title='Dtoc ages distribution')", "_____no_output_____" ], [ "plt.hist(y2,color = 'green', label = 'age histogram of all')\nplt.tight_layout()\nplt.show()", "_____no_output_____" ], [ "df.age.value_counts().head(20)", "_____no_output_____" ], [ "df.age.plot.hist(figsize=(12,8),title='Dtoc ages distribution')", "_____no_output_____" ], [ "df.adm_code.value_counts().plot.bar(figsize=(12,8),title='Adm_codes distribution')", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a37522782932e9c5f8a8378ea5afc749629fd4
476,990
ipynb
Jupyter Notebook
Web06.ipynb
gera09/Numpy_Matplotlib_Scikit-learn
346e63292583ebe05ff4e951efca08de6bdf45fe
[ "MIT" ]
null
null
null
Web06.ipynb
gera09/Numpy_Matplotlib_Scikit-learn
346e63292583ebe05ff4e951efca08de6bdf45fe
[ "MIT" ]
null
null
null
Web06.ipynb
gera09/Numpy_Matplotlib_Scikit-learn
346e63292583ebe05ff4e951efca08de6bdf45fe
[ "MIT" ]
null
null
null
136.360778
179,848
0.834242
[ [ [ "# Вебинар 6. Консультация по курсовому проекту.\n\n### Задание для курсового проекта\n\nМетрика:\nR2 - коэффициент детерминации (sklearn.metrics.r2_score)\n\nСдача проекта:\n1. Прислать в раздел Задания Урока 10 (\"Вебинар. Консультация по итоговому проекту\")\nссылку на программу в github (программа должна содержаться в файле Jupyter Notebook \nс расширением ipynb). (Pull request не нужен, только ссылка ведущая на сам скрипт).\n2. Приложить файл с названием по образцу NVBaranov_predictions.csv\nс предсказанными ценами для квартир из test.csv (файл должен содержать два поля: Id, Price).\nВ файле с предсказаниями должна быть 5001 строка (названия колонок + 5000 предсказаний).\n\nСроки и условия сдачи:\nДедлайн: сдать проект нужно в течение 72 часов после начала Урока 10 (\"Вебинар. Консультация по итоговому проекту\").\nДля успешной сдачи должны быть все предсказания (для 5000 квартир) и R2 должен быть больше 0.6.\nПри сдаче до дедлайна результат проекта может попасть в топ лучших результатов.\nПовторная сдача и проверка результатов возможны только при условии предыдущей неуспешной сдачи.\nУспешный проект нельзя пересдать в целях повышения результата.\nПроекты, сданные после дедлайна или сданные повторно, не попадают в топ лучших результатов, но можно узнать результат.\nВ качестве итогового результата берется первый успешный результат, последующие успешные результаты не учитываются.\n\nПримечание:\nВсе файлы csv должны содержать названия полей (header - то есть \"шапку\"),\nразделитель - запятая. В файлах не должны содержаться индексы из датафрейма.\n\nРекомендации для файла с кодом (ipynb):\n1. Файл должен содержать заголовки и комментарии\n2. Повторяющиеся операции лучше оформлять в виде функций\n3. Не делать вывод большого количества строк таблиц (5-10 достаточно)\n4. По возможности добавлять графики, описывающие данные (около 3-5)\n5. Добавлять только лучшую модель, то есть не включать в код все варианты решения проекта\n6. Скрипт проекта должен отрабатывать от начала и до конца (от загрузки данных до выгрузки предсказаний)\n7. Весь проект должен быть в одном скрипте (файл ipynb).\n8. При использовании статистик (среднее, медиана и т.д.) в качестве признаков,\nлучше считать их на трейне, и потом на валидационных и тестовых данных не считать \nстатистики заново, а брать их с трейна. Если хватает знаний, можно использовать кросс-валидацию,\nно для сдачи этого проекта достаточно разбить данные из train.csv на train и valid.\n9. Проект должен полностью отрабатывать за разумное время (не больше 10 минут),\nпоэтому в финальный вариант лучше не включать GridSearch с перебором \nбольшого количества сочетаний параметров.\n10. Допускается применение любых моделей машинного обучения из библиотеки sklearn.", "_____no_output_____" ], [ "### Прогнозирование на тестовом датасете\n\n1. Выполнить для тестового датасета те же этапы обработки и постронияния признаков (лучше выполнять действия сразу для двух датасетов)\n2. Не потерять и не перемешать индексы от примеров при построении прогнозов\n3. 
Прогнозы должны быть для все примеров из тестового датасета (для всех строк)", "_____no_output_____" ], [ "**Подключение библиотек и скриптов**", "_____no_output_____" ] ], [ [ "import numpy as np\nimport pandas as pd\nimport random\n\nfrom sklearn.model_selection import train_test_split, cross_val_score\nfrom sklearn.preprocessing import StandardScaler, MinMaxScaler\nfrom sklearn.ensemble import RandomForestRegressor\nfrom sklearn.metrics import r2_score as r2\nfrom sklearn.model_selection import KFold, GridSearchCV\n\nimport matplotlib\nimport matplotlib.pyplot as plt\nimport seaborn as sns\n%matplotlib inline", "_____no_output_____" ], [ "import warnings\nwarnings.filterwarnings('ignore')", "_____no_output_____" ], [ "matplotlib.rcParams.update({'font.size': 14})\npd.set_option('precision', 3)\npd.set_option('max_columns', 100)", "_____no_output_____" ], [ "def evaluate_preds(train_true_values, train_pred_values, val_true_values, val_pred_values):\n \"\"\"\n Функция для оценки работы модели\n Parameters:\n train_true_values - целевая переменная из тренировочной части датасета\n train_pred_values - предсказания модели по тренировочной части\n val_true_values - целевая переменная из валидационной части датасета\n val_pred_values - предсказания модели по валидационной части\n Returns:\n R2 на тренировочной и валидационной части, \n графики зависимости истинных значений от предсказаний\n \"\"\"\n print(\"Train R2:\\t\" + str(round(r2(train_true_values, train_pred_values), 3)))\n print(\"Valid R2:\\t\" + str(round(r2(val_true_values, val_pred_values), 3)))\n \n plt.figure(figsize=(18,10))\n \n plt.subplot(121)\n sns.scatterplot(x=train_pred_values, y=train_true_values)\n plt.xlabel('Predicted values')\n plt.ylabel('True values')\n plt.title('Train sample prediction')\n \n plt.subplot(122)\n sns.scatterplot(x=val_pred_values, y=val_true_values)\n plt.xlabel('Predicted values')\n plt.ylabel('True values')\n plt.title('Test sample prediction')\n plt.show()", "_____no_output_____" ] ], [ [ "**Пути к директориям и файлам**", "_____no_output_____" ] ], [ [ "TRAIN_DATASET_PATH = 'datasets/project_task/train.csv'\nTEST_DATASET_PATH = 'datasets/project_task/test.csv'", "_____no_output_____" ] ], [ [ "### Загрузка данных", "_____no_output_____" ], [ "**Описание датасета**\n\n* **Id** - идентификационный номер квартиры\n* **DistrictId** - идентификационный номер района\n* **Rooms** - количество комнат\n* **Square** - площадь\n* **LifeSquare** - жилая площадь\n* **KitchenSquare** - площадь кухни\n* **Floor** - этаж\n* **HouseFloor** - количество этажей в доме\n* **HouseYear** - год постройки дома\n* **Ecology_1, Ecology_2, Ecology_3** - экологические показатели местности\n* **Social_1, Social_2, Social_3** - социальные показатели местности\n* **Healthcare_1, Helthcare_2** - показатели местности, связанные с охраной здоровья\n* **Shops_1, Shops_2** - показатели, связанные с наличием магазинов, торговых центров\n* **Price** - цена квартиры", "_____no_output_____" ] ], [ [ "train_df = pd.read_csv('train.csv')\ndisplay(train_df.tail())\nprint(train_df.shape)", "_____no_output_____" ], [ "# train_df = train_df.set_index('Id')\n# train_df.head()", "_____no_output_____" ], [ "test_df = pd.read_csv('test.csv')\ndisplay(test_df.tail())\nprint(test_df.shape)", "_____no_output_____" ], [ "# test_df = test_df.set_index('Id')", "_____no_output_____" ], [ "train_df.shape[1]-1 == test_df.shape[1]", "_____no_output_____" ] ], [ [ "### Приведение типов", "_____no_output_____" ] ], [ [ "train_df.dtypes", 
"_____no_output_____" ], [ "train_df['Id'] = train_df['Id'].astype(str)\ntrain_df['DistrictId'] = train_df['DistrictId'].astype(str)", "_____no_output_____" ] ], [ [ "### Обзор данных", "_____no_output_____" ], [ "**Целевая переменная**", "_____no_output_____" ] ], [ [ "plt.figure(figsize = (16, 8))\n\ntrain_df['Price'].hist(bins=30)\nplt.ylabel('Count')\nplt.xlabel('Price')\nplt.title('Target distribution')\nplt.show()", "_____no_output_____" ] ], [ [ "**Количественные переменные**", "_____no_output_____" ] ], [ [ "train_df.describe().T", "_____no_output_____" ] ], [ [ "**Категориальные переменные**", "_____no_output_____" ] ], [ [ "cat_colnames = train_df.select_dtypes(include='object').columns.tolist()\ncat_colnames", "_____no_output_____" ], [ "for cat_colname in cat_colnames[2:]:\n print(str(cat_colname) + '\\n\\n' + str(train_df[cat_colname].value_counts()) + '\\n' + '*' * 100 + '\\n')", "Ecology_2\n\nB 9903\nA 97\nName: Ecology_2, dtype: int64\n****************************************************************************************************\n\nEcology_3\n\nB 9725\nA 275\nName: Ecology_3, dtype: int64\n****************************************************************************************************\n\nShops_2\n\nB 9175\nA 825\nName: Shops_2, dtype: int64\n****************************************************************************************************\n\n" ] ], [ [ "### Обработка выбросов", "_____no_output_____" ], [ "**Rooms**", "_____no_output_____" ] ], [ [ "train_df['Rooms'].value_counts()", "_____no_output_____" ], [ "train_df.loc[train_df['Rooms'].isin([0, 10, 19]), 'Rooms'] = train_df['Rooms'].median()", "_____no_output_____" ] ], [ [ "**Square, LifeSquare, KitchenSquare**", "_____no_output_____" ] ], [ [ "train_df.describe()", "_____no_output_____" ], [ "steps = []\nscores = [] # <- записываем финальный score", "_____no_output_____" ], [ "# steps.append('обработка пропусков, выбросов var1')\n\ntrain_df = train_df[train_df['Square'].isnull() |\n (train_df['Square'] < train_df['Square'].quantile(.99)) &\n (train_df['Square'] > train_df['Square'].quantile(.01))]\n\ntrain_df = train_df[train_df['LifeSquare'].isnull() |\n (train_df['LifeSquare'] < train_df['LifeSquare'].quantile(.99)) &\n (train_df['LifeSquare'] > train_df['LifeSquare'].quantile(.01))]\n\ntrain_df = train_df[train_df['KitchenSquare'].isnull() |\n (train_df['KitchenSquare'] < train_df['KitchenSquare'].quantile(.99)) &\n (train_df['KitchenSquare'] > train_df['KitchenSquare'].quantile(.01))]", "_____no_output_____" ], [ "steps.append('обработка пропусков, выбросов var2')\n\n\"\"\"\n...\n...\n...\n\"\"\"", "_____no_output_____" ], [ "train_df.describe()", "_____no_output_____" ], [ "train_df.loc[train_df['LifeSquare'] < 10, 'LifeSquare'] = 10", "_____no_output_____" ], [ "train_df.loc[train_df['KitchenSquare'] < 3, 'KitchenSquare'] = 3", "_____no_output_____" ] ], [ [ "**HouseFloor, Floor**", "_____no_output_____" ] ], [ [ "train_df['HouseFloor'].sort_values().unique()", "_____no_output_____" ], [ "train_df['Floor'].sort_values().unique()", "_____no_output_____" ], [ "train_df.loc[train_df['HouseFloor'] == 0, 'HouseFloor'] = train_df['HouseFloor'].median()", "_____no_output_____" ], [ "floor_outliers = train_df[train_df['Floor'] > train_df['HouseFloor']].index\n\ntrain_df.loc[floor_outliers, 'Floor'] = train_df.loc[floor_outliers, 'HouseFloor'].apply(lambda x: random.randint(1, x))\n", "_____no_output_____" ] ], [ [ "**HouseYear**", "_____no_output_____" ] ], [ [ 
"train_df['HouseYear'].sort_values().unique()", "_____no_output_____" ], [ "train_df.loc[train_df['HouseYear'] > 2020, 'HouseYear'] = 2020", "_____no_output_____" ] ], [ [ "### Обработка пропусков", "_____no_output_____" ] ], [ [ "train_df.isnull().sum()", "_____no_output_____" ], [ "train_df[['Square', 'LifeSquare', 'KitchenSquare']].head(10)", "_____no_output_____" ] ], [ [ "**LifeSquare**", "_____no_output_____" ] ], [ [ "# медиана до корректировки\ntrain_df['LifeSquare'].median()", "_____no_output_____" ], [ "# медиана расхождения площадей\nsquare_med_diff = (train_df.loc[train_df['LifeSquare'].notnull(), 'Square']\n - train_df.loc[train_df['LifeSquare'].notnull(), 'LifeSquare']\n - train_df.loc[train_df['LifeSquare'].notnull(), 'KitchenSquare']).median()\n\nsquare_med_diff", "_____no_output_____" ], [ "train_df.loc[train_df['LifeSquare'].isnull(), 'LifeSquare'] = (\n train_df.loc[train_df['LifeSquare'].isnull(), 'Square']\n - train_df.loc[train_df['LifeSquare'].isnull(), 'KitchenSquare']\n - square_med_diff\n)", "_____no_output_____" ], [ "train_df['LifeSquare'].median()", "_____no_output_____" ] ], [ [ "**Healthcare_1**", "_____no_output_____" ] ], [ [ "train_df['Healthcare_1'].head()", "_____no_output_____" ], [ "train_df.loc[train_df['Healthcare_1'].isnull(), 'Healthcare_1'] = train_df['Healthcare_1'].median()", "_____no_output_____" ] ], [ [ "### Построение новых признаков", "_____no_output_____" ], [ "**Dummies**", "_____no_output_____" ] ], [ [ "train_df['Ecology_2_bin'] = train_df['Ecology_2'].replace({'A':0, 'B':1})\ntrain_df['Ecology_3_bin'] = train_df['Ecology_3'].replace({'A':0, 'B':1})\ntrain_df['Shops_2_bin'] = train_df['Shops_2'].replace({'A':0, 'B':1})", "_____no_output_____" ] ], [ [ "**DistrictSize, IsDistrictLarge**", "_____no_output_____" ] ], [ [ "train_df['DistrictId'].value_counts()", "_____no_output_____" ], [ "district_size = train_df['DistrictId'].value_counts().reset_index()\\\n .rename(columns={'index':'DistrictId', 'DistrictId':'DistrictSize'})\n\ndistrict_size.head()", "_____no_output_____" ], [ "train_df = train_df.merge(district_size, on='DistrictId', how='left')\ntrain_df.head()", "_____no_output_____" ], [ "(train_df['DistrictSize'] > 100).value_counts()", "_____no_output_____" ], [ "train_df['IsDistrictLarge'] = (train_df['DistrictSize'] > 100).astype(int)", "_____no_output_____" ] ], [ [ "**MedPriceByDistrict**", "_____no_output_____" ] ], [ [ "med_price_by_district = train_df.groupby(['DistrictId', 'Rooms'], as_index=False).agg({'Price':'median'})\\\n .rename(columns={'Price':'MedPriceByDistrict'})\n\nmed_price_by_district.head()", "_____no_output_____" ], [ "train_df = train_df.merge(med_price_by_district, on=['DistrictId', 'Rooms'], how='left')\ntrain_df.head()", "_____no_output_____" ], [ "train_df['MedPriceByDistrict'].isnull().sum()", "_____no_output_____" ] ], [ [ "*Пример переноса признака на test*", "_____no_output_____" ] ], [ [ "test_df['DistrictId'] = test_df['DistrictId'].astype(str)", "_____no_output_____" ], [ "test_df.merge(med_price_by_district, on=['DistrictId', 'Rooms'], how='left').info()", "<class 'pandas.core.frame.DataFrame'>\nInt64Index: 5000 entries, 0 to 4999\nData columns (total 20 columns):\nId 5000 non-null int64\nDistrictId 5000 non-null object\nRooms 5000 non-null float64\nSquare 5000 non-null float64\nLifeSquare 3959 non-null float64\nKitchenSquare 5000 non-null float64\nFloor 5000 non-null int64\nHouseFloor 5000 non-null float64\nHouseYear 5000 non-null int64\nEcology_1 5000 non-null float64\nEcology_2 5000 non-null 
object\nEcology_3 5000 non-null object\nSocial_1 5000 non-null int64\nSocial_2 5000 non-null int64\nSocial_3 5000 non-null int64\nHealthcare_1 2623 non-null float64\nHelthcare_2 5000 non-null int64\nShops_1 5000 non-null int64\nShops_2 5000 non-null object\nMedPriceByDistrict 4919 non-null float64\ndtypes: float64(8), int64(8), object(4)\nmemory usage: 742.2+ KB\n" ] ], [ [ "### Отбор признаков", "_____no_output_____" ] ], [ [ "train_df.columns.tolist()", "_____no_output_____" ], [ "feature_names = ['Rooms', 'Square', 'LifeSquare', 'KitchenSquare', 'Floor', 'HouseFloor', 'HouseYear',\n 'Ecology_1', 'Ecology_2_bin', 'Ecology_3_bin', 'Social_1', 'Social_2', 'Social_3',\n 'Healthcare_1', 'Helthcare_2', 'Shops_1', 'Shops_2_bin']\n\nnew_feature_names = ['IsDistrictLarge', 'MedPriceByDistrict']\n\ntarget_name = 'Price'", "_____no_output_____" ] ], [ [ "### Разбиение на train и val", "_____no_output_____" ] ], [ [ "X = train_df[feature_names + new_feature_names]\ny = train_df[target_name]", "_____no_output_____" ], [ "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, shuffle=True, random_state=21)", "_____no_output_____" ] ], [ [ "### Построение модели", "_____no_output_____" ], [ "**Обучение и оценка модели**", "_____no_output_____" ] ], [ [ "rf_model = RandomForestRegressor(random_state=21)\nrf_model.fit(X_train, y_train)", "_____no_output_____" ] ], [ [ "**Отложенная выборка**", "_____no_output_____" ] ], [ [ "y_train_preds = rf_model.predict(X_train)\ny_test_preds = rf_model.predict(X_test)\n\nevaluate_preds(y_train, y_train_preds, y_test, y_test_preds)", "Train R2:\t0.952\nValid R2:\t0.671\n" ] ], [ [ "**Перекрёстная проверка**", "_____no_output_____" ] ], [ [ "cv_score = cross_val_score(rf_model, X, y, scoring='r2', cv=KFold(n_splits=3, shuffle=True, random_state=21))\ncv_score", "_____no_output_____" ], [ "cv_score.mean()", "_____no_output_____" ] ], [ [ "### XGBoost", "_____no_output_____" ] ], [ [ "from xgboost import XGBRegressor\n\nxgb = xgboost.XGBRegressor(n_estimators=500, learning_rate=0.08, gamma=0, subsample=0.75,\n colsample_bytree=1, max_depth=7, objective ='reg:squarederror')\nxgb.fit(X_train, y_train)\n\ny_train_preds = xgb.predict(X_train)\ny_test_preds = xgb.predict(X_test)\n\nevaluate_preds(y_train, y_train_preds, y_test, y_test_preds)", "_____no_output_____" ], [ "from sklearn.metrics import r2_score", "_____no_output_____" ], [ "# A parameter grid for XGBoost\nparams = {'min_child_weight':[4,5], \n 'gamma':[i/10.0 for i in range(3,6)], \n 'subsample':[i/10.0 for i in range(6,11)],\n 'max_depth': [2,3,4,7]}\n\n# Initialize XGB and GridSearch\nxgb = XGBRegressor(nthread=-1, objective ='reg:squarederror') \n\ngrid = GridSearchCV(xgb, params)\ngrid.fit(X_train,y_train) ", "_____no_output_____" ], [ "print(r2_score(y_test, grid.best_estimator_.predict(X_test)))", "0.7126652903060436\n" ], [ "y_train_preds = grid.best_estimator_.predict(X_train)\ny_test_preds = grid.best_estimator_.predict(X_test)\n\nevaluate_preds(y_train, y_train_preds, y_test, y_test_preds)", "Train R2:\t0.917\nTest R2:\t0.714\n" ] ], [ [ "**Важность признаков**", "_____no_output_____" ] ], [ [ "feature_importances = pd.DataFrame(zip(X_train.columns, rf_model.feature_importances_), \n columns=['feature_name', 'importance'])\n\nfeature_importances.sort_values(by='importance', ascending=False)", "_____no_output_____" ] ] ]
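The assignment at the top of this notebook asks for a two-column submission file (`Id`, `Price`) with predictions for all 5,000 test apartments, produced by applying the same preprocessing to `test.csv` while reusing statistics computed on the training data. The sketch below only illustrates that final step under stated assumptions: it reuses `district_size`, `med_price_by_district`, `feature_names`, `new_feature_names` and the fitted `grid` from earlier cells, fills remaining gaps with training-set medians (a simplification), and writes a generically named file — the real submission should follow the `<Name>_predictions.csv` naming pattern.

```python
# Rough sketch (not the author's final solution): build the same features on
# test_df, reusing lookups computed on the training set only, then predict.
test_prep = test_df.copy()
test_prep['Ecology_2_bin'] = test_prep['Ecology_2'].replace({'A': 0, 'B': 1})
test_prep['Ecology_3_bin'] = test_prep['Ecology_3'].replace({'A': 0, 'B': 1})
test_prep['Shops_2_bin'] = test_prep['Shops_2'].replace({'A': 0, 'B': 1})

# Train-derived lookup tables built earlier in the notebook
test_prep = test_prep.merge(district_size, on='DistrictId', how='left')
test_prep = test_prep.merge(med_price_by_district, on=['DistrictId', 'Rooms'], how='left')
test_prep['DistrictSize'] = test_prep['DistrictSize'].fillna(1)  # unseen district -> assume small
test_prep['IsDistrictLarge'] = (test_prep['DistrictSize'] > 100).astype(int)

# Simple imputation with medians taken from the training set
for col in ['LifeSquare', 'Healthcare_1', 'MedPriceByDistrict']:
    test_prep[col] = test_prep[col].fillna(train_df[col].median())

X_final = test_prep[feature_names + new_feature_names]
test_prep['Price'] = grid.best_estimator_.predict(X_final)

# 5000 predictions plus a header row, no DataFrame index
test_prep[['Id', 'Price']].to_csv('predictions.csv', index=False)
```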
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ] ]
d0a385f6517b998cbeddbeb1f78a5ec8b625e217
672
ipynb
Jupyter Notebook
examples/jupyter_notebook/digital_circuits/algorithms/QM-Method.ipynb
amandeep511997/ruby_circuits
1774cbc2058360299209420e0e00f366659d0529
[ "MIT" ]
null
null
null
examples/jupyter_notebook/digital_circuits/algorithms/QM-Method.ipynb
amandeep511997/ruby_circuits
1774cbc2058360299209420e0e00f366659d0529
[ "MIT" ]
null
null
null
examples/jupyter_notebook/digital_circuits/algorithms/QM-Method.ipynb
amandeep511997/ruby_circuits
1774cbc2058360299209420e0e00f366659d0529
[ "MIT" ]
null
null
null
16.390244
44
0.479167
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
d0a389a1622af5b4809253b85e178df0196e6a26
5,929
ipynb
Jupyter Notebook
notebooks/MR/Old_notebooks/grappa_basic.ipynb
danieldeidda/SIRF-Exercises
8025e8b1f048bc33681f71c325309cba220bcb62
[ "Apache-2.0" ]
null
null
null
notebooks/MR/Old_notebooks/grappa_basic.ipynb
danieldeidda/SIRF-Exercises
8025e8b1f048bc33681f71c325309cba220bcb62
[ "Apache-2.0" ]
null
null
null
notebooks/MR/Old_notebooks/grappa_basic.ipynb
danieldeidda/SIRF-Exercises
8025e8b1f048bc33681f71c325309cba220bcb62
[ "Apache-2.0" ]
null
null
null
39.791946
88
0.596728
[ [ [ "#'''\n#Demonstrates GRAPPA reconstruction of undersampled data. \n#See function grappa_detail.py for an example showing more of the \n#workings and functionality of the SIRF code.\n#\n#Pre-requisites:\n# 1) If the reconstruction engine is set to Gadgetron (default), then\n# this Python script needs to be able to access a listening gadgetron.\n# On the Virtual Machine, gadgetron is installed and the user just needs\n# to type 'gadgetron' in a terminal window.\n# On standalone systems, the user will need to have installed ISMRMRD\n# and gadgetron code.\n#\n# 2) An input data file from a GRAPPA MRI acquisition in the ISMRMRD format.\n# Example GRAPPA datasets:\n# a) 'meas_MID00108_FID57249_test_2D_2x.dat' is \n# available from https://www.ccppetmr.ac.uk/downloads\n# This is in the manufacturer's raw data format and needs to be\n# converted to ISMRMRD format using 'siemens_to_ismrmrd'.\n# This executable is installed on the Virtual Machine.\n#\n# b) A simulated ISMRMRD h5 file is available as default\n#\n#Usage:\n# grappa_basic.py [--help | options]\n#\n#Options:\n# -f <file>, --file=<file> raw data file\n# [default: simulated_MR_2D_cartesian_Grappa2.h5]\n# -p <path>, --path=<path> path to data files, defaults to data/examples/MR\n# subfolder of SIRF root folder\n# -e <engn>, --engine=<engn> reconstruction engine [default: Gadgetron]\n#'''\n#\n## CCP PETMR Synergistic Image Reconstruction Framework (SIRF)\n## Copyright 2015 - 2017 Rutherford Appleton Laboratory STFC.\n## Copyright 2015 - 2017 University College London.\n## Copyright 2015 - 2017 Physikalisch-Technische Bundesanstalt.\n##\n## This is software developed for the Collaborative Computational\n## Project in Positron Emission Tomography and Magnetic Resonance imaging\n## (http://www.ccppetmr.ac.uk/).\n##\n## Licensed under the Apache License, Version 2.0 (the \"License\");\n## you may not use this file except in compliance with the License.\n## You may obtain a copy of the License at\n## http://www.apache.org/licenses/LICENSE-2.0\n## Unless required by applicable law or agreed to in writing, software\n## distributed under the License is distributed on an \"AS IS\" BASIS,\n## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n## See the License for the specific language governing permissions and\n## limitations under the License.\n\n#__version__ = '0.1.0'\nfrom docopt import docopt\n#args = docopt(__doc__, version=__version__)\n%matplotlib notebook\n# import engine module\n#exec('from p' + args['--engine'] + ' import *')\nfrom sirf.Gadgetron import *\n\ndata_file = 'simulated_MR_2D_cartesian_Grappa2.h5'\ndata_path = examples_data_path('MR')\n\n\n\n# locate the input data file\ninput_file = existing_filepath(data_path, data_file)\n\n# Initially we create a container that points to the h5 file.\n# Data is not read from file until the 'process' method of the\n# reconstructor object is called.\n\n# Create an acquisition container of type AcquisitionData\nprint('---\\n reading in file %s...' % input_file)\nacq_data = AcquisitionData(input_file)\n\n\n# Pre-process this input data.\n# (Currently this is a Python script that just sets up a 3 chain gadget.\n# In the future it will be independent of the MR recon engine.)\nprint('---\\n pre-processing acquisition data...')\npreprocessed_data = preprocess_acquisition_data(acq_data)\n\n\n# Perform reconstruction of the preprocessed data.\n# 1. set the reconstruction to be for Cartesian GRAPPA data.\nrecon = CartesianGRAPPAReconstructor();\n\n# 2. 
set the reconstruction input to be the data we just preprocessed.\nrecon.set_input(preprocessed_data);\n\n# 3. run (i.e. 'process') the reconstruction.\nprint('---\\n reconstructing...\\n');\nrecon.process();\n\n\n# retrieve reconstruced image and G-factor data\noutput = recon.get_output()\n\n# show reconstructed image and G-factor data\noutput_array = output.as_array()\ntitle = 'Reconstructed image data (magnitude)'\nshow_3D_array(abs(output_array[0::2,:,:]), suptitle = title, \\\n xlabel = 'samples', ylabel = 'readouts', label = 'slice', \\\n show = False)\ntitle = 'Reconstructed G-factor data (magnitude)'\nshow_3D_array(abs(output_array[1::2,:,:]), suptitle = title, \\\n xlabel = 'samples', ylabel = 'readouts', label = 'slice')\n\n\n", "_____no_output_____" ] ] ]
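Since the reconstructor output above interleaves reconstructed image slices and G-factor slices along the first axis (the `[0::2]` and `[1::2]` indexing used in the plotting calls), a short NumPy check can summarise the noise amplification. This is an illustrative addition, not part of the original SIRF demo script:

```python
# Illustrative follow-up: split the interleaved output into image and G-factor
# stacks and report simple G-factor statistics (pure NumPy, no extra SIRF calls).
import numpy as np

image_stack = abs(output_array[0::2, :, :])     # reconstructed image slices
gfactor_stack = abs(output_array[1::2, :, :])   # corresponding G-factor maps

print('image stack shape:    ', image_stack.shape)
print('G-factor stack shape: ', gfactor_stack.shape)
print('mean G-factor: %.3f, max G-factor: %.3f'
      % (gfactor_stack.mean(), gfactor_stack.max()))
```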
[ "code" ]
[ [ "code" ] ]
d0a38c6d9d193d59e5afa4ac205c88940c5ebb6c
194,361
ipynb
Jupyter Notebook
notebooks/T2 -1 - Data Cleaning - Data Wrangling.ipynb
neovanorphen/data_science_course
e327c7befb1a198d5d585bb501bfa3666265f669
[ "MIT" ]
null
null
null
notebooks/T2 -1 - Data Cleaning - Data Wrangling.ipynb
neovanorphen/data_science_course
e327c7befb1a198d5d585bb501bfa3666265f669
[ "MIT" ]
null
null
null
notebooks/T2 -1 - Data Cleaning - Data Wrangling.ipynb
neovanorphen/data_science_course
e327c7befb1a198d5d585bb501bfa3666265f669
[ "MIT" ]
null
null
null
33.784286
139
0.267008
[ [ [ "import pandas as pd", "_____no_output_____" ], [ "data = pd.read_csv('/home/felipe/Developer/AnacondaProjects/python-ml-course/datasets/customer-churn-model/Customer Churn Model.txt')", "_____no_output_____" ], [ "data.head()", "_____no_output_____" ] ], [ [ "# Crear un subconjunto de datos", "_____no_output_____" ] ], [ [ "account_length = data[\"Account Length\"]", "_____no_output_____" ], [ "account_length.head()", "_____no_output_____" ], [ "(type)(account_length)", "_____no_output_____" ], [ "subset = data[[\"Account Length\", \"Phone\", \"Eve Charge\", \"Day Calls\"]]", "_____no_output_____" ], [ "subset.head()", "_____no_output_____" ], [ "desired_columns = [\"Account Length\", \"Phone\", \"Eve Charge\", \"Night Calls\"]\nsubset = data[desired_columns]\nsubset.head()", "_____no_output_____" ], [ "desired_columns = [\"Account Length\", \"VMail Message\", \"Day Calls\"]\nall_columns_list = data.columns.values.tolist()\nsublist = [x for x in all_columns_list if x not in desired_columns]\nsublist", "_____no_output_____" ], [ "subset = data[sublist]\nsubset.head()", "_____no_output_____" ], [ "a = set(desired_columns)\nb = set(all_columns_list)\nsublist = b-a\nsublist = list(sublist)\nsublist", "_____no_output_____" ], [ "data[1:25]", "_____no_output_____" ], [ "# filtrar usuarios con total mins > 500\ndata1 = data[data[\"Day Mins\"]>300]\ndata1", "_____no_output_____" ], [ "#Usuarios de NY\n\ndata_ny = data[data[\"State\"]== \"NY\"]\ndata_ny", "_____no_output_____" ], [ "## AND & OR |\ndata3 = data[(data[\"Day Mins\"]>300) & (data[\"State\"] == \"NY\")]\ndata3\n\ndata4 = data[(data[\"Day Mins\"]>300) | (data[\"State\"] == \"NY\")]\ndata4", "_____no_output_____" ], [ "data5 = data[data[\"Day Calls\"]< data[\"Night Calls\"]]\ndata5", "_____no_output_____" ], [ "data6 = data[data[\"Day Mins\"]< data[\"Night Mins\"]]\ndata6", "_____no_output_____" ], [ "# minutos de dia noche y longitud de la cuenta \nsubset_first_50 = data[[\"Day Mins\", \"Night Mins\", \"Account Length\"]][:50]\nsubset_first_50", "_____no_output_____" ], [ "data.iloc[1:10,3:6]", "_____no_output_____" ], [ "data.iloc[:,3:6]\ndata.iloc[1:10,:]\n\ndata.iloc[1:10,[2,5,7]]", "_____no_output_____" ], [ "data.loc[1:10,[\"Area Code\",\"Day Mins\"]]", "_____no_output_____" ], [ "#crear columnas\n\ndata[\"Total Mins\"] = data[\"Day Mins\"] + data[\"Night Mins\"] + data[\"Eve Mins\"]", "_____no_output_____" ], [ "data[\"Total Calls\"] = data[\"Day Calls\"] + data[\"Night Calls\"] + data[\"Eve Calls\"]", "_____no_output_____" ], [ "data", "_____no_output_____" ] ], [ [ "## Generación de números aleatorios", "_____no_output_____" ] ], [ [ "import numpy as np", "_____no_output_____" ], [ "np.random.randint(1,100)", "_____no_output_____" ], [ "np.random.random()", "_____no_output_____" ], [ "def randint_list(n, a, b):\n x = []\n for i in range(n):\n x.append(np.random.randint(a,b))\n return x", "_____no_output_____" ], [ "randint_list(25, 1, 50)", "_____no_output_____" ], [ "import random", "_____no_output_____" ], [ "random.randrange(0,100,7)", "_____no_output_____" ], [ "a = np.arange(100)\na", "_____no_output_____" ], [ "np.random.shuffle(a)\na", "_____no_output_____" ], [ "col_list = data.columns.values.tolist()", "_____no_output_____" ], [ "np.random.choice(col_list)", "_____no_output_____" ], [ "# seeds\nnp.random.seed(2018)\n\nfor i in range(5):\n print(np.random.random())", "0.8823493117539459\n0.10432773786047767\n0.9070093335163405\n0.3063988986063515\n0.446408872427422\n" ] ] ]
[ "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a390d7fcb26cb542261cac9be601677594e100
699,232
ipynb
Jupyter Notebook
Week6_radvel_fitting/Key.ipynb
chetanchawla/Intro-to-Astro-2021
9ad1faa23f8c57099a22d1b29cad027aab6ef8f1
[ "MIT" ]
108
2021-01-09T07:40:57.000Z
2022-03-10T12:13:44.000Z
Week6_radvel_fitting/Key.ipynb
dragonfire-09/Intro-to-Astro-2021
9e949970980a181ea0bd735ad7c4c2afc23867d0
[ "MIT" ]
10
2021-01-15T21:18:25.000Z
2021-06-27T11:37:44.000Z
Week6_radvel_fitting/Key.ipynb
dragonfire-09/Intro-to-Astro-2021
9e949970980a181ea0bd735ad7c4c2afc23867d0
[ "MIT" ]
51
2021-04-29T18:55:15.000Z
2022-03-18T09:15:32.000Z
591.566836
312,992
0.942394
[ [ [ "# Radial Velocity Orbit-fitting with RadVel\n\n## Week 6, Intro-to-Astro 2021\n### Written by Ruben Santana & Sarah Blunt, 2018\n#### Updated by Joey Murphy, June 2020\n#### Updated by Corey Beard, July 2021\n\n\n## Background information\nRadial velocity measurements tell us how the velocity of a star changes along the direction of our line of sight. These measurements are made using Doppler Spectroscopy, which looks at the spectrum of a star and measures shifts in known absorption lines. Here is a nice [GIF](https://polytechexo.files.wordpress.com/2011/12/spectro.gif) showing the movement of a star due to the presence of an orbiting planet, the shift in the stellar spectrum, and the corresponding radial velocity measurements. \n\nThis tutorial will cover a lot of new topics and build on ones we just learned. We don't have time to review all of them right now, so you're encouraged to read the following references before coming back to complete the tutorial as one of your weekly assignments.\n- [Intro to the Radial Velocity Technique](http://exoplanets.astro.yale.edu/workshop/EPRV/Bibliography_files/Radial_Velocity.pdf) (focus on pgs. 1-6)\n- [Intro to Periodograms](https://arxiv.org/pdf/1703.09824.pdf) (focus on pgs. 1-30)\n- [Intro to Markov Chain Monte Carlo Methods](https://towardsdatascience.com/a-zero-math-introduction-to-markov-chain-monte-carlo-methods-dcba889e0c50) (link also found in the MCMC resources from the Bayesian fitting methods and MCMC tutorial)\n\n\n## About this tutorial\nIn this tutorial, you will use the California Planet Search Python package [RadVel](https://github.com/California-Planet-Search/radvel) to characterize the exoplanets orbiting the star K2-24 (EPIC 203771098) using radial velocity measurements. This tutorial is a modification of the \"[K2-24 Fitting & MCMC](https://github.com/California-Planet-Search/radvel/blob/master/docs/tutorials/K2-24_Fitting%2BMCMC.ipynb)\" tutorial on the RadVel GitHub page. \n\nThere are several coding tasks for you to accomplish in this tutorial. Each task is indicated by a `#TODO` comment.\n\nIn this tutorial, you will:\n- estimate planetary orbital periods using a periodogram\n- perform a maximum likelihood orbit fit with RadVel \n- create a residuals plot\n- perform a Markov Chain Monte Carlo (MCMC) fit to characterize orbital parameter uncertainty\n\n## Outline\n1. RadVel Installation\n2. Importing Data\n3. Finding Periods\n4. Defining and Initializing a Model\n5. Maximum Likelihood Fitting\n6. Residuals\n7. MCMC", "_____no_output_____" ], [ "## 1. Installation\nWe will begin by making sure we have all the python packages needed for the tutorial. First, [install RadVel](http://radvel.readthedocs.io/en/latest/quickstartcli.html#installation) by typing:\n\n`pip install radvel` at the command line. (Some warning messages may print out, but I (Corey) was able to install RadVel successfully in a new Anaconda environment using python=3.8.3.)\n\nIf you want to clone the entire RadVel GitHub repository for easy access to the RadVel source code, type:\n\n`git clone https://github.com/California-Planet-Search/radvel.git`\n\n\nIf everything installed correctly, the following cell should run without errors. 
If you still see errors try restarting the kernel by using the tab above labeled **kernel >> restart**.", "_____no_output_____" ] ], [ [ "# allows us to see plots on the jupyter notebook\n%matplotlib inline\n\n# used to interact with operating system\nimport os\n\n# models used by radvel for calculations, plotting, and model optimization\nimport matplotlib\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\nfrom scipy import optimize\n\n# for corner plots\nimport corner\n\n# for radial velocity analysis\nimport radvel\nfrom radvel.plot import orbit_plots, mcmc_plots\n\n# for periodogram\nfrom astropy.stats import LombScargle\n\n\n# sets font size for plots\nmatplotlib.rcParams['font.size'] = 18", "_____no_output_____" ] ], [ [ "## 2. Importing and Plotting Data\n\nWhen you installed RadVel, some .csv files were placed in a directory on your computer called `radvel.DATADIR`. Let's read this data into Python using pandas.", "_____no_output_____" ] ], [ [ "# import data\npath = os.path.join(radvel.DATADIR,'epic203771098.csv') # path to data file\ndata = pd.read_csv(path, index_col=0) # read data into pandas DataFrame\n\nprint('Path to radvel.DATADIR: {}\\n'.format(radvel.DATADIR))\nprint(data)\n\n# Let's print out the column names of the pandas DataFrame you just created (`data`)\nprint(data.columns.values)\n\n# TODO: print out the length of `data`\nprint(len(data))\n", "Path to radvel.DATADIR: /Users/corey/anaconda3/envs/astroconda/radvel_example_data\n\n errvel t vel\n0 1.593725 2364.819580 6.959066\n1 1.600745 2364.825101 5.017650\n2 1.658815 2364.830703 13.811799\n3 1.653224 2366.827579 1.151030\n4 1.639095 2367.852646 9.389273\n5 1.723691 2373.888150 -2.820614\n6 1.907690 2374.852412 -0.772991\n7 1.709263 2376.863820 -2.222920\n8 1.838565 2377.866073 0.146115\n9 1.649715 2378.834011 2.739558\n10 1.862539 2380.930797 7.571228\n11 1.681661 2382.886140 5.137920\n12 1.901465 2383.823529 0.368812\n13 1.689888 2384.799943 -1.480772\n14 1.680251 2384.828991 -2.737295\n15 1.718589 2384.839720 -5.682658\n16 1.713852 2388.955960 -3.910203\n17 1.644495 2395.857258 -5.635432\n18 1.760751 2402.898756 3.635211\n19 1.649973 2403.771319 3.538690\n20 1.460209 2411.755697 -3.754068\n21 1.782881 2412.794200 -0.111816\n22 1.637531 2420.803019 0.112752\n23 1.760237 2421.822804 -2.587854\n24 1.656808 2422.742125 3.020029\n25 1.982700 2429.761751 -13.033590\n26 1.875608 2429.810230 -10.996779\n27 1.702684 2432.732316 -12.064872\n28 1.913379 2432.807239 -14.867229\n29 1.929956 2457.716902 -1.308613\n30 1.944350 2457.754804 -5.319776\n31 1.617464 2465.710740 4.873121\n['errvel' 't' 'vel']\n32\n" ], [ "# Let's plot time (data.t) vs radial velocity (data.vel) using matplotlib.pyplot\nplt.plot(data.t, data.vel, 'o')\n\n# Now, on a new figure, let's modify the plotting code so that it adds error \n# bars (data.errvel) to each RV measurement\nplt.figure()\nplt.errorbar(data.t, data.vel, data.errvel, fmt='o')\nplt.show()\nplt.errorbar(data.t, data.vel, data.errvel, fmt='o',color='maroon')\n\n\n# Add labels for the x- and y-axes of your plot (time is in days; radial velocity is in m/s)\nplt.xlabel('Time [days]')\nplt.ylabel('Velocity [m/s]')\nplt.show()\n\n# TODO: change the color of the data in your plot\n\n# TODO: What do you notice about the data? Does it look like there is a planet signal? \n# What orbital period would you estimate?\n# Enter your answer in the triple quotes below.\n\n\"\"\"\n\nIt definitely doesn't appear to be a pure sinusoid. 
This means there could be significant eccentricity, additional planets,\nstellar activity, or any number of other possible explanations. The periods look to be on the order of ~10-20 days,\nor so\n\n\n\n\"\"\"", "_____no_output_____" ] ], [ [ "## 3. Finding a Significant Period\n", "_____no_output_____" ], [ "Now, we will find probable orbital periods using a Lomb-Scargle periodogram. Periodograms are created using a Fourier transform, which is a mathematical process that takes in continuous time-based data and decomposes it into a combination of functions with various frequencies, as seen in the image below. To build more intuition for how a Fourier transform works, check out this useful [PhET simulation](https://phet.colorado.edu/en/simulation/fourier).\n\n![fourier](https://upload.wikimedia.org/wikipedia/commons/6/61/FFT-Time-Frequency-View.png \"fast fourier transform\")\n([wikipedia](https://upload.wikimedia.org/wikipedia/commons/6/61/FFT-Time-Frequency-View.png))\n\nThe graph on the left is the continuous data which is analogous to our radial velocity data. The three sine waves behind the graphs are the functions that are added to produce a good fit to the original data. Finally, the graph on the right is the periodogram. It shows how much each contributing function's frequency contributes to the data model. The larger the peak in the graph, the more significant that frequency is in the data. We use this frequency to get an idea of periodic behavior in the data (e.g. the orbital period of an exoplanet). Now, we will calculate a periodogram and use it to give us an estimate of the period of the planet's orbit.", "_____no_output_____" ] ], [ [ "def LombScarg(t,v,e,min_per=0.01,max_per=1000):\n\n    #Calculate Generalized Lomb-Scargle periodogram and window function\n\n    fmin = 1./max_per\n    fmax = 1./min_per\n    frequency, power = LombScargle(t, v, e).autopower(minimum_frequency=1/1000,maximum_frequency=1.,method='cython')\n\n    per = 1/frequency\n    #Identify strongest period.\n    \n    in_window = np.zeros(len(per),dtype=bool)\n    for s in range(len(per)):\n        if per[s] > min_per and per[s] < max_per:\n            in_window[s] += 1\n\n    powmax = max(power[in_window])\n    imax = np.argmax(power[in_window])\n    fbest = frequency[in_window][imax]\n    perbest = 1./fbest\n\n    return per, power, perbest", "_____no_output_____" ], [ "minPer = 30 # min period to look for 1st planet (in days)\nmaxPer = 50 # max period to look for 1st planet (in days)\n\nperiod, power, period1 = LombScarg(data.t, data.vel,data.errvel,min_per=minPer,max_per=maxPer)\nplt.xlim(1,1000)\nplt.axvline(period1,color='red',linestyle='--')\nplt.semilogx(period,power)\nplt.xlabel('Period (days)')\nplt.ylabel('GLS Power')\nplt.show()\n\n\n# TODO: change the values of minPer and maxPer. How do the results change? Why? Type your answer\n# between the triple quotes below.\n\n\n\"\"\"\n`minPer` and `maxPer` control the period range in which the nyquist searcher looks for significant peaks. Changing\nthem controls which period the searcher returns (it's returning the maximum peak in the allowable range).\n\"\"\"", "_____no_output_____" ] ], [ [ "## 4. Defining and Initializing Model", "_____no_output_____" ], [ "Let's define a function that we will use to initialize the ``radvel.Parameters`` and ``radvel.RVModel`` objects.\nThese will be our initial guesses of the planet parameters based on the radial velocity measurements and periodogram shown above.", "_____no_output_____" ] ], [ [ "nplanets = 1 # number of planets\n\ndef initialize_model():\n    \n    time_base = 2420.\n    params = radvel.Parameters(nplanets,basis='per tc secosw sesinw k')\n    params['per1'] = radvel.Parameter(value=period1) # Insert our guess for period of first planet (from periodogram)\n    params['tc1'] = radvel.Parameter(value=2080.) # guess for time of transit of 1st planet\n    params['secosw1'] = radvel.Parameter(value=0.0) # determines eccentricity (assuming circular orbit here)\n    params['sesinw1'] = radvel.Parameter(value=0.0) # determines eccentricity (assuming circular orbit here)\n    params['k1'] = radvel.Parameter(value=3.) # radial velocity semi-amplitude\n\n    mod = radvel.RVModel(params, time_base=time_base)\n    mod.params['dvdt'] = radvel.Parameter(value=-0.02) # possible acceleration of star\n    mod.params['curv'] = radvel.Parameter(value=0.01) # possible curvature in long-term radial velocity trend\n    \n    return mod\n", "_____no_output_____" ] ], [ [ "Fit the K2-24 RV data assuming circular orbits.\n\nSet initial guesses for the parameters:", "_____no_output_____" ] ], [ [ "mod = initialize_model() # model initialized\nlike = radvel.likelihood.RVLikelihood(mod, data.t, data.vel, data.errvel, '_HIRES') # initialize Likelihood object\n\n# define initial guesses for instrument-related parameters\nlike.params['gamma_HIRES'] = radvel.Parameter(value=0.1) # zero-point radial velocity offset\nlike.params['jit_HIRES'] = radvel.Parameter(value=1.0) # white noise", "_____no_output_____" ] ], [ [ "Plot the model with our initial parameter guesses:", "_____no_output_____" ] ], [ [ "def plot_results(like):\n    fig = plt.figure(figsize=(12,4))\n    fig = plt.gcf()\n    fig.set_tight_layout(True)\n    plt.errorbar(\n        like.x, like.model(data.t.values)+like.residuals(), \n        yerr=like.yerr, fmt='o'\n        )\n    \n    ti = np.linspace(data.t.iloc[0] - 5, data.t.iloc[-1] + 5,100) # time array for model\n\n    plt.plot(ti, like.model(ti))\n    plt.xlabel('Time')\n    plt.ylabel('RV')\n    \nplot_results(like)", "_____no_output_____" ] ], [ [ "## 5. Maximum Likelihood fit", "_____no_output_____" ], [ "Well, that solution doesn't look very good! Let's optimize the parameters set to vary by maximizing the likelihood.\n\nInitialize a ``radvel.Posterior`` object.", "_____no_output_____" ] ], [ [ "post = radvel.posterior.Posterior(like) # initialize radvel.Posterior object", "_____no_output_____" ] ], [ [ "Choose which parameters to change or hold fixed during a fit. 
By default, all `radvel.Parameter` objects will vary, so you only have to worry about setting the ones you want to hold fixed.", "_____no_output_____" ] ], [ [ "post.likelihood.params['secosw1'].vary = False # set as false because we are assuming circular orbit\npost.likelihood.params['sesinw1'].vary = False # set as false because we are assuming circular orbit\nprint(like)", "parameter value vary\nper1 43.8487 True\ntc1 2080 True\nsecosw1 0 False\nsesinw1 0 False\nk1 3 True\ndvdt -0.02 True\ncurv 0.01 True\ngamma_HIRES 0.1 True\njit_HIRES 1 True\n\n" ] ], [ [ "Maximize the likelihood and print the updated posterior object", "_____no_output_____" ] ], [ [ "res = optimize.minimize(\n post.neglogprob_array, # objective function is negative log likelihood\n post.get_vary_params(), # initial variable parameters\n method='Powell', # Nelder-Mead also works\n )\n\nplot_results(like) # plot best fit model\nprint(post)", "parameter value vary\nper1 49.0164 True\ntc1 2080.57 True\nsecosw1 0 False\nsesinw1 0 False\nk1 3.81242 True\ndvdt -0.0843784 True\ncurv 0.00152179 True\ngamma_HIRES -4.20245 True\njit_HIRES 3.88509 True\n\nPriors\n------\n\n" ] ], [ [ "RadVel comes equipped with some fancy ready-made plotting routines. Check this out!", "_____no_output_____" ] ], [ [ "matplotlib.rcParams['font.size'] = 12\n\nRVPlot = orbit_plots.MultipanelPlot(post)\nRVPlot.plot_multipanel()\n\nmatplotlib.rcParams['font.size'] = 18", "NOTE: This version of radvel has been modified to NOT include jitters in errorbars.\n" ] ], [ [ "## 6. Residuals and Repeat\nResiduals are the difference of our data and our best-fit model. \n\nNext, we will plot the residuals of our optimized model to see if there is a second planet in our data. When we look at the following residuals, we will see a sinusoidal shape, so another planet may be present! Thus, we will repeat the steps shown earlier (this time using the parameters from the maximum fit for the first planet).", "_____no_output_____" ] ], [ [ "residuals1 = post.likelihood.residuals()\n\n# Let's make a plot of data.time versus `residuals1`\nplt.figure()\nplt.scatter(data.t, residuals1)\nplt.xlabel('time [MJD]')\nplt.ylabel('RV [m/s]')\nplt.show()\n\n# TODO: What do you notice? What would you estimate the period \n# of the other exoplanet in this system to be? Write your answer between the triple quotes below.\n\n\"\"\"\n\nThese residuals appear to go up and down every ~20 days or so. This looks like a more convincing version of the\nperiod we first observed in the original radial velocity data. It's still pretty hard to tell, though! I'm \nhappy we have algorithms to find orbital periods more effectively than the human eye can.\n\n\n\"\"\"", "_____no_output_____" ] ], [ [ "Let's repeat the above analysis with two planets!", "_____no_output_____" ] ], [ [ "nyquist = 2 # maximum sampling rate\nminPer = 20 # minimum period to look for 2nd planet\nmaxPer = 30 # max period to look for 2nd planet\n\n# finding 2nd planet period\nperiod, power, period2 = LombScarg(data.t, data.vel, data.errvel, min_per=minPer, max_per=maxPer) # finding possible periords for 2nd planet\n\nperiod, power, period1 = LombScarg(data.t, data.vel,data.errvel,min_per=minPer,max_per=maxPer)\nplt.xlim(1,1000)\nplt.axvline(period2,color='red',linestyle='--')\nplt.semilogx(period,power)\nplt.show()\n\n# TODO: why doesn't the periodogram return the period of the first planet? 
Write your answer between the triple\n# quotes below.\n\n\"\"\"\nThe period of the first planet is not in the allowed period range we specified (`minPer` to `maxPer`).\n\n\"\"\"", "_____no_output_____" ] ], [ [ "Repeat the RadVel analysis", "_____no_output_____" ] ], [ [ "nplanets = 2 # number of planets\n\ndef initialize_model():\n \n time_base = 2420\n params = radvel.Parameters(nplanets,basis='per tc secosw sesinw k')\n \n # 1st Planet\n params['per1'] = post.params['per1'] # period of 1st planet\n params['tc1'] = post.params['tc1'] # time transit of 1st planet\n params['secosw1'] = post.params['secosw1'] # determines eccentricity (assuming circular orbit here)\n params['sesinw1'] = post.params['sesinw1'] # determines eccentricity (assuming circular orbit here)\n params['k1'] = post.params['k1'] # velocity semi-amplitude for 1st planet\n \n # 2nd Planet\n params['per2'] = radvel.Parameter(value=period2) # Insert our guess for period of second planet (from periodogram)\n params['tc2'] = radvel.Parameter(value=2070.)\n params['secosw2'] = radvel.Parameter(value=0.0)\n params['sesinw2'] = radvel.Parameter(value=0.0)\n params['k2'] = radvel.Parameter(value=1.1)\n \n mod = radvel.RVModel(params, time_base=time_base)\n mod.params['dvdt'] = radvel.Parameter(value=-0.02) # acceleration of star\n mod.params['curv'] = radvel.Parameter(value=0.01) # curvature of radial velocity fit\n \n return mod\n", "_____no_output_____" ], [ "mod = initialize_model() # initialize radvel.RVModel object\nlike = radvel.likelihood.RVLikelihood(mod, data.t, data.vel, data.errvel, '_HIRES')\nlike.params['gamma_HIRES'] = radvel.Parameter(value=0.1)\nlike.params['jit_HIRES'] = radvel.Parameter(value=1.0)", "_____no_output_____" ], [ "like.params['secosw1'].vary = False # set as false because we are assuming circular orbit\nlike.params['sesinw1'].vary = False \nlike.params['secosw2'].vary = False # set as false because we are assuming circular orbit\nlike.params['sesinw2'].vary = False \n\nprint(like)", "parameter value vary\nper1 49.0164 True\ntc1 2080.57 True\nsecosw1 0 False\nsesinw1 0 False\nk1 3.81242 True\nper2 20.5863 True\ntc2 2070 True\nsecosw2 0 False\nsesinw2 0 False\nk2 1.1 True\ndvdt -0.02 True\ncurv 0.01 True\ngamma_HIRES 0.1 True\njit_HIRES 1 True\n\n" ], [ "plot_results(like)", "_____no_output_____" ], [ "post = radvel.posterior.Posterior(like) # initialize radvel.Posterior object\n\nres = optimize.minimize(\n post.neglogprob_array, # objective function is negative log likelihood\n post.get_vary_params(), # initial variable parameters\n method='Powell', # Nelder-Mead also works\n )\n\nplot_results(like) # plot best fit model\nprint(post)", "parameter value vary\nper1 48.4701 True\ntc1 2083.1 True\nsecosw1 0 False\nsesinw1 0 False\nk1 4.39734 True\nper2 21.126 True\ntc2 2069.83 True\nsecosw2 0 False\nsesinw2 0 False\nk2 4.7912 True\ndvdt -0.0630582 True\ncurv 0.00152013 True\ngamma_HIRES -4.0217 True\njit_HIRES 2.27605 True\n\nPriors\n------\n\n" ], [ "matplotlib.rcParams['font.size'] = 12\n\nRVPlot = orbit_plots.MultipanelPlot(post)\nRVPlot.plot_multipanel()\n\nmatplotlib.rcParams['font.size'] = 18", "NOTE: This version of radvel has been modified to NOT include jitters in errorbars.\n" ], [ "residuals2 = post.likelihood.residuals()\n\n# TODO: make a plot of data.time versus `residuals2`. 
What do you notice?\n\n# TODO: try redoing the above analysis, but this time, allow the eccentricity parameters to vary during the fit.\n# How does the fit change?\n\nplt.figure()\nplt.scatter(data.t, residuals2)\nplt.xlabel('time [MJD]')\nplt.ylabel('RV [ms$^{-1}$]')\n\n# Here's the original residuals plot, for comparison purposes:\nplt.figure()\nplt.scatter(data.t, residuals1, color='red')\nplt.xlabel('time [MJD]')\nplt.ylabel('RV [ms$^{-1}$]')\n\n\"\"\"\nThe residuals perhaps look a little more randomly distributed than before, but again it's pretty hard to tell\nwithout a periodogram.\n\"\"\"\n\n\n\n\"\"\"\n\nThe easiest way to do this is to rerun the analysis, except whenever you see a line that says secosw1 = False,\nor sesinw1 = False, or secosw2 = False, or sesinw2 = False, you change them to True.\n\nBe careful not to let the model go too crazy with eccentricity, try giving them initial guesses of 0.1.\n\nThe planet RV signatures look more angular (less purely sinusoidal) now that they have a non-zero eccentricity.\nThe data appears to be better-fit by an eccentric orbit model (i.e. the planets probably do have non-negligible\neccentricities).\n\"\"\"", "_____no_output_____" ] ] ], [ [ "K2-24 only has two known exoplanets, so we will stop this part of our analysis here. However, when analyzing an uncharacterized star system, it's important to continue the analysis until we see no significant reduction in the radial velocity residuals. ", "_____no_output_____" ], [ "# 7. Markov Chain Monte Carlo (MCMC)\nAfter reading the intro to MCMC blog post at the beginning of this tutorial, you are an expert on MCMC! Write a 3-sentence introduction to this section yourself. \n\nMCMC is a method of exploring the parameter space of probable orbits using random walks, i.e. randomly changing the parameters of the fit. MCMC is used to find the most probable orbital solution and to determine the uncertainty (error bars) in the fit. MCMC tells you the probability distributions of orbital parameters consistent with the data.", "_____no_output_____" ] ], [ [ "# TODO: edit the Markdown cell immediately above this one with a 3 sentence description of the MCMC method.\n# What does MCMC do? Why do you think it is important to use MCMC to characterize uncertainties in radial\n# velocity fits?", "_____no_output_____" ] ], [ [ "Let's use RadVel to perform an MCMC fit:", "_____no_output_____" ] ], [ [ "df = radvel.mcmc(post, nwalkers=50, nrun=1000)\n\n# TODO: What type of data structure is `df`, the object returned by RadVel's MCMC method?\n\n\"\"\"\nIt is a pandas dataframe\n\"\"\"", "20000/400000 (5.0%) steps complete; Running 15855.08 steps/s; Mean acceptance rate = 48.2%; Min Auto Factor = 19; Max Auto Relative-Change = inf; Min Tz = 6614.6; Max G-R = 1.004\nDiscarding burn-in now that the chains are marginally well-mixed\n\n400000/400000 (100.0%) steps complete; Running 15030.55 steps/s; Mean acceptance rate = 37.4%; Min Auto Factor = 39; Max Auto Relative-Change = 0.0724; Min Tz = 1595.8; Max G-R = 1.018\n\nMCMC: WARNING: chains did not pass convergence tests. They are likely not well-mixed.\n" ] ], [ [ "Make a fun plot!", "_____no_output_____" ] ], [ [ "Corner = mcmc_plots.CornerPlot(post, df)\nCorner.plot()\n\n# TODO: There is a lot going on in this plot. What do you think the off-diagonal boxes are showing? \n# What about the on-diagonal boxes? What is the median period of the first planet? \n# What is the uncertainty on the period of the first planet? The second planet?\n# TODO: Why do you think the uncertainties on the periods of planets b and c are different?\n\n\"\"\"\nThe on-diagonal boxes are 1 dimensional probability distributions over each of the parameters of the fit.\nThe off-diagonal boxes show 2 dimensional probability distributions (covariances) between pairs of parameters\n(the box's row and column show the parameters it corresponds to).\nThe median period of the first planet (for my eccentric fit) is 52.56 days. The uncertainty is +0.08 days, -0.07 days\n(this corresponds to a *68% confidence interval* of [52.49, 52.64] days.)\nThe median period of the second planet is 20.69 days, with an uncertainty of +/- 0.02 days. \nThe uncertainties of the two orbital periods are different because the period of the second planet is much better\nconstrained by the data than the period of the first planet. We see many periods of the second planet repeated\nover the ~100 day dataset, but only ~2 periods of the first planet.\n\n\"\"\"", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
d0a3911e19eb66aea77f5440e936665191b77410
58,544
ipynb
Jupyter Notebook
realnvp/Simple Gaussian Test.ipynb
bjlkeng/sandbox
ba1fea113065256d4981a71f7b4bece7299effd1
[ "MIT" ]
158
2017-11-09T14:56:31.000Z
2022-03-26T17:26:20.000Z
realnvp/Simple Gaussian Test.ipynb
bjlkeng/sandbox
ba1fea113065256d4981a71f7b4bece7299effd1
[ "MIT" ]
8
2017-11-28T11:14:46.000Z
2021-05-03T00:23:57.000Z
realnvp/Simple Gaussian Test.ipynb
bjlkeng/sandbox
ba1fea113065256d4981a71f7b4bece7299effd1
[ "MIT" ]
77
2017-11-21T15:27:52.000Z
2022-02-17T16:37:34.000Z
45.17284
14,388
0.627699
[ [ [ "import math\nimport string\nimport pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom scipy.special import logit\nfrom IPython.display import display\n\nimport tensorflow as tf\nfrom tensorflow.keras.layers import (Input, Dense, Lambda, Flatten, Reshape, BatchNormalization, Layer,\n Activation, Dropout, Conv2D, Conv2DTranspose,\n Concatenate, add, Add, Multiply)\nfrom tensorflow.keras.losses import sparse_categorical_crossentropy\nfrom tensorflow.keras.optimizers import RMSprop, Adam\nfrom tensorflow.keras.models import Model\nfrom tensorflow.keras import metrics\nfrom tensorflow.keras import backend as K\nfrom tensorflow.keras.datasets import cifar10\nfrom tensorflow.keras.callbacks import TensorBoard\nfrom tensorflow_addons.callbacks import TQDMProgressBar\n\nfrom realnvp_helpers import Mask, FlowBatchNorm\n\n\n%matplotlib inline", "/home/brian/anaconda3/lib/python3.6/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n from ._conv import register_converters as _register_converters\n" ], [ "batch_size = 10\nshape = (4, 4, 3)\nbatch_shape = (batch_size,) + shape\nsamples = 100\n\ntrain_data = np.random.normal(0.5, 3, size=(samples,) + (shape))\nprint(batch_shape)\nprint(train_data.shape)\ntrain_data[0, :, :, :]", "(10, 4, 4, 3)\n(100, 4, 4, 3)\n" ], [ "def conv_block(input_shape, kernel_size, filters, stage, block, use_resid=True):\n ''' Adapted from resnet50 implementation in Keras '''\n filters1, filters2, filters3 = filters\n if K.image_data_format() == 'channels_last':\n bn_axis = 3\n else:\n bn_axis = 1\n conv_name_base = 'res' + str(stage) + block + '_branch'\n bn_name_base = 'bn' + str(stage) + block + '_branch'\n \n input_tensor = Input(batch_shape=input_shape)\n x = Conv2D(filters1, (1, 1),\n kernel_initializer='he_normal',\n name=conv_name_base + '2a')(input_tensor)\n x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2a')(x)\n x = Activation('relu')(x)\n\n x = Conv2D(filters2, kernel_size,\n padding='same',\n kernel_initializer='he_normal',\n name=conv_name_base + '2b')(x)\n x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2b')(x)\n x = Activation('relu')(x)\n\n x = Conv2D(filters3, (1, 1),\n kernel_initializer='he_normal',\n name=conv_name_base + '2c')(x)\n x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2c')(x)\n\n if use_resid:\n x = add([x, input_tensor])\n x = Activation('relu')(x)\n \n return Model(input_tensor, x, name='conv_block' + stage + block)", "_____no_output_____" ], [ "def coupling_layer(input_shape, mask_type, stage):\n ''' Implements (as per paper):\n y = b * x + (1 - b) * [x * exp(s(b * x)) + t(b * x)]\n '''\n assert mask_type in ['check_even', 'check_odd', 'channel_even', 'channel_odd']\n mask_prefix = 'check' if mask_type.startswith('check') else 'channel'\n mask_opposite = 'odd' if mask_type.endswith('even') else 'even'\n \n input_tensor = Input(batch_shape=input_shape)\n \n # Raw operations for step\n b0 = Mask(mask_type)\n b1 = Mask(mask_prefix + '_' + mask_opposite)\n s_ = conv_block(input_shape, (3, 3), (32, 32, 3), stage, '_s', use_resid=True)\n t_ = conv_block(input_shape, (3, 3), (32, 32, 3), stage, '_t', use_resid=True)\n batch = FlowBatchNorm(name='_'.join(['FlowBatchNorm' + mask_type + stage]))\n \n # Forward\n masked_input = b1(input_tensor)\n s = s_(masked_input)\n t = t_(masked_input)\n coupling = Lambda(lambda ins: ins[0] * 
K.exp(ins[1]) + ins[2])([input_tensor, s, t])\n coupling_mask = b0(coupling)\n out1, out2 = Add()([masked_input, coupling_mask]), b0(s)\n out1_norm = batch(out1)\n #batch_loss = Lambda(lambda x: - (K.log(gamma) - 0.5 * K.log(x + batch.epsilon)))(var)\n #batch_loss = Lambda(lambda x: -K.log(gamma))(var)\n #batch_loss = Lambda(lambda x: - ( - 0.5 * K.log(x + batch.epsilon)))(var)\n \n # Reverse\n \n # Return result + masked scale for loss function\n return Model(input_tensor, [out1_norm, out2], name='_'.join(['coupling', mask_type, stage]))", "_____no_output_____" ], [ "def coupling_group(input_tensor, steps, mask_type, stage):\n name_mapping = dict(enumerate(string.ascii_lowercase))\n \n # TODO: Only need check/channel, not even/odd right?\n assert mask_type in ['check_even', 'check_odd', 'channel_even', 'channel_odd']\n mask_prefix = 'check' if mask_type.startswith('check') else 'channel'\n \n x = input_tensor\n s_losses = []\n batch_losses = []\n for i in range(3):\n mask_type = mask_prefix + ('_even' if i % 2 == 0 else '_odd')\n step = coupling_layer(input_tensor.shape, mask_type, stage=str(stage) + name_mapping[i])\n x, s = step(x)\n #x, s = step(x)\n s_losses.append(s)\n \n return x, s_losses", "_____no_output_____" ], [ "def realnvp_zloss(target, z):\n # log(p_X(x)) = log(p_Z(f(x))) + log(|det(\\partial f(x) / \\partial X^T)|)\n # Prior is standard normal(mu=0, sigma=1)\n shape = z.shape\n return K.sum(-0.5 * np.log(math.pi) - 0.5 * z**2, axis=list(range(1, len(shape[1:]))))\n\ndef const_loss(target, output):\n # For debugging\n return K.constant(0)\n\ndef realnvp_sumloss(target, output):\n # Determinant is just sum of \"s\" or \"batch loss\" params (already log-space)\n shape = output.shape\n return K.sum(output, axis=list(range(1, len(shape))))", "_____no_output_____" ], [ "input_tensor = Input(batch_shape=batch_shape)\n#x = conv_block(shape, (3, 3), (32, 32, 3), '0', '_s', use_resid=True)(input_tensor)\nstep = coupling_layer(batch_shape, 'check_even', stage=str('a') + '0')\nx, s = step(input_tensor)\ns_losses = [s, s]\n\n#x, s_losses, batch_losses = coupling_group(input_tensor, steps=3, mask_type='check_even', stage=1)\ns_losses = Concatenate(name='s_losses')(s_losses)\n\nforward_model = Model(inputs=input_tensor, outputs=[x, s_losses])\noptimizer = Adam(lr=0.001)\nforward_model.compile(optimizer=optimizer, \n loss=[realnvp_zloss, realnvp_sumloss])\n #loss=[const_loss, const_loss, realnvp_sumloss])\nforward_model.summary()", "Model: \"model\"\n__________________________________________________________________________________________________\nLayer (type) Output Shape Param # Connected to \n==================================================================================================\ninput_1 (InputLayer) [(10, 4, 4, 3)] 0 \n__________________________________________________________________________________________________\ncoupling_check_even_a0 (Model) [(10, 4, 4, 3), (10, 19498 input_1[0][0] \n__________________________________________________________________________________________________\ns_losses (Concatenate) (10, 4, 4, 6) 0 coupling_check_even_a0[1][1] \n coupling_check_even_a0[1][1] \n==================================================================================================\nTotal params: 19,498\nTrainable params: 19,224\nNon-trainable params: 274\n__________________________________________________________________________________________________\n" ], [ "def get_losses_from_layers(layers):\n losses = []\n for layer in layers:\n if isinstance(layer, Model):\n 
losses.extend(layer._losses)\n losses.extend(get_losses_from_layers(layer.layers))\n else:\n losses.extend(layer.losses)\n return losses\n\nget_losses_from_layers(forward_model.layers)", "_____no_output_____" ], [ "#early_stopping = keras.callbacks.EarlyStopping('val_loss', min_delta=50.0, patience=5)\n#reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=2, min_lr=0.0001)\ns = [len(train_data)] + [int(x) for x in s_losses.shape[1:]]\n#s[0] = int(train_data.shape[0])\n#print(train_data.shape, np.zeros(s).shape)\n\ntensorboard = TensorBoard(log_dir='graph', \n batch_size=batch_size, \n histogram_freq=1, \n write_graph=True) \nhistory = forward_model.fit(\n train_data, [train_data, np.zeros(s)],\n #validation_data=(train_data[:10], [train_data[:10], np.zeros(s)[:10], np.zeros(s)[:10]]),\n batch_size=batch_size,\n epochs=20,\n callbacks=[TQDMProgressBar()], #, tensorboard], #, early_stopping, reduce_lr],\n verbose=0\n)", "WARNING:tensorflow:`batch_size` is no longer needed in the `TensorBoard` Callback and will be ignored in TensorFlow 2.0.\n" ], [ "df = pd.DataFrame(history.history)\n#display(df.describe(percentiles=[0.25 * i for i in range(4)] + [0.95, 0.99]))\ncol = 'val_loss' if 'val_loss' in df else 'loss'\ndisplay(df[-25:])\ndf[col][-25:].plot(figsize=(8, 6))", "_____no_output_____" ] ], [ [ "# 2019-07-28\n\n* Got some framework up to do coupling layers but having trouble passing the scale parameter to the loss function, getting some weird tensorflow error, needs more debugging\n* Without the determinant in the loss function, it looks like loss goes down, so maybe on the right track?\n * It's actually weird that we're not using the image in the output, but I guess that's what's great about this reversible model!\n* TODO:\n * Debug scale function in loss\n * Add reverse (generator) network to functions above.", "_____no_output_____" ], [ "# 2019-07-29\n\n* Explanation of how to estimate probability of continuous variables (relevant for computing bits/pixel without an explicit discrete distribution): https://math.stackexchange.com/questions/2818318/probability-that-a-sample-is-generated-from-a-distribution\n* Idea for a post, explain likelihood estimation of discrete vs. 
continuous distributions (like pixels), include:\n * Probability of observing a value from continuous distribution = 0\n * https://math.stackexchange.com/questions/2818318/probability-that-a-sample-is-generated-from-a-distribution\n * Probability of observing a value from a set of discrete hypotheses (models) is non-zero using epsilon trick (see above link):\n * https://math.stackexchange.com/questions/920241/can-an-observed-event-in-fact-be-of-zero-probability\n * Explain Equation 3 from \"A NOTE ON THE EVALUATION OF GENERATIVE MODELS\"\n * Also include an example using a simpler case, like a Bernoulli variable that we're estimating using a continuous distribution\n * Bring it back to modelling pixels and how they usually do it", "_____no_output_____" ], [ "# 2020-03-30\n\n* To make reversible network, build forward and backward network at the same time using `Model()` to have components that I can use in both networks\n* Looks like I have some instability here, depending on the run I can get an exact fit (-100s loss) or a poor fit (+10):\n * Turning off residual networks helps\n * Adjusting the learning rate, batch size helps but hard to pinpoint a methodology\n* Most likely it's the instability of using a scale parameter (RealNVP paper Section 3.7), might need to implement their batch norm for more stable results, especially when adding more layers:\n * Reimplement `BatchNorm`: https://github.com/keras-team/keras/blob/master/keras/layers/normalization.py\n * Except return regular result AND (variance + eps) term\n * Use the (var + eps) term to compute Jacobian for loss function (should just be log-additive)\n* Once this is done, add back the other stuff:\n * Turn on residual shortcuts\n * Change batch size to reasonable number and learning rate=0.01\n* If this still doesn't work, might want to implement \"Running average over recent minibatches\" in Appendix E", "_____no_output_____" ], [ "# 2020-03-31\n\n* Fixed a bug (I think) in the network where the coupling layer was wrong. However, it still sometimes gets stuck at around a loss of 5 but more often than not (on another training run) gets to -10 (after 20 iters).\n* Trying to get FlowBatchNorm working but having some issues passing the determinant batch loss as an output because the `batch_size` is not getting passed (it has dimension (3,) but should have dimension (None, 3)). Need to figure out how to translate a tensor to a Layer that includes batch.", "_____no_output_____" ], [ "# 2020-04-05\n\n* Reminder: BatchNormalization on conv layers only needs to normalize across [B, W, H, :] layers, not the \"C\" layer because the filter is identical across a channel (so it uses the same mean/var to normalize). This is nice because it's the same axis (-1) you would normalize across in a Dense layer. See: https://intellipaat.com/community/3872/batch-normalization-in-convolutional-neural-network\n* I think I figured out how to return the batchnorm weights back but now I'm hitting a roadblock when I try to merge them together to put as part of the output loss -- maybe I should just forget it and use the tensors directly in the output loss?\n* Now that I switched to an explicit batch size, it doesn't run anymore... get this error \"Incompatible shapes: [4] vs. 
[32]\", probably some assumption that I had, got to work backwards and fix it I think.\n", "_____no_output_____" ], [ "# 2020-04-14\n\n* Okay figured out the weird error I was getting: when a Keras model has multiple outputs you either have to give it a list or dict of loss functions, otherwise it will apply the same loss to each output! Of course, I just assumed that it gives you all outputs in one loss function. So silly!\n* I reverted the change to explicitly set batch. Instead in the `BatchNormFlow` layer I just multiply zero by the `inputs` and then add the mean/variance. I think this gives the right shape?\n* **TODOs**:\n * Check that shape/computation for `BatchNormFlow`/`batch_losses` loss is correct\n * Check that loss functions are actually returning a negative log-loss (not just the log)\n * Validate the model is fitting what I want (right now I have an elbow effect as I train more) -- should there be backprop through the batch_losses? I guess not? Check the paper and figure out what to do.\n * Add back in the bigger model that has multiple coupling layers", "_____no_output_____" ], [ "# 2020-04-15\n\n* Somehow I suspect that the batch loss is not getting optimized (the var parameter in the batch norm function). When I set the other loss components to zero, I see that hte batch loss is not really getting smaller -- should it?\n\n loss \tcoupling_check_even_1c_loss \ts_losses_loss \tbatch_losses_loss\n 0 \t146.227879 \t0.0 \t0.0 \t146.227879\n 1 \t131.294226 \t0.0 \t0.0 \t131.294226\n 2 \t135.579913 \t0.0 \t0.0 \t135.579913\n 3 \t127.908073 \t0.0 \t0.0 \t127.908073\n 4 \t130.301921 \t0.0 \t0.0 \t130.301921\n 5 \t139.414369 \t0.0 \t0.0 \t139.414369\n 6 \t129.732767 \t0.0 \t0.0 \t129.732767\n 7 \t127.321448 \t0.0 \t0.0 \t127.321448\n 8 \t130.812973 \t0.0 \t0.0 \t130.812973\n 9 \t136.737979 \t0.0 \t0.0 \t136.737979\n 10 \t135.001893 \t0.0 \t0.0 \t135.001893\n 11 \t140.181680 \t0.0 \t0.0 \t140.181680\n 12 \t133.053322 \t0.0 \t0.0 \t133.053322\n 13 \t132.912917 \t0.0 \t0.0 \t132.912917\n 14 \t122.261415 \t0.0 \t0.0 \t122.261415\n 15 \t139.447081 \t0.0 \t0.0 \t139.447081\n 16 \t134.216364 \t0.0 \t0.0 \t134.216364\n 17 \t133.567210 \t0.0 \t0.0 \t133.567210\n 18 \t131.333447 \t0.0 \t0.0 \t131.333447\n 19 \t133.022141 \t0.0 \t0.0 \t133.022141\n \n* **IDEA:** I should probably unit test the batch norm flow layer to make sure that it's doing what I think it should be doing... need to think about how to structure this experiment.\n* **CHECK**: Should `s` loss be negated also? Seems like I need negative log loss, not just log loss...", "_____no_output_____" ], [ "# 2020-04-16\n\n* Forgot that BatchNorm has two components: $\\mu, \\sigma^2$, the mean and variance of the batch, which we scale ($\\hat{x} = \\frac{x-\\mu}{\\sqrt{\\sigma^2 + \\epsilon}}$) AND two learnable parameters: $\\gamma, \\beta$, which are used to scale the output: $y = \\gamma \\hat{x} + \\beta$. The learnable parameters are the only ones that change!\n* Now, how does that work when calculating the determinant? Let's see:\n\n$$\\frac{\\partial}{\\partial y} \\hat{y} = \\frac{\\partial}{\\partial y}\\big[\\gamma * \\frac{x-\\mu}{\\sqrt{\\sigma^2 + \\epsilon} + \\beta}\\big]$$\n$$ = \\frac{\\gamma}{\\sqrt{\\sigma^2 + \\epsilon}}$$\n\n Therefore, I need to include gamma in the determinant calculation in the batch norm layer!\n \n \nOhhhhh... use `keras.layer.add_loss()` function instead of passing the new things over! Not sure how to deal with batch though... 
https://www.tensorflow.org/guide/keras/custom_layers_and_models", "_____no_output_____" ], [ "# 2020-04-17\n\n* Made some progress adding batch norm loss use both `layer.add_loss()` and `layer.add_metric()` so I can view it... BUT I need to upgrade to Tensorflow 2.0. \n* After upgrading to 2.0, might as well start using `tf.keras` directly as that's the recommendation from the site.", "_____no_output_____" ], [ "# 2020-04-20\n\n* Upgraded to Tensorflow 2.1! I hate upgrading things...\n* Converted most of my code over too -- still need to add `layer.add_loss()` and `layer.add_metric()` to the `FlowBatchNorm()` layer though. I did convert it over to the TF2 version, inheriting it and assuming that the fancier features are turned off.", "_____no_output_____" ] ], [ [ "from scipy.stats import norm\n\nfor i in range(-10, 10):\n eps = i / 1000\n l = norm.cdf(0 - eps)\n r = norm.cdf(0 + eps)\n print(eps, '\\t', l - r)", "_____no_output_____" ], [ "a = np.array([[[-1, -2], [-3, -4]], [[1,2], [3, 4]], [[5,6], [7, 8]]]) \nb = np.array([100, 200]).reshape([1, 1, 2])\n\nc = a + b\nc[:, :, :]", "_____no_output_____" ] ] ]
[ "code", "markdown", "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ] ]
d0a3a3a396b58a2f7495fe0520d77b9b311e5026
4,019
ipynb
Jupyter Notebook
How_to_solve_Problem_H.ipynb
ShinjiKatoA16/mcpc2018ucsy
199513f080ef96fda8008164c3219fc84baa01a6
[ "MIT" ]
1
2018-09-15T05:03:54.000Z
2018-09-15T05:03:54.000Z
How_to_solve_Problem_H.ipynb
ShinjiKatoA16/mcpc2018ucsy
199513f080ef96fda8008164c3219fc84baa01a6
[ "MIT" ]
null
null
null
How_to_solve_Problem_H.ipynb
ShinjiKatoA16/mcpc2018ucsy
199513f080ef96fda8008164c3219fc84baa01a6
[ "MIT" ]
null
null
null
26.267974
189
0.47798
[ [ [ "# Problem H: Gold Mining\n\nReply the maximum sum of gold mines, that can be mined with single well.\n\n### Input\n\n- Number of test case\n + N: Number of gold layer\n + N lines (x1, x2, y) start, stop and depth of the gold layer\n \n### Output\n\nfor each test case\n\n- 1/3 of total length of gold layer that can be mined with single well (2 digit below 0)\n\n### Sample Input\n\n```\n3\n6\n10 35 10\n50 65 20\n20 45 30\n60 90 40\n30 55 50\n40 70 60\n6\n10 35 10\n50 65 20\n20 45 30\n60 150 40\n30 55 50\n40 70 60\n3\n-30 10 10\n-40 -25 20\n-30 -10 30\n```\n\n### Sample Output\n\n```\n35.00\n48.33\n25.00\n```", "_____no_output_____" ], [ "## How to solve\n\nIf a well covers some (optimized) gold layers, the well can be moved horizontally or change its angle until it touch 2 or more end points of gold layers, without changing coverage.\n\n- select 2 end points of gold well, then calculate the sum of gold layers, that intersect the line\n\nEach gold layer has 2 end points, so the calculation volume is about $2N \\cdot 2N \\cdot N = 4N^3$", "_____no_output_____" ] ], [ [ "#!/usr/bin/python3\n# -*- coding: utf-8 -*-\n\n'''\nMCPC 2018 Problem-H: Gold Mining\n'''\n\nimport sys\n\n\ndef check_mines(mines, p1, p2):\n '''\n mines: list of tuple (x1, x2, y)\n p1, p2: tuple (x,y)\n return: total length of mines on the p1-p2 line\n '''\n\n total_len = 0\n slope_p1_p2 = (p2[0]-p1[0]) / (p2[1]-p1[1])\n for mine in mines:\n mine_x = (slope_p1_p2 * (mine[2]-p1[1])) + p1[0]\n #print(p1, p2, slope_p1_p2, mine, mine_x, file=sys.stderr)\n if mine_x >= mine[0] and mine_x <= mine[1]:\n total_len += mine[1]-mine[0]\n\n return total_len\n\n\ndef solve(mines):\n points = list()\n for mine in mines:\n points.append((mine[0],mine[2])) # (x1, y)\n points.append((mine[1],mine[2])) # (x2, y)\n\n max_total = 0\n for p1_idx in range(len(points)-1):\n for p2_idx in range(p1_idx, len(points)):\n if points[p1_idx][1] == points[p2_idx][1]: continue # same y\n total_len = check_mines(mines, points[p1_idx], points[p2_idx])\n max_total = max(max_total, total_len)\n\n print('{:.2f}'.format(round(max_total/3,2)))\n\n\nnum_tc = int(sys.stdin.readline())\nfor _ in range(num_tc):\n num_mine = int(sys.stdin.readline())\n mines = list()\n for m in range(num_mine):\n x1, x2, y = map(int, sys.stdin.readline().split())\n mines.append((x1, x2, y))\n\n solve(mines)\n", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ] ]
d0a3a9049fa62ceaeb83ea8e0552e989839112cb
94,795
ipynb
Jupyter Notebook
examples/iris/custom-statistic.ipynb
RETURN-project/eucp
926a53271d25910b4e3f85db3c152b6f8c04b08b
[ "Apache-2.0" ]
null
null
null
examples/iris/custom-statistic.ipynb
RETURN-project/eucp
926a53271d25910b4e3f85db3c152b6f8c04b08b
[ "Apache-2.0" ]
1
2020-08-12T10:09:14.000Z
2020-08-12T14:25:53.000Z
examples/iris/custom-statistic.ipynb
RETURN-project/eucp
926a53271d25910b4e3f85db3c152b6f8c04b08b
[ "Apache-2.0" ]
2
2019-03-06T15:41:12.000Z
2019-10-09T20:06:15.000Z
179.196597
75,936
0.872968
[ [ [ "# Calculating a custom statistic", "_____no_output_____" ], [ "This example shows how to define and use a custom `iris.analysis.Aggregator`, that provides a new statistical operator for\nuse with cube aggregation functions such as `~iris.cube.Cube.collapsed`, `~iris.cube.Cube.aggregated_by` or `~iris.cube.Cube.rolling_window`.\n\nIn this case, we have a 240-year sequence of yearly average surface temperature over North America, and we want to calculate in how many years these exceed a certain temperature over a spell of 5 years or more.", "_____no_output_____" ], [ "Define a function to perform the custom statistical operation.\n\nNote: in order to meet the requirements of `iris.analysis.Aggregator`, it must do the calculation over an arbitrary (given) data axis.", "_____no_output_____" ], [ "A function defined in a notebook will have to be defined in a single cell. Splitting it across multiple cells makes it separate code blocks, not one function. \nNotebooks are thus not designed to write a lot of functions. If you do happen to need to do this, consider creating a separate Python module and importing the functions from there.", "_____no_output_____" ] ], [ [ "import numpy as np\nfrom iris.util import rolling_window\ndef count_spells(data, threshold, axis, spell_length):\n \"\"\"\n Function to calculate the number of points in a sequence where the value\n has exceeded a threshold value for at least a certain number of timepoints.\n\n Generalised to operate on multiple time sequences arranged on a specific\n axis of a multidimensional array.\n\n Args:\n\n * data (array):\n raw data to be compared with value threshold.\n\n * threshold (float):\n threshold point for 'significant' datapoints.\n\n * axis (int):\n number of the array dimension mapping the time sequences.\n (Can also be negative, e.g. 
'-1' means last dimension)\n\n * spell_length (int):\n number of consecutive times at which value > threshold to \"count\".\n\n \"\"\"\n if axis < 0:\n # just cope with negative axis numbers\n axis += data.ndim\n \n\n # Threshold the data to find the 'significant' points.\n data_hits = data > threshold\n # Make an array with data values \"windowed\" along the time axis.\n hit_windows = rolling_window(data_hits, window=spell_length, axis=axis)\n # Find the windows \"full of True-s\" (along the added 'window axis').\n full_windows = np.all(hit_windows, axis=axis+1)\n # Count points fulfilling the condition (along the time axis).\n spell_point_counts = np.sum(full_windows, axis=axis, dtype=int)\n return spell_point_counts", "_____no_output_____" ] ], [ [ "Load the whole time-sequence as a single cube.", "_____no_output_____" ] ], [ [ "import iris\nfile_path = iris.sample_data_path('E1_north_america.nc')\ncube = iris.load_cube(file_path)\ncube", "_____no_output_____" ] ], [ [ "Make an aggregator from the user function.", "_____no_output_____" ] ], [ [ "from iris.analysis import Aggregator\nSPELL_COUNT = Aggregator('spell_count',\n count_spells,\n units_func=lambda units: 1)", "_____no_output_____" ] ], [ [ "Define the parameters of the test.", "_____no_output_____" ] ], [ [ "threshold_temperature = 280.0\nspell_years = 5", "_____no_output_____" ] ], [ [ "Calculate the statistic.", "_____no_output_____" ] ], [ [ "warm_periods = cube.collapsed('time', SPELL_COUNT,\n threshold=threshold_temperature,\n spell_length=spell_years)\nwarm_periods.rename('Number of 5-year warm spells in 240 years')\nwarm_periods", "_____no_output_____" ] ], [ [ "Plot the results.", "_____no_output_____" ] ], [ [ "%matplotlib inline\nimport iris.quickplot as qplt\nimport matplotlib.pyplot as plt\nfrom matplotlib import rcParams\n\nrcParams['figure.figsize'] = [12, 8]\n\nqplt.contourf(warm_periods, cmap='RdYlBu_r')\nplt.gca().coastlines();", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
d0a3bb292bdb4b292650b4b94185e69ea2f78082
384,318
ipynb
Jupyter Notebook
plot_maps.ipynb
KatharinaGruber/windpower_GWA
6d4eddc48f37cb66ac33ebab431b9a223366d4e1
[ "MIT" ]
4
2020-03-30T13:17:11.000Z
2021-02-21T12:57:48.000Z
plot_maps.ipynb
KatharinaGruber/windpower_GWA
6d4eddc48f37cb66ac33ebab431b9a223366d4e1
[ "MIT" ]
null
null
null
plot_maps.ipynb
KatharinaGruber/windpower_GWA
6d4eddc48f37cb66ac33ebab431b9a223366d4e1
[ "MIT" ]
1
2020-11-19T23:50:21.000Z
2020-11-19T23:50:21.000Z
861.699552
114,788
0.954041
[ [ [ "import pandas as pd\nimport geopandas\nimport glob\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport seaborn\nimport shapefile as shp\nfrom paths import *\nfrom refuelplot import *\nsetup()", "_____no_output_____" ], [ "wpNZ = pd.read_csv(data_path + \"/NZ/windparks_NZ.csv\", delimiter=';')\nwpBRA = pd.read_csv(data_path + '/BRA/turbine_data.csv',index_col=0)\nwpUSA = pd.read_csv(data_path + '/USA/uswtdb_v2_3_20200109.csv')\n# remove Guam\nwpUSA = wpUSA[wpUSA.t_state!='GU']\nwpZAF = pd.read_csv(data_path + '/ZAF/windparks_ZAF.csv')", "_____no_output_____" ], [ "shpBRA = geopandas.read_file(data_path + '/country_shapefiles/BRA/BRA_adm1.shp')\nshpNZ = geopandas.read_file(data_path + '/country_shapefiles/NZ/CON2017_HD_Clipped.shp')\nshpUSA = geopandas.read_file(data_path + '/country_shapefiles/USA/cb_2018_us_state_500k.shp')\nshpZAF = geopandas.read_file(data_path + '/country_shapefiles/ZAF/zaf_admbnda_adm1_2016SADB_OCHA.shp')", "_____no_output_____" ] ], [ [ "plot windparks: either all with opacity or aggregate to windparks and maybe use size as capacity indicator?", "_____no_output_____" ] ], [ [ "fig, ax = plt.subplots(figsize = (9,7))\nax.set_xlim(-180,-65)\nax.set_ylim(20,75)\nshpUSA.plot(color=COLORS[4],ax=ax)\nplt.plot(wpUSA.xlong,wpUSA.ylat,'o',alpha=0.1,markersize=2)", "_____no_output_____" ], [ "import xarray as xr\nfrom matplotlib.patches import Rectangle", "_____no_output_____" ], [ "NZera5 = xr.open_dataset(era_path + '/NZ/era5_wind_NZ_198701.nc')", "_____no_output_____" ], [ "NZmerra2 = xr.open_dataset(mer_path + '/NZ/merra2_wind_NZ_198701.nc')", "_____no_output_____" ], [ "def cell_coords(lon,lat):\n diflat = NZera5.latitude.values - lat\n diflon = NZera5.longitude.values - lon\n clat = NZera5.latitude.values[abs(diflat)==min(abs(diflat))][0]\n clon = NZera5.longitude.values[abs(diflon)==min(abs(diflon))][0]\n return((clon-0.125,clat-0.125))", "_____no_output_____" ], [ "def cell_coords_mer(lon,lat):\n diflat = NZmerra2.lat.values - lat\n diflon = NZmerra2.lon.values - lon\n clat = NZmerra2.lat.values[abs(diflat)==min(abs(diflat))][0]\n clon = NZmerra2.lon.values[abs(diflon)==min(abs(diflon))][0]\n return((clon-0.3125,clat-0.25))", "_____no_output_____" ], [ "shpNZ.to_crs({'init': 'epsg:4326'}).plot(color=COLORS[3]).set_xlim(165,180)\nplt.plot(wpNZ.Longitude,wpNZ.Latitude,'o',markersize=4)\n\nax = plt.gca()\nrect = matplotlib.patches.Rectangle(xy=cell_coords(wpNZ.Longitude[0],wpNZ.Latitude[0]),width= 0.25,height=0.25,alpha=0.7,color=COLORS[1])\nax.add_patch(rect)", "_____no_output_____" ], [ "shpNZ.to_crs({'init': 'epsg:4326'}).plot(color=COLORS[3],alpha=0.5).set_xlim(165,180)\nplt.plot(wpNZ.Longitude,wpNZ.Latitude,'o',markersize=4)\n\nax = plt.gca()\nfor i in range(len(wpNZ)):\n rect = matplotlib.patches.Rectangle(xy=cell_coords_mer(wpNZ.Longitude[i],wpNZ.Latitude[i]),width= 0.625,height=0.5,alpha=0.7,color=COLORS[1])\n ax.add_patch(rect)\nplt.savefig(results_path + '/plots/syssize_NZ.png')", "_____no_output_____" ], [ "shpNZ.to_crs({'init': 'epsg:4326'}).plot(color=COLORS[4]).set_xlim(165,180)\nplt.plot(wpNZ.Longitude,wpNZ.Latitude,'o',markersize=4)", "_____no_output_____" ], [ "shpBRA.plot(color=COLORS[4])\nplt.plot(wpBRA.lon,wpBRA.lat,'o',alpha=0.1,markersize=2)", "_____no_output_____" ], [ "shpZAF.plot(color=COLORS[4])\nplt.plot(wpZAF.Longitude,wpZAF.Latitude,'o',markersize=4)", "_____no_output_____" ], [ "fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2,figsize=(10,10),gridspec_kw = {'wspace':0.15, 
'hspace':0.15})\n\nshpBRA.plot(color=COLORS[4],ax=ax1)\nax1.set_xlim(-75,-30)\nax1.set_ylim(-35,10)\nax1.plot(wpBRA.groupby('name').mean().lon,\n wpBRA.groupby('name').mean().lat,'o',alpha=0.1,markersize=2)\nax1.set_title('Brazil')\n\nshpNZ.to_crs({'init': 'epsg:4326'}).plot(color=COLORS[4],ax=ax2).set_xlim(165,180)\nax2.set_xlim(165,179)\nax2.set_ylim(-48,-34)\nax2.plot(wpNZ.Longitude,wpNZ.Latitude,'o',markersize=2)\nax2.set_title('New Zealand')\n\n#ax3.set_xlim(-180,-65)\n#ax3.set_ylim(-20,95)\nax3.set_xlim(-125,-65)\nax3.set_ylim(5,65)\nshpUSA.plot(color=COLORS[4],ax=ax3)\n#ax3.plot(wpUSA.xlong,wpUSA.ylat,'o',alpha=0.1,markersize=2)\nax3.plot(wpUSA.groupby('p_name').mean().xlong,\n wpUSA.groupby('p_name').mean().ylat,'o',alpha=0.1,markersize=2)\nax3.set_title('USA')\n\nshpZAF.plot(color=COLORS[4],ax=ax4)\nax4.set_xlim(16,33)\nax4.set_ylim(-37,-20)\nax4.plot(wpZAF.Longitude,wpZAF.Latitude,'o',markersize=2)\nax4.set_title('South Africa')\nplt.savefig(results_path + '/map_windparks.png')", "_____no_output_____" ], [ "fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2,figsize=(10,10),gridspec_kw = {'wspace':0.1, 'hspace':0.1})\n\nshpBRA.plot(color=COLORS[4],ax=ax1)\n#ax1.plot(wpBRA.lon,wpBRA.lat,'o',alpha=0.1,markersize=2)\nax1.plot(wpBRA.groupby('name').mean().lon,\n wpBRA.groupby('name').mean().lat,'o',alpha=0.1,markersize=2)\nax1.set_title('Brazil')\n\nshpNZ.to_crs({'init': 'epsg:4326'}).plot(color=COLORS[4],ax=ax2).set_xlim(165,180)\nax2.plot(wpNZ.Longitude,wpNZ.Latitude,'o',markersize=2)\nax2.set_title('New Zealand')\n\nax3.set_xlim(-180,-65)\nax3.set_ylim(20,75)\n#ax3.set_ylim(0,87)\nshpUSA.plot(color=COLORS[4],ax=ax3)\n#ax3.plot(wpUSA.xlong,wpUSA.ylat,'o',alpha=0.1,markersize=2)\nax3.plot(wpUSA.groupby('p_name').mean().xlong,\n wpUSA.groupby('p_name').mean().ylat,'o',alpha=0.1,markersize=2)\nax3.set_title('USA')\n\nshpZAF.plot(color=COLORS[4],ax=ax4)\nax4.plot(wpZAF.Longitude,wpZAF.Latitude,'o',markersize=2)\nax4.set_title('South Africa')\nplt.savefig(results_path + '/map_windparks.png')", "_____no_output_____" ] ] ]
[ "code", "markdown", "code" ]
[ [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a3c8a1b346bf488b59c07c296c47f7936933c8
34,967
ipynb
Jupyter Notebook
notebooks/atlas-demo-coffea.ipynb
oshadura/opendata-higgs-discovery
47b4ac787a69b9580fc1f72948d3af0bdc20759b
[ "MIT" ]
null
null
null
notebooks/atlas-demo-coffea.ipynb
oshadura/opendata-higgs-discovery
47b4ac787a69b9580fc1f72948d3af0bdc20759b
[ "MIT" ]
null
null
null
notebooks/atlas-demo-coffea.ipynb
oshadura/opendata-higgs-discovery
47b4ac787a69b9580fc1f72948d3af0bdc20759b
[ "MIT" ]
null
null
null
117.734007
26,296
0.860983
[ [ [ "# Test Coffea\n\nThis will test Coffea to see if we can figure out how to use it with our code.\n\nFirst are the includes from coffea. This is based on the [example written by Ben](https://github.com/CoffeaTeam/coffea/blob/master/binder/servicex/ATLAS/LocalExample.ipynb).", "_____no_output_____" ] ], [ [ "from servicex import ServiceXDataset\r\nfrom coffea.processor.servicex import DataSource, Analysis\r\nfrom coffea.processor.servicex import LocalExecutor \r\n\r\nimport matplotlib.pyplot as plt\r\n\r\nfrom coffea import hist, processor\r\nfrom IPython.display import display, update_display, HTML", "_____no_output_____" ] ], [ [ "And imports connected with running servicex.", "_____no_output_____" ] ], [ [ "from func_adl import ObjectStream\r\nfrom func_adl_servicex import ServiceXSourceUpROOT\r\nfrom hist import Hist\r\nimport mplhep as mpl\r\nimport awkward as ak\r\n\r\nfrom utils import files", "_____no_output_____" ] ], [ [ "Methods copied to help us get all leptons from the source files", "_____no_output_____" ] ], [ [ "def apply_event_cuts (source: ObjectStream) -> ObjectStream:\r\n '''Event level cuts for the analysis. Keep from sending data that we aren't going to need at all in the end.\r\n '''\r\n return (source\r\n .Where(lambda e: e.trigE or e.trigM))\r\ndef good_leptons(source: ObjectStream) -> ObjectStream:\r\n '''Select out all good leptons from each event. Return their pt, eta, phi, and E, and other\r\n things needed downstream.\r\n\r\n Because uproot doesn't tie toegher the objects, we can't do any cuts at this point.\r\n '''\r\n return source.Select(lambda e:\r\n {\r\n 'lep_pt': e.lep_pt,\r\n 'lep_eta': e.lep_eta,\r\n 'lep_phi': e.lep_phi,\r\n 'lep_energy': e.lep_E,\r\n 'lep_charge': e.lep_charge,\r\n 'lep_ptcone30': e.lep_ptcone30,\r\n 'lep_etcone20': e.lep_etcone20,\r\n 'lep_type': e.lep_type,\r\n 'lep_trackd0pvunbiased': e.lep_trackd0pvunbiased,\r\n 'lep_tracksigd0pvunbiased': e.lep_tracksigd0pvunbiased,\r\n 'lep_z0': e.lep_z0,\r\n })", "_____no_output_____" ] ], [ [ "Create the `func_adl` cuts to get the data. 
The dataset we use here doesn't matter, as long as it \"looks\" like all the datasets we are going to be processing.", "_____no_output_____" ] ], [ [ "ds = ServiceXSourceUpROOT('cernopendata://dummy', files['ggH125_ZZ4lep']['treename'], backend_name='open_uproot')\nds.return_qastle = True\nleptons = good_leptons(apply_event_cuts(ds))", "_____no_output_____" ] ], [ [ "The analysis code that will apply the 4 lepton cuts and make the 4 lepton mass plot.", "_____no_output_____" ] ], [ [ "class ATLAS_Higgs_4L(Analysis):\r\n @staticmethod\r\n def process(events):\r\n import awkward as ak\r\n from collections import defaultdict\r\n\r\n sumw = defaultdict(float)\r\n mass_hist = hist.Hist(\r\n \"Events\",\r\n hist.Cat(\"dataset\", \"Dataset\"),\r\n hist.Bin(\"mass\", \"$Z_{ee}$ [GeV]\", 60, 60, 120),\r\n )\r\n\r\n dataset = events.metadata['dataset']\r\n leptons = events.lep\r\n\r\n # We need to look at 4 lepton events only.\r\n cut = (ak.num(leptons) == 4)\r\n\r\n # Form the invar mass, plot.\r\n # diele = electrons[cut][:, 0] + electrons[cut][:, 1]\r\n # diele.mass\r\n dilepton = leptons[:,0] + leptons[:,1]\r\n mass_4l = leptons.mass\r\n\r\n # Fill the histogram\r\n sumw[dataset] += len(events)\r\n print(len(events))\r\n mass_hist.fill(\r\n dataset=dataset,\r\n mass=ak.flatten(mass_4l),\r\n )\r\n \r\n return {\r\n \"sumw\": sumw,\r\n \"mass\": mass_hist\r\n }", "_____no_output_____" ] ], [ [ "Create the data source that we will be running against.", "_____no_output_____" ] ], [ [ "def make_ds(name: str, query: ObjectStream):\n '''Create a ServiceX Datasource for a particular ATLAS Open data file\n '''\n datasets = [ServiceXDataset(files[name]['files'], backend_name='open_uproot')]\n return DataSource(query=query, metadata={'dataset': name}, datasets=datasets)", "_____no_output_____" ] ], [ [ "And run!", "_____no_output_____" ] ], [ [ "analysis = ATLAS_Higgs_4L()\n# TODO: It would be good if datatype was determined automagically (there is enough info)\nexecutor = LocalExecutor()\n#executor = DaskExecutor(client_addr=\"tls://localhost:8786\")\ndatasource = make_ds('ggH125_ZZ4lep', leptons)\n\nasync def run_updates_stream(accumulator_stream):\n global first\n\n count = 0\n async for coffea_info in accumulator_stream:\n count += 1\n print(count, coffea_info)\n return coffea_info\n\n# Why do I need run_updates_stream, why not just await on execute (which fails with async gen can't).\n# Perhaps something from aiostream can help here?\nresult = await run_updates_stream(executor.execute(analysis, datasource))", "164716\n1 {'sumw': defaultdict(<class 'float'>, {'[root://eospublic.cern.ch//eos/opendata/atlas/OutreachDatasets/2020-01-22/4lep/MC/mc_345060.ggH125_ZZ4lep.4lep.root]': 164716.0}), 'mass': <Hist (dataset,mass) instance at 0x7f45db420af0>}\n" ], [ "hist.plot1d(result['mass'])", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ] ]
d0a3ca78593654ba3fcd2bf8be4b3adb213eeb7b
6,755
ipynb
Jupyter Notebook
examples/tutorial.ipynb
TomohikoNakamura/dwtls
a90895ac331080e757afa2193ae23db699c55905
[ "MIT" ]
5
2021-07-16T13:41:00.000Z
2021-08-04T07:51:39.000Z
examples/tutorial.ipynb
TomohikoNakamura/dwtls
a90895ac331080e757afa2193ae23db699c55905
[ "MIT" ]
null
null
null
examples/tutorial.ipynb
TomohikoNakamura/dwtls
a90895ac331080e757afa2193ae23db699c55905
[ "MIT" ]
null
null
null
31.565421
388
0.586825
[ [ [ "# dwtls: Discrete Wavelet Transform LayerS\nThis library provides downsampling (DS) layers using discrete wavelet transforms (DWTs), which we call DWT layers.\nConventional DS layers lack either antialiasing filters and the perfect reconstruction property, so downsampled features are aliased and entire information of input features are not preserved.\nBy contrast, DWT layers have antialiasing filters and the perfect reconstruction property, which enables us to overcome the two problems.\n\nIn this library, the DWT layer and its extensions are implemented as below:\n- DWT layers with fixed wavelets (Haar, CDF22, CDF26, CDF15, and DD4 wavelets)\n- Trainable DWT (TDWT) layers\n- Weight-normalized trainable DWT (WN-TDWT) layers", "_____no_output_____" ], [ "## Install dwtls", "_____no_output_____" ] ], [ [ "!pip install dwtls", "_____no_output_____" ], [ "import torch\nimport dwtls.dwt_layers", "_____no_output_____" ] ], [ [ "## DWT layers with fixed wavelets\nThe DWT layer (including its extensions) is implemeted as a subclass of `torch.nn.Module` provided by PyTorch, so we can easily use it in PyTorch-based scripts. Also, this layer is differentiable.", "_____no_output_____" ] ], [ [ "dwt_layer = dwtls.dwt_layers.DWT(wavelet=\"haar\")", "_____no_output_____" ], [ "feature = torch.normal(0.0, 1.0, size=(1,1,20)).float()\noutput_feature = dwt_layer(feature)\nprint('Input:', feature)\nprint(\"Output:\", output_feature)", "_____no_output_____" ] ], [ [ "## TDWT layer\nThe TDWT layer has trainable wavelets (precisely, predict and update filters of lifting scheme).\n\nFor example, we can define the TDWT layer having a pair of the prediction and update filters initialized with Haar wavelet.", "_____no_output_____" ] ], [ [ "tdwt_layer = dwtls.dwt_layers.MultiStageLWT([\n dict(predict_ksize=3, update_ksize=3, \n requires_grad={\"predict\": True, \"update\": True}, \n initial_values={\"predict\": [0,1,0], \"update\": [0,0.5,0]})\n])", "_____no_output_____" ] ], [ [ "The `tdwt_layer._predict_weight` and `tdwt_layer._update_weight` of this layer are trainable jointly with other DNN components.", "_____no_output_____" ], [ "We show three structures of the trainable DWT layers used in our music source separation paper [1]. \n\n[1] Tomohiko Nakamura, Shihori Kozuka, and Hiroshi Saruwatari, “Time-Domain Audio Source Separation with Neural Networks Based on Multiresolution Analysis,” IEEE/ACM Transactions on Audio, Speech, and Language Processing, vol. 29, pp. 1687–1701, Apr. 2021. 
[pdf](https://doi.org/10.1109/TASLP.2021.3072496), [demo](https://tomohikonakamura.github.io/Tomohiko-Nakamura/demo/MRDLA/)\n", "_____no_output_____" ] ], [ [ "# Type A\ntdwt_layer = dwtls.dwt_layers.MultiStageLWT([\n dict(predict_ksize=3, update_ksize=3, \n requires_grad={\"predict\": True, \"update\": True}, \n initial_values={\"predict\": [0,1,0], \"update\": [0,0.5,0]})\n])", "_____no_output_____" ], [ "# Type B\ntdwt_layer = dwtls.dwt_layers.MultiStageLWT([\n dict(predict_ksize=1, update_ksize=1, \n requires_grad={\"predict\": False, \"update\": False}, \n initial_values={\"predict\": [1], \"update\": [0.5]}),\n dict(predict_ksize=3, update_ksize=3, \n requires_grad={\"predict\": True, \"update\": True}, \n initial_values={\"predict\": [0,0,0], \"update\": [0,0,0]})\n])", "_____no_output_____" ], [ "# Type C\ntdwt_layer = dwtls.dwt_layers.MultiStageLWT([\n dict(predict_ksize=3, update_ksize=3, \n requires_grad={\"predict\": True, \"update\": True}, \n initial_values={\"predict\": [0,1,0], \"update\": [0,0.5,0]}),\n dict(predict_ksize=3, update_ksize=3, \n requires_grad={\"predict\": True, \"update\": True}, \n initial_values={\"predict\": [0,0,0], \"update\": [0,0,0]})\n])", "_____no_output_____" ] ], [ [ "## WN-TDWT layer\nThe TDWT layer can be incorporated into many types of DNNs, but such straightforward extension does not guarantee that it has anti-aliasing filters, while it has the perfect reconstruction property owing to the lifting scheme.\n\nThe WN-TDWT layer is developed to overcome this problem. It has both properties owing to adequate normalization of the prediction and update filter coefficients.", "_____no_output_____" ] ], [ [ "# Type A\ntdwt_layer = dwtls.dwt_layers.WeightNormalizedMultiStageLWT([\n dict(predict_ksize=3, update_ksize=3, \n requires_grad={\"predict\": True, \"update\": True}, \n initial_values={\"predict\": [0,1,0], \"update\": [0,0.5,0]})\n])", "_____no_output_____" ] ], [ [ "The WN-TDWT layer can be used in the same way as the TDWT layer.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
d0a3d4fd1b62a64de052b6913e0b07e3fe368cc3
133,335
ipynb
Jupyter Notebook
visualisation.ipynb
eddyvdaker/Systematic-Mapping-Review-Dataecosystems
06bca21291248ef962be2567b6b722fc076aaa3a
[ "MIT" ]
null
null
null
visualisation.ipynb
eddyvdaker/Systematic-Mapping-Review-Dataecosystems
06bca21291248ef962be2567b6b722fc076aaa3a
[ "MIT" ]
null
null
null
visualisation.ipynb
eddyvdaker/Systematic-Mapping-Review-Dataecosystems
06bca21291248ef962be2567b6b722fc076aaa3a
[ "MIT" ]
null
null
null
427.355769
29,018
0.929141
[ [ [ "# Imports\nimport matplotlib.pyplot as plt\nimport json", "_____no_output_____" ], [ "# Load data from result files\nresults_file = './results/results_5.json'\nsummary_file = './results/summary.json'\n\nresults = json.load(open(results_file))['results']\nsummary = json.load(open(summary_file))\n\n\ndef autolabel(rects, label_pos=0):\n \"\"\"\n Generate labels to show values on top of bar charts\n \n :param rects: <pyplot.object> The current pyplot figure\n :param label_pos: <float> OR <int> The amount of offset compared to the height of the bar\n \"\"\"\n for rect in rects:\n height = rect.get_height()\n plt.text(rect.get_x() + rect.get_width()/2., height + label_pos, f'{int(height)}', ha='center', va='bottom')", "_____no_output_____" ], [ "# Visualization for number of articles per category\ncategories = summary['occurrences per category']\ntotal_results = sum(categories.values())\n\ncategories.pop('Generic', None)\ncategories.pop('Not about data ecosystems', None)\ncategories.pop('Systematic Review', None)\n\ntotal_categorized_results = sum(categories.values())\ntotal_uncategorized_results = total_results - total_categorized_results\n\nprint(f'total: {total_results}\\n' \\\n f'catogorized: {total_categorized_results}\\n' \\\n f'uncategorized: {total_uncategorized_results}\\n')\n\nlabels = list(categories.keys())\nvalues = list(categories.values())\n\n# Pie chart\nplt.pie(values, labels=labels, autopct='%1.1f%%', startangle=230)\nplt.axis('equal')\nplt.show()\n\n# Same data in bar chart form\nfig = plt.bar(range(len(categories)), values, align='center')\nautolabel(fig, -0.8)\nplt.xticks(range(len(categories)), labels, rotation=45, ha='right')\nplt.xlabel('Fields')\nplt.ylabel('Studies published')\nplt.show()\n\n# Same charts but this time with the science fields combined\ncategories_combined = categories\ncategories_combined['Science'] += categories_combined.pop('Biology (science)')\ncategories_combined['Science'] += categories_combined.pop('Neuroscience')\n\nlabels_combined = list(categories_combined.keys())\nvalues_combined = list(categories_combined.values())\n\nplt.pie(values_combined, labels=labels_combined, autopct='%1.1f%%', startangle=90)\nplt.axis('equal')\nplt.show()\n\n# Bar chart with science fields combined\nfig = plt.bar(range(len(categories_combined)), values_combined, align='center')\nautolabel(fig, -0.8)\nplt.xticks(range(len(categories_combined)), labels_combined, rotation=45, ha='right')\nplt.xlabel('Fields')\nplt.ylabel('Studies published')\nplt.show()", "total: 68\ncatogorized: 29\nuncategorized: 39\n\n" ], [ "# Visualization of the number of articles published per year\npublish_years = {}\nfor result in results:\n year = result['publish_date'][0:4]\n if year in publish_years.keys():\n publish_years[year] += 1\n else:\n publish_years.update({year: 1})\n \nkey_list = sorted(list(publish_years.keys()))\nvalue_list = [publish_years[x] for x in key_list]\n\n# It shows a drop in 2018 because the year has just started, this gives\n# a wrong idea of the number of studies about the subject\nfig = plt.bar(range(len(value_list)), value_list, align='center')\nautolabel(fig, -1.5)\nplt.xticks(range(len(key_list)), key_list, rotation=45, ha='right')\nplt.xlabel('Publish year')\nplt.ylabel('Studies published')\nplt.show()\n\n# Plot with 2018 removed from the results\nkey_list = key_list[:-1]\nvalue_list = value_list[:-1]\n\nfig = plt.bar(range(len(value_list)), value_list, align='center')\nautolabel(fig, -1.5)\nplt.xticks(range(len(key_list)), key_list, rotation=45, 
ha='right')\nplt.xlabel('Publish year')\nplt.ylabel('Studies published')\nplt.show()", "_____no_output_____" ], [ "# Show the occurrences of each of the search terms\nsearch_terms = summary['search terms']\n\nlabels = list(search_terms.keys())\nvalues = list(search_terms.values())\n\nfig = plt.bar(range(len(values)), values, align='center')\nautolabel(fig, -3)\nplt.xticks(range(len(labels)), labels, rotation=45, ha='right')\nplt.show()", "_____no_output_____" ], [ "# Check qualitycriteria\nin_title = []\nin_abstract = []\nterm = 'data ecosystem'\n\nfor result in results:\n if term in result['title'].lower():\n in_title.append(result['id'])\n if term in result['abstract'].lower():\n in_abstract.append(result['id'])\n \nprint(f'Results with {term} in title: {in_title}')\nprint(f'Results with {term} in abstract: {in_abstract}')\n\nin_both = [x for x in in_title if x in in_abstract]\nprint(f'\\nResults with {term} in both title and abstract: {in_both}')\n\nin_single = [x for x in in_abstract]\nfor result in in_title:\n in_single.append(result)\n \nin_single = sorted([x for x in in_single if x not in in_both])\nprint(f'\\nResults with {term} only in either title or abstract: {in_single}')", "Results with data ecosystem in title: [0, 1, 2, 3, 4, 6, 7, 9, 10, 12, 13, 14, 15, 16, 17, 18, 19, 23, 25, 31, 50, 54, 56, 58]\nResults with data ecosystem in abstract: [0, 1, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 16, 18, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 51, 52, 53, 54, 55, 57, 59, 60, 61, 62, 63, 64, 65, 66, 67]\n\nResults with data ecosystem in both title and abstract: [0, 1, 3, 6, 7, 9, 10, 12, 13, 14, 16, 18, 23, 25, 54]\n\nResults with data ecosystem only in either title or abstract: [2, 4, 5, 8, 11, 15, 17, 19, 20, 21, 22, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67]\n" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code" ] ]
d0a3da5db9e784d1d2a45e8602b43aed8f4361f0
30,009
ipynb
Jupyter Notebook
notebooks/sprinkler_pgm.ipynb
animesh-007/pyprobml
be2345bbadd6cc6f0d1d3158bdd77d199b4969d2
[ "MIT" ]
2
2021-04-10T18:12:19.000Z
2021-05-11T12:07:40.000Z
notebooks/sprinkler_pgm.ipynb
israrbacha/pyprobml
ddeee70e5aaf04bfa685f8e9492770649c7a8360
[ "MIT" ]
1
2021-04-22T15:46:27.000Z
2021-04-22T15:46:27.000Z
notebooks/sprinkler_pgm.ipynb
shivaditya-meduri/pyprobml
9dbe0c95f4ec061b98bf32fa3ac1deafe2e0c04d
[ "MIT" ]
1
2021-01-17T08:46:00.000Z
2021-01-17T08:46:00.000Z
41.391724
3,940
0.440901
[ [ [ "<a href=\"https://colab.research.google.com/github/probml/pyprobml/blob/master/notebooks/sprinkler_pgm.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "# Directed graphical models\n\nWe illustrate some basic properties of DGMs.", "_____no_output_____" ] ], [ [ "!pip install causalgraphicalmodels\n!pip install pgmpy", "Requirement already satisfied: causalgraphicalmodels in /usr/local/lib/python3.7/dist-packages (0.0.4)\nRequirement already satisfied: graphviz in /usr/local/lib/python3.7/dist-packages (from causalgraphicalmodels) (0.10.1)\nRequirement already satisfied: pandas in /usr/local/lib/python3.7/dist-packages (from causalgraphicalmodels) (1.1.5)\nRequirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from causalgraphicalmodels) (1.19.5)\nRequirement already satisfied: networkx in /usr/local/lib/python3.7/dist-packages (from causalgraphicalmodels) (2.5.1)\nRequirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.7/dist-packages (from pandas->causalgraphicalmodels) (2018.9)\nRequirement already satisfied: python-dateutil>=2.7.3 in /usr/local/lib/python3.7/dist-packages (from pandas->causalgraphicalmodels) (2.8.1)\nRequirement already satisfied: decorator<5,>=4.3 in /usr/local/lib/python3.7/dist-packages (from networkx->causalgraphicalmodels) (4.4.2)\nRequirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.7.3->pandas->causalgraphicalmodels) (1.15.0)\nCollecting pgmpy\n\u001b[?25l Downloading https://files.pythonhosted.org/packages/a3/0e/d9fadbfaa35e010c04d43acd3ae9fbefec98897dd7d61a6b7eb5a8b34072/pgmpy-0.1.14-py3-none-any.whl (331kB)\n\u001b[K |████████████████████████████████| 337kB 8.0MB/s \n\u001b[?25hRequirement already satisfied: networkx in /usr/local/lib/python3.7/dist-packages (from pgmpy) (2.5.1)\nRequirement already satisfied: torch in /usr/local/lib/python3.7/dist-packages (from pgmpy) (1.8.1+cu101)\nRequirement already satisfied: statsmodels in /usr/local/lib/python3.7/dist-packages (from pgmpy) (0.10.2)\nRequirement already satisfied: pandas in /usr/local/lib/python3.7/dist-packages (from pgmpy) (1.1.5)\nRequirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from pgmpy) (1.19.5)\nRequirement already satisfied: joblib in /usr/local/lib/python3.7/dist-packages (from pgmpy) (1.0.1)\nRequirement already satisfied: scikit-learn in /usr/local/lib/python3.7/dist-packages (from pgmpy) (0.22.2.post1)\nRequirement already satisfied: tqdm in /usr/local/lib/python3.7/dist-packages (from pgmpy) (4.41.1)\nRequirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from pgmpy) (1.4.1)\nRequirement already satisfied: pyparsing in /usr/local/lib/python3.7/dist-packages (from pgmpy) (2.4.7)\nRequirement already satisfied: decorator<5,>=4.3 in /usr/local/lib/python3.7/dist-packages (from networkx->pgmpy) (4.4.2)\nRequirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch->pgmpy) (3.7.4.3)\nRequirement already satisfied: patsy>=0.4.0 in /usr/local/lib/python3.7/dist-packages (from statsmodels->pgmpy) (0.5.1)\nRequirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.7/dist-packages (from pandas->pgmpy) (2018.9)\nRequirement already satisfied: python-dateutil>=2.7.3 in /usr/local/lib/python3.7/dist-packages (from pandas->pgmpy) (2.8.1)\nRequirement already satisfied: six in 
/usr/local/lib/python3.7/dist-packages (from patsy>=0.4.0->statsmodels->pgmpy) (1.15.0)\nInstalling collected packages: pgmpy\nSuccessfully installed pgmpy-0.1.14\n" ], [ "from causalgraphicalmodels import CausalGraphicalModel\nimport pgmpy\nimport numpy as np\nimport pandas as pd", "_____no_output_____" ] ], [ [ "# Make the model", "_____no_output_____" ] ], [ [ "sprinkler = CausalGraphicalModel(\n nodes=[\"cloudy\", \"rain\", \"sprinkler\", \"wet\", \"slippery\"],\n edges=[\n (\"cloudy\", \"rain\"), \n (\"cloudy\", \"sprinkler\"), \n (\"rain\", \"wet\"),\n (\"sprinkler\", \"wet\"), \n (\"wet\", \"slippery\")\n ]\n)\n\n", "_____no_output_____" ] ], [ [ "# Draw the model", "_____no_output_____" ] ], [ [ "# draw return a graphviz `dot` object, which jupyter can render\nout = sprinkler.draw()", "_____no_output_____" ], [ "type(out)\n", "_____no_output_____" ], [ "display(out)", "_____no_output_____" ], [ "out.render() ", "_____no_output_____" ] ], [ [ "# Display the factorization", "_____no_output_____" ] ], [ [ "print(sprinkler.get_distribution())", "P(season)P(sprinkler|season)P(rain|season)P(wet|rain,sprinkler)P(slippery|wet)\n" ] ], [ [ "# D-separation", "_____no_output_____" ] ], [ [ "\n# check for d-seperation of two nodes\nsprinkler.is_d_separated(\"slippery\", \"cloudy\", {\"wet\"})", "_____no_output_____" ] ], [ [ "# Extract CI relationships", "_____no_output_____" ] ], [ [ "# get all the conditional independence relationships implied by a CGM\nCI = sprinkler.get_all_independence_relationships()\nprint(CI)", "[('rain', 'sprinkler', {'cloudy'}), ('rain', 'slippery', {'wet'}), ('rain', 'slippery', {'sprinkler', 'wet'}), ('rain', 'slippery', {'cloudy', 'wet'}), ('rain', 'slippery', {'sprinkler', 'cloudy', 'wet'}), ('sprinkler', 'slippery', {'wet'}), ('sprinkler', 'slippery', {'rain', 'wet'}), ('sprinkler', 'slippery', {'cloudy', 'wet'}), ('sprinkler', 'slippery', {'rain', 'cloudy', 'wet'}), ('cloudy', 'slippery', {'wet'}), ('cloudy', 'slippery', {'rain', 'sprinkler'}), ('cloudy', 'slippery', {'rain', 'wet'}), ('cloudy', 'slippery', {'sprinkler', 'wet'}), ('cloudy', 'slippery', {'rain', 'sprinkler', 'wet'}), ('cloudy', 'wet', {'rain', 'sprinkler'}), ('cloudy', 'wet', {'slippery', 'rain', 'sprinkler'})]\n" ], [ "records = []\nfor ci in CI:\n record = (ci[0], ci[1], ', '.join(x for x in ci[2]))\n records.append(record)\n\nprint(records)\ndf = pd.DataFrame(records, columns = ('X', 'Y', 'Z'))\ndisplay(df)", "[('rain', 'sprinkler', 'cloudy'), ('rain', 'slippery', 'wet'), ('rain', 'slippery', 'sprinkler, wet'), ('rain', 'slippery', 'cloudy, wet'), ('rain', 'slippery', 'sprinkler, cloudy, wet'), ('sprinkler', 'slippery', 'wet'), ('sprinkler', 'slippery', 'rain, wet'), ('sprinkler', 'slippery', 'cloudy, wet'), ('sprinkler', 'slippery', 'rain, cloudy, wet'), ('cloudy', 'slippery', 'wet'), ('cloudy', 'slippery', 'rain, sprinkler'), ('cloudy', 'slippery', 'rain, wet'), ('cloudy', 'slippery', 'sprinkler, wet'), ('cloudy', 'slippery', 'rain, sprinkler, wet'), ('cloudy', 'wet', 'rain, sprinkler'), ('cloudy', 'wet', 'slippery, rain, sprinkler')]\n" ], [ "print(df.to_latex(index=False))", "\\begin{tabular}{lll}\n\\toprule\n X & Y & Z \\\\\n\\midrule\n rain & sprinkler & cloudy \\\\\n rain & slippery & wet \\\\\n rain & slippery & sprinkler, wet \\\\\n rain & slippery & cloudy, wet \\\\\n rain & slippery & sprinkler, cloudy, wet \\\\\n sprinkler & slippery & wet \\\\\n sprinkler & slippery & rain, wet \\\\\n sprinkler & slippery & cloudy, wet \\\\\n sprinkler & slippery & rain, cloudy, wet \\\\\n cloudy 
& slippery & wet \\\\\n cloudy & slippery & rain, sprinkler \\\\\n cloudy & slippery & rain, wet \\\\\n cloudy & slippery & sprinkler, wet \\\\\n cloudy & slippery & rain, sprinkler, wet \\\\\n cloudy & wet & rain, sprinkler \\\\\n cloudy & wet & slippery, rain, sprinkler \\\\\n\\bottomrule\n\\end{tabular}\n\n" ] ], [ [ "# Inference", "_____no_output_____" ] ], [ [ "\n\nfrom pgmpy.models import BayesianModel\nfrom pgmpy.factors.discrete import TabularCPD\n\n# Defining the model structure. We can define the network by just passing a list of edges.\nmodel = BayesianModel([('C', 'S'), ('C', 'R'), ('S', 'W'), ('R', 'W'), ('W', 'L')])\n\n# Defining individual CPDs.\ncpd_c = TabularCPD(variable='C', variable_card=2, values=np.reshape([0.5, 0.5],(2,1)))\n\n# In pgmpy the columns are the evidences and rows are the states of the variable.\n \ncpd_s = TabularCPD(variable='S', variable_card=2, \n values=[[0.5, 0.9],\n [0.5, 0.1]],\n evidence=['C'],\n evidence_card=[2])\n\ncpd_r = TabularCPD(variable='R', variable_card=2, \n values=[[0.8, 0.2],\n [0.2, 0.8]],\n evidence=['C'],\n evidence_card=[2])\n\ncpd_w = TabularCPD(variable='W', variable_card=2, \n values=[[1.0, 0.1, 0.1, 0.01],\n [0.0, 0.9, 0.9, 0.99]],\n evidence=['S', 'R'],\n evidence_card=[2, 2])\n\ncpd_l = TabularCPD(variable='L', variable_card=2, \n values=[[0.9, 0.1],\n [0.1, 0.9]],\n evidence=['W'],\n evidence_card=[2])\n\n# Associating the CPDs with the network\nmodel.add_cpds(cpd_c, cpd_s, cpd_r, cpd_w, cpd_l)\n\n# check_model checks for the network structure and CPDs and verifies that the CPDs are correctly \n# defined and sum to 1.\nmodel.check_model()\n\n\n", "_____no_output_____" ], [ "from pgmpy.inference import VariableElimination\ninfer = VariableElimination(model)\n\n# p(R=1)= 0.5*0.2 + 0.5*0.8 = 0.5\nprobs = infer.query(['R']).values\nprint('\\np(R=1) = ', probs[1])\n\n# P(R=1|W=1) = 0.7079\nprobs = infer.query(['R'], evidence={'W': 1}).values\nprint('\\np(R=1|W=1) = ', probs[1])\n\n\n# P(R=1|W=1,S=1) = 0.3204\nprobs = infer.query(['R'], evidence={'W': 1, 'S': 1}).values\nprint('\\np(R=1|W=1,S=1) = ', probs[1])", "Finding Elimination Order: : 100%|██████████| 4/4 [00:00<00:00, 557.40it/s]\nEliminating: W: 100%|██████████| 4/4 [00:00<00:00, 341.96it/s]\nFinding Elimination Order: : 100%|██████████| 3/3 [00:00<00:00, 710.42it/s]\nEliminating: C: 100%|██████████| 3/3 [00:00<00:00, 435.59it/s]\nFinding Elimination Order: : 100%|██████████| 2/2 [00:00<00:00, 754.71it/s]\nEliminating: C: 100%|██████████| 2/2 [00:00<00:00, 517.24it/s]" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
d0a3e66ef730bad1879bcc9a4564ef40afc97bfb
486,683
ipynb
Jupyter Notebook
dfir/threat-hunting-analysis-main/Detecting Nmap Behavior with Bro HTTP Logs.ipynb
C45E/h4cker
f7f9524190cfde74403ebcc70e766549929692b6
[ "MIT" ]
10,980
2018-10-06T12:02:56.000Z
2022-03-31T18:08:32.000Z
dfir/threat-hunting-analysis-main/Detecting Nmap Behavior with Bro HTTP Logs.ipynb
CasparsTools/h4cker
929f23cb959ff77ce508b31a56afa2c1d8e5c3a8
[ "MIT" ]
49
2018-10-05T03:08:06.000Z
2021-11-24T12:24:34.000Z
dfir/threat-hunting-analysis-main/Detecting Nmap Behavior with Bro HTTP Logs.ipynb
CasparsTools/h4cker
929f23cb959ff77ce508b31a56afa2c1d8e5c3a8
[ "MIT" ]
1,971
2018-10-08T00:36:31.000Z
2022-03-31T13:11:23.000Z
148.832722
79,210
0.732538
[ [ [ "import json\nfrom datetime import datetime, timedelta\nimport matplotlib.pylab as plot\nimport matplotlib.pyplot as plt\nfrom matplotlib import dates\nimport pandas as pd\nimport numpy as np\n\nimport matplotlib\nmatplotlib.style.use('ggplot')\n%matplotlib inline", "_____no_output_____" ], [ "# Read data from http bro logs\nwith open(\"http.log\",'r') as infile:\n file_data = infile.read()\n \n# Split file by newlines\nfile_data = file_data.split('\\n')\n\n# Remove comment lines\nhttp_data = []\nfor line in file_data:\n if line[0] is not None and line[0] != \"#\":\n http_data.append(line)", "_____no_output_____" ], [ "# Lets analyze user agents\nuser_agent_analysis = {}\nuser_agent_overall = {}\nfor line in http_data:\n # Extract the timestamp\n timestamp = datetime.fromtimestamp(float(line.split('\\t')[0]))\n # Strip second and microsecond from timestamp\n timestamp = str(timestamp.replace(second=0,microsecond=0))\n \n # Extract the user agent\n user_agent = line.split('\\t')[11]\n \n # Update status code analysis variable\n if user_agent not in user_agent_analysis.keys():\n user_agent_analysis[user_agent] = {timestamp: 1}\n else:\n if timestamp not in user_agent_analysis[user_agent].keys():\n user_agent_analysis[user_agent][timestamp] = 1\n else:\n user_agent_analysis[user_agent][timestamp] += 1\n \n # Update overall user agent count\n if user_agent not in user_agent_overall.keys():\n user_agent_overall[user_agent] = 1\n else:\n user_agent_overall[user_agent] += 1", "_____no_output_____" ], [ "df = pd.DataFrame.from_dict(user_agent_analysis,orient='columns').fillna(0)\ndf", "_____no_output_____" ], [ "#df.plot(figsize=(12,9))\n\nax = df.plot(rot=90,figsize=(12,9))", "_____no_output_____" ], [ "user_agent_analysis2 = user_agent_analysis\nprint(user_agent_analysis2.keys())\nhigh_volume_user_agents = [\n \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.64 Safari/537.36\"\n]\nfor ua in high_volume_user_agents: \n if ua in user_agent_analysis2.keys():\n del user_agent_analysis2[ua]\ndf2 = pd.DataFrame.from_dict(user_agent_analysis2,orient='columns').fillna(0)\ndf2", "['Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:39.0) Gecko/20100101 Firefox/39.0', 'Mozilla/5.0 (compatible; Nmap Scripting Engine; http://nmap.org/book/nse.html)', '-', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.64 Safari/537.36', 'Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:28.0) Gecko/20100101 Firefox/28.0', 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:41.0) Gecko/20100101 Firefox/41.0', 'Wget/1.16.1 (linux-gnu)']\n" ], [ "df2.plot(rot=90,figsize=(12,9))", "_____no_output_____" ], [ "# Lets analyze status codes\nstatus_code_analysis = {}\nstatus_code_overall = {}\nearliest_time = None\nlatest_time = None\nfor line in http_data:\n # Extract the timestamp\n timestamp = datetime.fromtimestamp(float(line.split('\\t')[0]))\n # Strip minute, second and microsecond from timestamp\n #timestamp = str(timestamp.replace(minute=0,second=0,microsecond=0))\n timestamp = str(timestamp.replace(second=0,microsecond=0))\n \n # Extract the status code\n status_code = line.split('\\t')[14]\n \n # Update status code analysis variable\n if status_code not in status_code_analysis.keys():\n status_code_analysis[status_code] = {timestamp: 1}\n else:\n if timestamp not in status_code_analysis[status_code].keys():\n status_code_analysis[status_code][timestamp] = 1\n else:\n status_code_analysis[status_code][timestamp] += 1\n \n # Update overall status code count\n if 
status_code not in status_code_overall.keys():\n status_code_overall[status_code] = 1\n else:\n status_code_overall[status_code] += 1\n \n # Update our earliest and latest time as needed\n if earliest_time is None or timestamp < earliest_time:\n earliest_time = timestamp\n if latest_time is None or timestamp > latest_time:\n latest_time = timestamp", "_____no_output_____" ], [ "# Format data for the plot function\nstatus_label = []\ndata = []\nfor code in sorted(status_code_overall.keys()):\n status_label.append(str(code) + \" (\" + str(status_code_overall[code]) + \")\")\n data.append(status_code_overall[code])\n\nplot.figure(1,figsize=[8,8])\npatches, texts = plot.pie(data, shadow=True, startangle=90)\nplot.legend(patches, status_label,loc=\"best\")\nplot.title('Status Code Distribution')\nplot.axis('equal')\nplot.tight_layout()\nplot.show()", "_____no_output_____" ], [ "# Output the status codes in table form\ndf = pd.DataFrame.from_dict(status_code_analysis,orient='columns').fillna(0)\ndf", "_____no_output_____" ], [ "# Plot the status codes\ndf.plot(rot=90,figsize=(12,9))", "_____no_output_____" ], [ "# Remove the 200 status code and re-plot the status codes\nstatus_code_analysis2 = status_code_analysis\nif '200' in status_code_analysis2.keys():\n del status_code_analysis2['200']\nprint(status_code_analysis2.keys())\ndf2 = pd.DataFrame.from_dict(status_code_analysis2,orient='columns').fillna(0)\ndf2.plot(rot=90, figsize=(12,9))", "['304', '301', '302', '-', '405', '404', '401', '400', '501']\n" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a3e87259fe031de56526ae155a66c0cfc3c287
67,967
ipynb
Jupyter Notebook
DOST_Huffman/DCT_DOST_Huffman_ACC.ipynb
BaiyingLu/Data-Compression-Toolbox
d4fdbb2e09742e01304dee8b0e0bf3afdfce8844
[ "Apache-2.0" ]
3
2020-10-16T00:24:25.000Z
2022-02-10T22:55:11.000Z
DOST_Huffman/DCT_DOST_Huffman_ACC.ipynb
BaiyingLu/Data-Compression-Toolbox
d4fdbb2e09742e01304dee8b0e0bf3afdfce8844
[ "Apache-2.0" ]
2
2021-05-27T00:09:15.000Z
2021-06-11T17:20:38.000Z
DOST_Huffman/DCT_DOST_Huffman_ACC.ipynb
BaiyingLu/Data-Compression-Toolbox
d4fdbb2e09742e01304dee8b0e0bf3afdfce8844
[ "Apache-2.0" ]
1
2021-02-16T16:06:43.000Z
2021-02-16T16:06:43.000Z
119.660211
29,556
0.869878
[ [ [ "# Method4 DCT based DOST + Huffman encoding", "_____no_output_____" ], [ "## Import Libraries", "_____no_output_____" ] ], [ [ "import mne\nimport numpy as np\nfrom scipy.fft import fft,fftshift\nimport matplotlib.pyplot as plt\nfrom scipy.signal import butter, lfilter\nfrom scipy.signal import freqz\nfrom scipy import signal\nfrom scipy.fftpack import fft, dct, idct\nfrom itertools import islice \nimport pandas as pd\nimport os", "_____no_output_____" ] ], [ [ "## Preprocessing", "_____no_output_____" ], [ "### Data loading", "_____no_output_____" ] ], [ [ "acc = pd.read_csv('ACC.csv')\nacc = acc.iloc[1:]\nacc.columns = ['column1','column2','column3']\nnp.savetxt('acc.txt',acc)\n\n\n\nacc_c1 = acc[\"column1\"]\nacc_c2 = acc[\"column2\"]\nacc_c3 = acc[\"column3\"]\nacc_array_c1 = acc_c1.to_numpy() #save the data into an ndarray\nacc_array_c2 = acc_c2.to_numpy()\nacc_array_c3 = acc_c3.to_numpy()\nacc_array_c1.shape\n\nacc_array_c1 = acc_array_c1[0:66000] # Remove the signal in first 3minutes and last 5minutes\nacc_array_c2 = acc_array_c2[0:66000]\nacc_array_c3 = acc_array_c3[0:66000]\n\nsampling_freq = 1/32\nN = acc_array_c1.size\n\nxf = np.linspace(-N*sampling_freq/2, N*sampling_freq/2, N)\n\n\nindex = np.linspace(0, round((N-1)*sampling_freq,4), N)\n\n", "_____no_output_____" ] ], [ [ "### Butterworth Filter to denoising", "_____no_output_____" ] ], [ [ "def butter_bandpass(lowcut, highcut, fs, order=5):\n nyq = 0.5 * fs\n low = lowcut / nyq\n high = highcut / nyq\n b, a = butter(order, [low, high], btype='band')\n return b, a\n\n\ndef butter_bandpass_filter(data, lowcut, highcut, fs, order=5):\n b, a = butter_bandpass(lowcut, highcut, fs, order=order)\n y = lfilter(b, a, data)\n return y", "_____no_output_____" ], [ "from scipy.signal import freqz\nfrom scipy import signal\n# Sample rate and desired cutoff frequencies (in Hz).\nfs = 1000.0\nlowcut = 0.5\nhighcut = 50.0\n\n# Plot the frequency response for a few different orders.\nplt.figure(1)\nplt.clf()\nfor order in [1, 2, 3, 4]:\n b, a = butter_bandpass(lowcut, highcut, fs, order=order)\n w, h = freqz(b, a, worN=2000)\n plt.plot((fs * 0.5 / np.pi) * w, abs(h), label=\"order = %d\" % order)\n\nplt.plot([0, 0.5 * fs], [np.sqrt(0.5), np.sqrt(0.5)],\n '--', label='sqrt(0.5)')\nplt.xlabel('Frequency (Hz)')\nplt.ylabel('Gain')\nplt.grid(True)\nplt.legend(loc='best')", "_____no_output_____" ], [ "y1 = butter_bandpass_filter(acc_array_c1, lowcut, highcut, fs, order=2)\ny2 = butter_bandpass_filter(acc_array_c2, lowcut, highcut, fs, order=2)\ny3 = butter_bandpass_filter(acc_array_c3, lowcut, highcut, fs, order=2)", "_____no_output_____" ], [ "resampled_signal1 = y1\nresampled_signal2 = y2\nresampled_signal3 = y3", "_____no_output_____" ], [ "np.savetxt('processed_acc_col1.txt',resampled_signal1)\nnp.savetxt('processed_acc_col2.txt',resampled_signal2)\nnp.savetxt('processed_acc_col3.txt',resampled_signal3)", "_____no_output_____" ], [ "rounded_signal1 = np.around(resampled_signal1)\nrounded_signal2 = np.around(resampled_signal2)\nrounded_signal3 = np.around(resampled_signal3)", "_____no_output_____" ] ], [ [ "## Transformation --- DCT based DOST ", "_____no_output_____" ] ], [ [ "from scipy.fftpack import fft, dct\naN1 = dct(rounded_signal1, type = 2, norm = 'ortho')\naN2 = dct(rounded_signal2, type = 2, norm = 'ortho')\naN3 = dct(rounded_signal3, type = 2, norm = 'ortho')", "_____no_output_____" ], [ "def return_N(target):\n if target > 1:\n for i in range(1, int(target)):\n if (2 ** i >= target):\n return i-1\n else:\n return 1\n", 
"_____no_output_____" ], [ "from itertools import islice \nsplit_list = [1]\nfor i in range(0,return_N(aN1.size)):\n split_list.append(2 ** i)\ntemp1 = iter(aN1) \nres1 = [list(islice(temp1, 0, ele)) for ele in split_list]\ntemp2 = iter(aN2) \nres2 = [list(islice(temp2, 0, ele)) for ele in split_list]\ntemp3 = iter(aN3) \nres3 = [list(islice(temp3, 0, ele)) for ele in split_list]", "_____no_output_____" ], [ "from scipy.fftpack import fft, dct, idct\ncN_idct1 = [list(idct(res1[0], type = 2, norm = 'ortho' )), list(idct(res1[1], type = 2, norm = 'ortho' ))]\nfor k in range(2,len(res1)):\n cN_idct1.append(list(idct(res1[k], type = 2, norm = 'ortho' )))\ncN_idct2 = [list(idct(res2[0], type = 2, norm = 'ortho' )), list(idct(res2[1], type = 2, norm = 'ortho' ))]\nfor k in range(2,len(res2)):\n cN_idct2.append(list(idct(res2[k], type = 2, norm = 'ortho' )))\ncN_idct3 = [list(idct(res3[0], type = 2, norm = 'ortho' )), list(idct(res3[1], type = 2, norm = 'ortho' ))]\nfor k in range(2,len(res3)):\n cN_idct3.append(list(idct(res3[k], type = 2, norm = 'ortho' )))", "_____no_output_____" ], [ "all_numbers1 = []\nfor i in cN_idct1:\n for j in i:\n all_numbers1.append(j)\nall_numbers2 = []\nfor i in cN_idct2:\n for j in i:\n all_numbers2.append(j)\nall_numbers3 = []\nfor i in cN_idct3:\n for j in i:\n all_numbers3.append(j)", "_____no_output_____" ], [ "all_numbers1 = np.asarray(all_numbers1)\nall_numbers2 = np.asarray(all_numbers2)\nall_numbers3 = np.asarray(all_numbers3)", "_____no_output_____" ], [ "int_cN1 = np.round(all_numbers1,3)\nint_cN2 = np.round(all_numbers2,3)\nint_cN3 = np.round(all_numbers3,3)", "_____no_output_____" ], [ "np.savetxt('int_cN1.txt',int_cN1, fmt='%.3f')\nnp.savetxt('int_cN2.txt',int_cN2, fmt='%.3f')\nnp.savetxt('int_cN3.txt',int_cN3,fmt='%.3f')", "_____no_output_____" ] ], [ [ "## Huffman Coding", "_____no_output_____" ], [ "### INSTRUCTION ON HOW TO COMPRESS THE DATA BY HUFFMAN CODING\n\n\n(I used the package \"tcmpr 0.2\" and \"pyhuff 1.1\". These two packages provided the same compression result. So here, we just use \"tcmpr 0.2\")\n\n1. Open your termial or git bash, enter \"pip install tcmpr\" to install the \"tcmpr 0.2\" package\n2. Enter the directory which include the file you want to compress OR copy the path of the file you want to compress\n3. Enter \"tcmpr filename.txt\" / \"tcmpr filepath\" to compress the file\n4. 
Find the compressed file in the same directory of the original file", "_____no_output_____" ] ], [ [ "# Do Huffman encoding based on the instruction above \n# or run this trunk if this scratch locates in the same directory with the signal you want to encode\nos.system('tcmpr int_cN1.txt')\nos.system('tcmpr int_cN2.txt')\nos.system('tcmpr int_cN3.txt')", "_____no_output_____" ] ], [ [ "## Reconstruction", "_____no_output_____" ] ], [ [ "os.system('tcmpr -d int_cN1.txt.huffman')\nos.system('tcmpr -d int_cN2.txt.huffman')\nos.system('tcmpr -d int_cN3.txt.huffman')\ndecoded_data1 = np.loadtxt(fname = \"int_cN1.txt\")\ndecoded_data2 = np.loadtxt(fname = \"int_cN2.txt\")\ndecoded_data3 = np.loadtxt(fname = \"int_cN3.txt\")", "_____no_output_____" ], [ "recover_signal1 = decoded_data1\nrecover_signal2 = decoded_data2\nrecover_signal3 = decoded_data3\nrecover_signal1 = list(recover_signal1)\nrecover_signal2 = list(recover_signal2)\nrecover_signal3 = list(recover_signal3)\nlen(recover_signal1)", "_____no_output_____" ], [ "split_list = [1]\nfor i in range(0,return_N(len(recover_signal1))+1):\n split_list.append(2 ** i)\ntemp_recovered1 = iter(recover_signal1) \nres_recovered1 = [list(islice(temp_recovered1, 0, ele)) for ele in split_list]\ntemp_recovered2 = iter(recover_signal2) \nres_recovered2 = [list(islice(temp_recovered2, 0, ele)) for ele in split_list] \ntemp_recovered3 = iter(recover_signal3) \nres_recovered3 = [list(islice(temp_recovered3, 0, ele)) for ele in split_list] ", "_____no_output_____" ], [ "recover_dct1 = [list(dct(res_recovered1[0], type = 2, norm = 'ortho' )), list(dct(res_recovered1[1], type = 2, norm = 'ortho' ))]\nfor k in range(2,len(res_recovered1)):\n recover_dct1.append(list(dct(res_recovered1[k], type = 2, norm = 'ortho' )))\nrecover_dct2 = [list(dct(res_recovered2[0], type = 2, norm = 'ortho' )), list(dct(res_recovered2[1], type = 2, norm = 'ortho' ))]\nfor k in range(2,len(res_recovered2)):\n recover_dct2.append(list(dct(res_recovered2[k], type = 2, norm = 'ortho' )))\nrecover_dct3 = [list(dct(res_recovered3[0], type = 2, norm = 'ortho' )), list(dct(res_recovered3[1], type = 2, norm = 'ortho' ))]\nfor k in range(2,len(res_recovered3)):\n recover_dct3.append(list(dct(res_recovered3[k], type = 2, norm = 'ortho' )))", "_____no_output_____" ], [ "all_recover1 = []\nfor i in recover_dct1:\n for j in i:\n all_recover1.append(j)\nall_recover2 = []\nfor i in recover_dct2:\n for j in i:\n all_recover2.append(j)\nall_recover3 = []\nfor i in recover_dct3:\n for j in i:\n all_recover3.append(j)\n", "_____no_output_____" ], [ "aN_recover1 = idct(all_recover1, type = 2, norm = 'ortho')\naN_recover2 = idct(all_recover2, type = 2, norm = 'ortho')\naN_recover3 = idct(all_recover3, type = 2, norm = 'ortho')", "_____no_output_____" ], [ "plt.plot(signal.resample(y1, len(aN_recover1))[31000:31100], label = \"origianl\")\nplt.plot(aN_recover1[31000:31100], label = \"recovered\")\nplt.legend()\nplt.title('ACC')\nplt.grid()\nplt.show()", "_____no_output_____" ], [ "#resampled_signal_shorter = resampled_signal1[:len(aN_recover1)] \nresampled_signal_shorter1 = signal.resample(y1, len(aN_recover1))\nfrom sklearn.metrics import mean_squared_error\nfrom math import sqrt\n\ndef PRD_calculation(original_signal, compressed_signal):\n PRD = sqrt(sum((original_signal-compressed_signal)**2)/(sum(original_signal**2)))\n return PRD\n\nPRD = PRD_calculation(resampled_signal_shorter1, aN_recover1)\nprint(\"The PRD is {}%\".format(round(PRD*100,3)))", "The PRD is 3.16%\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a3ed150320ba75b8159144d6be867678f2ab17
104,890
ipynb
Jupyter Notebook
Regression/Linear Models/ElasticNet_RobustScaler.ipynb
surya2365/ds-seed
74ef58479333fed95522f7b691f1209f7d70fc95
[ "Apache-2.0" ]
null
null
null
Regression/Linear Models/ElasticNet_RobustScaler.ipynb
surya2365/ds-seed
74ef58479333fed95522f7b691f1209f7d70fc95
[ "Apache-2.0" ]
null
null
null
Regression/Linear Models/ElasticNet_RobustScaler.ipynb
surya2365/ds-seed
74ef58479333fed95522f7b691f1209f7d70fc95
[ "Apache-2.0" ]
null
null
null
131.276596
48,845
0.855391
[ [ [ "# ElasticNet with RobustScaler\n", "_____no_output_____" ], [ "**This Code template is for the regression analysis using a ElasticNet Regression and the feature rescaling technique RobustScaler in a pipeline**", "_____no_output_____" ], [ "### Required Packages", "_____no_output_____" ] ], [ [ "import warnings as wr\r\nimport numpy as np \r\nimport pandas as pd \r\nimport seaborn as sns\r\nimport matplotlib.pyplot as plt \r\nfrom sklearn.preprocessing import LabelEncoder\r\nfrom sklearn.pipeline import Pipeline\r\nfrom sklearn.preprocessing import RobustScaler\r\nfrom sklearn.model_selection import train_test_split \r\nfrom sklearn.linear_model import ElasticNet\r\nfrom sklearn.metrics import mean_squared_error, r2_score,mean_absolute_error\r\nwr.filterwarnings('ignore')", "_____no_output_____" ] ], [ [ "### Initialization\n\nFilepath of CSV file", "_____no_output_____" ] ], [ [ "#filepath\r\nfile_path= \"\"", "_____no_output_____" ] ], [ [ "List of features which are required for model training .", "_____no_output_____" ] ], [ [ "#x_values\r\nfeatures=[]", "_____no_output_____" ] ], [ [ "Target feature for prediction.", "_____no_output_____" ] ], [ [ "#y_value\r\ntarget=''", "_____no_output_____" ] ], [ [ "### Data Fetching\n\nPandas is an open-source, BSD-licensed library providing high-performance, easy-to-use data manipulation and data analysis tools.\n\nWe will use panda's library to read the CSV file using its storage path.And we use the head function to display the initial row or entry.", "_____no_output_____" ] ], [ [ "df=pd.read_csv(file_path) #reading file\r\ndf.head()#displaying initial entries", "_____no_output_____" ], [ "print('Number of rows are :',df.shape[0], ',and number of columns are :',df.shape[1])", "Number of rows are : 1338 ,and number of columns are : 7\n" ], [ "df.columns.tolist()\r\n", "_____no_output_____" ] ], [ [ "### Data Preprocessing\n\nSince the majority of the machine learning models in the Sklearn library doesn't handle string category data and Null value, we have to explicitly remove or replace null values. The below snippet have functions, which removes the null value if any exists. And convert the string classes data in the datasets by encoding them to integer classes.\n", "_____no_output_____" ] ], [ [ "def NullClearner(df):\r\n if(isinstance(df, pd.Series) and (df.dtype in [\"float64\",\"int64\"])):\r\n df.fillna(df.mean(),inplace=True)\r\n return df\r\n elif(isinstance(df, pd.Series)):\r\n df.fillna(df.mode()[0],inplace=True)\r\n return df\r\n else:return df\r\ndef EncodeX(df):\r\n return pd.get_dummies(df)", "_____no_output_____" ] ], [ [ "#### Correlation Map\n\nIn order to check the correlation between the features, we will plot a correlation matrix. It is effective in summarizing a large amount of data where the goal is to see patterns.", "_____no_output_____" ] ], [ [ "plt.figure(figsize = (15, 10))\r\ncorr = df.corr()\r\nmask = np.triu(np.ones_like(corr, dtype = bool))\r\nsns.heatmap(corr, mask = mask, linewidths = 1, annot = True, fmt = \".2f\")\r\nplt.show()", "_____no_output_____" ], [ "correlation = df[df.columns[1:]].corr()[target][:]\r\ncorrelation", "_____no_output_____" ] ], [ [ "### Feature Selections\n\nIt is the process of reducing the number of input variables when developing a predictive model. 
Used to reduce the number of input variables to both reduce the computational cost of modelling and, in some cases, to improve the performance of the model.\n\nWe will assign all the required input features to X and target/outcome to Y.", "_____no_output_____" ] ], [ [ "#spliting data into X(features) and Y(Target)\r\n\r\nX=df[features]\r\nY=df[target] ", "_____no_output_____" ] ], [ [ "Calling preprocessing functions on the feature and target set.", "_____no_output_____" ] ], [ [ "x=X.columns.to_list()\r\nfor i in x:\r\n X[i]=NullClearner(X[i])\r\nX=EncodeX(X)\r\nY=NullClearner(Y)\r\nX.head()", "_____no_output_____" ] ], [ [ "### Data Splitting\n\nThe train-test split is a procedure for evaluating the performance of an algorithm. The procedure involves taking a dataset and dividing it into two subsets. The first subset is utilized to fit/train the model. The second subset is used for prediction. The main motive is to estimate the performance of the model on new data.", "_____no_output_____" ] ], [ [ "#we can choose randomstate and test_size as over requerment\r\nX_train, X_test, y_train, y_test = train_test_split(X, Y, test_size = 0.2, random_state = 1) #performing datasplitting", "_____no_output_____" ] ], [ [ "## Model\r\n\r\n\r\n### Data Scaling\r\n**Used RobustScaler**\r\n\r\n* It scales features using statistics that are robust to outliers. \r\n* This method removes the median and scales the data in the range between 1st quartile and 3rd quartile. i.e., in between 25th quantile and 75th quantile range. This range is also called an Interquartile range.\r\n\r\n### ElasticNet\r\n\r\nElastic Net first emerged as a result of critique on Lasso, whose variable selection can be too dependent on data and thus unstable. The solution is to combine the penalties of Ridge regression and Lasso to get the best of both worlds.\r\n\r\n**Features of ElasticNet Regression-**\r\n* It combines the L1 and L2 approaches.\r\n* It performs a more efficient regularization process.\r\n* It has two parameters to be set, λ and α.\r\n\r\n #### Model Tuning Parameters\r\n\r\n 1. alpha : float, default=1.0\r\n \r\n> Constant that multiplies the penalty terms. Defaults to 1.0. See the notes for the exact mathematical meaning of this parameter. alpha = 0 is equivalent to an ordinary least square, solved by the LinearRegression object. For numerical reasons, using alpha = 0 with the Lasso object is not advised. Given this, you should use the LinearRegression object.\r\n\r\n\r\n 2. l1_ratio : float, default=0.5\r\n> The ElasticNet mixing parameter, with 0 <= l1_ratio <= 1. For l1_ratio = 0 the penalty is an L2 penalty. For l1_ratio = 1 it is an L1 penalty. For 0 < l1_ratio < 1, the penalty is a combination of L1 and L2.\r\n\r\n 3. normalize : bool, default=False\r\n>This parameter is ignored when fit_intercept is set to False. If True, the regressors X will be normalized before regression by subtracting the mean and dividing by the l2-norm. If you wish to standardize, please use StandardScaler before calling fit on an estimator with normalize=False.\r\n\r\n 4. max_iter : int, default=1000\r\n>The maximum number of iterations.\r\n\r\n 5. tol : float, default=1e-4\r\n>The tolerance for the optimization: if the updates are smaller than tol, the optimization code checks the dual gap for optimality and continues until it is smaller than tol.\r\n\r\n 6. 
selection : {‘cyclic’, ‘random’}, default=’cyclic’\r\n>If set to ‘random’, a random coefficient is updated every iteration rather than looping over features sequentially by default. This (setting to ‘random’) often leads to significantly faster convergence especially when tol is higher than 1e-4.\r\n", "_____no_output_____" ] ], [ [ "#training the ElasticNet\r\nInput=[(\"scaler\",RobustScaler()),(\"model\",ElasticNet(random_state = 5))]\r\nmodel = Pipeline(Input)\r\nmodel.fit(X_train,y_train)", "_____no_output_____" ] ], [ [ "#### Model Accuracy\nscore() method return the mean accuracy on the given test data and labels.\n\nIn multi-label classification, this is the subset accuracy which is a harsh metric since you require for each sample that each label set be correctly predicted.", "_____no_output_____" ] ], [ [ "print(\"Accuracy score {:.2f} %\\n\".format(model.score(X_test,y_test)*100))", "Accuracy score 48.36 %\n\n" ], [ "#prediction on testing set\r\nprediction=model.predict(X_test)", "_____no_output_____" ] ], [ [ "### Model evolution\n\n\n**r2_score:** The r2_score function computes the percentage variablility explained by our model, either the fraction or the count of correct predictions.\n\n**MAE:** The mean abosolute error function calculates the amount of total error(absolute average distance between the real data and the predicted data) by our model.\n\n**MSE:** The mean squared error function squares the error(penalizes the model for large errors) by our model.", "_____no_output_____" ] ], [ [ "print('Mean Absolute Error:', mean_absolute_error(y_test, prediction)) \r\nprint('Mean Squared Error:', mean_squared_error(y_test, prediction)) \r\nprint('Root Mean Squared Error:', np.sqrt(mean_squared_error(y_test, prediction)))", "Mean Absolute Error: 6195.156634885755\nMean Squared Error: 77088496.26589973\nRoot Mean Squared Error: 8780.00548211103\n" ], [ "print(\"R-squared score : \",r2_score(y_test,prediction))", "R-squared score : 0.48360017436142344\n" ], [ "#ploting actual and predicted\nred = plt.scatter(np.arange(0,80,5),prediction[0:80:5],color = \"red\")\ngreen = plt.scatter(np.arange(0,80,5),y_test[0:80:5],color = \"green\")\nplt.title(\"Comparison of Regression Algorithms\")\nplt.xlabel(\"Index of Candidate\")\nplt.ylabel(\"target\")\nplt.legend((red,green),('ElasticNet', 'REAL'))\nplt.show()\n", "_____no_output_____" ] ], [ [ "### Prediction Plot¶\nFirst, we make use of a plot to plot the actual observations, with x_train on the x-axis and y_train on the y-axis. For the regression line, we will use x_train on the x-axis and then the predictions of the x_train observations on the y-axis.", "_____no_output_____" ] ], [ [ "plt.figure(figsize=(10,6))\nplt.plot(range(20),y_test[0:20], color = \"green\")\nplt.plot(range(20),model.predict(X_test[0:20]), color = \"red\")\nplt.legend([\"Actual\",\"prediction\"]) \nplt.title(\"Predicted vs True Value\")\nplt.xlabel(\"Record number\")\nplt.ylabel(target)\nplt.show()", "_____no_output_____" ] ], [ [ "#### Creator: Vipin Kumar , Github: [Profile](https://github.com/devVipin01)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
d0a3fad4c58a162e34058eda527d955db5bcb9ea
105,515
ipynb
Jupyter Notebook
examples/notebook/examples/bus_driver_scheduling_sat.ipynb
remiomosowon/or-tools
f15537de74088b60dfa325c3b2b5eab365333d03
[ "Apache-2.0" ]
8,273
2015-02-24T22:10:50.000Z
2022-03-31T21:19:27.000Z
examples/notebook/examples/bus_driver_scheduling_sat.ipynb
remiomosowon/or-tools
f15537de74088b60dfa325c3b2b5eab365333d03
[ "Apache-2.0" ]
2,530
2015-03-05T04:27:21.000Z
2022-03-31T06:13:02.000Z
examples/notebook/examples/bus_driver_scheduling_sat.ipynb
remiomosowon/or-tools
f15537de74088b60dfa325c3b2b5eab365333d03
[ "Apache-2.0" ]
2,057
2015-03-04T15:02:02.000Z
2022-03-30T02:29:27.000Z
50.899662
264
0.42068
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
d0a4161b61fac61d2ad04ad110c6b05eb5000bb9
30,247
ipynb
Jupyter Notebook
disaster_tweets.ipynb
Hack-My-Life/disaster_tweets
c95c26200c9db2693088f114ad5d0e4e1bc175ab
[ "MIT" ]
null
null
null
disaster_tweets.ipynb
Hack-My-Life/disaster_tweets
c95c26200c9db2693088f114ad5d0e4e1bc175ab
[ "MIT" ]
null
null
null
disaster_tweets.ipynb
Hack-My-Life/disaster_tweets
c95c26200c9db2693088f114ad5d0e4e1bc175ab
[ "MIT" ]
null
null
null
26.485989
441
0.479089
[ [ [ "# Let's Import Our Libraries", "_____no_output_____" ] ], [ [ "# Keras\nfrom keras.preprocessing.text import Tokenizer\nfrom keras.preprocessing.sequence import pad_sequences\nfrom keras.models import Sequential\nfrom keras.layers import Dense, Flatten, LSTM, Conv1D, MaxPooling1D, Dropout, Activation\nfrom keras.layers.embeddings import Embedding\n\n# Plot\nimport plotly.offline as py\nimport plotly.graph_objs as go\npy.init_notebook_mode(connected=True)\nimport matplotlib as plt\n\n# NLTK\nimport nltk\nfrom nltk.corpus import stopwords\nfrom nltk.stem import SnowballStemmer\n\n#Sklearn\nfrom sklearn.base import BaseEstimator, ClassifierMixin\nfrom sklearn.utils.validation import check_X_y, check_is_fitted\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.linear_model import SGDClassifier\nfrom sklearn.naive_bayes import MultinomialNB\nfrom sklearn.metrics import precision_score\nfrom keras.wrappers.scikit_learn import KerasClassifier\nfrom sklearn.model_selection import StratifiedKFold\nfrom sklearn.model_selection import cross_val_score\n\n# Other\nimport re\nimport string\nimport numpy as np\nimport pandas as pd\nfrom sklearn.manifold import TSNE\nfrom scipy import sparse\nimport warnings\nwarnings.filterwarnings('ignore')", "Using TensorFlow backend.\n" ] ], [ [ "# Let's start by exploring the data.", "_____no_output_____" ] ], [ [ "df = pd.read_csv(\"socialmedia-disaster-tweets-DFE.csv\", encoding='latin-1')", "_____no_output_____" ], [ "df.shape", "_____no_output_____" ], [ "df.head(5)", "_____no_output_____" ], [ "df.columns", "_____no_output_____" ] ], [ [ "The 'tweet' column has tweets and 'choose_one' has the classification.", "_____no_output_____" ], [ "Let's determine the number of unique classifications.", "_____no_output_____" ] ], [ [ "df.choose_one.unique()", "_____no_output_____" ] ], [ [ "# Let's work on cleaning up the data", "_____no_output_____" ] ], [ [ "df = df[[\"text\", \"choose_one\"]]\ndf[\"choose_one\"] = df.choose_one.replace({\"Relevant\": 1, \"Not Relevant\": 0})\ndf.rename(columns={\"choose_one\":\"label\"}, inplace=True)", "_____no_output_____" ], [ "df.label=pd.to_numeric(df.label, errors='coerce')\ndf.dropna(inplace=True)", "_____no_output_____" ] ], [ [ "Let's check and see how the data looks.", "_____no_output_____" ] ], [ [ "df.label.unique()", "_____no_output_____" ], [ "df.head(5)", "_____no_output_____" ], [ "df[\"text\"] = df[\"text\"].str.replace(r\"http\\S+|http|@\\S+|at\", \"\")\ndf[\"text\"] = df[\"text\"].str.replace(r\"[^A-Za-z0-9(),!?@\\'\\`\\\"\\_\\n]\", \" \")\ndf[\"text\"] = df[\"text\"].str.lower()", "_____no_output_____" ], [ "df.head(5)", "_____no_output_____" ], [ "df.columns", "_____no_output_____" ] ], [ [ "# Let's Tokenzie: We'll Turn our Sentences into Lists of Words", "_____no_output_____" ] ], [ [ "from nltk.tokenize import RegexpTokenizer\ntokenizer = RegexpTokenizer(r'\\w+')\ndf[\"tokens\"] = df[\"text\"].apply(tokenizer.tokenize)", "_____no_output_____" ] ], [ [ "Tokens will give us more insight into the data", "_____no_output_____" ] ], [ [ "all_words = [word for tokens in df[\"tokens\"] for word in tokens]\nsentence_lengths = [len(tokens) for tokens in df[\"tokens\"]]\nvocabulary = sorted(set(all_words))", "_____no_output_____" ], [ "print(\"%s words total, with a vocabulary size of %s.\" % (len(all_words), len(vocabulary)))", "153824 words total, with a vocabulary size of 18078.\n" ], [ "print(\"Max sentence length is %s.\" % max(sentence_lengths))", "Max sentence length is 34.\n" ] ], [ [ "# Let's 
Embed: Turning Words into Numbers", "_____no_output_____" ] ], [ [ "from sklearn.feature_extraction.text import TfidfVectorizer\nfrom sklearn.model_selection import train_test_split", "_____no_output_____" ] ], [ [ "### TFIDF Tokenizer for Our Classical ML Models", "_____no_output_____" ] ], [ [ "text = df[\"text\"].tolist()\nlabels = df[\"label\"].tolist()\nX_train, X_test, y_train, y_test = train_test_split(text, labels, test_size=0.2,random_state=40)", "_____no_output_____" ], [ "tfidf_vectorizer = TfidfVectorizer()\nX_train_tfidf = tfidf_vectorizer.fit_transform(X_train)", "_____no_output_____" ], [ "X_test_tfidf = tfidf_vectorizer.transform(X_test)\nX_test_tfidf.shape", "_____no_output_____" ] ], [ [ "### NN Specific Tokenizer", "_____no_output_____" ] ], [ [ "vocabulary_size = 40000\ntokenizer = Tokenizer(num_words = vocabulary_size)\ntokenizer.fit_on_texts(df['text'])\n\nsequences = tokenizer.texts_to_sequences(X_train)\nX_train_nn = pad_sequences(sequences, maxlen=28)\n\nsequences = tokenizer.texts_to_sequences(X_test)\nX_test_nn = pad_sequences(sequences, maxlen=28)\n\nX_train_nn = pd.DataFrame(X_train_nn)\nX_test_nn = pd.DataFrame(X_test_nn)", "_____no_output_____" ], [ "type(X_train_tfidf)", "_____no_output_____" ], [ "type(X_train_nn)", "_____no_output_____" ] ], [ [ "# Here We'll Define a New Classifier", "_____no_output_____" ] ], [ [ "class NbSvmClassifier(BaseEstimator, ClassifierMixin):\n def __init__(self, C=1.0, dual=False, n_jobs=1):\n self.C = C\n self.dual = dual\n self.n_jobs = n_jobs\n\n def predict(self, x):\n # Verify that model has been fit\n check_is_fitted(self, ['_r', '_clf'])\n return self._clf.predict(x.multiply(self._r))\n\n def predict_proba(self, x):\n # Verify that model has been fit\n check_is_fitted(self, ['_r', '_clf'])\n return self._clf.predict_proba(x.multiply(self._r))\n\n def fit(self, x, y):\n # Check that X and y have correct shape\n #y = y.values\n y = y\n x, y = check_X_y(x, y, accept_sparse=True)\n\n def pr(x, y_i, y):\n p = x[y==y_i].sum(0)\n return (p+1) / ((y==y_i).sum()+1)\n\n self._r = sparse.csr_matrix(np.log(pr(x,1,y) / pr(x,0,y)))\n x_nb = x.multiply(self._r)\n self._clf = LogisticRegression(C=self.C, dual=self.dual, n_jobs=self.n_jobs).fit(x_nb, y)\n return self", "_____no_output_____" ] ], [ [ "## Let's get a baseline using Logisitc Regression", "_____no_output_____" ] ], [ [ "classifier = LogisticRegression(C=30.0, class_weight='balanced', solver='newton-cg', multi_class='multinomial', n_jobs=-1, random_state=40)\nclassifier.fit(X_train_tfidf, y_train)\ny_predicted_tfidf = classifier.predict(X_test_tfidf)\n\nprecision = precision_score(y_test, y_predicted_tfidf, pos_label=None,average='weighted')\nprint(precision)", "0.7745952843343159\n" ] ], [ [ "## Now We'll Utilize Our NBSVM Classifier", "_____no_output_____" ] ], [ [ "classifier = NbSvmClassifier(C=4, dual=True, n_jobs=-1).fit(X_train_tfidf, y_train)\nclassifier.fit(X_train_tfidf, y_train)\ny_predicted_tfidf = classifier.predict(X_test_tfidf)\n\nprecision = precision_score(y_test, y_predicted_tfidf, pos_label=None,average='weighted')\nprint(precision)", "0.8057456468641322\n" ] ], [ [ "## Now Let's Apply Grid Search to the Model", "_____no_output_____" ] ], [ [ "from sklearn.model_selection import GridSearchCV\nparam_grid = {\n 'C': [3.0, 3.2, 3.25, 3.3, 3.4, 3.5],\n 'dual' : [True, False]\n}", "_____no_output_____" ], [ "%%time\ngs_classifier = GridSearchCV(NbSvmClassifier(), param_grid, n_jobs=-1)\ngs_classifier = gs_classifier.fit(X_train_tfidf, y_train)", "CPU times: user 
125 ms, sys: 43.9 ms, total: 169 ms\nWall time: 733 ms\n" ], [ "gs_classifier.best_score_", "_____no_output_____" ], [ "gs_classifier.best_params_", "_____no_output_____" ] ], [ [ "We don't seem to be getting much extra juice from applying grid search to this model.", "_____no_output_____" ], [ "## Let's Try Regular SVM", "_____no_output_____" ] ], [ [ "classifier = SGDClassifier().fit(X_train_tfidf, y_train)\nclassifier.fit(X_train_tfidf, y_train)\ny_predicted_tfidf = classifier.predict(X_test_tfidf)\nprecision = precision_score(y_test, y_predicted_tfidf, pos_label=None,average='weighted')\nprint(precision)", "0.7946891903618641\n" ] ], [ [ "## Let's Try Multinomial Naive Bayes", "_____no_output_____" ] ], [ [ "classifier = SGDClassifier().fit(X_train_tfidf, y_train)\nclassifier.fit(X_train_tfidf, y_train)\ny_predicted_tfidf = classifier.predict(X_test_tfidf)\nprecision = precision_score(y_test, y_predicted_tfidf, pos_label=None,average='weighted')\nprint(precision)", "0.7960386107807245\n" ] ], [ [ "The last three approaches yielded pretty similar results. Let's try a deep learning model.", "_____no_output_____" ], [ "## Build the network with LSTM", "_____no_output_____" ], [ "### Network Architecture\n\nOur network is going to start with an embedding layer. This layer lets the system expand each token into a much larger vector space. By doing so we can represent each word in a more meaningful way. The layer takes 40K as its first argument, which is the size of our vocabulary. 100 is the second argument, which is the dimension of the embeddings. The third argument is 28 which is the max number of tokens we consider from each tweet.", "_____no_output_____" ] ], [ [ "def create_lstm():\n model = Sequential()\n model.add(Embedding(40000, 100, input_length=28))\n model.add(LSTM(100, dropout=0.9, recurrent_dropout=0.5))\n model.add(Dense(1, activation='sigmoid'))\n model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])\n return model", "_____no_output_____" ], [ "classifier = KerasClassifier(build_fn=create_lstm, epochs=3, batch_size=5, verbose=0)", "_____no_output_____" ], [ "%%time\nclassifier.fit(X_train_nn, y_train)", "CPU times: user 7min 21s, sys: 1min 24s, total: 8min 46s\nWall time: 2min 55s\n" ], [ "y_predicted_nn = classifier.predict(X_test_nn)\nprecision = precision_score(y_test, y_predicted_nn, pos_label=None,average='weighted')\nprint(precision)", "0.8128093975364307\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code" ] ]
d0a418883d7af9fafb8a346c9512fa644c1ead8a
956,948
ipynb
Jupyter Notebook
convolutional-neural-networks/cifar-cnn/cifar10_cnn_exercise.ipynb
sroy8091/deep-learning-v2-pytorch
91c71e096edd4b87d4a77fdc7d868e9379fe6a0f
[ "MIT" ]
null
null
null
convolutional-neural-networks/cifar-cnn/cifar10_cnn_exercise.ipynb
sroy8091/deep-learning-v2-pytorch
91c71e096edd4b87d4a77fdc7d868e9379fe6a0f
[ "MIT" ]
null
null
null
convolutional-neural-networks/cifar-cnn/cifar10_cnn_exercise.ipynb
sroy8091/deep-learning-v2-pytorch
91c71e096edd4b87d4a77fdc7d868e9379fe6a0f
[ "MIT" ]
null
null
null
1,184.341584
687,686
0.939362
[ [ [ "<a href=\"https://colab.research.google.com/github/sroy8091/deep-learning-v2-pytorch/blob/master/convolutional-neural-networks/cifar-cnn/cifar10_cnn_exercise.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "# Convolutional Neural Networks\n---\nIn this notebook, we train a **CNN** to classify images from the CIFAR-10 database.\n\nThe images in this database are small color images that fall into one of ten classes; some example images are pictured below.\n\n<img src='https://github.com/sroy8091/deep-learning-v2-pytorch/blob/master/convolutional-neural-networks/cifar-cnn/notebook_ims/cifar_data.png?raw=1' width=70% height=70% />", "_____no_output_____" ], [ "### Test for [CUDA](http://pytorch.org/docs/stable/cuda.html)\n\nSince these are larger (32x32x3) images, it may prove useful to speed up your training time by using a GPU. CUDA is a parallel computing platform and CUDA Tensors are the same as typical Tensors, only they utilize GPU's for computation.", "_____no_output_____" ] ], [ [ "import torch\nimport numpy as np\n\n# check if CUDA is available\ntrain_on_gpu = torch.cuda.is_available()\n\nif not train_on_gpu:\n print('CUDA is not available. Training on CPU ...')\nelse:\n print('CUDA is available! Training on GPU ...')", "CUDA is available! Training on GPU ...\n" ] ], [ [ "---\n## Load the [Data](http://pytorch.org/docs/stable/torchvision/datasets.html)\n\nDownloading may take a minute. We load in the training and test data, split the training data into a training and validation set, then create DataLoaders for each of these sets of data.", "_____no_output_____" ] ], [ [ "from torchvision import datasets\nimport torchvision.transforms as transforms\nfrom torch.utils.data.sampler import SubsetRandomSampler\n\n# number of subprocesses to use for data loading\nnum_workers = 0\n# how many samples per batch to load\nbatch_size = 20\n# percentage of training set to use as validation\nvalid_size = 0.2\n\n# convert data to a normalized torch.FloatTensor\ntransform = transforms.Compose([\n transforms.ToTensor(),\n transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))\n ])\n\n# choose the training and test datasets\ntrain_data = datasets.CIFAR10('data', train=True,\n download=True, transform=transform)\ntest_data = datasets.CIFAR10('data', train=False,\n download=True, transform=transform)\n\n# obtain training indices that will be used for validation\nnum_train = len(train_data)\nindices = list(range(num_train))\nnp.random.shuffle(indices)\nsplit = int(np.floor(valid_size * num_train))\ntrain_idx, valid_idx = indices[split:], indices[:split]\n\n# define samplers for obtaining training and validation batches\ntrain_sampler = SubsetRandomSampler(train_idx)\nvalid_sampler = SubsetRandomSampler(valid_idx)\n\n# prepare data loaders (combine dataset and sampler)\ntrain_loader = torch.utils.data.DataLoader(train_data, batch_size=batch_size,\n sampler=train_sampler, num_workers=num_workers)\nvalid_loader = torch.utils.data.DataLoader(train_data, batch_size=batch_size, \n sampler=valid_sampler, num_workers=num_workers)\ntest_loader = torch.utils.data.DataLoader(test_data, batch_size=batch_size, \n num_workers=num_workers)\n\n# specify the image classes\nclasses = ['airplane', 'automobile', 'bird', 'cat', 'deer',\n 'dog', 'frog', 'horse', 'ship', 'truck']", "Files already downloaded and verified\nFiles already downloaded and verified\n" ] ], [ [ "### Visualize a Batch of Training Data", 
"_____no_output_____" ] ], [ [ "import matplotlib.pyplot as plt\n%matplotlib inline\n\n# helper function to un-normalize and display an image\ndef imshow(img):\n img = img / 2 + 0.5 # unnormalize\n plt.imshow(np.transpose(img, (1, 2, 0))) # convert from Tensor image", "_____no_output_____" ], [ "# obtain one batch of training images\ndataiter = iter(train_loader)\nimages, labels = dataiter.next()\nimages = images.numpy() # convert images to numpy for display\n\n# plot the images in the batch, along with the corresponding labels\nfig = plt.figure(figsize=(25, 4))\n# display 20 images\nfor idx in np.arange(20):\n ax = fig.add_subplot(2, 20/2, idx+1, xticks=[], yticks=[])\n imshow(images[idx])\n ax.set_title(classes[labels[idx]])", "_____no_output_____" ] ], [ [ "### View an Image in More Detail\n\nHere, we look at the normalized red, green, and blue (RGB) color channels as three separate, grayscale intensity images.", "_____no_output_____" ] ], [ [ "rgb_img = np.squeeze(images[3])\nchannels = ['red channel', 'green channel', 'blue channel']\n\nfig = plt.figure(figsize = (36, 36)) \nfor idx in np.arange(rgb_img.shape[0]):\n ax = fig.add_subplot(1, 3, idx + 1)\n img = rgb_img[idx]\n ax.imshow(img, cmap='gray')\n ax.set_title(channels[idx])\n width, height = img.shape\n thresh = img.max()/2.5\n for x in range(width):\n for y in range(height):\n val = round(img[x][y],2) if img[x][y] !=0 else 0\n ax.annotate(str(val), xy=(y,x),\n horizontalalignment='center',\n verticalalignment='center', size=8,\n color='white' if img[x][y]<thresh else 'black')", "_____no_output_____" ] ], [ [ "---\n## Define the Network [Architecture](http://pytorch.org/docs/stable/nn.html)\n\nThis time, you'll define a CNN architecture. Instead of an MLP, which used linear, fully-connected layers, you'll use the following:\n* [Convolutional layers](https://pytorch.org/docs/stable/nn.html#conv2d), which can be thought of as stack of filtered images.\n* [Maxpooling layers](https://pytorch.org/docs/stable/nn.html#maxpool2d), which reduce the x-y size of an input, keeping only the most _active_ pixels from the previous layer.\n* The usual Linear + Dropout layers to avoid overfitting and produce a 10-dim output.\n\nA network with 2 convolutional layers is shown in the image below and in the code, and you've been given starter code with one convolutional and one maxpooling layer.\n\n<img src='https://github.com/sroy8091/deep-learning-v2-pytorch/blob/master/convolutional-neural-networks/cifar-cnn/notebook_ims/2_layer_conv.png?raw=1' height=50% width=50% />\n\n#### TODO: Define a model with multiple convolutional layers, and define the feedforward network behavior.\n\nThe more convolutional layers you include, the more complex patterns in color and shape a model can detect. It's suggested that your final model include 2 or 3 convolutional layers as well as linear layers + dropout in between to avoid overfitting. \n\nIt's good practice to look at existing research and implementations of related models as a starting point for defining your own models. 
You may find it useful to look at [this PyTorch classification example](https://github.com/pytorch/tutorials/blob/master/beginner_source/blitz/cifar10_tutorial.py) or [this, more complex Keras example](https://github.com/keras-team/keras/blob/master/examples/cifar10_cnn.py) to help decide on a final structure.\n\n#### Output volume for a convolutional layer\n\nTo compute the output size of a given convolutional layer we can perform the following calculation (taken from [Stanford's cs231n course](http://cs231n.github.io/convolutional-networks/#layers)):\n> We can compute the spatial size of the output volume as a function of the input volume size (W), the kernel/filter size (F), the stride with which they are applied (S), and the amount of zero padding used (P) on the border. The correct formula for calculating how many neurons define the output_W is given by `(W−F+2P)/S+1`. \n\nFor example for a 7x7 input and a 3x3 filter with stride 1 and pad 0 we would get a 5x5 output. With stride 2 we would get a 3x3 output.", "_____no_output_____" ] ], [ [ "import torch.nn as nn\nimport torch.nn.functional as F\n\n# define the CNN architecture\nclass Net(nn.Module):\n def __init__(self):\n super(Net, self).__init__()\n # convolutional layer\n self.conv1 = nn.Conv2d(3, 16, 3, padding=1)\n # max pooling layer\n self.pool = nn.MaxPool2d(2, 2)\n self.conv2 = nn.Conv2d(16, 32, 3, padding=1)\n self.conv3 = nn.Conv2d(32, 64, 3, padding=1)\n self.fc1 = nn.Linear(64*4*4, 512)\n self.fc2 = nn.Linear(512, 10)\n self.dropout = nn.Dropout(p=0.25)\n\n def forward(self, x):\n # add sequence of convolutional and max pooling layers\n x = self.pool(F.relu(self.conv1(x)))\n x = self.pool(F.relu(self.conv2(x)))\n x = self.pool(F.relu(self.conv3(x)))\n\n x = x.view(-1, 64*4*4)\n\n x = self.dropout(x)\n x = F.relu(self.fc1(x))\n\n x = self.dropout(x)\n x = self.fc2(x)\n\n\n return x\n\n# create a complete CNN\nmodel = Net()\nprint(model)\n\n# move tensors to GPU if CUDA is available\nif train_on_gpu:\n model.cuda()", "Net(\n (conv1): Conv2d(3, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n (pool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n (conv2): Conv2d(16, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n (conv3): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n (fc1): Linear(in_features=1024, out_features=512, bias=True)\n (fc2): Linear(in_features=512, out_features=10, bias=True)\n (dropout): Dropout(p=0.25, inplace=False)\n)\n" ] ], [ [ "### Specify [Loss Function](http://pytorch.org/docs/stable/nn.html#loss-functions) and [Optimizer](http://pytorch.org/docs/stable/optim.html)\n\nDecide on a loss and optimization function that is best suited for this classification task. The linked code examples from above, may be a good starting point; [this PyTorch classification example](https://github.com/pytorch/tutorials/blob/master/beginner_source/blitz/cifar10_tutorial.py) or [this, more complex Keras example](https://github.com/keras-team/keras/blob/master/examples/cifar10_cnn.py). 
Pay close attention to the value for **learning rate** as this value determines how your model converges to a small error.\n\n#### TODO: Define the loss and optimizer and see how these choices change the loss over time.", "_____no_output_____" ] ], [ [ "import torch.optim as optim\n\n# specify loss function\ncriterion = nn.CrossEntropyLoss()\n\n# specify optimizer\noptimizer = optim.SGD(model.parameters(), lr=0.01)", "_____no_output_____" ] ], [ [ "---\n## Train the Network\n\nRemember to look at how the training and validation loss decreases over time; if the validation loss ever increases it indicates possible overfitting.", "_____no_output_____" ] ], [ [ "# number of epochs to train the model\nn_epochs = 30 # you may increase this number to train a final model\n\nvalid_loss_min = np.Inf # track change in validation loss\n\nfor epoch in range(1, n_epochs+1):\n\n # keep track of training and validation loss\n train_loss = 0.0\n valid_loss = 0.0\n \n ###################\n # train the model #\n ###################\n model.train()\n for data, target in train_loader:\n # move tensors to GPU if CUDA is available\n if train_on_gpu:\n data, target = data.cuda(), target.cuda()\n # clear the gradients of all optimized variables\n optimizer.zero_grad()\n # forward pass: compute predicted outputs by passing inputs to the model\n output = model(data)\n # calculate the batch loss\n loss = criterion(output, target)\n # backward pass: compute gradient of the loss with respect to model parameters\n loss.backward()\n # perform a single optimization step (parameter update)\n optimizer.step()\n # update training loss\n train_loss += loss.item()*data.size(0)\n \n ###################### \n # validate the model #\n ######################\n model.eval()\n for data, target in valid_loader:\n # move tensors to GPU if CUDA is available\n if train_on_gpu:\n data, target = data.cuda(), target.cuda()\n # forward pass: compute predicted outputs by passing inputs to the model\n output = model(data)\n # calculate the batch loss\n loss = criterion(output, target)\n # update average validation loss \n valid_loss += loss.item()*data.size(0)\n \n # calculate average losses\n train_loss = train_loss/len(train_loader.dataset)\n valid_loss = valid_loss/len(valid_loader.dataset)\n \n # print training/validation statistics \n print('Epoch: {} \\tTraining Loss: {:.6f} \\tValidation Loss: {:.6f}'.format(\n epoch, train_loss, valid_loss))\n \n # save model if validation loss has decreased\n if valid_loss <= valid_loss_min:\n print('Validation loss decreased ({:.6f} --> {:.6f}). Saving model ...'.format(\n valid_loss_min,\n valid_loss))\n torch.save(model.state_dict(), 'model_cifar.pt')\n valid_loss_min = valid_loss", "Epoch: 1 \tTraining Loss: 0.803521 \tValidation Loss: 0.194699\nValidation loss decreased (inf --> 0.194699). Saving model ...\nEpoch: 2 \tTraining Loss: 0.764657 \tValidation Loss: 0.190203\nValidation loss decreased (0.194699 --> 0.190203). Saving model ...\nEpoch: 3 \tTraining Loss: 0.729479 \tValidation Loss: 0.180554\nValidation loss decreased (0.190203 --> 0.180554). Saving model ...\nEpoch: 4 \tTraining Loss: 0.697147 \tValidation Loss: 0.176048\nValidation loss decreased (0.180554 --> 0.176048). Saving model ...\nEpoch: 5 \tTraining Loss: 0.664248 \tValidation Loss: 0.166369\nValidation loss decreased (0.176048 --> 0.166369). 
Saving model ...\nEpoch: 6 \tTraining Loss: 0.633258 \tValidation Loss: 0.172071\nEpoch: 7 \tTraining Loss: 0.606116 \tValidation Loss: 0.167166\nEpoch: 8 \tTraining Loss: 0.579878 \tValidation Loss: 0.159903\nValidation loss decreased (0.166369 --> 0.159903). Saving model ...\nEpoch: 9 \tTraining Loss: 0.559891 \tValidation Loss: 0.152775\nValidation loss decreased (0.159903 --> 0.152775). Saving model ...\nEpoch: 10 \tTraining Loss: 0.534879 \tValidation Loss: 0.157326\nEpoch: 11 \tTraining Loss: 0.515649 \tValidation Loss: 0.148941\nValidation loss decreased (0.152775 --> 0.148941). Saving model ...\nEpoch: 12 \tTraining Loss: 0.489339 \tValidation Loss: 0.150183\nEpoch: 13 \tTraining Loss: 0.470855 \tValidation Loss: 0.149713\nEpoch: 14 \tTraining Loss: 0.451540 \tValidation Loss: 0.151616\nEpoch: 15 \tTraining Loss: 0.430954 \tValidation Loss: 0.148358\nValidation loss decreased (0.148941 --> 0.148358). Saving model ...\nEpoch: 16 \tTraining Loss: 0.414108 \tValidation Loss: 0.144657\nValidation loss decreased (0.148358 --> 0.144657). Saving model ...\nEpoch: 17 \tTraining Loss: 0.396600 \tValidation Loss: 0.148721\nEpoch: 18 \tTraining Loss: 0.380980 \tValidation Loss: 0.147071\nEpoch: 19 \tTraining Loss: 0.361990 \tValidation Loss: 0.149246\nEpoch: 20 \tTraining Loss: 0.345308 \tValidation Loss: 0.149856\nEpoch: 21 \tTraining Loss: 0.333657 \tValidation Loss: 0.153758\nEpoch: 22 \tTraining Loss: 0.319412 \tValidation Loss: 0.151106\nEpoch: 23 \tTraining Loss: 0.305450 \tValidation Loss: 0.153118\nEpoch: 24 \tTraining Loss: 0.295869 \tValidation Loss: 0.153868\nEpoch: 25 \tTraining Loss: 0.284524 \tValidation Loss: 0.150587\nEpoch: 26 \tTraining Loss: 0.272934 \tValidation Loss: 0.157747\nEpoch: 27 \tTraining Loss: 0.260829 \tValidation Loss: 0.155512\nEpoch: 28 \tTraining Loss: 0.255282 \tValidation Loss: 0.156478\nEpoch: 29 \tTraining Loss: 0.246054 \tValidation Loss: 0.155306\nEpoch: 30 \tTraining Loss: 0.235927 \tValidation Loss: 0.162395\n" ] ], [ [ "### Load the Model with the Lowest Validation Loss", "_____no_output_____" ] ], [ [ "model.load_state_dict(torch.load('model_cifar.pt'))", "_____no_output_____" ] ], [ [ "---\n## Test the Trained Network\n\nTest your trained model on previously unseen data! A \"good\" result will be a CNN that gets around 70% (or more, try your best!) accuracy on these test images.", "_____no_output_____" ] ], [ [ "# track test loss\ntest_loss = 0.0\nclass_correct = list(0. for i in range(10))\nclass_total = list(0. 
for i in range(10))\n\nmodel.eval()\n# iterate over test data\nfor data, target in test_loader:\n # move tensors to GPU if CUDA is available\n if train_on_gpu:\n data, target = data.cuda(), target.cuda()\n # forward pass: compute predicted outputs by passing inputs to the model\n output = model(data)\n # calculate the batch loss\n loss = criterion(output, target)\n # update test loss \n test_loss += loss.item()*data.size(0)\n # convert output probabilities to predicted class\n _, pred = torch.max(output, 1) \n # compare predictions to true label\n correct_tensor = pred.eq(target.data.view_as(pred))\n correct = np.squeeze(correct_tensor.numpy()) if not train_on_gpu else np.squeeze(correct_tensor.cpu().numpy())\n # calculate test accuracy for each object class\n for i in range(batch_size):\n label = target.data[i]\n class_correct[label] += correct[i].item()\n class_total[label] += 1\n\n# average test loss\ntest_loss = test_loss/len(test_loader.dataset)\nprint('Test Loss: {:.6f}\\n'.format(test_loss))\n\nfor i in range(10):\n if class_total[i] > 0:\n print('Test Accuracy of %5s: %2d%% (%2d/%2d)' % (\n classes[i], 100 * class_correct[i] / class_total[i],\n np.sum(class_correct[i]), np.sum(class_total[i])))\n else:\n print('Test Accuracy of %5s: N/A (no training examples)' % (classes[i]))\n\nprint('\\nTest Accuracy (Overall): %2d%% (%2d/%2d)' % (\n 100. * np.sum(class_correct) / np.sum(class_total),\n np.sum(class_correct), np.sum(class_total)))", "Test Loss: 0.744639\n\nTest Accuracy of airplane: 77% (779/1000)\nTest Accuracy of automobile: 86% (864/1000)\nTest Accuracy of bird: 67% (672/1000)\nTest Accuracy of cat: 52% (529/1000)\nTest Accuracy of deer: 67% (677/1000)\nTest Accuracy of dog: 63% (630/1000)\nTest Accuracy of frog: 85% (853/1000)\nTest Accuracy of horse: 79% (798/1000)\nTest Accuracy of ship: 88% (885/1000)\nTest Accuracy of truck: 77% (773/1000)\n\nTest Accuracy (Overall): 74% (7460/10000)\n" ] ], [ [ "### Question: What are your model's weaknesses and how might they be improved?", "_____no_output_____" ], [ "**Answer**: (double-click to edit and add an answer)\nBy adding different types of image transformations", "_____no_output_____" ], [ "### Visualize Sample Test Results", "_____no_output_____" ] ], [ [ "# obtain one batch of test images\ndataiter = iter(test_loader)\nimages, labels = dataiter.next()\nimages.numpy()\n\n# move model inputs to cuda, if GPU available\nif train_on_gpu:\n images = images.cuda()\n\n# get sample outputs\noutput = model(images)\n# convert output probabilities to predicted class\n_, preds_tensor = torch.max(output, 1)\npreds = np.squeeze(preds_tensor.numpy()) if not train_on_gpu else np.squeeze(preds_tensor.cpu().numpy())\n\n# plot the images in the batch, along with predicted and true labels\nfig = plt.figure(figsize=(25, 4))\nfor idx in np.arange(20):\n ax = fig.add_subplot(2, 20/2, idx+1, xticks=[], yticks=[])\n imshow(images[idx].cpu())\n ax.set_title(\"{} ({})\".format(classes[preds[idx]], classes[labels[idx]]),\n color=(\"green\" if preds[idx]==labels[idx].item() else \"red\"))", "_____no_output_____" ], [ "", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ] ]
d0a4226e7e78ca668881b222aa845f78f343fd6a
3,135
ipynb
Jupyter Notebook
ds/kb/courses/deep-learning-v2-pytorch/intro-neural-networks/student-admissions/StudentAdmissionsSolutions.ipynb
tobias-fyi/vela
b0b3d3c6dc3fa397c8c7a492098a02cf75e0ff82
[ "MIT" ]
null
null
null
ds/kb/courses/deep-learning-v2-pytorch/intro-neural-networks/student-admissions/StudentAdmissionsSolutions.ipynb
tobias-fyi/vela
b0b3d3c6dc3fa397c8c7a492098a02cf75e0ff82
[ "MIT" ]
8
2020-03-24T17:47:23.000Z
2022-03-12T00:33:21.000Z
ds/kb/courses/deep-learning-v2-pytorch/intro-neural-networks/student-admissions/StudentAdmissionsSolutions.ipynb
tobias-fyi/vela
b0b3d3c6dc3fa397c8c7a492098a02cf75e0ff82
[ "MIT" ]
null
null
null
20.9
94
0.530781
[ [ [ "# Solutions", "_____no_output_____" ], [ "### One-hot encoding the rank", "_____no_output_____" ] ], [ [ "## One solution\n# Make dummy variables for rank\none_hot_data = pd.concat([data, pd.get_dummies(data['rank'], prefix='rank')], axis=1)\n\n# Drop the previous rank column\none_hot_data = one_hot_data.drop('rank', axis=1)\n\n# Print the first 10 rows of our data\none_hot_data[:10]", "_____no_output_____" ], [ "## Alternative solution ##\n# if you're using an up-to-date version of pandas, \n# you can also use selection by columns\n\n# an equally valid solution\none_hot_data = pd.get_dummies(data, columns=['rank'])", "_____no_output_____" ] ], [ [ "### Scaling the data", "_____no_output_____" ] ], [ [ "# Copying our data\nprocessed_data = one_hot_data[:]\n\n# Scaling the columns\nprocessed_data['gre'] = processed_data['gre']/800\nprocessed_data['gpa'] = processed_data['gpa']/4.0\nprocessed_data[:10]", "_____no_output_____" ] ], [ [ "### Backpropagating the data", "_____no_output_____" ] ], [ [ "def error_term_formula(x, y, output):\n return (y - output)*sigmoid_prime(x)", "_____no_output_____" ], [ "## Alternative solution ##\n# you could also *only* use y and the output \n# and calculate sigmoid_prime directly from the activated output!\n\n# below is an equally valid solution (it doesn't utilize x)\ndef error_term_formula(x, y, output):\n return (y-output) * output * (1 - output)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ] ]
d0a437987ceb9b3b95ffaca957fc52395133b6a1
196,888
ipynb
Jupyter Notebook
docs/notebooks/Quickstart.ipynb
arfon/LEGWORK
91ca299d00ed6892acdf5980f33826421fa348ef
[ "MIT" ]
14
2021-09-28T21:53:24.000Z
2022-02-05T14:29:44.000Z
docs/notebooks/Quickstart.ipynb
arfon/LEGWORK
91ca299d00ed6892acdf5980f33826421fa348ef
[ "MIT" ]
44
2021-10-31T15:04:26.000Z
2022-03-15T19:01:40.000Z
docs/notebooks/Quickstart.ipynb
arfon/LEGWORK
91ca299d00ed6892acdf5980f33826421fa348ef
[ "MIT" ]
4
2021-11-18T09:20:53.000Z
2022-03-16T11:30:44.000Z
713.362319
122,248
0.947899
[ [ [ "# Quickstart\nIn this tutorial, we explain how to quickly use ``LEGWORK`` to calculate the detectability of a collection of sources.", "_____no_output_____" ] ], [ [ "%matplotlib inline", "_____no_output_____" ] ], [ [ "Let's start by importing the source and visualisation modules of `LEGWORK` and some other common packages.", "_____no_output_____" ] ], [ [ "import legwork.source as source\nimport legwork.visualisation as vis\n\nimport numpy as np\nimport astropy.units as u\nimport matplotlib.pyplot as plt", "_____no_output_____" ], [ "%config InlineBackend.figure_format = 'retina'\n\nplt.rc('font', family='serif')\nplt.rcParams['text.usetex'] = False\nfs = 24\n\n# update various fontsizes to match\nparams = {'figure.figsize': (12, 8),\n 'legend.fontsize': fs,\n 'axes.labelsize': fs,\n 'xtick.labelsize': 0.7 * fs,\n 'ytick.labelsize': 0.7 * fs}\nplt.rcParams.update(params)", "_____no_output_____" ] ], [ [ "Next let's create a random collection of possible LISA sources in order to assess their detectability.", "_____no_output_____" ] ], [ [ "# create a random collection of sources\nn_values = 1500\nm_1 = np.random.uniform(0, 10, n_values) * u.Msun\nm_2 = np.random.uniform(0, 10, n_values) * u.Msun\ndist = np.random.normal(8, 1.5, n_values) * u.kpc\nf_orb = 10**(-5 * np.random.power(3, n_values)) * u.Hz\necc = 1 - np.random.power(5, n_values)", "_____no_output_____" ] ], [ [ "We can instantiate a `Source` class using these random sources in order to analyse the population. There are also a series of optional parameters which we don't cover here but if you are interested in the purpose of these then check out the [Using the Source Class](Source.ipynb) tutorial.", "_____no_output_____" ] ], [ [ "sources = source.Source(m_1=m_1, m_2=m_2, ecc=ecc, dist=dist, f_orb=f_orb)", "_____no_output_____" ] ], [ [ "This `Source` class has many methods for calculating strains, visualising populations and more. You can learn more about these in the [Using the Source Class](Source.ipynb) tutorial. For now, we shall focus only on the calculation of the signal-to-noise ratio.\n\nTherefore, let's calculate the SNR for these sources. We set `verbose=True` to give an impression of what sort of sources we have created. This function will split the sources based on whether they are stationary/evolving and circular/eccentric and use one of 4 SNR functions for each subpopulation.", "_____no_output_____" ] ], [ [ "snr = sources.get_snr(verbose=True)", "Calculating SNR for 1500 sources\n\t0 sources have already merged\n\t1385 sources are stationary\n\t\t427 sources are stationary and circular\n\t\t958 sources are stationary and eccentric\n\t115 sources are evolving\n\t\t33 sources are evolving and circular\n\t\t82 sources are evolving and eccentric\n" ] ], [ [ "These SNR values are now stored in `sources.snr` and we can mask those that don't meet some detectable threshold.", "_____no_output_____" ] ], [ [ "detectable_threshold = 7\ndetectable_sources = sources.snr > 7\nprint(\"{} of the {} sources are detectable\".format(len(sources.snr[detectable_sources]), n_values))", "585 of the 1500 sources are detectable\n" ] ], [ [ "And just like that we know the number of detectable sources! 
It could be interesting to see how the SNR varies with orbital frequency so let's use the :meth:`legwork.source.Source.plot_source_variables` to create a 2D density distribution of these variables.", "_____no_output_____" ] ], [ [ "fig, ax = sources.plot_source_variables(xstr=\"f_orb\", ystr=\"snr\", disttype=\"kde\", log_scale=(True, True),\n fill=True, xlim=(2e-6, 2e-1), which_sources=sources.snr > 0)", "_____no_output_____" ] ], [ [ "The reason for this shape may not be immediately obvious. However, if we also use the visualisation module to overlay the LISA sensitivity curve, it becomes clear that the SNRs increase in step with the decrease in the noise and flatten out as the sensitivity curve does as we would expect. To learn more about the visualisation options that `LEGWORK` offers, check out the [Visualisation](Visualisation.ipynb) tutorial.", "_____no_output_____" ] ], [ [ "# create the same plot but set `show=False`\nfig, ax = sources.plot_source_variables(xstr=\"f_orb\", ystr=\"snr\", disttype=\"kde\", log_scale=(True, True),\n fill=True, show=False, which_sources=sources.snr > 0)\n\n# duplicate the x axis and plot the LISA sensitivity curve\nright_ax = ax.twinx()\nfrequency_range = np.logspace(np.log10(2e-6), np.log10(2e-1), 1000) * u.Hz\nvis.plot_sensitivity_curve(frequency_range=frequency_range, fig=fig, ax=right_ax)\n\nplt.show()", "_____no_output_____" ] ], [ [ "That's it for this quickstart into using `LEGWORK`. For more details on using `LEGWORK` to calculate strains, evolve binaries and visualise their distributions check out the [other tutorials](../tutorials.rst) and [demos](../demos.rst) in these docs! You can also read more about the scope and limitations of `LEGWORK` [on this page](../limitations.rst).", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "raw", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "raw" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
d0a443e812735ae15d5fee5e1b9bfeac9c79c83d
8,709
ipynb
Jupyter Notebook
notebooks/.ipynb_checkpoints/5(3)_Testing-model-checkpoint.ipynb
sai-krishna-msk/KickAssist
7fb256e3ef4beff231332f6491ebb975f3fe4b43
[ "MIT" ]
null
null
null
notebooks/.ipynb_checkpoints/5(3)_Testing-model-checkpoint.ipynb
sai-krishna-msk/KickAssist
7fb256e3ef4beff231332f6491ebb975f3fe4b43
[ "MIT" ]
7
2021-06-08T21:18:49.000Z
2022-03-12T00:24:33.000Z
notebooks/5(3)_Testing-model.ipynb
sai-krishna-msk/KickAssist
7fb256e3ef4beff231332f6491ebb975f3fe4b43
[ "MIT" ]
null
null
null
28.648026
256
0.555058
[ [ [ "- **Let us see how well our model would perform if we would deploy our model at the end of 2018**\n- **ie: Let us test our model on 2019 data**", "_____no_output_____" ] ], [ [ "import numpy as np\nimport pandas as pd \nimport category_encoders as ce\nfrom sklearn.preprocessing import LabelBinarizer\nfrom sklearn.preprocessing import OneHotEncoder", "_____no_output_____" ], [ "data_path = \"../data/notebooks/4_merged_data.csv\"", "_____no_output_____" ], [ "df_raw = pd.read_csv(data_path)", "C:\\Users\\saima\\Anaconda3\\envs\\kick\\lib\\site-packages\\IPython\\core\\interactiveshell.py:3063: DtypeWarning: Columns (29) have mixed types.Specify dtype option on import or set low_memory=False.\n interactivity=interactivity, compiler=compiler, result=result)\n" ], [ "df = df_raw.copy()", "_____no_output_____" ], [ "cols = ['launched_at', 'status', 'days_to_deadline', 'goal',\n 'sub_category', 'category', 'blurb_length', 'location_country', 'rewards_mean', 'rewards_median',\n 'rewards_variance', 'rewards_SD', 'rewards_MIN', 'rewards_MAX' ,\n 'rewards_NUM', 'currency', 'launch_year', 'launch_month',\n 'deadline_month']\ntarget_encoding_cols = ['location_country' , 'currency' , 'category', 'sub_category']\n\ntrain_years =[ 2016, 2017 , 2018]\nvalid_years = [2019]\n", "_____no_output_____" ], [ "def pre_proc(df):\n df = df[cols]\n df= df.dropna(axis=0, subset=[\"rewards_MIN\"])\n df= df.dropna(axis=0, subset=[\"blurb_length\"])\n df = df.reset_index(drop=True)\n df[\"launched_at\"] = pd.to_datetime(df[\"launched_at\"]).dt.date\n df.sort_values(\"launched_at\" , inplace=True)\n df.drop(['launched_at'] ,axis=1 , inplace=True)\n df.reset_index(inplace=True)\n df.drop('index', inplace=True , axis=1)\n \n binarizer= LabelBinarizer()\n df[\"status\"] = binarizer.fit_transform(df[\"status\"])\n \n \n return df\n\n\n ", "_____no_output_____" ], [ "def onehot_categ(df):\n \n encoder = OneHotEncoder(sparse=False)\n cat_cols=['category', 'sub_category', 'currency', 'location_country']\n X_hot = encoder.fit_transform(df[cat_cols])\n \n onehotcols = []\n for cat in encoder.categories_:\n for col in cat:\n onehotcols.append(col)\n \n X_hot = pd.DataFrame(X_hot , columns=onehotcols)\n df =pd.concat([df , X_hot] , axis=1)\n df.drop(target_encoding_cols , axis=1 , inplace=True)\n \n \n return df \n ", "_____no_output_____" ], [ "def get_model_data(df , train_years , valid_years):\n df_train = df[df['launch_year'].apply(lambda x: True if x in train_years else False)]\n df_valid= df[df['launch_year'].apply(lambda x: True if x in valid_years else False)]\n \n X_train , y_train = df_train.drop([\"status\",\"launch_year\"] , axis=1) , df_train['status']\n X_valid , y_valid = df_valid.drop([\"status\",\"launch_year\"] , axis=1) , df_valid['status']\n \n return X_train , y_train , X_valid , y_valid\n\n ", "_____no_output_____" ], [ "def helmert_categ(df_train , df_valid):\n encoder = ce.HelmertEncoder(cols = target_encoding_cols , drop_invariant=True )\n dfh = encoder.fit_transform(df_train[target_encoding_cols])\n df_train = pd.concat([df_train , dfh], axis=1)\n df_train.drop(target_encoding_cols , axis=1 , inplace=True)\n dfh = encoder.transform(df_valid[target_encoding_cols])\n df_valid = pd.concat([df_valid , dfh], axis=1)\n df_valid.drop(target_encoding_cols , axis=1 , inplace=True)\n \n return df_train , df_valid ", "_____no_output_____" ], [ "from xgboost import XGBClassifier\nimport operator\n\ndef XG_score(X_train, X_test, y_train, y_test):\n XG_fet = {}\n \n XG= XGBClassifier(n_estimators=150, 
random_state=9)\n XG.fit(X_train, y_train)\n XG_score = XG.score(X_test, y_test)\n \n \n feat_labels = X_train.columns.values\n \n for feature, acc in zip(feat_labels, XG.feature_importances_):\n XG_fet[feature] = acc\n \n XG_fet = sorted(XG_fet.items(), key=operator.itemgetter(1), reverse=True)\n \n \n return (XG,XG_score, XG_fet)\n", "_____no_output_____" ], [ "df_proc = pre_proc(df)\ndf_onehot = onehot_categ(df_proc)\nX_train_oh , y_train_oh , X_valid_oh , y_valid_oh = get_model_data(df_onehot , train_years , valid_years)", "_____no_output_____" ], [ "df_proc = pre_proc(df)\nX_train_raw , y_train_hel , X_valid_raw , y_valid_hel = get_model_data(df_proc , train_years , valid_years)\nX_train_hel , X_valid_hel = helmert_categ(X_train_raw , X_valid_raw)\n", "_____no_output_____" ], [ "XG_model_oh , XG_scores_oh , XG_fet_imp_oh= XG_score(X_train_oh , X_valid_oh , y_train_oh , y_valid_oh)\nprint(\"Score using OneHot encodinng: {}\".format(XG_scores_oh))", "Score using OneHot encodinng: 0.8151040114442392\n" ], [ "XG_model_hel , XG_scores_hel , XG_fet_imp_hel= XG_score(X_train_hel , X_valid_hel , y_train_hel , y_valid_hel)\nprint(\"Score using Helmert encodinng: {}\".format(XG_scores_hel))", "Score using Helmert encodinng: 0.8627287357692078\n" ] ], [ [ "- **This should is great, our test accuracy is greater than our validation accuracy, usually this should be a red flag but since there was not decision during the process of modeling and preprocessing made based off the 2019(test data), its fine**\n- **In the next notebook we will train the model on the entire dataset and save the model**", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ] ]
d0a447ba4de6b331dca821032cda4161e37c5760
19,077
ipynb
Jupyter Notebook
Lab12.ipynb
GriffithB/IA241-Gituhub
201d993ed8fe86c3114aa150e327741d7956d604
[ "MIT" ]
null
null
null
Lab12.ipynb
GriffithB/IA241-Gituhub
201d993ed8fe86c3114aa150e327741d7956d604
[ "MIT" ]
null
null
null
Lab12.ipynb
GriffithB/IA241-Gituhub
201d993ed8fe86c3114aa150e327741d7956d604
[ "MIT" ]
null
null
null
27.647826
96
0.36164
[ [ [ "import pandas", "_____no_output_____" ], [ "df = pandas.read_excel('s3://ia241-bullard/house_price.xls')\n\ndf[:10]", "_____no_output_____" ], [ "df['unit_price']= df['price']/df['area']\ndf[:10]", "_____no_output_____" ] ], [ [ "## 2.2", "_____no_output_____" ] ], [ [ "df['house_type'].value_counts()", "_____no_output_____" ] ], [ [ "## 2.3 avg price ", "_____no_output_____" ] ], [ [ "prc_more_2_bath=df.loc[df['bathroom']>2 ]['price']\nprint('avg price of house more than 2 bathroom is ${}'.format(prc_more_2_bath.mean()))", "avg price of house more than 2 bathroom is $383645.45454545453\n" ] ], [ [ "## 2.4", "_____no_output_____" ] ], [ [ "print('mean unit price is ${}'.format(df['unit_price'].mean()))", "mean unit price is $167.45934522134766\n" ], [ "print('median unit price is ${}'.format(df['unit_price'].median()))", "median unit price is $130.13392857142858\n" ] ], [ [ "## 2.5", "_____no_output_____" ] ], [ [ "df.groupby('house_type').mean()['price']", "_____no_output_____" ] ], [ [ "## 2.6", "_____no_output_____" ] ], [ [ "from scipy import stats", "_____no_output_____" ], [ "result = stats.linregress(df['area'],df['price'])", "_____no_output_____" ], [ "print('slope is {}'.format(result.slope))\nprint('intercept is {}'.format(result.intercept))\nprint('r square is {}'.format(result.rvalue *result.rvalue))\nprint('p value is {}'.format(result.pvalue))", "slope is 79.95495729411489\nintercept is 156254.76245096227\nr square is 0.2343900121890692\np value is 0.001340065037461188\n" ] ], [ [ "## 2.7\n", "_____no_output_____" ] ], [ [ "print('price of house with {}sqft is ${}'.format(2000,2000*result.slope+result.intercept))", "price of house with 2000sqft is $316164.67703919206\n" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ] ]
d0a44e9d34e1fafa77d1993bba18f5ac2068b811
86,908
ipynb
Jupyter Notebook
Virulence_HMMs/Overlap_Databases.ipynb
glickmac/VF_HMM
bf667bafdda73bed0344b9d78bac3a7cfae3fc9e
[ "MIT" ]
null
null
null
Virulence_HMMs/Overlap_Databases.ipynb
glickmac/VF_HMM
bf667bafdda73bed0344b9d78bac3a7cfae3fc9e
[ "MIT" ]
null
null
null
Virulence_HMMs/Overlap_Databases.ipynb
glickmac/VF_HMM
bf667bafdda73bed0344b9d78bac3a7cfae3fc9e
[ "MIT" ]
null
null
null
229.915344
39,348
0.895038
[ [ [ "### Load Data and Libraries", "_____no_output_____" ] ], [ [ "from Bio import SearchIO\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport matplotlib as mpl\nimport seaborn as sns\n%matplotlib inline\n\n\nnum_hit = 0\n#now parse the output\nhit_list = []\nhit_ids = []\ncounts = []\nqnames = []\nwith open('overlap_hmm_blast.txt', 'r') as input:\n for qresult in SearchIO.parse(input, 'hmmer3-tab'):\n query_id = qresult.id #sequence ID from fasta\n qnames.append(query_id)\n hits = qresult.hits\n hit_ids = hit_ids+qresult.hit_keys\n hit_list.append(hits)\n num_hits = len(hits)\n counts.append(num_hits)\n num_hit = num_hit + num_hits\nprint(num_hit)", "2181\n" ] ], [ [ "### Hits by HMM Model", "_____no_output_____" ] ], [ [ "df_query = pd.DataFrame({'Names': qnames, \n 'Hit Counts': counts}, columns=['Names','Hit Counts'])\ndf_query = df_query.sort_values(by=['Hit Counts'], ascending=False)\ntop_n = df_query.head(10)", "_____no_output_____" ], [ "top_n", "_____no_output_____" ], [ "## Number of VOGS identified\ny = df_query['Names'].str.contains('VOG')\ndf2=df_query[y]\ndf2.head(7)", "_____no_output_____" ], [ "f = plt.subplots(figsize=(10, 10))\nsns.pointplot(x='Names', y='Hit Counts', data=df_query, markers=[\"o\"], linestyles=[\"-\"])\nplt.yticks(fontsize=15)\nplt.xticks([])\nplt.title('Count of Virulence Factor Hits in Dataset', fontsize=18)\nplt.xlabel('Virulence Factor Categories', fontsize=18)\nplt.ylabel('Hit Counts', fontsize=16)\nplt.tight_layout()\nsns.despine(trim=True, left=True)\nplt.savefig('HMM_Hit_Counts.jpg', transparent=True)", "_____no_output_____" ], [ "### Top N Hits\nsns.factorplot(x=\"Hit Counts\", y=\"Names\", data=top_n, kind=\"bar\", palette=\"vlag\", size=10)\n\n\n#axes = plt.gca()\n#axes.set_xlim([0,10])\nplt.yticks(fontsize=15)\nplt.xticks(fontsize=15)\nplt.title('Abundances of Top 10 Virulence Factor \\n Hits in Phage Protein Database', fontsize=18)\nplt.xlabel('Virulence Factor Hit Count', fontsize=18)\nplt.ylabel('Top Virulence Factor Identities', fontsize=16)\nplt.tight_layout()\n\n#sns.despine(trim=True, left=True)\nplt.savefig('Top_VF_Hit_Plot.jpg', transparent=True)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ] ]
d0a457993029d8fed76f612e7dfdf4627daeea27
5,730
ipynb
Jupyter Notebook
notebooks/dynamic-maps-plotly.ipynb
jpn--/PyTT
c3c7433946d02ebc7709cc995d8e3b3b33234cd0
[ "BSD-3-Clause" ]
null
null
null
notebooks/dynamic-maps-plotly.ipynb
jpn--/PyTT
c3c7433946d02ebc7709cc995d8e3b3b33234cd0
[ "BSD-3-Clause" ]
null
null
null
notebooks/dynamic-maps-plotly.ipynb
jpn--/PyTT
c3c7433946d02ebc7709cc995d8e3b3b33234cd0
[ "BSD-3-Clause" ]
null
null
null
26.045455
108
0.589354
[ [ [ "import transportation_tutorials as tt", "_____no_output_____" ] ], [ [ "# Creating Dynamic Maps\n\nIn this gallery, we will demonstrate the creation of a variety of interactive maps.\nInteractive, dynamic maps are a good choice for analytical work that will be reviewed\nonline, either in a Jupyter notebook by an analyst, or published on a website.\nIn these examples,\nwe will demonstrate creating dynamic maps using [Plotly](https://plot.ly/python/) \nand [mapped](https://pypi.org/project/mapped/), which integrates a handful of \nplotly mapping tools directly into the geopandas dataframe object.", "_____no_output_____" ] ], [ [ "import numpy as np\nimport pandas as pd\nimport geopandas as gpd\nimport mapped", "_____no_output_____" ] ], [ [ "We'll begin by loading the TAZ and MAZ shapefiles, filtering them to a restricted study area,\nand defining the center point.", "_____no_output_____" ] ], [ [ "xmin = 905712\nymin = 905343\ntaz = gpd.read_file(tt.data('SERPM8-TAZSHAPE')).cx[xmin:, ymin:].to_crs(epsg=4326)\nmaz = gpd.read_file(tt.data('SERPM8-MAZSHAPE')).cx[xmin:, ymin:].to_crs(epsg=4326)\ncenter = (26.9198, -80.1121) # regular lat-lon", "_____no_output_____" ] ], [ [ "## Simple Map\n\nSimple maps showing the geographic data contained in a GeoDataFrame can be created\nby converting the GeoDataFrame to a GeoJson object, and adding that to\na folium Map.", "_____no_output_____" ] ], [ [ "taz.plotly_choropleth(line_width=2)", "_____no_output_____" ] ], [ [ "### Alternative Map Tiles\n\nThe default tiles are set to [Carto](https://carto.com)'s \n[positron](https://carto.com/blog/getting-to-know-positron-and-dark-matter/), \nbut others tiles are possible, including\ntilesets from [Stamen Design](http://stamen.com/) and [OpenStreetMap](www.openstreetmap.org). \nThe [positron](https://carto.com/blog/getting-to-know-positron-and-dark-matter/) tiles are\nspecifically designed to give geographic context without overwhelming maps with data \nthat is not the analytic focus of the presentation.", "_____no_output_____" ] ], [ [ "taz.plotly_choropleth(line_width=2, mapbox_style=\"open-street-map\")", "_____no_output_____" ] ], [ [ "## Mapping Data\n\nOne of the input files for SERPM 8 is a MAZ-level demographics file.\nThe file for the 2015 base year is included in the tutorial data, and \nwe can load it with the `read_csv` function.", "_____no_output_____" ] ], [ [ "mazd = pd.read_csv(tt.data('SERPM8-MAZDATA', '*.csv'))", "_____no_output_____" ] ], [ [ "Use `info` to see a summary of the DataFrame.", "_____no_output_____" ] ], [ [ "mazd.info()", "_____no_output_____" ] ], [ [ "We can join the demographics table to the shape file we loaded previously,\nto enable some visualizations on this data. This can be done with the\n``merge`` method of DataFrames.", "_____no_output_____" ] ], [ [ "maz1 = maz.merge(mazd, how='left', left_on='MAZ', right_on='mgra')", "_____no_output_____" ], [ "maz1.index=maz1.MAZ", "_____no_output_____" ] ], [ [ "## Choropleth Maps\n\nA [choropleth map](https://en.wikipedia.org/wiki/Choropleth_map) is a map with areas colored, \nshaded, or patterned in proportion to some measured value for the region displayed. This kind of\nmap is commonly used to display things like population density.\n\nWhen a data column is given to the plotly_choropleth function, that data is used to colorize\nthe choropleth map.", "_____no_output_____" ] ], [ [ "maz1.plotly_choropleth(\"PopDen\", colorbar_title=\"Population Density\", colorbar_title_side='right')", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ] ]
d0a464e3f14bfdb833246f38a35087636a15e529
684,523
ipynb
Jupyter Notebook
Capstone Project - Car Booking Analysis and Prediction.ipynb
telayat/Car_Booking_Analysis_and_Prediction_Capstone_Project
5ce43efec6404480eeabc79c1d11abe7452ea003
[ "FTL", "CNRI-Python" ]
null
null
null
Capstone Project - Car Booking Analysis and Prediction.ipynb
telayat/Car_Booking_Analysis_and_Prediction_Capstone_Project
5ce43efec6404480eeabc79c1d11abe7452ea003
[ "FTL", "CNRI-Python" ]
null
null
null
Capstone Project - Car Booking Analysis and Prediction.ipynb
telayat/Car_Booking_Analysis_and_Prediction_Capstone_Project
5ce43efec6404480eeabc79c1d11abe7452ea003
[ "FTL", "CNRI-Python" ]
null
null
null
138.259544
142,200
0.833808
[ [ [ "### Data Scientist Nano Degree - Capstone Project \n### Car Booking Analysis and Prediction\n### Tarek Abd ElRahman Ahmed ElAyat", "_____no_output_____" ], [ "#### Let's import the needed libraries", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nimport numpy as np\nimport warnings\n\nfrom sklearn import model_selection\nfrom sklearn.linear_model import LinearRegression\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.ensemble import RandomForestRegressor\n\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.metrics import r2_score, mean_squared_error\nfrom math import sqrt\n\nimport xgboost as xgb\nfrom sklearn import preprocessing\n#from sklearn.preprocessing import MinMaxScaler\n#scaler = MinMaxScaler()\n\nwarnings.filterwarnings('ignore')\n%matplotlib inline", "_____no_output_____" ], [ "def print_model_scores(model, X_train, X_test, y_train, y_test):\n '''\n INPUT:\n model - the regression model\n X_train - pandas dataframe for the training dataset\n X_test - pandas dataframe for the test dataset\n y_train - pandas dataframe for the training label\n y_test - pandas dataframe for the test label\n \n OUTPUT:\n y_train_preds - panadas dataframe for the y_train prediction\n y_test_preds - panadas dataframe for the y_test prediction\n print scores \n '''\n \n #Predict and score the model on training data\n y_train_preds = model.predict(X_train) \n print(\"The r-squared score for our model on Training data is {} on {} values.\".format(r2_score(y_train, y_train_preds), len(y_train)))\n print(\"The mean_squared_error score for our model on Training data is {} on {} values.\".format(mean_squared_error(y_train, y_train_preds), len(y_train)))\n print(\"The root_mean_squared_error score for our model on Training data is {} on {} values.\".format(sqrt(mean_squared_error(y_train, y_train_preds)), len(y_train)))\n\n print(\" \")\n #Predict and score the model on test data\n y_test_preds = model.predict(X_test) \n print(\"The r-squared score for our model on Testing data is {} on {} values.\".format(r2_score(y_test, y_test_preds), len(y_test)))\n print(\"The mean_squared_error score for our model on Testing data is {} on {} values.\".format(mean_squared_error(y_test, y_test_preds), len(y_test)))\n print(\"The root_mean_squared_error score for our model on Testing data is {} on {} values.\".format(sqrt(mean_squared_error(y_test, y_test_preds)), len(y_test)))\n \n return y_train_preds, y_test_preds\n", "_____no_output_____" ], [ "def create_dummy_df(df, dummy_na):\n '''\n INPUT:\n df - pandas dataframe with categorical variables you want to dummy\n dummy_na - Bool holding whether you want to dummy NA vals of categorical columns or not\n \n OUTPUT:\n df - a new dataframe that has the following characteristics:\n 1. contains all columns that were not specified as categorical\n 2. removes all the original columns in cat_cols\n 3. dummy columns for each of the categorical columns\n 4. if dummy_na is True - it also contains dummy columns for the NaN values\n 5. 
Use a prefix of the column name with an underscore (_) for separating \n '''\n cat_cols = df.select_dtypes(include=['object']).columns\n \n for col in cat_cols:\n try:\n # for each cat add dummy var, drop original column\n df = pd.concat([df.drop(col, axis=1), pd.get_dummies(df[col], prefix=col, prefix_sep='_', drop_first=True, dummy_na=dummy_na)], axis=1)\n except:\n continue\n return df", "_____no_output_____" ], [ "def coef_weights(coefficients, X_train):\n '''\n INPUT:\n coefficients - the coefficients of the linear model \n X_train - the training data, so the column names can be used\n OUTPUT:\n coefs_df - a dataframe holding the coefficient, estimate, and abs(estimate)\n \n Provides a dataframe that can be used to understand the most influential coefficients\n in a linear model by providing the coefficient estimates along with the name of the \n variable attached to the coefficient.\n '''\n coefs_df = pd.DataFrame()\n coefs_df['est_int'] = X_train.columns\n coefs_df['coefs'] = coefficients\n coefs_df['abs_coefs'] = np.abs(coefficients)\n coefs_df = coefs_df.sort_values('abs_coefs', ascending=False)\n return coefs_df", "_____no_output_____" ] ], [ [ "### Load the training data and lookup files", "_____no_output_____" ] ], [ [ "df_taxi = pd.read_csv('C:\\\\Users\\\\tayat\\\\Documents\\\\Capstone Project\\\\NYC Dataset\\\\taxi_onefile.csv')\ndf_lookup_zone = pd.read_csv('C:\\\\Users\\\\tayat\\\\Documents\\\\Capstone Project\\\\NYC Dataset\\\\taxi+_zone_lookup.csv')", "_____no_output_____" ], [ "pd.set_option('display.max_rows', 50)", "_____no_output_____" ] ], [ [ "### Basic data exploration, list the columns with their data types and describe the features", "_____no_output_____" ] ], [ [ "df_taxi.shape", "_____no_output_____" ], [ "df_taxi.head()", "_____no_output_____" ] ], [ [ "### List the columns, dtypes, and describe the features", "_____no_output_____" ] ], [ [ "df_taxi.dtypes", "_____no_output_____" ], [ "# from the below describtion we notice that there are unlogical (-ve values), missing values and outliers which needs cleaning\ndf_taxi.describe().transpose()", "_____no_output_____" ] ], [ [ "### Count missing data per feature", "_____no_output_____" ] ], [ [ "total = df_taxi.isnull().sum()\npercent = (df_taxi.isnull().sum()/df_taxi.isnull().count())\nmissing_data = pd.concat([total, percent], axis=1, keys=['Total', 'Percent'])\nmissing_data.head(100)", "_____no_output_____" ] ], [ [ "## Feature Engineering:\n### Extract the engineered features from pickup and dropoff times and then drop these two columns", "_____no_output_____" ] ], [ [ "from pandas.tseries.holiday import USFederalHolidayCalendar as calendar\n\ndf_taxi['tpep_pickup_datetime'] = pd.to_datetime(df_taxi['tpep_pickup_datetime'])\ndf_taxi['tpep_dropoff_datetime'] = pd.to_datetime(df_taxi['tpep_dropoff_datetime'])\ndf_taxi['tripduration_mins'] = ((df_taxi['tpep_dropoff_datetime'] - df_taxi['tpep_pickup_datetime']).dt.total_seconds()/60).astype(float)\n\ndf_taxi['year'] = pd.DatetimeIndex(df_taxi['tpep_pickup_datetime']).year\ndf_taxi['month'] = pd.DatetimeIndex(df_taxi['tpep_pickup_datetime']).month\ndf_taxi['day'] = pd.DatetimeIndex(df_taxi['tpep_pickup_datetime']).day\ndf_taxi['hour'] = pd.DatetimeIndex(df_taxi['tpep_pickup_datetime']).hour\ndf_taxi['dayofweek'] = pd.DatetimeIndex(df_taxi['tpep_pickup_datetime']).dayofweek\ndf_taxi['weekendflag'] = (df_taxi['dayofweek']>=5).astype(int)\n\ncal = calendar()\nholidays = cal.holidays(start=df_taxi['tpep_pickup_datetime'].min(), 
end=df_taxi['tpep_pickup_datetime'].max())\ndf_taxi['holidayflag'] = (df_taxi['tpep_pickup_datetime'].isin(holidays)).astype(int)\ndel holidays\n\ndf_taxi.drop(['tpep_pickup_datetime', 'tpep_dropoff_datetime'], axis=1, inplace=True)", "_____no_output_____" ], [ "df_taxi.head()", "_____no_output_____" ], [ "#Save the output into new file to avoid recalculation\n#df_taxi.to_csv('C:\\\\Users\\\\tayat\\\\Documents\\\\Capstone Project\\\\NYC Dataset\\\\taxi_engineered.csv', index = False)\n#df_taxi = pd.read_csv('C:\\\\Users\\\\tayat\\\\Documents\\\\Capstone Project\\\\NYC Dataset\\\\taxi_engineered.csv')", "_____no_output_____" ], [ "df_taxi.dtypes", "_____no_output_____" ], [ "#Have another look after feature engineering\ndf_taxi.describe().transpose()", "_____no_output_____" ] ], [ [ "## Data Cleaning:\n### From the above columns quick overview, we notice some unlogical columns:\n#### - trip distance, duration, fare, tip, toll...etc with -ve values or very high values\n#### - unlogical and out of range values like passenger_count above 6, ratecodeID above 6 and so on\n### let's analyze the ranges and then get rid of these misleading values first", "_____no_output_____" ] ], [ [ "((df_taxi['trip_distance']/50).astype(int)*50).loc[:].value_counts()", "_____no_output_____" ], [ "((df_taxi['total_amount']/100).astype(int)*100).loc[:].value_counts()", "_____no_output_____" ], [ "((df_taxi['fare_amount']/100).astype(int)*100).loc[:].value_counts()\n#exclude -ve and fareamount > 100", "_____no_output_____" ], [ "((df_taxi['extra']/5).astype(int)*5).loc[:].value_counts()\n#exclude -ve and extra > 5", "_____no_output_____" ], [ "((df_taxi['mta_tax']).astype(int)).loc[:].value_counts()\n#exclude -ve and mta_tax > 1", "_____no_output_____" ], [ "((df_taxi['tip_amount']/10).astype(int)*10).loc[:].value_counts()\n#exclude -ve and tip_amount > 20", "_____no_output_____" ], [ "((df_taxi['tolls_amount']/10).astype(int)*10).loc[:].value_counts()\n#exclude -ve and tolls_amount > 30", "_____no_output_____" ], [ "((df_taxi['improvement_surcharge']).astype(int)).loc[:].value_counts()\n#exclude -ve and improvement_surcharge > 1", "_____no_output_____" ], [ "((df_taxi['tripduration_mins']/60).astype(int)).loc[:].value_counts()\n#exclude -ve and tripduration_mins > 180 minute", "_____no_output_____" ], [ "((df_taxi['trip_distance']/20).astype(int)*20).loc[:].value_counts()\n#exclude -ve and trip_distance > 30", "_____no_output_____" ] ], [ [ "#### Drop rows with unlogical or out of range values", "_____no_output_____" ] ], [ [ "df_taxi.drop(df_taxi[ (df_taxi['VendorID'].isna()) | (df_taxi['fare_amount'] <= 0) | (df_taxi['total_amount'] <= 0) | (df_taxi['tripduration_mins'] <= 0) | (df_taxi['tip_amount'] < 0) | (df_taxi['tolls_amount'] < 0) | (df_taxi['improvement_surcharge'] < 0) | (df_taxi['congestion_surcharge'] < 0) | (df_taxi['trip_distance'] <= 0) | (df_taxi['extra'] < 0) | (df_taxi['mta_tax'] < 0) | (df_taxi['passenger_count'] > 6) | (df_taxi['RatecodeID'] > 6)].index, inplace = True)", "_____no_output_____" ], [ "df_taxi.shape", "_____no_output_____" ] ], [ [ "#### Drop outliers and fill na values", "_____no_output_____" ] ], [ [ "df_taxi.drop(df_taxi[ (df_taxi['fare_amount'] > 100) | (df_taxi['tripduration_mins'] > 180) | (df_taxi['tip_amount'] > 20) | (df_taxi['tolls_amount'] > 30) | (df_taxi['extra'] > 5) | (df_taxi['mta_tax'] > 1) | (df_taxi['improvement_surcharge'] > 1) | (df_taxi['trip_distance'] > 30)].index, inplace = True)", "_____no_output_____" ], [ "df_taxi['congestion_surcharge'] = 
df_taxi['congestion_surcharge'].fillna(0)", "_____no_output_____" ], [ "df_taxi.shape", "_____no_output_____" ] ], [ [ "### Save the output into new file to avoid recalculation", "_____no_output_____" ] ], [ [ "#Save the output into new file to avoid recalculation\n#df_taxi.to_csv('C:\\\\Users\\\\tayat\\\\Documents\\\\Capstone Project\\\\NYC Dataset\\\\taxi_clean.csv', index = False)\n#df_taxi = pd.read_csv('C:\\\\Users\\\\tayat\\\\Documents\\\\Capstone Project\\\\NYC Dataset\\\\taxi_clean.csv')", "_____no_output_____" ] ], [ [ "### Continue data exploration and understanding after feature engineering and cleaning done", "_____no_output_____" ] ], [ [ "df_taxi.describe().transpose()", "_____no_output_____" ], [ "df_taxi['passenger_count'].value_counts()", "_____no_output_____" ], [ "df_taxi['RatecodeID'].value_counts()", "_____no_output_____" ], [ "df_taxi['sameloc'] = (df_taxi['PULocationID'] == df_taxi['DOLocationID']).astype(int)", "_____no_output_____" ], [ "df_taxi.head()", "_____no_output_____" ] ], [ [ "### Generate the correlation matrix after the data cleaning", "_____no_output_____" ] ], [ [ "corrmat = df_taxi.corr()\nf, ax = plt.subplots(figsize=(12, 9))\nsns.heatmap(corrmat, vmax=.8, square=True, xticklabels=True, yticklabels=True);", "_____no_output_____" ], [ "#scatter plot trip_distance/fare_amount\nvar = 'trip_distance'\ndata = pd.concat([df_taxi['fare_amount'], df_taxi[var]], axis=1)", "_____no_output_____" ], [ "data.plot.scatter(x=var, y='fare_amount'); #, ylim=(0,100)", "_____no_output_____" ], [ "#The trip duration histogram\nexception_trips = df_taxi.loc[(df_taxi['fare_amount'] < 10) & (df_taxi['trip_distance'] > 12)]\nexception_trips.shape\nsns.distplot(exception_trips['tripduration_mins'], kde=False, norm_hist=False);", "_____no_output_____" ], [ "#scatter plot trip duration/fare amount\ndata2 = pd.concat([exception_trips['fare_amount'], exception_trips['tripduration_mins']], axis=1)", "_____no_output_____" ], [ "data2.plot.scatter(x='tripduration_mins', y='fare_amount'); #, ylim=(0,100)", "_____no_output_____" ], [ "#Fare amount histogram\nsns.distplot(df_taxi['fare_amount'], kde=False, norm_hist=False, bins = 20);", "_____no_output_____" ], [ "#Trip distance histogram\nsns.distplot(df_taxi['trip_distance'], kde=False, norm_hist=False, bins = 20);", "_____no_output_____" ], [ "#Hours of the day histogram\nsns.distplot(df_taxi['hour'], kde=False, norm_hist=False);", "_____no_output_____" ], [ "#Day of week histogram\nsns.distplot(df_taxi['dayofweek'], kde=False, norm_hist=False);", "_____no_output_____" ], [ "#Pickup locations histogram\nsns.distplot(df_taxi['PULocationID'], kde=False, norm_hist=False);", "_____no_output_____" ], [ "#Drop-off locations histogram\nsns.distplot(df_taxi['DOLocationID'], kde=False, norm_hist=False);", "_____no_output_____" ], [ "#Same location histogram\nsns.distplot(df_taxi['sameloc'], kde=False, norm_hist=False);", "_____no_output_____" ], [ "((df_taxi['trip_distance']/5).astype(int)*5).loc[:].value_counts()", "_____no_output_____" ], [ "Loc_Dist = pd.pivot_table(df_taxi, index = 'PULocationID', columns = 'DOLocationID', values = 'VendorID', aggfunc = ['count'])\n#Loc_Dist = pd.pivot_table(df_taxi, index = ['PULocationID', 'DOLocationID'], values = 'VendorID', aggfunc = ['count'])\n#print(Loc_Dist)\nf, ax = plt.subplots(figsize=(20, 20))\nsns.heatmap(Loc_Dist, cmap=\"YlGnBu\")", "_____no_output_____" ], [ "#Heatmap for pickup locations over days of the week\nLoc_Dist = pd.pivot_table(df_taxi, index = 'PULocationID', columns = 
'dayofweek', values = 'VendorID', aggfunc = ['count'])\nf, ax = plt.subplots(figsize=(20, 20))\nsns.heatmap(Loc_Dist, cmap=\"YlGnBu\")", "_____no_output_____" ], [ "#Heatmap for pickup locations over hours of the day\nLoc_Dist = pd.pivot_table(df_taxi, index = 'PULocationID', columns = 'hour', values = 'VendorID', aggfunc = ['count'])\nf, ax = plt.subplots(figsize=(20, 20))\nsns.heatmap(Loc_Dist, cmap=\"YlGnBu\")", "_____no_output_____" ], [ "PULocations = pd.merge(\n df_taxi['PULocationID'],\n df_lookup_zone,\n how=\"inner\",\n left_on='PULocationID',\n right_on='LocationID',\n sort=True\n)\n\nPULocations.head()", "_____no_output_____" ], [ "#Trip distribution over NY Pickup Boroughs\nPULocations.groupby(['Borough']).count()['LocationID'].plot(kind=\"bar\", title = 'Trips distribution over Pickup Boroughs'); #, fontsize=14, figsize = (30, 8)", "_____no_output_____" ], [ "DOLocations = pd.merge(\n df_taxi['DOLocationID'],\n df_lookup_zone,\n how=\"inner\",\n left_on='DOLocationID',\n right_on='LocationID',\n sort=True\n)", "_____no_output_____" ], [ "#Trip distribution over NY Dropoff Boroughs\nDOLocations.groupby(['Borough']).count()['LocationID'].plot(kind=\"bar\", title = 'Trips distribution over Drop Boroughs'); #, fontsize=14, figsize = (30, 8)", "_____no_output_____" ] ], [ [ "## Business Questions/Use Cases", "_____no_output_____" ], [ "### What are the most demanding areas at specific time?\n#### for example below are top pick up locations for a specific day and hour (Thursday 7pm)", "_____no_output_____" ] ], [ [ "filtered_df = df_taxi[(df_taxi['dayofweek'] == 3) & (df_taxi['hour'] == 19)]['PULocationID']\n\nTopPULocations = pd.merge(\n filtered_df,\n df_lookup_zone,\n how=\"inner\",\n left_on='PULocationID',\n right_on='LocationID',\n sort=True\n)\n\nTopPULocations.head()", "_____no_output_____" ], [ "TopPULocations.groupby(['PULocationID']).count()['LocationID'].sort_values(ascending=False).head(30).plot(kind=\"bar\", figsize = (30, 8), fontsize=20 , title = 'Top PULocations on Thursday 7pm'); #, fontsize=14, figsize = (30, 8)", "_____no_output_____" ] ], [ [ "### How to deploy the fleet based on driver’s preferences for drop off locations (a driver may prefer a drop off near his home)?\n#### For example, below are top pick up locations for a specific day and hour (Thursday 7pm) which will most likely will lead to the favorite drop off locations [230, 234, 236]", "_____no_output_____" ] ], [ [ "DOList = [230, 234, 236]\nfiltered_df2 = df_taxi[(df_taxi['dayofweek'] == 3) & (df_taxi['hour'] == 19) & (df_taxi['DOLocationID'].isin(DOList))][['PULocationID', 'DOLocationID']]\n", "_____no_output_____" ], [ "filtered_df2.groupby(['PULocationID']).count()['DOLocationID'].sort_values(ascending=False).head(30).plot(kind=\"bar\", figsize = (30, 8), fontsize=20 , title = 'Top PULocations on Thursday 7pm for specific DO List'); #, fontsize=14, figsize = (30, 8)", "_____no_output_____" ] ], [ [ "### Now let's run our basic model to predict the Fare_Rate", "_____no_output_____" ], [ "### Scenario #1 Linear regression with all possible features as numeric", "_____no_output_____" ] ], [ [ "#Split into explanatory and response variables\nX = df_taxi[['trip_distance', 'RatecodeID', 'tripduration_mins', 'month', 'hour', 'weekendflag', 'holidayflag', 'passenger_count', 'PULocationID', 'DOLocationID', 'payment_type']]\ny = df_taxi['fare_amount']", "_____no_output_____" ], [ "#Split into train and test\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size = .30, 
random_state=42)\n\n#Clear memory to be able to run the model\ndel X\ndel y", "_____no_output_____" ], [ "lm_model = LinearRegression(normalize=True) # Instantiate\nlm_model.fit(X_train, y_train) #Fit", "_____no_output_____" ], [ "#Predict and score the model on training and test data\ny_train_preds, y_test_preds = print_model_scores(lm_model, X_train, X_test, y_train, y_test)", "The r-squared score for our model on Training data is 0.9408655229497245 on 14712202 values.\nThe mean_squared_error score for our model on Training data is 6.879148895127688 on 14712202 values.\nThe root_mean_squared_error score for our model on Training data is 2.622813164357631 on 14712202 values.\n \nThe r-squared score for our model on Testing data is 0.9409164098556393 on 6305230 values.\nThe mean_squared_error score for our model on Testing data is 6.871197085487989 on 6305230 values.\nThe root_mean_squared_error score for our model on Testing data is 2.62129683276961 on 6305230 values.\n" ], [ "#Use the function\ncoef_df = coef_weights(lm_model.coef_, X_train)\n\n#A quick look at the top results\ncoef_df.head(20)", "_____no_output_____" ] ], [ [ "### Scenario #2 Linear regression using important features as numeric", "_____no_output_____" ] ], [ [ "#Split into explanatory and response variables\nX = df_taxi[['trip_distance', 'RatecodeID', 'tripduration_mins', 'month', 'hour', 'weekendflag', 'holidayflag']]\ny = df_taxi['fare_amount']", "_____no_output_____" ], [ "#Split into train and test\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size = .30, random_state=42)\ndel X\ndel y", "_____no_output_____" ], [ "lm_model = LinearRegression(normalize=True) # Instantiate\nlm_model.fit(X_train, y_train) #Fit\n \n#Predict and score the model on training and test data\ny_train_preds, y_test_preds = print_model_scores(lm_model, X_train, X_test, y_train, y_test)", "The r-squared score for our model on Training data is 0.9408385370477721 on 14712202 values.\nThe mean_squared_error score for our model on Training data is 6.882288181156071 on 14712202 values.\nThe root_mean_squared_error score for our model on Training data is 2.623411553903823 on 14712202 values.\n \nThe r-squared score for our model on Testing data is 0.940890929867919 on 6305230 values.\nThe mean_squared_error score for our model on Testing data is 6.874160311265811 on 6305230 values.\nThe root_mean_squared_error score for our model on Testing data is 2.6218619931769505 on 6305230 values.\n" ], [ "#Use the function\ncoef_df = coef_weights(lm_model.coef_, X_train)\n\n#A quick look at the top results\ncoef_df.head(20)", "_____no_output_____" ] ], [ [ "### Scenario #3 Linear regression with important features and OneHotEncoding for categorical features", "_____no_output_____" ] ], [ [ "#Split into explanatory and response variables\ndf_taxi_cat = df_taxi[['trip_distance', 'RatecodeID', 'tripduration_mins', 'month', 'hour', 'weekendflag', 'holidayflag', 'fare_amount']]\ndf_taxi_cat = df_taxi_cat.astype({'RatecodeID': 'object', 'month': 'object', 'hour': 'object'})", "_____no_output_____" ], [ "df_taxi_cat = create_dummy_df(df_taxi_cat, dummy_na=False)\ndel df_taxi", "_____no_output_____" ], [ "df_taxi_cat.head()", "_____no_output_____" ], [ "#Split into explanatory and response variables\ny = df_taxi_cat['fare_amount']\nX = df_taxi_cat.drop('fare_amount', axis=1)\ndel df_taxi_cat", "_____no_output_____" ], [ "#Split into train and test\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size = .30, random_state=42)\ndel X\ndel 
y", "_____no_output_____" ], [ "lm_model = LinearRegression(normalize=True) # Instantiate\nlm_model.fit(X_train, y_train) #Fit", "_____no_output_____" ], [ "#Predict and score the model on training and test data\ny_train_preds, y_test_preds = print_model_scores(lm_model, X_train, X_test, y_train, y_test)", "The r-squared score for our model on Training data is 0.9432230301501271 on 14712202 values.\nThe mean_squared_error score for our model on Training data is 6.604898679993168 on 14712202 values.\nThe root_mean_squared_error score for our model on Training data is 2.5699997431893196 on 14712202 values.\n \nThe r-squared score for our model on Testing data is 0.943321447117103 on 6305230 values.\nThe mean_squared_error score for our model on Testing data is 6.591500388298767 on 6305230 values.\nThe root_mean_squared_error score for our model on Testing data is 2.567391748116903 on 6305230 values.\n" ], [ "#Use the function\ncoef_df = coef_weights(lm_model.coef_, X_train)\n\n#A quick look at the top results\ncoef_df.head(50)", "_____no_output_____" ] ], [ [ "### Scenario #4 XGBoost regressor with important features and onehotencoding for categorical features", "_____no_output_____" ] ], [ [ "#Split into explanatory and response variables\ndf_taxi_cat = df_taxi[['trip_distance', 'RatecodeID', 'tripduration_mins', 'month', 'hour', 'weekendflag', 'holidayflag', 'fare_amount']]\ndf_taxi_cat = df_taxi_cat.astype({'RatecodeID': 'object', 'month': 'object', 'hour': 'object'})", "_____no_output_____" ], [ "df_taxi_cat = create_dummy_df(df_taxi_cat, dummy_na=False)\ndel df_taxi", "_____no_output_____" ], [ "#Split into explanatory and response variables\ny = df_taxi_cat['fare_amount']\nX = df_taxi_cat.drop('fare_amount', axis=1)\ndel df_taxi_cat", "_____no_output_____" ], [ "#Split into train and test\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size = .30, random_state=42)\ndel X\ndel y", "_____no_output_____" ], [ "xg_reg = xgb.XGBRegressor(objective ='reg:linear', colsample_bytree = 0.3, learning_rate = 0.1,vmax_depth = 5, alpha = 10, n_estimators = 10) # Instantiate\nxg_reg.fit(X_train, y_train) #Fit", "[14:03:08] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.3.0/src/objective/regression_obj.cu:170: reg:linear is now deprecated in favor of reg:squarederror.\n[14:03:08] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.3.0/src/learner.cc:541: \nParameters: { vmax_depth } might not be used.\n\n This may not be accurate due to some parameters are only used in language bindings but\n passed down to XGBoost core. Or some parameters are not used but slip through this\n verification. 
Please open an issue if you find above cases.\n\n\n" ], [ "#Predict and score the model on training and test data\ny_train_preds, y_test_preds = print_model_scores(xg_reg, X_train, X_test, y_train, y_test)", "The r-squared score for our model on Training data is 0.3242116292704751 on 14712202 values.\nThe mean_squared_error score for our model on Training data is 78.61486320225254 on 14712202 values.\nThe root_mean_squared_error score for our model on Training data is 8.866502309380659 on 14712202 values.\n \nThe r-squared score for our model on Testing data is 0.32425626984223854 on 6305230 values.\nThe mean_squared_error score for our model on Testing data is 78.58642878423606 on 6305230 values.\nThe root_mean_squared_error score for our model on Testing data is 8.86489869001536 on 6305230 values.\n" ] ], [ [ "### Scenario #5 XGBoost regressor with modified hyper parameters", "_____no_output_____" ] ], [ [ "#Change the n_estimators from 10 to 50\nxg_reg2 = xgb.XGBRegressor(objective ='reg:linear', colsample_bytree = 0.3, learning_rate = 0.1,vmax_depth = 5, alpha = 10, n_estimators = 50) # Instantiate\nxg_reg2.fit(X_train, y_train) #Fit", "[01:30:00] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.3.0/src/objective/regression_obj.cu:170: reg:linear is now deprecated in favor of reg:squarederror.\n[01:30:00] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.3.0/src/learner.cc:541: \nParameters: { vmax_depth } might not be used.\n\n This may not be accurate due to some parameters are only used in language bindings but\n passed down to XGBoost core. Or some parameters are not used but slip through this\n verification. Please open an issue if you find above cases.\n\n\n" ], [ "#Predict and score the model on training and test data\ny_train_preds, y_test_preds = print_model_scores(xg_reg2, X_train, X_test, y_train, y_test)", "The r-squared score for our model on Training data is 0.9636330518992762 on 14712202 values.\nThe mean_squared_error score for our model on Training data is 4.230588707727396 on 14712202 values.\nThe root_mean_squared_error score for our model on Training data is 2.056839494887094 on 14712202 values.\n \nThe r-squared score for our model on Testing data is 0.9638064313943984 on 6305230 values.\nThe mean_squared_error score for our model on Testing data is 4.2091745357480885 on 6305230 values.\nThe root_mean_squared_error score for our model on Testing data is 2.0516272896771697 on 6305230 values.\n" ] ] ]
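The scenario comparison in the notebook above (one-hot encoded categoricals plus a regressor, scored with R-squared and RMSE on train and test splits) can be reproduced end to end with a scikit-learn pipeline. The sketch below uses synthetic data; only the column names mirror the notebook, and the toy fare formula and pipeline wiring are illustrative stand-ins for the original `print_model_scores`/`create_dummy_df` helpers.

```python
# Minimal sketch of the "Scenario #3" pattern: one-hot encoded categoricals +
# linear regression, scored on train and test. The DataFrame is synthetic.
import numpy as np
import pandas as pd
from sklearn.compose import ColumnTransformer
from sklearn.preprocessing import OneHotEncoder
from sklearn.pipeline import Pipeline
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import r2_score, mean_squared_error

rng = np.random.default_rng(42)
n = 5_000
df = pd.DataFrame({
    "trip_distance": rng.uniform(0.5, 30, n),
    "tripduration_mins": rng.uniform(2, 180, n),
    "RatecodeID": rng.integers(1, 7, n),
    "month": rng.integers(1, 13, n),
    "hour": rng.integers(0, 24, n),
})
# toy fare: distance- and duration-driven plus noise (illustrative only)
df["fare_amount"] = (2.5 + 2.4 * df["trip_distance"]
                     + 0.3 * df["tripduration_mins"] + rng.normal(0, 2, n))

categorical = ["RatecodeID", "month", "hour"]
numeric = ["trip_distance", "tripduration_mins"]

model = Pipeline([
    ("prep", ColumnTransformer(
        [("onehot", OneHotEncoder(handle_unknown="ignore"), categorical)],
        remainder="passthrough")),
    ("reg", LinearRegression()),
])

X_train, X_test, y_train, y_test = train_test_split(
    df[numeric + categorical], df["fare_amount"], test_size=0.30, random_state=42)
model.fit(X_train, y_train)

for name, X, y in [("Training", X_train, y_train), ("Testing", X_test, y_test)]:
    pred = model.predict(X)
    rmse = np.sqrt(mean_squared_error(y, pred))
    print(f"{name}: r2={r2_score(y, pred):.4f}, rmse={rmse:.4f} on {len(y)} values")
```

Wrapping the encoder and regressor in one `Pipeline` keeps the one-hot vocabulary fitted on the training split only, so unseen test categories cannot leak into training.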
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
d0a49421eb7fcc0b8ab71fc804f522b464834d58
206,489
ipynb
Jupyter Notebook
.ipynb_checkpoints/Data Loading and Preprocessing-checkpoint.ipynb
AtharvaSune/PyTorch
ec6257f3e47f6f1472506f0bb10dae2a7a2190c4
[ "MIT" ]
1
2019-08-15T11:47:24.000Z
2019-08-15T11:47:24.000Z
.ipynb_checkpoints/Data Loading and Preprocessing-checkpoint.ipynb
AtharvaSune/PyTorch
ec6257f3e47f6f1472506f0bb10dae2a7a2190c4
[ "MIT" ]
null
null
null
.ipynb_checkpoints/Data Loading and Preprocessing-checkpoint.ipynb
AtharvaSune/PyTorch
ec6257f3e47f6f1472506f0bb10dae2a7a2190c4
[ "MIT" ]
null
null
null
512.379653
83,952
0.939813
[ [ [ "from __future__ import division, print_function\nimport os\nimport torch\nimport pandas\nimport numpy as np\nfrom torch.utils.data import DataLoader,Dataset\nfrom torchvision import utils, transforms\nfrom skimage import io, transform\nimport matplotlib.pyplot as plt\nimport warnings\n\n#ignore warnings\nwarnings.filterwarnings(\"ignore\")\n\nplt.ion() #interactive mode on", "_____no_output_____" ] ], [ [ "The dataset being used is the face pose detection dataset, which annotates the data using 68 landmark points. The dataset has a csv file that contains the annotation for the images.", "_____no_output_____" ] ], [ [ "# Import CSV file\nlandmarks_csv = pandas.read_csv(\"data/faces/face_landmarks.csv\")\n\n# Extracting info from the CSV file\nn = 65\nimg_name = landmarks_csv.iloc[n,0]\nlandmarks = landmarks_csv.iloc[n,1:].as_matrix()\nlandmarks = landmarks.astype('float').reshape(-1,2)\n\n# Print a few of the datasets for having a look at\n# the dataset\nprint('Image name: {}'.format(img_name))\nprint('Landmarks shape: {}'.format(landmarks.shape))\nprint('First 4 Landmarks: {}'.format(landmarks[:4]))", "Image name: person-7.jpg\nLandmarks shape: (68, 2)\nFirst 4 Landmarks: [[32. 65.]\n [33. 76.]\n [34. 86.]\n [34. 97.]]\n" ] ], [ [ "Now that we have seen the landmark values let's plot a function to display the landmarks on an image", "_____no_output_____" ] ], [ [ "def plot_landmarks(image, landmarks):\n plt.imshow(image)\n plt.scatter(landmarks[:, 0], landmarks[:, 1], s=10, c='r', marker='.')\n plt.pause(0.01)\n \nplt.figure()\nplot_landmarks(io.imread(os.path.join('data/faces/',img_name)),landmarks)\nplt.show()", "_____no_output_____" ] ], [ [ "To use customa datasets we need to use the <b>(torch.utils.data.Dataset) Dataset</b> class provided. 
It is an abstract class and hence the custom class should inherit it and override the \n<b>__len__</b> method and the\n<b>__getitem__</b> method\nThe __getitem__ method is used to provide the ith sample from the dataset", "_____no_output_____" ] ], [ [ "class FaceLandmarkDataset(Dataset):\n \n # We will read the file here\n def __init__(self,csv_file, root_dir, transform=None):\n \"\"\"\n Args:\n csv_file : string : path to csv file\n root_dir : string : root directory which contains all the images\n transform : callable, optional : Optional transform to be applied \n to the images\n \"\"\"\n self.landmarks_frame = pandas.read_csv(csv_file)\n self.root_dir = root_dir\n self.transform = transform\n \n def __len__(self):\n return len(self.landmarks_frame)\n \n def __getitem__(self, idx):\n \"\"\"\n Args:\n idx (integer): the ith sample\n \"\"\"\n image_name = os.path.join(self.root_dir,self.landmarks_frame.iloc[idx, 0])\n image = io.imread(image_name)\n landmarks = np.array([self.landmarks_frame.iloc[idx, 1:]])\n landmarks = landmarks.astype(\"float\").reshape(-1, 2)\n \n sample = {\"image\":image,\"landmarks\":landmarks}\n \n if self.transform:\n sample = self.transform(sample)\n \n return sample", "_____no_output_____" ], [ "face_dataset = FaceLandmarkDataset(csv_file='data/faces/face_landmarks.csv',\n root_dir='data/faces/')\n\nfig = plt.figure()\n\nfor i in range(len(face_dataset)):\n sample = face_dataset[i]\n\n print(i, sample['image'].shape, sample['landmarks'].shape)\n\n ax = plt.subplot(1, 4, i + 1)\n plt.tight_layout()\n ax.set_title('Sample #{}'.format(i))\n ax.axis('off')\n plot_landmarks(**sample)\n\n if i == 3:\n plt.show()\n break", "0 (324, 215, 3) (68, 2)\n" ] ], [ [ "Now that we have the dataset , we can move on to preprocessing the data. We use the transforms class for this.\nWe will be using callable classes of the transformations we need so that the parameters do not need to be passed again and again. For better description refer the <a href=\"https://pytorch.org/tutorials/beginner/data_loading_tutorial.html\">tutorial</a> from PyTorch.\n\nTo implement callable classes we just need to implement the __call__ method and if required __init__ method of the class. \n\nHere we will be using autocrop , Reshape and To Tensor transformations.\n\n__** NOTE **__<br>\nIn PyTorch the default style for image Tensors is <span>n_channels * Height * Width</span> as opposed to the Tensordlow default of <span>Height * Width * n_channels</span>. But all the images in the real world have the tensorflow default format and hence we need to do that change in the ToTensor class that we will implement.", "_____no_output_____" ] ], [ [ "# Implementing the Rescale class\nclass Rescale(object):\n \"\"\"Rescale the input image to a given size\n \n Args:\n output_size (int or tuple):Desired output size. If tuple, output is\n matched to output_size. 
If int, smaller of image edges is matched\n            to output_size keeping aspect ratio the same\n    \"\"\"\n    def __init__(self, output_size):\n        assert isinstance(output_size, (int, tuple))\n        self.output_size = output_size\n        \n    def __call__(self, sample):\n        image, landmarks = sample['image'], sample['landmarks']\n        \n        h, w = image.shape[:2]\n        if isinstance(self.output_size, int):\n            # match the smaller edge to output_size, keep the aspect ratio\n            if h > w:\n                new_h, new_w = self.output_size * h / w, self.output_size\n            else:\n                new_h, new_w = self.output_size, self.output_size * w / h\n        else:\n            new_h, new_w = self.output_size\n        new_h, new_w = int(new_h), int(new_w)\n        \n        image = transform.resize(image, (new_h, new_w))\n        \n        # h and w are swapped for landmarks because for images,\n        # x and y axes are axis 1 and 0 respectively\n        landmarks = landmarks * [new_w / w, new_h / h]\n        \n        return {\"image\": image, \"landmarks\": landmarks}\n\n# Implementing Random Crop\nclass RandomCrop(object):\n    \"\"\"Crop randomly the image in a sample\n    \n    Args:\n        output_size (tuple or int): Desired output size. If int, square crop\n        is made.\n    \"\"\"\n    def __init__(self, output_size):\n        assert isinstance(output_size, (int, tuple))\n        if isinstance(output_size, int):\n            self.output_size = (output_size, output_size)\n        else:\n            assert len(output_size) == 2\n            self.output_size = output_size\n    \n    def __call__(self, sample):\n        image, landmarks = sample['image'], sample['landmarks']\n        \n        h, w = image.shape[:2]\n        new_h, new_w = self.output_size\n        \n        # pick a random top-left corner for the crop window\n        top = np.random.randint(0, h - new_h)\n        left = np.random.randint(0, w - new_w)\n        \n        image = image[top:top + new_h, left:left + new_w]\n        landmarks = landmarks - [left, top]\n        \n        return {\"image\": image, \"landmarks\": landmarks}\n    \n# Implementing To Tensor\nclass ToTensor(object):\n    \"\"\"Convert the ndarray image in a sample to a tensor\"\"\"\n    \n    def __call__(self, sample):\n        image, landmarks = sample['image'], sample['landmarks']\n        \n        # Need to transpose\n        # Numpy image : H x W x C\n        # Torch image : C x H x W\n        image = image.transpose((2, 0, 1))\n        return {\"image\": torch.from_numpy(image), \"landmarks\": torch.from_numpy(landmarks)}", "_____no_output_____" ] ] ]
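The callable-transform pattern described in the notebook above is normally chained with `torchvision.transforms.Compose` and consumed through a `DataLoader`. The sketch below assumes the `Rescale`, `RandomCrop` and `ToTensor` classes from the cells above are defined, and substitutes a synthetic in-memory dataset (random images and 68 random landmarks) so it runs without the faces CSV; the class name, sample count and crop sizes are illustrative.

```python
# Sketch: chain the callable transforms with Compose and read batches through a
# DataLoader. Assumes Rescale, RandomCrop and ToTensor from the cells above exist.
import numpy as np
import torch
from torch.utils.data import Dataset, DataLoader
from torchvision import transforms

class RandomFaceDataset(Dataset):
    """Synthetic stand-in for FaceLandmarkDataset: random H x W x C images."""
    def __init__(self, n_samples=8, transform=None):
        self.n_samples = n_samples
        self.transform = transform

    def __len__(self):
        return self.n_samples

    def __getitem__(self, idx):
        sample = {
            "image": np.random.rand(324, 215, 3).astype("float32"),
            "landmarks": np.random.rand(68, 2).astype("float32") * 200,
        }
        if self.transform:
            sample = self.transform(sample)
        return sample

composed = transforms.Compose([Rescale(256), RandomCrop(224), ToTensor()])
dataset = RandomFaceDataset(transform=composed)
loader = DataLoader(dataset, batch_size=4, shuffle=True, num_workers=0)

for batch in loader:
    # images come out as C x H x W tensors, stacked into a batch by default_collate
    print(batch["image"].shape, batch["landmarks"].shape)
    break
```

Because every transform takes and returns the same `{"image", "landmarks"}` dictionary, the default collate function can stack the per-sample tensors into batches without any extra glue code.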
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ] ]
d0a4a0ce76c7b217408c6e225a94cd8bf3318273
8,570
ipynb
Jupyter Notebook
K_Means_Clustering.ipynb
nikhilkc96/k-means-clustering
1b3dd13fc145b6598d65ee6b9296d1a8647f8ee9
[ "MIT" ]
null
null
null
K_Means_Clustering.ipynb
nikhilkc96/k-means-clustering
1b3dd13fc145b6598d65ee6b9296d1a8647f8ee9
[ "MIT" ]
null
null
null
K_Means_Clustering.ipynb
nikhilkc96/k-means-clustering
1b3dd13fc145b6598d65ee6b9296d1a8647f8ee9
[ "MIT" ]
null
null
null
34.837398
123
0.547025
[ [ [ "## K Means Clustering\n### Our Objective - Perform K-Means Clustering to detect Network Intrusion Attempts (Cybersecurity)", "_____no_output_____" ] ], [ [ "#matrix math\nimport numpy as np\n#graphing\nimport matplotlib.pyplot as plt\n#graphing animation\nimport matplotlib.animation as animation", "_____no_output_____" ], [ "#load textfile dataset (2D data points)\n# for each user, how many packets are sent per second and what's the size of a packet\n#anomalies (DDOS attempts) will have lots of big packets sent in a short amount of time \ndef load_dataset(name):\n return np.loadtxt(name)", "_____no_output_____" ], [ "#euclidian distance between 2 data points. For as many data points as necessary. \ndef euclidian(a, b):\n return np.linalg.norm(a-b)", "_____no_output_____" ], [ "def kmeans(k, epsilon=0, distance='euclidian'):\n #list to store past centroid\n history_centroids = []\n #set the distance calculation type \n if distance == 'euclidian':\n dist_method = euclidian\n #set the dataset\n dataset = load_dataset('durudataset.txt')\n # dataset = dataset[:, 0:dataset.shape[1] - 1]\n # get the number of rows (instances) and columns (features) from the dataset\n num_instances, num_features = dataset.shape\n #define k centroids (how many clusters do we want to find?) chosen randomly \n prototypes = dataset[np.random.randint(0, num_instances - 1, size=k)]\n #set these to our list of past centroid (to show progress over time)\n history_centroids.append(prototypes)\n #to keep track of centroid at every iteration\n prototypes_old = np.zeros(prototypes.shape)\n #to store clusters\n belongs_to = np.zeros((num_instances, 1))\n norm = dist_method(prototypes, prototypes_old)\n iteration = 0\n while norm > epsilon:\n iteration += 1\n norm = dist_method(prototypes, prototypes_old)\n #for each instance in the dataset\n for index_instance, instance in enumerate(dataset):\n #define a distance vector of size k\n dist_vec = np.zeros((k,1))\n #for each centroid\n for index_prototype, prototype in enumerate(prototypes):\n #compute the distance between x and centroid\n dist_vec[index_prototype] = dist_method(prototype, instance)\n #find the smallest distance, assign that distance to a cluster\n belongs_to[index_instance, 0] = np.argmin(dist_vec)\n \n tmp_prototypes = np.zeros((k, num_features))\n \n #for each cluster (k of them)\n for index in range(len(prototypes)):\n #get all the points assigned to a cluster\n instances_close = [i for i in range(len(belongs_to)) if belongs_to[i] == index]\n #find the mean of those points, this is our new centroid\n prototype = np.mean(dataset[instances_close], axis=0)\n #add our new centroid to our new temporary list\n tmp_prototypes[index, :] = prototype\n \n #set the new list to the current list\n prototypes = tmp_prototypes\n \n #add our calculated centroids to our history for plotting\n history_centroids.append(tmp_prototypes)\n\n #return calculated centroids, history of them all, and assignments for which cluster each datapoint belongs to\n return prototypes, history_centroids, belongs_to", "_____no_output_____" ], [ "#lets define a plotting algorithm for our dataset and our centroids\ndef plot(dataset, history_centroids, belongs_to):\n #we'll have 2 colors for each centroid cluster\n colors = ['r', 'g']\n\n #split our graph by its axis and actual plot\n fig, ax = plt.subplots()\n\n #for each point in our dataset\n for index in range(dataset.shape[0]):\n #get all the points assigned to a cluster\n instances_close = [i for i in range(len(belongs_to)) if 
belongs_to[i] == index]\n #assign each datapoint in that cluster a color and plot it\n for instance_index in instances_close:\n ax.plot(dataset[instance_index][0], dataset[instance_index][1], (colors[index] + 'o'))\n\n #lets also log the history of centroids calculated via training\n history_points = []\n #for each centroid ever calculated\n for index, centroids in enumerate(history_centroids):\n #print them all out\n for inner, item in enumerate(centroids):\n if index == 0:\n history_points.append(ax.plot(item[0], item[1], 'bo')[0])\n else:\n history_points[inner].set_data(item[0], item[1])\n print(\"centroids {} {}\".format(index, item))\n\n plt.show()", "_____no_output_____" ], [ "#main file \ndef execute():\n #load dataset\n dataset = load_dataset('durudataset.txt')\n #train the model on the data\n centroids, history_centroids, belongs_to = kmeans(2)\n #plot the results\n plot(dataset, history_centroids, belongs_to)", "_____no_output_____" ], [ "%matplotlib notebook\n\n#do everything\nexecute()\n", "_____no_output_____" ], [ "%matplotlib notebook\ndef plot_step_by_step(dataset, history_centroids, belongs_to):\n colors = ['r', 'g']\n\n fig, ax = plt.subplots()\n\n for index in range(dataset.shape[0]):\n instances_close = [i for i in range(len(belongs_to)) if belongs_to[i] == index]\n for instance_index in instances_close:\n ax.plot(dataset[instance_index][0], dataset[instance_index][1], (colors[index] + 'o'))\n\n history_points = []\n for index, centroids in enumerate(history_centroids):\n for inner, item in enumerate(centroids):\n if index == 0:\n history_points.append(ax.plot(item[0], item[1], 'bo')[0])\n else:\n history_points[inner].set_data(item[0], item[1])\n print(\"centroids {} {}\".format(index, item))\n \n plt.pause(0.8)\n ", "_____no_output_____" ], [ "for item in history_centroids:\n plot_step_by_step(dataset, [item], belongs_to)", "_____no_output_____" ] ] ]
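The assignment/update loop in the notebook above can also be written with vectorised distance computations, which is a useful cross-check for the hand-rolled implementation. The sketch below runs on synthetic 2-D blobs (loosely standing in for the packets-per-second and packet-size features) rather than `durudataset.txt`; the data, `k` and the tolerance are illustrative.

```python
# Compact sketch of the same two-step k-means iteration on synthetic 2-D points,
# stopping when the centroids stop moving. Data, k and tol are illustrative.
import numpy as np

rng = np.random.default_rng(0)
# two blobs, loosely mimicking "normal" vs "anomalous" traffic
data = np.vstack([
    rng.normal([1.0, 1.0], 0.2, size=(100, 2)),
    rng.normal([4.0, 5.0], 0.4, size=(100, 2)),
])

def kmeans_simple(data, k=2, tol=1e-6, max_iter=100):
    # start from k random points of the dataset, as in the notebook
    centroids = data[rng.choice(len(data), size=k, replace=False)]
    for _ in range(max_iter):
        # assignment step: nearest centroid by Euclidean distance, all at once
        dists = np.linalg.norm(data[:, None, :] - centroids[None, :, :], axis=2)
        labels = dists.argmin(axis=1)
        # update step: mean of the points in each cluster (keep old centroid if empty)
        new_centroids = np.array([
            data[labels == j].mean(axis=0) if np.any(labels == j) else centroids[j]
            for j in range(k)
        ])
        if np.linalg.norm(new_centroids - centroids) < tol:
            break
        centroids = new_centroids
    return centroids, labels

centroids, labels = kmeans_simple(data)
print(centroids)
```

Computing all point-to-centroid distances in one `np.linalg.norm` call replaces the double Python loop, and the explicit tolerance plays the role of `epsilon` in the notebook's `while norm > epsilon` condition.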
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a4a87a700e2d3e1f054bb3b6e7ed58a5b3cea0
27,236
ipynb
Jupyter Notebook
pipeline/Visualize-run-veg.ipynb
ajijohn/planet-snowcover
c1dde5a1984ef12bf293680968e93252a2ad240f
[ "MIT" ]
1
2020-03-05T19:01:52.000Z
2020-03-05T19:01:52.000Z
pipeline/Visualize-run-veg.ipynb
ajijohn/planet-snowcover
c1dde5a1984ef12bf293680968e93252a2ad240f
[ "MIT" ]
null
null
null
pipeline/Visualize-run-veg.ipynb
ajijohn/planet-snowcover
c1dde5a1984ef12bf293680968e93252a2ad240f
[ "MIT" ]
1
2021-09-28T19:47:51.000Z
2021-09-28T19:47:51.000Z
58.196581
6,036
0.701682
[ [ [ "import boto3\nsession = boto3.Session(profile_name='esip')\n# Any clients created from this session will use credentials\n# from the [dev] section of ~/.aws/credentials.\ndev_s3_client = session.client('s3')\n", "_____no_output_____" ], [ "%matplotlib inline\nimport rasterio as rio\nimport numpy as np\nfrom matplotlib import pyplot as plt\nimport rasterio.plot\nimport os\nfrom datetime import datetime as dt\n\n# Load the veg aso\nveg_aso = rio.open(r'/home/ubuntu/planet-snowcover/pipeline/aso_veg_test_only.tif')\nveg_aso_mask = veg_aso.read(1)\n\n\n\n", "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/matplotlib/__init__.py:1067: UserWarning: Duplicate key in file \"/home/ubuntu/.config/matplotlib/matplotlibrc\", line #2\n (fname, cnt))\n/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/matplotlib/__init__.py:1067: UserWarning: Duplicate key in file \"/home/ubuntu/.config/matplotlib/matplotlibrc\", line #3\n (fname, cnt))\n" ], [ "# Plot this elevation band:\nplt.figure(figsize=(10,10))\nplt.imshow(veg_aso_mask)\n\ntitle_text = ''\nplt.title(title_text);\n", "_____no_output_____" ], [ "print(veg_aso_mask.shape)", "(26547, 32768)\n" ], [ "\n# Load the DEM and read out a np array of the elevation values (meters):\nveg_pred = rio.open(r'/home/ubuntu/planet-snowcover/pipeline/pred_veg_test_only.tif')\npred_veg = veg_pred.read(1)\n", "_____no_output_____" ], [ "print(pred_veg.shape)", "(26547, 32768)\n" ], [ "\n# Plot this elevation band:\nplt.figure(figsize=(10,10))\nplt.imshow(pred_veg)\n\ntitle_text = ''\nplt.title(title_text);", "_____no_output_____" ], [ "def compute_metrics(true, pred):\n print(true.shape, pred.shape)\n assert true.shape == pred.shape, \"Masks and predictions are different shapes. Are you sure they're in the same CRS/extent?\"\n\n compare = (true, pred)\n\n balanced_acc = metrics.balanced_accuracy_score(*compare)\n prfs = metrics.precision_recall_fscore_support(*compare, average='binary')\n\n these_metrics = {\n \"balanced_accuracy\" : balanced_acc,\n \"precision\": prfs[0],\n \"recall\": prfs[1],\n \"f_score\": prfs[2]\n }\n\n return these_metrics", "_____no_output_____" ], [ "mask_data = veg_aso_mask.flatten()\npred_data = pred_veg.flatten()\n # remove nodata", "_____no_output_____" ], [ "mask_data.shape", "_____no_output_____" ], [ " np.unique(mask_data)", "_____no_output_____" ], [ " np.unique(pred_data)", "_____no_output_____" ], [ "len(mask_data[np.where(mask_data != 9999)])", "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/ipykernel/__main__.py:1: DeprecationWarning: elementwise != comparison failed; this will raise an error in the future.\n if __name__ == '__main__':\n" ], [ "try:\n mask_data = mask_data[np.where(mask_data != -3.4e+38)]\n pred_data = pred_data[np.where(pred_data != -3.4e+38)]\nexcept Exception as e:\n print(\"Error filtering nodata. Do input data files have nodata attribute set? 
[{}]\".format(e))", "_____no_output_____" ], [ " np.unique(pred_data)", "_____no_output_____" ], [ "from sklearn.metrics import accuracy_score\naccuracy_score(mask_data, pred_data)", "_____no_output_____" ], [ "from sklearn import metrics\nmetrics = compute_metrics(mask_data, pred_data)", "(869892096,) (869892096,)\n" ], [ "print(metrics)", "_____no_output_____" ], [ "print(masksnow.shape)", "(3073, 6144)\n" ], [ "from rasterio.plot import show\nshow(masksnow)", "_____no_output_____" ], [ "!gdalinfo /home/ubuntu/s3:::planet-snowcover-analysis:planet-snowcover-2020-03-01-13-37-37-330:planet-snowcover-imagery-veg:20180528_181108_1025_3B_AnalyticMS_SR_clip/mask/ASO_3M_SD_USCATE_20180528_binary_merged.tif", "Driver: GTiff/GeoTIFF\nFiles: /home/ubuntu/s3:::planet-snowcover-analysis:planet-snowcover-2020-03-01-13-37-37-330:planet-snowcover-imagery-veg:20180528_181108_1025_3B_AnalyticMS_SR_clip/mask/ASO_3M_SD_USCATE_20180528_binary_merged.tif\nSize is 6144, 3073\nCoordinate System is:\nGEOGCS[\"WGS 84\",\n DATUM[\"WGS_1984\",\n SPHEROID[\"WGS 84\",6378137,298.257223563,\n AUTHORITY[\"EPSG\",\"7030\"]],\n AUTHORITY[\"EPSG\",\"6326\"]],\n PRIMEM[\"Greenwich\",0],\n UNIT[\"degree\",0.0174532925199433],\n AUTHORITY[\"EPSG\",\"4326\"]]\nOrigin = (-119.630126953125000,38.229550455326141)\nPixel Size = (0.000021457672119,-0.000016856813467)\nMetadata:\n AREA_OR_POINT=Area\nImage Structure Metadata:\n COMPRESSION=LZW\n INTERLEAVE=BAND\nCorner Coordinates:\nUpper Left (-119.6301270, 38.2295505) (119d37'48.46\"W, 38d13'46.38\"N)\nLower Left (-119.6301270, 38.1777495) (119d37'48.46\"W, 38d10'39.90\"N)\nUpper Right (-119.4982910, 38.2295505) (119d29'53.85\"W, 38d13'46.38\"N)\nLower Right (-119.4982910, 38.1777495) (119d29'53.85\"W, 38d10'39.90\"N)\nCenter (-119.5642090, 38.2036500) (119d33'51.15\"W, 38d12'13.14\"N)\nBand 1 Block=6144x1 Type=Int16, ColorInterp=Gray\n NoData Value=9999\n" ] ] ]
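The mask-versus-prediction scoring above flattens both rasters, drops nodata and calls `compute_metrics`. A small self-contained sketch of that comparison follows; the arrays and the nodata value are synthetic placeholders, and the nodata filter is applied with one joint boolean mask so the truth and prediction vectors stay element-aligned.

```python
# Sketch: compare a flattened truth mask with a flattened prediction mask,
# dropping nodata with a single joint mask, then score with sklearn.metrics.
import numpy as np
from sklearn import metrics

NODATA = -3.4e38
rng = np.random.default_rng(1)

# synthetic binary "truth" mask and a prediction that disagrees on ~10% of pixels
truth = rng.integers(0, 2, size=(100, 100)).astype(float)
flip = rng.random(truth.shape) < 0.1
pred = truth.copy()
pred[flip] = 1 - pred[flip]

# mark a band of pixels as nodata in both rasters
truth[:5, :] = NODATA
pred[:5, :] = NODATA

t, p = truth.ravel(), pred.ravel()
valid = (t != NODATA) & (p != NODATA)   # one joint mask keeps the arrays aligned
t, p = t[valid], p[valid]

balanced_acc = metrics.balanced_accuracy_score(t, p)
precision, recall, f_score, _ = metrics.precision_recall_fscore_support(
    t, p, average="binary")
print({
    "balanced_accuracy": balanced_acc,
    "precision": precision,
    "recall": recall,
    "f_score": f_score,
})
```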
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a4aedb320d4a219f887093f1bf66b7e53165fc
3,379
ipynb
Jupyter Notebook
docs/notebooks/01_materials.ipynb
joamatab/modesolverpy
c2a8e1b84bd101b2dc94a2951bb24f50520c47e9
[ "MIT" ]
3
2020-07-14T13:01:45.000Z
2022-02-04T04:32:13.000Z
docs/notebooks/01_materials.ipynb
joamatab/modesolverpy
c2a8e1b84bd101b2dc94a2951bb24f50520c47e9
[ "MIT" ]
null
null
null
docs/notebooks/01_materials.ipynb
joamatab/modesolverpy
c2a8e1b84bd101b2dc94a2951bb24f50520c47e9
[ "MIT" ]
1
2022-02-03T15:46:20.000Z
2022-02-03T15:46:20.000Z
23.143836
143
0.536549
[ [ [ "# Materials\n\nWe have different materials available thanks to the [materialspy](https://opticalmaterialspy.readthedocs.io/en/latest/index.html) library", "_____no_output_____" ] ], [ [ "import numpy as np\nimport matplotlib.pyplot as plt\nimport opticalmaterialspy as mat\nimport modes as ms\n\nm = mat.SiO2()\n\n# Refractive index @ 1550nm.\nprint('n(1.55e-6m):', m.n(1.55e-6)) # Knows 1.55e-6 must be [m].\nprint('n(1.55um):', m.n(1.55)) # Knows 1.55 must be [um].\nprint('n(1550nm):', m.n(1550)) # Knows 1550 must be [nm].\n\n# Group velocity refractive index @ 900nm.\nprint('n_gv(900nm):', m.ng(900))\n\n# Group velocity dispersion @ 808nm.\nprint('GVD(0.808um):', m.gvd(0.808))", "_____no_output_____" ], [ "wavelengths = np.linspace(1.3, 1.6, 10)\nn = [ms.materials.si(w) for w in wavelengths]\n\nplt.plot(wavelengths, n)\nplt.xlabel('wavelength (um)')\nplt.ylabel('Refractive index')\nplt.title('Silicon refractive index')", "_____no_output_____" ] ], [ [ "if your material is not defined in the materials module you can always add it", "_____no_output_____" ] ], [ [ "def nitride(wl):\n return mat.RefractiveIndexWeb(\n \"https://refractiveindex.info/?shelf=main&book=Si3N4&page=Luke\"\n ).n(wl)", "_____no_output_____" ], [ "nsin = [nitride(w) for w in wavelengths]", "_____no_output_____" ], [ "plt.plot(wavelengths, nsin)\nplt.xlabel('wavelength (nm)')\nplt.ylabel('Refractive index')\nplt.title('Silicon nitride refractive index (Si3N4)')", "_____no_output_____" ], [ "help(ms.materials)", "_____no_output_____" ], [ "wavelengths = np.linspace(1.3, 1.6, 10)\nn = [ms.materials.sio2(w) for w in wavelengths]\n\nplt.plot(wavelengths, n)\nplt.xlabel('wavelength (um)')\nplt.ylabel('Refractive index')\nplt.title('SiO2')", "_____no_output_____" ] ] ]
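As an independent cross-check of the SiO2 curve in the notebook above, the refractive index can be evaluated directly from the three-term Sellmeier equation using the standard Malitson fused-silica coefficients (wavelength in um). The sketch below is a plain formula evaluation and does not call into `opticalmaterialspy` or the `modes` materials module.

```python
# Three-term Sellmeier equation for fused silica (Malitson coefficients, um).
import numpy as np
import matplotlib.pyplot as plt

B = np.array([0.6961663, 0.4079426, 0.8974794])
C = np.array([0.0684043, 0.1162414, 9.896161]) ** 2  # um^2

def n_fused_silica(wl_um):
    """n(lambda) from n^2 - 1 = sum_i B_i lambda^2 / (lambda^2 - C_i)."""
    wl2 = np.asarray(wl_um, dtype=float) ** 2
    return np.sqrt(1 + np.sum(B * wl2[..., None] / (wl2[..., None] - C), axis=-1))

print("n(1.55um) ~", n_fused_silica(1.55))

wavelengths = np.linspace(1.3, 1.6, 10)
plt.plot(wavelengths, n_fused_silica(wavelengths))
plt.xlabel("wavelength (um)")
plt.ylabel("Refractive index")
plt.title("Fused silica (Sellmeier, Malitson coefficients)")
```

At 1.55 um this evaluates to roughly 1.444, which should agree closely with the `mat.SiO2().n(1.55)` value printed above.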
[ "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ] ]
d0a4b9db6e193b996540f6f44ec78222c5b7d19f
11,523
ipynb
Jupyter Notebook
B1117.ipynb
MultiplexDX/B117-RT-qPCR-design
18ab1a035a1e68382df3777821b262088d608b8e
[ "MIT" ]
null
null
null
B1117.ipynb
MultiplexDX/B117-RT-qPCR-design
18ab1a035a1e68382df3777821b262088d608b8e
[ "MIT" ]
null
null
null
B1117.ipynb
MultiplexDX/B117-RT-qPCR-design
18ab1a035a1e68382df3777821b262088d608b8e
[ "MIT" ]
1
2021-03-04T14:09:23.000Z
2021-03-04T14:09:23.000Z
57.615
660
0.635946
[ [ [ "### B.1.1.7\n\n#### The selection of suitable loci (done on Tue 22. Dec. 2020)\nTo identify suitable targets for primer/probe design, we downloaded 1,136 sequences from the GISAID repository filtered during a collection time spanning 1 - 21 December 2020. We focused on the spike gene because lineage B.1.1.7 contains a number of spike gene mutations, including two deletions (ΔH69/ΔV70 and ΔY144) that we focused on for designing a specific assay. \n\nI cut the locus encoding the spike protein and used the *MAFFT* alignment tool (with the parameter - auto) to align all the sequences against the WUHAN reference (NCBI ID: NC_045512.2). ", "_____no_output_____" ] ], [ [ "%%bash\n\n# \"msa_1221.fasta\" is a pre-filtered nucleotide MSA file downladed from the GISAID repository 22.12.2020\n# the WUHAN reference is always used as the first sequence in the GISAID MSA files\ngrep -m 1 \">\" msa_1221.fasta | cut -d\">\" -f2 > sars2_allSeqs_til21stDec2020_andRefWuhan.list\n# to reduce computational time, I used only sequences collected in Dec 2020\n# getting unique sequence IDs\ngrep -P \"2020-12-\" msa_1221.fasta | cut -d\">\" -f2 >> sars2_allSeqs_til21stDec2020_andRefWuhan.list\n# star-end positions of the spike protein in the aligned WUHAN sequence: 22412-26369\n# I called the spike locus of all sequences listed in \"sars2_allSeqs_til21stDec2020_andRefWuhan.list\"\ncount=$(wc -l sars2_allSeqs_til21stDec2020_andRefWuhan.list | cut -d\" \" -f1)\nfor ((i=1; i<$(($count+1)); i++))\ndo\nID=$(sed -n ''$i'p' sars2_allSeqs_til21stDec2020_andRefWuhan.list | cut -d\" \" -f1)\necho \">\"$ID >> sars2_allSeqs_til21stDec2020_andRefWuhan_Spike.fa\ngrep -A 1 -m 1 $ID msa_1221.fasta | grep -v \">\" | cut -c22412-26369 | tr -d '-' | tr -d '\\n' | tr -d ' ' >> sars2_allSeqs_til21stDec2020_andRefWuhan_Spike.fa\necho \"\" >> sars2_allSeqs_til21stDec2020_andRefWuhan_Spike.fa\ndone\n\n# using 4 CPUS, I run the mafft tool with default settings\nmafft --thread 4 --auto sars2_allSeqs_til21stDec2020_andRefWuhan_Spike.fa > sars2_allSeqs_til21stDec2020_andRefWuhan_Spike_mafft.fa", "_____no_output_____" ] ], [ [ "#### Downstream analysis\nTwelve sequences (1.06 %) contained ambiguous signal in the loci of deletions and were not used in the downstream analysis. We separated sequences into two groups: 1) those with the ΔH69/ΔV70 and ΔY144 deletions and 2) those without the deletions (Table 1). Using *SeaView*, we called 95 % consensus sequences for the ΔH69/ΔV70 and ΔY144 group and the No deletions group that were used to design primer and probe sets specific to either B.1.1.7 or all other SARS-CoV-2 variants, respectively. 
", "_____no_output_____" ] ], [ [ "%%bash\n# quality checks of bases in the deleted loci (ΔH69/ΔV70 and ΔY144) \n# if a called base has ambiguous character, it is denotes as N\ncount=$(wc -l sars2_allSeqs_til21stDec2020_andRefWuhan.list | cut -d\" \" -f1)\nfor ((i=1; i<$(($count+1)); i++))\ndo\nID=$(sed -n ''$i'p' sars2_allSeqs_til21stDec2020_andRefWuhan.list | cut -d\"|\" -f2)\nDel69_70=$(awk ' BEGIN {RS=\">\"}; /'$ID'\\|/ { print \">\"$0 } ' sars2_allSeqs_til21stDec2020_andRefWuhan_Spike_mafft.fa | grep -v \">\" | tr -d '\\n' | tr -d ' ' | cut -c203-208)\nDel144=$(awk ' BEGIN {RS=\">\"}; /'$ID'\\|/ { print \">\"$0 } ' sars2_allSeqs_til21stDec2020_andRefWuhan_Spike_mafft.fa | grep -v \">\" | tr -d '\\n' | tr -d ' ' | cut -c428-430)\n# using the output file, we can also compute the correlation of two deletions (ΔH69/ΔV70 and ΔY144) and to judge about their co-occurrence \necho -e $ID\"\\t\"$Del69_70\"\\t\"$Del144 >> sars2_1stDec20202_21stDec20202_Spike_Qchecks.tsv\n# The shorter deletion (ΔY144) always co-occurred with the longer deletion (ΔH69/ΔV70), whereas the (ΔH69/ΔV70) deletion occurs independently in 17 sequences (1.5 %). \n# Pearson's correlation coefficient of the deletions is 0.953. ", "_____no_output_____" ] ], [ [ "#### Quality checks of the selected primer/probe loci (done on Thu 4. Febr. 2021)\nIn a separate analysis to determine the prevalence of the ΔH69/ΔV70 and ΔY144 deletions in lineages other than B.1.1.7, we downloaded 416,778 spike protein sequences with the most recent data description file collected from the beginning of the pandemic through 29 January 2021. Using regular expressions (bash pattern matching command grep with the option -P for Perl-compatible regular expression), we searched for loci with both ΔH69/ΔV70 and ΔY144 deletions, and for loci without these deletions. In the regular expression, we kept fixed a few amino acids downstream and upstream from the deletions to omit any miscalling of the searched pattern. \n\n#### Quality checks of the selected primer/probe loci (update: 2. March 2021)\nIn a separate analysis to determine the prevalence of the ΔH69/ΔV70 and ΔY144 deletions in lineages other than B.1.1.7, we downloaded 633,137 spike protein sequences with the most recent data description file collected from the beginning of the pandemic through 2 March 2021. Using regular expressions (bash pattern matching command grep with the option -P for Perl-compatible regular expression), we searched for loci with both ΔH69/ΔV70 and ΔY144 deletions and for loci without these deletions. In the regular expression, we kept fixed a few amino acids downstream and upstream from the deletions to omit any miscalling of the searched pattern. 
", "_____no_output_____" ] ], [ [ "# update for the data; datasets from 1st of March 2021 (download on Tue 2nd March 2021)\n%%bash\n# \"Spike_proteins_0301.fasta\" is a pre-filtered amino-acid MSA file downladed from the GISAID repository 22.12.2020\ngrep -c \">\" Spike_proteins_0301.fasta\n# detection of both deletions (no HV, no Y): d69d70 and d144; with the check for unique sequence IDs\ngrep -B1 -P \"HAISGT.{66}FLGVYHK\" Spike_proteins_0301.fasta | grep \">\" | cut -d\"/\" -f2 | sort | uniq -c | awk ' { print $1\"\\t\"$2} ' | wc -l\n# full pattern (HV and Y), no deletion; with the check for unique sequence IDs\ngrep -B1 -P \"HAIHVSGT.{66}FLGVYYHK\" Spike_proteins_0301.fasta | grep \">\" | cut -d\"/\" -f2 | sort | uniq -c | awk ' { print $1\"\\t\"$2} ' | wc -l\n# only d144 (only HV, no Y); with the check for unique sequence IDs\ngrep -B1 -P \"HAIHVSGT.{66}FLGVYHK\" Spike_proteins_0301.fasta | grep \">\" | cut -d\"/\" -f2 | sort | uniq -c | awk ' { print $1\"\\t\"$2} ' | wc -l\n# only d69d70 (only Y, no HV); with the check for unique sequence IDs\ngrep -B1 -P \"HAISGT.{66}FLGVYYHK\" Spike_proteins_0301.fasta | grep \">\" | cut -d\"/\" -f2 | sort | uniq -c | awk ' { print $1\"\\t\"$2} ' | wc -l\n# detection of both deletions (no HV, no Y): d69d70 and d144; with the time-dependent sorting\ngrep -B1 -P \"HAISGT.{66}FLGVYHK\" Spike_proteins_0301.fasta | grep \">\" | grep -oP \"\\|202[01]-..-\" | sort | uniq -c\n# only d69d70 (only Y, no HV); with the time-dependent sorting\ngrep -B1 -P \"HAISGT.{66}FLGVYYHK\" Spike_proteins_0301.fasta | grep \">\" | grep -oP \"\\|202[01]-..-\" | sort | uniq -c\n\n# call the whole metadata information about sars-cov-2 records with detected both deletions \ncount=$(wc -l Spike_proteins_0301.fasta | cut -d\" \" -f1)\necho $count\n# $count/16=6473\nmyF(){\nfor ((i=1; i<6474; i++))\ndo\nN=$((12946*$1 + $i))\nID=$(sed -n ''$N'p' B117_IDs.list)\n# metadata_2021-03-01_09-16.tsv\nawk -v ID=$ID 'BEGIN{FS=\"\\t\"}; { if ( $3 == ID && $15 == \"Human\" ) { print $1\"\\t\"$3\"\\t\"$7\"\\t\"$18\"\\t\"$19 }} ' metadata_2021-03-01_09-16.tsv >> \"B117_search_\"$i.csv \ndone\n}\n\nexport -f myF\n# 12 946\nparallel -j 16 myF ::: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15\ncat \"B117_search_\"*.csv > \"B117_detected_bothMutations_cladeID.csv \nrm \"B117_search_\"*.csv", "_____no_output_____" ] ], [ [ "#### Results (done on Thu 4. Febr. 2021)\nOur analysis of the prevalence of both ΔH69/ΔV70 and ΔY144 deletions in lineages other than B.1.1.7, revealed a total of 29,872 sequences that possess both deletions, while 368,474 sequences do not have them. Based on the metadata file, we identified SARS-CoV-2 lineages across all called sequences with both deletions. Only five sequences (0.0167 %) out of 29,872 records are not labelled as B.1.1.7, highlighted the notion that these two deletions are highly specific for the B.1.1.7 variant and make ideal targets for primer/probe design.\n\n#### Results (update: 2. March 2021)\nAnalysis of the prevalence of both ΔH69/ΔV70 and ΔY144 deletions in lineages other than B.1.1.7 revealed a total of 103,529 sequences that possess both deletions. Based on the metadata file, we identified SARS-CoV-2 lineages across all called sequences with both deletions. Only 108 sequences (0.10%) out of 103,529 sequences are not labelled as B.1.1.7. 
In other words, 99.90% of sequences containing both deletions belong to lineage B.1.1.7, highlighting the notion that these two deletions are highly specific for the B.1.1.7 variant and make ideal targets for primer/probe design (see the table below, please).\n\n| Clade (Nextstrain) | Total sequences containing both ΔH69/ΔV70 and ΔY144 | % sequences containing both ΔH69/ΔV70 and ΔY144 |\n|:------------------:|:---------------------------------------------------:|:-----------------------------------------------:|\n|19A|6|<0.01%|\n|20A|36|0.03%|\n|20A.EU2|22|0.02%|\n|20B|21|0.02%|\n|20C|6|<0.01%|\n|20E.EU1|13|0.01%|\n|20I/501Y.V1 (**B.1.1.7**)|103,421|**99.90%**|\n|No ID|4|<0.01%|\n|Total|103,529|100%|\n\n", "_____no_output_____" ] ] ]
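The quality-check step above classifies spike protein sequences by grepping for the deletion patterns with fixed flanking residues. The same logic can be expressed in a few lines of Python; in the sketch below the two example records are made up (66 alanines stand in for the residues between the deletion sites), while the regular expressions are exactly the ones used in the bash cells.

```python
# Python sketch of the pattern-matching logic from the grep commands above:
# classify each spike protein sequence by which deletion pattern it matches.
import re

patterns = {
    "d69d70_and_d144": re.compile(r"HAISGT.{66}FLGVYHK"),
    "no_deletion":     re.compile(r"HAIHVSGT.{66}FLGVYYHK"),
    "only_d144":       re.compile(r"HAIHVSGT.{66}FLGVYHK"),
    "only_d69d70":     re.compile(r"HAISGT.{66}FLGVYYHK"),
}

def classify(protein_seq):
    """Return the deletion categories whose pattern matches the spike sequence."""
    return [name for name, pat in patterns.items() if pat.search(protein_seq)]

# made-up records: flanking residues from the patterns, 66 'A's as a spacer
spacer = "A" * 66
examples = {
    "toy_both_deletions": "HAISGT" + spacer + "FLGVYHK",
    "toy_no_deletion":    "HAIHVSGT" + spacer + "FLGVYYHK",
}
for rec_id, seq in examples.items():
    print(rec_id, classify(seq))
```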
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
d0a4c017735f8b9db0ab78d8ff256b45f312ce70
520,699
ipynb
Jupyter Notebook
MC/MC Prediction Solution.ipynb
CaptainE/RLSolutionsSutton
25b80eb2e6499d142ec3a8af3f249ad879372a68
[ "MIT" ]
1
2020-11-13T20:46:40.000Z
2020-11-13T20:46:40.000Z
MC/MC Prediction Solution.ipynb
CaptainE/RLSolutionsSutton
25b80eb2e6499d142ec3a8af3f249ad879372a68
[ "MIT" ]
null
null
null
MC/MC Prediction Solution.ipynb
CaptainE/RLSolutionsSutton
25b80eb2e6499d142ec3a8af3f249ad879372a68
[ "MIT" ]
null
null
null
2,324.549107
139,540
0.950056
[ [ [ "%matplotlib inline\n\nimport gym\nimport matplotlib\nimport numpy as np\nimport sys\n\nfrom collections import defaultdict\n\nif \"../\" not in sys.path:\n sys.path.append(\"../\") \nfrom lib.envs.blackjack import BlackjackEnv\nfrom lib import plotting\n\nmatplotlib.style.use('ggplot')", "_____no_output_____" ], [ "env = BlackjackEnv()", "_____no_output_____" ], [ "def mc_prediction(policy, env, num_episodes, discount_factor=1.0):\n \"\"\"\n Monte Carlo prediction algorithm. Calculates the value function\n for a given policy using sampling.\n \n Args:\n policy: A function that maps an observation to action probabilities.\n env: OpenAI gym environment.\n num_episodes: Number of episodes to sample.\n discount_factor: Gamma discount factor.\n \n Returns:\n A dictionary that maps from state -> value.\n The state is a tuple and the value is a float.\n \"\"\"\n\n # Keeps track of sum and count of returns for each state\n # to calculate an average. We could use an array to save all\n # returns (like in the book) but that's memory inefficient.\n returns_sum = defaultdict(float)\n returns_count = defaultdict(float)\n \n # The final value function\n V = defaultdict(float)\n \n for i_episode in range(1, num_episodes + 1):\n # Print out which episode we're on, useful for debugging.\n if i_episode % 1000 == 0:\n print(\"\\rEpisode {}/{}.\".format(i_episode, num_episodes), end=\"\")\n sys.stdout.flush()\n\n # Generate an episode.\n # An episode is an array of (state, action, reward) tuples\n episode = []\n state = env.reset()\n for t in range(100):\n action = policy(state)\n next_state, reward, done, _ = env.step(action)\n episode.append((state, action, reward))\n if done:\n break\n state = next_state\n\n # Find all states the we've visited in this episode\n # We convert each state to a tuple so that we can use it as a dict key\n states_in_episode = set([tuple(x[0]) for x in episode])\n for state in states_in_episode:\n # Find the first occurance of the state in the episode\n first_occurence_idx = next(i for i,x in enumerate(episode) if x[0] == state)\n # Sum up all rewards since the first occurance\n G = sum([x[2]*(discount_factor**i) for i,x in enumerate(episode[first_occurence_idx:])])\n # Calculate average return for this state over all sampled episodes\n returns_sum[state] += G\n returns_count[state] += 1.0\n V[state] = returns_sum[state] / returns_count[state]\n\n return V ", "_____no_output_____" ], [ "def sample_policy(observation):\n \"\"\"\n A policy that sticks if the player score is >= 20 and hits otherwise.\n \"\"\"\n score, dealer_score, usable_ace = observation\n return 0 if score >= 20 else 1", "_____no_output_____" ], [ "V_10k = mc_prediction(sample_policy, env, num_episodes=10000)\nplotting.plot_value_function(V_10k, title=\"10,000 Steps\")\n\nV_500k = mc_prediction(sample_policy, env, num_episodes=500000)\nplotting.plot_value_function(V_500k, title=\"500,000 Steps\")", "Episode 10000/10000." ] ] ]
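The first-visit estimator above keeps separate `returns_sum`/`returns_count` dictionaries and recomputes the average after every episode; the same average can be maintained with the incremental update V(s) <- V(s) + (G - V(s)) / N(s). The sketch below applies that update to a tiny 5-state random walk defined inline, so it runs without the Blackjack environment or `lib.envs`; the chain, episode count and seed are illustrative.

```python
# First-visit Monte Carlo prediction with an incremental mean update on a
# 5-state random walk (reward +1 only when exiting to the right).
import numpy as np
from collections import defaultdict

rng = np.random.default_rng(0)

def random_walk_episode(start=2, n_states=5):
    """Return [(state, reward), ...] for one episode of the chain."""
    episode, s = [], start
    while True:
        s_next = s + rng.choice([-1, 1])
        reward = 1.0 if s_next == n_states else 0.0
        episode.append((s, reward))
        if s_next < 0 or s_next == n_states:
            return episode
        s = s_next

def mc_prediction_incremental(num_episodes=20000, discount=1.0):
    V, N = defaultdict(float), defaultdict(int)
    for _ in range(num_episodes):
        episode = random_walk_episode()
        # returns-to-go for every time step, computed backwards
        G, returns = 0.0, [0.0] * len(episode)
        for t in reversed(range(len(episode))):
            G = discount * G + episode[t][1]
            returns[t] = G
        # first-visit update with an incremental mean
        seen = set()
        for t, (s, _) in enumerate(episode):
            if s in seen:
                continue
            seen.add(s)
            N[s] += 1
            V[s] += (returns[t] - V[s]) / N[s]
    return V

V = mc_prediction_incremental()
print({s: round(V[s], 3) for s in sorted(V)})  # true values are 1/6, 2/6, ..., 5/6
```

The incremental form gives the same estimates as the sum/count bookkeeping while storing one running value per state, and replacing 1/N(s) with a constant step size turns it into the standard update for non-stationary targets.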
[ "code" ]
[ [ "code", "code", "code", "code", "code" ] ]
d0a4c7faa40481e15689541369b4e95b9fb692a7
1,036,402
ipynb
Jupyter Notebook
Generate_corrections/20201012-STORM6_2color_Generate_Correction_Profiles.ipynb
shiwei23/Chromatin_Analysis_Scripts
909b9b81de8fcf04dd4c39ac21a84864ce2003ff
[ "MIT" ]
null
null
null
Generate_corrections/20201012-STORM6_2color_Generate_Correction_Profiles.ipynb
shiwei23/Chromatin_Analysis_Scripts
909b9b81de8fcf04dd4c39ac21a84864ce2003ff
[ "MIT" ]
null
null
null
Generate_corrections/20201012-STORM6_2color_Generate_Correction_Profiles.ipynb
shiwei23/Chromatin_Analysis_Scripts
909b9b81de8fcf04dd4c39ac21a84864ce2003ff
[ "MIT" ]
null
null
null
180.243826
89,440
0.855034
[ [ [ "# Generate correction profiles for denoised\n\nby Pu Zheng\n\n2019.06.18", "_____no_output_____" ] ], [ [ "%run \"E:\\Users\\puzheng\\Documents\\Startup_py3.py\"\nsys.path.append(r\"E:\\Users\\puzheng\\Documents\")\n\nimport ImageAnalysis3 as ia\n%matplotlib notebook\n\nfrom ImageAnalysis3 import *\n\nprint(os.getpid())", "46944\n" ], [ "reload(ia.get_img_info)\nreload(ia.corrections)\nreload(ia.visual_tools)\nreload(ia.classes)\nreload(ia.alignment_tools)\nreload(ia.correction_tools.illumination)", "_____no_output_____" ] ], [ [ "# Data folder", "_____no_output_____" ] ], [ [ "# master folder for this dataset:\nmaster_folder = r'\\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++'\n# correction folder\ncorrection_folder=os.path.join(master_folder, 'Corrections')\nif not os.path.exists(correction_folder):\n print(f\"creating folder: {correction_folder}\")\n os.makedirs(correction_folder)", "_____no_output_____" ] ], [ [ "# Illumination_correction", "_____no_output_____" ] ], [ [ "#master_folder = r'\\\\10.245.74.116\\Chromatin_NAS_4\\20191218_CTP05-chr21_HCT116_6hauxin'\n#correction_folder =os.path.join(master_folder, 'Corrections')\nfolders, fovs = get_img_info.get_folders(master_folder, 'H')\nfolders = [_fd for _fd in folders if ('Cy' in _fd and '3color' not in _fd) or 'H0R0' in _fd]\nprint(folders)", "Get Folder Names: (ia.get_img_info.get_folders)\n- Number of folders: 81\n- Number of field of views: 72\n['\\\\\\\\10.245.74.158\\\\Chromatin_NAS_6\\\\20201012-mouse_proB_IgH++\\\\H0R0', '\\\\\\\\10.245.74.158\\\\Chromatin_NAS_6\\\\20201012-mouse_proB_IgH++\\\\H74R24Cy7', '\\\\\\\\10.245.74.158\\\\Chromatin_NAS_6\\\\20201012-mouse_proB_IgH++\\\\H75R24Cy5']\n" ], [ "# shared parameters\nsingle_im_size = np.array([35,2048,2048])\nall_colors = ['750','647','488','405']\n# image and threads\nnum_images = 60\nnum_threads = 30", "_____no_output_____" ] ], [ [ "# Illumination correction", "_____no_output_____" ] ], [ [ "%matplotlib inline\nreload(ia.correction_tools.illumination)\n\ndapi_folder = folders[0]\nillumination_pfs = correction_tools.illumination.Generate_Illumination_Correction(\n dapi_folder, \n all_colors,\n all_channels=all_colors,\n num_threads=num_threads,\n num_images=num_images,\n single_im_size=single_im_size,\n correction_folder=correction_folder,\n gaussian_filter_size=60,\n save=True, save_folder=correction_folder,\n overwrite=False,\n )", "-- directly load:[] illumination profiles for files\n-- start calculating ['750', '647', '488', '405'] illumination profiles\n-- 60 among 72 dax files will be loaded in data_folder: \\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\H0R0\n++ start multi-processing illumination profile calculateion with 30 threads for 60 images in 1158.42s.\n-- saving updated profiles\n--- saving 750 profile into file: \\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\Corrections\\illumination_correction_750_2048x2048.npy\n--- saving 647 profile into file: \\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\Corrections\\illumination_correction_647_2048x2048.npy\n--- saving 488 profile into file: \\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\Corrections\\illumination_correction_488_2048x2048.npy\n--- saving 405 profile into file: \\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\Corrections\\illumination_correction_405_2048x2048.npy\n" ] ], [ [ "# Chromatic Abbrevation", "_____no_output_____" ], [ "## chromatic for 750", "_____no_output_____" ] ], [ [ "cc_folder = 
folders[1]\nref_folder = folders[2]\nprint(cc_folder, ref_folder)", "\\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\H74R24Cy7 \\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\H75R24Cy5\n" ], [ "reload(correction_tools.chromatic)\nchromatic_pfs, chromatic_consts = correction_tools.chromatic.Generate_chromatic_abbrevation(\n cc_folder, ref_folder, '750',\n num_images=num_images, \n num_threads=num_threads, \n fitting_orders=[1,1,1],\n correction_args={'correction_folder':correction_folder,\n 'single_im_size': single_im_size,\n 'all_channels': all_colors,\n 'corr_channels': ['750', '647'],\n },\n save_folder=correction_folder,\n overwrite_profile=True)", "++ generating chromatic info for 40 images in 20 threads in 2267.764s.\n++ fitting polynomial orders: [1 1 1]\n-- constants: [ 2.18325971e-01 1.47750723e-02 -9.58693868e-07 3.15479204e-05] with rsquare=0.3599429684786537\n-- constants: [-2.48105252e-02 2.01826717e-03 -1.82465972e-03 1.36473686e-05] with rsquare=0.9761135277372379\n-- constants: [-1.23677537e-01 -5.45136142e-04 -5.45356247e-06 -1.82303478e-03] with rsquare=0.9739049814258113\n++ saving new profiles into folder: \\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\Corrections\n" ] ], [ [ "### check chromatic abbrevation, 750-647", "_____no_output_____" ] ], [ [ "# reference image\nref_ims, = ia.io_tools.load.correct_fov_image(ref_filename, \n [647, 488], \n single_im_size=single_im_size,\n all_channels=all_colors,\n warp_image=True,\n illumination_corr=True,\n chromatic_corr=False,\n bleed_corr=False, \n correction_folder=correction_folder)\n\n# target image, not warpping\nraw_ims, corr_funcs, drift = ia.io_tools.load.correct_fov_image(cc_filename, \n [750], \n calculate_drift=True,\n ref_filename=ref_ims[-1],\n single_im_size=single_im_size,\n all_channels=all_colors,\n warp_image=False,\n illumination_corr=True,\n chromatic_corr=True,\n bleed_corr=False, \n correction_folder=correction_folder,\n return_drift=True)\n\n# target image, warppring\ncorr_ims, = ia.io_tools.load.correct_fov_image(cc_filename, \n [750], \n calculate_drift=False,\n drift=drift,\n ref_filename=ref_ims[-1],\n single_im_size=single_im_size,\n all_channels=all_colors,\n warp_image=True,\n illumination_corr=True,\n chromatic_corr=True, \n bleed_corr=False, \n correction_folder=correction_folder,\n return_drift=False)", "- correct the whole fov for image: \\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\H75R24Cy5\\Conv_zscan_05.dax\n-- loading illumination correction profile from file:\n\t 647 illumination_correction_647_2048x2048.npy\n\t 488 illumination_correction_488_2048x2048.npy\n-- loading image from file:\\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\H75R24Cy5\\Conv_zscan_05.dax in 3.007s\n-- removing hot pixels for channels:['647', '488'] in 35.413s\n-- illumination correction for channels: 647, 488, in 6.867s\n-- warp image with drift:[0. 0. 0.] 
in 0.000s\n-- finish correction in 47.063s\n- correct the whole fov for image: \\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\H74R24Cy7\\Conv_zscan_05.dax\n-- loading illumination correction profile from file:\n\t 750 illumination_correction_750_2048x2048.npy\n\t 488 illumination_correction_488_2048x2048.npy\n-- loading chromatic_constants correction profile from file:\n\t 750 chromatic_correction_750_647_35_2048_2048_const.pkl\n\t 647 None\n-- loading image from file:\\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\H74R24Cy7\\Conv_zscan_05.dax in 3.312s\n-- removing hot pixels for channels:['750', '488'] in 35.980s\n-- illumination correction for channels: 750, 488, in 6.801s\n-- apply bead_drift calculate for channel: 488\n-- aligning image -> directly use image\n-- directly use ref_image\n--- finish drift in 71.050s\n-- drift: [ 0. -0.79 0.29]\n-- generate translation function for chromatic correction for channels: ['750'] and drift:[ 0. -0.79 0.29] in 0.000s\n-- finish correction in 118.068s\n- correct the whole fov for image: \\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\H74R24Cy7\\Conv_zscan_05.dax\n-- loading illumination correction profile from file:\n\t 750 illumination_correction_750_2048x2048.npy\n-- loading chromatic correction profile from file:\n\t 750 chromatic_correction_750_647_35_2048_2048.npy\n\t 647 None\n-- loading image from file:\\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\H74R24Cy7\\Conv_zscan_05.dax in 2.695s\n-- removing hot pixels for channels:['750'] in 18.626s\n-- illumination correction for channels: 750, in 3.449s\n-- warp image with chromatic correction for channels: ['750'] and drift:[ 0. -0.79 0.29] 750, in 105.555s\n-- finish correction in 138.819s\n" ], [ "%matplotlib notebook\nvisual_tools.imshow_mark_3d_v2([raw_ims[0], corr_ims[0], ref_ims[0]])", "_____no_output_____" ], [ "corr_funcs[0]", "_____no_output_____" ], [ "reload(ia.io_tools.load)\nreload(ia.correction_tools.chromatic)\nfrom ImageAnalysis3.spot_tools.fitting import fit_fov_image\n# fitting\nint_th = 1\n\ncorr_spots = fit_fov_image(corr_ims[0], '750', th_seed=400, normalize_backgroud=True)\nraw_spots = fit_fov_image(raw_ims[0], '750', th_seed=400, normalize_backgroud=True)\nref_spots = fit_fov_image(ref_ims[0], '647', th_seed=600, normalize_backgroud=True)\ncorr_spots = corr_spots[corr_spots[:,0] >= int_th]\nraw_spots = raw_spots[raw_spots[:,0] >= int_th]\ncorr_raw_spots = corr_funcs[0](raw_spots)\nref_spots = ref_spots[ref_spots[:,0] >= int_th]", "-- start fitting spots in channel:750, 500 seeded, normalize total background:285.00, 500 fitted in 61.562s.\n-- start fitting spots in channel:750, 500 seeded, normalize total background:285.00, 500 fitted in 62.365s.\n-- start fitting spots in channel:647, 500 seeded, normalize total background:585.00, 500 fitted in 61.056s.\n" ], [ "plt.figure(figsize=(6,6),dpi=100)\nplt.plot(corr_spots[:,2], corr_spots[:,3], 'r.', label='ref')\nplt.plot(corr_raw_spots[:,2], corr_raw_spots[:,3], 'g.', label='not warpping')\nplt.plot(raw_spots[:,2], raw_spots[:,3], 'y.', label='not warpping')\n\nplt.plot(ref_spots[:,2], ref_spots[:,3], 'b.', label='warpping')\nplt.legend()\nplt.xlabel('X')\nplt.ylabel('Y')\n#plt.plot(ref_spots[:,2]+drift[1], ref_spots[:,3]+drift[2], 'b.')\nplt.show()", "_____no_output_____" ], [ "from ImageAnalysis3.spot_tools.matching import find_paired_centers, check_paired_centers\n\n# matching\n_new_drift, paired_ref_cts, paired_corr_cts = find_paired_centers(ref_spots[:,1:4], 
corr_spots[:,1:4], drift=drift)\nprint(_new_drift)\n_new_drift, paired_ref_cts, paired_corr_cts = check_paired_centers(paired_ref_cts, paired_corr_cts+drift)\nprint(_new_drift)", "[-0.01766196 -0.09067997 -0.01237969]\n[-0.01694834 0.69215355 -0.30152252]\n" ] ], [ [ "## visualize", "_____no_output_____" ] ], [ [ "%matplotlib notebook\nvisual_tools.imshow_mark_3d_v2([raw_ims[0], corr_ims[0], ref_ims[0]])", "_____no_output_____" ] ], [ [ "# bleedthrough correction", "_____no_output_____" ] ], [ [ "from ImageAnalysis3 import _image_size, _correction_folder, _allowed_colors\nreload(correction_tools.bleedthrough)\nfrom ImageAnalysis3.correction_tools.bleedthrough import find_bleedthrough_pairs, interploate_bleedthrough_correction_from_channel\nimport multiprocessing as mp\nfrom ImageAnalysis3.correction_tools.chromatic import generate_polynomial_data\n\n\nbleedthrough_channels=['750', '647']\n\nbleedthrough_correction_args = {\n 'correction_folder': _correction_folder,\n 'single_im_size':single_im_size,\n 'all_channels':all_colors,\n 'corr_channels':['750','647'],\n 'bleed_corr':False,\n 'illumination_corr':False,\n 'chromatic_corr':False,\n}\n\nbleedthrough_fitting_args = {'max_num_seeds':1000,\n 'th_seed': 500,\n 'use_dynamic_th':True,\n}\nreload(correction_tools.bleedthrough)", "_____no_output_____" ], [ "%matplotlib inline\nreload(correction_tools.bleedthrough)\nbleed_pf = correction_tools.bleedthrough.Generate_bleedthrough_correction(\n folders[1:3],\n rsq_th=0.81,\n corr_channels=bleedthrough_channels,\n correction_args=bleedthrough_correction_args,\n fitting_args=bleedthrough_fitting_args,\n num_images=num_images, \n #num_images=30,\n num_threads=num_threads,\n save_folder=correction_folder,\n overwrite_profile=True,\n overwrite_temp=False,\n )", "+ generating bleedthrough profiles.\n++ generating bleedthrough info for 60 images in 30 threads in 3.326s.\n- check 2956 bleedthrough pairs.\n- start iteration with outlier_sigma=2.00, keep_percentage=0.95\n[ True True True ... True True False] [ True True True ... 
True True False]\n-- iter: 1, kept in this round: 0.854, total: 0.854\n" ] ], [ [ "### Check bleedthorugh for cy7", "_____no_output_____" ] ], [ [ "from ImageAnalysis3.io_tools.load import correct_fov_image\nfrom ImageAnalysis3.io_tools.crop import crop_neighboring_area", "_____no_output_____" ], [ "reload(ia.io_tools.load)\ncy7_filename = os.path.join(folders[0], fovs[3])\n\ncy7_raw_ims, = ia.io_tools.load.correct_fov_image(cy7_filename, \n bleedthrough_channels, \n bleed_corr=False, chromatic_corr=False,\n corr_channels=bleedthrough_channels,\n single_im_size=single_im_size,\n all_channels=all_colors,\n illumination_corr=True,\n correction_folder=correction_folder)\ncy7_corr_ims, = ia.io_tools.load.correct_fov_image(cy7_filename, \n bleedthrough_channels, \n bleed_corr=True, chromatic_corr=False, \n corr_channels=bleedthrough_channels,\n single_im_size=single_im_size,\n all_channels=all_colors,\n illumination_corr=True,\n correction_folder=correction_folder)", "- correct the whole fov for image: \\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\H0R0\\Conv_zscan_03.dax\n-- loading illumination correction profile from file:\n\t 750 illumination_correction_750_2048x2048.npy\n\t 647 illumination_correction_647_2048x2048.npy\n-- loading image from file:\\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\H0R0\\Conv_zscan_03.dax in 5.591s\n-- removing hot pixels for channels:['750', '647'] in 23.486s\n-- illumination correction for channels: 750, 647, in 4.526s\n-- warp image with drift:[0. 0. 0.] in 0.000s\n-- finish correction in 35.552s\n- correct the whole fov for image: \\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\H0R0\\Conv_zscan_03.dax\n-- loading illumination correction profile from file:\n\t 750 illumination_correction_750_2048x2048.npy\n\t 647 illumination_correction_647_2048x2048.npy\n-- loading bleedthrough correction profile from file:bleedthrough_correction_750_647_2048_2048.npy\n-- loading image from file:\\\\10.245.74.158\\Chromatin_NAS_6\\20201012-mouse_proB_IgH++\\H0R0\\Conv_zscan_03.dax in 2.601s\n-- removing hot pixels for channels:['750', '647'] in 23.458s\n-- bleedthrough correction for channels: ['750', '647'] in 12.669s\n-- illumination correction for channels: 750, 647, in 4.445s\n-- warp image with drift:[0. 0. 0.] 
in 0.000s\n-- finish correction in 44.816s\n" ], [ "%matplotlib notebook\n%matplotlib notebook\n\nvisual_tools.imshow_mark_3d_v2([cy7_raw_ims[0], cy7_corr_ims[0], cy7_raw_ims[1], cy7_corr_ims[1]])", "_____no_output_____" ] ], [ [ "## Check bleedthrough for cy5", "_____no_output_____" ] ], [ [ "reload(ia.io_tools.load)\ncy5_filename = os.path.join(folders[1], fovs[3])\n\ncy5_raw_ims, = ia.io_tools.load.correct_fov_image(cy5_filename, \n bleedthrough_channels, \n bleed_corr=False, chromatic_corr=False,\n corr_channels=bleedthrough_channels,\n single_im_size=single_im_size,\n all_channels=all_colors,\n illumination_corr=True,\n correction_folder=correction_folder)\ncy5_corr_ims, = ia.io_tools.load.correct_fov_image(cy5_filename, \n bleedthrough_channels, \n bleed_corr=True, chromatic_corr=False, \n corr_channels=bleedthrough_channels,\n single_im_size=single_im_size,\n all_channels=all_colors,\n illumination_corr=True,\n correction_folder=correction_folder)", "- correct the whole fov for image: \\\\169.254.229.228\\Chromatin_NAS_3\\20191007_CTP07_chr2\\H4R4_Cy5\\Conv_zscan_02.dax\n-- loading illumination correction profile from file:\n\t 750 illumination_correction_750_2048x2048.npy\n\t 647 illumination_correction_647_2048x2048.npy\n\t 561 illumination_correction_561_2048x2048.npy\n-- loading image from file:\\\\169.254.229.228\\Chromatin_NAS_3\\20191007_CTP07_chr2\\H4R4_Cy5\\Conv_zscan_02.dax in 3.602s\n-- removing hot pixels for channels:['750', '647', '561'] in 35.113s\n-- illumination correction for channels: 750, 647, 561, in 7.695s\n- correct the whole fov for image: \\\\169.254.229.228\\Chromatin_NAS_3\\20191007_CTP07_chr2\\H4R4_Cy5\\Conv_zscan_02.dax\n-- loading illumination correction profile from file:\n\t 750 illumination_correction_750_2048x2048.npy\n\t 647 illumination_correction_647_2048x2048.npy\n\t 561 illumination_correction_561_2048x2048.npy\n-- loading bleedthrough correction profile from file:bleedthrough_correction_750_647_561_2048x2048.npy\n-- loading image from file:\\\\169.254.229.228\\Chromatin_NAS_3\\20191007_CTP07_chr2\\H4R4_Cy5\\Conv_zscan_02.dax in 2.709s\n-- removing hot pixels for channels:['750', '647', '561'] in 35.061s\n-- illumination correction for channels: 750, 647, 561, in 7.464s\n-- bleedthrough correction for channels: ['750', '647', '561'] in 22.998s\n" ], [ "%matplotlib notebook\n%matplotlib notebook\n\nvisual_tools.imshow_mark_3d_v2([cy5_raw_ims[0], cy5_raw_ims[1], cy5_corr_ims[1], cy5_corr_ims[0]])", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
d0a4d67e02862783f2251a34ea00378aad8e98e9
964
ipynb
Jupyter Notebook
Módulo 3/Untitled.ipynb
zapatacc/SimMat2020
80a2bc927348235abb3fd64d44fdcaeece4fb11f
[ "MIT" ]
null
null
null
Módulo 3/Untitled.ipynb
zapatacc/SimMat2020
80a2bc927348235abb3fd64d44fdcaeece4fb11f
[ "MIT" ]
null
null
null
Módulo 3/Untitled.ipynb
zapatacc/SimMat2020
80a2bc927348235abb3fd64d44fdcaeece4fb11f
[ "MIT" ]
48
2020-08-26T15:22:24.000Z
2020-09-03T14:32:14.000Z
19.28
50
0.463693
[ [ [ "from ddeint import ddeint\n\n\ndef model(Y,t,tau):\n x,y,z = Y(t)\n xd, yd, zd = Y(t-tau)\n \n \n dx = x**2\n dy = x**2+y**2\n dz = b2*w2* yd*z/(m+yd)\n \n return [dx,dy,dz]\n\ntt = np.linspace(0,300,1000)\nsol_depredadore_presa = ddeint(model, g, tt)", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code" ] ]
d0a4dba316da90395cdbeb9b17404762c37bd64e
3,848
ipynb
Jupyter Notebook
examples/reference/streams/bokeh/BoxEdit.ipynb
Guillemdb/holoviews
826adb4b08bb26840f9e608fe6a1555644c4497e
[ "BSD-3-Clause" ]
null
null
null
examples/reference/streams/bokeh/BoxEdit.ipynb
Guillemdb/holoviews
826adb4b08bb26840f9e608fe6a1555644c4497e
[ "BSD-3-Clause" ]
null
null
null
examples/reference/streams/bokeh/BoxEdit.ipynb
Guillemdb/holoviews
826adb4b08bb26840f9e608fe6a1555644c4497e
[ "BSD-3-Clause" ]
null
null
null
31.801653
444
0.602911
[ [ [ "#### **Title**: BoxEdit Stream\n\n**Dependencies**: Bokeh\n \n**Description**: A linked streams example demonstrating how to use the BoxDraw stream.\n\n**Backends**: [Bokeh](./Boxes.ipynb)", "_____no_output_____" ] ], [ [ "import holoviews as hv\nfrom holoviews import opts\nfrom holoviews import streams\nhv.extension('bokeh')", "_____no_output_____" ] ], [ [ "The ``BoxEdit`` stream adds a bokeh tool to the source plot that allows drawing, dragging, and deleting boxes and making the drawn data available to Python. The tool supports the following actions:\n\n**Add box**\n\n Double click to start one corner, then move the pointer to the other corner and double click again. (Or hold shift then click and drag anywhere on the plot.)\n\n**Move box**\n\n Click and drag an existing box, the box will be dropped once you let go of the mouse button.\n\n**Delete box**\n\n Tap a box to select it then press BACKSPACE or DELETE key while the mouse is within the plot area.\n \n### Properties\n\n* **``empty_value``**: Value to add to non-coordinate columns when adding new box\n* **``num_objects``** (int): Maximum number of boxes to draw before deleting the oldest object\n* **``styles``** (dict): Dictionary of style properties (e.g. line_color, line_width etc.) to apply to each box. If values are lists the values will cycle over the values.", "_____no_output_____" ], [ "As a very straightforward example we will create a `Rectangles` element containing multiple boxes, then attach it as a source to a ``BoxEdit`` stream instance. When we now plot the ``Rectangles`` instance it will add the tool, letting us draw, drag and delete the box polygons. To limit the number of boxes that can be drawn a fixed number of ``num_objects`` may be defined, causing the first box to be dropped when the limit is exceeded.", "_____no_output_____" ] ], [ [ "boxes = hv.Rectangles([(0, 0, 1, 1), (2, 1, 3.5, 2.5), (0.5, 1.5, 1.5, 2.5)])\nbox_stream = streams.BoxEdit(source=boxes, num_objects=3, styles={'fill_color': ['red', 'green', 'blue']})\nboxes.opts(\n opts.Polygons(active_tools=['box_edit'], fill_alpha=0.5, height=400, width=400))", "_____no_output_____" ] ], [ [ "<center><img src=\"https://assets.holoviews.org/gifs/examples/streams/bokeh/box_edit.gif\" width=400></center>", "_____no_output_____" ], [ "Whenever an action is executed the geometry data will be synced with Python, both in the notebook and when deployed on the bokeh server. We can access the data directly as columns of the corners of each box:", "_____no_output_____" ] ], [ [ "box_stream.data", "_____no_output_____" ] ], [ [ "Alternatively we can use the ``element`` property to get an Element containing the returned data:", "_____no_output_____" ] ], [ [ "box_stream.element", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
d0a4e7a46db286d7c50bcb4f8b04f1fc7d2a5120
178,245
ipynb
Jupyter Notebook
BME511/ProbablisticClassificationClustering.ipynb
haribharadwaj/notebooks
3932e3cedb3c43e74a6cf4942e4ad7b0cf9ee988
[ "BSD-3-Clause" ]
null
null
null
BME511/ProbablisticClassificationClustering.ipynb
haribharadwaj/notebooks
3932e3cedb3c43e74a6cf4942e4ad7b0cf9ee988
[ "BSD-3-Clause" ]
null
null
null
BME511/ProbablisticClassificationClustering.ipynb
haribharadwaj/notebooks
3932e3cedb3c43e74a6cf4942e4ad7b0cf9ee988
[ "BSD-3-Clause" ]
2
2021-09-02T13:23:10.000Z
2021-09-26T01:39:34.000Z
319.435484
43,332
0.931095
[ [ [ "<a href=\"https://colab.research.google.com/github/haribharadwaj/notebooks/blob/main/BME511/ProbabilisticClassificationClustering.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "# Classification and clustering: probabilistic modeling approach\n\nHere, we will extend out discussion on estimation theory to classification and clusterin problems.", "_____no_output_____" ] ], [ [ "import numpy as np\nimport pylab as pl\n\n# Setting it so figs will be a bit bigger\nfrom matplotlib import pyplot as plt\nplt.rcParams['figure.figsize'] = [5, 3.33]\nplt.rcParams['figure.dpi'] = 120", "_____no_output_____" ] ], [ [ "## We will rely heavily on the ```scikit-learn``` library\n\nThe ```sklearn``` library is a mature, well-tested library of implementations of many standard machine-learning models and provides a clean API to work with. In addition to being able to fit models and draw predictions for new inputs, ```sklearn``` also provides many useful utilities for handling data. It also has functions for creating toy datasets which can be used to understand the pros and cons of various models. The library is designed to work in conjunction with ```numpy```, ```scipy```, ```matplotlib```, and ```pandas```.", "_____no_output_____" ], [ "### Start by making a 2D dataset with 2 classes.", "_____no_output_____" ] ], [ [ "from sklearn import datasets\nn_classes = 2\nn_features = 2\nn_samples = 200\nn_redundant = 0\nn_clusters_per_class = 1\nclass_sep = 0.8\nX, y = datasets.make_classification(n_classes=n_classes, n_features=n_features,\n n_samples=n_samples, n_redundant=n_redundant,\n n_clusters_per_class=n_clusters_per_class,\n class_sep=class_sep, random_state=0)", "_____no_output_____" ] ], [ [ "### Visualize the dataset", "_____no_output_____" ] ], [ [ "import itertools\nmarker = itertools.cycle(('o', 's', '*', 'v', '^', 'x')) \nfor c in range(n_classes):\n pl.plot(X[y==c, 0], X[y==c, 1], linestyle='', marker=next(marker))\npl.xlabel('Feature 1')\npl.ylabel('Feature 2')\npl.legend(('Class 1', 'Class 2'), loc='best')", "_____no_output_____" ] ], [ [ "## Construct a naive-Bayes classifier and test it by splitting the dataset\n\nFor simplicity, we will use the Gaussian naive-Bayes model where the value of each feature is modeled as coming from a 1D normal distribution that is conditioned on class but independent of other features.", "_____no_output_____" ] ], [ [ "from sklearn.naive_bayes import GaussianNB\nfrom sklearn.model_selection import train_test_split\n\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, random_state=0)\n\nmodel = GaussianNB()\nmodel.fit(X_train, y_train)\ny_predicted_train = model.predict(X_train)\ny_predicted_test = model.predict(X_test)\n\nacc_train = (y_predicted_train == y_train).sum() * 100. / y_train.shape[0]\nacc_test = (y_predicted_test == y_test).sum() * 100. 
/ y_test.shape[0]\n\nprint(f'Training accuracy = {acc_train:0.1f}%, Test accuracy = {acc_test:0.1f}%')", "Training accuracy = 91.0%, Test accuracy = 92.4%\n" ] ], [ [ "### Visualize results", "_____no_output_____" ] ], [ [ "def make_meshgrid(X, ngrid=100, slack=0.2):\n if len(X.shape) > 2:\n warnings.warn('Grid visualization only work for 2D or less!')\n xmin, xmax = X[:, 0].min(), X[:, 0].max()\n ymin, ymax = X[:, 1].min(), X[:, 1].max()\n \n # Apply some slack so points are are not near the edge\n xmin *= 1 - np.sign(xmin) * slack\n xmax *= 1 + np.sign(xmax) * slack\n ymin *= 1 - np.sign(ymin) * slack\n ymax *= 1 + np.sign(ymax) * slack\n \n dx = (xmax - xmin) / ngrid\n dy = (ymax - ymin) / ngrid\n x = np.arange(xmin, xmax, dx)\n y = np.arange(ymin, ymax, dy)\n xx, yy = np.meshgrid(x, y)\n return (xx, yy)\n\n\ndef plot_decision(xx, yy, clf, **params):\n Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n Z = Z.reshape(xx.shape)\n out = pl.contourf(xx, yy, Z, **params)\n return out\n\n\nfor c in range(n_classes):\n pl.plot(X_test[y_test==c, 0], X_test[y_test==c, 1], linestyle='', marker=next(marker))\n\npl.plot(X_test[y_predicted_test != y_test, 0], X_test[y_predicted_test != y_test, 1], 'or',\n markersize=12, markerfacecolor='none')\npl.xlabel('Feature 1')\npl.ylabel('Feature 2')\npl.legend(('True Class 1', 'True Class 2', 'Incorrect'), loc='best')\npl.title('Test results')\n\n# Plot decision function\nxx, yy = make_meshgrid(X_test)\nplot_decision(xx, yy, model, cmap='seismic', alpha=0.1)", "_____no_output_____" ] ], [ [ "## Clustering using mixture modeling", "_____no_output_____" ], [ "### Create and visualize a 2D dataset", "_____no_output_____" ] ], [ [ "n_clusters = 3\ncenters = np.asarray([[-4, 0], [0, 3], [1.5, -3]])\nX, y = datasets.make_blobs(centers=centers, n_features=n_features,\n n_samples=n_samples)\n\npl.plot(X[:, 0], X[:, 1], 'o')\npl.xlabel('Feature 1')\npl.ylabel('Feature 2')", "_____no_output_____" ] ], [ [ "### Use a Gaussian mixture model\n\nThe number of clusters is a hyperparameter", "_____no_output_____" ] ], [ [ "from sklearn import mixture\nncomps = 3\nmodel = mixture.GaussianMixture(n_components=ncomps)\ny = model.fit_predict(X)\nprint(f'Log-likelihood = {model.score(X)}, AIC = {model.aic(X)}')\nfor k in range(ncomps):\n pl.plot(X[y==k, 0], X[y==k, 1], marker=next(marker), linestyle='')\npl.xlabel('Feature 1')\npl.ylabel('Feature 2')", "Log-likelihood = -3.882424703774175, AIC = 1586.96988150967\n" ] ], [ [ "## Apply naive Bayes classifier to Wisconsin breast cancer dataset after PCA for dimensionality reduction\n\nDataset is included with ```sklearn``` but comes from:\nW.H. Wolberg, W.N. Street, D.M. Heisey, and O.L. Mangasarian. Computer-derived nuclear features distinguish malignant from benign breast cytology. Human Pathology, 26:792--796, 1995.\n\nHere, fine-needle aspirates (FNA; a type of biopsy) of breast mass are obtained, imaged, and the digitized images are processed to extract features pertaining to the characteristics of the cell nuclei present in the image. 
The goal is to classify each biopsy as **malignant** or **benign** based on the image.\n\nTen real-valued features are computed for each cell nucleus:\n- radius (mean of distances from center to points on the perimeter)\n- texture (standard deviation of gray-scale values)\n- perimeter\n- area\n- smoothness (local variation in radius lengths)\n- compactness (perimeter^2 / area - 1.0)\n- concavity (severity of concave portions of the contour)\n- concave points (number of concave portions of the contour)\n- symmetry\n- fractal dimension (\"coastline approximation\" - 1)\n\nFor each image, across nuceli, the mean, standard error,\nand \"worst\" or largest (mean of the three largest values) of these features were computed. This yields **30 features per sample**.\nFor instance, field 3 is Mean Radius, field\n13 is Radius SE, field 23 is Worst Radius.\n", "_____no_output_____" ] ], [ [ "from sklearn.datasets import load_breast_cancer\nfrom sklearn.decomposition import PCA\n\ndat = load_breast_cancer()\nX = dat.data\ny = dat.target\n\nX_train, X_test, y_train, y_test = train_test_split(X, y, train_size=0.75)\n\n# Do PCA to get two dimensions\npc = PCA(n_components=2)\npc.fit(X_train)\nX_train_pc = pc.transform(X_train)\nX_test_pc = pc.transform(X_test)", "_____no_output_____" ], [ "model = GaussianNB()\nmodel.fit(X_train_pc, y_train)\ny_predicted_train = model.predict(X_train_pc)\ny_predicted_test = model.predict(X_test_pc)\n\nacc_train = (y_predicted_train == y_train).sum() * 100. / y_train.shape[0]\nacc_test = (y_predicted_test == y_test).sum() * 100. / y_test.shape[0]\n\nprint(f'Training accuracy = {acc_train:0.1f}%, Test accuracy = {acc_test:0.1f}%')", "Training accuracy = 91.3%, Test accuracy = 87.4%\n" ] ], [ [ "### Visualize results", "_____no_output_____" ] ], [ [ "n_classes = 2\nfor c in range(n_classes):\n pl.plot(X_test_pc[y_test==c, 0], X_test_pc[y_test==c, 1], linestyle='', marker=next(marker))\n\npl.plot(X_test_pc[y_predicted_test != y_test, 0], X_test_pc[y_predicted_test != y_test, 1], 'or',\n markersize=12, markerfacecolor='none')\npl.xlabel('PC 1')\npl.ylabel('PC 2')\npl.legend(('True Class 1', 'True Class 2', 'Incorrect'), loc='best')\npl.title('Test results')\n\n# Plot decision function\nxx, yy = make_meshgrid(X_test_pc)\nplot_decision(xx, yy, model, cmap='seismic', alpha=0.2)", "_____no_output_____" ] ], [ [ "## Apply naive Bayes classifier to high-dimensional Wisconsin breast cancer dataset (no PCA)", "_____no_output_____" ] ], [ [ "model = GaussianNB()\n# Using X_train and X_test instead of X_train_pc and X_test_pc\nmodel.fit(X_train, y_train)\ny_predicted_train = model.predict(X_train)\ny_predicted_test = model.predict(X_test)\n\nacc_train = (y_predicted_train == y_train).sum() * 100. / y_train.shape[0]\nacc_test = (y_predicted_test == y_test).sum() * 100. / y_test.shape[0]\n\nprint(f'Training accuracy = {acc_train:0.1f}%, Test accuracy = {acc_test:0.1f}%')", "Training accuracy = 94.4%, Test accuracy = 93.7%\n" ] ], [ [ "### Conclusion about breast cancer dataset\n\nA simple naive Bayes classifier is able to obtain 90+% accuracy in biopsy classification.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
d0a4f2357e546cfa100726fd67c535b01598fb86
96,004
ipynb
Jupyter Notebook
test/.ipynb_checkpoints/Find best inflation terms for from OHP temperature measurements-checkpoint.ipynb
liyuxuan48/thermo-1DPHP
0a4ec9c808353b315e83e4f043e07a828affba54
[ "MIT" ]
1
2021-01-14T11:02:21.000Z
2021-01-14T11:02:21.000Z
test/.ipynb_checkpoints/Find best inflation terms for from OHP temperature measurements-checkpoint.ipynb
liyuxuan48/thermo-1DPHP
0a4ec9c808353b315e83e4f043e07a828affba54
[ "MIT" ]
null
null
null
test/.ipynb_checkpoints/Find best inflation terms for from OHP temperature measurements-checkpoint.ipynb
liyuxuan48/thermo-1DPHP
0a4ec9c808353b315e83e4f043e07a828affba54
[ "MIT" ]
null
null
null
84.362039
38,693
0.81657
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
d0a504a2132e55f9d92e232a3a066e485929e532
250,639
ipynb
Jupyter Notebook
notebooks/figure_s3/figure_s3.ipynb
frcCVPR/frc-loss
f6a2afe0467890fa2208748ba343fd74fb20f5ab
[ "MIT" ]
2
2022-01-12T11:41:50.000Z
2022-03-12T07:59:11.000Z
notebooks/figure_s3/figure_s3.ipynb
frcCVPR/frc-loss
f6a2afe0467890fa2208748ba343fd74fb20f5ab
[ "MIT" ]
null
null
null
notebooks/figure_s3/figure_s3.ipynb
frcCVPR/frc-loss
f6a2afe0467890fa2208748ba343fd74fb20f5ab
[ "MIT" ]
null
null
null
611.314634
237,984
0.945312
[ [ [ "#Fill the paths below\nPATH_FRC = \"\" # git repo directory path\nPATH_ZENODO = \"\" # Data and models are available here: https://zenodo.org/record/5831014#.YdnW_VjMLeo\n\nDATA_FLAT = PATH_ZENODO+'/data/goi_1000/flat_1000/*.png'\nDATA_NORMAL = PATH_ZENODO+'/data/goi_1000/standard_1000/*.jpg'\n\nGAUSS_L2_MODEL = PATH_ZENODO+'/models/gaussian/noise005_set1000/standard/' # noise 0.05\nGAUSS_L2_MODEL_FLAT = PATH_ZENODO+'/models/gaussian/noise005_set1000/flat/' # noise 0.05", "_____no_output_____" ], [ "import sys\nsys.path.append(PATH_FRC)\n\nimport glob\nimport os\nimport skimage\n%matplotlib inline\nimport matplotlib.pyplot as plt\nfrom skimage.io import imread\nimport numpy as np\nimport matplotlib\n\nimport tensorflow as tf\nfrom models2 import FRCUnetModel\nfrom skimage.filters import window\nfrom tqdm import tqdm\nimport pandas as pd\n\nimport scipy.stats as stats\nfrom scipy.optimize import fsolve\nimport pyfftw.interfaces.numpy_fft\nnp.fft = pyfftw.interfaces.numpy_fft\n\nmatplotlib.rcParams.update({'mathtext.default':'regular'})\nmatplotlib.rcParams.update({'font.size': 8})\nmatplotlib.rcParams.update({'axes.labelweight': 'bold'})", "_____no_output_____" ], [ "def normalise_img(image):\n image = image - image.min()\n image = image/image.max() - 0.5\n return image\n \ndef plot_power_spectrum(image):\n if len(image.shape) == 3:\n image = np.sum(image, axis=2)\n image = image.astype('float64')\n image = image - image.mean()\n fourier_image = np.fft.fftn(image) # here the input is grey image\n size = image.shape[0]\n fourier_amplitudes = np.abs(fourier_image)**2\n print(\"FOURIER AMPLITUDES\", np.sum(fourier_amplitudes))\n\n kfreq = np.fft.fftfreq(size) * size # image size\n kfreq2D = np.meshgrid(kfreq, kfreq)\n knrm = np.sqrt(kfreq2D[0]**2 + kfreq2D[1]**2)\n\n knrm = knrm.flatten()\n fourier_amplitudes = fourier_amplitudes.flatten()\n\n kbins = np.arange(0.5, int(size / 2), 1.)\n kvals = 0.5 * (kbins[1:] + kbins[:-1])\n Abins, _, _ = stats.binned_statistic(\n knrm, fourier_amplitudes, statistic=\"mean\", bins=kbins) # mean power\n\n return kvals, Abins \n\ndef load_model(model_dir, model_fname):\n if model_dir is not None:\n return FRCUnetModel(None, model_path=os.path.join(model_dir, model_fname))\n", "_____no_output_____" ], [ "files_flat=sorted(glob.glob(DATA_FLAT))\nfiles_flat=files_flat[:50]", "_____no_output_____" ], [ "files_normal=sorted(glob.glob(DATA_NORMAL))\nfiles_normal=files_normal[:50]", "_____no_output_____" ], [ "cleans_flat=[]\nfor file in files_flat:\n clean = imread(file)\n if len(clean.shape) > 2:\n clean = np.mean(clean, axis=2)\n minsize = np.array(clean.shape).min()\n clean = clean[:minsize,:minsize]\n clean = normalise_img(clean)\n clean = clean.astype('float32')\n #clean = clean*window('hann', clean.shape)\n cleans_flat.append(clean)\ncleans_flat=np.stack(cleans_flat)", "_____no_output_____" ], [ "cleans_normal=[]\nfor file in files_normal:\n clean = imread(file)\n if len(clean.shape) > 2:\n clean = np.mean(clean, axis=2)\n minsize = np.array(clean.shape).min()\n clean = clean[:minsize,:minsize]\n clean = normalise_img(clean)\n clean = clean.astype('float32')\n #clean = clean*window('hann', clean.shape)\n cleans_normal.append(clean)\ncleans_normal=np.stack(cleans_normal)", "_____no_output_____" ], [ "cleans_normal.shape", "_____no_output_____" ], [ "noise1=np.random.normal(0,0.05,256**2*50).reshape(50,256,256)\nnoisy_flat=cleans_flat.copy()+noise1\nnoise2=np.random.normal(0,0.05,256**2*50).reshape(50,256,256)\nnoisy_normal=cleans_normal.copy()+noise2", 
"_____no_output_____" ], [ "l2_model=load_model(GAUSS_L2_MODEL, 'saved-model-epoch-200')", "WARNING:tensorflow:7 out of the last 7 calls to <function FRCUnetModel.get_radial_masks at 0x7fcb25f46550> triggered tf.function retracing. Tracing is expensive and the excessive number of tracings could be due to (1) creating @tf.function repeatedly in a loop, (2) passing tensors with different shapes, (3) passing Python objects instead of tensors. For (1), please define your @tf.function outside of the loop. For (2), @tf.function has experimental_relax_shapes=True option that relaxes argument shapes that can avoid unnecessary retracing. For (3), please refer to https://www.tensorflow.org/guide/function#controlling_retracing and https://www.tensorflow.org/api_docs/python/tf/function for more details.\n" ], [ "l2_1000_model_flat=load_model(GAUSS_L2_MODEL_FLAT, 'saved-model-epoch-200')", "WARNING:tensorflow:8 out of the last 8 calls to <function FRCUnetModel.get_radial_masks at 0x7fcb300ff430> triggered tf.function retracing. Tracing is expensive and the excessive number of tracings could be due to (1) creating @tf.function repeatedly in a loop, (2) passing tensors with different shapes, (3) passing Python objects instead of tensors. For (1), please define your @tf.function outside of the loop. For (2), @tf.function has experimental_relax_shapes=True option that relaxes argument shapes that can avoid unnecessary retracing. For (3), please refer to https://www.tensorflow.org/guide/function#controlling_retracing and https://www.tensorflow.org/api_docs/python/tf/function for more details.\n" ], [ "imnr=3", "_____no_output_____" ], [ "denoised_normal = l2_model.model(np.reshape(noisy_normal[imnr], [1,256, 256,1]))\ndenoised_normal = np.squeeze(denoised_normal)", "_____no_output_____" ], [ "denoised_flat = l2_1000_model_flat.model(np.reshape(noisy_flat[imnr], [1,256, 256,1]))\ndenoised_flat = np.squeeze(denoised_flat)", "_____no_output_____" ], [ "x=np.array(plot_power_spectrum(noisy_normal[imnr])[0])\nx=x*1.0/x.max()", "FOURIER AMPLITUDES 391690718.36664474\n" ], [ "fig = plt.figure()\nfig.set_size_inches(7, 7) # 3.5 inch is the width of one column in A4 paper \n\nax = fig.add_subplot(334)\nax.imshow(cleans_flat[imnr], cmap='gray')\nplt.xticks([])\nplt.yticks([])\n#plt.ylabel('Gaussian')\nplt.title('Normalised spectrum, GT')\n\nax = fig.add_subplot(335)\nax.imshow(noisy_flat[imnr], cmap='gray')\nplt.xticks([])\nplt.yticks([])\nplt.title('Normalised spectrum, noisy')\n\nax = fig.add_subplot(336)\nax.imshow(denoised_flat, cmap='gray')\nplt.xticks([])\nplt.yticks([])\nplt.title('Normalised spectrum, denoised')\n\nax = fig.add_subplot(331)\nax.imshow(cleans_normal[imnr], cmap='gray')\nplt.xticks([])\nplt.yticks([])\nplt.title('Standard spectrum, GT')\n\nax = fig.add_subplot(332)\nax.imshow(noisy_normal[imnr], cmap='gray')\nplt.xticks([])\nplt.yticks([])\nplt.title('Standard spectrum, noisy')\n\nax = fig.add_subplot(333)\nax.imshow(denoised_normal, cmap='gray')\nplt.xticks([])\nplt.yticks([])\nplt.title('Standard spectrum, denoised')\n\nax = fig.add_subplot(337)\nplt.title('Ground truth ')\nax.plot(x,np.array(plot_power_spectrum(cleans_flat[imnr])[1]),label='Normalised',color='orange')\nax.plot(x,np.array(plot_power_spectrum(cleans_normal[imnr])[1]),label='Standard',color='blue')\nax.set_xlabel('f/N')\nax.set_ylabel('Power')\nplt.yscale('log')\nplt.xscale('log')\n#ax.locator_params(axis='x', nbins=5)\nplt.ylim([10**1.5,10**7.5 ])\nplt.legend(loc=1)\n\nax = 
fig.add_subplot(338)\nplt.title('Noisy')\nax.plot(x,np.array(plot_power_spectrum(noisy_flat[imnr])[1]),label='Normalised',color='orange')\nax.plot(x,np.array(plot_power_spectrum(noisy_normal[imnr])[1]),label='Standard',color='blue')\nax.set_xlabel('f/N')\n#ax.set_ylabel('Power')\nplt.yscale('log')\nplt.xscale('log')\nplt.ylim([10**1.5,10**7.5 ])\n\nax = fig.add_subplot(339)\nplt.title('Denoised')\nax.plot(x,np.array(plot_power_spectrum(denoised_flat)[1]),label='Normalised',color='orange')\nax.plot(x,np.array(plot_power_spectrum(denoised_normal)[1]),label='Standard',color='blue')\n\nax.set_xlabel('f/N')\nplt.yscale('log')\nplt.xscale('log')\n#ax.locator_params(axis='x', nbins=5)\nplt.ylim([10**1.5,10**7.5 ])\n\nplt.tight_layout()\nplt.subplots_adjust(wspace=0.23, hspace=0.23)\nfig.savefig('figure_s3.png', dpi=300)", "FOURIER AMPLITUDES 49332148.0\nFOURIER AMPLITUDES 380945660.0\nFOURIER AMPLITUDES 60500138.09149456\nFOURIER AMPLITUDES 391690718.36664474\nFOURIER AMPLITUDES 41253108.0\nFOURIER AMPLITUDES 364254340.0\n" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a51590a9ab5d3682809f56d365c03a7d131ece
116,193
ipynb
Jupyter Notebook
ch_02.ipynb
fluffels/python-machine-learning
7fd899ffa118c3ee3b7667e1e0981cd41b441000
[ "MIT" ]
null
null
null
ch_02.ipynb
fluffels/python-machine-learning
7fd899ffa118c3ee3b7667e1e0981cd41b441000
[ "MIT" ]
null
null
null
ch_02.ipynb
fluffels/python-machine-learning
7fd899ffa118c3ee3b7667e1e0981cd41b441000
[ "MIT" ]
null
null
null
221.742366
22,779
0.908334
[ [ [ "import pandas\ndf = pandas.read_csv(\n 'https://archive.ics.uci.edu/ml/'\n 'machine-learning-databases/iris/iris.data',\n header=None,\n)\ndf.tail()", "_____no_output_____" ], [ "import numpy\ntargets = df.iloc[0:100, 4].values\ntargets = numpy.where(targets == 'Iris-setosa', -1, 1)\ntargets[:10]", "_____no_output_____" ], [ "samples = df.iloc[0:100, [0, 2]].values\nsamples.shape", "_____no_output_____" ], [ "samples[:10]", "_____no_output_____" ], [ "import matplotlib.pyplot as plt\nplt.scatter(\n samples[:50, 0], samples[:50, 1],\n color='red', marker='o', label='setosa'\n)\nplt.scatter(\n samples[50:100, 0], samples[50:100, 1],\n color='blue', marker='x', label='versicolor'\n)\nplt.xlabel('sepal length')\nplt.ylabel('petal length')\nplt.legend(loc='upper left')\nplt.show()", "_____no_output_____" ], [ "from perceptron import Perceptron\np = Perceptron(learning_rate=0.1, max_iterations=10)\np.train(samples, targets)\np.errors", "_____no_output_____" ], [ "plt.plot(\n range(1, len(p.errors) + 1),\n p.errors,\n marker='o',\n)\nplt.xlabel('epochs')\nplt.ylabel('misclassification count')\nplt.show()", "_____no_output_____" ], [ "from matplotlib.colors import ListedColormap\ndef plot_decision_regions(samples, targets, classifier, resolution=0.02):\n markers = ('s', 'x', 'o', '^', 'v')\n colors = ('red', 'blue', 'lightgreen', 'gray', 'cyan')\n color_count = len(numpy.unique(targets))\n c_map = ListedColormap(colors[:color_count])\n sample_extent = [\n (samples[:, dim].min() - 1, samples[:, dim].max() + 1)\n for dim in (0, 1)\n ]\n grid = numpy.meshgrid(\n numpy.arange(*sample_extent[0], resolution),\n numpy.arange(*sample_extent[1], resolution),\n )\n predictions = classifier.predict(\n numpy.array([\n grid[0].ravel(),\n grid[1].ravel()\n ]).T\n )\n predictions = predictions.reshape(grid[0].shape)\n\n plt.contourf(\n grid[0],\n grid[1],\n predictions,\n alpha=0.4,\n cmap=c_map\n )\n plt.xlim(grid[0].min(), grid[0].max())\n plt.ylim(grid[1].min(), grid[1].max())\n for idx, target in enumerate(numpy.unique(targets)):\n plt.scatter(\n x=samples[targets == target, 0],\n y=samples[targets == target, 1],\n c=c_map(idx),\n alpha=0.8,\n marker=markers[idx],\n label=target\n )\nplot_decision_regions(samples, targets, p, resolution=0.01)\nplt.show()", "'c' argument looks like a single numeric RGB or RGBA sequence, which should be avoided as value-mapping will have precedence in case its length matches with 'x' & 'y'. Please use a 2-D array with a single row if you really want to specify the same RGB or RGBA value for all points.\n'c' argument looks like a single numeric RGB or RGBA sequence, which should be avoided as value-mapping will have precedence in case its length matches with 'x' & 'y'. 
Please use a 2-D array with a single row if you really want to specify the same RGB or RGBA value for all points.\n" ], [ "from adanline import Adaline\nada = Adaline()\n\nfig, ax = plt.subplots(nrows=1, ncols=2, figsize=(8, 4))\n\nada.train(samples, targets,\n training_iterations=10,\n training_speed=0.01)\nax[0].plot(\n range(1, len(ada.costs) + 1),\n numpy.log10(ada.costs),\n marker='o'\n)\nax[0].set_xlabel('epochs')\nax[0].set_ylabel('log(SSE)')\nax[0].set_title('Adaline - Learning Rate 0.01')\n\nada.train(samples, targets,\n training_iterations=10,\n training_speed=0.0001)\nax[1].plot(\n range(1, len(ada.costs) + 1),\n numpy.log10(ada.costs),\n marker='o'\n)\nax[1].set_xlabel('epochs')\nax[1].set_ylabel('log(SSE)')\nax[1].set_title('Adaline - Learning Rate 0.0001')\n\nplt.show()", "_____no_output_____" ], [ "samples_standardized = numpy.copy(samples)\ndim = (samples[:, 0], samples[:, 1])\nfor d in (0, 1):\n samples_standardized[:, d] = (dim[d] - dim[d].mean()) / dim[d].std()\nada.train(samples_standardized, targets,\n training_iterations=15,\n training_speed=0.01)\nplt.plot(\n range(1, len(ada.costs) + 1),\n ada.costs,\n marker='o'\n)\nplt.show()", "_____no_output_____" ], [ "plot_decision_regions(samples_standardized, targets, ada)\nplt.show()", "'c' argument looks like a single numeric RGB or RGBA sequence, which should be avoided as value-mapping will have precedence in case its length matches with 'x' & 'y'. Please use a 2-D array with a single row if you really want to specify the same RGB or RGBA value for all points.\n'c' argument looks like a single numeric RGB or RGBA sequence, which should be avoided as value-mapping will have precedence in case its length matches with 'x' & 'y'. Please use a 2-D array with a single row if you really want to specify the same RGB or RGBA value for all points.\n" ], [ "from adaline import AdalineSingleGradientDescent\nada = AdalineSingleGradientDescent()\nada.train(samples_standardized, targets,\n training_speed=0.01,\n training_iterations=15)\nplt.plot(\n range(1, len(ada.costs) + 1),\n ada.costs,\n marker='o'\n)\nplt.show()", "_____no_output_____" ], [ "plot_decision_regions(samples_standardized, targets,\n classifier=ada)\nplt.title('Adaline - Stochastic Gradient Descent')\nplt.xlabel('sepal length (standardized)')\nplt.xlabel('petal length (standardized)')", "'c' argument looks like a single numeric RGB or RGBA sequence, which should be avoided as value-mapping will have precedence in case its length matches with 'x' & 'y'. Please use a 2-D array with a single row if you really want to specify the same RGB or RGBA value for all points.\n'c' argument looks like a single numeric RGB or RGBA sequence, which should be avoided as value-mapping will have precedence in case its length matches with 'x' & 'y'. Please use a 2-D array with a single row if you really want to specify the same RGB or RGBA value for all points.\n" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a51b0725a7282118f6df270ee85b7d0907d230
247,617
ipynb
Jupyter Notebook
Convolutional_Neural_Networks/Autonomous driving application Car detection/Autonomous+driving+application+-+Car+detection+-+v3.ipynb
mnrmja007/my_dl_projects
9d64f716da546e5d279caaa9ab703f59a23b0293
[ "MIT" ]
null
null
null
Convolutional_Neural_Networks/Autonomous driving application Car detection/Autonomous+driving+application+-+Car+detection+-+v3.ipynb
mnrmja007/my_dl_projects
9d64f716da546e5d279caaa9ab703f59a23b0293
[ "MIT" ]
null
null
null
Convolutional_Neural_Networks/Autonomous driving application Car detection/Autonomous+driving+application+-+Car+detection+-+v3.ipynb
mnrmja007/my_dl_projects
9d64f716da546e5d279caaa9ab703f59a23b0293
[ "MIT" ]
null
null
null
177.122318
179,682
0.855054
[ [ [ "# Autonomous driving - Car detection\n\nWelcome to your week 3 programming assignment. You will learn about object detection using the very powerful YOLO model. Many of the ideas in this notebook are described in the two YOLO papers: Redmon et al., 2016 (https://arxiv.org/abs/1506.02640) and Redmon and Farhadi, 2016 (https://arxiv.org/abs/1612.08242). \n\n**You will learn to**:\n- Use object detection on a car detection dataset\n- Deal with bounding boxes\n\nRun the following cell to load the packages and dependencies that are going to be useful for your journey!", "_____no_output_____" ] ], [ [ "import argparse\nimport os\nimport matplotlib.pyplot as plt\nfrom matplotlib.pyplot import imshow\nimport scipy.io\nimport scipy.misc\nimport numpy as np\nimport pandas as pd\nimport PIL\nimport tensorflow as tf\nfrom keras import backend as K\nfrom keras.layers import Input, Lambda, Conv2D\nfrom keras.models import load_model, Model\nfrom yolo_utils import read_classes, read_anchors, generate_colors, preprocess_image, draw_boxes, scale_boxes\nfrom yad2k.models.keras_yolo import yolo_head, yolo_boxes_to_corners, preprocess_true_boxes, yolo_loss, yolo_body\n\n%matplotlib inline", "Using TensorFlow backend.\n" ] ], [ [ "**Important Note**: As you can see, we import Keras's backend as K. This means that to use a Keras function in this notebook, you will need to write: `K.function(...)`.", "_____no_output_____" ], [ "## 1 - Problem Statement\n\nYou are working on a self-driving car. As a critical component of this project, you'd like to first build a car detection system. To collect data, you've mounted a camera to the hood (meaning the front) of the car, which takes pictures of the road ahead every few seconds while you drive around. \n\n<center>\n<video width=\"400\" height=\"200\" src=\"nb_images/road_video_compressed2.mp4\" type=\"video/mp4\" controls>\n</video>\n</center>\n\n<caption><center> Pictures taken from a car-mounted camera while driving around Silicon Valley. <br> We would like to especially thank [drive.ai](https://www.drive.ai/) for providing this dataset! Drive.ai is a company building the brains of self-driving vehicles.\n</center></caption>\n\n<img src=\"nb_images/driveai.png\" style=\"width:100px;height:100;\">\n\nYou've gathered all these images into a folder and have labelled them by drawing bounding boxes around every car you found. Here's an example of what your bounding boxes look like.\n\n<img src=\"nb_images/box_label.png\" style=\"width:500px;height:250;\">\n<caption><center> <u> **Figure 1** </u>: **Definition of a box**<br> </center></caption>\n\nIf you have 80 classes that you want YOLO to recognize, you can represent the class label $c$ either as an integer from 1 to 80, or as an 80-dimensional vector (with 80 numbers) one component of which is 1 and the rest of which are 0. The video lectures had used the latter representation; in this notebook, we will use both representations, depending on which is more convenient for a particular step. \n\nIn this exercise, you will learn how YOLO works, then apply it to car detection. Because the YOLO model is very computationally expensive to train, we will load pre-trained weights for you to use. ", "_____no_output_____" ], [ "## 2 - YOLO", "_____no_output_____" ], [ "YOLO (\"you only look once\") is a popular algoritm because it achieves high accuracy while also being able to run in real-time. 
This algorithm \"only looks once\" at the image in the sense that it requires only one forward propagation pass through the network to make predictions. After non-max suppression, it then outputs recognized objects together with the bounding boxes.\n\n### 2.1 - Model details\n\nFirst things to know:\n- The **input** is a batch of images of shape (m, 608, 608, 3)\n- The **output** is a list of bounding boxes along with the recognized classes. Each bounding box is represented by 6 numbers $(p_c, b_x, b_y, b_h, b_w, c)$ as explained above. If you expand $c$ into an 80-dimensional vector, each bounding box is then represented by 85 numbers. \n\nWe will use 5 anchor boxes. So you can think of the YOLO architecture as the following: IMAGE (m, 608, 608, 3) -> DEEP CNN -> ENCODING (m, 19, 19, 5, 85).\n\nLets look in greater detail at what this encoding represents. \n\n<img src=\"nb_images/architecture.png\" style=\"width:700px;height:400;\">\n<caption><center> <u> **Figure 2** </u>: **Encoding architecture for YOLO**<br> </center></caption>\n\nIf the center/midpoint of an object falls into a grid cell, that grid cell is responsible for detecting that object.", "_____no_output_____" ], [ "Since we are using 5 anchor boxes, each of the 19 x19 cells thus encodes information about 5 boxes. Anchor boxes are defined only by their width and height.\n\nFor simplicity, we will flatten the last two last dimensions of the shape (19, 19, 5, 85) encoding. So the output of the Deep CNN is (19, 19, 425).\n\n<img src=\"nb_images/flatten.png\" style=\"width:700px;height:400;\">\n<caption><center> <u> **Figure 3** </u>: **Flattening the last two last dimensions**<br> </center></caption>", "_____no_output_____" ], [ "Now, for each box (of each cell) we will compute the following elementwise product and extract a probability that the box contains a certain class.\n\n<img src=\"nb_images/probability_extraction.png\" style=\"width:700px;height:400;\">\n<caption><center> <u> **Figure 4** </u>: **Find the class detected by each box**<br> </center></caption>\n\nHere's one way to visualize what YOLO is predicting on an image:\n- For each of the 19x19 grid cells, find the maximum of the probability scores (taking a max across both the 5 anchor boxes and across different classes). \n- Color that grid cell according to what object that grid cell considers the most likely.\n\nDoing this results in this picture: \n\n<img src=\"nb_images/proba_map.png\" style=\"width:300px;height:300;\">\n<caption><center> <u> **Figure 5** </u>: Each of the 19x19 grid cells colored according to which class has the largest predicted probability in that cell.<br> </center></caption>\n\nNote that this visualization isn't a core part of the YOLO algorithm itself for making predictions; it's just a nice way of visualizing an intermediate result of the algorithm. \n", "_____no_output_____" ], [ "Another way to visualize YOLO's output is to plot the bounding boxes that it outputs. Doing that results in a visualization like this: \n\n<img src=\"nb_images/anchor_map.png\" style=\"width:200px;height:200;\">\n<caption><center> <u> **Figure 6** </u>: Each cell gives you 5 boxes. In total, the model predicts: 19x19x5 = 1805 boxes just by looking once at the image (one forward pass through the network)! Different colors denote different classes. <br> </center></caption>\n\nIn the figure above, we plotted only boxes that the model had assigned a high probability to, but this is still too many boxes. 
You'd like to filter the algorithm's output down to a much smaller number of detected objects. To do so, you'll use non-max suppression. Specifically, you'll carry out these steps: \n- Get rid of boxes with a low score (meaning, the box is not very confident about detecting a class)\n- Select only one box when several boxes overlap with each other and detect the same object.\n\n", "_____no_output_____" ], [ "### 2.2 - Filtering with a threshold on class scores\n\nYou are going to apply a first filter by thresholding. You would like to get rid of any box for which the class \"score\" is less than a chosen threshold. \n\nThe model gives you a total of 19x19x5x85 numbers, with each box described by 85 numbers. It'll be convenient to rearrange the (19,19,5,85) (or (19,19,425)) dimensional tensor into the following variables: \n- `box_confidence`: tensor of shape $(19 \\times 19, 5, 1)$ containing $p_c$ (confidence probability that there's some object) for each of the 5 boxes predicted in each of the 19x19 cells.\n- `boxes`: tensor of shape $(19 \\times 19, 5, 4)$ containing $(b_x, b_y, b_h, b_w)$ for each of the 5 boxes per cell.\n- `box_class_probs`: tensor of shape $(19 \\times 19, 5, 80)$ containing the detection probabilities $(c_1, c_2, ... c_{80})$ for each of the 80 classes for each of the 5 boxes per cell.\n\n**Exercise**: Implement `yolo_filter_boxes()`.\n1. Compute box scores by doing the elementwise product as described in Figure 4. The following code may help you choose the right operator: \n```python\na = np.random.randn(19*19, 5, 1)\nb = np.random.randn(19*19, 5, 80)\nc = a * b # shape of c will be (19*19, 5, 80)\n```\n2. For each box, find:\n - the index of the class with the maximum box score ([Hint](https://keras.io/backend/#argmax)) (Be careful with what axis you choose; consider using axis=-1)\n - the corresponding box score ([Hint](https://keras.io/backend/#max)) (Be careful with what axis you choose; consider using axis=-1)\n3. Create a mask by using a threshold. As a reminder: `([0.9, 0.3, 0.4, 0.5, 0.1] < 0.4)` returns: `[False, True, False, False, True]`. The mask should be True for the boxes you want to keep. \n4. Use TensorFlow to apply the mask to box_class_scores, boxes and box_classes to filter out the boxes we don't want. You should be left with just the subset of boxes you want to keep. ([Hint](https://www.tensorflow.org/api_docs/python/tf/boolean_mask))\n\nReminder: to call a Keras function, you should use `K.function(...)`.", "_____no_output_____" ] ], [ [ "# GRADED FUNCTION: yolo_filter_boxes\n\ndef yolo_filter_boxes(box_confidence, boxes, box_class_probs, threshold = .6):\n \"\"\"Filters YOLO boxes by thresholding on object and class confidence.\n \n Arguments:\n box_confidence -- tensor of shape (19, 19, 5, 1)\n boxes -- tensor of shape (19, 19, 5, 4)\n box_class_probs -- tensor of shape (19, 19, 5, 80)\n threshold -- real value, if [ highest class probability score < threshold], then get rid of the corresponding box\n \n Returns:\n scores -- tensor of shape (None,), containing the class probability score for selected boxes\n boxes -- tensor of shape (None, 4), containing (b_x, b_y, b_h, b_w) coordinates of selected boxes\n classes -- tensor of shape (None,), containing the index of the class detected by the selected boxes\n \n Note: \"None\" is here because you don't know the exact number of selected boxes, as it depends on the threshold. 
\n For example, the actual output size of scores would be (10,) if there are 10 boxes.\n \"\"\"\n \n # Step 1: Compute box scores\n ### START CODE HERE ### (≈ 1 line)\n box_scores = box_confidence * box_class_probs\n ### END CODE HERE ###\n \n # Step 2: Find the box_classes thanks to the max box_scores, keep track of the corresponding score\n ### START CODE HERE ### (≈ 2 lines)\n box_classes = K.argmax(box_scores, axis=-1)\n box_class_scores = K.max(box_scores, axis=-1)\n ### END CODE HERE ###\n \n # Step 3: Create a filtering mask based on \"box_class_scores\" by using \"threshold\". The mask should have the\n # same dimension as box_class_scores, and be True for the boxes you want to keep (with probability >= threshold)\n ### START CODE HERE ### (≈ 1 line)\n filtering_mask = (box_class_scores > threshold)\n ### END CODE HERE ###\n \n # Step 4: Apply the mask to scores, boxes and classes\n ### START CODE HERE ### (≈ 3 lines)\n scores = tf.boolean_mask(box_class_scores, filtering_mask)\n boxes = tf.boolean_mask(boxes, filtering_mask)\n classes = tf.boolean_mask(box_classes, filtering_mask)\n ### END CODE HERE ###\n \n return scores, boxes, classes", "_____no_output_____" ], [ "with tf.Session() as test_a:\n box_confidence = tf.random_normal([19, 19, 5, 1], mean=1, stddev=4, seed = 1)\n boxes = tf.random_normal([19, 19, 5, 4], mean=1, stddev=4, seed = 1)\n box_class_probs = tf.random_normal([19, 19, 5, 80], mean=1, stddev=4, seed = 1)\n scores, boxes, classes = yolo_filter_boxes(box_confidence, boxes, box_class_probs, threshold = 0.5)\n print(\"scores[2] = \" + str(scores[2].eval()))\n print(\"boxes[2] = \" + str(boxes[2].eval()))\n print(\"classes[2] = \" + str(classes[2].eval()))\n print(\"scores.shape = \" + str(scores.shape))\n print(\"boxes.shape = \" + str(boxes.shape))\n print(\"classes.shape = \" + str(classes.shape))", "scores[2] = 10.7506\nboxes[2] = [ 8.42653275 3.27136683 -0.5313437 -4.94137383]\nclasses[2] = 7\nscores.shape = (?,)\nboxes.shape = (?, 4)\nclasses.shape = (?,)\n" ] ], [ [ "**Expected Output**:\n\n<table>\n <tr>\n <td>\n **scores[2]**\n </td>\n <td>\n 10.7506\n </td>\n </tr>\n <tr>\n <td>\n **boxes[2]**\n </td>\n <td>\n [ 8.42653275 3.27136683 -0.5313437 -4.94137383]\n </td>\n </tr>\n\n <tr>\n <td>\n **classes[2]**\n </td>\n <td>\n 7\n </td>\n </tr>\n <tr>\n <td>\n **scores.shape**\n </td>\n <td>\n (?,)\n </td>\n </tr>\n <tr>\n <td>\n **boxes.shape**\n </td>\n <td>\n (?, 4)\n </td>\n </tr>\n\n <tr>\n <td>\n **classes.shape**\n </td>\n <td>\n (?,)\n </td>\n </tr>\n\n</table>", "_____no_output_____" ], [ "### 2.3 - Non-max suppression ###\n\nEven after filtering by thresholding over the classes scores, you still end up a lot of overlapping boxes. A second filter for selecting the right boxes is called non-maximum suppression (NMS). ", "_____no_output_____" ], [ "<img src=\"nb_images/non-max-suppression.png\" style=\"width:500px;height:400;\">\n<caption><center> <u> **Figure 7** </u>: In this example, the model has predicted 3 cars, but it's actually 3 predictions of the same car. Running non-max suppression (NMS) will select only the most accurate (highest probabiliy) one of the 3 boxes. <br> </center></caption>\n", "_____no_output_____" ], [ "Non-max suppression uses the very important function called **\"Intersection over Union\"**, or IoU.\n<img src=\"nb_images/iou.png\" style=\"width:500px;height:400;\">\n<caption><center> <u> **Figure 8** </u>: Definition of \"Intersection over Union\". <br> </center></caption>\n\n**Exercise**: Implement iou(). 
Some hints:\n- In this exercise only, we define a box using its two corners (upper left and lower right): `(x1, y1, x2, y2)` rather than the midpoint and height/width.\n- To calculate the area of a rectangle you need to multiply its height `(y2 - y1)` by its width `(x2 - x1)`.\n- You'll also need to find the coordinates `(xi1, yi1, xi2, yi2)` of the intersection of two boxes. Remember that:\n - xi1 = maximum of the x1 coordinates of the two boxes\n - yi1 = maximum of the y1 coordinates of the two boxes\n - xi2 = minimum of the x2 coordinates of the two boxes\n - yi2 = minimum of the y2 coordinates of the two boxes\n- In order to compute the intersection area, you need to make sure the height and width of the intersection are positive, otherwise the intersection area should be zero. Use `max(height, 0)` and `max(width, 0)`.\n\nIn this code, we use the convention that (0,0) is the top-left corner of an image, (1,0) is the upper-right corner, and (1,1) the lower-right corner. ", "_____no_output_____" ] ], [ [ "# GRADED FUNCTION: iou\n\ndef iou(box1, box2):\n \"\"\"Implement the intersection over union (IoU) between box1 and box2\n    \n Arguments:\n box1 -- first box, list object with coordinates (x1, y1, x2, y2)\n    box2 -- second box, list object with coordinates (x1, y1, x2, y2)\n    \"\"\"\n\n # Calculate the (y1, x1, y2, x2) coordinates of the intersection of box1 and box2. Calculate its Area.\n ### START CODE HERE ### (≈ 5 lines)\n xi1 = max([box1[0], box2[0]])\n yi1 = max([box1[1], box2[1]])\n xi2 = min([box1[2], box2[2]])\n yi2 = min([box1[3], box2[3]])\n inter_area = max(xi2 - xi1, 0) * max(yi2 - yi1, 0)\n ### END CODE HERE ###    \n\n # Calculate the Union area by using Formula: Union(A,B) = A + B - Inter(A,B)\n ### START CODE HERE ### (≈ 3 lines)\n box1_area = (box1[2] - box1[0]) * (box1[3] - box1[1])\n box2_area = (box1[2] - box1[0]) * (box1[3] - box1[1]) \n union_area = box1_area + box2_area - inter_area\n\n ### END CODE HERE ###\n \n # compute the IoU\n ### START CODE HERE ### (≈ 1 line)\n iou = inter_area / union_area\n ### END CODE HERE ###\n \n return iou", "_____no_output_____" ], [ "box1 = (2, 1, 4, 3)\nbox2 = (1, 2, 3, 4) \nprint(\"iou = \" + str(iou(box1, box2)))", "iou = 0.14285714285714285\n" ] ], [ [ "**Expected Output**:\n\n<table>\n <tr>\n <td>\n **iou = **\n </td>\n <td>\n 0.14285714285714285\n </td>\n </tr>\n\n</table>", "_____no_output_____" ], [ "You are now ready to implement non-max suppression. The key steps are: \n1. Select the box that has the highest score.\n2. Compute its overlap with all other boxes, and remove boxes that overlap it more than `iou_threshold`.\n3. Go back to step 1 and iterate until there's no more boxes with a lower score than the current selected box.\n\nThis will remove all boxes that have a large overlap with the selected boxes. Only the \"best\" boxes remain.\n\n**Exercise**: Implement yolo_non_max_suppression() using TensorFlow. 
TensorFlow has two built-in functions that are used to implement non-max suppression (so you don't actually need to use your `iou()` implementation):\n- [tf.image.non_max_suppression()](https://www.tensorflow.org/api_docs/python/tf/image/non_max_suppression)\n- [K.gather()](https://www.tensorflow.org/api_docs/python/tf/gather)", "_____no_output_____" ] ], [ [ "# GRADED FUNCTION: yolo_non_max_suppression\n\ndef yolo_non_max_suppression(scores, boxes, classes, max_boxes = 10, iou_threshold = 0.5):\n \"\"\"\n Applies Non-max suppression (NMS) to set of boxes\n \n Arguments:\n scores -- tensor of shape (None,), output of yolo_filter_boxes()\n boxes -- tensor of shape (None, 4), output of yolo_filter_boxes() that have been scaled to the image size (see later)\n classes -- tensor of shape (None,), output of yolo_filter_boxes()\n max_boxes -- integer, maximum number of predicted boxes you'd like\n iou_threshold -- real value, \"intersection over union\" threshold used for NMS filtering\n \n Returns:\n scores -- tensor of shape (, None), predicted score for each box\n boxes -- tensor of shape (4, None), predicted box coordinates\n classes -- tensor of shape (, None), predicted class for each box\n \n Note: The \"None\" dimension of the output tensors has obviously to be less than max_boxes. Note also that this\n function will transpose the shapes of scores, boxes, classes. This is made for convenience.\n \"\"\"\n \n max_boxes_tensor = K.variable(max_boxes, dtype='int32') # tensor to be used in tf.image.non_max_suppression()\n K.get_session().run(tf.variables_initializer([max_boxes_tensor])) # initialize variable max_boxes_tensor\n \n # Use tf.image.non_max_suppression() to get the list of indices corresponding to boxes you keep\n ### START CODE HERE ### (≈ 1 line)\n nms_indices = tf.image.non_max_suppression(boxes, scores, max_boxes_tensor, iou_threshold)\n ### END CODE HERE ###\n \n # Use K.gather() to select only nms_indices from scores, boxes and classes\n ### START CODE HERE ### (≈ 3 lines)\n scores = K.gather(scores, nms_indices)\n boxes = K.gather(boxes, nms_indices)\n classes = K.gather(classes, nms_indices)\n ### END CODE HERE ###\n \n return scores, boxes, classes", "_____no_output_____" ], [ "with tf.Session() as test_b:\n scores = tf.random_normal([54,], mean=1, stddev=4, seed = 1)\n boxes = tf.random_normal([54, 4], mean=1, stddev=4, seed = 1)\n classes = tf.random_normal([54,], mean=1, stddev=4, seed = 1)\n scores, boxes, classes = yolo_non_max_suppression(scores, boxes, classes)\n print(\"scores[2] = \" + str(scores[2].eval()))\n print(\"boxes[2] = \" + str(boxes[2].eval()))\n print(\"classes[2] = \" + str(classes[2].eval()))\n print(\"scores.shape = \" + str(scores.eval().shape))\n print(\"boxes.shape = \" + str(boxes.eval().shape))\n print(\"classes.shape = \" + str(classes.eval().shape))", "scores[2] = 6.9384\nboxes[2] = [-5.299932 3.13798141 4.45036697 0.95942086]\nclasses[2] = -2.24527\nscores.shape = (10,)\nboxes.shape = (10, 4)\nclasses.shape = (10,)\n" ] ], [ [ "**Expected Output**:\n\n<table>\n <tr>\n <td>\n **scores[2]**\n </td>\n <td>\n 6.9384\n </td>\n </tr>\n <tr>\n <td>\n **boxes[2]**\n </td>\n <td>\n [-5.299932 3.13798141 4.45036697 0.95942086]\n </td>\n </tr>\n\n <tr>\n <td>\n **classes[2]**\n </td>\n <td>\n -2.24527\n </td>\n </tr>\n <tr>\n <td>\n **scores.shape**\n </td>\n <td>\n (10,)\n </td>\n </tr>\n <tr>\n <td>\n **boxes.shape**\n </td>\n <td>\n (10, 4)\n </td>\n </tr>\n\n <tr>\n <td>\n **classes.shape**\n </td>\n <td>\n (10,)\n </td>\n </tr>\n\n</table>", 
"_____no_output_____" ], [ "### 2.4 Wrapping up the filtering\n\nIt's time to implement a function taking the output of the deep CNN (the 19x19x5x85 dimensional encoding) and filtering through all the boxes using the functions you've just implemented. \n\n**Exercise**: Implement `yolo_eval()` which takes the output of the YOLO encoding and filters the boxes using score threshold and NMS. There's just one last implementational detail you have to know. There're a few ways of representing boxes, such as via their corners or via their midpoint and height/width. YOLO converts between a few such formats at different times, using the following functions (which we have provided): \n\n```python\nboxes = yolo_boxes_to_corners(box_xy, box_wh) \n```\nwhich converts the yolo box coordinates (x,y,w,h) to box corners' coordinates (x1, y1, x2, y2) to fit the input of `yolo_filter_boxes`\n```python\nboxes = scale_boxes(boxes, image_shape)\n```\nYOLO's network was trained to run on 608x608 images. If you are testing this data on a different size image--for example, the car detection dataset had 720x1280 images--this step rescales the boxes so that they can be plotted on top of the original 720x1280 image. \n\nDon't worry about these two functions; we'll show you where they need to be called. ", "_____no_output_____" ] ], [ [ "# GRADED FUNCTION: yolo_eval\n\ndef yolo_eval(yolo_outputs, image_shape = (720., 1280.), max_boxes=10, score_threshold=.6, iou_threshold=.5):\n \"\"\"\n Converts the output of YOLO encoding (a lot of boxes) to your predicted boxes along with their scores, box coordinates and classes.\n \n Arguments:\n yolo_outputs -- output of the encoding model (for image_shape of (608, 608, 3)), contains 4 tensors:\n box_confidence: tensor of shape (None, 19, 19, 5, 1)\n box_xy: tensor of shape (None, 19, 19, 5, 2)\n box_wh: tensor of shape (None, 19, 19, 5, 2)\n box_class_probs: tensor of shape (None, 19, 19, 5, 80)\n image_shape -- tensor of shape (2,) containing the input shape, in this notebook we use (608., 608.) 
(has to be float32 dtype)\n max_boxes -- integer, maximum number of predicted boxes you'd like\n score_threshold -- real value, if [ highest class probability score < threshold], then get rid of the corresponding box\n iou_threshold -- real value, \"intersection over union\" threshold used for NMS filtering\n \n Returns:\n scores -- tensor of shape (None, ), predicted score for each box\n boxes -- tensor of shape (None, 4), predicted box coordinates\n classes -- tensor of shape (None,), predicted class for each box\n \"\"\"\n \n ### START CODE HERE ### \n \n # Retrieve outputs of the YOLO model (≈1 line)\n box_confidence, box_xy, box_wh, box_class_probs = yolo_outputs\n\n # Convert boxes to be ready for filtering functions \n boxes = yolo_boxes_to_corners(box_xy, box_wh)\n\n # Use one of the functions you've implemented to perform Score-filtering with a threshold of score_threshold (≈1 line)\n scores, boxes, classes = yolo_filter_boxes(box_confidence, boxes, box_class_probs, score_threshold)\n \n # Scale boxes back to original image shape.\n boxes = scale_boxes(boxes, image_shape)\n\n # Use one of the functions you've implemented to perform Non-max suppression with a threshold of iou_threshold (≈1 line)\n scores, boxes, classes = yolo_non_max_suppression(scores, boxes, classes, max_boxes, iou_threshold)\n \n ### END CODE HERE ###\n \n return scores, boxes, classes", "_____no_output_____" ], [ "with tf.Session() as test_b:\n yolo_outputs = (tf.random_normal([19, 19, 5, 1], mean=1, stddev=4, seed = 1),\n tf.random_normal([19, 19, 5, 2], mean=1, stddev=4, seed = 1),\n tf.random_normal([19, 19, 5, 2], mean=1, stddev=4, seed = 1),\n tf.random_normal([19, 19, 5, 80], mean=1, stddev=4, seed = 1))\n scores, boxes, classes = yolo_eval(yolo_outputs)\n print(\"scores[2] = \" + str(scores[2].eval()))\n print(\"boxes[2] = \" + str(boxes[2].eval()))\n print(\"classes[2] = \" + str(classes[2].eval()))\n print(\"scores.shape = \" + str(scores.eval().shape))\n print(\"boxes.shape = \" + str(boxes.eval().shape))\n print(\"classes.shape = \" + str(classes.eval().shape))", "scores[2] = 138.791\nboxes[2] = [ 1292.32971191 -278.52166748 3876.98925781 -835.56494141]\nclasses[2] = 54\nscores.shape = (10,)\nboxes.shape = (10, 4)\nclasses.shape = (10,)\n" ] ], [ [ "**Expected Output**:\n\n<table>\n <tr>\n <td>\n **scores[2]**\n </td>\n <td>\n 138.791\n </td>\n </tr>\n <tr>\n <td>\n **boxes[2]**\n </td>\n <td>\n [ 1292.32971191 -278.52166748 3876.98925781 -835.56494141]\n </td>\n </tr>\n\n <tr>\n <td>\n **classes[2]**\n </td>\n <td>\n 54\n </td>\n </tr>\n <tr>\n <td>\n **scores.shape**\n </td>\n <td>\n (10,)\n </td>\n </tr>\n <tr>\n <td>\n **boxes.shape**\n </td>\n <td>\n (10, 4)\n </td>\n </tr>\n\n <tr>\n <td>\n **classes.shape**\n </td>\n <td>\n (10,)\n </td>\n </tr>\n\n</table>", "_____no_output_____" ], [ "<font color='blue'>\n**Summary for YOLO**:\n- Input image (608, 608, 3)\n- The input image goes through a CNN, resulting in a (19,19,5,85) dimensional output. \n- After flattening the last two dimensions, the output is a volume of shape (19, 19, 425):\n - Each cell in a 19x19 grid over the input image gives 425 numbers. \n - 425 = 5 x 85 because each cell contains predictions for 5 boxes, corresponding to 5 anchor boxes, as seen in lecture. 
\n - 85 = 5 + 80 where 5 is because $(p_c, b_x, b_y, b_h, b_w)$ has 5 numbers, and 80 is the number of classes we'd like to detect\n- You then select only a few boxes based on:\n - Score-thresholding: throw away boxes that have detected a class with a score less than the threshold\n - Non-max suppression: Compute the Intersection over Union and avoid selecting overlapping boxes\n- This gives you YOLO's final output. ", "_____no_output_____" ], [ "## 3 - Test YOLO pretrained model on images", "_____no_output_____" ], [ "In this part, you are going to use a pretrained model and test it on the car detection dataset. As usual, you start by **creating a session to start your graph**. Run the following cell.", "_____no_output_____" ] ], [ [ "sess = K.get_session()", "_____no_output_____" ] ], [ [ "### 3.1 - Defining classes, anchors and image shape.", "_____no_output_____" ], [ "Recall that we are trying to detect 80 classes, and are using 5 anchor boxes. We have gathered the information about the 80 classes and 5 boxes in two files \"coco_classes.txt\" and \"yolo_anchors.txt\". Let's load these quantities into the model by running the next cell. \n\nThe car detection dataset has 720x1280 images, which we've pre-processed into 608x608 images. ", "_____no_output_____" ] ], [ [ "class_names = read_classes(\"model_data/coco_classes.txt\")\nanchors = read_anchors(\"model_data/yolo_anchors.txt\")\nimage_shape = (720., 1280.) ", "_____no_output_____" ] ], [ [ "### 3.2 - Loading a pretrained model\n\nTraining a YOLO model takes a very long time and requires a fairly large dataset of labelled bounding boxes for a large range of target classes. You are going to load an existing pretrained Keras YOLO model stored in \"yolo.h5\". (These weights come from the official YOLO website, and were converted using a function written by Allan Zelener. References are at the end of this notebook. Technically, these are the parameters from the \"YOLOv2\" model, but we will more simply refer to it as \"YOLO\" in this notebook.) Run the cell below to load the model from this file.", "_____no_output_____" ] ], [ [ "yolo_model = load_model(\"model_data/yolo.h5\")", "/opt/conda/lib/python3.6/site-packages/keras/models.py:251: UserWarning: No training configuration found in save file: the model was *not* compiled. Compile it manually.\n warnings.warn('No training configuration found in save file: '\n" ] ], [ [ "\nThis loads the weights of a trained YOLO model. 
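(As a quick, optional sanity check that the load worked -- this is not part of the exercise -- the standard Keras shape attributes below should match the figures used throughout this notebook.)\n\n```python\n# Optional sanity check (assumes the cell above ran without errors)\nprint(yolo_model.input_shape)   # expected: (None, 608, 608, 3)\nprint(yolo_model.output_shape)  # expected: (None, 19, 19, 425), i.e. 5 boxes x 85 values per grid cell\n```\n\n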
Here's a summary of the layers your model contains.", "_____no_output_____" ] ], [ [ "yolo_model.summary()", "____________________________________________________________________________________________________\nLayer (type) Output Shape Param # Connected to \n====================================================================================================\ninput_1 (InputLayer) (None, 608, 608, 3) 0 \n____________________________________________________________________________________________________\nconv2d_1 (Conv2D) (None, 608, 608, 32) 864 input_1[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_1 (BatchNorm (None, 608, 608, 32) 128 conv2d_1[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_1 (LeakyReLU) (None, 608, 608, 32) 0 batch_normalization_1[0][0] \n____________________________________________________________________________________________________\nmax_pooling2d_1 (MaxPooling2D) (None, 304, 304, 32) 0 leaky_re_lu_1[0][0] \n____________________________________________________________________________________________________\nconv2d_2 (Conv2D) (None, 304, 304, 64) 18432 max_pooling2d_1[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_2 (BatchNorm (None, 304, 304, 64) 256 conv2d_2[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_2 (LeakyReLU) (None, 304, 304, 64) 0 batch_normalization_2[0][0] \n____________________________________________________________________________________________________\nmax_pooling2d_2 (MaxPooling2D) (None, 152, 152, 64) 0 leaky_re_lu_2[0][0] \n____________________________________________________________________________________________________\nconv2d_3 (Conv2D) (None, 152, 152, 128) 73728 max_pooling2d_2[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_3 (BatchNorm (None, 152, 152, 128) 512 conv2d_3[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_3 (LeakyReLU) (None, 152, 152, 128) 0 batch_normalization_3[0][0] \n____________________________________________________________________________________________________\nconv2d_4 (Conv2D) (None, 152, 152, 64) 8192 leaky_re_lu_3[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_4 (BatchNorm (None, 152, 152, 64) 256 conv2d_4[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_4 (LeakyReLU) (None, 152, 152, 64) 0 batch_normalization_4[0][0] \n____________________________________________________________________________________________________\nconv2d_5 (Conv2D) (None, 152, 152, 128) 73728 leaky_re_lu_4[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_5 (BatchNorm (None, 152, 152, 128) 512 conv2d_5[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_5 (LeakyReLU) (None, 152, 152, 128) 0 batch_normalization_5[0][0] \n____________________________________________________________________________________________________\nmax_pooling2d_3 (MaxPooling2D) (None, 76, 76, 128) 0 
leaky_re_lu_5[0][0] \n____________________________________________________________________________________________________\nconv2d_6 (Conv2D) (None, 76, 76, 256) 294912 max_pooling2d_3[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_6 (BatchNorm (None, 76, 76, 256) 1024 conv2d_6[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_6 (LeakyReLU) (None, 76, 76, 256) 0 batch_normalization_6[0][0] \n____________________________________________________________________________________________________\nconv2d_7 (Conv2D) (None, 76, 76, 128) 32768 leaky_re_lu_6[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_7 (BatchNorm (None, 76, 76, 128) 512 conv2d_7[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_7 (LeakyReLU) (None, 76, 76, 128) 0 batch_normalization_7[0][0] \n____________________________________________________________________________________________________\nconv2d_8 (Conv2D) (None, 76, 76, 256) 294912 leaky_re_lu_7[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_8 (BatchNorm (None, 76, 76, 256) 1024 conv2d_8[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_8 (LeakyReLU) (None, 76, 76, 256) 0 batch_normalization_8[0][0] \n____________________________________________________________________________________________________\nmax_pooling2d_4 (MaxPooling2D) (None, 38, 38, 256) 0 leaky_re_lu_8[0][0] \n____________________________________________________________________________________________________\nconv2d_9 (Conv2D) (None, 38, 38, 512) 1179648 max_pooling2d_4[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_9 (BatchNorm (None, 38, 38, 512) 2048 conv2d_9[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_9 (LeakyReLU) (None, 38, 38, 512) 0 batch_normalization_9[0][0] \n____________________________________________________________________________________________________\nconv2d_10 (Conv2D) (None, 38, 38, 256) 131072 leaky_re_lu_9[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_10 (BatchNor (None, 38, 38, 256) 1024 conv2d_10[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_10 (LeakyReLU) (None, 38, 38, 256) 0 batch_normalization_10[0][0] \n____________________________________________________________________________________________________\nconv2d_11 (Conv2D) (None, 38, 38, 512) 1179648 leaky_re_lu_10[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_11 (BatchNor (None, 38, 38, 512) 2048 conv2d_11[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_11 (LeakyReLU) (None, 38, 38, 512) 0 batch_normalization_11[0][0] \n____________________________________________________________________________________________________\nconv2d_12 (Conv2D) (None, 38, 38, 256) 131072 leaky_re_lu_11[0][0] 
\n____________________________________________________________________________________________________\nbatch_normalization_12 (BatchNor (None, 38, 38, 256) 1024 conv2d_12[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_12 (LeakyReLU) (None, 38, 38, 256) 0 batch_normalization_12[0][0] \n____________________________________________________________________________________________________\nconv2d_13 (Conv2D) (None, 38, 38, 512) 1179648 leaky_re_lu_12[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_13 (BatchNor (None, 38, 38, 512) 2048 conv2d_13[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_13 (LeakyReLU) (None, 38, 38, 512) 0 batch_normalization_13[0][0] \n____________________________________________________________________________________________________\nmax_pooling2d_5 (MaxPooling2D) (None, 19, 19, 512) 0 leaky_re_lu_13[0][0] \n____________________________________________________________________________________________________\nconv2d_14 (Conv2D) (None, 19, 19, 1024) 4718592 max_pooling2d_5[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_14 (BatchNor (None, 19, 19, 1024) 4096 conv2d_14[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_14 (LeakyReLU) (None, 19, 19, 1024) 0 batch_normalization_14[0][0] \n____________________________________________________________________________________________________\nconv2d_15 (Conv2D) (None, 19, 19, 512) 524288 leaky_re_lu_14[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_15 (BatchNor (None, 19, 19, 512) 2048 conv2d_15[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_15 (LeakyReLU) (None, 19, 19, 512) 0 batch_normalization_15[0][0] \n____________________________________________________________________________________________________\nconv2d_16 (Conv2D) (None, 19, 19, 1024) 4718592 leaky_re_lu_15[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_16 (BatchNor (None, 19, 19, 1024) 4096 conv2d_16[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_16 (LeakyReLU) (None, 19, 19, 1024) 0 batch_normalization_16[0][0] \n____________________________________________________________________________________________________\nconv2d_17 (Conv2D) (None, 19, 19, 512) 524288 leaky_re_lu_16[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_17 (BatchNor (None, 19, 19, 512) 2048 conv2d_17[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_17 (LeakyReLU) (None, 19, 19, 512) 0 batch_normalization_17[0][0] \n____________________________________________________________________________________________________\nconv2d_18 (Conv2D) (None, 19, 19, 1024) 4718592 leaky_re_lu_17[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_18 (BatchNor (None, 19, 19, 1024) 4096 conv2d_18[0][0] 
\n____________________________________________________________________________________________________\nleaky_re_lu_18 (LeakyReLU) (None, 19, 19, 1024) 0 batch_normalization_18[0][0] \n____________________________________________________________________________________________________\nconv2d_19 (Conv2D) (None, 19, 19, 1024) 9437184 leaky_re_lu_18[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_19 (BatchNor (None, 19, 19, 1024) 4096 conv2d_19[0][0] \n____________________________________________________________________________________________________\nconv2d_21 (Conv2D) (None, 38, 38, 64) 32768 leaky_re_lu_13[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_19 (LeakyReLU) (None, 19, 19, 1024) 0 batch_normalization_19[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_21 (BatchNor (None, 38, 38, 64) 256 conv2d_21[0][0] \n____________________________________________________________________________________________________\nconv2d_20 (Conv2D) (None, 19, 19, 1024) 9437184 leaky_re_lu_19[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_21 (LeakyReLU) (None, 38, 38, 64) 0 batch_normalization_21[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_20 (BatchNor (None, 19, 19, 1024) 4096 conv2d_20[0][0] \n____________________________________________________________________________________________________\nspace_to_depth_x2 (Lambda) (None, 19, 19, 256) 0 leaky_re_lu_21[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_20 (LeakyReLU) (None, 19, 19, 1024) 0 batch_normalization_20[0][0] \n____________________________________________________________________________________________________\nconcatenate_1 (Concatenate) (None, 19, 19, 1280) 0 space_to_depth_x2[0][0] \n leaky_re_lu_20[0][0] \n____________________________________________________________________________________________________\nconv2d_22 (Conv2D) (None, 19, 19, 1024) 11796480 concatenate_1[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_22 (BatchNor (None, 19, 19, 1024) 4096 conv2d_22[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_22 (LeakyReLU) (None, 19, 19, 1024) 0 batch_normalization_22[0][0] \n____________________________________________________________________________________________________\nconv2d_23 (Conv2D) (None, 19, 19, 425) 435625 leaky_re_lu_22[0][0] \n====================================================================================================\nTotal params: 50,983,561\nTrainable params: 50,962,889\nNon-trainable params: 20,672\n____________________________________________________________________________________________________\n" ] ], [ [ "**Note**: On some computers, you may see a warning message from Keras. 
Don't worry about it if you do--it is fine.\n\n**Reminder**: this model converts a preprocessed batch of input images (shape: (m, 608, 608, 3)) into a tensor of shape (m, 19, 19, 5, 85) as explained in Figure (2).", "_____no_output_____" ], [ "### 3.3 - Convert output of the model to usable bounding box tensors\n\nThe output of `yolo_model` is a (m, 19, 19, 5, 85) tensor that needs to pass through non-trivial processing and conversion. The following cell does that for you.", "_____no_output_____" ] ], [ [ "yolo_outputs = yolo_head(yolo_model.output, anchors, len(class_names))", "_____no_output_____" ] ], [ [ "You added `yolo_outputs` to your graph. This set of 4 tensors is ready to be used as input by your `yolo_eval` function.", "_____no_output_____" ], [ "### 3.4 - Filtering boxes\n\n`yolo_outputs` gave you all the predicted boxes of `yolo_model` in the correct format. You're now ready to perform filtering and select only the best boxes. Let's now call `yolo_eval`, which you had previously implemented, to do this. ", "_____no_output_____" ] ], [ [ "scores, boxes, classes = yolo_eval(yolo_outputs, image_shape)", "_____no_output_____" ] ], [ [ "### 3.5 - Run the graph on an image\n\nLet the fun begin. You have created a (`sess`) graph that can be summarized as follows:\n\n1. <font color='purple'> yolo_model.input </font> is given to `yolo_model`. The model is used to compute the output <font color='purple'> yolo_model.output </font>\n2. <font color='purple'> yolo_model.output </font> is processed by `yolo_head`. It gives you <font color='purple'> yolo_outputs </font>\n3. <font color='purple'> yolo_outputs </font> goes through a filtering function, `yolo_eval`. It outputs your predictions: <font color='purple'> scores, boxes, classes </font>\n\n**Exercise**: Implement predict() which runs the graph to test YOLO on an image.\nYou will need to run a TensorFlow session to have it compute `scores, boxes, classes`.\n\nThe code below also uses the following function:\n```python\nimage, image_data = preprocess_image(\"images/\" + image_file, model_image_size = (608, 608))\n```\nwhich outputs:\n- image: a Python (PIL) representation of your image used for drawing boxes. You won't need to use it.\n- image_data: a numpy-array representing the image. This will be the input to the CNN.\n\n**Important note**: when a model uses BatchNorm (as is the case in YOLO), you will need to pass an additional placeholder in the feed_dict {K.learning_phase(): 0}.", "_____no_output_____" ] ], [ [ "def predict(sess, image_file):\n \"\"\"\n Runs the graph stored in \"sess\" to predict boxes for \"image_file\". Prints and plots the predictions.\n \n Arguments:\n sess -- your tensorflow/Keras session containing the YOLO graph\n image_file -- name of an image stored in the \"images\" folder.\n \n Returns:\n out_scores -- tensor of shape (None, ), scores of the predicted boxes\n out_boxes -- tensor of shape (None, 4), coordinates of the predicted boxes\n out_classes -- tensor of shape (None, ), class index of the predicted boxes\n \n Note: \"None\" actually represents the number of predicted boxes, it varies between 0 and max_boxes. \n \"\"\"\n\n # Preprocess your image\n image, image_data = preprocess_image(\"images/\" + image_file, model_image_size = (608, 608))\n\n # Run the session with the correct tensors and choose the correct placeholders in the feed_dict.\n # You'll need to use feed_dict={yolo_model.input: ... 
, K.learning_phase(): 0})\n ### START CODE HERE ### (≈ 1 line)\n out_scores, out_boxes, out_classes = sess.run((scores, boxes, classes), feed_dict = {yolo_model.input:image_data, K.learning_phase() : 0})\n ### END CODE HERE ###\n\n # Print predictions info\n print('Found {} boxes for {}'.format(len(out_boxes), image_file))\n # Generate colors for drawing bounding boxes.\n colors = generate_colors(class_names)\n # Draw bounding boxes on the image file\n draw_boxes(image, out_scores, out_boxes, out_classes, class_names, colors)\n # Save the predicted bounding box on the image\n image.save(os.path.join(\"out\", image_file), quality=90)\n # Display the results in the notebook\n output_image = scipy.misc.imread(os.path.join(\"out\", image_file))\n imshow(output_image)\n \n return out_scores, out_boxes, out_classes", "_____no_output_____" ] ], [ [ "Run the following cell on the \"test.jpg\" image to verify that your function is correct.", "_____no_output_____" ] ], [ [ "out_scores, out_boxes, out_classes = predict(sess, \"test.jpg\")", "Found 7 boxes for test.jpg\ncar 0.60 (925, 285) (1045, 374)\ncar 0.66 (706, 279) (786, 350)\nbus 0.67 (5, 266) (220, 407)\ncar 0.70 (947, 324) (1280, 705)\ncar 0.74 (159, 303) (346, 440)\ncar 0.80 (761, 282) (942, 412)\ncar 0.89 (367, 300) (745, 648)\n" ] ], [ [ "**Expected Output**:\n\n<table>\n <tr>\n <td>\n **Found 7 boxes for test.jpg**\n </td>\n </tr>\n <tr>\n <td>\n **car**\n </td>\n <td>\n 0.60 (925, 285) (1045, 374)\n </td>\n </tr>\n <tr>\n <td>\n **car**\n </td>\n <td>\n 0.66 (706, 279) (786, 350)\n </td>\n </tr>\n <tr>\n <td>\n **bus**\n </td>\n <td>\n 0.67 (5, 266) (220, 407)\n </td>\n </tr>\n <tr>\n <td>\n **car**\n </td>\n <td>\n 0.70 (947, 324) (1280, 705)\n </td>\n </tr>\n <tr>\n <td>\n **car**\n </td>\n <td>\n 0.74 (159, 303) (346, 440)\n </td>\n </tr>\n <tr>\n <td>\n **car**\n </td>\n <td>\n 0.80 (761, 282) (942, 412)\n </td>\n </tr>\n <tr>\n <td>\n **car**\n </td>\n <td>\n 0.89 (367, 300) (745, 648)\n </td>\n </tr>\n</table>", "_____no_output_____" ], [ "The model you've just run is actually able to detect 80 different classes listed in \"coco_classes.txt\". To test the model on your own images:\n 1. Click on \"File\" in the upper bar of this notebook, then click \"Open\" to go on your Coursera Hub.\n 2. Add your image to this Jupyter Notebook's directory, in the \"images\" folder\n 3. Write your image's name in the cell above code\n 4. Run the code and see the output of the algorithm!\n\nIf you were to run your session in a for loop over all your images. Here's what you would get:\n\n<center>\n<video width=\"400\" height=\"200\" src=\"nb_images/pred_video_compressed2.mp4\" type=\"video/mp4\" controls>\n</video>\n</center>\n\n<caption><center> Predictions of the YOLO model on pictures taken from a camera while driving around the Silicon Valley <br> Thanks [drive.ai](https://www.drive.ai/) for providing this dataset! </center></caption>", "_____no_output_____" ], [ "<font color='blue'>\n**What you should remember**:\n- YOLO is a state-of-the-art object detection model that is fast and accurate\n- It runs an input image through a CNN which outputs a 19x19x5x85 dimensional volume. \n- The encoding can be seen as a grid where each of the 19x19 cells contains information about 5 boxes.\n- You filter through all the boxes using non-max suppression. 
Specifically: \n - Score thresholding on the probability of detecting a class to keep only accurate (high probability) boxes\n - Intersection over Union (IoU) thresholding to eliminate overlapping boxes\n- Because training a YOLO model from randomly initialized weights is non-trivial and requires a large dataset as well as lot of computation, we used previously trained model parameters in this exercise. If you wish, you can also try fine-tuning the YOLO model with your own dataset, though this would be a fairly non-trivial exercise. ", "_____no_output_____" ], [ "**References**: The ideas presented in this notebook came primarily from the two YOLO papers. The implementation here also took significant inspiration and used many components from Allan Zelener's github repository. The pretrained weights used in this exercise came from the official YOLO website. \n- Joseph Redmon, Santosh Divvala, Ross Girshick, Ali Farhadi - [You Only Look Once: Unified, Real-Time Object Detection](https://arxiv.org/abs/1506.02640) (2015)\n- Joseph Redmon, Ali Farhadi - [YOLO9000: Better, Faster, Stronger](https://arxiv.org/abs/1612.08242) (2016)\n- Allan Zelener - [YAD2K: Yet Another Darknet 2 Keras](https://github.com/allanzelener/YAD2K)\n- The official YOLO website (https://pjreddie.com/darknet/yolo/) ", "_____no_output_____" ], [ "**Car detection dataset**:\n<a rel=\"license\" href=\"http://creativecommons.org/licenses/by/4.0/\"><img alt=\"Creative Commons License\" style=\"border-width:0\" src=\"https://i.creativecommons.org/l/by/4.0/88x31.png\" /></a><br /><span xmlns:dct=\"http://purl.org/dc/terms/\" property=\"dct:title\">The Drive.ai Sample Dataset</span> (provided by drive.ai) is licensed under a <a rel=\"license\" href=\"http://creativecommons.org/licenses/by/4.0/\">Creative Commons Attribution 4.0 International License</a>. We are especially grateful to Brody Huval, Chih Hu and Rahul Patel for collecting and providing this dataset. ", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ] ]
d0a51fae07f92eaf8dac4bfa0a354cf34c85d662
266,937
ipynb
Jupyter Notebook
jupyter_notebooks/trash/2_26_costs.ipynb
noambuckman/mpc-multiple-vehicles
a20949c335f1af97962569eed112e6cef46174d9
[ "MIT" ]
1
2021-11-02T15:16:17.000Z
2021-11-02T15:16:17.000Z
jupyter_notebooks/trash/2_26_costs.ipynb
noambuckman/mpc-multiple-vehicles
a20949c335f1af97962569eed112e6cef46174d9
[ "MIT" ]
5
2021-04-14T17:08:59.000Z
2021-05-27T21:41:02.000Z
jupyter_notebooks/trash/2_26_costs.ipynb
noambuckman/mpc-multiple-vehicles
a20949c335f1af97962569eed112e6cef46174d9
[ "MIT" ]
2
2022-02-07T08:16:05.000Z
2022-03-09T23:30:17.000Z
89.187103
15,076
0.577709
[ [ [ "import datetime\nimport os, sys\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nimport casadi as cas\nimport pickle\nimport copy as cp\n\n\n# from ..</src> import car_plotting\n# from .import src.car_plotting\nPROJECT_PATH = '/home/nbuckman/Dropbox (MIT)/DRL/2020_01_cooperative_mpc/mpc-multiple-vehicles/'\nsys.path.append(PROJECT_PATH)\n\n\nimport src.MPC_Casadi as mpc\nimport src.TrafficWorld as tw\nimport src.IterativeBestResponseMPCMultiple as mibr\n\nnp.set_printoptions(precision=2)", "_____no_output_____" ], [ "\nNEW = True\nif NEW:\n optional_suffix = \"testsave\"\n subdir_name = datetime.datetime.now().strftime(\"%Y%m%d-%H%M%S\") + optional_suffix\n folder = \"results/\" + subdir_name + \"/\"\n os.makedirs(folder)\n os.makedirs(folder+\"imgs/\")\n os.makedirs(folder+\"data/\")\n os.makedirs(folder+\"vids/\")\nelse:\n subdir_name = \"20200224-103456_real_dim_CA\"\n folder = \"results/\" + subdir_name + \"/\"\nprint(folder)\n\n\n\nT = 10 #numbr of time horizons\ndt = 0.2\nN = int(T/dt) #Number of control intervals\nworld = tw.TrafficWorld(2, 0, 1000)\n# Initial Conditions\n\nall_other_x0 = []\nall_other_u = []\nn_other = 2\n\nall_other_MPC = []\nnext_x0 = 0\nfor i in range(n_other):\n x1_MPC = mpc.MPC(dt)\n x1_MPC.theta_iamb = np.pi/2.5\n x1_MPC.k_final = 1.0\n x1_MPC.k_s = -2.0\n # x1_MPC.k_s = 0.0\n # x1_MPC.k_x = -1.0\n\n x1_MPC.min_y = world.y_min\n x1_MPC.max_y = world.y_max\n\n x1_MPC.k_u_v = 0.10\n x1_MPC.k_u_delta = 0.10\n x1_MPC.k_lat = 1.0\n # x1_MPC.k_change_u_v = 1.0\n # x1_MPC.k_change_u_delta = 1.0\n\n if i%2 == 0:\n lane_number = 0\n next_x0 += x1_MPC.L/2.0 + 2*x1_MPC.min_dist\n else:\n lane_number = 1\n \n initial_speed = 20 * 0.447 # m/s\n x1_MPC.fd = x1_MPC.gen_f_desired_lane(world, lane_number, True)\n x0 = np.array([next_x0, world.get_lane_centerline_y(lane_number), 0, 0, initial_speed, 0]).T\n u1 = np.zeros((2,N))\n u1[0,:] = np.clip(np.pi/180 *np.random.normal(size=(1,N)), -2 * np.pi/180, 2 * np.pi/180)\n# u1[0,:] = np.ones((1,N)) * np.pi/6\n # u1[1,:] = np.clip(np.random.normal(size=(1,N)), -x1_MPC.max_acceleration * x1_MPC.dt, x1_MPC.max_acceleration * x1_MPC.dt)\n all_other_MPC += [x1_MPC]\n all_other_x0 += [x0]\n all_other_u += [u1] \n\namb_MPC = cp.deepcopy(x1_MPC)\namb_MPC.theta_iamb = 0.0\n\namb_MPC.k_u_v = 0.10\namb_MPC.k_u_delta = 1.0\namb_MPC.k_change_u_v = 0.01\namb_MPC.k_change_u_delta = 0.0\namb_MPC.k_phi\namb_MPC.k_x = -1/10000.0\namb_MPC.k_s = 0\n# amb_MPC.min_v = initial_speed \n# amb_MPC.k_u_change = 1.0\n# amb_MPC.k_lat = 0\namb_MPC.k_lon = 0.0\n# amb_MPC.k_s = -2.0\namb_MPC.max_v = 40 * 0.447 # m/s\n# amb_MPC.max_X_dev = 5.0\n\n\n\namb_MPC.fd = amb_MPC.gen_f_desired_lane(world, 0, True)\n\nx0_amb = np.array([0, 0, 0, 0, 1.1*initial_speed , 0]).T\nuamb = np.zeros((2,N))\nuamb[0,:] = np.clip(np.pi/180 * np.random.normal(size=(1,N)), -2 * np.pi/180, 2 * np.pi/180)\namb_MPC.min_v = 1.1*initial_speed\n", "results/20200226-125359testsave/\n" ], [ "WARM = True\nn_total_round = 60\nibr_sub_it = 1\nruntimeerrors = 0\nmin_slack = 100000.0\nfor n_round in range(n_total_round):\n response_MPC = amb_MPC\n response_x0 = x0_amb\n \n nonresponse_MPC_list = all_other_MPC\n nonresponse_x0_list = all_other_x0\n nonresponse_u_list = all_other_u\n bri = mibr.IterativeBestResponseMPCMultiple(response_MPC, None, nonresponse_MPC_list )\n bri.k_slack = 999\n bri.generate_optimization(N, T, response_x0, None, nonresponse_x0_list, 5, slack=True)\n bri.solve(None, nonresponse_u_list)\n x1, u1, x1_des, _, _, _, other_x, other_u, other_des = 
bri.get_solution()", "This is Ipopt version 3.12.3, running with linear solver mumps.\nNOTE: Other linear solvers might be more efficient (see Ipopt documentation).\n\nNumber of nonzeros in equality constraint Jacobian...: 4230\nNumber of nonzeros in inequality constraint Jacobian.: 3466\nNumber of nonzeros in Lagrangian Hessian.............: 3648\n\nTotal number of variables............................: 2293\n variables with only lower bounds: 0\n variables with lower and upper bounds: 0\n variables with only upper bounds: 0\nTotal number of equality constraints.................: 1377\nTotal number of inequality constraints...............: 967\n inequality constraints with only lower bounds: 204\n inequality constraints with lower and upper bounds: 355\n inequality constraints with only upper bounds: 408\n\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 0 0.0000000e+00 9.83e+00 7.50e-01 -1.0 0.00e+00 - 0.00e+00 0.00e+00 0\n 1 1.0399716e-02 9.83e+00 1.26e+00 -1.0 1.20e+02 -4.0 4.93e-04 3.26e-04h 1\n 2 2.0045559e-02 9.83e+00 2.20e+01 -1.0 1.82e+02 -4.5 3.56e-04 3.85e-05h 1\n 3 4.4710489e-01 9.83e+00 2.52e+01 -1.0 2.46e+02 -5.0 3.88e-04 3.37e-04h 1\n 4 2.5591113e+00 9.83e+00 6.47e+01 -1.0 3.34e+02 -5.4 1.11e-03 4.95e-04h 1\n 5 1.0304744e+01 9.83e+00 1.06e+02 -1.0 2.54e+02 -5.9 1.15e-03 5.03e-04h 1\n 6 7.9216962e+01 9.82e+00 1.08e+02 -1.0 1.86e+02 -6.4 1.38e-03 1.35e-03h 1\n 7 1.6205310e+02 9.82e+00 1.46e+02 -1.0 1.46e+02 -6.9 1.48e-03 8.87e-04h 1\n 8 2.2025207e+02 9.83e+00 1.56e+02 -1.0 3.22e+02 -7.3 6.46e-04 4.87e-04h 1\n 9 2.2441787e+02 9.83e+00 1.54e+02 -1.0 4.45e+03 -7.8 8.53e-06 3.28e-05h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 10 2.5182294e+02 9.82e+00 1.42e+02 -1.0 9.50e+02 -8.3 8.65e-06 2.07e-04h 1\n 11 2.8777432e+02 9.81e+00 1.66e+02 -1.0 4.28e+02 -8.8 6.37e-04 2.55e-04h 1\n 12 4.0157811e+02 9.80e+00 1.67e+02 -1.0 1.45e+02 -9.2 7.37e-04 7.08e-04h 1\n 13 4.1550901e+02 9.80e+00 1.78e+02 -1.0 1.56e+02 -9.7 2.52e-04 7.97e-05h 1\n 14 6.1114602e+02 9.80e+00 1.21e+02 -1.0 1.50e+02 -10.2 8.33e-05 1.00e-03h 1\n 15 6.1355211e+02 9.80e+00 1.82e+02 -1.0 1.68e+02 -10.7 9.96e-04 1.13e-05h 1\n 16 6.2428940e+02 9.80e+00 2.68e+02 -1.0 3.50e+02 -11.2 1.43e-03 5.02e-05h 1\n 17r 6.2428940e+02 9.80e+00 9.99e+02 1.0 0.00e+00 -11.6 0.00e+00 4.03e-07R 5\n 18r 8.6414419e+02 9.80e+00 9.99e+02 1.0 6.29e+03 - 3.71e-04 1.98e-05f 1\n 19r 3.3647143e+03 9.80e+00 9.99e+02 1.0 1.65e+03 - 3.45e-04 1.15e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 20r 5.1092797e+04 9.80e+00 9.98e+02 1.0 7.67e+02 - 3.88e-04 6.81e-04f 1\n 21r 6.3342770e+04 9.79e+00 1.07e+03 1.0 7.22e+00 2.0 1.50e-01 1.14e-02f 1\n 22r 1.7594664e+05 9.73e+00 1.03e+03 1.0 3.13e+00 2.4 2.75e-01 2.46e-01f 1\n 23r 1.6728527e+05 9.68e+00 1.07e+03 1.0 2.44e+00 2.9 3.31e-02 1.26e-01f 1\n 24r 1.7195513e+05 9.51e+00 1.09e+03 1.0 5.75e-01 3.3 8.31e-01 1.00e+00f 1\n 25r 1.7774546e+05 9.35e+00 1.07e+03 1.0 1.83e+00 2.8 3.09e-01 3.74e-01f 1\n 26r 1.7826098e+05 8.80e+00 1.06e+03 1.0 5.11e+00 2.3 3.60e-01 5.80e-01f 1\n 27 1.7790304e+05 8.75e+00 2.67e+03 -1.0 9.73e+01 -12.1 1.05e-03 4.89e-03f 1\n 28 1.7790581e+05 8.75e+00 2.67e+03 -1.0 9.99e+01 -12.6 4.15e-03 8.71e-05h 1\n 29 1.7805578e+05 8.74e+00 2.67e+03 -1.0 1.49e+02 -13.1 2.98e-03 1.65e-03h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 30 1.7826045e+05 8.73e+00 2.66e+03 -1.0 1.00e+02 -13.5 5.02e-03 1.36e-03h 1\n 31 1.7833630e+05 8.72e+00 2.66e+03 -1.0 9.67e+01 -14.0 4.35e-03 3.57e-04h 1\n 32 
1.7838366e+05 8.72e+00 2.66e+03 -1.0 9.66e+01 -14.5 5.53e-03 1.85e-04h 1\n 33 1.7897357e+05 8.70e+00 2.66e+03 -1.0 9.66e+01 -15.0 6.03e-03 1.99e-03h 1\n 34 1.7979138e+05 8.68e+00 2.65e+03 -1.0 9.65e+01 -15.5 5.02e-03 2.33e-03h 1\n 35 1.8083548e+05 8.66e+00 2.64e+03 -1.0 9.65e+01 -15.9 5.14e-03 2.59e-03h 1\n 36 1.8126368e+05 8.65e+00 2.64e+03 -1.0 9.68e+01 -16.4 1.38e-03 9.55e-04h 1\n 37 1.8126992e+05 8.65e+00 2.64e+03 -1.0 9.72e+01 -16.9 2.04e-05 1.32e-05h 1\n 38 1.8127749e+05 8.65e+00 2.64e+03 -1.0 9.75e+01 -17.4 5.22e-03 1.49e-05h 1\n 39 1.8272429e+05 8.63e+00 2.63e+03 -1.0 9.81e+01 -17.8 3.94e-04 2.72e-03h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 40 1.8373146e+05 8.61e+00 2.63e+03 -1.0 9.79e+01 -18.3 2.78e-05 1.86e-03h 1\n 41 1.8460711e+05 8.60e+00 2.63e+03 -1.0 9.79e+01 -18.8 6.10e-03 1.58e-03h 1\n 42 1.8506845e+05 8.59e+00 3.03e+03 -1.0 9.77e+01 -19.3 3.60e-03 8.05e-04h 1\n 43 1.8630538e+05 8.57e+00 2.81e+03 -1.0 9.79e+01 -19.7 1.23e-03 2.15e-03h 1\n 44 1.8693912e+05 8.56e+00 2.69e+03 -1.0 9.91e+01 -19.1 6.31e-04 1.18e-03h 1\n 45 1.8714858e+05 8.56e+00 2.61e+03 -1.0 9.72e+01 -19.6 1.86e-05 3.60e-04h 1\n 46 1.8833659e+05 8.54e+00 3.03e+03 -1.0 9.83e+01 -19.1 4.39e-03 2.16e-03h 1\n 47 1.8900050e+05 8.53e+00 3.12e+03 -1.0 9.71e+01 -19.6 1.51e-03 1.12e-03h 1\n 48 1.8921851e+05 8.53e+00 3.04e+03 -1.0 9.85e+01 -19.1 2.64e-05 3.81e-04h 1\n 49 1.8974673e+05 8.52e+00 2.87e+03 -1.0 9.66e+01 -19.6 1.53e-04 8.26e-04h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 50 1.8975438e+05 8.52e+00 3.74e+03 -1.0 9.81e+01 -19.1 3.80e-03 1.32e-05h 1\n 51 1.9002449e+05 8.52e+00 4.95e+03 -1.0 9.84e+01 -19.6 3.14e-03 3.97e-04h 1\n 52 1.9113127e+05 8.50e+00 5.17e+03 -1.0 9.86e+01 -19.1 2.66e-03 1.79e-03h 1\n 53 1.9218965e+05 8.49e+00 4.74e+03 -1.0 9.66e+01 -19.6 1.49e-05 1.46e-03h 1\n 54 1.9235900e+05 8.49e+00 4.77e+03 -1.0 9.69e+01 -19.1 3.43e-04 2.55e-04h 1\n 55 1.9614877e+05 8.45e+00 3.24e+03 -1.0 9.55e+01 -19.6 4.82e-05 4.46e-03h 1\n 56 1.9767353e+05 8.43e+00 3.55e+03 -1.0 9.43e+01 -19.1 3.11e-03 2.00e-03h 1\n 57 2.0045953e+05 8.41e+00 3.54e+03 -1.0 9.38e+01 -19.6 3.09e-03 3.11e-03h 1\n 58 2.0058633e+05 8.41e+00 4.32e+03 -1.0 9.34e+01 -19.1 2.74e-03 1.60e-04h 1\n 59r 2.0058633e+05 8.41e+00 1.00e+03 0.9 0.00e+00 -19.6 0.00e+00 4.42e-07R 5\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 60r 2.0053275e+05 8.41e+00 1.00e+03 0.9 3.17e+03 - 3.55e-04 6.45e-05f 1\n 61r 2.0041460e+05 8.41e+00 1.00e+03 0.9 1.58e+03 - 3.29e-04 1.19e-04f 1\n 62r 2.0024799e+05 8.41e+00 9.99e+02 0.9 1.43e+03 - 7.29e-04 1.04e-03f 1\n 63r 2.0178166e+05 8.42e+00 9.97e+02 0.9 9.93e+02 - 2.24e-03 1.49e-03f 1\n 64r 2.0902033e+05 8.42e+00 9.92e+02 0.9 5.71e+02 - 1.11e-02 3.85e-03f 1\n 65r 2.3136420e+05 8.41e+00 9.83e+02 0.9 3.09e+02 - 8.14e-03 9.32e-03f 1\n 66r 2.6153891e+05 8.40e+00 9.88e+02 0.9 3.62e+02 - 7.16e-03 1.05e-02f 1\n 67r 2.7798903e+05 8.40e+00 9.64e+02 0.9 3.55e+02 - 1.32e-02 9.11e-03f 1\n 68r 2.8657577e+05 8.40e+00 9.59e+02 0.9 2.47e+02 - 2.08e-02 4.88e-03f 1\n 69r 2.9527415e+05 8.40e+00 9.55e+02 0.9 1.55e+02 - 2.61e-03 4.50e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 70r 3.0527057e+05 8.40e+00 9.50e+02 0.9 5.20e+02 - 6.42e-03 5.12e-03f 1\n 71r 3.3582481e+05 8.40e+00 9.58e+02 0.9 1.01e+02 - 3.07e-02 1.51e-02f 1\n 72r 4.1107309e+05 8.41e+00 9.04e+02 0.9 1.14e+02 - 2.39e-02 3.35e-02f 1\n 73r 4.5964398e+05 8.41e+00 8.95e+02 0.9 1.15e+02 - 3.62e-02 2.11e-02f 1\n 74r 5.2016472e+05 8.41e+00 8.67e+02 0.9 1.36e+02 - 
4.89e-02 3.03e-02f 1\n 75r 5.9030592e+05 8.41e+00 8.31e+02 0.9 1.48e+02 - 6.01e-02 3.79e-02f 1\n 76r 6.7476964e+05 8.41e+00 7.88e+02 0.9 1.61e+02 - 7.73e-02 4.59e-02f 1\n 77r 7.7561624e+05 8.41e+00 7.44e+02 0.9 1.82e+02 - 1.18e-01 5.62e-02f 1\n 78r 8.4945381e+05 8.41e+00 7.06e+02 0.9 4.62e+02 - 4.89e-03 5.46e-02f 1\n 79r 9.0140487e+05 8.41e+00 6.78e+02 0.9 2.50e+02 - 1.14e-01 3.60e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 80r 9.7546515e+05 8.41e+00 6.36e+02 0.9 2.54e+02 - 1.43e-01 6.16e-02f 1\n 81r 1.1190008e+06 8.38e+00 5.68e+02 0.9 3.76e+02 - 3.36e-02 1.08e-01f 1\n 82r 1.1225339e+06 8.37e+00 5.49e+02 0.9 8.82e+02 - 8.26e-03 3.25e-02f 1\n 83r 1.1286850e+06 8.36e+00 5.46e+02 0.9 4.12e+02 - 3.25e-02 6.75e-03f 1\n 84r 1.2659338e+06 8.29e+00 3.86e+02 0.9 7.87e+00 0.0 3.42e-01 2.93e-01f 1\n 85r 1.2612169e+06 8.28e+00 3.74e+02 0.2 2.38e+02 - 1.60e-02 3.04e-02f 1\n 86r 1.3908950e+06 8.17e+00 3.95e+02 0.2 3.02e+02 - 2.38e-02 7.50e-02f 1\n 87r 1.4392216e+06 8.15e+00 2.99e+02 0.2 7.97e+00 0.4 1.84e-01 1.68e-01f 1\n 88r 1.4749437e+06 8.14e+00 2.77e+02 0.2 5.34e+01 -0.1 4.57e-02 3.68e-02f 1\n 89r 1.4776820e+06 8.12e+00 2.77e+02 0.2 8.08e+02 - 3.91e-03 5.72e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 90r 1.4904632e+06 8.09e+00 3.10e+02 0.2 2.23e+02 - 1.15e-01 3.01e-02f 1\n 91r 1.4780369e+06 8.03e+00 2.85e+02 0.2 2.60e+02 - 1.24e-02 4.77e-02f 1\n 92r 1.4633854e+06 7.98e+00 2.84e+02 0.2 2.35e+02 - 1.42e-01 5.45e-02f 1\n 93r 1.4468075e+06 7.94e+00 2.89e+02 0.2 3.39e+02 - 2.25e-02 4.04e-02f 1\n 94r 1.4177297e+06 7.84e+00 3.28e+02 0.2 3.67e+02 - 4.08e-02 1.06e-01f 1\n 95r 1.4208335e+06 7.83e+00 3.24e+02 0.2 3.41e+02 - 4.16e-02 2.22e-02f 1\n 96r 1.4035701e+06 7.79e+00 2.82e+02 0.2 3.36e+02 - 6.49e-02 9.28e-02f 1\n 97r 1.3981555e+06 7.76e+00 2.08e+02 0.2 3.03e+01 -0.5 1.71e-01 7.29e-02f 1\n 98r 1.3736085e+06 7.75e+00 2.02e+02 0.2 4.74e+02 - 2.70e-02 1.88e-02f 1\n 99r 1.3738774e+06 7.70e+00 1.87e+02 0.2 4.77e+02 - 7.99e-02 9.61e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 100r 1.3580286e+06 7.69e+00 2.14e+02 0.2 4.67e+02 - 1.03e-01 3.83e-02f 1\n 101r 1.3021262e+06 7.68e+00 2.15e+02 0.2 5.22e+02 - 1.39e-01 9.61e-02f 1\n 102r 1.2905134e+06 7.68e+00 2.12e+02 0.2 5.78e+02 - 5.56e-02 4.11e-02f 1\n 103r 1.2958912e+06 7.68e+00 1.61e+02 0.2 6.56e+02 - 2.71e-02 1.08e-01f 1\n 104r 1.3293811e+06 7.69e+00 1.47e+02 0.2 7.20e+02 - 8.99e-02 8.45e-02f 1\n 105r 1.4013282e+06 7.70e+00 1.56e+02 0.2 9.31e+02 - 7.51e-02 1.05e-01f 1\n 106r 1.5010415e+06 7.67e+00 2.27e+02 0.2 3.80e+01 -1.0 6.34e-02 1.79e-01f 1\n 107r 1.5310867e+06 7.66e+00 2.10e+02 0.2 5.35e+01 -1.5 5.32e-02 3.54e-02f 1\n 108r 1.6858235e+06 7.64e+00 2.07e+02 0.2 2.27e+02 -2.0 1.41e-02 1.65e-02f 1\n 109r 1.6992342e+06 7.64e+00 2.00e+02 0.2 4.36e+01 -0.6 3.64e-02 3.57e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 110r 1.6720537e+06 7.63e+00 1.42e+02 0.2 2.59e+00 0.7 3.58e-01 1.88e-01f 1\n 111r 1.6393971e+06 7.62e+00 1.61e+02 0.2 5.96e+00 0.2 6.27e-02 2.98e-01f 1\n 112r 1.5942856e+06 7.62e+00 9.69e+01 0.2 3.94e+00 0.6 5.59e-01 6.60e-01f 1\n 113r 1.5454439e+06 7.61e+00 8.14e+01 -0.5 9.57e+00 0.2 2.87e-01 3.13e-01f 1\n 114r 1.5348572e+06 7.52e+00 4.26e+01 -0.5 2.28e+01 -0.3 2.19e-01 1.39e-01f 1\n 115 1.5101190e+06 7.46e+00 1.69e+04 -1.0 7.31e+02 -19.1 1.74e-03 8.11e-03f 1\n 116 1.5081170e+06 7.45e+00 1.69e+04 -1.0 6.47e+02 -19.6 1.66e-03 6.70e-04f 1\n 117 1.5000973e+06 7.43e+00 1.68e+04 -1.0 6.33e+02 -19.1 1.83e-03 2.70e-03f 1\n 
118 1.4972338e+06 7.43e+00 1.68e+04 -1.0 6.14e+02 -19.6 4.62e-03 9.87e-04f 1\n 119 1.4926222e+06 7.41e+00 1.68e+04 -1.0 5.92e+02 -19.1 4.76e-03 1.69e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 120 1.4897946e+06 7.40e+00 1.68e+04 -1.0 5.73e+02 -19.6 1.24e-03 1.25e-03f 1\n 121 1.4867964e+06 7.39e+00 1.67e+04 -1.0 5.67e+02 -19.1 1.24e-02 1.40e-03f 1\n 122 1.4811114e+06 7.37e+00 1.67e+04 -1.0 5.31e+02 -19.6 2.94e-03 2.99e-03f 1\n 123 1.4776862e+06 7.36e+00 1.67e+04 -1.0 5.21e+02 -19.1 1.64e-02 1.85e-03f 1\n 124 1.4722508e+06 7.34e+00 1.66e+04 -1.0 4.87e+02 -19.6 5.29e-03 3.07e-03f 1\n 125 1.4686209e+06 7.32e+00 1.66e+04 -1.0 4.76e+02 -19.1 8.82e-03 2.07e-03f 1\n 126 1.4549412e+06 7.26e+00 1.64e+04 -1.0 4.61e+02 -19.6 8.60e-03 7.87e-03f 1\n 127 1.4439147e+06 7.22e+00 1.63e+04 -1.0 4.42e+02 -19.1 3.68e-03 6.45e-03f 1\n 128 1.4318869e+06 7.17e+00 1.62e+04 -1.0 4.33e+02 -19.6 2.58e-04 7.02e-03f 1\n 129 1.4287865e+06 7.15e+00 1.62e+04 -1.0 4.28e+02 -19.1 9.28e-03 1.82e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 130 1.4117953e+06 7.08e+00 1.60e+04 -1.0 4.17e+02 -19.6 1.21e-02 9.95e-03f 1\n 131 1.4094629e+06 7.07e+00 1.60e+04 -1.0 3.99e+02 -19.1 1.22e-02 1.30e-03f 1\n 132 1.3898684e+06 6.99e+00 1.58e+04 -1.0 3.86e+02 -19.6 6.69e-03 1.09e-02f 1\n 133 1.3666966e+06 6.91e+00 1.56e+04 -1.0 3.73e+02 -19.1 1.23e-04 1.25e-02f 1\n 134 1.3653874e+06 6.90e+00 1.56e+04 -1.0 3.64e+02 -19.6 8.61e-03 7.06e-04f 1\n 135 1.3600774e+06 6.88e+00 1.56e+04 -1.0 3.58e+02 -19.1 4.77e-04 2.88e-03f 1\n 136 1.3599863e+06 6.88e+00 1.56e+04 -1.0 3.54e+02 -19.6 1.16e-04 4.90e-05f 1\n 137 1.3520824e+06 6.85e+00 1.55e+04 -1.0 3.56e+02 -19.1 2.25e-02 4.63e-03f 1\n 138 1.3198881e+06 6.72e+00 1.52e+04 -1.0 3.32e+02 -19.6 4.47e-03 1.93e-02f 1\n 139 1.3018334e+06 6.65e+00 1.51e+04 -1.0 3.23e+02 -19.1 3.31e-02 1.03e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 140 1.3001178e+06 6.64e+00 1.50e+04 -1.0 3.09e+02 -19.6 2.72e-03 1.11e-03f 1\n 141 1.2712885e+06 6.53e+00 1.48e+04 -1.0 3.10e+02 -19.1 1.01e-04 1.69e-02f 1\n 142 1.2672814e+06 6.51e+00 1.48e+04 -1.0 3.20e+02 -19.6 4.18e-03 2.32e-03f 1\n 143 1.2662613e+06 6.51e+00 1.47e+04 -1.0 2.90e+02 -19.1 1.94e-02 6.22e-04f 1\n 144 1.2251694e+06 6.34e+00 1.43e+04 -1.0 2.82e+02 -19.6 2.71e-03 2.66e-02f 1\n 145 1.2238974e+06 6.33e+00 1.43e+04 -1.0 2.99e+02 -19.1 1.32e-03 7.91e-04f 1\n 146 1.2218795e+06 6.32e+00 1.43e+04 -1.0 3.08e+02 -19.6 1.65e-03 1.32e-03f 1\n 147 1.2218327e+06 6.32e+00 1.43e+04 -1.0 3.05e+02 -19.1 3.84e-03 2.74e-05f 1\n 148 1.1933966e+06 6.22e+00 1.41e+04 -1.0 3.06e+02 -19.6 7.34e-04 1.67e-02f 1\n 149 1.1923484e+06 6.21e+00 1.41e+04 -1.0 3.08e+02 -19.1 8.24e-03 5.84e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 150 1.1851137e+06 6.19e+00 1.40e+04 -1.0 2.76e+02 -19.6 7.09e-04 4.39e-03f 1\n 151 1.1674398e+06 6.13e+00 1.39e+04 -1.0 2.77e+02 -19.1 4.82e-03 9.94e-03f 1\n 152 1.1671054e+06 6.12e+00 1.39e+04 -1.0 2.72e+02 -19.6 4.03e-03 2.01e-04f 1\n 153 1.1607724e+06 6.10e+00 1.38e+04 -1.0 2.70e+02 -19.1 6.82e-04 3.72e-03f 1\n 154 1.1521951e+06 6.07e+00 1.37e+04 -1.0 2.70e+02 -19.6 1.57e-03 5.21e-03f 1\n 155 1.1512500e+06 6.07e+00 1.37e+04 -1.0 2.70e+02 -19.1 7.99e-04 5.27e-04f 1\n 156 1.1507231e+06 6.06e+00 1.37e+04 -1.0 2.68e+02 -19.6 4.24e-04 3.27e-04f 1\n 157 1.1507040e+06 6.06e+00 1.37e+04 -1.0 2.81e+02 -19.1 5.41e-04 9.69e-06f 1\n 158 1.1467730e+06 6.05e+00 1.37e+04 -1.0 2.63e+02 -19.6 8.26e-06 2.11e-03f 1\n 159 1.0732380e+06 5.82e+00 
1.32e+04 -1.0 2.63e+02 -19.1 2.67e-04 3.91e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 160 1.0714161e+06 5.81e+00 1.32e+04 -1.0 2.56e+02 -19.6 1.02e-02 9.30e-04f 1\n 161 1.0713627e+06 5.81e+00 1.32e+04 -1.0 2.54e+02 -19.1 8.09e-05 2.84e-05f 1\n 162 1.0580046e+06 5.76e+00 1.31e+04 -1.0 2.54e+02 -19.6 1.35e-04 7.69e-03f 1\n 163 1.0545534e+06 5.75e+00 1.30e+04 -1.0 2.54e+02 -19.1 3.39e-03 1.89e-03f 1\n 164 1.0341517e+06 5.68e+00 1.30e+04 -1.0 2.52e+02 -19.6 8.74e-04 1.22e-02f 1\n 165 1.0336429e+06 5.68e+00 1.29e+04 -1.0 2.51e+02 -19.1 6.22e-04 2.90e-04f 1\n 166 1.0275521e+06 5.66e+00 1.38e+04 -1.0 2.51e+02 -19.6 4.36e-04 3.68e-03f 1\n 167 1.0179732e+06 5.63e+00 1.32e+04 -1.0 2.50e+02 -19.1 7.95e-03 5.53e-03f 1\n 168 9.9624521e+05 5.55e+00 1.59e+04 -1.0 2.48e+02 -19.6 4.63e-03 1.40e-02f 1\n 169 9.9527357e+05 5.55e+00 1.58e+04 -1.0 2.44e+02 -19.1 5.96e-04 5.87e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 170 9.9489625e+05 5.55e+00 1.56e+04 -1.0 2.47e+02 -19.6 1.04e-03 2.42e-04f 1\n 171 9.9489118e+05 5.55e+00 1.50e+04 -1.0 2.53e+02 -19.1 1.24e-03 2.86e-06f 1\n 172 9.9421219e+05 5.54e+00 1.57e+04 -1.0 2.49e+02 -19.6 3.63e-06 4.37e-04f 1\n 173 9.8486650e+05 5.51e+00 1.68e+04 -1.0 2.46e+02 -19.1 5.48e-04 5.68e-03f 1\n 174 9.7308682e+05 5.47e+00 1.65e+04 -1.0 2.49e+02 -19.6 8.25e-03 7.61e-03f 1\n 175 9.7212896e+05 5.47e+00 1.65e+04 -1.0 2.44e+02 -19.1 3.20e-04 5.80e-04f 1\n 176 9.6540396e+05 5.44e+00 1.75e+04 -1.0 2.49e+02 -19.6 9.01e-04 4.34e-03f 1\n 177 9.4998516e+05 5.39e+00 1.51e+04 -1.0 2.44e+02 -19.1 1.93e-02 9.44e-03f 1\n 178 9.4904520e+05 5.39e+00 1.52e+04 -1.0 2.45e+02 -19.6 2.19e-04 6.06e-04f 1\n 179 9.4902274e+05 5.39e+00 1.51e+04 -1.0 2.75e+02 -19.1 1.62e-04 1.14e-05f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 180 9.3880492e+05 5.35e+00 1.67e+04 -1.0 2.29e+02 -19.6 1.38e-05 6.45e-03f 1\n 181 8.9559690e+05 5.21e+00 1.79e+04 -1.0 2.29e+02 -19.1 1.06e-02 2.71e-02f 1\n 182 8.5549874e+05 5.07e+00 1.93e+04 -1.0 2.22e+02 -19.6 4.07e-03 2.60e-02f 1\n 183 8.4452994e+05 5.04e+00 1.99e+04 -1.0 2.17e+02 -19.1 4.96e-04 7.46e-03f 1\n 184 8.2921005e+05 4.98e+00 1.98e+04 -1.0 2.14e+02 -19.6 9.95e-03 1.09e-02f 1\n 185 8.2905253e+05 4.98e+00 1.90e+04 -1.0 2.09e+02 -19.1 5.75e-03 1.10e-04f 1\n 186 8.2904529e+05 4.98e+00 1.89e+04 -1.0 2.18e+02 -19.6 7.15e-05 5.41e-06f 1\n 187 8.2712955e+05 4.97e+00 1.92e+04 -1.0 2.14e+02 -19.1 5.72e-05 1.35e-03f 1\n 188 8.0950543e+05 4.91e+00 2.28e+04 -1.0 2.31e+02 -19.6 1.11e-03 1.34e-02f 1\n 189 8.0862616e+05 4.90e+00 2.24e+04 -1.0 2.12e+02 -19.1 1.92e-03 6.51e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 190 8.0425351e+05 4.89e+00 2.35e+04 -1.0 2.22e+02 -19.6 4.57e-04 3.51e-03f 1\n 191 7.9537870e+05 4.85e+00 2.31e+04 -1.0 2.15e+02 -19.1 7.37e-03 6.61e-03f 1\n 192 7.9489154e+05 4.85e+00 2.29e+04 -1.0 2.11e+02 -19.6 1.29e-03 3.92e-04f 1\n 193 7.8751827e+05 4.83e+00 2.29e+04 -1.0 2.01e+02 -19.1 2.56e-03 5.71e-03f 1\n 194 7.8736120e+05 4.82e+00 2.31e+04 -1.0 2.00e+02 -19.6 1.54e-04 1.32e-04f 1\n 195 7.8402757e+05 4.81e+00 2.31e+04 -1.0 1.94e+02 -19.1 2.19e-03 2.63e-03f 1\n 196 7.8388845e+05 4.81e+00 2.53e+04 -1.0 1.98e+02 -19.6 2.15e-04 1.18e-04f 1\n 197 7.8385262e+05 4.81e+00 2.50e+04 -1.0 1.94e+02 -19.1 4.34e-03 2.82e-05f 1\n 198 7.8236217e+05 4.81e+00 1.09e+04 -1.0 1.97e+02 -19.6 3.11e-04 1.27e-03f 1\n 199 7.7035462e+05 4.76e+00 1.08e+04 -1.0 1.91e+02 -19.1 3.91e-04 9.52e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du 
[IPOPT iteration log condensed: this cell output logs the same interior-point solve several times back-to-back (the iterates are identical from run to run; only the CPU times differ slightly), each run printing the full iteration table from iteration 0 to 463. Only the problem statistics and the per-run convergence summary are kept here; a further identical run begins at the end of this output.]

Problem size (per run): 2293 variables; 1377 equality and 967 inequality constraints (204 lower-bounded only, 355 two-sided, 408 upper-bounded only); Jacobian nonzeros 4230 (equality) + 3466 (inequality); Lagrangian Hessian nonzeros 3648. Ipopt version 3.12.3 with the mumps linear solver.

Number of Iterations....: 463
Objective...............: 1.3658436881286921e+01
Dual infeasibility......: 4.5474735088646412e-13
Constraint violation....: 4.9168770388519079e-08
Complementarity.........: 2.5059067889663204e-09
Overall NLP error.......: 4.9168770388519079e-08

EXIT: Solved To Acceptable Level.

Per run: 483 objective and constraint evaluations, 395 objective gradient, 467 Jacobian and 463 Hessian evaluations. Total wall time roughly 14.6-14.8 s per run (about 4.5 s inside Ipopt, about 10 s in NLP function evaluations, dominated by nlp_hess_l at ~6.3 s and nlp_jac_g at ~3.2 s).
objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 60r 2.0053275e+05 8.41e+00 1.00e+03 0.9 3.17e+03 - 3.55e-04 6.45e-05f 1\n 61r 2.0041460e+05 8.41e+00 1.00e+03 0.9 1.58e+03 - 3.29e-04 1.19e-04f 1\n 62r 2.0024799e+05 8.41e+00 9.99e+02 0.9 1.43e+03 - 7.29e-04 1.04e-03f 1\n 63r 2.0178166e+05 8.42e+00 9.97e+02 0.9 9.93e+02 - 2.24e-03 1.49e-03f 1\n 64r 2.0902033e+05 8.42e+00 9.92e+02 0.9 5.71e+02 - 1.11e-02 3.85e-03f 1\n 65r 2.3136420e+05 8.41e+00 9.83e+02 0.9 3.09e+02 - 8.14e-03 9.32e-03f 1\n 66r 2.6153891e+05 8.40e+00 9.88e+02 0.9 3.62e+02 - 7.16e-03 1.05e-02f 1\n 67r 2.7798903e+05 8.40e+00 9.64e+02 0.9 3.55e+02 - 1.32e-02 9.11e-03f 1\n 68r 2.8657577e+05 8.40e+00 9.59e+02 0.9 2.47e+02 - 2.08e-02 4.88e-03f 1\n 69r 2.9527415e+05 8.40e+00 9.55e+02 0.9 1.55e+02 - 2.61e-03 4.50e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 70r 3.0527057e+05 8.40e+00 9.50e+02 0.9 5.20e+02 - 6.42e-03 5.12e-03f 1\n 71r 3.3582481e+05 8.40e+00 9.58e+02 0.9 1.01e+02 - 3.07e-02 1.51e-02f 1\n 72r 4.1107309e+05 8.41e+00 9.04e+02 0.9 1.14e+02 - 2.39e-02 3.35e-02f 1\n 73r 4.5964398e+05 8.41e+00 8.95e+02 0.9 1.15e+02 - 3.62e-02 2.11e-02f 1\n 74r 5.2016472e+05 8.41e+00 8.67e+02 0.9 1.36e+02 - 4.89e-02 3.03e-02f 1\n 75r 5.9030592e+05 8.41e+00 8.31e+02 0.9 1.48e+02 - 6.01e-02 3.79e-02f 1\n 76r 6.7476964e+05 8.41e+00 7.88e+02 0.9 1.61e+02 - 7.73e-02 4.59e-02f 1\n 77r 7.7561624e+05 8.41e+00 7.44e+02 0.9 1.82e+02 - 1.18e-01 5.62e-02f 1\n 78r 8.4945381e+05 8.41e+00 7.06e+02 0.9 4.62e+02 - 4.89e-03 5.46e-02f 1\n 79r 9.0140487e+05 8.41e+00 6.78e+02 0.9 2.50e+02 - 1.14e-01 3.60e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 80r 9.7546515e+05 8.41e+00 6.36e+02 0.9 2.54e+02 - 1.43e-01 6.16e-02f 1\n 81r 1.1190008e+06 8.38e+00 5.68e+02 0.9 3.76e+02 - 3.36e-02 1.08e-01f 1\n 82r 1.1225339e+06 8.37e+00 5.49e+02 0.9 8.82e+02 - 8.26e-03 3.25e-02f 1\n 83r 1.1286850e+06 8.36e+00 5.46e+02 0.9 4.12e+02 - 3.25e-02 6.75e-03f 1\n 84r 1.2659338e+06 8.29e+00 3.86e+02 0.9 7.87e+00 0.0 3.42e-01 2.93e-01f 1\n 85r 1.2612169e+06 8.28e+00 3.74e+02 0.2 2.38e+02 - 1.60e-02 3.04e-02f 1\n 86r 1.3908950e+06 8.17e+00 3.95e+02 0.2 3.02e+02 - 2.38e-02 7.50e-02f 1\n 87r 1.4392216e+06 8.15e+00 2.99e+02 0.2 7.97e+00 0.4 1.84e-01 1.68e-01f 1\n 88r 1.4749437e+06 8.14e+00 2.77e+02 0.2 5.34e+01 -0.1 4.57e-02 3.68e-02f 1\n 89r 1.4776820e+06 8.12e+00 2.77e+02 0.2 8.08e+02 - 3.91e-03 5.72e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 90r 1.4904632e+06 8.09e+00 3.10e+02 0.2 2.23e+02 - 1.15e-01 3.01e-02f 1\n 91r 1.4780369e+06 8.03e+00 2.85e+02 0.2 2.60e+02 - 1.24e-02 4.77e-02f 1\n 92r 1.4633854e+06 7.98e+00 2.84e+02 0.2 2.35e+02 - 1.42e-01 5.45e-02f 1\n 93r 1.4468075e+06 7.94e+00 2.89e+02 0.2 3.39e+02 - 2.25e-02 4.04e-02f 1\n 94r 1.4177297e+06 7.84e+00 3.28e+02 0.2 3.67e+02 - 4.08e-02 1.06e-01f 1\n 95r 1.4208335e+06 7.83e+00 3.24e+02 0.2 3.41e+02 - 4.16e-02 2.22e-02f 1\n 96r 1.4035701e+06 7.79e+00 2.82e+02 0.2 3.36e+02 - 6.49e-02 9.28e-02f 1\n 97r 1.3981555e+06 7.76e+00 2.08e+02 0.2 3.03e+01 -0.5 1.71e-01 7.29e-02f 1\n 98r 1.3736085e+06 7.75e+00 2.02e+02 0.2 4.74e+02 - 2.70e-02 1.88e-02f 1\n 99r 1.3738774e+06 7.70e+00 1.87e+02 0.2 4.77e+02 - 7.99e-02 9.61e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 100r 1.3580286e+06 7.69e+00 2.14e+02 0.2 4.67e+02 - 1.03e-01 3.83e-02f 1\n 101r 1.3021262e+06 7.68e+00 2.15e+02 0.2 5.22e+02 - 1.39e-01 9.61e-02f 1\n 102r 1.2905134e+06 7.68e+00 2.12e+02 0.2 5.78e+02 - 5.56e-02 4.11e-02f 1\n 103r 
1.2958912e+06 7.68e+00 1.61e+02 0.2 6.56e+02 - 2.71e-02 1.08e-01f 1\n 104r 1.3293811e+06 7.69e+00 1.47e+02 0.2 7.20e+02 - 8.99e-02 8.45e-02f 1\n 105r 1.4013282e+06 7.70e+00 1.56e+02 0.2 9.31e+02 - 7.51e-02 1.05e-01f 1\n 106r 1.5010415e+06 7.67e+00 2.27e+02 0.2 3.80e+01 -1.0 6.34e-02 1.79e-01f 1\n 107r 1.5310867e+06 7.66e+00 2.10e+02 0.2 5.35e+01 -1.5 5.32e-02 3.54e-02f 1\n 108r 1.6858235e+06 7.64e+00 2.07e+02 0.2 2.27e+02 -2.0 1.41e-02 1.65e-02f 1\n 109r 1.6992342e+06 7.64e+00 2.00e+02 0.2 4.36e+01 -0.6 3.64e-02 3.57e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 110r 1.6720537e+06 7.63e+00 1.42e+02 0.2 2.59e+00 0.7 3.58e-01 1.88e-01f 1\n 111r 1.6393971e+06 7.62e+00 1.61e+02 0.2 5.96e+00 0.2 6.27e-02 2.98e-01f 1\n 112r 1.5942856e+06 7.62e+00 9.69e+01 0.2 3.94e+00 0.6 5.59e-01 6.60e-01f 1\n 113r 1.5454439e+06 7.61e+00 8.14e+01 -0.5 9.57e+00 0.2 2.87e-01 3.13e-01f 1\n 114r 1.5348572e+06 7.52e+00 4.26e+01 -0.5 2.28e+01 -0.3 2.19e-01 1.39e-01f 1\n 115 1.5101190e+06 7.46e+00 1.69e+04 -1.0 7.31e+02 -19.1 1.74e-03 8.11e-03f 1\n 116 1.5081170e+06 7.45e+00 1.69e+04 -1.0 6.47e+02 -19.6 1.66e-03 6.70e-04f 1\n 117 1.5000973e+06 7.43e+00 1.68e+04 -1.0 6.33e+02 -19.1 1.83e-03 2.70e-03f 1\n 118 1.4972338e+06 7.43e+00 1.68e+04 -1.0 6.14e+02 -19.6 4.62e-03 9.87e-04f 1\n 119 1.4926222e+06 7.41e+00 1.68e+04 -1.0 5.92e+02 -19.1 4.76e-03 1.69e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 120 1.4897946e+06 7.40e+00 1.68e+04 -1.0 5.73e+02 -19.6 1.24e-03 1.25e-03f 1\n 121 1.4867964e+06 7.39e+00 1.67e+04 -1.0 5.67e+02 -19.1 1.24e-02 1.40e-03f 1\n 122 1.4811114e+06 7.37e+00 1.67e+04 -1.0 5.31e+02 -19.6 2.94e-03 2.99e-03f 1\n 123 1.4776862e+06 7.36e+00 1.67e+04 -1.0 5.21e+02 -19.1 1.64e-02 1.85e-03f 1\n 124 1.4722508e+06 7.34e+00 1.66e+04 -1.0 4.87e+02 -19.6 5.29e-03 3.07e-03f 1\n 125 1.4686209e+06 7.32e+00 1.66e+04 -1.0 4.76e+02 -19.1 8.82e-03 2.07e-03f 1\n 126 1.4549412e+06 7.26e+00 1.64e+04 -1.0 4.61e+02 -19.6 8.60e-03 7.87e-03f 1\n 127 1.4439147e+06 7.22e+00 1.63e+04 -1.0 4.42e+02 -19.1 3.68e-03 6.45e-03f 1\n 128 1.4318869e+06 7.17e+00 1.62e+04 -1.0 4.33e+02 -19.6 2.58e-04 7.02e-03f 1\n 129 1.4287865e+06 7.15e+00 1.62e+04 -1.0 4.28e+02 -19.1 9.28e-03 1.82e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 130 1.4117953e+06 7.08e+00 1.60e+04 -1.0 4.17e+02 -19.6 1.21e-02 9.95e-03f 1\n 131 1.4094629e+06 7.07e+00 1.60e+04 -1.0 3.99e+02 -19.1 1.22e-02 1.30e-03f 1\n 132 1.3898684e+06 6.99e+00 1.58e+04 -1.0 3.86e+02 -19.6 6.69e-03 1.09e-02f 1\n 133 1.3666966e+06 6.91e+00 1.56e+04 -1.0 3.73e+02 -19.1 1.23e-04 1.25e-02f 1\n 134 1.3653874e+06 6.90e+00 1.56e+04 -1.0 3.64e+02 -19.6 8.61e-03 7.06e-04f 1\n 135 1.3600774e+06 6.88e+00 1.56e+04 -1.0 3.58e+02 -19.1 4.77e-04 2.88e-03f 1\n 136 1.3599863e+06 6.88e+00 1.56e+04 -1.0 3.54e+02 -19.6 1.16e-04 4.90e-05f 1\n 137 1.3520824e+06 6.85e+00 1.55e+04 -1.0 3.56e+02 -19.1 2.25e-02 4.63e-03f 1\n 138 1.3198881e+06 6.72e+00 1.52e+04 -1.0 3.32e+02 -19.6 4.47e-03 1.93e-02f 1\n 139 1.3018334e+06 6.65e+00 1.51e+04 -1.0 3.23e+02 -19.1 3.31e-02 1.03e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 140 1.3001178e+06 6.64e+00 1.50e+04 -1.0 3.09e+02 -19.6 2.72e-03 1.11e-03f 1\n 141 1.2712885e+06 6.53e+00 1.48e+04 -1.0 3.10e+02 -19.1 1.01e-04 1.69e-02f 1\n 142 1.2672814e+06 6.51e+00 1.48e+04 -1.0 3.20e+02 -19.6 4.18e-03 2.32e-03f 1\n 143 1.2662613e+06 6.51e+00 1.47e+04 -1.0 2.90e+02 -19.1 1.94e-02 6.22e-04f 1\n 144 1.2251694e+06 6.34e+00 1.43e+04 -1.0 2.82e+02 -19.6 
2.71e-03 2.66e-02f 1\n 145 1.2238974e+06 6.33e+00 1.43e+04 -1.0 2.99e+02 -19.1 1.32e-03 7.91e-04f 1\n 146 1.2218795e+06 6.32e+00 1.43e+04 -1.0 3.08e+02 -19.6 1.65e-03 1.32e-03f 1\n 147 1.2218327e+06 6.32e+00 1.43e+04 -1.0 3.05e+02 -19.1 3.84e-03 2.74e-05f 1\n 148 1.1933966e+06 6.22e+00 1.41e+04 -1.0 3.06e+02 -19.6 7.34e-04 1.67e-02f 1\n 149 1.1923484e+06 6.21e+00 1.41e+04 -1.0 3.08e+02 -19.1 8.24e-03 5.84e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 150 1.1851137e+06 6.19e+00 1.40e+04 -1.0 2.76e+02 -19.6 7.09e-04 4.39e-03f 1\n 151 1.1674398e+06 6.13e+00 1.39e+04 -1.0 2.77e+02 -19.1 4.82e-03 9.94e-03f 1\n 152 1.1671054e+06 6.12e+00 1.39e+04 -1.0 2.72e+02 -19.6 4.03e-03 2.01e-04f 1\n 153 1.1607724e+06 6.10e+00 1.38e+04 -1.0 2.70e+02 -19.1 6.82e-04 3.72e-03f 1\n 154 1.1521951e+06 6.07e+00 1.37e+04 -1.0 2.70e+02 -19.6 1.57e-03 5.21e-03f 1\n 155 1.1512500e+06 6.07e+00 1.37e+04 -1.0 2.70e+02 -19.1 7.99e-04 5.27e-04f 1\n 156 1.1507231e+06 6.06e+00 1.37e+04 -1.0 2.68e+02 -19.6 4.24e-04 3.27e-04f 1\n 157 1.1507040e+06 6.06e+00 1.37e+04 -1.0 2.81e+02 -19.1 5.41e-04 9.69e-06f 1\n 158 1.1467730e+06 6.05e+00 1.37e+04 -1.0 2.63e+02 -19.6 8.26e-06 2.11e-03f 1\n 159 1.0732380e+06 5.82e+00 1.32e+04 -1.0 2.63e+02 -19.1 2.67e-04 3.91e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 160 1.0714161e+06 5.81e+00 1.32e+04 -1.0 2.56e+02 -19.6 1.02e-02 9.30e-04f 1\n 161 1.0713627e+06 5.81e+00 1.32e+04 -1.0 2.54e+02 -19.1 8.09e-05 2.84e-05f 1\n 162 1.0580046e+06 5.76e+00 1.31e+04 -1.0 2.54e+02 -19.6 1.35e-04 7.69e-03f 1\n 163 1.0545534e+06 5.75e+00 1.30e+04 -1.0 2.54e+02 -19.1 3.39e-03 1.89e-03f 1\n 164 1.0341517e+06 5.68e+00 1.30e+04 -1.0 2.52e+02 -19.6 8.74e-04 1.22e-02f 1\n 165 1.0336429e+06 5.68e+00 1.29e+04 -1.0 2.51e+02 -19.1 6.22e-04 2.90e-04f 1\n 166 1.0275521e+06 5.66e+00 1.38e+04 -1.0 2.51e+02 -19.6 4.36e-04 3.68e-03f 1\n 167 1.0179732e+06 5.63e+00 1.32e+04 -1.0 2.50e+02 -19.1 7.95e-03 5.53e-03f 1\n 168 9.9624521e+05 5.55e+00 1.59e+04 -1.0 2.48e+02 -19.6 4.63e-03 1.40e-02f 1\n 169 9.9527357e+05 5.55e+00 1.58e+04 -1.0 2.44e+02 -19.1 5.96e-04 5.87e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 170 9.9489625e+05 5.55e+00 1.56e+04 -1.0 2.47e+02 -19.6 1.04e-03 2.42e-04f 1\n 171 9.9489118e+05 5.55e+00 1.50e+04 -1.0 2.53e+02 -19.1 1.24e-03 2.86e-06f 1\n 172 9.9421219e+05 5.54e+00 1.57e+04 -1.0 2.49e+02 -19.6 3.63e-06 4.37e-04f 1\n 173 9.8486650e+05 5.51e+00 1.68e+04 -1.0 2.46e+02 -19.1 5.48e-04 5.68e-03f 1\n 174 9.7308682e+05 5.47e+00 1.65e+04 -1.0 2.49e+02 -19.6 8.25e-03 7.61e-03f 1\n 175 9.7212896e+05 5.47e+00 1.65e+04 -1.0 2.44e+02 -19.1 3.20e-04 5.80e-04f 1\n 176 9.6540396e+05 5.44e+00 1.75e+04 -1.0 2.49e+02 -19.6 9.01e-04 4.34e-03f 1\n 177 9.4998516e+05 5.39e+00 1.51e+04 -1.0 2.44e+02 -19.1 1.93e-02 9.44e-03f 1\n 178 9.4904520e+05 5.39e+00 1.52e+04 -1.0 2.45e+02 -19.6 2.19e-04 6.06e-04f 1\n 179 9.4902274e+05 5.39e+00 1.51e+04 -1.0 2.75e+02 -19.1 1.62e-04 1.14e-05f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 180 9.3880492e+05 5.35e+00 1.67e+04 -1.0 2.29e+02 -19.6 1.38e-05 6.45e-03f 1\n 181 8.9559690e+05 5.21e+00 1.79e+04 -1.0 2.29e+02 -19.1 1.06e-02 2.71e-02f 1\n 182 8.5549874e+05 5.07e+00 1.93e+04 -1.0 2.22e+02 -19.6 4.07e-03 2.60e-02f 1\n 183 8.4452994e+05 5.04e+00 1.99e+04 -1.0 2.17e+02 -19.1 4.96e-04 7.46e-03f 1\n 184 8.2921005e+05 4.98e+00 1.98e+04 -1.0 2.14e+02 -19.6 9.95e-03 1.09e-02f 1\n 185 8.2905253e+05 4.98e+00 1.90e+04 -1.0 2.09e+02 -19.1 5.75e-03 1.10e-04f 1\n 186 
8.2904529e+05 4.98e+00 1.89e+04 -1.0 2.18e+02 -19.6 7.15e-05 5.41e-06f 1\n 187 8.2712955e+05 4.97e+00 1.92e+04 -1.0 2.14e+02 -19.1 5.72e-05 1.35e-03f 1\n 188 8.0950543e+05 4.91e+00 2.28e+04 -1.0 2.31e+02 -19.6 1.11e-03 1.34e-02f 1\n 189 8.0862616e+05 4.90e+00 2.24e+04 -1.0 2.12e+02 -19.1 1.92e-03 6.51e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 190 8.0425351e+05 4.89e+00 2.35e+04 -1.0 2.22e+02 -19.6 4.57e-04 3.51e-03f 1\n 191 7.9537870e+05 4.85e+00 2.31e+04 -1.0 2.15e+02 -19.1 7.37e-03 6.61e-03f 1\n 192 7.9489154e+05 4.85e+00 2.29e+04 -1.0 2.11e+02 -19.6 1.29e-03 3.92e-04f 1\n 193 7.8751827e+05 4.83e+00 2.29e+04 -1.0 2.01e+02 -19.1 2.56e-03 5.71e-03f 1\n 194 7.8736120e+05 4.82e+00 2.31e+04 -1.0 2.00e+02 -19.6 1.54e-04 1.32e-04f 1\n 195 7.8402757e+05 4.81e+00 2.31e+04 -1.0 1.94e+02 -19.1 2.19e-03 2.63e-03f 1\n 196 7.8388845e+05 4.81e+00 2.53e+04 -1.0 1.98e+02 -19.6 2.15e-04 1.18e-04f 1\n 197 7.8385262e+05 4.81e+00 2.50e+04 -1.0 1.94e+02 -19.1 4.34e-03 2.82e-05f 1\n 198 7.8236217e+05 4.81e+00 1.09e+04 -1.0 1.97e+02 -19.6 3.11e-04 1.27e-03f 1\n 199 7.7035462e+05 4.76e+00 1.08e+04 -1.0 1.91e+02 -19.1 3.91e-04 9.52e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 200 7.6826638e+05 4.75e+00 1.08e+04 -1.0 1.90e+02 -19.6 2.68e-02 1.77e-03f 1\n 201 7.6137802e+05 4.73e+00 1.07e+04 -1.0 1.80e+02 -19.1 5.08e-04 5.41e-03f 1\n 202 7.6092627e+05 4.72e+00 1.07e+04 -1.0 1.85e+02 -19.6 8.12e-03 3.85e-04f 1\n 203 6.8620409e+05 4.44e+00 1.26e+04 -1.0 1.80e+02 -19.1 1.57e-04 6.08e-02f 1\n 204 6.8223234e+05 4.42e+00 1.15e+04 -1.0 1.85e+02 -19.6 6.43e-03 3.54e-03f 1\n 205 6.7550321e+05 4.40e+00 1.16e+04 -1.0 1.74e+02 -19.1 5.10e-03 5.69e-03f 1\n 206 6.7512988e+05 4.39e+00 1.00e+04 -1.0 1.86e+02 -19.6 8.13e-03 3.47e-04f 1\n 207 6.6341460e+05 4.35e+00 9.84e+03 -1.0 1.72e+02 -19.1 1.30e-02 1.04e-02f 1\n 208 6.5075026e+05 4.30e+00 1.09e+04 -1.0 1.87e+02 -19.6 2.10e-04 1.19e-02f 1\n 209 6.4469528e+05 4.27e+00 1.05e+04 -1.0 1.72e+02 -19.1 7.52e-03 5.37e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 210 6.3074280e+05 4.22e+00 1.26e+04 -1.0 1.85e+02 -19.6 4.48e-04 1.32e-02f 1\n 211 6.3015403e+05 4.21e+00 1.26e+04 -1.0 3.38e+01 2.3 1.77e-03 7.51e-04f 1\n 212 6.3013669e+05 4.21e+00 1.47e+04 -1.0 1.21e+02 1.8 1.17e-03 1.79e-05f 1\n 213 6.2710947e+05 4.20e+00 1.45e+04 -1.0 1.26e+02 1.4 4.90e-04 2.86e-03f 1\n 214 6.2100935e+05 4.18e+00 1.49e+04 -1.0 1.53e+02 0.9 1.86e-03 5.85e-03f 1\n 215 6.1066877e+05 4.14e+00 1.59e+04 -1.0 1.97e+02 0.4 1.05e-05 8.89e-03f 1\n 216 6.0984620e+05 4.14e+00 1.57e+04 -1.0 1.84e+02 1.7 8.49e-03 1.01e-03f 1\n 217 6.0751052e+05 4.13e+00 1.57e+04 -1.0 2.02e+02 1.3 5.49e-03 2.08e-03f 1\n 218 6.0707832e+05 4.12e+00 1.57e+04 -1.0 2.01e+02 1.7 2.32e-04 5.32e-04f 1\n 219 6.0707327e+05 4.12e+00 1.53e+04 -1.0 1.96e+02 2.1 2.90e-03 7.52e-06f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 220 6.0555478e+05 4.12e+00 1.54e+04 -1.0 2.25e+02 1.6 9.57e-04 1.84e-03f 1\n 221 6.0553909e+05 4.12e+00 1.47e+04 -1.0 2.14e+02 2.1 5.83e-03 2.26e-05f 1\n 222 6.0387470e+05 4.11e+00 1.47e+04 -1.0 2.27e+02 1.6 2.04e-03 1.80e-03f 1\n 223 6.0325229e+05 4.11e+00 1.47e+04 -1.0 2.20e+02 2.0 4.35e-04 7.72e-04f 1\n 224 6.0322399e+05 4.11e+00 1.45e+04 -1.0 2.21e+02 1.5 4.10e-03 2.96e-05f 1\n 225 6.0185557e+05 4.10e+00 1.45e+04 -1.0 2.17e+02 2.0 1.59e-05 1.70e-03f 1\n 226 6.0180800e+05 4.10e+00 1.44e+04 -1.0 2.29e+02 1.5 1.37e-03 4.63e-05f 1\n 227 5.9923339e+05 4.09e+00 1.44e+04 -1.0 2.27e+02 1.9 2.84e-03 
2.85e-03f 1\n 228 5.9851909e+05 4.09e+00 1.44e+04 -1.0 2.29e+02 1.4 9.28e-04 6.31e-04f 1\n 229 5.9692058e+05 4.08e+00 1.44e+04 -1.0 2.28e+02 1.9 1.94e-03 1.77e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 230 5.9689878e+05 4.08e+00 1.44e+04 -1.0 2.28e+02 1.4 1.22e-05 1.79e-05f 1\n 231 5.9684337e+05 4.08e+00 1.44e+04 -1.0 2.28e+02 1.8 7.71e-05 6.11e-05f 1\n 232 5.9682024e+05 4.08e+00 1.44e+04 -1.0 2.28e+02 1.3 2.55e-06 2.10e-05f 1\n 233 5.9680869e+05 4.08e+00 1.38e+04 -1.0 2.28e+02 1.8 7.05e-03 1.36e-05f 1\n 234 5.9076367e+05 4.05e+00 1.41e+04 -1.0 2.30e+02 1.3 5.00e-04 5.99e-03f 1\n 235 5.9073170e+05 4.05e+00 1.42e+04 -1.0 2.32e+02 1.7 4.66e-03 3.40e-05f 1\n 236 5.8521172e+05 4.03e+00 1.45e+04 -1.0 2.32e+02 1.2 8.81e-04 5.50e-03f 1\n 237 5.8376022e+05 4.03e+00 1.44e+04 -1.0 2.30e+02 0.8 2.07e-06 1.04e-03f 1\n 238 5.8372062e+05 4.03e+00 1.53e+04 -1.0 2.35e+02 2.1 3.62e-03 4.81e-05f 1\n 239 5.7779448e+05 4.00e+00 1.55e+04 -1.0 2.39e+02 1.6 1.33e-02 6.22e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 240 5.7771432e+05 4.00e+00 1.57e+04 -1.0 2.26e+02 2.0 1.00e-03 9.89e-05f 1\n 241 5.7665827e+05 4.00e+00 1.57e+04 -1.0 2.07e+02 1.6 9.92e-04 1.10e-03f 1\n 242 5.7511556e+05 3.99e+00 1.55e+04 -1.0 2.12e+02 2.0 1.09e-03 1.92e-03f 1\n 243 5.7052438e+05 3.97e+00 1.52e+04 -1.0 2.09e+02 1.5 2.73e-04 4.68e-03f 1\n 244 5.6998537e+05 3.97e+00 1.51e+04 -1.0 2.36e+02 1.9 3.75e-04 6.54e-04f 1\n 245 5.6612860e+05 3.95e+00 1.48e+04 -1.0 2.37e+02 1.5 3.47e-05 3.75e-03f 1\n 246 5.6572525e+05 3.95e+00 1.50e+04 -1.0 2.36e+02 1.9 2.57e-03 4.85e-04f 1\n 247 5.6516789e+05 3.95e+00 1.51e+04 -1.0 2.36e+02 1.4 1.32e-03 4.83e-04f 1\n 248 5.6316860e+05 3.94e+00 1.52e+04 -1.0 2.36e+02 1.8 3.05e-03 2.38e-03f 1\n 249 5.6254967e+05 3.94e+00 1.50e+04 -1.0 2.35e+02 1.3 7.91e-06 4.87e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 250 5.6077033e+05 3.93e+00 1.49e+04 -1.0 2.36e+02 1.8 1.29e-03 2.17e-03f 1\n 251 5.6022313e+05 3.93e+00 1.47e+04 -1.0 2.34e+02 1.3 2.29e-06 3.07e-04f 1\n 252 5.5692514e+05 3.91e+00 1.49e+04 -1.0 2.36e+02 1.7 8.13e-03 3.96e-03f 1\n 253 5.5673567e+05 3.91e+00 1.49e+04 -1.0 2.31e+02 1.2 2.68e-06 9.58e-05f 1\n 254 5.5535652e+05 3.91e+00 1.55e+04 -1.0 2.33e+02 1.7 1.14e-02 1.64e-03f 1\n 255 5.5534273e+05 3.91e+00 1.56e+04 -1.0 2.30e+02 2.1 4.75e-04 1.85e-05f 1\n 256 5.5514554e+05 3.90e+00 1.71e+04 -1.0 2.27e+02 1.6 1.32e-02 2.20e-04f 1\n 257 5.5141803e+05 3.90e+00 1.48e+04 -1.0 4.83e+02 1.1 1.93e-05 1.30e-03f 1\n 258 5.4693933e+05 3.88e+00 1.42e+04 -1.0 2.39e+02 1.6 2.91e-04 4.86e-03f 1\n 259 5.4687686e+05 3.88e+00 1.43e+04 -1.0 2.52e+02 1.1 2.34e-03 6.08e-05f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 260 5.4353115e+05 3.87e+00 1.41e+04 -1.0 2.42e+02 1.5 1.34e-03 3.66e-03f 1\n 261 5.4346337e+05 3.87e+00 1.41e+04 -1.0 2.07e+02 1.0 1.56e-04 6.65e-05f 1\n 262 5.4335021e+05 3.87e+00 1.42e+04 -1.0 1.95e+02 1.5 2.22e-03 1.26e-04f 1\n 263 5.4296226e+05 3.86e+00 1.43e+04 -1.0 1.86e+02 1.0 2.63e-03 3.71e-04f 1\n 264 5.3812113e+05 3.84e+00 1.40e+04 -1.0 2.13e+02 1.4 2.17e-05 5.31e-03f 1\n 265 5.3811408e+05 3.84e+00 1.41e+04 -1.0 2.23e+02 1.8 3.29e-04 8.06e-06f 1\n 266 5.3810662e+05 3.84e+00 1.41e+04 -1.0 2.43e+02 1.4 9.90e-04 7.83e-06f 1\n 267 5.3692599e+05 3.84e+00 1.38e+04 -1.0 2.30e+02 1.8 8.84e-05 1.33e-03f 1\n 268 5.3689601e+05 3.84e+00 1.38e+04 -1.0 1.90e+02 1.3 1.05e-03 3.21e-05f 1\n 269 5.3480708e+05 3.83e+00 1.32e+04 -1.0 2.44e+02 0.8 2.23e-04 1.25e-03f 1\niter objective inf_pr 
inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 270 5.3431885e+05 3.83e+00 1.34e+04 -1.0 1.83e+02 1.3 4.31e-03 5.17e-04f 1\n 271 5.3310063e+05 3.83e+00 1.31e+04 -1.0 1.89e+02 1.7 2.86e-05 1.41e-03f 1\n 272 5.3150897e+05 3.82e+00 1.22e+04 -1.0 1.85e+02 2.1 1.96e-04 1.98e-03f 1\n 273 5.3147543e+05 3.82e+00 1.22e+04 -1.0 1.61e+02 1.6 1.35e-04 3.82e-05f 1\n 274 5.3137612e+05 3.82e+00 1.24e+04 -1.0 1.76e+02 1.2 3.23e-03 1.02e-04f 1\n 275 5.3106158e+05 3.82e+00 1.29e+04 -1.0 1.79e+02 1.6 5.34e-03 3.64e-04f 1\n 276 5.2929724e+05 3.81e+00 1.28e+04 -1.0 1.73e+02 1.1 5.52e-05 1.84e-03f 1\n 277 5.2907597e+05 3.81e+00 1.29e+04 -1.0 1.63e+02 2.4 4.41e-04 3.68e-04f 1\n 278 5.2816992e+05 3.80e+00 1.27e+04 -1.0 1.88e+02 2.0 5.12e-04 1.14e-03f 1\n 279 5.2807415e+05 3.80e+00 1.27e+04 -1.0 1.48e+03 1.5 1.26e-06 8.93e-05f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 280 5.2805145e+05 3.80e+00 1.26e+04 -1.0 1.94e+02 1.0 1.55e-03 2.34e-05f 1\n 281 5.2494578e+05 3.79e+00 1.26e+04 -1.0 1.75e+02 1.4 7.38e-04 3.61e-03f 1\n 282 5.2382656e+05 3.79e+00 1.20e+04 -1.0 6.75e+02 1.0 1.61e-05 3.04e-04f 1\n 283 5.2294012e+05 3.78e+00 1.20e+04 -1.0 1.74e+02 1.4 1.78e-03 9.88e-04f 1\n 284 5.2002561e+05 3.77e+00 1.24e+04 -1.0 1.76e+02 1.8 7.11e-03 3.79e-03f 1\n 285 5.1038171e+05 3.73e+00 1.19e+04 -1.0 1.71e+02 1.3 6.29e-05 1.07e-02f 1\n 286 5.1029468e+05 3.73e+00 1.19e+04 -1.0 1.84e+02 1.8 7.42e-04 1.12e-04f 1\n 287 5.1028585e+05 3.73e+00 1.21e+04 -1.0 2.01e+02 2.2 4.17e-04 1.31e-05f 1\n 288 5.0835844e+05 3.72e+00 1.19e+04 -1.0 2.75e+02 1.7 1.15e-05 2.41e-03f 1\n 289 5.0829350e+05 3.72e+00 1.19e+04 -1.0 1.71e+02 1.2 3.97e-04 6.12e-05f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 290 5.0786133e+05 3.72e+00 1.30e+04 -1.0 1.77e+02 1.7 1.37e-02 5.49e-04f 1\n 291 4.9734337e+05 3.69e+00 1.06e+04 -1.0 1.72e+02 1.2 8.48e-04 8.86e-03f 1\n 292 4.9689003e+05 3.68e+00 1.08e+04 -1.0 1.74e+02 1.6 3.20e-03 5.66e-04f 1\n 293 4.9257017e+05 3.67e+00 9.15e+03 -1.0 1.62e+02 1.1 7.85e-05 3.01e-03f 1\n 294 4.9140020e+05 3.67e+00 9.02e+03 -1.0 2.41e+02 1.6 4.58e-05 1.41e-03f 1\n 295 4.9127824e+05 3.67e+00 1.14e+04 -1.0 1.61e+02 1.1 2.66e-03 6.68e-05f 1\n 296 4.9066209e+05 3.66e+00 1.29e+04 -1.0 1.75e+02 1.5 1.82e-02 7.38e-04f 1\n 297 4.8899995e+05 3.66e+00 1.45e+04 -1.0 5.05e+02 1.0 9.31e-04 4.06e-04f 1\n 298 4.7750162e+05 3.61e+00 1.35e+04 -1.0 1.76e+02 1.5 4.98e-03 1.35e-02f 1\n 299 4.7434464e+05 3.61e+00 1.49e+04 -1.0 9.97e+02 1.0 6.27e-04 4.14e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 300 4.7339897e+05 3.61e+00 1.50e+04 -1.0 1.73e+02 1.4 2.13e-03 1.10e-03f 1\n 301 4.7161223e+05 3.61e+00 1.40e+04 -1.0 3.25e+02 0.9 1.03e-04 5.77e-04f 1\n 302 4.7150987e+05 3.60e+00 1.43e+04 -1.0 2.00e+02 0.4 6.17e-03 9.97e-05f 1\n 303 4.6068296e+05 3.57e+00 1.36e+04 -1.0 3.30e+02 -0.0 4.88e-05 1.06e-02f 1\n 304 4.5827805e+05 3.56e+00 1.49e+04 -1.0 1.68e+02 1.3 1.36e-02 2.70e-03f 1\n 305 4.5598068e+05 3.55e+00 1.41e+04 -1.0 4.79e+02 0.8 4.48e-04 7.06e-04f 1\n 306 4.5518400e+05 3.55e+00 1.43e+04 -1.0 1.59e+02 1.2 2.67e-03 8.88e-04f 1\n 307 4.5309751e+05 3.55e+00 1.72e+04 -1.0 2.98e+02 0.8 2.61e-03 8.91e-04f 1\n 308 4.4826799e+05 3.53e+00 1.86e+04 -1.0 1.66e+02 1.2 1.97e-02 5.39e-03f 1\n 309 4.4337229e+05 3.51e+00 1.70e+04 -1.0 1.62e+02 0.7 4.61e-05 4.04e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 310 4.3957043e+05 3.50e+00 2.22e+04 -1.0 1.64e+02 1.1 5.11e-02 4.22e-03f 1\n 311 4.3840763e+05 3.50e+00 2.35e+04 -1.0 1.68e+02 0.7 4.15e-03 
9.47e-04f 1\n 312 4.2663652e+05 3.45e+00 2.43e+04 -1.0 1.62e+02 1.1 2.95e-02 1.34e-02f 1\n 313 4.1771582e+05 3.43e+00 2.40e+04 -1.0 1.75e+02 0.6 6.75e-03 7.00e-03f 1\n 314 4.1583852e+05 3.42e+00 2.27e+04 -1.0 8.29e+02 0.1 1.03e-04 3.62e-04f 1\n 315 4.0823036e+05 3.40e+00 2.35e+04 -1.0 1.82e+02 0.6 9.82e-03 6.83e-03f 1\n 316 4.0742102e+05 3.40e+00 2.31e+04 -1.0 2.88e+02 0.1 7.31e-05 5.39e-04f 1\n 317 4.0736672e+05 3.40e+00 2.36e+04 -1.0 4.84e+02 -0.4 7.32e-03 5.87e-05f 1\n 318 3.9419770e+05 3.35e+00 2.18e+04 -1.0 7.13e+02 -0.9 8.53e-05 1.33e-02f 1\n 319 3.8663282e+05 3.33e+00 1.92e+04 -1.0 8.56e+02 -1.3 2.10e-04 5.79e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 320 3.8579331e+05 3.33e+00 2.13e+04 -1.0 9.22e+02 -1.8 7.40e-03 7.22e-04f 1\n 321 3.7974323e+05 3.32e+00 1.94e+04 -1.0 9.50e+02 -2.3 1.97e-04 4.91e-03f 1\n 322 3.7938905e+05 3.31e+00 2.15e+04 -1.0 9.51e+02 -2.8 8.43e-03 3.27e-04f 1\n 323 3.7037180e+05 3.29e+00 1.92e+04 -1.0 9.55e+02 -3.3 1.13e-04 8.55e-03f 1\n 324 3.6908213e+05 3.28e+00 1.94e+04 -1.0 9.41e+02 -3.7 2.53e-03 1.30e-03f 1\n 325 3.6277620e+05 3.26e+00 1.81e+04 -1.0 2.17e+02 0.3 8.07e-04 7.29e-03f 1\n 326 3.5989666e+05 3.24e+00 1.79e+04 -1.0 1.55e+02 1.6 1.27e-03 5.36e-03f 1\n 327 3.5985661e+05 3.24e+00 1.79e+04 -1.0 1.59e+02 2.1 9.24e-04 1.04e-04f 1\n 328 3.5935266e+05 3.24e+00 1.79e+04 -1.0 1.62e+02 1.6 6.26e-05 8.73e-04f 1\n 329 3.5783375e+05 3.23e+00 1.78e+04 -1.0 1.63e+02 2.0 3.50e-03 3.63e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 330 3.5743587e+05 3.22e+00 1.78e+04 -1.0 1.61e+02 1.5 3.51e-05 6.78e-04f 1\n 331 3.5668635e+05 3.22e+00 1.78e+04 -1.0 1.52e+02 1.1 2.73e-04 1.08e-03f 1\n 332 3.5630331e+05 3.22e+00 1.78e+04 -1.0 1.67e+02 0.6 3.51e-03 4.92e-04f 1\n 333 3.5562828e+05 3.22e+00 1.78e+04 -1.0 2.82e+02 0.1 3.34e-03 7.59e-04f 1\n 334 3.5480287e+05 3.21e+00 1.78e+04 -1.0 4.82e+02 -0.4 4.21e-05 9.86e-04f 1\n 335 3.5438009e+05 3.21e+00 1.78e+04 -1.0 6.55e+02 -0.8 1.29e-04 5.04e-04f 1\n 336 3.5436086e+05 3.21e+00 1.78e+04 -1.0 8.12e+02 -1.3 2.56e-03 2.29e-05f 1\n 337 3.5193976e+05 3.20e+00 1.77e+04 -1.0 8.82e+02 -1.8 1.07e-03 2.89e-03f 1\n 338 3.5190047e+05 3.20e+00 1.77e+04 -1.0 9.10e+02 -2.3 7.21e-03 4.74e-05f 1\n 339 3.5093683e+05 3.20e+00 1.77e+04 -1.0 7.47e+02 -1.0 1.35e-05 1.13e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 340 3.5052498e+05 3.20e+00 1.77e+04 -1.0 2.07e+02 0.4 5.15e-04 5.29e-04f 1\n 341 3.3750946e+05 3.14e+00 1.74e+04 -1.0 3.58e+02 -0.1 1.67e-05 1.64e-02f 1\n 342 3.3747338e+05 3.14e+00 1.74e+04 -1.0 5.75e+02 -0.6 6.58e-03 4.69e-05f 1\n 343 3.3562091e+05 3.14e+00 1.74e+04 -1.0 7.66e+02 -1.1 1.05e-04 2.42e-03f 1\n 344 3.3510890e+05 3.13e+00 1.73e+04 -1.0 8.41e+02 -1.5 1.22e-03 6.80e-04f 1\n 345 3.2871807e+05 3.11e+00 1.72e+04 -1.0 8.84e+02 -2.0 3.02e-03 8.53e-03f 1\n 346 3.2777372e+05 3.10e+00 1.72e+04 -1.0 1.54e+02 1.1 7.57e-05 1.51e-03f 1\n 347 3.2772491e+05 3.10e+00 1.72e+04 -1.0 1.60e+02 1.6 2.40e-03 9.66e-05f 1\n 348 3.2718271e+05 3.10e+00 1.71e+04 -1.0 1.60e+02 2.0 4.86e-03 1.52e-03f 1\n 349 3.2706600e+05 3.10e+00 1.71e+04 -1.0 1.58e+02 1.5 1.31e-03 2.19e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 350 3.2406746e+05 3.08e+00 1.70e+04 -1.0 1.47e+02 1.0 1.75e-04 5.59e-03f 1\n 351 3.2366994e+05 3.08e+00 1.70e+04 -1.0 1.50e+02 1.5 4.30e-03 7.40e-04f 1\n 352 3.2356913e+05 3.08e+00 1.70e+04 -1.0 1.37e+02 1.0 1.11e-04 1.90e-04f 1\n 353 3.2355506e+05 3.08e+00 1.70e+04 -1.0 1.49e+02 1.4 3.22e-03 2.36e-05f 1\n 354 
3.2316524e+05 3.07e+00 1.70e+04 -1.0 1.40e+02 0.9 1.38e-03 6.09e-04f 1\n 355r 3.2316524e+05 3.07e+00 1.00e+03 0.5 0.00e+00 0.4 0.00e+00 3.57e-07R 8\n 356r 3.2229084e+05 3.07e+00 1.00e+03 0.5 2.06e+03 - 6.18e-03 7.60e-04f 1\n 357r 3.2109567e+05 3.06e+00 9.98e+02 0.5 1.80e+03 - 1.71e-03 8.09e-04f 1\n 358r 3.1149974e+05 2.97e+00 9.95e+02 0.5 1.67e+03 - 3.74e-03 6.54e-03f 1\n 359r 3.1087190e+05 2.97e+00 9.95e+02 0.5 8.58e+02 - 9.83e-05 4.32e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 360r 3.0989090e+05 2.96e+00 9.97e+02 0.5 9.94e+02 - 6.50e-03 6.44e-04f 1\n 361r 3.0241262e+05 2.87e+00 9.92e+02 0.5 9.07e+02 - 2.36e-03 5.34e-03f 1\n 362r 2.9694919e+05 2.80e+00 9.80e+02 0.5 8.52e+02 - 1.55e-02 4.79e-03f 1\n 363r 2.8929254e+05 2.68e+00 9.72e+02 0.5 6.39e+02 - 1.25e-02 8.39e-03f 1\n 364 2.8919644e+05 2.68e+00 9.52e+03 -1.0 1.36e+02 -0.0 2.03e-04 1.66e-04f 1\n 365 2.8908661e+05 2.68e+00 9.52e+03 -1.0 1.38e+02 -0.5 3.89e-04 1.90e-04f 1\n 366 2.8861557e+05 2.68e+00 9.51e+03 -1.0 1.34e+02 -1.0 2.45e-04 8.16e-04f 1\n 367 2.8784152e+05 2.67e+00 9.50e+03 -1.0 1.29e+02 -1.5 5.67e-04 1.35e-03f 1\n 368 2.8557520e+05 2.66e+00 9.46e+03 -1.0 1.18e+02 -1.9 1.40e-03 3.97e-03f 1\n 369 2.8048140e+05 2.64e+00 9.38e+03 -1.0 1.31e+02 -2.4 2.51e-03 9.01e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 370 2.7807299e+05 2.63e+00 9.34e+03 -1.0 1.50e+02 -2.9 1.64e-03 4.32e-03f 1\n 371 2.7069758e+05 2.59e+00 9.21e+03 -1.0 1.60e+02 -3.4 3.85e-03 1.34e-02f 1\n 372 2.5748481e+05 2.53e+00 8.98e+03 -1.0 1.59e+02 -3.8 4.45e-03 2.48e-02f 1\n 373 2.5409647e+05 2.51e+00 8.92e+03 -1.0 1.53e+02 -4.3 1.21e-02 6.62e-03f 1\n 374 2.4261880e+05 2.45e+00 8.72e+03 -1.0 1.50e+02 -4.8 6.04e-03 2.29e-02f 1\n 375 2.2822588e+05 2.38e+00 8.45e+03 -1.0 1.46e+02 -5.3 1.51e-02 3.02e-02f 1\n 376 2.0965467e+05 2.28e+00 8.10e+03 -1.0 1.41e+02 -5.8 1.82e-02 4.16e-02f 1\n 377 2.0936932e+05 2.28e+00 8.10e+03 -1.0 1.37e+02 -6.2 3.27e-02 6.83e-04f 1\n 378 2.0780500e+05 2.27e+00 8.07e+03 -1.0 1.37e+02 -6.7 4.67e-03 3.76e-03f 1\n 379 1.9606770e+05 2.20e+00 7.84e+03 -1.0 1.38e+02 -7.2 7.84e-04 2.87e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 380 1.7935187e+05 2.11e+00 7.49e+03 -1.0 1.38e+02 -7.7 2.58e-02 4.37e-02f 1\n 381 1.7765925e+05 2.10e+00 7.46e+03 -1.0 1.36e+02 -8.1 4.29e-02 4.74e-03f 1\n 382 1.6495345e+05 2.02e+00 7.18e+03 -1.0 1.34e+02 -8.6 1.77e-02 3.65e-02f 1\n 383 1.6340175e+05 2.01e+00 7.15e+03 -1.0 1.30e+02 -9.1 1.84e-02 4.73e-03f 1\n 384 1.5788016e+05 1.98e+00 7.03e+03 -1.0 1.29e+02 -9.6 3.72e-03 1.71e-02f 1\n 385 1.5064127e+05 1.93e+00 6.87e+03 -1.0 1.28e+02 -10.1 8.10e-03 2.33e-02f 1\n 386 1.2939352e+05 1.79e+00 6.36e+03 -1.0 1.25e+02 -10.5 1.85e-02 7.34e-02f 1\n 387 1.2646803e+05 1.77e+00 6.29e+03 -1.0 1.18e+02 -11.0 4.15e-02 1.14e-02f 1\n 388 1.1471323e+05 1.68e+00 5.99e+03 -1.0 1.16e+02 -11.5 1.69e-02 4.78e-02f 1\n 389 1.0992888e+05 1.65e+00 5.86e+03 -1.0 1.11e+02 -12.0 7.97e-02 2.12e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 390 1.0212816e+05 1.59e+00 5.65e+03 -1.0 1.07e+02 -12.4 1.86e-03 3.63e-02f 1\n 391 8.6177113e+04 1.46e+00 5.19e+03 -1.0 1.04e+02 -12.9 7.26e-02 8.17e-02f 1\n 392 8.5370527e+04 1.45e+00 5.16e+03 -1.0 9.48e+01 -13.4 3.22e-02 4.71e-03f 1\n 393 7.4077318e+04 1.35e+00 4.81e+03 -1.0 9.38e+01 -13.9 1.85e-02 6.88e-02f 1\n 394 7.1177623e+04 1.33e+00 4.71e+03 -1.0 8.78e+01 -14.3 6.45e-02 1.99e-02f 1\n 395 6.4716733e+04 1.26e+00 4.49e+03 -1.0 8.51e+01 -14.8 4.54e-03 4.67e-02f 1\n 396 
5.7488482e+04 1.19e+00 4.23e+03 -1.0 8.14e+01 -15.3 9.91e-03 5.78e-02f 1\n 397 4.8754351e+04 1.10e+00 3.90e+03 -1.0 7.88e+01 -15.8 6.42e-02 7.95e-02f 1\n 398 4.7745315e+04 1.08e+00 3.86e+03 -1.0 7.72e+01 -16.3 9.49e-02 1.05e-02f 1\n 399 4.1977413e+04 1.02e+00 3.61e+03 -1.0 7.89e+01 -16.7 6.18e-02 6.27e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 400 3.3677208e+04 9.10e-01 3.24e+03 -1.0 7.75e+01 -17.2 7.45e-03 1.05e-01f 1\n 401 3.0166245e+04 8.61e-01 3.06e+03 -1.0 7.30e+01 -17.7 2.38e-02 5.39e-02f 1\n 402 2.5223779e+04 7.87e-01 2.80e+03 -1.0 7.10e+01 -18.2 1.21e-01 8.61e-02f 1\n 403 2.2341250e+04 7.40e-01 2.63e+03 -1.0 6.89e+01 -18.6 1.27e-01 5.92e-02f 1\n 404 1.8863927e+04 6.80e-01 2.42e+03 -1.0 6.77e+01 -19.1 8.08e-02 8.17e-02f 1\n 405 1.5848263e+04 6.23e-01 2.21e+03 -1.0 6.47e+01 -19.6 5.68e-02 8.40e-02f 1\n 406 1.4057233e+04 5.86e-01 2.08e+03 -1.0 6.57e+01 -19.1 8.62e-02 5.84e-02f 1\n 407 1.1013361e+04 5.19e-01 1.84e+03 -1.0 5.68e+01 -19.6 1.68e-01 1.16e-01f 1\n 408 7.9952027e+03 4.41e-01 1.57e+03 -1.0 5.34e+01 -19.1 1.48e-01 1.49e-01f 1\n 409 5.4065322e+03 3.62e-01 1.29e+03 -1.0 4.45e+01 -19.6 1.93e-01 1.80e-01f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 410 2.4913122e+03 2.44e-01 8.68e+02 -1.0 4.14e+01 -19.1 1.52e-01 3.26e-01f 1\n 411 1.0310441e+03 1.55e-01 5.51e+02 -1.0 2.82e+01 -19.6 2.47e-01 3.66e-01f 1\n 412 2.7434337e+02 7.52e-02 4.80e+02 -1.0 2.93e+01 -19.1 1.96e-01 5.14e-01f 1\n 413 5.4776714e+01 2.33e-02 7.33e+02 -1.0 7.94e+00 -19.6 4.50e-01 6.90e-01f 1\n 414 2.8195260e+01 3.14e-04 1.32e+03 -1.0 1.06e+01 -19.1 5.84e-01 1.00e+00f 1\n 415 3.2622344e+01 3.87e-05 1.06e+03 -1.0 1.60e+01 -19.6 7.02e-01 9.94e-01f 1\n 416 3.0040029e+01 4.65e-05 2.31e+02 -1.0 4.17e+01 -19.1 9.04e-01 1.00e+00f 1\n 417 3.0266079e+01 4.64e-04 4.41e+02 -1.0 1.21e+02 -19.6 1.00e+00 9.30e-01h 1\n 418 2.9559008e+01 3.42e-05 5.33e+01 -1.0 1.42e+01 -19.1 6.03e-01 1.00e+00h 1\n 419 2.9904880e+01 2.39e-06 6.35e-03 -1.0 6.68e+00 -19.6 1.00e+00 1.00e+00h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 420 2.4902915e+01 2.75e-05 1.10e+00 -2.5 1.14e+01 -19.1 4.91e-01 4.44e-01f 1\n 421 2.2342576e+01 8.15e-05 1.58e+00 -2.5 6.79e+01 -19.6 1.30e-01 3.02e-01f 1\n 422 2.0275308e+01 8.22e-05 1.07e+00 -2.5 6.41e+00 -3.7 3.38e-01 3.24e-01h 1\n 423 1.9264876e+01 9.78e-05 8.99e-01 -2.5 2.12e+01 -4.2 3.83e-01 1.56e-01h 1\n 424 1.8289856e+01 7.85e-05 6.81e-01 -2.5 6.19e+00 -4.6 2.31e-01 2.42e-01h 1\n 425 1.6132778e+01 1.96e-04 8.21e-01 -2.5 1.24e+01 -5.1 4.78e-01 7.58e-01h 1\n 426 1.5611473e+01 1.11e-04 5.01e-01 -2.5 1.64e+01 -5.6 4.71e-01 5.11e-01h 1\n 427 1.5163933e+01 4.19e-05 3.81e-01 -2.5 2.85e+00 -3.4 1.00e+00 6.97e-01h 1\n 428 1.5050001e+01 3.15e-05 4.60e-01 -2.5 3.60e+00 -3.8 6.17e-01 2.96e-01h 1\n 429 1.4900715e+01 5.30e-06 1.02e+00 -2.5 1.96e+00 -4.3 6.36e-01 1.00e+00h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 430 1.4825885e+01 2.55e-05 4.15e-01 -2.5 5.52e+00 -3.9 8.38e-01 5.50e-01h 1\n 431 1.4739809e+01 3.24e-05 3.56e-01 -2.5 1.98e+01 -4.4 1.99e-01 1.55e-01h 1\n 432 1.4596773e+01 2.88e-05 5.34e-01 -2.5 5.19e+00 -3.9 5.29e-01 7.94e-01h 1\n 433 1.4484823e+01 3.46e-05 3.39e-01 -2.5 3.05e+01 -4.4 2.32e-01 9.62e-02h 1\n 434 1.4369681e+01 4.72e-05 4.22e-01 -2.5 1.62e+01 -4.9 3.06e-01 5.62e-01h 1\n 435 1.4279373e+01 4.71e-05 3.94e+02 -2.5 3.10e+01 -5.4 4.70e-01 2.25e-01h 1\n 436 1.4208263e+01 2.19e-05 2.73e+02 -2.5 2.87e+01 -5.8 3.06e-01 1.00e+00h 1\n 437 1.4134986e+01 1.91e-05 1.24e+02 -2.5 1.93e+01 
-6.3 5.46e-01 6.57e-01h 1\n 438 1.4091516e+01 4.37e-05 3.46e+01 -2.5 2.35e+01 -6.8 7.19e-01 1.00e+00h 1\n 439 1.4087170e+01 4.09e-06 2.76e-04 -2.5 7.32e+00 -7.3 1.00e+00 1.00e+00h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 440 1.4088533e+01 1.91e-09 4.39e-07 -2.5 4.32e-01 -7.8 1.00e+00 1.00e+00h 1\n 441 1.3804149e+01 3.76e-06 7.14e+00 -3.8 4.84e+00 -8.2 6.51e-01 8.20e-01f 1\n 442 1.3706304e+01 1.35e-06 1.69e-03 -3.8 1.67e+00 -8.7 1.00e+00 1.00e+00h 1\n 443 1.3684885e+01 4.66e-08 3.95e-05 -3.8 4.07e-02 -9.2 1.00e+00 1.00e+00h 1\n 444 1.3664347e+01 8.58e-08 6.51e-02 -5.7 3.79e-01 -9.7 9.45e-01 9.46e-01h 1\n 445 1.3660196e+01 4.91e-08 1.08e+01 -5.7 4.56e-02 -10.1 1.00e+00 9.19e-01h 1\n 446 1.3659000e+01 4.92e-08 1.36e-06 -5.7 9.38e-03 -10.6 1.00e+00 1.00e+00f 1\n 447 1.3658600e+01 4.94e-08 9.08e+01 -8.6 3.53e-03 -11.1 9.94e-01 8.63e-01h 1\n 448 1.3658471e+01 5.57e-08 6.82e+00 -8.6 1.22e-03 -11.6 1.00e+00 9.90e-01f 1\n 449 1.3658445e+01 4.72e-08 2.13e-09 -8.6 4.22e-04 -12.0 1.00e+00 1.00e+00f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 450 1.3658439e+01 4.92e-08 3.85e-12 -8.6 1.81e-05 -12.5 1.00e+00 1.00e+00h 1\n 451 1.3658437e+01 4.92e-08 1.82e-12 -8.6 1.52e-06 -13.0 1.00e+00 1.00e+00h 1\n 452 1.3658437e+01 4.92e-08 9.09e-13 -8.6 6.26e-07 -13.5 1.00e+00 1.00e+00h 1\n 453 1.3658437e+01 4.92e-08 9.09e-13 -8.6 1.49e-07 -14.0 1.00e+00 1.00e+00h 1\n 454 1.3658437e+01 4.92e-08 3.64e-12 -8.6 9.58e-09 -14.4 1.00e+00 1.00e+00h 1\n 455 1.3658437e+01 4.92e-08 9.09e-13 -8.6 3.90e-11 -14.9 1.00e+00 1.00e+00h 1\n 456 1.3658437e+01 4.92e-08 9.09e-13 -8.6 1.39e-12 -15.4 1.00e+00 1.00e+00h 1\n 457 1.3658437e+01 4.92e-08 9.09e-13 -8.6 9.57e-13 -15.9 1.00e+00 1.00e+00h 1\n 458 1.3658437e+01 4.92e-08 9.09e-13 -8.6 7.94e-13 -16.3 1.00e+00 1.00e+00h 1\n 459 1.3658437e+01 4.92e-08 9.09e-13 -8.6 6.30e-13 -16.8 1.00e+00 1.00e+00h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 460 1.3658437e+01 4.92e-08 4.55e-13 -8.6 2.07e-12 -17.3 1.00e+00 1.00e+00h 1\n 461 1.3658437e+01 4.92e-08 9.09e-13 -8.6 2.88e-12 -17.8 1.00e+00 1.00e+00h 1\n 462 1.3658437e+01 4.92e-08 9.09e-13 -8.6 5.03e-12 -18.2 1.00e+00 1.00e+00h 1\n 463 1.3658437e+01 4.92e-08 4.55e-13 -8.6 3.27e-12 -18.7 1.00e+00 1.00e+00h 1\n\nNumber of Iterations....: 463\n\n (scaled) (unscaled)\nObjective...............: 1.3658436881286921e+01 1.3658436881286921e+01\nDual infeasibility......: 4.5474735088646412e-13 4.5474735088646412e-13\nConstraint violation....: 4.9168770388519079e-08 4.9168770388519079e-08\nComplementarity.........: 2.5059067889663204e-09 2.5059067889663204e-09\nOverall NLP error.......: 4.9168770388519079e-08 4.9168770388519079e-08\n\n\nNumber of objective function evaluations = 483\nNumber of objective gradient evaluations = 395\nNumber of equality constraint evaluations = 483\nNumber of inequality constraint evaluations = 483\nNumber of equality constraint Jacobian evaluations = 467\nNumber of inequality constraint Jacobian evaluations = 467\nNumber of Lagrangian Hessian evaluations = 463\nTotal CPU secs in IPOPT (w/o function evaluations) = 4.510\nTotal CPU secs in NLP function evaluations = 10.087\n\nEXIT: Solved To Acceptable Level.\n solver : t_proc (avg) t_wall (avg) n_eval\n nlp_f | 29.34ms ( 60.74us) 29.31ms ( 60.69us) 483\n nlp_g | 524.38ms ( 1.09ms) 524.13ms ( 1.09ms) 483\n nlp_grad | 3.10ms ( 3.10ms) 3.10ms ( 3.10ms) 1\n nlp_grad_f | 36.14ms ( 91.26us) 36.16ms ( 91.31us) 396\n nlp_hess_l | 6.33 s ( 13.76ms) 6.31 s ( 13.71ms) 460\n nlp_jac_g | 3.20 s 
( 6.85ms) 3.19 s ( 6.83ms) 468\n total | 14.70 s ( 14.70 s) 14.61 s ( 14.61 s) 1\nThis is Ipopt version 3.12.3, running with linear solver mumps.\nNOTE: Other linear solvers might be more efficient (see Ipopt documentation).\n\nNumber of nonzeros in equality constraint Jacobian...: 4230\nNumber of nonzeros in inequality constraint Jacobian.: 3466\nNumber of nonzeros in Lagrangian Hessian.............: 3648\n\nTotal number of variables............................: 2293\n variables with only lower bounds: 0\n variables with lower and upper bounds: 0\n variables with only upper bounds: 0\nTotal number of equality constraints.................: 1377\nTotal number of inequality constraints...............: 967\n inequality constraints with only lower bounds: 204\n inequality constraints with lower and upper bounds: 355\n inequality constraints with only upper bounds: 408\n\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 0 0.0000000e+00 9.83e+00 7.50e-01 -1.0 0.00e+00 - 0.00e+00 0.00e+00 0\n 1 1.0399716e-02 9.83e+00 1.26e+00 -1.0 1.20e+02 -4.0 4.93e-04 3.26e-04h 1\n 2 2.0045559e-02 9.83e+00 2.20e+01 -1.0 1.82e+02 -4.5 3.56e-04 3.85e-05h 1\n 3 4.4710489e-01 9.83e+00 2.52e+01 -1.0 2.46e+02 -5.0 3.88e-04 3.37e-04h 1\n 4 2.5591113e+00 9.83e+00 6.47e+01 -1.0 3.34e+02 -5.4 1.11e-03 4.95e-04h 1\n 5 1.0304744e+01 9.83e+00 1.06e+02 -1.0 2.54e+02 -5.9 1.15e-03 5.03e-04h 1\n 6 7.9216962e+01 9.82e+00 1.08e+02 -1.0 1.86e+02 -6.4 1.38e-03 1.35e-03h 1\n 7 1.6205310e+02 9.82e+00 1.46e+02 -1.0 1.46e+02 -6.9 1.48e-03 8.87e-04h 1\n 8 2.2025207e+02 9.83e+00 1.56e+02 -1.0 3.22e+02 -7.3 6.46e-04 4.87e-04h 1\n 9 2.2441787e+02 9.83e+00 1.54e+02 -1.0 4.45e+03 -7.8 8.53e-06 3.28e-05h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 10 2.5182294e+02 9.82e+00 1.42e+02 -1.0 9.50e+02 -8.3 8.65e-06 2.07e-04h 1\n 11 2.8777432e+02 9.81e+00 1.66e+02 -1.0 4.28e+02 -8.8 6.37e-04 2.55e-04h 1\n 12 4.0157811e+02 9.80e+00 1.67e+02 -1.0 1.45e+02 -9.2 7.37e-04 7.08e-04h 1\n 13 4.1550901e+02 9.80e+00 1.78e+02 -1.0 1.56e+02 -9.7 2.52e-04 7.97e-05h 1\n 14 6.1114602e+02 9.80e+00 1.21e+02 -1.0 1.50e+02 -10.2 8.33e-05 1.00e-03h 1\n 15 6.1355211e+02 9.80e+00 1.82e+02 -1.0 1.68e+02 -10.7 9.96e-04 1.13e-05h 1\n 16 6.2428940e+02 9.80e+00 2.68e+02 -1.0 3.50e+02 -11.2 1.43e-03 5.02e-05h 1\n 17r 6.2428940e+02 9.80e+00 9.99e+02 1.0 0.00e+00 -11.6 0.00e+00 4.03e-07R 5\n 18r 8.6414419e+02 9.80e+00 9.99e+02 1.0 6.29e+03 - 3.71e-04 1.98e-05f 1\n 19r 3.3647143e+03 9.80e+00 9.99e+02 1.0 1.65e+03 - 3.45e-04 1.15e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 20r 5.1092797e+04 9.80e+00 9.98e+02 1.0 7.67e+02 - 3.88e-04 6.81e-04f 1\n 21r 6.3342770e+04 9.79e+00 1.07e+03 1.0 7.22e+00 2.0 1.50e-01 1.14e-02f 1\n 22r 1.7594664e+05 9.73e+00 1.03e+03 1.0 3.13e+00 2.4 2.75e-01 2.46e-01f 1\n 23r 1.6728527e+05 9.68e+00 1.07e+03 1.0 2.44e+00 2.9 3.31e-02 1.26e-01f 1\n 24r 1.7195513e+05 9.51e+00 1.09e+03 1.0 5.75e-01 3.3 8.31e-01 1.00e+00f 1\n 25r 1.7774546e+05 9.35e+00 1.07e+03 1.0 1.83e+00 2.8 3.09e-01 3.74e-01f 1\n 26r 1.7826098e+05 8.80e+00 1.06e+03 1.0 5.11e+00 2.3 3.60e-01 5.80e-01f 1\n 27 1.7790304e+05 8.75e+00 2.67e+03 -1.0 9.73e+01 -12.1 1.05e-03 4.89e-03f 1\n 28 1.7790581e+05 8.75e+00 2.67e+03 -1.0 9.99e+01 -12.6 4.15e-03 8.71e-05h 1\n 29 1.7805578e+05 8.74e+00 2.67e+03 -1.0 1.49e+02 -13.1 2.98e-03 1.65e-03h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 30 1.7826045e+05 8.73e+00 2.66e+03 -1.0 1.00e+02 -13.5 5.02e-03 1.36e-03h 1\n 31 1.7833630e+05 
8.72e+00 2.66e+03 -1.0 9.67e+01 -14.0 4.35e-03 3.57e-04h 1\n 32 1.7838366e+05 8.72e+00 2.66e+03 -1.0 9.66e+01 -14.5 5.53e-03 1.85e-04h 1\n 33 1.7897357e+05 8.70e+00 2.66e+03 -1.0 9.66e+01 -15.0 6.03e-03 1.99e-03h 1\n 34 1.7979138e+05 8.68e+00 2.65e+03 -1.0 9.65e+01 -15.5 5.02e-03 2.33e-03h 1\n 35 1.8083548e+05 8.66e+00 2.64e+03 -1.0 9.65e+01 -15.9 5.14e-03 2.59e-03h 1\n 36 1.8126368e+05 8.65e+00 2.64e+03 -1.0 9.68e+01 -16.4 1.38e-03 9.55e-04h 1\n 37 1.8126992e+05 8.65e+00 2.64e+03 -1.0 9.72e+01 -16.9 2.04e-05 1.32e-05h 1\n 38 1.8127749e+05 8.65e+00 2.64e+03 -1.0 9.75e+01 -17.4 5.22e-03 1.49e-05h 1\n 39 1.8272429e+05 8.63e+00 2.63e+03 -1.0 9.81e+01 -17.8 3.94e-04 2.72e-03h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 40 1.8373146e+05 8.61e+00 2.63e+03 -1.0 9.79e+01 -18.3 2.78e-05 1.86e-03h 1\n 41 1.8460711e+05 8.60e+00 2.63e+03 -1.0 9.79e+01 -18.8 6.10e-03 1.58e-03h 1\n 42 1.8506845e+05 8.59e+00 3.03e+03 -1.0 9.77e+01 -19.3 3.60e-03 8.05e-04h 1\n 43 1.8630538e+05 8.57e+00 2.81e+03 -1.0 9.79e+01 -19.7 1.23e-03 2.15e-03h 1\n 44 1.8693912e+05 8.56e+00 2.69e+03 -1.0 9.91e+01 -19.1 6.31e-04 1.18e-03h 1\n 45 1.8714858e+05 8.56e+00 2.61e+03 -1.0 9.72e+01 -19.6 1.86e-05 3.60e-04h 1\n 46 1.8833659e+05 8.54e+00 3.03e+03 -1.0 9.83e+01 -19.1 4.39e-03 2.16e-03h 1\n 47 1.8900050e+05 8.53e+00 3.12e+03 -1.0 9.71e+01 -19.6 1.51e-03 1.12e-03h 1\n 48 1.8921851e+05 8.53e+00 3.04e+03 -1.0 9.85e+01 -19.1 2.64e-05 3.81e-04h 1\n 49 1.8974673e+05 8.52e+00 2.87e+03 -1.0 9.66e+01 -19.6 1.53e-04 8.26e-04h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 50 1.8975438e+05 8.52e+00 3.74e+03 -1.0 9.81e+01 -19.1 3.80e-03 1.32e-05h 1\n 51 1.9002449e+05 8.52e+00 4.95e+03 -1.0 9.84e+01 -19.6 3.14e-03 3.97e-04h 1\n 52 1.9113127e+05 8.50e+00 5.17e+03 -1.0 9.86e+01 -19.1 2.66e-03 1.79e-03h 1\n 53 1.9218965e+05 8.49e+00 4.74e+03 -1.0 9.66e+01 -19.6 1.49e-05 1.46e-03h 1\n 54 1.9235900e+05 8.49e+00 4.77e+03 -1.0 9.69e+01 -19.1 3.43e-04 2.55e-04h 1\n 55 1.9614877e+05 8.45e+00 3.24e+03 -1.0 9.55e+01 -19.6 4.82e-05 4.46e-03h 1\n 56 1.9767353e+05 8.43e+00 3.55e+03 -1.0 9.43e+01 -19.1 3.11e-03 2.00e-03h 1\n 57 2.0045953e+05 8.41e+00 3.54e+03 -1.0 9.38e+01 -19.6 3.09e-03 3.11e-03h 1\n 58 2.0058633e+05 8.41e+00 4.32e+03 -1.0 9.34e+01 -19.1 2.74e-03 1.60e-04h 1\n 59r 2.0058633e+05 8.41e+00 1.00e+03 0.9 0.00e+00 -19.6 0.00e+00 4.42e-07R 5\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 60r 2.0053275e+05 8.41e+00 1.00e+03 0.9 3.17e+03 - 3.55e-04 6.45e-05f 1\n 61r 2.0041460e+05 8.41e+00 1.00e+03 0.9 1.58e+03 - 3.29e-04 1.19e-04f 1\n 62r 2.0024799e+05 8.41e+00 9.99e+02 0.9 1.43e+03 - 7.29e-04 1.04e-03f 1\n 63r 2.0178166e+05 8.42e+00 9.97e+02 0.9 9.93e+02 - 2.24e-03 1.49e-03f 1\n 64r 2.0902033e+05 8.42e+00 9.92e+02 0.9 5.71e+02 - 1.11e-02 3.85e-03f 1\n 65r 2.3136420e+05 8.41e+00 9.83e+02 0.9 3.09e+02 - 8.14e-03 9.32e-03f 1\n 66r 2.6153891e+05 8.40e+00 9.88e+02 0.9 3.62e+02 - 7.16e-03 1.05e-02f 1\n 67r 2.7798903e+05 8.40e+00 9.64e+02 0.9 3.55e+02 - 1.32e-02 9.11e-03f 1\n 68r 2.8657577e+05 8.40e+00 9.59e+02 0.9 2.47e+02 - 2.08e-02 4.88e-03f 1\n 69r 2.9527415e+05 8.40e+00 9.55e+02 0.9 1.55e+02 - 2.61e-03 4.50e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 70r 3.0527057e+05 8.40e+00 9.50e+02 0.9 5.20e+02 - 6.42e-03 5.12e-03f 1\n 71r 3.3582481e+05 8.40e+00 9.58e+02 0.9 1.01e+02 - 3.07e-02 1.51e-02f 1\n 72r 4.1107309e+05 8.41e+00 9.04e+02 0.9 1.14e+02 - 2.39e-02 3.35e-02f 1\n 73r 4.5964398e+05 8.41e+00 8.95e+02 0.9 1.15e+02 - 3.62e-02 
2.11e-02f 1\n 74r 5.2016472e+05 8.41e+00 8.67e+02 0.9 1.36e+02 - 4.89e-02 3.03e-02f 1\n 75r 5.9030592e+05 8.41e+00 8.31e+02 0.9 1.48e+02 - 6.01e-02 3.79e-02f 1\n 76r 6.7476964e+05 8.41e+00 7.88e+02 0.9 1.61e+02 - 7.73e-02 4.59e-02f 1\n 77r 7.7561624e+05 8.41e+00 7.44e+02 0.9 1.82e+02 - 1.18e-01 5.62e-02f 1\n 78r 8.4945381e+05 8.41e+00 7.06e+02 0.9 4.62e+02 - 4.89e-03 5.46e-02f 1\n 79r 9.0140487e+05 8.41e+00 6.78e+02 0.9 2.50e+02 - 1.14e-01 3.60e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 80r 9.7546515e+05 8.41e+00 6.36e+02 0.9 2.54e+02 - 1.43e-01 6.16e-02f 1\n 81r 1.1190008e+06 8.38e+00 5.68e+02 0.9 3.76e+02 - 3.36e-02 1.08e-01f 1\n 82r 1.1225339e+06 8.37e+00 5.49e+02 0.9 8.82e+02 - 8.26e-03 3.25e-02f 1\n 83r 1.1286850e+06 8.36e+00 5.46e+02 0.9 4.12e+02 - 3.25e-02 6.75e-03f 1\n 84r 1.2659338e+06 8.29e+00 3.86e+02 0.9 7.87e+00 0.0 3.42e-01 2.93e-01f 1\n 85r 1.2612169e+06 8.28e+00 3.74e+02 0.2 2.38e+02 - 1.60e-02 3.04e-02f 1\n 86r 1.3908950e+06 8.17e+00 3.95e+02 0.2 3.02e+02 - 2.38e-02 7.50e-02f 1\n 87r 1.4392216e+06 8.15e+00 2.99e+02 0.2 7.97e+00 0.4 1.84e-01 1.68e-01f 1\n 88r 1.4749437e+06 8.14e+00 2.77e+02 0.2 5.34e+01 -0.1 4.57e-02 3.68e-02f 1\n 89r 1.4776820e+06 8.12e+00 2.77e+02 0.2 8.08e+02 - 3.91e-03 5.72e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 90r 1.4904632e+06 8.09e+00 3.10e+02 0.2 2.23e+02 - 1.15e-01 3.01e-02f 1\n 91r 1.4780369e+06 8.03e+00 2.85e+02 0.2 2.60e+02 - 1.24e-02 4.77e-02f 1\n 92r 1.4633854e+06 7.98e+00 2.84e+02 0.2 2.35e+02 - 1.42e-01 5.45e-02f 1\n 93r 1.4468075e+06 7.94e+00 2.89e+02 0.2 3.39e+02 - 2.25e-02 4.04e-02f 1\n 94r 1.4177297e+06 7.84e+00 3.28e+02 0.2 3.67e+02 - 4.08e-02 1.06e-01f 1\n 95r 1.4208335e+06 7.83e+00 3.24e+02 0.2 3.41e+02 - 4.16e-02 2.22e-02f 1\n 96r 1.4035701e+06 7.79e+00 2.82e+02 0.2 3.36e+02 - 6.49e-02 9.28e-02f 1\n 97r 1.3981555e+06 7.76e+00 2.08e+02 0.2 3.03e+01 -0.5 1.71e-01 7.29e-02f 1\n 98r 1.3736085e+06 7.75e+00 2.02e+02 0.2 4.74e+02 - 2.70e-02 1.88e-02f 1\n 99r 1.3738774e+06 7.70e+00 1.87e+02 0.2 4.77e+02 - 7.99e-02 9.61e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 100r 1.3580286e+06 7.69e+00 2.14e+02 0.2 4.67e+02 - 1.03e-01 3.83e-02f 1\n 101r 1.3021262e+06 7.68e+00 2.15e+02 0.2 5.22e+02 - 1.39e-01 9.61e-02f 1\n 102r 1.2905134e+06 7.68e+00 2.12e+02 0.2 5.78e+02 - 5.56e-02 4.11e-02f 1\n 103r 1.2958912e+06 7.68e+00 1.61e+02 0.2 6.56e+02 - 2.71e-02 1.08e-01f 1\n 104r 1.3293811e+06 7.69e+00 1.47e+02 0.2 7.20e+02 - 8.99e-02 8.45e-02f 1\n 105r 1.4013282e+06 7.70e+00 1.56e+02 0.2 9.31e+02 - 7.51e-02 1.05e-01f 1\n 106r 1.5010415e+06 7.67e+00 2.27e+02 0.2 3.80e+01 -1.0 6.34e-02 1.79e-01f 1\n 107r 1.5310867e+06 7.66e+00 2.10e+02 0.2 5.35e+01 -1.5 5.32e-02 3.54e-02f 1\n 108r 1.6858235e+06 7.64e+00 2.07e+02 0.2 2.27e+02 -2.0 1.41e-02 1.65e-02f 1\n 109r 1.6992342e+06 7.64e+00 2.00e+02 0.2 4.36e+01 -0.6 3.64e-02 3.57e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 110r 1.6720537e+06 7.63e+00 1.42e+02 0.2 2.59e+00 0.7 3.58e-01 1.88e-01f 1\n 111r 1.6393971e+06 7.62e+00 1.61e+02 0.2 5.96e+00 0.2 6.27e-02 2.98e-01f 1\n 112r 1.5942856e+06 7.62e+00 9.69e+01 0.2 3.94e+00 0.6 5.59e-01 6.60e-01f 1\n 113r 1.5454439e+06 7.61e+00 8.14e+01 -0.5 9.57e+00 0.2 2.87e-01 3.13e-01f 1\n 114r 1.5348572e+06 7.52e+00 4.26e+01 -0.5 2.28e+01 -0.3 2.19e-01 1.39e-01f 1\n 115 1.5101190e+06 7.46e+00 1.69e+04 -1.0 7.31e+02 -19.1 1.74e-03 8.11e-03f 1\n 116 1.5081170e+06 7.45e+00 1.69e+04 -1.0 6.47e+02 -19.6 1.66e-03 6.70e-04f 1\n 117 
1.5000973e+06 7.43e+00 1.68e+04 -1.0 6.33e+02 -19.1 1.83e-03 2.70e-03f 1\n 118 1.4972338e+06 7.43e+00 1.68e+04 -1.0 6.14e+02 -19.6 4.62e-03 9.87e-04f 1\n 119 1.4926222e+06 7.41e+00 1.68e+04 -1.0 5.92e+02 -19.1 4.76e-03 1.69e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 120 1.4897946e+06 7.40e+00 1.68e+04 -1.0 5.73e+02 -19.6 1.24e-03 1.25e-03f 1\n 121 1.4867964e+06 7.39e+00 1.67e+04 -1.0 5.67e+02 -19.1 1.24e-02 1.40e-03f 1\n 122 1.4811114e+06 7.37e+00 1.67e+04 -1.0 5.31e+02 -19.6 2.94e-03 2.99e-03f 1\n 123 1.4776862e+06 7.36e+00 1.67e+04 -1.0 5.21e+02 -19.1 1.64e-02 1.85e-03f 1\n 124 1.4722508e+06 7.34e+00 1.66e+04 -1.0 4.87e+02 -19.6 5.29e-03 3.07e-03f 1\n 125 1.4686209e+06 7.32e+00 1.66e+04 -1.0 4.76e+02 -19.1 8.82e-03 2.07e-03f 1\n 126 1.4549412e+06 7.26e+00 1.64e+04 -1.0 4.61e+02 -19.6 8.60e-03 7.87e-03f 1\n 127 1.4439147e+06 7.22e+00 1.63e+04 -1.0 4.42e+02 -19.1 3.68e-03 6.45e-03f 1\n 128 1.4318869e+06 7.17e+00 1.62e+04 -1.0 4.33e+02 -19.6 2.58e-04 7.02e-03f 1\n 129 1.4287865e+06 7.15e+00 1.62e+04 -1.0 4.28e+02 -19.1 9.28e-03 1.82e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 130 1.4117953e+06 7.08e+00 1.60e+04 -1.0 4.17e+02 -19.6 1.21e-02 9.95e-03f 1\n 131 1.4094629e+06 7.07e+00 1.60e+04 -1.0 3.99e+02 -19.1 1.22e-02 1.30e-03f 1\n 132 1.3898684e+06 6.99e+00 1.58e+04 -1.0 3.86e+02 -19.6 6.69e-03 1.09e-02f 1\n 133 1.3666966e+06 6.91e+00 1.56e+04 -1.0 3.73e+02 -19.1 1.23e-04 1.25e-02f 1\n 134 1.3653874e+06 6.90e+00 1.56e+04 -1.0 3.64e+02 -19.6 8.61e-03 7.06e-04f 1\n 135 1.3600774e+06 6.88e+00 1.56e+04 -1.0 3.58e+02 -19.1 4.77e-04 2.88e-03f 1\n 136 1.3599863e+06 6.88e+00 1.56e+04 -1.0 3.54e+02 -19.6 1.16e-04 4.90e-05f 1\n 137 1.3520824e+06 6.85e+00 1.55e+04 -1.0 3.56e+02 -19.1 2.25e-02 4.63e-03f 1\n 138 1.3198881e+06 6.72e+00 1.52e+04 -1.0 3.32e+02 -19.6 4.47e-03 1.93e-02f 1\n 139 1.3018334e+06 6.65e+00 1.51e+04 -1.0 3.23e+02 -19.1 3.31e-02 1.03e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 140 1.3001178e+06 6.64e+00 1.50e+04 -1.0 3.09e+02 -19.6 2.72e-03 1.11e-03f 1\n 141 1.2712885e+06 6.53e+00 1.48e+04 -1.0 3.10e+02 -19.1 1.01e-04 1.69e-02f 1\n 142 1.2672814e+06 6.51e+00 1.48e+04 -1.0 3.20e+02 -19.6 4.18e-03 2.32e-03f 1\n 143 1.2662613e+06 6.51e+00 1.47e+04 -1.0 2.90e+02 -19.1 1.94e-02 6.22e-04f 1\n 144 1.2251694e+06 6.34e+00 1.43e+04 -1.0 2.82e+02 -19.6 2.71e-03 2.66e-02f 1\n 145 1.2238974e+06 6.33e+00 1.43e+04 -1.0 2.99e+02 -19.1 1.32e-03 7.91e-04f 1\n 146 1.2218795e+06 6.32e+00 1.43e+04 -1.0 3.08e+02 -19.6 1.65e-03 1.32e-03f 1\n 147 1.2218327e+06 6.32e+00 1.43e+04 -1.0 3.05e+02 -19.1 3.84e-03 2.74e-05f 1\n 148 1.1933966e+06 6.22e+00 1.41e+04 -1.0 3.06e+02 -19.6 7.34e-04 1.67e-02f 1\n 149 1.1923484e+06 6.21e+00 1.41e+04 -1.0 3.08e+02 -19.1 8.24e-03 5.84e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 150 1.1851137e+06 6.19e+00 1.40e+04 -1.0 2.76e+02 -19.6 7.09e-04 4.39e-03f 1\n 151 1.1674398e+06 6.13e+00 1.39e+04 -1.0 2.77e+02 -19.1 4.82e-03 9.94e-03f 1\n 152 1.1671054e+06 6.12e+00 1.39e+04 -1.0 2.72e+02 -19.6 4.03e-03 2.01e-04f 1\n 153 1.1607724e+06 6.10e+00 1.38e+04 -1.0 2.70e+02 -19.1 6.82e-04 3.72e-03f 1\n 154 1.1521951e+06 6.07e+00 1.37e+04 -1.0 2.70e+02 -19.6 1.57e-03 5.21e-03f 1\n 155 1.1512500e+06 6.07e+00 1.37e+04 -1.0 2.70e+02 -19.1 7.99e-04 5.27e-04f 1\n 156 1.1507231e+06 6.06e+00 1.37e+04 -1.0 2.68e+02 -19.6 4.24e-04 3.27e-04f 1\n 157 1.1507040e+06 6.06e+00 1.37e+04 -1.0 2.81e+02 -19.1 5.41e-04 9.69e-06f 1\n 158 1.1467730e+06 6.05e+00 1.37e+04 
-1.0 2.63e+02 -19.6 8.26e-06 2.11e-03f 1\n 159 1.0732380e+06 5.82e+00 1.32e+04 -1.0 2.63e+02 -19.1 2.67e-04 3.91e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 160 1.0714161e+06 5.81e+00 1.32e+04 -1.0 2.56e+02 -19.6 1.02e-02 9.30e-04f 1\n 161 1.0713627e+06 5.81e+00 1.32e+04 -1.0 2.54e+02 -19.1 8.09e-05 2.84e-05f 1\n 162 1.0580046e+06 5.76e+00 1.31e+04 -1.0 2.54e+02 -19.6 1.35e-04 7.69e-03f 1\n 163 1.0545534e+06 5.75e+00 1.30e+04 -1.0 2.54e+02 -19.1 3.39e-03 1.89e-03f 1\n 164 1.0341517e+06 5.68e+00 1.30e+04 -1.0 2.52e+02 -19.6 8.74e-04 1.22e-02f 1\n 165 1.0336429e+06 5.68e+00 1.29e+04 -1.0 2.51e+02 -19.1 6.22e-04 2.90e-04f 1\n 166 1.0275521e+06 5.66e+00 1.38e+04 -1.0 2.51e+02 -19.6 4.36e-04 3.68e-03f 1\n 167 1.0179732e+06 5.63e+00 1.32e+04 -1.0 2.50e+02 -19.1 7.95e-03 5.53e-03f 1\n 168 9.9624521e+05 5.55e+00 1.59e+04 -1.0 2.48e+02 -19.6 4.63e-03 1.40e-02f 1\n 169 9.9527357e+05 5.55e+00 1.58e+04 -1.0 2.44e+02 -19.1 5.96e-04 5.87e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 170 9.9489625e+05 5.55e+00 1.56e+04 -1.0 2.47e+02 -19.6 1.04e-03 2.42e-04f 1\n 171 9.9489118e+05 5.55e+00 1.50e+04 -1.0 2.53e+02 -19.1 1.24e-03 2.86e-06f 1\n 172 9.9421219e+05 5.54e+00 1.57e+04 -1.0 2.49e+02 -19.6 3.63e-06 4.37e-04f 1\n 173 9.8486650e+05 5.51e+00 1.68e+04 -1.0 2.46e+02 -19.1 5.48e-04 5.68e-03f 1\n 174 9.7308682e+05 5.47e+00 1.65e+04 -1.0 2.49e+02 -19.6 8.25e-03 7.61e-03f 1\n 175 9.7212896e+05 5.47e+00 1.65e+04 -1.0 2.44e+02 -19.1 3.20e-04 5.80e-04f 1\n 176 9.6540396e+05 5.44e+00 1.75e+04 -1.0 2.49e+02 -19.6 9.01e-04 4.34e-03f 1\n 177 9.4998516e+05 5.39e+00 1.51e+04 -1.0 2.44e+02 -19.1 1.93e-02 9.44e-03f 1\n 178 9.4904520e+05 5.39e+00 1.52e+04 -1.0 2.45e+02 -19.6 2.19e-04 6.06e-04f 1\n 179 9.4902274e+05 5.39e+00 1.51e+04 -1.0 2.75e+02 -19.1 1.62e-04 1.14e-05f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 180 9.3880492e+05 5.35e+00 1.67e+04 -1.0 2.29e+02 -19.6 1.38e-05 6.45e-03f 1\n 181 8.9559690e+05 5.21e+00 1.79e+04 -1.0 2.29e+02 -19.1 1.06e-02 2.71e-02f 1\n 182 8.5549874e+05 5.07e+00 1.93e+04 -1.0 2.22e+02 -19.6 4.07e-03 2.60e-02f 1\n 183 8.4452994e+05 5.04e+00 1.99e+04 -1.0 2.17e+02 -19.1 4.96e-04 7.46e-03f 1\n 184 8.2921005e+05 4.98e+00 1.98e+04 -1.0 2.14e+02 -19.6 9.95e-03 1.09e-02f 1\n 185 8.2905253e+05 4.98e+00 1.90e+04 -1.0 2.09e+02 -19.1 5.75e-03 1.10e-04f 1\n 186 8.2904529e+05 4.98e+00 1.89e+04 -1.0 2.18e+02 -19.6 7.15e-05 5.41e-06f 1\n 187 8.2712955e+05 4.97e+00 1.92e+04 -1.0 2.14e+02 -19.1 5.72e-05 1.35e-03f 1\n 188 8.0950543e+05 4.91e+00 2.28e+04 -1.0 2.31e+02 -19.6 1.11e-03 1.34e-02f 1\n 189 8.0862616e+05 4.90e+00 2.24e+04 -1.0 2.12e+02 -19.1 1.92e-03 6.51e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 190 8.0425351e+05 4.89e+00 2.35e+04 -1.0 2.22e+02 -19.6 4.57e-04 3.51e-03f 1\n 191 7.9537870e+05 4.85e+00 2.31e+04 -1.0 2.15e+02 -19.1 7.37e-03 6.61e-03f 1\n 192 7.9489154e+05 4.85e+00 2.29e+04 -1.0 2.11e+02 -19.6 1.29e-03 3.92e-04f 1\n 193 7.8751827e+05 4.83e+00 2.29e+04 -1.0 2.01e+02 -19.1 2.56e-03 5.71e-03f 1\n 194 7.8736120e+05 4.82e+00 2.31e+04 -1.0 2.00e+02 -19.6 1.54e-04 1.32e-04f 1\n 195 7.8402757e+05 4.81e+00 2.31e+04 -1.0 1.94e+02 -19.1 2.19e-03 2.63e-03f 1\n 196 7.8388845e+05 4.81e+00 2.53e+04 -1.0 1.98e+02 -19.6 2.15e-04 1.18e-04f 1\n 197 7.8385262e+05 4.81e+00 2.50e+04 -1.0 1.94e+02 -19.1 4.34e-03 2.82e-05f 1\n 198 7.8236217e+05 4.81e+00 1.09e+04 -1.0 1.97e+02 -19.6 3.11e-04 1.27e-03f 1\n 199 7.7035462e+05 4.76e+00 1.08e+04 -1.0 1.91e+02 -19.1 3.91e-04 
9.52e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 200 7.6826638e+05 4.75e+00 1.08e+04 -1.0 1.90e+02 -19.6 2.68e-02 1.77e-03f 1\n 201 7.6137802e+05 4.73e+00 1.07e+04 -1.0 1.80e+02 -19.1 5.08e-04 5.41e-03f 1\n 202 7.6092627e+05 4.72e+00 1.07e+04 -1.0 1.85e+02 -19.6 8.12e-03 3.85e-04f 1\n 203 6.8620409e+05 4.44e+00 1.26e+04 -1.0 1.80e+02 -19.1 1.57e-04 6.08e-02f 1\n 204 6.8223234e+05 4.42e+00 1.15e+04 -1.0 1.85e+02 -19.6 6.43e-03 3.54e-03f 1\n 205 6.7550321e+05 4.40e+00 1.16e+04 -1.0 1.74e+02 -19.1 5.10e-03 5.69e-03f 1\n 206 6.7512988e+05 4.39e+00 1.00e+04 -1.0 1.86e+02 -19.6 8.13e-03 3.47e-04f 1\n 207 6.6341460e+05 4.35e+00 9.84e+03 -1.0 1.72e+02 -19.1 1.30e-02 1.04e-02f 1\n 208 6.5075026e+05 4.30e+00 1.09e+04 -1.0 1.87e+02 -19.6 2.10e-04 1.19e-02f 1\n 209 6.4469528e+05 4.27e+00 1.05e+04 -1.0 1.72e+02 -19.1 7.52e-03 5.37e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 210 6.3074280e+05 4.22e+00 1.26e+04 -1.0 1.85e+02 -19.6 4.48e-04 1.32e-02f 1\n 211 6.3015403e+05 4.21e+00 1.26e+04 -1.0 3.38e+01 2.3 1.77e-03 7.51e-04f 1\n 212 6.3013669e+05 4.21e+00 1.47e+04 -1.0 1.21e+02 1.8 1.17e-03 1.79e-05f 1\n 213 6.2710947e+05 4.20e+00 1.45e+04 -1.0 1.26e+02 1.4 4.90e-04 2.86e-03f 1\n 214 6.2100935e+05 4.18e+00 1.49e+04 -1.0 1.53e+02 0.9 1.86e-03 5.85e-03f 1\n 215 6.1066877e+05 4.14e+00 1.59e+04 -1.0 1.97e+02 0.4 1.05e-05 8.89e-03f 1\n 216 6.0984620e+05 4.14e+00 1.57e+04 -1.0 1.84e+02 1.7 8.49e-03 1.01e-03f 1\n 217 6.0751052e+05 4.13e+00 1.57e+04 -1.0 2.02e+02 1.3 5.49e-03 2.08e-03f 1\n 218 6.0707832e+05 4.12e+00 1.57e+04 -1.0 2.01e+02 1.7 2.32e-04 5.32e-04f 1\n 219 6.0707327e+05 4.12e+00 1.53e+04 -1.0 1.96e+02 2.1 2.90e-03 7.52e-06f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 220 6.0555478e+05 4.12e+00 1.54e+04 -1.0 2.25e+02 1.6 9.57e-04 1.84e-03f 1\n 221 6.0553909e+05 4.12e+00 1.47e+04 -1.0 2.14e+02 2.1 5.83e-03 2.26e-05f 1\n 222 6.0387470e+05 4.11e+00 1.47e+04 -1.0 2.27e+02 1.6 2.04e-03 1.80e-03f 1\n 223 6.0325229e+05 4.11e+00 1.47e+04 -1.0 2.20e+02 2.0 4.35e-04 7.72e-04f 1\n 224 6.0322399e+05 4.11e+00 1.45e+04 -1.0 2.21e+02 1.5 4.10e-03 2.96e-05f 1\n 225 6.0185557e+05 4.10e+00 1.45e+04 -1.0 2.17e+02 2.0 1.59e-05 1.70e-03f 1\n 226 6.0180800e+05 4.10e+00 1.44e+04 -1.0 2.29e+02 1.5 1.37e-03 4.63e-05f 1\n 227 5.9923339e+05 4.09e+00 1.44e+04 -1.0 2.27e+02 1.9 2.84e-03 2.85e-03f 1\n 228 5.9851909e+05 4.09e+00 1.44e+04 -1.0 2.29e+02 1.4 9.28e-04 6.31e-04f 1\n 229 5.9692058e+05 4.08e+00 1.44e+04 -1.0 2.28e+02 1.9 1.94e-03 1.77e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 230 5.9689878e+05 4.08e+00 1.44e+04 -1.0 2.28e+02 1.4 1.22e-05 1.79e-05f 1\n 231 5.9684337e+05 4.08e+00 1.44e+04 -1.0 2.28e+02 1.8 7.71e-05 6.11e-05f 1\n 232 5.9682024e+05 4.08e+00 1.44e+04 -1.0 2.28e+02 1.3 2.55e-06 2.10e-05f 1\n 233 5.9680869e+05 4.08e+00 1.38e+04 -1.0 2.28e+02 1.8 7.05e-03 1.36e-05f 1\n 234 5.9076367e+05 4.05e+00 1.41e+04 -1.0 2.30e+02 1.3 5.00e-04 5.99e-03f 1\n 235 5.9073170e+05 4.05e+00 1.42e+04 -1.0 2.32e+02 1.7 4.66e-03 3.40e-05f 1\n 236 5.8521172e+05 4.03e+00 1.45e+04 -1.0 2.32e+02 1.2 8.81e-04 5.50e-03f 1\n 237 5.8376022e+05 4.03e+00 1.44e+04 -1.0 2.30e+02 0.8 2.07e-06 1.04e-03f 1\n 238 5.8372062e+05 4.03e+00 1.53e+04 -1.0 2.35e+02 2.1 3.62e-03 4.81e-05f 1\n 239 5.7779448e+05 4.00e+00 1.55e+04 -1.0 2.39e+02 1.6 1.33e-02 6.22e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 240 5.7771432e+05 4.00e+00 1.57e+04 -1.0 2.26e+02 2.0 1.00e-03 9.89e-05f 1\n 241 
5.7665827e+05 4.00e+00 1.57e+04 -1.0 2.07e+02 1.6 9.92e-04 1.10e-03f 1\n 242 5.7511556e+05 3.99e+00 1.55e+04 -1.0 2.12e+02 2.0 1.09e-03 1.92e-03f 1\n 243 5.7052438e+05 3.97e+00 1.52e+04 -1.0 2.09e+02 1.5 2.73e-04 4.68e-03f 1\n 244 5.6998537e+05 3.97e+00 1.51e+04 -1.0 2.36e+02 1.9 3.75e-04 6.54e-04f 1\n 245 5.6612860e+05 3.95e+00 1.48e+04 -1.0 2.37e+02 1.5 3.47e-05 3.75e-03f 1\n 246 5.6572525e+05 3.95e+00 1.50e+04 -1.0 2.36e+02 1.9 2.57e-03 4.85e-04f 1\n 247 5.6516789e+05 3.95e+00 1.51e+04 -1.0 2.36e+02 1.4 1.32e-03 4.83e-04f 1\n 248 5.6316860e+05 3.94e+00 1.52e+04 -1.0 2.36e+02 1.8 3.05e-03 2.38e-03f 1\n 249 5.6254967e+05 3.94e+00 1.50e+04 -1.0 2.35e+02 1.3 7.91e-06 4.87e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 250 5.6077033e+05 3.93e+00 1.49e+04 -1.0 2.36e+02 1.8 1.29e-03 2.17e-03f 1\n 251 5.6022313e+05 3.93e+00 1.47e+04 -1.0 2.34e+02 1.3 2.29e-06 3.07e-04f 1\n 252 5.5692514e+05 3.91e+00 1.49e+04 -1.0 2.36e+02 1.7 8.13e-03 3.96e-03f 1\n 253 5.5673567e+05 3.91e+00 1.49e+04 -1.0 2.31e+02 1.2 2.68e-06 9.58e-05f 1\n 254 5.5535652e+05 3.91e+00 1.55e+04 -1.0 2.33e+02 1.7 1.14e-02 1.64e-03f 1\n 255 5.5534273e+05 3.91e+00 1.56e+04 -1.0 2.30e+02 2.1 4.75e-04 1.85e-05f 1\n 256 5.5514554e+05 3.90e+00 1.71e+04 -1.0 2.27e+02 1.6 1.32e-02 2.20e-04f 1\n 257 5.5141803e+05 3.90e+00 1.48e+04 -1.0 4.83e+02 1.1 1.93e-05 1.30e-03f 1\n 258 5.4693933e+05 3.88e+00 1.42e+04 -1.0 2.39e+02 1.6 2.91e-04 4.86e-03f 1\n 259 5.4687686e+05 3.88e+00 1.43e+04 -1.0 2.52e+02 1.1 2.34e-03 6.08e-05f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 260 5.4353115e+05 3.87e+00 1.41e+04 -1.0 2.42e+02 1.5 1.34e-03 3.66e-03f 1\n 261 5.4346337e+05 3.87e+00 1.41e+04 -1.0 2.07e+02 1.0 1.56e-04 6.65e-05f 1\n 262 5.4335021e+05 3.87e+00 1.42e+04 -1.0 1.95e+02 1.5 2.22e-03 1.26e-04f 1\n 263 5.4296226e+05 3.86e+00 1.43e+04 -1.0 1.86e+02 1.0 2.63e-03 3.71e-04f 1\n 264 5.3812113e+05 3.84e+00 1.40e+04 -1.0 2.13e+02 1.4 2.17e-05 5.31e-03f 1\n 265 5.3811408e+05 3.84e+00 1.41e+04 -1.0 2.23e+02 1.8 3.29e-04 8.06e-06f 1\n 266 5.3810662e+05 3.84e+00 1.41e+04 -1.0 2.43e+02 1.4 9.90e-04 7.83e-06f 1\n 267 5.3692599e+05 3.84e+00 1.38e+04 -1.0 2.30e+02 1.8 8.84e-05 1.33e-03f 1\n 268 5.3689601e+05 3.84e+00 1.38e+04 -1.0 1.90e+02 1.3 1.05e-03 3.21e-05f 1\n 269 5.3480708e+05 3.83e+00 1.32e+04 -1.0 2.44e+02 0.8 2.23e-04 1.25e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 270 5.3431885e+05 3.83e+00 1.34e+04 -1.0 1.83e+02 1.3 4.31e-03 5.17e-04f 1\n 271 5.3310063e+05 3.83e+00 1.31e+04 -1.0 1.89e+02 1.7 2.86e-05 1.41e-03f 1\n 272 5.3150897e+05 3.82e+00 1.22e+04 -1.0 1.85e+02 2.1 1.96e-04 1.98e-03f 1\n 273 5.3147543e+05 3.82e+00 1.22e+04 -1.0 1.61e+02 1.6 1.35e-04 3.82e-05f 1\n 274 5.3137612e+05 3.82e+00 1.24e+04 -1.0 1.76e+02 1.2 3.23e-03 1.02e-04f 1\n 275 5.3106158e+05 3.82e+00 1.29e+04 -1.0 1.79e+02 1.6 5.34e-03 3.64e-04f 1\n 276 5.2929724e+05 3.81e+00 1.28e+04 -1.0 1.73e+02 1.1 5.52e-05 1.84e-03f 1\n 277 5.2907597e+05 3.81e+00 1.29e+04 -1.0 1.63e+02 2.4 4.41e-04 3.68e-04f 1\n 278 5.2816992e+05 3.80e+00 1.27e+04 -1.0 1.88e+02 2.0 5.12e-04 1.14e-03f 1\n 279 5.2807415e+05 3.80e+00 1.27e+04 -1.0 1.48e+03 1.5 1.26e-06 8.93e-05f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 280 5.2805145e+05 3.80e+00 1.26e+04 -1.0 1.94e+02 1.0 1.55e-03 2.34e-05f 1\n 281 5.2494578e+05 3.79e+00 1.26e+04 -1.0 1.75e+02 1.4 7.38e-04 3.61e-03f 1\n 282 5.2382656e+05 3.79e+00 1.20e+04 -1.0 6.75e+02 1.0 1.61e-05 3.04e-04f 1\n 283 5.2294012e+05 3.78e+00 1.20e+04 -1.0 
1.74e+02 1.4 1.78e-03 9.88e-04f 1\n 284 5.2002561e+05 3.77e+00 1.24e+04 -1.0 1.76e+02 1.8 7.11e-03 3.79e-03f 1\n 285 5.1038171e+05 3.73e+00 1.19e+04 -1.0 1.71e+02 1.3 6.29e-05 1.07e-02f 1\n 286 5.1029468e+05 3.73e+00 1.19e+04 -1.0 1.84e+02 1.8 7.42e-04 1.12e-04f 1\n 287 5.1028585e+05 3.73e+00 1.21e+04 -1.0 2.01e+02 2.2 4.17e-04 1.31e-05f 1\n 288 5.0835844e+05 3.72e+00 1.19e+04 -1.0 2.75e+02 1.7 1.15e-05 2.41e-03f 1\n 289 5.0829350e+05 3.72e+00 1.19e+04 -1.0 1.71e+02 1.2 3.97e-04 6.12e-05f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 290 5.0786133e+05 3.72e+00 1.30e+04 -1.0 1.77e+02 1.7 1.37e-02 5.49e-04f 1\n 291 4.9734337e+05 3.69e+00 1.06e+04 -1.0 1.72e+02 1.2 8.48e-04 8.86e-03f 1\n 292 4.9689003e+05 3.68e+00 1.08e+04 -1.0 1.74e+02 1.6 3.20e-03 5.66e-04f 1\n 293 4.9257017e+05 3.67e+00 9.15e+03 -1.0 1.62e+02 1.1 7.85e-05 3.01e-03f 1\n 294 4.9140020e+05 3.67e+00 9.02e+03 -1.0 2.41e+02 1.6 4.58e-05 1.41e-03f 1\n 295 4.9127824e+05 3.67e+00 1.14e+04 -1.0 1.61e+02 1.1 2.66e-03 6.68e-05f 1\n 296 4.9066209e+05 3.66e+00 1.29e+04 -1.0 1.75e+02 1.5 1.82e-02 7.38e-04f 1\n 297 4.8899995e+05 3.66e+00 1.45e+04 -1.0 5.05e+02 1.0 9.31e-04 4.06e-04f 1\n 298 4.7750162e+05 3.61e+00 1.35e+04 -1.0 1.76e+02 1.5 4.98e-03 1.35e-02f 1\n 299 4.7434464e+05 3.61e+00 1.49e+04 -1.0 9.97e+02 1.0 6.27e-04 4.14e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 300 4.7339897e+05 3.61e+00 1.50e+04 -1.0 1.73e+02 1.4 2.13e-03 1.10e-03f 1\n 301 4.7161223e+05 3.61e+00 1.40e+04 -1.0 3.25e+02 0.9 1.03e-04 5.77e-04f 1\n 302 4.7150987e+05 3.60e+00 1.43e+04 -1.0 2.00e+02 0.4 6.17e-03 9.97e-05f 1\n 303 4.6068296e+05 3.57e+00 1.36e+04 -1.0 3.30e+02 -0.0 4.88e-05 1.06e-02f 1\n 304 4.5827805e+05 3.56e+00 1.49e+04 -1.0 1.68e+02 1.3 1.36e-02 2.70e-03f 1\n 305 4.5598068e+05 3.55e+00 1.41e+04 -1.0 4.79e+02 0.8 4.48e-04 7.06e-04f 1\n 306 4.5518400e+05 3.55e+00 1.43e+04 -1.0 1.59e+02 1.2 2.67e-03 8.88e-04f 1\n 307 4.5309751e+05 3.55e+00 1.72e+04 -1.0 2.98e+02 0.8 2.61e-03 8.91e-04f 1\n 308 4.4826799e+05 3.53e+00 1.86e+04 -1.0 1.66e+02 1.2 1.97e-02 5.39e-03f 1\n 309 4.4337229e+05 3.51e+00 1.70e+04 -1.0 1.62e+02 0.7 4.61e-05 4.04e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 310 4.3957043e+05 3.50e+00 2.22e+04 -1.0 1.64e+02 1.1 5.11e-02 4.22e-03f 1\n 311 4.3840763e+05 3.50e+00 2.35e+04 -1.0 1.68e+02 0.7 4.15e-03 9.47e-04f 1\n 312 4.2663652e+05 3.45e+00 2.43e+04 -1.0 1.62e+02 1.1 2.95e-02 1.34e-02f 1\n 313 4.1771582e+05 3.43e+00 2.40e+04 -1.0 1.75e+02 0.6 6.75e-03 7.00e-03f 1\n 314 4.1583852e+05 3.42e+00 2.27e+04 -1.0 8.29e+02 0.1 1.03e-04 3.62e-04f 1\n 315 4.0823036e+05 3.40e+00 2.35e+04 -1.0 1.82e+02 0.6 9.82e-03 6.83e-03f 1\n 316 4.0742102e+05 3.40e+00 2.31e+04 -1.0 2.88e+02 0.1 7.31e-05 5.39e-04f 1\n 317 4.0736672e+05 3.40e+00 2.36e+04 -1.0 4.84e+02 -0.4 7.32e-03 5.87e-05f 1\n 318 3.9419770e+05 3.35e+00 2.18e+04 -1.0 7.13e+02 -0.9 8.53e-05 1.33e-02f 1\n 319 3.8663282e+05 3.33e+00 1.92e+04 -1.0 8.56e+02 -1.3 2.10e-04 5.79e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 320 3.8579331e+05 3.33e+00 2.13e+04 -1.0 9.22e+02 -1.8 7.40e-03 7.22e-04f 1\n 321 3.7974323e+05 3.32e+00 1.94e+04 -1.0 9.50e+02 -2.3 1.97e-04 4.91e-03f 1\n 322 3.7938905e+05 3.31e+00 2.15e+04 -1.0 9.51e+02 -2.8 8.43e-03 3.27e-04f 1\n 323 3.7037180e+05 3.29e+00 1.92e+04 -1.0 9.55e+02 -3.3 1.13e-04 8.55e-03f 1\n 324 3.6908213e+05 3.28e+00 1.94e+04 -1.0 9.41e+02 -3.7 2.53e-03 1.30e-03f 1\n 325 3.6277620e+05 3.26e+00 1.81e+04 -1.0 2.17e+02 0.3 8.07e-04 
7.29e-03f 1\n 326 3.5989666e+05 3.24e+00 1.79e+04 -1.0 1.55e+02 1.6 1.27e-03 5.36e-03f 1\n 327 3.5985661e+05 3.24e+00 1.79e+04 -1.0 1.59e+02 2.1 9.24e-04 1.04e-04f 1\n 328 3.5935266e+05 3.24e+00 1.79e+04 -1.0 1.62e+02 1.6 6.26e-05 8.73e-04f 1\n 329 3.5783375e+05 3.23e+00 1.78e+04 -1.0 1.63e+02 2.0 3.50e-03 3.63e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 330 3.5743587e+05 3.22e+00 1.78e+04 -1.0 1.61e+02 1.5 3.51e-05 6.78e-04f 1\n 331 3.5668635e+05 3.22e+00 1.78e+04 -1.0 1.52e+02 1.1 2.73e-04 1.08e-03f 1\n 332 3.5630331e+05 3.22e+00 1.78e+04 -1.0 1.67e+02 0.6 3.51e-03 4.92e-04f 1\n 333 3.5562828e+05 3.22e+00 1.78e+04 -1.0 2.82e+02 0.1 3.34e-03 7.59e-04f 1\n 334 3.5480287e+05 3.21e+00 1.78e+04 -1.0 4.82e+02 -0.4 4.21e-05 9.86e-04f 1\n 335 3.5438009e+05 3.21e+00 1.78e+04 -1.0 6.55e+02 -0.8 1.29e-04 5.04e-04f 1\n 336 3.5436086e+05 3.21e+00 1.78e+04 -1.0 8.12e+02 -1.3 2.56e-03 2.29e-05f 1\n 337 3.5193976e+05 3.20e+00 1.77e+04 -1.0 8.82e+02 -1.8 1.07e-03 2.89e-03f 1\n 338 3.5190047e+05 3.20e+00 1.77e+04 -1.0 9.10e+02 -2.3 7.21e-03 4.74e-05f 1\n 339 3.5093683e+05 3.20e+00 1.77e+04 -1.0 7.47e+02 -1.0 1.35e-05 1.13e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 340 3.5052498e+05 3.20e+00 1.77e+04 -1.0 2.07e+02 0.4 5.15e-04 5.29e-04f 1\n 341 3.3750946e+05 3.14e+00 1.74e+04 -1.0 3.58e+02 -0.1 1.67e-05 1.64e-02f 1\n 342 3.3747338e+05 3.14e+00 1.74e+04 -1.0 5.75e+02 -0.6 6.58e-03 4.69e-05f 1\n 343 3.3562091e+05 3.14e+00 1.74e+04 -1.0 7.66e+02 -1.1 1.05e-04 2.42e-03f 1\n 344 3.3510890e+05 3.13e+00 1.73e+04 -1.0 8.41e+02 -1.5 1.22e-03 6.80e-04f 1\n 345 3.2871807e+05 3.11e+00 1.72e+04 -1.0 8.84e+02 -2.0 3.02e-03 8.53e-03f 1\n 346 3.2777372e+05 3.10e+00 1.72e+04 -1.0 1.54e+02 1.1 7.57e-05 1.51e-03f 1\n 347 3.2772491e+05 3.10e+00 1.72e+04 -1.0 1.60e+02 1.6 2.40e-03 9.66e-05f 1\n 348 3.2718271e+05 3.10e+00 1.71e+04 -1.0 1.60e+02 2.0 4.86e-03 1.52e-03f 1\n 349 3.2706600e+05 3.10e+00 1.71e+04 -1.0 1.58e+02 1.5 1.31e-03 2.19e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 350 3.2406746e+05 3.08e+00 1.70e+04 -1.0 1.47e+02 1.0 1.75e-04 5.59e-03f 1\n 351 3.2366994e+05 3.08e+00 1.70e+04 -1.0 1.50e+02 1.5 4.30e-03 7.40e-04f 1\n 352 3.2356913e+05 3.08e+00 1.70e+04 -1.0 1.37e+02 1.0 1.11e-04 1.90e-04f 1\n 353 3.2355506e+05 3.08e+00 1.70e+04 -1.0 1.49e+02 1.4 3.22e-03 2.36e-05f 1\n 354 3.2316524e+05 3.07e+00 1.70e+04 -1.0 1.40e+02 0.9 1.38e-03 6.09e-04f 1\n 355r 3.2316524e+05 3.07e+00 1.00e+03 0.5 0.00e+00 0.4 0.00e+00 3.57e-07R 8\n 356r 3.2229084e+05 3.07e+00 1.00e+03 0.5 2.06e+03 - 6.18e-03 7.60e-04f 1\n 357r 3.2109567e+05 3.06e+00 9.98e+02 0.5 1.80e+03 - 1.71e-03 8.09e-04f 1\n 358r 3.1149974e+05 2.97e+00 9.95e+02 0.5 1.67e+03 - 3.74e-03 6.54e-03f 1\n 359r 3.1087190e+05 2.97e+00 9.95e+02 0.5 8.58e+02 - 9.83e-05 4.32e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 360r 3.0989090e+05 2.96e+00 9.97e+02 0.5 9.94e+02 - 6.50e-03 6.44e-04f 1\n 361r 3.0241262e+05 2.87e+00 9.92e+02 0.5 9.07e+02 - 2.36e-03 5.34e-03f 1\n 362r 2.9694919e+05 2.80e+00 9.80e+02 0.5 8.52e+02 - 1.55e-02 4.79e-03f 1\n 363r 2.8929254e+05 2.68e+00 9.72e+02 0.5 6.39e+02 - 1.25e-02 8.39e-03f 1\n 364 2.8919644e+05 2.68e+00 9.52e+03 -1.0 1.36e+02 -0.0 2.03e-04 1.66e-04f 1\n 365 2.8908661e+05 2.68e+00 9.52e+03 -1.0 1.38e+02 -0.5 3.89e-04 1.90e-04f 1\n 366 2.8861557e+05 2.68e+00 9.51e+03 -1.0 1.34e+02 -1.0 2.45e-04 8.16e-04f 1\n 367 2.8784152e+05 2.67e+00 9.50e+03 -1.0 1.29e+02 -1.5 5.67e-04 1.35e-03f 1\n 368 2.8557520e+05 
2.66e+00 9.46e+03 -1.0 1.18e+02 -1.9 1.40e-03 3.97e-03f 1\n 369 2.8048140e+05 2.64e+00 9.38e+03 -1.0 1.31e+02 -2.4 2.51e-03 9.01e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 370 2.7807299e+05 2.63e+00 9.34e+03 -1.0 1.50e+02 -2.9 1.64e-03 4.32e-03f 1\n 371 2.7069758e+05 2.59e+00 9.21e+03 -1.0 1.60e+02 -3.4 3.85e-03 1.34e-02f 1\n 372 2.5748481e+05 2.53e+00 8.98e+03 -1.0 1.59e+02 -3.8 4.45e-03 2.48e-02f 1\n 373 2.5409647e+05 2.51e+00 8.92e+03 -1.0 1.53e+02 -4.3 1.21e-02 6.62e-03f 1\n 374 2.4261880e+05 2.45e+00 8.72e+03 -1.0 1.50e+02 -4.8 6.04e-03 2.29e-02f 1\n 375 2.2822588e+05 2.38e+00 8.45e+03 -1.0 1.46e+02 -5.3 1.51e-02 3.02e-02f 1\n 376 2.0965467e+05 2.28e+00 8.10e+03 -1.0 1.41e+02 -5.8 1.82e-02 4.16e-02f 1\n 377 2.0936932e+05 2.28e+00 8.10e+03 -1.0 1.37e+02 -6.2 3.27e-02 6.83e-04f 1\n 378 2.0780500e+05 2.27e+00 8.07e+03 -1.0 1.37e+02 -6.7 4.67e-03 3.76e-03f 1\n 379 1.9606770e+05 2.20e+00 7.84e+03 -1.0 1.38e+02 -7.2 7.84e-04 2.87e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 380 1.7935187e+05 2.11e+00 7.49e+03 -1.0 1.38e+02 -7.7 2.58e-02 4.37e-02f 1\n 381 1.7765925e+05 2.10e+00 7.46e+03 -1.0 1.36e+02 -8.1 4.29e-02 4.74e-03f 1\n 382 1.6495345e+05 2.02e+00 7.18e+03 -1.0 1.34e+02 -8.6 1.77e-02 3.65e-02f 1\n 383 1.6340175e+05 2.01e+00 7.15e+03 -1.0 1.30e+02 -9.1 1.84e-02 4.73e-03f 1\n 384 1.5788016e+05 1.98e+00 7.03e+03 -1.0 1.29e+02 -9.6 3.72e-03 1.71e-02f 1\n 385 1.5064127e+05 1.93e+00 6.87e+03 -1.0 1.28e+02 -10.1 8.10e-03 2.33e-02f 1\n 386 1.2939352e+05 1.79e+00 6.36e+03 -1.0 1.25e+02 -10.5 1.85e-02 7.34e-02f 1\n 387 1.2646803e+05 1.77e+00 6.29e+03 -1.0 1.18e+02 -11.0 4.15e-02 1.14e-02f 1\n 388 1.1471323e+05 1.68e+00 5.99e+03 -1.0 1.16e+02 -11.5 1.69e-02 4.78e-02f 1\n 389 1.0992888e+05 1.65e+00 5.86e+03 -1.0 1.11e+02 -12.0 7.97e-02 2.12e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 390 1.0212816e+05 1.59e+00 5.65e+03 -1.0 1.07e+02 -12.4 1.86e-03 3.63e-02f 1\n 391 8.6177113e+04 1.46e+00 5.19e+03 -1.0 1.04e+02 -12.9 7.26e-02 8.17e-02f 1\n 392 8.5370527e+04 1.45e+00 5.16e+03 -1.0 9.48e+01 -13.4 3.22e-02 4.71e-03f 1\n 393 7.4077318e+04 1.35e+00 4.81e+03 -1.0 9.38e+01 -13.9 1.85e-02 6.88e-02f 1\n 394 7.1177623e+04 1.33e+00 4.71e+03 -1.0 8.78e+01 -14.3 6.45e-02 1.99e-02f 1\n 395 6.4716733e+04 1.26e+00 4.49e+03 -1.0 8.51e+01 -14.8 4.54e-03 4.67e-02f 1\n 396 5.7488482e+04 1.19e+00 4.23e+03 -1.0 8.14e+01 -15.3 9.91e-03 5.78e-02f 1\n 397 4.8754351e+04 1.10e+00 3.90e+03 -1.0 7.88e+01 -15.8 6.42e-02 7.95e-02f 1\n 398 4.7745315e+04 1.08e+00 3.86e+03 -1.0 7.72e+01 -16.3 9.49e-02 1.05e-02f 1\n 399 4.1977413e+04 1.02e+00 3.61e+03 -1.0 7.89e+01 -16.7 6.18e-02 6.27e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 400 3.3677208e+04 9.10e-01 3.24e+03 -1.0 7.75e+01 -17.2 7.45e-03 1.05e-01f 1\n 401 3.0166245e+04 8.61e-01 3.06e+03 -1.0 7.30e+01 -17.7 2.38e-02 5.39e-02f 1\n 402 2.5223779e+04 7.87e-01 2.80e+03 -1.0 7.10e+01 -18.2 1.21e-01 8.61e-02f 1\n 403 2.2341250e+04 7.40e-01 2.63e+03 -1.0 6.89e+01 -18.6 1.27e-01 5.92e-02f 1\n 404 1.8863927e+04 6.80e-01 2.42e+03 -1.0 6.77e+01 -19.1 8.08e-02 8.17e-02f 1\n 405 1.5848263e+04 6.23e-01 2.21e+03 -1.0 6.47e+01 -19.6 5.68e-02 8.40e-02f 1\n 406 1.4057233e+04 5.86e-01 2.08e+03 -1.0 6.57e+01 -19.1 8.62e-02 5.84e-02f 1\n 407 1.1013361e+04 5.19e-01 1.84e+03 -1.0 5.68e+01 -19.6 1.68e-01 1.16e-01f 1\n 408 7.9952027e+03 4.41e-01 1.57e+03 -1.0 5.34e+01 -19.1 1.48e-01 1.49e-01f 1\n 409 5.4065322e+03 3.62e-01 1.29e+03 -1.0 4.45e+01 -19.6 1.93e-01 
1.80e-01f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 410 2.4913122e+03 2.44e-01 8.68e+02 -1.0 4.14e+01 -19.1 1.52e-01 3.26e-01f 1\n 411 1.0310441e+03 1.55e-01 5.51e+02 -1.0 2.82e+01 -19.6 2.47e-01 3.66e-01f 1\n 412 2.7434337e+02 7.52e-02 4.80e+02 -1.0 2.93e+01 -19.1 1.96e-01 5.14e-01f 1\n 413 5.4776714e+01 2.33e-02 7.33e+02 -1.0 7.94e+00 -19.6 4.50e-01 6.90e-01f 1\n 414 2.8195260e+01 3.14e-04 1.32e+03 -1.0 1.06e+01 -19.1 5.84e-01 1.00e+00f 1\n 415 3.2622344e+01 3.87e-05 1.06e+03 -1.0 1.60e+01 -19.6 7.02e-01 9.94e-01f 1\n 416 3.0040029e+01 4.65e-05 2.31e+02 -1.0 4.17e+01 -19.1 9.04e-01 1.00e+00f 1\n 417 3.0266079e+01 4.64e-04 4.41e+02 -1.0 1.21e+02 -19.6 1.00e+00 9.30e-01h 1\n 418 2.9559008e+01 3.42e-05 5.33e+01 -1.0 1.42e+01 -19.1 6.03e-01 1.00e+00h 1\n 419 2.9904880e+01 2.39e-06 6.35e-03 -1.0 6.68e+00 -19.6 1.00e+00 1.00e+00h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 420 2.4902915e+01 2.75e-05 1.10e+00 -2.5 1.14e+01 -19.1 4.91e-01 4.44e-01f 1\n 421 2.2342576e+01 8.15e-05 1.58e+00 -2.5 6.79e+01 -19.6 1.30e-01 3.02e-01f 1\n 422 2.0275308e+01 8.22e-05 1.07e+00 -2.5 6.41e+00 -3.7 3.38e-01 3.24e-01h 1\n 423 1.9264876e+01 9.78e-05 8.99e-01 -2.5 2.12e+01 -4.2 3.83e-01 1.56e-01h 1\n 424 1.8289856e+01 7.85e-05 6.81e-01 -2.5 6.19e+00 -4.6 2.31e-01 2.42e-01h 1\n 425 1.6132778e+01 1.96e-04 8.21e-01 -2.5 1.24e+01 -5.1 4.78e-01 7.58e-01h 1\n 426 1.5611473e+01 1.11e-04 5.01e-01 -2.5 1.64e+01 -5.6 4.71e-01 5.11e-01h 1\n 427 1.5163933e+01 4.19e-05 3.81e-01 -2.5 2.85e+00 -3.4 1.00e+00 6.97e-01h 1\n 428 1.5050001e+01 3.15e-05 4.60e-01 -2.5 3.60e+00 -3.8 6.17e-01 2.96e-01h 1\n 429 1.4900715e+01 5.30e-06 1.02e+00 -2.5 1.96e+00 -4.3 6.36e-01 1.00e+00h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 430 1.4825885e+01 2.55e-05 4.15e-01 -2.5 5.52e+00 -3.9 8.38e-01 5.50e-01h 1\n 431 1.4739809e+01 3.24e-05 3.56e-01 -2.5 1.98e+01 -4.4 1.99e-01 1.55e-01h 1\n 432 1.4596773e+01 2.88e-05 5.34e-01 -2.5 5.19e+00 -3.9 5.29e-01 7.94e-01h 1\n 433 1.4484823e+01 3.46e-05 3.39e-01 -2.5 3.05e+01 -4.4 2.32e-01 9.62e-02h 1\n 434 1.4369681e+01 4.72e-05 4.22e-01 -2.5 1.62e+01 -4.9 3.06e-01 5.62e-01h 1\n 435 1.4279373e+01 4.71e-05 3.94e+02 -2.5 3.10e+01 -5.4 4.70e-01 2.25e-01h 1\n 436 1.4208263e+01 2.19e-05 2.73e+02 -2.5 2.87e+01 -5.8 3.06e-01 1.00e+00h 1\n 437 1.4134986e+01 1.91e-05 1.24e+02 -2.5 1.93e+01 -6.3 5.46e-01 6.57e-01h 1\n 438 1.4091516e+01 4.37e-05 3.46e+01 -2.5 2.35e+01 -6.8 7.19e-01 1.00e+00h 1\n 439 1.4087170e+01 4.09e-06 2.76e-04 -2.5 7.32e+00 -7.3 1.00e+00 1.00e+00h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 440 1.4088533e+01 1.91e-09 4.39e-07 -2.5 4.32e-01 -7.8 1.00e+00 1.00e+00h 1\n 441 1.3804149e+01 3.76e-06 7.14e+00 -3.8 4.84e+00 -8.2 6.51e-01 8.20e-01f 1\n 442 1.3706304e+01 1.35e-06 1.69e-03 -3.8 1.67e+00 -8.7 1.00e+00 1.00e+00h 1\n 443 1.3684885e+01 4.66e-08 3.95e-05 -3.8 4.07e-02 -9.2 1.00e+00 1.00e+00h 1\n 444 1.3664347e+01 8.58e-08 6.51e-02 -5.7 3.79e-01 -9.7 9.45e-01 9.46e-01h 1\n 445 1.3660196e+01 4.91e-08 1.08e+01 -5.7 4.56e-02 -10.1 1.00e+00 9.19e-01h 1\n 446 1.3659000e+01 4.92e-08 1.36e-06 -5.7 9.38e-03 -10.6 1.00e+00 1.00e+00f 1\n 447 1.3658600e+01 4.94e-08 9.08e+01 -8.6 3.53e-03 -11.1 9.94e-01 8.63e-01h 1\n 448 1.3658471e+01 5.57e-08 6.82e+00 -8.6 1.22e-03 -11.6 1.00e+00 9.90e-01f 1\n 449 1.3658445e+01 4.72e-08 2.13e-09 -8.6 4.22e-04 -12.0 1.00e+00 1.00e+00f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 450 1.3658439e+01 4.92e-08 3.85e-12 -8.6 
1.81e-05 -12.5 1.00e+00 1.00e+00h 1\n 451 1.3658437e+01 4.92e-08 1.82e-12 -8.6 1.52e-06 -13.0 1.00e+00 1.00e+00h 1\n 452 1.3658437e+01 4.92e-08 9.09e-13 -8.6 6.26e-07 -13.5 1.00e+00 1.00e+00h 1\n 453 1.3658437e+01 4.92e-08 9.09e-13 -8.6 1.49e-07 -14.0 1.00e+00 1.00e+00h 1\n 454 1.3658437e+01 4.92e-08 3.64e-12 -8.6 9.58e-09 -14.4 1.00e+00 1.00e+00h 1\n 455 1.3658437e+01 4.92e-08 9.09e-13 -8.6 3.90e-11 -14.9 1.00e+00 1.00e+00h 1\n 456 1.3658437e+01 4.92e-08 9.09e-13 -8.6 1.39e-12 -15.4 1.00e+00 1.00e+00h 1\n 457 1.3658437e+01 4.92e-08 9.09e-13 -8.6 9.57e-13 -15.9 1.00e+00 1.00e+00h 1\n 458 1.3658437e+01 4.92e-08 9.09e-13 -8.6 7.94e-13 -16.3 1.00e+00 1.00e+00h 1\n 459 1.3658437e+01 4.92e-08 9.09e-13 -8.6 6.30e-13 -16.8 1.00e+00 1.00e+00h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 460 1.3658437e+01 4.92e-08 4.55e-13 -8.6 2.07e-12 -17.3 1.00e+00 1.00e+00h 1\n 461 1.3658437e+01 4.92e-08 9.09e-13 -8.6 2.88e-12 -17.8 1.00e+00 1.00e+00h 1\n 462 1.3658437e+01 4.92e-08 9.09e-13 -8.6 5.03e-12 -18.2 1.00e+00 1.00e+00h 1\n 463 1.3658437e+01 4.92e-08 4.55e-13 -8.6 3.27e-12 -18.7 1.00e+00 1.00e+00h 1\n\nNumber of Iterations....: 463\n\n (scaled) (unscaled)\nObjective...............: 1.3658436881286921e+01 1.3658436881286921e+01\nDual infeasibility......: 4.5474735088646412e-13 4.5474735088646412e-13\nConstraint violation....: 4.9168770388519079e-08 4.9168770388519079e-08\nComplementarity.........: 2.5059067889663204e-09 2.5059067889663204e-09\nOverall NLP error.......: 4.9168770388519079e-08 4.9168770388519079e-08\n\n\nNumber of objective function evaluations = 483\nNumber of objective gradient evaluations = 395\nNumber of equality constraint evaluations = 483\nNumber of inequality constraint evaluations = 483\nNumber of equality constraint Jacobian evaluations = 467\nNumber of inequality constraint Jacobian evaluations = 467\nNumber of Lagrangian Hessian evaluations = 463\nTotal CPU secs in IPOPT (w/o function evaluations) = 4.499\nTotal CPU secs in NLP function evaluations = 9.988\n\nEXIT: Solved To Acceptable Level.\n solver : t_proc (avg) t_wall (avg) n_eval\n nlp_f | 28.65ms ( 59.32us) 28.66ms ( 59.35us) 483\n nlp_g | 513.61ms ( 1.06ms) 513.34ms ( 1.06ms) 483\n nlp_grad | 3.08ms ( 3.08ms) 3.08ms ( 3.08ms) 1\n nlp_grad_f | 34.91ms ( 88.17us) 34.96ms ( 88.29us) 396\n nlp_hess_l | 6.27 s ( 13.63ms) 6.25 s ( 13.58ms) 460\n nlp_jac_g | 3.15 s ( 6.74ms) 3.14 s ( 6.72ms) 468\n total | 14.60 s ( 14.60 s) 14.51 s ( 14.51 s) 1\nThis is Ipopt version 3.12.3, running with linear solver mumps.\nNOTE: Other linear solvers might be more efficient (see Ipopt documentation).\n\nNumber of nonzeros in equality constraint Jacobian...: 4230\nNumber of nonzeros in inequality constraint Jacobian.: 3466\nNumber of nonzeros in Lagrangian Hessian.............: 3648\n\nTotal number of variables............................: 2293\n variables with only lower bounds: 0\n variables with lower and upper bounds: 0\n variables with only upper bounds: 0\nTotal number of equality constraints.................: 1377\nTotal number of inequality constraints...............: 967\n inequality constraints with only lower bounds: 204\n inequality constraints with lower and upper bounds: 355\n inequality constraints with only upper bounds: 408\n\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 0 0.0000000e+00 9.83e+00 7.50e-01 -1.0 0.00e+00 - 0.00e+00 0.00e+00 0\n 1 1.0399716e-02 9.83e+00 1.26e+00 -1.0 1.20e+02 -4.0 4.93e-04 3.26e-04h 1\n 2 2.0045559e-02 9.83e+00 2.20e+01 -1.0 1.82e+02 -4.5 
3.56e-04 3.85e-05h 1\n 3 4.4710489e-01 9.83e+00 2.52e+01 -1.0 2.46e+02 -5.0 3.88e-04 3.37e-04h 1\n 4 2.5591113e+00 9.83e+00 6.47e+01 -1.0 3.34e+02 -5.4 1.11e-03 4.95e-04h 1\n 5 1.0304744e+01 9.83e+00 1.06e+02 -1.0 2.54e+02 -5.9 1.15e-03 5.03e-04h 1\n 6 7.9216962e+01 9.82e+00 1.08e+02 -1.0 1.86e+02 -6.4 1.38e-03 1.35e-03h 1\n 7 1.6205310e+02 9.82e+00 1.46e+02 -1.0 1.46e+02 -6.9 1.48e-03 8.87e-04h 1\n 8 2.2025207e+02 9.83e+00 1.56e+02 -1.0 3.22e+02 -7.3 6.46e-04 4.87e-04h 1\n 9 2.2441787e+02 9.83e+00 1.54e+02 -1.0 4.45e+03 -7.8 8.53e-06 3.28e-05h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 10 2.5182294e+02 9.82e+00 1.42e+02 -1.0 9.50e+02 -8.3 8.65e-06 2.07e-04h 1\n 11 2.8777432e+02 9.81e+00 1.66e+02 -1.0 4.28e+02 -8.8 6.37e-04 2.55e-04h 1\n 12 4.0157811e+02 9.80e+00 1.67e+02 -1.0 1.45e+02 -9.2 7.37e-04 7.08e-04h 1\n 13 4.1550901e+02 9.80e+00 1.78e+02 -1.0 1.56e+02 -9.7 2.52e-04 7.97e-05h 1\n 14 6.1114602e+02 9.80e+00 1.21e+02 -1.0 1.50e+02 -10.2 8.33e-05 1.00e-03h 1\n 15 6.1355211e+02 9.80e+00 1.82e+02 -1.0 1.68e+02 -10.7 9.96e-04 1.13e-05h 1\n 16 6.2428940e+02 9.80e+00 2.68e+02 -1.0 3.50e+02 -11.2 1.43e-03 5.02e-05h 1\n 17r 6.2428940e+02 9.80e+00 9.99e+02 1.0 0.00e+00 -11.6 0.00e+00 4.03e-07R 5\n 18r 8.6414419e+02 9.80e+00 9.99e+02 1.0 6.29e+03 - 3.71e-04 1.98e-05f 1\n 19r 3.3647143e+03 9.80e+00 9.99e+02 1.0 1.65e+03 - 3.45e-04 1.15e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 20r 5.1092797e+04 9.80e+00 9.98e+02 1.0 7.67e+02 - 3.88e-04 6.81e-04f 1\n 21r 6.3342770e+04 9.79e+00 1.07e+03 1.0 7.22e+00 2.0 1.50e-01 1.14e-02f 1\n 22r 1.7594664e+05 9.73e+00 1.03e+03 1.0 3.13e+00 2.4 2.75e-01 2.46e-01f 1\n 23r 1.6728527e+05 9.68e+00 1.07e+03 1.0 2.44e+00 2.9 3.31e-02 1.26e-01f 1\n 24r 1.7195513e+05 9.51e+00 1.09e+03 1.0 5.75e-01 3.3 8.31e-01 1.00e+00f 1\n 25r 1.7774546e+05 9.35e+00 1.07e+03 1.0 1.83e+00 2.8 3.09e-01 3.74e-01f 1\n 26r 1.7826098e+05 8.80e+00 1.06e+03 1.0 5.11e+00 2.3 3.60e-01 5.80e-01f 1\n 27 1.7790304e+05 8.75e+00 2.67e+03 -1.0 9.73e+01 -12.1 1.05e-03 4.89e-03f 1\n 28 1.7790581e+05 8.75e+00 2.67e+03 -1.0 9.99e+01 -12.6 4.15e-03 8.71e-05h 1\n 29 1.7805578e+05 8.74e+00 2.67e+03 -1.0 1.49e+02 -13.1 2.98e-03 1.65e-03h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 30 1.7826045e+05 8.73e+00 2.66e+03 -1.0 1.00e+02 -13.5 5.02e-03 1.36e-03h 1\n 31 1.7833630e+05 8.72e+00 2.66e+03 -1.0 9.67e+01 -14.0 4.35e-03 3.57e-04h 1\n 32 1.7838366e+05 8.72e+00 2.66e+03 -1.0 9.66e+01 -14.5 5.53e-03 1.85e-04h 1\n 33 1.7897357e+05 8.70e+00 2.66e+03 -1.0 9.66e+01 -15.0 6.03e-03 1.99e-03h 1\n 34 1.7979138e+05 8.68e+00 2.65e+03 -1.0 9.65e+01 -15.5 5.02e-03 2.33e-03h 1\n 35 1.8083548e+05 8.66e+00 2.64e+03 -1.0 9.65e+01 -15.9 5.14e-03 2.59e-03h 1\n 36 1.8126368e+05 8.65e+00 2.64e+03 -1.0 9.68e+01 -16.4 1.38e-03 9.55e-04h 1\n 37 1.8126992e+05 8.65e+00 2.64e+03 -1.0 9.72e+01 -16.9 2.04e-05 1.32e-05h 1\n 38 1.8127749e+05 8.65e+00 2.64e+03 -1.0 9.75e+01 -17.4 5.22e-03 1.49e-05h 1\n 39 1.8272429e+05 8.63e+00 2.63e+03 -1.0 9.81e+01 -17.8 3.94e-04 2.72e-03h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 40 1.8373146e+05 8.61e+00 2.63e+03 -1.0 9.79e+01 -18.3 2.78e-05 1.86e-03h 1\n 41 1.8460711e+05 8.60e+00 2.63e+03 -1.0 9.79e+01 -18.8 6.10e-03 1.58e-03h 1\n 42 1.8506845e+05 8.59e+00 3.03e+03 -1.0 9.77e+01 -19.3 3.60e-03 8.05e-04h 1\n 43 1.8630538e+05 8.57e+00 2.81e+03 -1.0 9.79e+01 -19.7 1.23e-03 2.15e-03h 1\n 44 1.8693912e+05 8.56e+00 2.69e+03 -1.0 9.91e+01 -19.1 6.31e-04 1.18e-03h 1\n 45 
1.8714858e+05 8.56e+00 2.61e+03 -1.0 9.72e+01 -19.6 1.86e-05 3.60e-04h 1\n 46 1.8833659e+05 8.54e+00 3.03e+03 -1.0 9.83e+01 -19.1 4.39e-03 2.16e-03h 1\n 47 1.8900050e+05 8.53e+00 3.12e+03 -1.0 9.71e+01 -19.6 1.51e-03 1.12e-03h 1\n 48 1.8921851e+05 8.53e+00 3.04e+03 -1.0 9.85e+01 -19.1 2.64e-05 3.81e-04h 1\n 49 1.8974673e+05 8.52e+00 2.87e+03 -1.0 9.66e+01 -19.6 1.53e-04 8.26e-04h 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 50 1.8975438e+05 8.52e+00 3.74e+03 -1.0 9.81e+01 -19.1 3.80e-03 1.32e-05h 1\n 51 1.9002449e+05 8.52e+00 4.95e+03 -1.0 9.84e+01 -19.6 3.14e-03 3.97e-04h 1\n 52 1.9113127e+05 8.50e+00 5.17e+03 -1.0 9.86e+01 -19.1 2.66e-03 1.79e-03h 1\n 53 1.9218965e+05 8.49e+00 4.74e+03 -1.0 9.66e+01 -19.6 1.49e-05 1.46e-03h 1\n 54 1.9235900e+05 8.49e+00 4.77e+03 -1.0 9.69e+01 -19.1 3.43e-04 2.55e-04h 1\n 55 1.9614877e+05 8.45e+00 3.24e+03 -1.0 9.55e+01 -19.6 4.82e-05 4.46e-03h 1\n 56 1.9767353e+05 8.43e+00 3.55e+03 -1.0 9.43e+01 -19.1 3.11e-03 2.00e-03h 1\n 57 2.0045953e+05 8.41e+00 3.54e+03 -1.0 9.38e+01 -19.6 3.09e-03 3.11e-03h 1\n 58 2.0058633e+05 8.41e+00 4.32e+03 -1.0 9.34e+01 -19.1 2.74e-03 1.60e-04h 1\n 59r 2.0058633e+05 8.41e+00 1.00e+03 0.9 0.00e+00 -19.6 0.00e+00 4.42e-07R 5\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 60r 2.0053275e+05 8.41e+00 1.00e+03 0.9 3.17e+03 - 3.55e-04 6.45e-05f 1\n 61r 2.0041460e+05 8.41e+00 1.00e+03 0.9 1.58e+03 - 3.29e-04 1.19e-04f 1\n 62r 2.0024799e+05 8.41e+00 9.99e+02 0.9 1.43e+03 - 7.29e-04 1.04e-03f 1\n 63r 2.0178166e+05 8.42e+00 9.97e+02 0.9 9.93e+02 - 2.24e-03 1.49e-03f 1\n 64r 2.0902033e+05 8.42e+00 9.92e+02 0.9 5.71e+02 - 1.11e-02 3.85e-03f 1\n 65r 2.3136420e+05 8.41e+00 9.83e+02 0.9 3.09e+02 - 8.14e-03 9.32e-03f 1\n 66r 2.6153891e+05 8.40e+00 9.88e+02 0.9 3.62e+02 - 7.16e-03 1.05e-02f 1\n 67r 2.7798903e+05 8.40e+00 9.64e+02 0.9 3.55e+02 - 1.32e-02 9.11e-03f 1\n 68r 2.8657577e+05 8.40e+00 9.59e+02 0.9 2.47e+02 - 2.08e-02 4.88e-03f 1\n 69r 2.9527415e+05 8.40e+00 9.55e+02 0.9 1.55e+02 - 2.61e-03 4.50e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 70r 3.0527057e+05 8.40e+00 9.50e+02 0.9 5.20e+02 - 6.42e-03 5.12e-03f 1\n 71r 3.3582481e+05 8.40e+00 9.58e+02 0.9 1.01e+02 - 3.07e-02 1.51e-02f 1\n 72r 4.1107309e+05 8.41e+00 9.04e+02 0.9 1.14e+02 - 2.39e-02 3.35e-02f 1\n 73r 4.5964398e+05 8.41e+00 8.95e+02 0.9 1.15e+02 - 3.62e-02 2.11e-02f 1\n 74r 5.2016472e+05 8.41e+00 8.67e+02 0.9 1.36e+02 - 4.89e-02 3.03e-02f 1\n 75r 5.9030592e+05 8.41e+00 8.31e+02 0.9 1.48e+02 - 6.01e-02 3.79e-02f 1\n 76r 6.7476964e+05 8.41e+00 7.88e+02 0.9 1.61e+02 - 7.73e-02 4.59e-02f 1\n 77r 7.7561624e+05 8.41e+00 7.44e+02 0.9 1.82e+02 - 1.18e-01 5.62e-02f 1\n 78r 8.4945381e+05 8.41e+00 7.06e+02 0.9 4.62e+02 - 4.89e-03 5.46e-02f 1\n 79r 9.0140487e+05 8.41e+00 6.78e+02 0.9 2.50e+02 - 1.14e-01 3.60e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 80r 9.7546515e+05 8.41e+00 6.36e+02 0.9 2.54e+02 - 1.43e-01 6.16e-02f 1\n 81r 1.1190008e+06 8.38e+00 5.68e+02 0.9 3.76e+02 - 3.36e-02 1.08e-01f 1\n 82r 1.1225339e+06 8.37e+00 5.49e+02 0.9 8.82e+02 - 8.26e-03 3.25e-02f 1\n 83r 1.1286850e+06 8.36e+00 5.46e+02 0.9 4.12e+02 - 3.25e-02 6.75e-03f 1\n 84r 1.2659338e+06 8.29e+00 3.86e+02 0.9 7.87e+00 0.0 3.42e-01 2.93e-01f 1\n 85r 1.2612169e+06 8.28e+00 3.74e+02 0.2 2.38e+02 - 1.60e-02 3.04e-02f 1\n 86r 1.3908950e+06 8.17e+00 3.95e+02 0.2 3.02e+02 - 2.38e-02 7.50e-02f 1\n 87r 1.4392216e+06 8.15e+00 2.99e+02 0.2 7.97e+00 0.4 1.84e-01 1.68e-01f 1\n 88r 1.4749437e+06 8.14e+00 
2.77e+02 0.2 5.34e+01 -0.1 4.57e-02 3.68e-02f 1\n 89r 1.4776820e+06 8.12e+00 2.77e+02 0.2 8.08e+02 - 3.91e-03 5.72e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 90r 1.4904632e+06 8.09e+00 3.10e+02 0.2 2.23e+02 - 1.15e-01 3.01e-02f 1\n 91r 1.4780369e+06 8.03e+00 2.85e+02 0.2 2.60e+02 - 1.24e-02 4.77e-02f 1\n 92r 1.4633854e+06 7.98e+00 2.84e+02 0.2 2.35e+02 - 1.42e-01 5.45e-02f 1\n 93r 1.4468075e+06 7.94e+00 2.89e+02 0.2 3.39e+02 - 2.25e-02 4.04e-02f 1\n 94r 1.4177297e+06 7.84e+00 3.28e+02 0.2 3.67e+02 - 4.08e-02 1.06e-01f 1\n 95r 1.4208335e+06 7.83e+00 3.24e+02 0.2 3.41e+02 - 4.16e-02 2.22e-02f 1\n 96r 1.4035701e+06 7.79e+00 2.82e+02 0.2 3.36e+02 - 6.49e-02 9.28e-02f 1\n 97r 1.3981555e+06 7.76e+00 2.08e+02 0.2 3.03e+01 -0.5 1.71e-01 7.29e-02f 1\n 98r 1.3736085e+06 7.75e+00 2.02e+02 0.2 4.74e+02 - 2.70e-02 1.88e-02f 1\n 99r 1.3738774e+06 7.70e+00 1.87e+02 0.2 4.77e+02 - 7.99e-02 9.61e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 100r 1.3580286e+06 7.69e+00 2.14e+02 0.2 4.67e+02 - 1.03e-01 3.83e-02f 1\n 101r 1.3021262e+06 7.68e+00 2.15e+02 0.2 5.22e+02 - 1.39e-01 9.61e-02f 1\n 102r 1.2905134e+06 7.68e+00 2.12e+02 0.2 5.78e+02 - 5.56e-02 4.11e-02f 1\n 103r 1.2958912e+06 7.68e+00 1.61e+02 0.2 6.56e+02 - 2.71e-02 1.08e-01f 1\n 104r 1.3293811e+06 7.69e+00 1.47e+02 0.2 7.20e+02 - 8.99e-02 8.45e-02f 1\n 105r 1.4013282e+06 7.70e+00 1.56e+02 0.2 9.31e+02 - 7.51e-02 1.05e-01f 1\n 106r 1.5010415e+06 7.67e+00 2.27e+02 0.2 3.80e+01 -1.0 6.34e-02 1.79e-01f 1\n 107r 1.5310867e+06 7.66e+00 2.10e+02 0.2 5.35e+01 -1.5 5.32e-02 3.54e-02f 1\n 108r 1.6858235e+06 7.64e+00 2.07e+02 0.2 2.27e+02 -2.0 1.41e-02 1.65e-02f 1\n 109r 1.6992342e+06 7.64e+00 2.00e+02 0.2 4.36e+01 -0.6 3.64e-02 3.57e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 110r 1.6720537e+06 7.63e+00 1.42e+02 0.2 2.59e+00 0.7 3.58e-01 1.88e-01f 1\n 111r 1.6393971e+06 7.62e+00 1.61e+02 0.2 5.96e+00 0.2 6.27e-02 2.98e-01f 1\n 112r 1.5942856e+06 7.62e+00 9.69e+01 0.2 3.94e+00 0.6 5.59e-01 6.60e-01f 1\n 113r 1.5454439e+06 7.61e+00 8.14e+01 -0.5 9.57e+00 0.2 2.87e-01 3.13e-01f 1\n 114r 1.5348572e+06 7.52e+00 4.26e+01 -0.5 2.28e+01 -0.3 2.19e-01 1.39e-01f 1\n 115 1.5101190e+06 7.46e+00 1.69e+04 -1.0 7.31e+02 -19.1 1.74e-03 8.11e-03f 1\n 116 1.5081170e+06 7.45e+00 1.69e+04 -1.0 6.47e+02 -19.6 1.66e-03 6.70e-04f 1\n 117 1.5000973e+06 7.43e+00 1.68e+04 -1.0 6.33e+02 -19.1 1.83e-03 2.70e-03f 1\n 118 1.4972338e+06 7.43e+00 1.68e+04 -1.0 6.14e+02 -19.6 4.62e-03 9.87e-04f 1\n 119 1.4926222e+06 7.41e+00 1.68e+04 -1.0 5.92e+02 -19.1 4.76e-03 1.69e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 120 1.4897946e+06 7.40e+00 1.68e+04 -1.0 5.73e+02 -19.6 1.24e-03 1.25e-03f 1\n 121 1.4867964e+06 7.39e+00 1.67e+04 -1.0 5.67e+02 -19.1 1.24e-02 1.40e-03f 1\n 122 1.4811114e+06 7.37e+00 1.67e+04 -1.0 5.31e+02 -19.6 2.94e-03 2.99e-03f 1\n 123 1.4776862e+06 7.36e+00 1.67e+04 -1.0 5.21e+02 -19.1 1.64e-02 1.85e-03f 1\n 124 1.4722508e+06 7.34e+00 1.66e+04 -1.0 4.87e+02 -19.6 5.29e-03 3.07e-03f 1\n 125 1.4686209e+06 7.32e+00 1.66e+04 -1.0 4.76e+02 -19.1 8.82e-03 2.07e-03f 1\n 126 1.4549412e+06 7.26e+00 1.64e+04 -1.0 4.61e+02 -19.6 8.60e-03 7.87e-03f 1\n 127 1.4439147e+06 7.22e+00 1.63e+04 -1.0 4.42e+02 -19.1 3.68e-03 6.45e-03f 1\n 128 1.4318869e+06 7.17e+00 1.62e+04 -1.0 4.33e+02 -19.6 2.58e-04 7.02e-03f 1\n 129 1.4287865e+06 7.15e+00 1.62e+04 -1.0 4.28e+02 -19.1 9.28e-03 1.82e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr 
ls\n 130 1.4117953e+06 7.08e+00 1.60e+04 -1.0 4.17e+02 -19.6 1.21e-02 9.95e-03f 1\n 131 1.4094629e+06 7.07e+00 1.60e+04 -1.0 3.99e+02 -19.1 1.22e-02 1.30e-03f 1\n 132 1.3898684e+06 6.99e+00 1.58e+04 -1.0 3.86e+02 -19.6 6.69e-03 1.09e-02f 1\n 133 1.3666966e+06 6.91e+00 1.56e+04 -1.0 3.73e+02 -19.1 1.23e-04 1.25e-02f 1\n 134 1.3653874e+06 6.90e+00 1.56e+04 -1.0 3.64e+02 -19.6 8.61e-03 7.06e-04f 1\n 135 1.3600774e+06 6.88e+00 1.56e+04 -1.0 3.58e+02 -19.1 4.77e-04 2.88e-03f 1\n 136 1.3599863e+06 6.88e+00 1.56e+04 -1.0 3.54e+02 -19.6 1.16e-04 4.90e-05f 1\n 137 1.3520824e+06 6.85e+00 1.55e+04 -1.0 3.56e+02 -19.1 2.25e-02 4.63e-03f 1\n 138 1.3198881e+06 6.72e+00 1.52e+04 -1.0 3.32e+02 -19.6 4.47e-03 1.93e-02f 1\n 139 1.3018334e+06 6.65e+00 1.51e+04 -1.0 3.23e+02 -19.1 3.31e-02 1.03e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 140 1.3001178e+06 6.64e+00 1.50e+04 -1.0 3.09e+02 -19.6 2.72e-03 1.11e-03f 1\n 141 1.2712885e+06 6.53e+00 1.48e+04 -1.0 3.10e+02 -19.1 1.01e-04 1.69e-02f 1\n 142 1.2672814e+06 6.51e+00 1.48e+04 -1.0 3.20e+02 -19.6 4.18e-03 2.32e-03f 1\n 143 1.2662613e+06 6.51e+00 1.47e+04 -1.0 2.90e+02 -19.1 1.94e-02 6.22e-04f 1\n 144 1.2251694e+06 6.34e+00 1.43e+04 -1.0 2.82e+02 -19.6 2.71e-03 2.66e-02f 1\n 145 1.2238974e+06 6.33e+00 1.43e+04 -1.0 2.99e+02 -19.1 1.32e-03 7.91e-04f 1\n 146 1.2218795e+06 6.32e+00 1.43e+04 -1.0 3.08e+02 -19.6 1.65e-03 1.32e-03f 1\n 147 1.2218327e+06 6.32e+00 1.43e+04 -1.0 3.05e+02 -19.1 3.84e-03 2.74e-05f 1\n 148 1.1933966e+06 6.22e+00 1.41e+04 -1.0 3.06e+02 -19.6 7.34e-04 1.67e-02f 1\n 149 1.1923484e+06 6.21e+00 1.41e+04 -1.0 3.08e+02 -19.1 8.24e-03 5.84e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 150 1.1851137e+06 6.19e+00 1.40e+04 -1.0 2.76e+02 -19.6 7.09e-04 4.39e-03f 1\n 151 1.1674398e+06 6.13e+00 1.39e+04 -1.0 2.77e+02 -19.1 4.82e-03 9.94e-03f 1\n 152 1.1671054e+06 6.12e+00 1.39e+04 -1.0 2.72e+02 -19.6 4.03e-03 2.01e-04f 1\n 153 1.1607724e+06 6.10e+00 1.38e+04 -1.0 2.70e+02 -19.1 6.82e-04 3.72e-03f 1\n 154 1.1521951e+06 6.07e+00 1.37e+04 -1.0 2.70e+02 -19.6 1.57e-03 5.21e-03f 1\n 155 1.1512500e+06 6.07e+00 1.37e+04 -1.0 2.70e+02 -19.1 7.99e-04 5.27e-04f 1\n 156 1.1507231e+06 6.06e+00 1.37e+04 -1.0 2.68e+02 -19.6 4.24e-04 3.27e-04f 1\n 157 1.1507040e+06 6.06e+00 1.37e+04 -1.0 2.81e+02 -19.1 5.41e-04 9.69e-06f 1\n 158 1.1467730e+06 6.05e+00 1.37e+04 -1.0 2.63e+02 -19.6 8.26e-06 2.11e-03f 1\n 159 1.0732380e+06 5.82e+00 1.32e+04 -1.0 2.63e+02 -19.1 2.67e-04 3.91e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 160 1.0714161e+06 5.81e+00 1.32e+04 -1.0 2.56e+02 -19.6 1.02e-02 9.30e-04f 1\n 161 1.0713627e+06 5.81e+00 1.32e+04 -1.0 2.54e+02 -19.1 8.09e-05 2.84e-05f 1\n 162 1.0580046e+06 5.76e+00 1.31e+04 -1.0 2.54e+02 -19.6 1.35e-04 7.69e-03f 1\n 163 1.0545534e+06 5.75e+00 1.30e+04 -1.0 2.54e+02 -19.1 3.39e-03 1.89e-03f 1\n 164 1.0341517e+06 5.68e+00 1.30e+04 -1.0 2.52e+02 -19.6 8.74e-04 1.22e-02f 1\n 165 1.0336429e+06 5.68e+00 1.29e+04 -1.0 2.51e+02 -19.1 6.22e-04 2.90e-04f 1\n 166 1.0275521e+06 5.66e+00 1.38e+04 -1.0 2.51e+02 -19.6 4.36e-04 3.68e-03f 1\n 167 1.0179732e+06 5.63e+00 1.32e+04 -1.0 2.50e+02 -19.1 7.95e-03 5.53e-03f 1\n 168 9.9624521e+05 5.55e+00 1.59e+04 -1.0 2.48e+02 -19.6 4.63e-03 1.40e-02f 1\n 169 9.9527357e+05 5.55e+00 1.58e+04 -1.0 2.44e+02 -19.1 5.96e-04 5.87e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 170 9.9489625e+05 5.55e+00 1.56e+04 -1.0 2.47e+02 -19.6 1.04e-03 2.42e-04f 1\n 171 9.9489118e+05 5.55e+00 
1.50e+04 -1.0 2.53e+02 -19.1 1.24e-03 2.86e-06f 1\n 172 9.9421219e+05 5.54e+00 1.57e+04 -1.0 2.49e+02 -19.6 3.63e-06 4.37e-04f 1\n 173 9.8486650e+05 5.51e+00 1.68e+04 -1.0 2.46e+02 -19.1 5.48e-04 5.68e-03f 1\n 174 9.7308682e+05 5.47e+00 1.65e+04 -1.0 2.49e+02 -19.6 8.25e-03 7.61e-03f 1\n 175 9.7212896e+05 5.47e+00 1.65e+04 -1.0 2.44e+02 -19.1 3.20e-04 5.80e-04f 1\n 176 9.6540396e+05 5.44e+00 1.75e+04 -1.0 2.49e+02 -19.6 9.01e-04 4.34e-03f 1\n 177 9.4998516e+05 5.39e+00 1.51e+04 -1.0 2.44e+02 -19.1 1.93e-02 9.44e-03f 1\n 178 9.4904520e+05 5.39e+00 1.52e+04 -1.0 2.45e+02 -19.6 2.19e-04 6.06e-04f 1\n 179 9.4902274e+05 5.39e+00 1.51e+04 -1.0 2.75e+02 -19.1 1.62e-04 1.14e-05f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 180 9.3880492e+05 5.35e+00 1.67e+04 -1.0 2.29e+02 -19.6 1.38e-05 6.45e-03f 1\n 181 8.9559690e+05 5.21e+00 1.79e+04 -1.0 2.29e+02 -19.1 1.06e-02 2.71e-02f 1\n 182 8.5549874e+05 5.07e+00 1.93e+04 -1.0 2.22e+02 -19.6 4.07e-03 2.60e-02f 1\n 183 8.4452994e+05 5.04e+00 1.99e+04 -1.0 2.17e+02 -19.1 4.96e-04 7.46e-03f 1\n 184 8.2921005e+05 4.98e+00 1.98e+04 -1.0 2.14e+02 -19.6 9.95e-03 1.09e-02f 1\n 185 8.2905253e+05 4.98e+00 1.90e+04 -1.0 2.09e+02 -19.1 5.75e-03 1.10e-04f 1\n 186 8.2904529e+05 4.98e+00 1.89e+04 -1.0 2.18e+02 -19.6 7.15e-05 5.41e-06f 1\n 187 8.2712955e+05 4.97e+00 1.92e+04 -1.0 2.14e+02 -19.1 5.72e-05 1.35e-03f 1\n 188 8.0950543e+05 4.91e+00 2.28e+04 -1.0 2.31e+02 -19.6 1.11e-03 1.34e-02f 1\n 189 8.0862616e+05 4.90e+00 2.24e+04 -1.0 2.12e+02 -19.1 1.92e-03 6.51e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 190 8.0425351e+05 4.89e+00 2.35e+04 -1.0 2.22e+02 -19.6 4.57e-04 3.51e-03f 1\n 191 7.9537870e+05 4.85e+00 2.31e+04 -1.0 2.15e+02 -19.1 7.37e-03 6.61e-03f 1\n 192 7.9489154e+05 4.85e+00 2.29e+04 -1.0 2.11e+02 -19.6 1.29e-03 3.92e-04f 1\n 193 7.8751827e+05 4.83e+00 2.29e+04 -1.0 2.01e+02 -19.1 2.56e-03 5.71e-03f 1\n 194 7.8736120e+05 4.82e+00 2.31e+04 -1.0 2.00e+02 -19.6 1.54e-04 1.32e-04f 1\n 195 7.8402757e+05 4.81e+00 2.31e+04 -1.0 1.94e+02 -19.1 2.19e-03 2.63e-03f 1\n 196 7.8388845e+05 4.81e+00 2.53e+04 -1.0 1.98e+02 -19.6 2.15e-04 1.18e-04f 1\n 197 7.8385262e+05 4.81e+00 2.50e+04 -1.0 1.94e+02 -19.1 4.34e-03 2.82e-05f 1\n 198 7.8236217e+05 4.81e+00 1.09e+04 -1.0 1.97e+02 -19.6 3.11e-04 1.27e-03f 1\n 199 7.7035462e+05 4.76e+00 1.08e+04 -1.0 1.91e+02 -19.1 3.91e-04 9.52e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 200 7.6826638e+05 4.75e+00 1.08e+04 -1.0 1.90e+02 -19.6 2.68e-02 1.77e-03f 1\n 201 7.6137802e+05 4.73e+00 1.07e+04 -1.0 1.80e+02 -19.1 5.08e-04 5.41e-03f 1\n 202 7.6092627e+05 4.72e+00 1.07e+04 -1.0 1.85e+02 -19.6 8.12e-03 3.85e-04f 1\n 203 6.8620409e+05 4.44e+00 1.26e+04 -1.0 1.80e+02 -19.1 1.57e-04 6.08e-02f 1\n 204 6.8223234e+05 4.42e+00 1.15e+04 -1.0 1.85e+02 -19.6 6.43e-03 3.54e-03f 1\n 205 6.7550321e+05 4.40e+00 1.16e+04 -1.0 1.74e+02 -19.1 5.10e-03 5.69e-03f 1\n 206 6.7512988e+05 4.39e+00 1.00e+04 -1.0 1.86e+02 -19.6 8.13e-03 3.47e-04f 1\n 207 6.6341460e+05 4.35e+00 9.84e+03 -1.0 1.72e+02 -19.1 1.30e-02 1.04e-02f 1\n 208 6.5075026e+05 4.30e+00 1.09e+04 -1.0 1.87e+02 -19.6 2.10e-04 1.19e-02f 1\n 209 6.4469528e+05 4.27e+00 1.05e+04 -1.0 1.72e+02 -19.1 7.52e-03 5.37e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 210 6.3074280e+05 4.22e+00 1.26e+04 -1.0 1.85e+02 -19.6 4.48e-04 1.32e-02f 1\n 211 6.3015403e+05 4.21e+00 1.26e+04 -1.0 3.38e+01 2.3 1.77e-03 7.51e-04f 1\n 212 6.3013669e+05 4.21e+00 1.47e+04 -1.0 1.21e+02 1.8 
1.17e-03 1.79e-05f 1\n 213 6.2710947e+05 4.20e+00 1.45e+04 -1.0 1.26e+02 1.4 4.90e-04 2.86e-03f 1\n 214 6.2100935e+05 4.18e+00 1.49e+04 -1.0 1.53e+02 0.9 1.86e-03 5.85e-03f 1\n 215 6.1066877e+05 4.14e+00 1.59e+04 -1.0 1.97e+02 0.4 1.05e-05 8.89e-03f 1\n 216 6.0984620e+05 4.14e+00 1.57e+04 -1.0 1.84e+02 1.7 8.49e-03 1.01e-03f 1\n 217 6.0751052e+05 4.13e+00 1.57e+04 -1.0 2.02e+02 1.3 5.49e-03 2.08e-03f 1\n 218 6.0707832e+05 4.12e+00 1.57e+04 -1.0 2.01e+02 1.7 2.32e-04 5.32e-04f 1\n 219 6.0707327e+05 4.12e+00 1.53e+04 -1.0 1.96e+02 2.1 2.90e-03 7.52e-06f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 220 6.0555478e+05 4.12e+00 1.54e+04 -1.0 2.25e+02 1.6 9.57e-04 1.84e-03f 1\n 221 6.0553909e+05 4.12e+00 1.47e+04 -1.0 2.14e+02 2.1 5.83e-03 2.26e-05f 1\n 222 6.0387470e+05 4.11e+00 1.47e+04 -1.0 2.27e+02 1.6 2.04e-03 1.80e-03f 1\n 223 6.0325229e+05 4.11e+00 1.47e+04 -1.0 2.20e+02 2.0 4.35e-04 7.72e-04f 1\n 224 6.0322399e+05 4.11e+00 1.45e+04 -1.0 2.21e+02 1.5 4.10e-03 2.96e-05f 1\n 225 6.0185557e+05 4.10e+00 1.45e+04 -1.0 2.17e+02 2.0 1.59e-05 1.70e-03f 1\n 226 6.0180800e+05 4.10e+00 1.44e+04 -1.0 2.29e+02 1.5 1.37e-03 4.63e-05f 1\n 227 5.9923339e+05 4.09e+00 1.44e+04 -1.0 2.27e+02 1.9 2.84e-03 2.85e-03f 1\n 228 5.9851909e+05 4.09e+00 1.44e+04 -1.0 2.29e+02 1.4 9.28e-04 6.31e-04f 1\n 229 5.9692058e+05 4.08e+00 1.44e+04 -1.0 2.28e+02 1.9 1.94e-03 1.77e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 230 5.9689878e+05 4.08e+00 1.44e+04 -1.0 2.28e+02 1.4 1.22e-05 1.79e-05f 1\n 231 5.9684337e+05 4.08e+00 1.44e+04 -1.0 2.28e+02 1.8 7.71e-05 6.11e-05f 1\n 232 5.9682024e+05 4.08e+00 1.44e+04 -1.0 2.28e+02 1.3 2.55e-06 2.10e-05f 1\n 233 5.9680869e+05 4.08e+00 1.38e+04 -1.0 2.28e+02 1.8 7.05e-03 1.36e-05f 1\n 234 5.9076367e+05 4.05e+00 1.41e+04 -1.0 2.30e+02 1.3 5.00e-04 5.99e-03f 1\n 235 5.9073170e+05 4.05e+00 1.42e+04 -1.0 2.32e+02 1.7 4.66e-03 3.40e-05f 1\n 236 5.8521172e+05 4.03e+00 1.45e+04 -1.0 2.32e+02 1.2 8.81e-04 5.50e-03f 1\n 237 5.8376022e+05 4.03e+00 1.44e+04 -1.0 2.30e+02 0.8 2.07e-06 1.04e-03f 1\n 238 5.8372062e+05 4.03e+00 1.53e+04 -1.0 2.35e+02 2.1 3.62e-03 4.81e-05f 1\n 239 5.7779448e+05 4.00e+00 1.55e+04 -1.0 2.39e+02 1.6 1.33e-02 6.22e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 240 5.7771432e+05 4.00e+00 1.57e+04 -1.0 2.26e+02 2.0 1.00e-03 9.89e-05f 1\n 241 5.7665827e+05 4.00e+00 1.57e+04 -1.0 2.07e+02 1.6 9.92e-04 1.10e-03f 1\n 242 5.7511556e+05 3.99e+00 1.55e+04 -1.0 2.12e+02 2.0 1.09e-03 1.92e-03f 1\n 243 5.7052438e+05 3.97e+00 1.52e+04 -1.0 2.09e+02 1.5 2.73e-04 4.68e-03f 1\n 244 5.6998537e+05 3.97e+00 1.51e+04 -1.0 2.36e+02 1.9 3.75e-04 6.54e-04f 1\n 245 5.6612860e+05 3.95e+00 1.48e+04 -1.0 2.37e+02 1.5 3.47e-05 3.75e-03f 1\n 246 5.6572525e+05 3.95e+00 1.50e+04 -1.0 2.36e+02 1.9 2.57e-03 4.85e-04f 1\n 247 5.6516789e+05 3.95e+00 1.51e+04 -1.0 2.36e+02 1.4 1.32e-03 4.83e-04f 1\n 248 5.6316860e+05 3.94e+00 1.52e+04 -1.0 2.36e+02 1.8 3.05e-03 2.38e-03f 1\n 249 5.6254967e+05 3.94e+00 1.50e+04 -1.0 2.35e+02 1.3 7.91e-06 4.87e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 250 5.6077033e+05 3.93e+00 1.49e+04 -1.0 2.36e+02 1.8 1.29e-03 2.17e-03f 1\n 251 5.6022313e+05 3.93e+00 1.47e+04 -1.0 2.34e+02 1.3 2.29e-06 3.07e-04f 1\n 252 5.5692514e+05 3.91e+00 1.49e+04 -1.0 2.36e+02 1.7 8.13e-03 3.96e-03f 1\n 253 5.5673567e+05 3.91e+00 1.49e+04 -1.0 2.31e+02 1.2 2.68e-06 9.58e-05f 1\n 254 5.5535652e+05 3.91e+00 1.55e+04 -1.0 2.33e+02 1.7 1.14e-02 1.64e-03f 1\n 255 
5.5534273e+05 3.91e+00 1.56e+04 -1.0 2.30e+02 2.1 4.75e-04 1.85e-05f 1\n 256 5.5514554e+05 3.90e+00 1.71e+04 -1.0 2.27e+02 1.6 1.32e-02 2.20e-04f 1\n 257 5.5141803e+05 3.90e+00 1.48e+04 -1.0 4.83e+02 1.1 1.93e-05 1.30e-03f 1\n 258 5.4693933e+05 3.88e+00 1.42e+04 -1.0 2.39e+02 1.6 2.91e-04 4.86e-03f 1\n 259 5.4687686e+05 3.88e+00 1.43e+04 -1.0 2.52e+02 1.1 2.34e-03 6.08e-05f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 260 5.4353115e+05 3.87e+00 1.41e+04 -1.0 2.42e+02 1.5 1.34e-03 3.66e-03f 1\n 261 5.4346337e+05 3.87e+00 1.41e+04 -1.0 2.07e+02 1.0 1.56e-04 6.65e-05f 1\n 262 5.4335021e+05 3.87e+00 1.42e+04 -1.0 1.95e+02 1.5 2.22e-03 1.26e-04f 1\n 263 5.4296226e+05 3.86e+00 1.43e+04 -1.0 1.86e+02 1.0 2.63e-03 3.71e-04f 1\n 264 5.3812113e+05 3.84e+00 1.40e+04 -1.0 2.13e+02 1.4 2.17e-05 5.31e-03f 1\n 265 5.3811408e+05 3.84e+00 1.41e+04 -1.0 2.23e+02 1.8 3.29e-04 8.06e-06f 1\n 266 5.3810662e+05 3.84e+00 1.41e+04 -1.0 2.43e+02 1.4 9.90e-04 7.83e-06f 1\n 267 5.3692599e+05 3.84e+00 1.38e+04 -1.0 2.30e+02 1.8 8.84e-05 1.33e-03f 1\n 268 5.3689601e+05 3.84e+00 1.38e+04 -1.0 1.90e+02 1.3 1.05e-03 3.21e-05f 1\n 269 5.3480708e+05 3.83e+00 1.32e+04 -1.0 2.44e+02 0.8 2.23e-04 1.25e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 270 5.3431885e+05 3.83e+00 1.34e+04 -1.0 1.83e+02 1.3 4.31e-03 5.17e-04f 1\n 271 5.3310063e+05 3.83e+00 1.31e+04 -1.0 1.89e+02 1.7 2.86e-05 1.41e-03f 1\n 272 5.3150897e+05 3.82e+00 1.22e+04 -1.0 1.85e+02 2.1 1.96e-04 1.98e-03f 1\n 273 5.3147543e+05 3.82e+00 1.22e+04 -1.0 1.61e+02 1.6 1.35e-04 3.82e-05f 1\n 274 5.3137612e+05 3.82e+00 1.24e+04 -1.0 1.76e+02 1.2 3.23e-03 1.02e-04f 1\n 275 5.3106158e+05 3.82e+00 1.29e+04 -1.0 1.79e+02 1.6 5.34e-03 3.64e-04f 1\n 276 5.2929724e+05 3.81e+00 1.28e+04 -1.0 1.73e+02 1.1 5.52e-05 1.84e-03f 1\n 277 5.2907597e+05 3.81e+00 1.29e+04 -1.0 1.63e+02 2.4 4.41e-04 3.68e-04f 1\n 278 5.2816992e+05 3.80e+00 1.27e+04 -1.0 1.88e+02 2.0 5.12e-04 1.14e-03f 1\n 279 5.2807415e+05 3.80e+00 1.27e+04 -1.0 1.48e+03 1.5 1.26e-06 8.93e-05f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 280 5.2805145e+05 3.80e+00 1.26e+04 -1.0 1.94e+02 1.0 1.55e-03 2.34e-05f 1\n 281 5.2494578e+05 3.79e+00 1.26e+04 -1.0 1.75e+02 1.4 7.38e-04 3.61e-03f 1\n 282 5.2382656e+05 3.79e+00 1.20e+04 -1.0 6.75e+02 1.0 1.61e-05 3.04e-04f 1\n 283 5.2294012e+05 3.78e+00 1.20e+04 -1.0 1.74e+02 1.4 1.78e-03 9.88e-04f 1\n 284 5.2002561e+05 3.77e+00 1.24e+04 -1.0 1.76e+02 1.8 7.11e-03 3.79e-03f 1\n 285 5.1038171e+05 3.73e+00 1.19e+04 -1.0 1.71e+02 1.3 6.29e-05 1.07e-02f 1\n 286 5.1029468e+05 3.73e+00 1.19e+04 -1.0 1.84e+02 1.8 7.42e-04 1.12e-04f 1\n 287 5.1028585e+05 3.73e+00 1.21e+04 -1.0 2.01e+02 2.2 4.17e-04 1.31e-05f 1\n 288 5.0835844e+05 3.72e+00 1.19e+04 -1.0 2.75e+02 1.7 1.15e-05 2.41e-03f 1\n 289 5.0829350e+05 3.72e+00 1.19e+04 -1.0 1.71e+02 1.2 3.97e-04 6.12e-05f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 290 5.0786133e+05 3.72e+00 1.30e+04 -1.0 1.77e+02 1.7 1.37e-02 5.49e-04f 1\n 291 4.9734337e+05 3.69e+00 1.06e+04 -1.0 1.72e+02 1.2 8.48e-04 8.86e-03f 1\n 292 4.9689003e+05 3.68e+00 1.08e+04 -1.0 1.74e+02 1.6 3.20e-03 5.66e-04f 1\n 293 4.9257017e+05 3.67e+00 9.15e+03 -1.0 1.62e+02 1.1 7.85e-05 3.01e-03f 1\n 294 4.9140020e+05 3.67e+00 9.02e+03 -1.0 2.41e+02 1.6 4.58e-05 1.41e-03f 1\n 295 4.9127824e+05 3.67e+00 1.14e+04 -1.0 1.61e+02 1.1 2.66e-03 6.68e-05f 1\n 296 4.9066209e+05 3.66e+00 1.29e+04 -1.0 1.75e+02 1.5 1.82e-02 7.38e-04f 1\n 297 4.8899995e+05 3.66e+00 1.45e+04 -1.0 
5.05e+02 1.0 9.31e-04 4.06e-04f 1\n 298 4.7750162e+05 3.61e+00 1.35e+04 -1.0 1.76e+02 1.5 4.98e-03 1.35e-02f 1\n 299 4.7434464e+05 3.61e+00 1.49e+04 -1.0 9.97e+02 1.0 6.27e-04 4.14e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 300 4.7339897e+05 3.61e+00 1.50e+04 -1.0 1.73e+02 1.4 2.13e-03 1.10e-03f 1\n 301 4.7161223e+05 3.61e+00 1.40e+04 -1.0 3.25e+02 0.9 1.03e-04 5.77e-04f 1\n 302 4.7150987e+05 3.60e+00 1.43e+04 -1.0 2.00e+02 0.4 6.17e-03 9.97e-05f 1\n 303 4.6068296e+05 3.57e+00 1.36e+04 -1.0 3.30e+02 -0.0 4.88e-05 1.06e-02f 1\n 304 4.5827805e+05 3.56e+00 1.49e+04 -1.0 1.68e+02 1.3 1.36e-02 2.70e-03f 1\n 305 4.5598068e+05 3.55e+00 1.41e+04 -1.0 4.79e+02 0.8 4.48e-04 7.06e-04f 1\n 306 4.5518400e+05 3.55e+00 1.43e+04 -1.0 1.59e+02 1.2 2.67e-03 8.88e-04f 1\n 307 4.5309751e+05 3.55e+00 1.72e+04 -1.0 2.98e+02 0.8 2.61e-03 8.91e-04f 1\n 308 4.4826799e+05 3.53e+00 1.86e+04 -1.0 1.66e+02 1.2 1.97e-02 5.39e-03f 1\n 309 4.4337229e+05 3.51e+00 1.70e+04 -1.0 1.62e+02 0.7 4.61e-05 4.04e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 310 4.3957043e+05 3.50e+00 2.22e+04 -1.0 1.64e+02 1.1 5.11e-02 4.22e-03f 1\n 311 4.3840763e+05 3.50e+00 2.35e+04 -1.0 1.68e+02 0.7 4.15e-03 9.47e-04f 1\n 312 4.2663652e+05 3.45e+00 2.43e+04 -1.0 1.62e+02 1.1 2.95e-02 1.34e-02f 1\n 313 4.1771582e+05 3.43e+00 2.40e+04 -1.0 1.75e+02 0.6 6.75e-03 7.00e-03f 1\n 314 4.1583852e+05 3.42e+00 2.27e+04 -1.0 8.29e+02 0.1 1.03e-04 3.62e-04f 1\n 315 4.0823036e+05 3.40e+00 2.35e+04 -1.0 1.82e+02 0.6 9.82e-03 6.83e-03f 1\n 316 4.0742102e+05 3.40e+00 2.31e+04 -1.0 2.88e+02 0.1 7.31e-05 5.39e-04f 1\n 317 4.0736672e+05 3.40e+00 2.36e+04 -1.0 4.84e+02 -0.4 7.32e-03 5.87e-05f 1\n 318 3.9419770e+05 3.35e+00 2.18e+04 -1.0 7.13e+02 -0.9 8.53e-05 1.33e-02f 1\n 319 3.8663282e+05 3.33e+00 1.92e+04 -1.0 8.56e+02 -1.3 2.10e-04 5.79e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 320 3.8579331e+05 3.33e+00 2.13e+04 -1.0 9.22e+02 -1.8 7.40e-03 7.22e-04f 1\n 321 3.7974323e+05 3.32e+00 1.94e+04 -1.0 9.50e+02 -2.3 1.97e-04 4.91e-03f 1\n 322 3.7938905e+05 3.31e+00 2.15e+04 -1.0 9.51e+02 -2.8 8.43e-03 3.27e-04f 1\n 323 3.7037180e+05 3.29e+00 1.92e+04 -1.0 9.55e+02 -3.3 1.13e-04 8.55e-03f 1\n 324 3.6908213e+05 3.28e+00 1.94e+04 -1.0 9.41e+02 -3.7 2.53e-03 1.30e-03f 1\n 325 3.6277620e+05 3.26e+00 1.81e+04 -1.0 2.17e+02 0.3 8.07e-04 7.29e-03f 1\n 326 3.5989666e+05 3.24e+00 1.79e+04 -1.0 1.55e+02 1.6 1.27e-03 5.36e-03f 1\n 327 3.5985661e+05 3.24e+00 1.79e+04 -1.0 1.59e+02 2.1 9.24e-04 1.04e-04f 1\n 328 3.5935266e+05 3.24e+00 1.79e+04 -1.0 1.62e+02 1.6 6.26e-05 8.73e-04f 1\n 329 3.5783375e+05 3.23e+00 1.78e+04 -1.0 1.63e+02 2.0 3.50e-03 3.63e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 330 3.5743587e+05 3.22e+00 1.78e+04 -1.0 1.61e+02 1.5 3.51e-05 6.78e-04f 1\n 331 3.5668635e+05 3.22e+00 1.78e+04 -1.0 1.52e+02 1.1 2.73e-04 1.08e-03f 1\n 332 3.5630331e+05 3.22e+00 1.78e+04 -1.0 1.67e+02 0.6 3.51e-03 4.92e-04f 1\n 333 3.5562828e+05 3.22e+00 1.78e+04 -1.0 2.82e+02 0.1 3.34e-03 7.59e-04f 1\n 334 3.5480287e+05 3.21e+00 1.78e+04 -1.0 4.82e+02 -0.4 4.21e-05 9.86e-04f 1\n 335 3.5438009e+05 3.21e+00 1.78e+04 -1.0 6.55e+02 -0.8 1.29e-04 5.04e-04f 1\n 336 3.5436086e+05 3.21e+00 1.78e+04 -1.0 8.12e+02 -1.3 2.56e-03 2.29e-05f 1\n 337 3.5193976e+05 3.20e+00 1.77e+04 -1.0 8.82e+02 -1.8 1.07e-03 2.89e-03f 1\n 338 3.5190047e+05 3.20e+00 1.77e+04 -1.0 9.10e+02 -2.3 7.21e-03 4.74e-05f 1\n 339 3.5093683e+05 3.20e+00 1.77e+04 -1.0 7.47e+02 -1.0 1.35e-05 
1.13e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 340 3.5052498e+05 3.20e+00 1.77e+04 -1.0 2.07e+02 0.4 5.15e-04 5.29e-04f 1\n 341 3.3750946e+05 3.14e+00 1.74e+04 -1.0 3.58e+02 -0.1 1.67e-05 1.64e-02f 1\n 342 3.3747338e+05 3.14e+00 1.74e+04 -1.0 5.75e+02 -0.6 6.58e-03 4.69e-05f 1\n 343 3.3562091e+05 3.14e+00 1.74e+04 -1.0 7.66e+02 -1.1 1.05e-04 2.42e-03f 1\n 344 3.3510890e+05 3.13e+00 1.73e+04 -1.0 8.41e+02 -1.5 1.22e-03 6.80e-04f 1\n 345 3.2871807e+05 3.11e+00 1.72e+04 -1.0 8.84e+02 -2.0 3.02e-03 8.53e-03f 1\n 346 3.2777372e+05 3.10e+00 1.72e+04 -1.0 1.54e+02 1.1 7.57e-05 1.51e-03f 1\n 347 3.2772491e+05 3.10e+00 1.72e+04 -1.0 1.60e+02 1.6 2.40e-03 9.66e-05f 1\n 348 3.2718271e+05 3.10e+00 1.71e+04 -1.0 1.60e+02 2.0 4.86e-03 1.52e-03f 1\n 349 3.2706600e+05 3.10e+00 1.71e+04 -1.0 1.58e+02 1.5 1.31e-03 2.19e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 350 3.2406746e+05 3.08e+00 1.70e+04 -1.0 1.47e+02 1.0 1.75e-04 5.59e-03f 1\n 351 3.2366994e+05 3.08e+00 1.70e+04 -1.0 1.50e+02 1.5 4.30e-03 7.40e-04f 1\n 352 3.2356913e+05 3.08e+00 1.70e+04 -1.0 1.37e+02 1.0 1.11e-04 1.90e-04f 1\n 353 3.2355506e+05 3.08e+00 1.70e+04 -1.0 1.49e+02 1.4 3.22e-03 2.36e-05f 1\n 354 3.2316524e+05 3.07e+00 1.70e+04 -1.0 1.40e+02 0.9 1.38e-03 6.09e-04f 1\n 355r 3.2316524e+05 3.07e+00 1.00e+03 0.5 0.00e+00 0.4 0.00e+00 3.57e-07R 8\n 356r 3.2229084e+05 3.07e+00 1.00e+03 0.5 2.06e+03 - 6.18e-03 7.60e-04f 1\n 357r 3.2109567e+05 3.06e+00 9.98e+02 0.5 1.80e+03 - 1.71e-03 8.09e-04f 1\n 358r 3.1149974e+05 2.97e+00 9.95e+02 0.5 1.67e+03 - 3.74e-03 6.54e-03f 1\n 359r 3.1087190e+05 2.97e+00 9.95e+02 0.5 8.58e+02 - 9.83e-05 4.32e-04f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 360r 3.0989090e+05 2.96e+00 9.97e+02 0.5 9.94e+02 - 6.50e-03 6.44e-04f 1\n 361r 3.0241262e+05 2.87e+00 9.92e+02 0.5 9.07e+02 - 2.36e-03 5.34e-03f 1\n 362r 2.9694919e+05 2.80e+00 9.80e+02 0.5 8.52e+02 - 1.55e-02 4.79e-03f 1\n 363r 2.8929254e+05 2.68e+00 9.72e+02 0.5 6.39e+02 - 1.25e-02 8.39e-03f 1\n 364 2.8919644e+05 2.68e+00 9.52e+03 -1.0 1.36e+02 -0.0 2.03e-04 1.66e-04f 1\n 365 2.8908661e+05 2.68e+00 9.52e+03 -1.0 1.38e+02 -0.5 3.89e-04 1.90e-04f 1\n 366 2.8861557e+05 2.68e+00 9.51e+03 -1.0 1.34e+02 -1.0 2.45e-04 8.16e-04f 1\n 367 2.8784152e+05 2.67e+00 9.50e+03 -1.0 1.29e+02 -1.5 5.67e-04 1.35e-03f 1\n 368 2.8557520e+05 2.66e+00 9.46e+03 -1.0 1.18e+02 -1.9 1.40e-03 3.97e-03f 1\n 369 2.8048140e+05 2.64e+00 9.38e+03 -1.0 1.31e+02 -2.4 2.51e-03 9.01e-03f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 370 2.7807299e+05 2.63e+00 9.34e+03 -1.0 1.50e+02 -2.9 1.64e-03 4.32e-03f 1\n 371 2.7069758e+05 2.59e+00 9.21e+03 -1.0 1.60e+02 -3.4 3.85e-03 1.34e-02f 1\n 372 2.5748481e+05 2.53e+00 8.98e+03 -1.0 1.59e+02 -3.8 4.45e-03 2.48e-02f 1\n 373 2.5409647e+05 2.51e+00 8.92e+03 -1.0 1.53e+02 -4.3 1.21e-02 6.62e-03f 1\n 374 2.4261880e+05 2.45e+00 8.72e+03 -1.0 1.50e+02 -4.8 6.04e-03 2.29e-02f 1\n 375 2.2822588e+05 2.38e+00 8.45e+03 -1.0 1.46e+02 -5.3 1.51e-02 3.02e-02f 1\n 376 2.0965467e+05 2.28e+00 8.10e+03 -1.0 1.41e+02 -5.8 1.82e-02 4.16e-02f 1\n 377 2.0936932e+05 2.28e+00 8.10e+03 -1.0 1.37e+02 -6.2 3.27e-02 6.83e-04f 1\n 378 2.0780500e+05 2.27e+00 8.07e+03 -1.0 1.37e+02 -6.7 4.67e-03 3.76e-03f 1\n 379 1.9606770e+05 2.20e+00 7.84e+03 -1.0 1.38e+02 -7.2 7.84e-04 2.87e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 380 1.7935187e+05 2.11e+00 7.49e+03 -1.0 1.38e+02 -7.7 2.58e-02 4.37e-02f 1\n 381 1.7765925e+05 
2.10e+00 7.46e+03 -1.0 1.36e+02 -8.1 4.29e-02 4.74e-03f 1\n 382 1.6495345e+05 2.02e+00 7.18e+03 -1.0 1.34e+02 -8.6 1.77e-02 3.65e-02f 1\n 383 1.6340175e+05 2.01e+00 7.15e+03 -1.0 1.30e+02 -9.1 1.84e-02 4.73e-03f 1\n 384 1.5788016e+05 1.98e+00 7.03e+03 -1.0 1.29e+02 -9.6 3.72e-03 1.71e-02f 1\n 385 1.5064127e+05 1.93e+00 6.87e+03 -1.0 1.28e+02 -10.1 8.10e-03 2.33e-02f 1\n 386 1.2939352e+05 1.79e+00 6.36e+03 -1.0 1.25e+02 -10.5 1.85e-02 7.34e-02f 1\n 387 1.2646803e+05 1.77e+00 6.29e+03 -1.0 1.18e+02 -11.0 4.15e-02 1.14e-02f 1\n 388 1.1471323e+05 1.68e+00 5.99e+03 -1.0 1.16e+02 -11.5 1.69e-02 4.78e-02f 1\n 389 1.0992888e+05 1.65e+00 5.86e+03 -1.0 1.11e+02 -12.0 7.97e-02 2.12e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 390 1.0212816e+05 1.59e+00 5.65e+03 -1.0 1.07e+02 -12.4 1.86e-03 3.63e-02f 1\n 391 8.6177113e+04 1.46e+00 5.19e+03 -1.0 1.04e+02 -12.9 7.26e-02 8.17e-02f 1\n 392 8.5370527e+04 1.45e+00 5.16e+03 -1.0 9.48e+01 -13.4 3.22e-02 4.71e-03f 1\n 393 7.4077318e+04 1.35e+00 4.81e+03 -1.0 9.38e+01 -13.9 1.85e-02 6.88e-02f 1\n 394 7.1177623e+04 1.33e+00 4.71e+03 -1.0 8.78e+01 -14.3 6.45e-02 1.99e-02f 1\n 395 6.4716733e+04 1.26e+00 4.49e+03 -1.0 8.51e+01 -14.8 4.54e-03 4.67e-02f 1\n 396 5.7488482e+04 1.19e+00 4.23e+03 -1.0 8.14e+01 -15.3 9.91e-03 5.78e-02f 1\n 397 4.8754351e+04 1.10e+00 3.90e+03 -1.0 7.88e+01 -15.8 6.42e-02 7.95e-02f 1\n 398 4.7745315e+04 1.08e+00 3.86e+03 -1.0 7.72e+01 -16.3 9.49e-02 1.05e-02f 1\n 399 4.1977413e+04 1.02e+00 3.61e+03 -1.0 7.89e+01 -16.7 6.18e-02 6.27e-02f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 400 3.3677208e+04 9.10e-01 3.24e+03 -1.0 7.75e+01 -17.2 7.45e-03 1.05e-01f 1\n 401 3.0166245e+04 8.61e-01 3.06e+03 -1.0 7.30e+01 -17.7 2.38e-02 5.39e-02f 1\n 402 2.5223779e+04 7.87e-01 2.80e+03 -1.0 7.10e+01 -18.2 1.21e-01 8.61e-02f 1\n 403 2.2341250e+04 7.40e-01 2.63e+03 -1.0 6.89e+01 -18.6 1.27e-01 5.92e-02f 1\n 404 1.8863927e+04 6.80e-01 2.42e+03 -1.0 6.77e+01 -19.1 8.08e-02 8.17e-02f 1\n 405 1.5848263e+04 6.23e-01 2.21e+03 -1.0 6.47e+01 -19.6 5.68e-02 8.40e-02f 1\n 406 1.4057233e+04 5.86e-01 2.08e+03 -1.0 6.57e+01 -19.1 8.62e-02 5.84e-02f 1\n 407 1.1013361e+04 5.19e-01 1.84e+03 -1.0 5.68e+01 -19.6 1.68e-01 1.16e-01f 1\n 408 7.9952027e+03 4.41e-01 1.57e+03 -1.0 5.34e+01 -19.1 1.48e-01 1.49e-01f 1\n 409 5.4065322e+03 3.62e-01 1.29e+03 -1.0 4.45e+01 -19.6 1.93e-01 1.80e-01f 1\niter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls\n 410 2.4913122e+03 2.44e-01 8.68e+02 -1.0 4.14e+01 -19.1 1.52e-01 3.26e-01f 1\n 411 1.0310441e+03 1.55e-01 5.51e+02 -1.0 2.82e+01 -19.6 2.47e-01 3.66e-01f 1\n 412 2.7434337e+02 7.52e-02 4.80e+02 -1.0 2.93e+01 -19.1 1.96e-01 5.14e-01f 1\n 413 5.4776714e+01 2.33e-02 7.33e+02 -1.0 7.94e+00 -19.6 4.50e-01 6.90e-01f 1\n 414 2.8195260e+01 3.14e-04 1.32e+03 -1.0 1.06e+01 -19.1 5.84e-01 1.00e+00f 1\n 415 3.2622344e+01 3.87e-05 1.06e+03 -1.0 1.60e+01 -19.6 7.02e-01 9.94e-01f 1\n 416 3.0040029e+01 4.65e-05 2.31e+02 -1.0 4.17e+01 -19.1 9.04e-01 1.00e+00f 1\n 417 3.0266079e+01 4.64e-04 4.41e+02 -1.0 1.21e+02 -19.6 1.00e+00 9.30e-01h 1\n 418 2.9559008e+01 3.42e-05 5.33e+01 -1.0 1.42e+01 -19.1 6.03e-01 1.00e+00h 1" ], [ "x1 = bri.opti.debug.value(bri.x_opt)\nplt.plot(x1[0,:], x1[1,:])", "_____no_output_____" ], [ "costs = [\"self.k_u_delta * self.u_delta_cost\",\n \"self.k_u_v * self.u_v_cost\",\n \"self.k_lat * self.lat_cost\", \n \"self.k_lon * self.lon_cost\", \n \"self.k_phi_error * self.phi_error_cost\",\n \"self.k_phi_dot * self.phidot_cost\",\n \"self.k_s * 
self.s_cost\",\n \"self.k_v * self.v_cost\",\n \"self.k_change_u_v * self.change_u_v\",\n \"self.k_change_u_delta * self.change_u_delta\", \n \"self.k_final * self.final_costs\",\n \"self.k_x * self.x_cost\"]\n\n\nfor i in range(len(bri.car1_costs_list)):\n amb_costs = bri.opti.debug.value(bri.car1_costs_list[i])\n print('%.03f'%amb_costs, costs[i])\n \nprint(bri.opti.debug.value(bri.slack_cost))", "0.157 self.k_u_delta * self.u_delta_cost\n3.246 self.k_u_v * self.u_v_cost\n296.473 self.k_lat * self.lat_cost\n0.000 self.k_lon * self.lon_cost\n25.852 self.k_phi_error * self.phi_error_cost\n186.438 self.k_phi_dot * self.phidot_cost\n0.000 self.k_s * self.s_cost\n0.000 self.k_v * self.v_cost\n0.373 self.k_change_u_v * self.change_u_v\n0.000 self.k_change_u_delta * self.change_u_delta\n50.985 self.k_final * self.final_costs\n-39.603 self.k_x * self.x_cost\n12634778.361273078\n" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code" ] ]
d0a524b08ecd54e458f8fbfb482314fa90868bfa
55,561
ipynb
Jupyter Notebook
AString Solutions.ipynb
SolitonScientific/AtomicString
e81f33d8760025910d2c3f1bb43496372adfefb6
[ "MIT" ]
null
null
null
AString Solutions.ipynb
SolitonScientific/AtomicString
e81f33d8760025910d2c3f1bb43496372adfefb6
[ "MIT" ]
null
null
null
AString Solutions.ipynb
SolitonScientific/AtomicString
e81f33d8760025910d2c3f1bb43496372adfefb6
[ "MIT" ]
1
2021-04-11T21:43:17.000Z
2021-04-11T21:43:17.000Z
345.099379
21,390
0.897608
[ [ [ "# %load CommonFunctions.py\n\n\n# # COMMON ATOMIC AND ASTRING FUNCTIONS\n\n# In[14]:\n\n############### One String Pulse with width, shift and scale #############\ndef StringPulse(String1, t: float, a = 1., b = 0., c = 1., d = 0.) -> float:\n x = (t - b)/a\n if (x < -1):\n res = -0.5\n elif (x > 1):\n res = 0.5\n else:\n res = String1(x)\n res = d + res * c\n return res\n\n\n# In[16]:\n\n\n###### Atomic String Applied to list with width, shift and scale #############\ndef String(String1, x: list, a = 1., b = 0., c = 1., d = 0.) -> list:\n res = []\n for i in range(len(x)):\n res.append(StringPulse(String1, x[i], a, b, c, d))\n return res\n\n\n# In[17]:\n\n\n###### Summation of two lists #############\ndef Sum(x1: list, x2: list) -> list:\n res = []\n for i in range(len(x1)):\n res.append(x1[i] + x2[i])\n return res\n\n\n# In[18]:\n\n\n##########################################################\n##This script introduces Atomic Function \n################### One Pulse of atomic function\ndef up1(x: float) -> float:\n #Atomic function table\n up_y = [0.5, 0.48, 0.460000017,0.440000421,0.420003478,0.400016184, 0.380053256, 0.360139056, 0.340308139, 0.320605107,\n 0.301083436, 0.281802850, 0.262826445, 0.244218000, 0.226041554, 0.208361009, 0.191239338, 0.174736305, \n 0.158905389, 0.143991189, 0.129427260, 0.115840866, 0.103044024, 0.9110444278e-01, 0.798444445e-01, 0.694444445e-01, \n 0.598444445e-01, 0.510444877e-01, 0.430440239e-01, 0.358409663e-01, 0.294282603e-01, 0.237911889e-01, 0.189053889e-01, \n 0.147363055e-01, 0.112393379e-01, 0.836100883e-02, 0.604155412e-02, 0.421800000e-02, 0.282644445e-02, 0.180999032e-02, \n 0.108343562e-02, 0.605106267e-03, 0.308138660e-03, 0.139055523e-03, 0.532555251e-04, 0.161841328e-04, 0.347816874e-05, \n 0.420576116e-05, 0.167693347e-07, 0.354008603e-10, 0]\n up_x = np.arange(0.5, 1.01, 0.01)\n\n res = 0.\n if ((x >= 0.5) and (x <= 1)):\n for i in range(len(up_x) - 1):\n if (up_x[i] >= x) and (x < up_x[i+1]):\n N1 = 1 - (x - up_x[i])/0.01\n res = N1 * up_y[i] + (1 - N1) * up_y[i+1]\n return res\n return res\n\n\n# In[19]:\n############### Atomic Function Pulse with width, shift and scale #############\ndef pulse(up1, t: float, a = 1., b = 0., c = 1., d = 0.) -> float:\n x = (t - b)/a\n res = 0.\n if (x >= 0.5) and (x <= 1):\n res = up1(x)\n elif (x >= 0.0) and (x < 0.5):\n res = 1 - up1(1 - x)\n elif (x >= -1 and x <= -0.5):\n res = up1(-x)\n elif (x > -0.5) and (x < 0):\n res = 1 - up1(1 + x)\n res = d + res * c\n return res\n\n############### Atomic Function Applied to list with width, shift and scale #############\ndef up(up1, x: list, a = 1., b = 0., c = 1., d = 0.) -> list:\n res = []\n for i in range(len(x)):\n res.append(pulse(up1, x[i], a, b, c, d))\n return res\n\n############### Atomic String #############\ndef AString1(x: float) -> float:\n res = 1 * (pulse(up1, x/2.0 - 0.5) - 0.5)\n return res\n\n############### Atomic String Pulse with width, shift and scale #############\ndef AStringPulse(t: float, a = 1., b = 0., c = 1., d = 0.) -> float:\n x = (t - b)/a\n if (x < -1):\n res = -0.5\n elif (x > 1):\n res = 0.5\n else:\n res = AString1(x)\n res = d + res * c\n return res\n\n###### Atomic String Applied to list with width, shift and scale #############\ndef AString(x: list, a = 1., b = 0., c = 1., d = 0.) 
-> list:\n res = []\n for i in range(len(x)):\n res.append(AStringPulse(x[i], a, b, c, d))\n return res\n\n", "_____no_output_____" ], [ "import numpy as np\nimport pylab as pl", "_____no_output_____" ], [ "x = np.arange(-2.0, 2.0, 0.01)", "_____no_output_____" ], [ "pl.title('Atomic Function')\npl.plot(x, up(up1, x), label='Atomic Function')\npl.grid(True)\npl.show()", "_____no_output_____" ], [ "pl.title('Atomic String')\npl.plot(x, String(AString1, x, 1.0, 0, 1, 0), label='Atomic String')\npl.grid(True)\npl.show()", "_____no_output_____" ], [ "x = np.arange(-4.0, 4.0, 0.01)\ndx = x[1] - x[0]\n\npl.title('Atomic String')\npl.plot(x, String(AString1, x, 1., 0., 1., 1.), label='Atomic String')\n\nIntAString = np.cumsum(String(AString1, x, 1., 0., 1., 1.)) * dx\npl.plot(x, IntAString, label='AString Integral')\n\nInt2AString = np.cumsum(IntAString) * dx\npl.plot(x, Int2AString, label='AString Integral Integral')\n\npl.title('AString with Integrals')\npl.legend(loc='best', numpoints=1)\npl.grid(True)\npl.show()", "_____no_output_____" ] ], [ [ "## Summary and Observations", "_____no_output_____" ], [ "1) AString Integrals provide smooth curly connections between two straight lines\n\n2) Further integrals provide smooth curly connections between parabolas!!\n\n3) In general, AString integrals can provide smooth connections between any similar shapes!!!", "_____no_output_____" ] ], [ [ "AString1(0)", "_____no_output_____" ], [ "v=[0,0.1,0.25,0.5,0.9,1]\nfor i in v:\n print(AString1(i))", "_____no_output_____" ] ] ]
[ "code", "markdown", "code" ]
[ [ "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ] ]
d0a527941cba022d31f478c0905d677a2666462d
1,948
ipynb
Jupyter Notebook
create_minimum_pascal3d_dataset.ipynb
maximpavliv/sem-keypts-process-scripts
e4bc0fd318404b67e7be1879b3e30d4e92538cac
[ "BSD-3-Clause" ]
null
null
null
create_minimum_pascal3d_dataset.ipynb
maximpavliv/sem-keypts-process-scripts
e4bc0fd318404b67e7be1879b3e30d4e92538cac
[ "BSD-3-Clause" ]
null
null
null
create_minimum_pascal3d_dataset.ipynb
maximpavliv/sem-keypts-process-scripts
e4bc0fd318404b67e7be1879b3e30d4e92538cac
[ "BSD-3-Clause" ]
null
null
null
30.920635
96
0.589836
[ [ [ "import h5py\nimport numpy as np\n\nwith h5py.File('/home/maxim/depth_dataset/train.h5', 'r') as hdf_train: \n center_train = np.array(hdf_train.get('center'))\n part_train = np.array(hdf_train.get('part'))\n scale_train = np.array(hdf_train.get('scale'))\n\nwith h5py.File('/home/maxim/depth_dataset/train_minimal.h5', 'w') as hdf_train_minimal:\n hdf_train_minimal.create_dataset('center', data=center_train)\n hdf_train_minimal.create_dataset('scale', data=scale_train)\n hdf_train_minimal.create_dataset('part', data=part_train)\n \nwith h5py.File('/home/maxim/depth_dataset/valid.h5', 'r') as hdf_valid: \n center_valid = np.array(hdf_valid.get('center'))\n part_valid = np.array(hdf_valid.get('part'))\n scale_valid = np.array(hdf_valid.get('scale'))\n\nwith h5py.File('/home/maxim/depth_dataset/valid_minimal.h5', 'w') as hdf_valid_minimal:\n hdf_valid_minimal.create_dataset('center', data=center_valid)\n hdf_valid_minimal.create_dataset('scale', data=scale_valid)\n hdf_valid_minimal.create_dataset('part', data=part_valid)\n", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code" ] ]
d0a5282d4fa3812f6eb171aec0aceaef48f6b951
4,014
ipynb
Jupyter Notebook
Scrape Google Scholar.ipynb
sn95033/Research-Notebook-Visualization
e9d6ad453479be152af948bd4736200dbdd04325
[ "MIT" ]
null
null
null
Scrape Google Scholar.ipynb
sn95033/Research-Notebook-Visualization
e9d6ad453479be152af948bd4736200dbdd04325
[ "MIT" ]
null
null
null
Scrape Google Scholar.ipynb
sn95033/Research-Notebook-Visualization
e9d6ad453479be152af948bd4736200dbdd04325
[ "MIT" ]
null
null
null
30.876923
191
0.482312
[ [ [ "from bs4 import BeautifulSoup\nimport requests\nfrom splinter import Browser\nimport time\n\nexecutable_path = {'executable_path': './chromedriver.exe'}\nbrowser = Browser('chrome', **executable_path, headless=False)\n\n#Scrape headlines and summaries from CNN\nbase_url = 'https://scholar.google.com/scholar?start='\n\narticle_list = []\n\nn=0\npage = 0\ni=0\n\n\nfor i in range(1): \n\n url= base_url + str(page) + '&q=open+cell+metal+foam&hl=en&as_sdt=0,5'\n browser.visit(url)\n \n html = browser.html\n soup = BeautifulSoup(html, 'html.parser')\n \n articles = soup.find_all(\"div\",class_=\"gs_r gs_or gs_scl\")\n n=0\n\n for article in articles:\n try:\n stats=articles[n].find('div',class_=\"gs_a\").text.strip('&nbsp')\n dash_loc = stats.find('-')\n\n authors = stats[0:dash_loc-2]\n stats2=stats[dash_loc+1:]\n dash_loc_2=stats2.find('-')\n\n journal_date = stats[dash_loc:(dash_loc+dash_loc_2)]\n comma_loc=journal_date.find(',')\n journal=journal_date[2:comma_loc]\n date=journal_date[comma_loc+2:]\n \n title=articles[n].find('h3',class_=\"gs_rt\").text.strip('<b>')\n pdf=articles[n].find('div',class_=\"gs_fl\").text.strip('')\n citations=articles[n].find_all('div',class_=\"gs_fl\")\n \n if len(citations)==2:\n citation=citations[1].text.strip(\"Cited by \").strip(\" versions Library Search\")\n citation=citation[:3].strip()\n else:\n citation=citations[0].text.strip(\"Cited by \").strip(\" versions Library Search\")\n citation=citation[:3].strip()\n pdf=\"No\"\n summary = articles[n].find('div',class_=\"gs_rs\").text.strip('<b>')\n summary=summary[0:(len(summary)-2)]\n \n list_item = {\"Title\":title,\"Authors\":authors,\"Journal\":journal,\"Date\":date,\"Stats\":stats,\"Citations\":citation,\"Full Text Available\":pdf,\"Summary\":summary}\n article_list.append(list_item)\n \n except (AttributeError):\n print('Missing Data')\n \n n=n+1\n \n page=page+10\n i=i+1\n print(i)\n \n time.sleep(35)", "1\n" ], [ "import pandas as pd\nimport numpy as np\n\n#Store data into a csv as a backup\ncnn_news = pd.DataFrame(article_list)\n\ncnn_news['Summary'].replace('\\xa0...','',inplace=True)\n\ncnn_news.to_csv(\"./Output Data/Articles.csv\", index=False, header=True)\ncnn_news.head(20)", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code" ] ]
d0a5293f2c5c597189a4d35380528367bce4b2df
10,290
ipynb
Jupyter Notebook
exploration/4_skill_biased_sampling_function/4_sql_implement_sampling_function.ipynb
Cozieee/dsu-mlpp
f195da7fcd7dcdb1e88a35fcd0439c9bb88d165c
[ "MIT" ]
null
null
null
exploration/4_skill_biased_sampling_function/4_sql_implement_sampling_function.ipynb
Cozieee/dsu-mlpp
f195da7fcd7dcdb1e88a35fcd0439c9bb88d165c
[ "MIT" ]
null
null
null
exploration/4_skill_biased_sampling_function/4_sql_implement_sampling_function.ipynb
Cozieee/dsu-mlpp
f195da7fcd7dcdb1e88a35fcd0439c9bb88d165c
[ "MIT" ]
null
null
null
55.923913
2,475
0.649466
[ [ [ "import numpy as np\nfrom exploration.config import sql_inst, mongo_inst", "_____no_output_____" ], [ " val_random_db = mongo_inst['val_random_db']\n val_dump = (val_random_db['osu_scores_high'], val_random_db['osu_user_stats'])", "_____no_output_____" ], [ "pdf_func = np.load(\"exploration/skill_biased_sampling_function/pdf_sample_func.npy\")\ngreedy_func = np.load(\"exploration/skill_biased_sampling_function/greedy_sample_func.npy\")", "_____no_output_____" ], [ "values = list(enumerate(_func))", "_____no_output_____" ], [ "with sql_inst('osu_random_2021_02') as conn:\n with conn.cursor() as cursor:\n cursor.execute(\n '''\n DROP TABLE IF EXISTS sample_func;\n CREATE TABLE sample_func (user_pp INT PRIMARY KEY, probability FLOAT NOT NULL);\n ALTER TABLE sample_func AUTO_INCREMENT=100;\n '''\n )\n conn.commit()\n\n with conn.cursor() as cursor:\n cursor.executemany(\n '''\n INSERT INTO sample_func VALUES\n (%s, %s)\n '''\n , values)\n conn.commit()", "_____no_output_____" ], [ "from datetime import datetime\nfrom mlpp.data_collection.sample_func import sampleFuncGenerator\n\nwith sql_inst('osu_random_2021_02') as conn:\n with conn.cursor() as cursor:\n cursor.execute(\n '''\n SELECT * FROM osu_user_stats\n WHERE rank_score < 7000 AND RAND() <= (\n SELECT probability FROM sample_func\n WHERE user_pp = FLOOR(rank_score)\n LIMIT 1\n )\n '''\n )\n\n sampled_users = [u[0] for u in cursor]\n print(sampled_users)\n\n sampled_scores = list(\n mongo_inst['val_random_db']['osu_scores_high'].find({\n 'user_id': {\n '$in': sampled_users\n },\n 'date': {\n '$gt': datetime(2019, 1, 1)\n }\n }, {'mlpp.est_user_pp': 1})\n )\n\n data = list(map(lambda s: s['mlpp']['est_user_pp'],sampled_scores))\n\n print(sampleFuncGenerator.prop_displaced(data))\n ", "_____no_output_____" ], [ "NUM_BINS = 200\nMAX_PP = 7000\nDATE_LIMIT = datetime(2019,1,1)\n\ngenerator = sampleFuncGenerator(date_limit = DATE_LIMIT, max_pp = MAX_PP, n_bins = NUM_BINS)", "_____no_output_____" ], [ "def simulate_fit(fit, dump = osu_dump):\n sc, _ = generator.simulate(*dump, fit)\n score_pp = list(map(lambda s: s['mlpp']['est_user_pp'], sc))\n return score_pp", "_____no_output_____" ], [ "sample = simulate_fit(greedy_func, val_dump)\ncap = len(sample) / 50", "_____no_output_____" ], [ "len(sample)/ val_dump[0].count()", "_____no_output_____" ], [ "users_7k_up = [u['_id'] for u in val_dump[1].find({'rank_score': {'$gt': 7000}}, {'_id': 1})]", "_____no_output_____" ], [ "random_scores_pipeline = [\n {'$match': {\n 'date': {'$gt': datetime(2019, 1, 1)},\n 'user_id': {'$nin': users_7k_up}\n }},\n {'$sample': {'size': len(sample)}},\n {'$project': {'mlpp': {'est_user_pp': 1}}}\n]", "_____no_output_____" ], [ "random_sample = [s['mlpp']['est_user_pp'] for s in val_dump[0].aggregate(random_scores_pipeline)]", "_____no_output_____" ], [ "sampleFuncGenerator.prop_displaced(sample)", "_____no_output_____" ], [ "import matplotlib.pyplot as plt\n\nfig, axs = plt.subplots(1, 2, figsize = (20, 8))\naxs[0].hist(random_sample, bins = 50)\naxs[0].plot([0, 7000], [cap, cap])\naxs[0].set_title('Random 1% sample')\naxs[0].annotate(f'Error: 40.6%', [5500, 5000], fontsize=20)\naxs[0].set(xlabel = 'Score est user PP', ylabel='Count')\naxs[1].hist(sample, bins = 50)\naxs[1].plot([0, 7000], [cap, cap])\naxs[1].annotate(f'Error: 12.4%', [5500, 1500], fontsize=20)\naxs[1].set_title('Sampling function 1% sample')\naxs[1].set(xlabel = 'Score est user PP', ylabel='Count')", "_____no_output_____" ] ], [ [ "<a 
style='text-decoration:none;' href='https://deepnote.com?utm_source=created-in-deepnote-cell&projectId=f93d0822-db5a-47ef-9a78-57b8adfbeb20' target=\"_blank\">Created in Deepnote</a>", "_____no_output_____" ] ] ]
[ "code", "markdown" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ] ]
d0a5301a14b50e0cf0f9c7703b8b01413ca8736e
9,029
ipynb
Jupyter Notebook
examples/archive/package_management/parse_imports.ipynb
lipteck/pymedphys
6e8e2b5db8173eafa6006481ceeca4f4341789e0
[ "Apache-2.0" ]
2
2020-02-04T03:21:20.000Z
2020-04-11T14:17:53.000Z
prototyping/dependency-tree/old-prototypes/parse_imports.ipynb
SimonBiggs/pymedphys
83f02eac6549ac155c6963e0a8d1f9284359b652
[ "Apache-2.0" ]
6
2020-10-06T15:36:46.000Z
2022-02-27T05:15:17.000Z
prototyping/dependency-tree/old-prototypes/parse_imports.ipynb
SimonBiggs/pymedphys
83f02eac6549ac155c6963e0a8d1f9284359b652
[ "Apache-2.0" ]
1
2020-12-20T14:14:00.000Z
2020-12-20T14:14:00.000Z
26.555882
103
0.504264
[ [ [ "import ast\nfrom glob import glob\nimport sys\nimport os\nfrom copy import deepcopy\n\nimport networkx as nx\n\nfrom stdlib_list import stdlib_list\nSTDLIB = set(stdlib_list())\n\nCONVERSIONS = {\n 'attr': 'attrs',\n 'PIL': 'Pillow',\n 'Image': 'Pillow',\n 'mpl_toolkits': 'matplotlib',\n 'dateutil': 'python-dateutil'\n}", "_____no_output_____" ], [ "dirtree = nx.DiGraph()\n\nexclude_dirs = {'node_modules', '__pycache__', 'dist'}\nexclude_files = {'__init__.py', '_version.py', '_install_requires.py'}\npackages_dir = os.path.join(ROOT, 'packages')\n\nfor root, dirs, files in os.walk(packages_dir, topdown=True):\n dirs[:] = [d for d in dirs if d not in exclude_dirs]\n \n if '__init__.py' in files:\n module_init = os.path.join(root, '__init__.py')\n files[:] = [f for f in files if f not in exclude_files]\n \n dirtree.add_node(module_init)\n parent_init = os.path.join(os.path.dirname(root), '__init__.py')\n if os.path.exists(parent_init):\n dirtree.add_edge(parent_init, module_init)\n\n for f in files:\n if f.endswith('.py'):\n filepath = os.path.join(root, f)\n dirtree.add_node(filepath)\n dirtree.add_edge(module_init, filepath)", "_____no_output_____" ], [ "package_roots = [n for n, d in dirtree.in_degree() if d == 0]\npackage_root_map = {\n os.path.basename(os.path.dirname(package_root)): package_root\n for package_root in package_roots\n}\n\ninternal_packages = list(package_root_map.keys())\ninternal_packages", "_____no_output_____" ], [ "import_types = {\n type(ast.parse('import george').body[0]),\n type(ast.parse('import george as macdonald').body[0])}\n\nimport_from_types = {\n type(ast.parse('from george import macdonald').body[0])\n}\n\nall_import_types = import_types.union(import_from_types)\n\nall_import_types", "_____no_output_____" ], [ "def get_imports(filepath):\n with open(filepath, 'r') as file:\n data = file.read()\n\n parsed = ast.parse(data)\n imports = [node for node in ast.walk(parsed) if type(node) in all_import_types]\n\n stdlib_imports = set()\n external_imports = set()\n internal_imports = set()\n near_relative_imports = set()\n far_relative_imports = set()\n \n def get_base_converted_module(name):\n name = name.split('.')[0]\n \n try:\n name = CONVERSIONS[name]\n except KeyError:\n pass\n \n return name\n \n def add_level_0(name):\n if name in STDLIB:\n stdlib_imports.add(name)\n elif name in internal_packages:\n internal_imports.add(name)\n else:\n external_imports.add(name)\n\n for an_import in imports:\n \n if type(an_import) in import_types:\n for alias in an_import.names:\n name = get_base_converted_module(alias.name)\n add_level_0(name)\n \n elif type(an_import) in import_from_types:\n name = get_base_converted_module(an_import.module)\n if an_import.level == 0:\n add_level_0(name)\n elif an_import.level == 1:\n near_relative_imports.add(name)\n else:\n far_relative_imports.add(name)\n \n else:\n raise\n \n \n \n return {\n 'stdlib': stdlib_imports,\n 'external': external_imports,\n 'internal': internal_imports,\n 'near_relative': near_relative_imports,\n 'far_relative': far_relative_imports}", "_____no_output_____" ], [ "all_imports = {\n filepath: get_imports(filepath)\n for filepath in dirtree.nodes()\n}", "_____no_output_____" ], [ "def get_descendants_dependencies(filepath):\n dependencies = deepcopy(all_imports[filepath])\n \n for descendant in nx.descendants(dirtree, filepath):\n for key, item in all_imports[descendant].items():\n dependencies[key] |= item\n \n return dependencies", "_____no_output_____" ], [ "package_dependencies = {\n package: 
get_descendants_dependencies(root)\n for package, root in package_root_map.items()\n}\n\npackage_dependencies", "_____no_output_____" ], [ "get_descendants_dependencies(package_roots[0])", "_____no_output_____" ], [ "list(nx.neighbors(dirtree, package_roots[4]))", "_____no_output_____" ], [ "nx.descendants(dirtree, '/home/simon/git/pymedphys/packages/pymedphys/src/pymedphys/__init__.py')", "_____no_output_____" ], [ "# nx.neighbors()", "_____no_output_____" ], [ "imports = [node for node in ast.walk(table) if type(node) in all_import_types]\nimports", "_____no_output_____" ], [ "# external_imports = set()\n# near_internal_imports = set()\n# far_internal_imports = set()\n\n# for an_import in imports:\n# if type(an_import) in import_types:\n# for alias in an_import.names:\n# external_imports.add(alias.name)\n# elif type(an_import) in import_from_types:\n# if an_import.level == 0:\n# external_imports.add(an_import.module)\n# elif an_import.level == 1:\n# near_internal_imports.add(an_import.module)\n# else:\n# far_internal_imports.add(an_import.module)\n# else:\n# raise\n \n# print(ast.dump(an_import))", "_____no_output_____" ], [ "external_imports", "_____no_output_____" ], [ "near_internal_imports", "_____no_output_____" ], [ "far_internal_imports", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a54261851a0108982a80c8f84ec13e8f047131
1,816
ipynb
Jupyter Notebook
vtkplotter_examples/notebooks/draw_points.ipynb
ismarou/vtkplotter-examples
1eefcc026be169ab7a77a5bce6dec8044c33b554
[ "MIT" ]
4
2020-07-30T02:38:29.000Z
2021-09-12T14:30:18.000Z
vtkplotter_examples/notebooks/draw_points.ipynb
ismarou/vtkplotter-examples
1eefcc026be169ab7a77a5bce6dec8044c33b554
[ "MIT" ]
null
null
null
vtkplotter_examples/notebooks/draw_points.ipynb
ismarou/vtkplotter-examples
1eefcc026be169ab7a77a5bce6dec8044c33b554
[ "MIT" ]
null
null
null
21.879518
122
0.535242
[ [ [ "from vtkplotter import *\nimport numpy as np\n\n#embedWindow(False) #uncomment to pop the usual vtk rendering window, press Esc to close it.\n\npositions = np.random.randn(5000,3)\nscalars = np.sum(positions, axis=1)\n\nplot = Plotter()\nplot.xtitle = '\\sum_i x_i'\n\npoints = Points(positions)\n\n# color vertices based on their scalar value with any matplotlib color map\npoints.pointColors(scalars, cmap='coolwarm')\nplot += points\n\nplot.show()", "_____no_output_____" ], [ "closePlotter()", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code" ] ]
d0a54bc074baeeb05175b3685f8100feb8c14682
1,264
ipynb
Jupyter Notebook
Day 4 Assignment-1-Copy1.ipynb
16641A0599/LetsUpgrade-python-B7
2e2c08386852b5330fae2e99c3b3691ef8388e67
[ "Apache-2.0" ]
null
null
null
Day 4 Assignment-1-Copy1.ipynb
16641A0599/LetsUpgrade-python-B7
2e2c08386852b5330fae2e99c3b3691ef8388e67
[ "Apache-2.0" ]
null
null
null
Day 4 Assignment-1-Copy1.ipynb
16641A0599/LetsUpgrade-python-B7
2e2c08386852b5330fae2e99c3b3691ef8388e67
[ "Apache-2.0" ]
null
null
null
17.802817
44
0.425633
[ [ [ "lower = 1042000\nupper = 702648265\n\nfor num in range(lower, upper + 1):\n\n \n order = len(str(num))\n \n \n sum = 0\n\n temp = num\n while temp > 0:\n digit = temp % 10\n sum += digit ** order\n temp //= 10\n\n if num == sum:\n print(num)\n break\n \n ", "1741725\n" ] ] ]
[ "code" ]
[ [ "code" ] ]
d0a54d6ada80f0fc5ca4f8a5953e41043e17334f
112,046
ipynb
Jupyter Notebook
project-bikesharing/Predicting_bike_sharing_data.ipynb
hailla/deep-learning-v2-pytorch
82941d5e632328e72c1a5806ede95ccfd02adfb8
[ "MIT" ]
null
null
null
project-bikesharing/Predicting_bike_sharing_data.ipynb
hailla/deep-learning-v2-pytorch
82941d5e632328e72c1a5806ede95ccfd02adfb8
[ "MIT" ]
null
null
null
project-bikesharing/Predicting_bike_sharing_data.ipynb
hailla/deep-learning-v2-pytorch
82941d5e632328e72c1a5806ede95ccfd02adfb8
[ "MIT" ]
null
null
null
129.983759
76,264
0.817289
[ [ [ "# Your first neural network\n\nIn this project, you'll build your first neural network and use it to predict daily bike rental ridership. We've provided some of the code, but left the implementation of the neural network up to you (for the most part). After you've submitted this project, feel free to explore the data and the model more.\n\n", "_____no_output_____" ] ], [ [ "%matplotlib inline\n%load_ext autoreload\n%autoreload 2\n%config InlineBackend.figure_format = 'retina'\n\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt", "_____no_output_____" ] ], [ [ "## Load and prepare the data\n\nA critical step in working with neural networks is preparing the data correctly. Variables on different scales make it difficult for the network to efficiently learn the correct weights. Below, we've written the code to load and prepare the data. You'll learn more about this soon!", "_____no_output_____" ] ], [ [ "data_path = 'Bike-Sharing-Dataset/hour.csv'\n\nrides = pd.read_csv(data_path)", "_____no_output_____" ], [ "rides.head()", "_____no_output_____" ] ], [ [ "## Checking out the data\n\nThis dataset has the number of riders for each hour of each day from January 1 2011 to December 31 2012. The number of riders is split between casual and registered, summed up in the `cnt` column. You can see the first few rows of the data above.\n\nBelow is a plot showing the number of bike riders over the first 10 days or so in the data set. (Some days don't have exactly 24 entries in the data set, so it's not exactly 10 days.) You can see the hourly rentals here. This data is pretty complicated! The weekends have lower over all ridership and there are spikes when people are biking to and from work during the week. Looking at the data above, we also have information about temperature, humidity, and windspeed, all of these likely affecting the number of riders. You'll be trying to capture all this with your model.", "_____no_output_____" ] ], [ [ "rides[:24*10].plot(x='dteday', y='cnt')", "_____no_output_____" ] ], [ [ "### Dummy variables\nHere we have some categorical variables like season, weather, month. To include these in our model, we'll need to make binary dummy variables. This is simple to do with Pandas thanks to `get_dummies()`.", "_____no_output_____" ] ], [ [ "dummy_fields = ['season', 'weathersit', 'mnth', 'hr', 'weekday']\nfor each in dummy_fields:\n dummies = pd.get_dummies(rides[each], prefix=each, drop_first=False)\n rides = pd.concat([rides, dummies], axis=1)\n\nfields_to_drop = ['instant', 'dteday', 'season', 'weathersit', \n 'weekday', 'atemp', 'mnth', 'workingday', 'hr']\ndata = rides.drop(fields_to_drop, axis=1)\ndata.head()", "_____no_output_____" ] ], [ [ "### Scaling target variables\nTo make training the network easier, we'll standardize each of the continuous variables. 
That is, we'll shift and scale the variables such that they have zero mean and a standard deviation of 1.\n\nThe scaling factors are saved so we can go backwards when we use the network for predictions.", "_____no_output_____" ] ], [ [ "quant_features = ['casual', 'registered', 'cnt', 'temp', 'hum', 'windspeed']\n# Store scalings in a dictionary so we can convert back later\nscaled_features = {}\nfor each in quant_features:\n mean, std = data[each].mean(), data[each].std()\n scaled_features[each] = [mean, std]\n data.loc[:, each] = (data[each] - mean)/std", "_____no_output_____" ] ], [ [ "### Splitting the data into training, testing, and validation sets\n\nWe'll save the data for the last approximately 21 days to use as a test set after we've trained the network. We'll use this set to make predictions and compare them with the actual number of riders.", "_____no_output_____" ] ], [ [ "# Save data for approximately the last 21 days \ntest_data = data[-21*24:]\n\n# Now remove the test data from the data set \ndata = data[:-21*24]\n\n# Separate the data into features and targets\ntarget_fields = ['cnt', 'casual', 'registered']\nfeatures, targets = data.drop(target_fields, axis=1), data[target_fields]\ntest_features, test_targets = test_data.drop(target_fields, axis=1), test_data[target_fields]", "_____no_output_____" ] ], [ [ "We'll split the data into two sets, one for training and one for validating as the network is being trained. Since this is time series data, we'll train on historical data, then try to predict on future data (the validation set).", "_____no_output_____" ] ], [ [ "# Hold out the last 60 days or so of the remaining data as a validation set\ntrain_features, train_targets = features[:-60*24], targets[:-60*24]\nval_features, val_targets = features[-60*24:], targets[-60*24:]", "_____no_output_____" ] ], [ [ "## Time to build the network\n\nBelow you'll build your network. We've built out the structure. You'll implement both the forward pass and backwards pass through the network. You'll also set the hyperparameters: the learning rate, the number of hidden units, and the number of training passes.\n\n<img src=\"assets/neural_network.png\" width=300px>\n\nThe network has two layers, a hidden layer and an output layer. The hidden layer will use the sigmoid function for activations. The output layer has only one node and is used for the regression, the output of the node is the same as the input of the node. That is, the activation function is $f(x)=x$. A function that takes the input signal and generates an output signal, but takes into account the threshold, is called an activation function. We work through each layer of our network calculating the outputs for each neuron. All of the outputs from one layer become inputs to the neurons on the next layer. This process is called *forward propagation*.\n\nWe use the weights to propagate signals forward from the input to the output layers in a neural network. We use the weights to also propagate error backwards from the output back into the network to update our weights. This is called *backpropagation*.\n\n> **Hint:** You'll need the derivative of the output activation function ($f(x) = x$) for the backpropagation implementation. If you aren't familiar with calculus, this function is equivalent to the equation $y = x$. What is the slope of that equation? That is the derivative of $f(x)$.\n\nBelow, you have these tasks:\n1. Implement the sigmoid function to use as the activation function. 
Set `self.activation_function` in `__init__` to your sigmoid function.\n2. Implement the forward pass in the `train` method.\n3. Implement the backpropagation algorithm in the `train` method, including calculating the output error.\n4. Implement the forward pass in the `run` method.\n ", "_____no_output_____" ] ], [ [ "#############\n# In the my_answers.py file, fill out the TODO sections as specified\n#############\n\nfrom my_answers import NeuralNetwork", "_____no_output_____" ], [ "def MSE(y, Y):\n return np.mean((y-Y)**2)", "_____no_output_____" ] ], [ [ "## Unit tests\n\nRun these unit tests to check the correctness of your network implementation. This will help you be sure your network was implemented correctly befor you starting trying to train it. These tests must all be successful to pass the project.", "_____no_output_____" ] ], [ [ "import unittest\n\ninputs = np.array([[0.5, -0.2, 0.1]])\ntargets = np.array([[0.4]])\ntest_w_i_h = np.array([[0.1, -0.2],\n [0.4, 0.5],\n [-0.3, 0.2]])\ntest_w_h_o = np.array([[0.3],\n [-0.1]])\n\nclass TestMethods(unittest.TestCase):\n \n ##########\n # Unit tests for data loading\n ##########\n \n def test_data_path(self):\n # Test that file path to dataset has been unaltered\n self.assertTrue(data_path.lower() == 'bike-sharing-dataset/hour.csv')\n \n def test_data_loaded(self):\n # Test that data frame loaded\n self.assertTrue(isinstance(rides, pd.DataFrame))\n \n ##########\n # Unit tests for network functionality\n ##########\n\n def test_activation(self):\n network = NeuralNetwork(3, 2, 1, 0.5)\n # Test that the activation function is a sigmoid\n self.assertTrue(np.all(network.activation_function(0.5) == 1/(1+np.exp(-0.5))))\n\n def test_train(self):\n # Test that weights are updated correctly on training\n network = NeuralNetwork(3, 2, 1, 0.5)\n network.weights_input_to_hidden = test_w_i_h.copy()\n network.weights_hidden_to_output = test_w_h_o.copy()\n \n network.train(inputs, targets)\n self.assertTrue(np.allclose(network.weights_hidden_to_output, \n np.array([[ 0.37275328], \n [-0.03172939]])))\n self.assertTrue(np.allclose(network.weights_input_to_hidden,\n np.array([[ 0.10562014, -0.20185996], \n [0.39775194, 0.50074398], \n [-0.29887597, 0.19962801]])))\n\n def test_run(self):\n # Test correctness of run method\n network = NeuralNetwork(3, 2, 1, 0.5)\n network.weights_input_to_hidden = test_w_i_h.copy()\n network.weights_hidden_to_output = test_w_h_o.copy()\n\n self.assertTrue(np.allclose(network.run(inputs), 0.09998924))\n\nsuite = unittest.TestLoader().loadTestsFromModule(TestMethods())\nunittest.TextTestRunner().run(suite)", ".....\n----------------------------------------------------------------------\nRan 5 tests in 0.008s\n\nOK\n" ] ], [ [ "## Training the network\n\nHere you'll set the hyperparameters for the network. The strategy here is to find hyperparameters such that the error on the training set is low, but you're not overfitting to the data. If you train the network too long or have too many hidden nodes, it can become overly specific to the training set and will fail to generalize to the validation set. That is, the loss on the validation set will start increasing as the training set loss drops.\n\nYou'll also be using a method know as Stochastic Gradient Descent (SGD) to train the network. The idea is that for each training pass, you grab a random sample of the data instead of using the whole data set. You use many more training passes than with normal gradient descent, but each pass is much faster. 
This ends up training the network more efficiently. You'll learn more about SGD later.\n\n### Choose the number of iterations\nThis is the number of batches of samples from the training data we'll use to train the network. The more iterations you use, the better the model will fit the data. However, this process can have sharply diminishing returns and can waste computational resources if you use too many iterations. You want to find a number here where the network has a low training loss, and the validation loss is at a minimum. The ideal number of iterations would be a level that stops shortly after the validation loss is no longer decreasing.\n\n### Choose the learning rate\nThis scales the size of weight updates. If this is too big, the weights tend to explode and the network fails to fit the data. Normally a good choice to start at is 0.1; however, if you effectively divide the learning rate by n_records, try starting out with a learning rate of 1. In either case, if the network has problems fitting the data, try reducing the learning rate. Note that the lower the learning rate, the smaller the steps are in the weight updates and the longer it takes for the neural network to converge.\n\n### Choose the number of hidden nodes\nIn a model where all the weights are optimized, the more hidden nodes you have, the more accurate the predictions of the model will be. (A fully optimized model could have weights of zero, after all.) However, the more hidden nodes you have, the harder it will be to optimize the weights of the model, and the more likely it will be that suboptimal weights will lead to overfitting. With overfitting, the model will memorize the training data instead of learning the true pattern, and won't generalize well to unseen data. \n\nTry a few different numbers and see how it affects the performance. You can look at the losses dictionary for a metric of the network performance. If the number of hidden units is too low, then the model won't have enough space to learn and if it is too high there are too many options for the direction that the learning can take. The trick here is to find the right balance in number of hidden units you choose. You'll generally find that the best number of hidden nodes to use ends up being between the number of input and output nodes.", "_____no_output_____" ] ], [ [ "import sys\n\n####################\n### Set the hyperparameters in you myanswers.py file ###\n####################\n\nfrom my_answers import iterations, learning_rate, hidden_nodes, output_nodes\n\n\nN_i = train_features.shape[1]\nnetwork = NeuralNetwork(N_i, hidden_nodes, output_nodes, learning_rate)\n\nlosses = {'train':[], 'validation':[]}\nfor ii in range(iterations):\n # Go through a random batch of 128 records from the training data set\n batch = np.random.choice(train_features.index, size=128)\n X, y = train_features.ix[batch].values, train_targets.ix[batch]['cnt']\n \n network.train(X, y)\n \n # Printing out the training progress\n train_loss = MSE(network.run(train_features).T, train_targets['cnt'].values)\n val_loss = MSE(network.run(val_features).T, val_targets['cnt'].values)\n sys.stdout.write(\"\\rProgress: {:2.1f}\".format(100 * ii/float(iterations)) \\\n + \"% ... Training loss: \" + str(train_loss)[:5] \\\n + \" ... 
Validation loss: \" + str(val_loss)[:5])\n sys.stdout.flush()\n \n losses['train'].append(train_loss)\n losses['validation'].append(val_loss)", "_____no_output_____" ], [ "plt.plot(losses['train'], label='Training loss')\nplt.plot(losses['validation'], label='Validation loss')\nplt.legend()\n_ = plt.ylim()", "_____no_output_____" ] ], [ [ "## Check out your predictions\n\nHere, use the test data to view how well your network is modeling the data. If something is completely wrong here, make sure each step in your network is implemented correctly.", "_____no_output_____" ] ], [ [ "fig, ax = plt.subplots(figsize=(8,4))\n\nmean, std = scaled_features['cnt']\npredictions = network.run(test_features).T*std + mean\nax.plot(predictions[0], label='Prediction')\nax.plot((test_targets['cnt']*std + mean).values, label='Data')\nax.set_xlim(right=len(predictions))\nax.legend()\n\ndates = pd.to_datetime(rides.ix[test_data.index]['dteday'])\ndates = dates.apply(lambda d: d.strftime('%b %d'))\nax.set_xticks(np.arange(len(dates))[12::24])\n_ = ax.set_xticklabels(dates[12::24], rotation=45)", "_____no_output_____" ] ], [ [ "## OPTIONAL: Thinking about your results(this question will not be evaluated in the rubric).\n \nAnswer these questions about your results. How well does the model predict the data? Where does it fail? Why does it fail where it does?\n\n> **Note:** You can edit the text in this cell by double clicking on it. When you want to render the text, press control + enter\n\n#### Your answer below", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
d0a557982bfab4be3dacf4ac777634c4d5ba0f11
3,841
ipynb
Jupyter Notebook
Matplotlib_interactive_functions.ipynb
furnstahl/binder_test
09c797d1775521f845a739a81e75cdf471f3a29c
[ "MIT" ]
null
null
null
Matplotlib_interactive_functions.ipynb
furnstahl/binder_test
09c797d1775521f845a739a81e75cdf471f3a29c
[ "MIT" ]
null
null
null
Matplotlib_interactive_functions.ipynb
furnstahl/binder_test
09c797d1775521f845a739a81e75cdf471f3a29c
[ "MIT" ]
null
null
null
25.437086
229
0.502994
[ [ [ "# Matplotlib interactive functions\n\nFrom https://matplotlib.org/gallery/event_handling/ginput_manual_clabel_sgskip.html#sphx-glr-gallery-event-handling-ginput-manual-clabel-sgskip-py", "_____no_output_____" ] ], [ [ "import time\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n%matplotlib qt\n\ndef tellme(s):\n print(s)\n plt.title(s, fontsize=16)\n plt.draw()", "_____no_output_____" ], [ "plt.clf()\nplt.axis([-1., 1., -1., 1.])\nplt.setp(plt.gca(), autoscale_on=False)\n\ntellme('You will define a triangle, click to begin')\n\nplt.waitforbuttonpress()\n\nwhile True:\n pts = []\n while len(pts) < 3:\n tellme('Select 3 corners with mouse')\n pts = np.asarray(plt.ginput(3, timeout=-1))\n if len(pts) < 3:\n tellme('Too few points, starting over')\n time.sleep(1) # Wait a second\n\n ph = plt.fill(pts[:, 0], pts[:, 1], 'r', lw=2)\n\n tellme('Happy? Key click for yes, mouse click for no')\n\n if plt.waitforbuttonpress():\n break\n\n # Get rid of fill\n for p in ph:\n p.remove()", "You will define a triangle, click to begin\n" ], [ "# Define a nice function of distance from individual pts\ndef f(x, y, pts):\n z = np.zeros_like(x)\n for p in pts:\n z = z + 1/(np.sqrt((x - p[0])**2 + (y - p[1])**2))\n return 1/z\n\n\nX, Y = np.meshgrid(np.linspace(-1, 1, 51), np.linspace(-1, 1, 51))\nZ = f(X, Y, pts)\n\nCS = plt.contour(X, Y, Z, 20)\n\ntellme('Use mouse to select contour label locations, middle button to finish')\nCL = plt.clabel(CS, manual=True)", "_____no_output_____" ], [ "tellme('Now do a nested zoom, click to begin')\nplt.waitforbuttonpress()\n\nwhile True:\n tellme('Select two corners of zoom, middle mouse button to finish')\n pts = np.asarray(plt.ginput(2, timeout=-1))\n\n if len(pts) < 2:\n break\n\n pts = np.sort(pts, axis=0)\n plt.axis(pts.T.ravel())\n\ntellme('All Done!')\nplt.show()\n", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code" ] ]
d0a58bae1710db63416c71f8eadba22391c71605
327,545
ipynb
Jupyter Notebook
HRNet_MARS_tutorial.ipynb
404ic/HRNet_for_MARS
a2e828a25ceef8a72671db84f6ab909b2aa9f184
[ "MIT" ]
null
null
null
HRNet_MARS_tutorial.ipynb
404ic/HRNet_for_MARS
a2e828a25ceef8a72671db84f6ab909b2aa9f184
[ "MIT" ]
null
null
null
HRNet_MARS_tutorial.ipynb
404ic/HRNet_for_MARS
a2e828a25ceef8a72671db84f6ab909b2aa9f184
[ "MIT" ]
null
null
null
1,780.13587
169,930
0.9222
[ [ [ "# HRNet for MARS Tutorial\n\nThis notebook will walk through using the [HRNet pose estimator](https://github.com/leoxiaobin/deep-high-resolution-net.pytorch) with the [data](https://data.caltech.edu/records/2011) used in the [Mouse Action Recognition System](https://www.biorxiv.org/content/10.1101/2020.07.26.222299v1).", "_____no_output_____" ], [ "## 0. Set up your environment\n\nClone this repository. \n\nFollow the instructions [here](https://github.com/leoxiaobin/deep-high-resolution-net.pytorch). Specifically, in the conda environment that you want to run this pose estimator, complete steps 1, 3, 4, 6, 7 under Installation in Quick Setup.\n\nAlso make an `annotations` directory.\n\nThen, download the MARS COCO Evaluation tools [here](https://github.com/neuroethology/MARS_pycocotools) in that same environment.\n\nYour root directory should look like this before running the model:\n```\n{PROJECT ROOT}\n├── HRNet_MARS_tutorial.ipynb\n├── LICENSE\n├── annotations\n├── data\n├── data_utils\n├── experiments\n├── lib\n├── log\n├── models\n├── output\n├── project_config.yaml\n├── requirements.txt\n└── tools\n```", "_____no_output_____" ], [ "## 1. Obtain the data \n\nVisit [this link](https://data.caltech.edu/records/2011) and download the data. As a reminder, you can `wget https://data.caltech.edu/records/2011` to download the data directly into the directory of your choice.\n\nWe will be using two configuration files, one for general information about your data, and the other for training the model and model parameters. The first is called `project_config.yaml` in the root of your project directory. The other will be in `experiments/mars`. \n1. Make sure you fill out `project_config.yaml` to fit your data. You will need to add the file name of your manifest file, and move that manifest file to the `annotations` directory. \n2. Also, add the images referred to in the manifest file into `annotations/raw_images`. ", "_____no_output_____" ], [ "## 2. Process and format the data\n\nAfter running the code cell below, organize the data like this, with `mars` residing in the `data` directory:\n```\nmars\n├── annotations\n ├── keypoints_[view]_test.json\n ├── keypoints_[view]_train.json\n └── keypoints_[view]_val.json\n├── images\n ├── MARS_[view]_00000.jpg\n ├── MARS_[view]_00001.jpg\n ├── MARS_[view]_00002.jpg\n ├── MARS_[view]_00003.jpg\n ├── MARS_[view]_00004.jpg\n └── ...\n```\n(For MARS, [view] corresponds to either front or top.)", "_____no_output_____" ] ], [ [ "import data_utils.process_keypoints as process_data\n\nproject = '/home/ubuntu/Desktop/ericykma/hrnet_notebook/HRNet_for_MARS'\nprocess_data.process_all_keypoints(project)", "_____no_output_____" ] ], [ [ "## 3. Train the model. \n\nPlace your config files in `experiments/mars`. See `example_config.yaml` for an example.\n\nThen, run `python tools/train.py experiments/mars/example_config.yaml` in the root of your project directory, replacing `example_config.yaml` with your config file to train your model.", "_____no_output_____" ], [ "You can view training and validation loss/accuracy curves using `python -m tensorboard.main --logdir=log` to tune your model.\n\nYou can also use the below visualization code to view model output examples. It uses predictions and ground truth annotations corresponding to the images in `DATASET.TEST_SET` in your configuration file, so be sure to correspond to those image numbers/IDs. 
You have the option to save the image in the `results` directory in the directory in `output` that corresponds to your configuration file.", "_____no_output_____" ] ], [ [ "import data_utils.visualize as visualize\n\nvisualize.plot_frame(project='/home/ubuntu/Desktop/ericykma/hrnet_notebook/HRNet_for_MARS', \n config_file='experiments/mars/w48_256x192_adam_lr1e-3_imagenet_pretrain.yaml',\n frame_num=13999,\n save=True\n )", "_____no_output_____" ] ], [ [ "## 4. Test your model\n\nYou can run `python tools/test.py experiments/mars/example_config.yaml` to test your model. It will test the file under `DATASET.TEST_SET`, so if you are ready to evaluate your model on the test set, change that parameter to `test`.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ] ]
d0a5985238acd50793d706546d19f2f650ca800c
8,977
ipynb
Jupyter Notebook
notebook/NonStiffODE/Pleiades_wpd.ipynb
Vaibhavdixit02/DiffEqBenchmarks.jl
32a0cb01741b2e4a490933ee0ec3f775080f3a72
[ "MIT" ]
null
null
null
notebook/NonStiffODE/Pleiades_wpd.ipynb
Vaibhavdixit02/DiffEqBenchmarks.jl
32a0cb01741b2e4a490933ee0ec3f775080f3a72
[ "MIT" ]
null
null
null
notebook/NonStiffODE/Pleiades_wpd.ipynb
Vaibhavdixit02/DiffEqBenchmarks.jl
32a0cb01741b2e4a490933ee0ec3f775080f3a72
[ "MIT" ]
null
null
null
48.263441
866
0.579592
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
d0a59d8304c4ce23c458cb3821efbb3ebba07406
176,575
ipynb
Jupyter Notebook
assignments/assignment03/NumpyEx01.ipynb
CalPolyPat/phys202-2015-work
55529cac618e18bb5f1c7dfbb101784b878912a5
[ "MIT" ]
null
null
null
assignments/assignment03/NumpyEx01.ipynb
CalPolyPat/phys202-2015-work
55529cac618e18bb5f1c7dfbb101784b878912a5
[ "MIT" ]
null
null
null
assignments/assignment03/NumpyEx01.ipynb
CalPolyPat/phys202-2015-work
55529cac618e18bb5f1c7dfbb101784b878912a5
[ "MIT" ]
null
null
null
424.459135
95,506
0.584165
[ [ [ "# Numpy Exercise 1", "_____no_output_____" ], [ "## Imports", "_____no_output_____" ] ], [ [ "import numpy as np\n%matplotlib inline\nimport matplotlib.pyplot as plt\nimport seaborn as sns", "_____no_output_____" ], [ "import antipackage\nimport github.ellisonbg.misc.vizarray as va", "_____no_output_____" ] ], [ [ "## Checkerboard", "_____no_output_____" ], [ "Write a Python function that creates a square `(size,size)` 2d Numpy array with the values `0.0` and `1.0`:\n\n* Your function should work for both odd and even `size`.\n* The `0,0` element should be `1.0`.\n* The `dtype` should be `float`.", "_____no_output_____" ] ], [ [ "def checkerboard(size):\n \"\"\"Return a 2d checkboard of 0.0 and 1.0 as a NumPy array\"\"\"\n board = np.ones((size,size), dtype=float)\n for i in range(size):\n if i%2==0:\n board[i,1:size:2]=0\n else:\n board[i,0:size:2]=0\n va.enable()\n return board\ncheckerboard(10)", "_____no_output_____" ], [ "a = checkerboard(4)\nassert a[0,0]==1.0\nassert a.sum()==8.0\nassert a.dtype==np.dtype(float)\nassert np.all(a[0,0:5:2]==1.0)\nassert np.all(a[1,0:5:2]==0.0)\n\nb = checkerboard(5)\nassert b[0,0]==1.0\nassert b.sum()==13.0\nassert np.all(b.ravel()[0:26:2]==1.0)\nassert np.all(b.ravel()[1:25:2]==0.0)", "_____no_output_____" ] ], [ [ "Use `vizarray` to visualize a checkerboard of `size=20` with a block size of `10px`.", "_____no_output_____" ] ], [ [ "va.set_block_size(10)\ncheckerboard(20)", "_____no_output_____" ], [ "assert True", "_____no_output_____" ] ], [ [ "Use `vizarray` to visualize a checkerboard of `size=27` with a block size of `5px`.", "_____no_output_____" ] ], [ [ "va.set_block_size(5)\ncheckerboard(27)", "_____no_output_____" ], [ "assert True", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
d0a5a324ad6306d680b028e6e3dd75a26aec74b0
10,755
ipynb
Jupyter Notebook
3_stack.ipynb
nagahamaVH/meli2020
dcbe5962a9d670151d6c4bb9d877a83d1b89d5c9
[ "MIT" ]
21
2020-12-01T19:00:25.000Z
2021-09-06T22:30:03.000Z
3_stack.ipynb
nagahamaVH/meli2020
dcbe5962a9d670151d6c4bb9d877a83d1b89d5c9
[ "MIT" ]
null
null
null
3_stack.ipynb
nagahamaVH/meli2020
dcbe5962a9d670151d6c4bb9d877a83d1b89d5c9
[ "MIT" ]
3
2020-12-01T20:03:26.000Z
2021-09-09T02:22:15.000Z
29.225543
144
0.54291
[ [ [ "import pandas as pd\nimport numpy as np\n%matplotlib inline\nimport joblib\n\nimport json\nimport tqdm\n\nimport glob\n\nimport numba\nimport dask\nimport xgboost\nfrom dask.diagnostics import ProgressBar\nimport re\nProgressBar().register()\nfold1, fold2 = joblib.load(\"./valid/fold1.pkl.z\"), joblib.load(\"./valid/fold2.pkl.z\")", "_____no_output_____" ], [ "train = pd.read_parquet(\"./data/train.parquet\")\ntrain_melt = pd.read_parquet(\"./data/22c_train_melt_with_features.parquet\")\ntest_melt = pd.read_parquet(\"./data/22c_test_melt_with_features.parquet\")", "_____no_output_____" ], [ "test_melt.head()", "_____no_output_____" ], [ "item_data = pd.read_parquet(\"./data/item_data.parquet\")\nitem_data.head()\n\nitem_title_map = item_data[['item_id', 'title']].drop_duplicates()\nitem_title_map = item_title_map.set_index(\"item_id\").squeeze().to_dict()\n\nitem_price_map = item_data[['item_id', 'price']].drop_duplicates()\nitem_price_map = item_price_map.set_index(\"item_id\").squeeze().to_dict()\n\nitem_domain_map = item_data[['item_id', 'domain_id']].drop_duplicates()\nitem_domain_map = item_domain_map.set_index(\"item_id\").squeeze().to_dict()", "_____no_output_____" ] ], [ [ "# stack gen", "_____no_output_____" ] ], [ [ "%%time\nlog_pos = np.log1p(np.arange(1,11))\nbest_sellers = [1587422, 1803710, 10243, 548905, 1906937, 716822, 1361154, 1716388, 725371, 859574]\nbest_sellers_domain = [item_domain_map[e] for e in best_sellers]\n\ndef pad(lst):\n \n if len(lst) == 0:\n return best_sellers\n if len(lst) < 10:\n lst += best_sellers[:(10 - len(lst))]\n return np.array(lst)\n\ndef pad_str(lst):\n if len(lst) == 0:\n return best_sellers_domain\n if len(lst) < 10:\n lst += best_sellers_domain[:(10 - len(lst))]\n return lst\n\n# this is wrong, double counts exact item hits\ndef ndcg_vec(ytrue, ypred, ytrue_domain, ypred_domain):\n relevance = np.zeros((ypred.shape[0], 10))\n for i in range(10):\n relevance[:, i] = np.equal(ypred_domain[:, i], ytrue_domain) * (np.equal(ypred[:, i], ytrue) * 12 + 1)\n dcg = (relevance / log_pos).sum(axis=1)\n\n i_relevance = np.ones(10)\n i_relevance[0] = 12.\n idcg = np.zeros(ypred.shape[0]) + (i_relevance / log_pos).sum()\n\n return (dcg / idcg).mean()", "_____no_output_____" ], [ "%%time\ntr_list = glob.glob(\"./stack_2f/*_train.parquet\")\nts_list = glob.glob(\"./stack_2f/*_test.parquet\")\n\ntrain = train_melt[['seq_index','event_info','has_bought', 'item_domain', 'bought_domain', 'bought_id', 'y_rank']].copy()\nfor f in tr_list:\n fname = re.search('/(\\d[\\d\\w]+)_', f).group(1)\n fdf = pd.read_parquet(f).rename(columns={\"p\": fname})\n train = pd.merge(train, fdf, on=['seq_index','event_info'])\n \ntrain = train.sort_values(\"seq_index\")\n \ntest = test_melt[['seq_index','event_info']].copy()\nfor f in ts_list:\n fname = re.search('/(\\d[\\d\\w]+)_', f).group(1)\n fdf = pd.read_parquet(f).rename(columns={\"p\": fname})\n test = pd.merge(test, fdf, on=['seq_index','event_info'])\n \ntest = test.sort_values(\"seq_index\")", "_____no_output_____" ], [ "train.head()", "_____no_output_____" ], [ "test.head()", "_____no_output_____" ], [ "train.columns", "_____no_output_____" ], [ "from sklearn.model_selection import GroupKFold\nfrom cuml.preprocessing import TargetEncoder\n\n\nstack_p = list()\nfor f1, f2 in [(fold1, fold2), (fold2, fold1)]:\n Xtr = train[train['seq_index'].isin(f1)]\n Xval = train[train['seq_index'].isin(f2)]\n\n\n features = ['22c', '26']\n\n params = [0.1, 3, 1, 0.5, 1.]\n learning_rate, max_depth, min_child_weight, subsample, 
colsample_bytree = params\n\n\n Xtrr, ytr = Xtr[features], Xtr['y_rank']\n Xvall = Xval[features]\n \n groups = Xtr.groupby('seq_index').size().values\n\n mdl = xgboost.XGBRanker(seed=0, tree_method='gpu_hist', gpu_id=0, n_estimators=100,\n learning_rate=learning_rate, max_depth=max_depth, min_child_weight=min_child_weight,\n subsample=subsample, colsample_bytree=colsample_bytree, objective='rank:pairwise', num_parallel_tree=5)\n\n mdl.fit(Xtrr, ytr, group=groups)\n\n p = mdl.predict(Xvall)\n\n preds = Xval[['seq_index', 'has_bought', 'item_domain', 'bought_domain', 'event_info', 'bought_id']].copy()\n preds['p'] = p\n \n preds = preds.sort_values('p', ascending=False).drop_duplicates(subset=['seq_index', 'event_info'])\n\n ytrue = preds.groupby(\"seq_index\")['bought_id'].apply(lambda x: x.iloc[0]).values\n ytrue_domain = preds.groupby(\"seq_index\")['bought_domain'].apply(lambda x: x.iloc[0]).values\n\n ypred = preds.groupby(\"seq_index\")['event_info'].apply(lambda x: pad(x.iloc[:10].tolist()))\n ypred = np.array(ypred.tolist())\n\n ypred_domain = preds.groupby(\"seq_index\")['item_domain'].apply(lambda x: pad_str(x.iloc[:10].tolist()))\n ypred_domain = np.array(ypred_domain.tolist())\n\n print(ndcg_vec(ytrue, ypred, ytrue_domain, ypred_domain))", "_____no_output_____" ] ], [ [ "# test", "_____no_output_____" ] ], [ [ "groups = train.groupby('seq_index').size().values\nlearning_rate, max_depth, min_child_weight, subsample, colsample_bytree = params\nmdl = xgboost.XGBRanker(seed=0, tree_method='gpu_hist', gpu_id=0, n_estimators=100,\n learning_rate=learning_rate, max_depth=max_depth, min_child_weight=min_child_weight,\n subsample=subsample, colsample_bytree=colsample_bytree, objective='rank:pairwise', num_parallel_tree=5)\nmdl.fit(train[features], train['y_rank'], group=groups)", "_____no_output_____" ], [ "test[features].head()", "_____no_output_____" ], [ "p = mdl.predict(test[features])", "_____no_output_____" ], [ "preds = test[['seq_index', 'event_info']].copy()\npreds['p'] = p\npreds = preds.sort_values('p', ascending=False).drop_duplicates(subset=['seq_index', 'event_info'])", "_____no_output_____" ], [ "def pad(lst):\n pad_candidates = [1587422, 1803710, 10243, 548905, 1906937, 716822, 1361154, 1716388, 725371, 859574]\n if len(lst) == 0:\n return pad_candidates\n if len(lst) < 10:\n lst += [lst[0]] * (10 - len(lst)) # pad_candidates[:(10 - len(lst))]\n return np.array(lst)\nypred = preds.groupby(\"seq_index\")['event_info'].apply(lambda x: pad(x.iloc[:10].tolist()))\nseq_index = ypred.index\nypred = np.array(ypred.tolist())\n", "_____no_output_____" ], [ "ypred_final = np.zeros((177070, 10))\nypred_final[seq_index, :] = ypred\nno_views = np.setdiff1d(np.arange(177070), seq_index)\n#ypred_final[no_views, :] = np.array([1587422, 1803710, 10243, 548905, 1906937, 716822, 1361154, 1716388, 725371, 859574])\nypred_final = ypred_final.astype(int)", "_____no_output_____" ], [ "#permite produtos repetidos\npd.DataFrame(ypred_final).to_csv(\"./subs/27.csv\", index=False, header=False)", "_____no_output_____" ], [ "test['seq_index'].max()", "_____no_output_____" ], [ "!wc -l ./subs/27.csv", "_____no_output_____" ], [ "!head ./subs/27.csv", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a5bae3051dab89a755c9a7ac8d3d66a953f833
324,308
ipynb
Jupyter Notebook
AppendixB.ipynb
User-zwj/Thesis_Wenjuan_Code
a9d18f8cb7354f5a22b1f3336c8721d07c368235
[ "MIT" ]
null
null
null
AppendixB.ipynb
User-zwj/Thesis_Wenjuan_Code
a9d18f8cb7354f5a22b1f3336c8721d07c368235
[ "MIT" ]
null
null
null
AppendixB.ipynb
User-zwj/Thesis_Wenjuan_Code
a9d18f8cb7354f5a22b1f3336c8721d07c368235
[ "MIT" ]
null
null
null
255.763407
39,028
0.911313
[ [ [ "import os\nimport time\nimport numpy as np\nimport tensorflow as tf\nfrom tensorflow.random import set_seed\nfrom math import factorial\nfrom scipy.stats import norm\nfrom scipy.integrate import odeint\nimport numpy.polynomial.hermite_e as H \nfrom sklearn.preprocessing import StandardScaler\nimport dolfin as fn\nfrom numpy.polynomial.legendre import leggauss\nimport matplotlib.pyplot as plt", "_____no_output_____" ], [ "####### Plot Formatting ######\nplt.rc('lines', linewidth = 4)\nplt.rc('xtick', labelsize = 13)\nplt.rc('ytick', labelsize = 13)\nplt.rc('legend',fontsize=14)\nplt.rcParams[\"font.family\"] = \"serif\"\nplt.rcParams['axes.labelsize'] = 18\nplt.rcParams['axes.titlesize'] = 15\nplt.rcParams['lines.markersize'] = 8\nplt.rcParams['figure.figsize'] = (7.0, 5.0)", "_____no_output_____" ], [ "#### To make it cleaner, create Directory \"images\" to store all the figures ####\nimagepath = os.path.join(os.getcwd(),\"images\")\nos.makedirs(imagepath,exist_ok=True)", "_____no_output_____" ] ], [ [ "# PCE vs MC\n\n$$ \\frac{dy(t)}{dt} = -\\lambda y, \\ \\ y(0)=1 $$\n$$ y(t) = e^{-\\lambda t} $$\n$$QoI = y(T)$$\nwhere $T$ is a fixed time point. $\\Lambda = (-\\infty, \\infty)$, $\\mathcal{D}=(0,\\infty)$", "_____no_output_____" ], [ "## Polynomial Chaos", "_____no_output_____" ] ], [ [ "start_def = time.time()\n\ndef Phi(n):\n #define H_n\n coeffs = [0]*(n+1)\n coeffs[n] = 1\n return coeffs\n\ndef inner2_herm(n): ###return the denominator when computing $k_i$\n return factorial(n)\n\ndef product3_herm(i,j,l):\n #compute \\Phi_i*\\Phi_j*\\Phi_l\n return lambda x: H.hermeval(x, H.hermemul(H.hermemul(Phi(i),Phi(j)),Phi(l))) \n\ndef inner3_herm(P,i,j,l):\n #compute <\\Phi_i\\Phi_j\\Phi_l>\n \n #Set up Gauss-Hermite quadrature, weighting function is exp^{-x^2}\n m=(P+1)**2\n x, w=H.hermegauss(m) \n inner=sum([product3_herm(i,j,l)(x[idx]) * w[idx] for idx in range(m)]) \n \n return inner/np.sqrt(2*np.pi) #because of the weight\n\ntime_def = time.time() - start_def", "_____no_output_____" ], [ "start_prep = time.time()\n\nP=4\nki_herm = [0,1]+[0]*(P-1)\nInner3_herm = np.zeros((P+1,P+1,P+1)) #store all inner3_herm values\nInner2_herm = np.zeros(P+1)\n\nfor i in range(P+1):\n for j in range(P+1):\n for l in range(P+1):\n Inner3_herm[i,j,l] = inner3_herm(P,i,j,l)\n\nfor i in range(P+1):\n Inner2_herm[i] = inner2_herm(i)\n \ntime_prep = time.time() - start_prep", "_____no_output_____" ], [ "start_ode = time.time()\n\ndef ode_system_herm(y, t, P): \n #P indicates the highest degree\n dydt = np.zeros(P+1) \n for l in range(len(dydt)):\n dydt[l] = -(sum(sum(Inner3_herm[i,j,l]*ki_herm[i]*y[j] for j in range(P+1)) for i in range(P+1)))/Inner2_herm[l]\n return dydt\n \ntime_ode = time.time() - start_ode", "_____no_output_____" ], [ "start_solveode = time.time()\n\nsol_herm = odeint(ode_system_herm, [1.0]+[0.0]*P, np.linspace(0,1,101), args=(P, )) \n\ntime_solveode = time.time() - start_solveode", "_____no_output_____" ], [ "time_all = time_def + time_prep + time_ode + time_solveode", "_____no_output_____" ] ], [ [ "## Monte Carlo", "_____no_output_____" ] ], [ [ "start_ode_mc = time.time()\n\ndef ode(y,t,nsample,k):\n '''\n Build the ode system\n '''\n dydt = np.zeros(nsample)\n for i in range(nsample):\n dydt[i] = -k[i]*y[i]\n return dydt\n\ntime_def_mc = time.time() - start_ode_mc", "_____no_output_____" ], [ "nsample = np.array([10, 100, 1000, 10000, 100000])\ntime_solveode_mc = np.zeros(len(nsample))\nstart_solveode_mc = np.zeros(len(nsample))\nmean_mc_1 = np.zeros(len(nsample))\nmean_mc_05 = 
np.zeros(len(nsample))\n\nfor i in range(len(nsample)):\n k = norm.rvs(loc=0, scale=1, size=nsample[i], random_state=12345)\n start_solveode_mc[i] = time.time()\n\n sol_mc = odeint(ode, [1.0]*nsample[i], np.linspace(0,1,101),args=(nsample[i],k)) #t:np.linspace(0,1,101)\n mean_mc_1[i] = np.mean(sol_mc[100,:])\n mean_mc_05[i] = np.mean(sol_mc[50,:])\n\n time_solveode_mc[i] = time.time() - start_solveode_mc[i]\n\ntime_all_mc = time_def_mc + time_solveode_mc", "_____no_output_____" ] ], [ [ "## Comparison", "_____no_output_____" ], [ "### Computing time", "_____no_output_____" ] ], [ [ "#### Table 7.2, row 1 ####\n\n### PCE\nprint(time_solveode) \n### MC\nprint(time_solveode_mc)", "0.016252994537353516\n[8.58545303e-04 7.44271278e-03 7.87193775e-02 7.58476496e-01\n 8.74571085e+00]\n" ] ], [ [ "### Mean value at $t=1$, $t=0.5$\n \n \n#### Sample size = 1000 for MC", "_____no_output_____" ] ], [ [ "## t = 0.5\nmean_pc_05 = sol_herm[:,0][50] #mean value using pc at t=0.5\nmean_exact_05 = np.e**(1/8)\n\n## t = 1\nmean_pc_1 = sol_herm[:,0][100] #mean value using pc at t=1\nmean_exact_1 = np.e**(1/2)\n\n#### Table 7.2, row 2 ####\nprint(mean_pc_05)\nprint(mean_mc_05)\nprint(mean_exact_05)\n\nprint()\n\n#### Table 7.2, row 3 ####\nprint(mean_pc_1)\nprint(mean_mc_1)\nprint(mean_exact_1)", "1.1331484337155744\n[0.84243543 1.12379462 1.12573071 1.13969688 1.13387003]\n1.1331484530668263\n\n1.6486794979185284\n[0.80759128 1.64191001 1.58804937 1.67483542 1.65016842]\n1.6487212707001282\n" ] ], [ [ "# NN vs Poly\n\nFinite difference:\n$$\n\\frac{y_{i+1} - y_i}{\\Delta t} = -\\lambda y_i\n$$\nso\n$$\ny_{i+1} = -\\lambda\\Delta t y_i + y_i = (1-\\lambda\\Delta t)y_i\n$$\n\nDefine $n := \\frac{0.5}{\\Delta t}$, then\n$$\n\\hat{Q}(\\lambda)=y(0.5) = y_n = (1-\\lambda\\Delta t)^n y_0 = (1-\\lambda\\Delta t)^n\n$$\n\n## NN", "_____no_output_____" ] ], [ [ "#######################################\n#define the activation function\ndef rbf(x):\n return tf.math.exp(-x**2)\n\n#######################################\n#define the derivative of the activation function\ndef d_rbf(x):\n return tf.gradients(rbf,x)\n\n#######################a################\n#we couldn't use “tf_d_leaky_relu_6” as an activation function if we wanted to \n#because tensorflow doesn't know how to calculate the gradients of that function.\ndef rbf_grad(op, grad):\n x = op.inputs[0]\n n_gr = d_rbf(x) #defining the gradient.\n return grad * n_gr\n\ndef py_func(func, inp, Tout, stateful=True, name=None, grad=None):\n # Need to generate a unique name to avoid duplicates:\n rnd_name = 'PyFuncGrad' + str(np.random.randint(0, 1E+2))\n tf.RegisterGradient(rnd_name)(grad)\n g = tf.get_default_graph()\n with g.gradient_override_map({\"PyFunc\": rnd_name, \"PyFuncStateless\": rnd_name}):\n return tf.py_func(func, inp, Tout, stateful=stateful, name=name)\n \ndef tf_rbf(x,name=None):\n with tf.name_scope(name, \"rbf\", [x]) as name:\n y = py_func(rbf, #forward pass function\n [x],\n [tf.float32],\n name=name,\n grad= rbf_grad) #the function that overrides gradient\n y[0].set_shape(x.get_shape()) #when using with the code, it is used to specify the rank of the input.\n return y[0]", "_____no_output_____" ], [ "np.random.seed(12345)\nsize = 100\ndelta_t = 0.01\nn = int(0.5/delta_t)\n### Original data ###\nlam_in = np.random.normal(0, 1, size)\ny_exact = np.array([np.exp(-i*0.5) for i in lam_in])\ny_out = np.array([(1-i*delta_t)**n for i in lam_in])\n### After feature scaling ###\nscaler = StandardScaler()\ndata_trans = 
scaler.fit_transform(lam_in.reshape(-1,1))", "_____no_output_____" ], [ "num_neuron = 5\ntf.random.set_seed(12345)\nmodel_ode = tf.keras.Sequential()\nmodel_ode.add(tf.keras.layers.Dense(num_neuron,activation=rbf))\nmodel_ode.add(tf.keras.layers.Dense(1))\n\nmodel_ode.compile(loss='mean_squared_error', optimizer=tf.keras.optimizers.Adam(0.01))\nmodel_ode.fit(data_trans[:,0],y_out, epochs=1500, verbose=0)", "_____no_output_____" ], [ "preds_ode = []\nfor j in data_trans[:,0]:\n preds_ode.append(model_ode.predict([j]))\n\npreds_ode_shaped = tf.reshape(tf.constant(np.array(preds_ode)),len(preds_ode))\nmse_fd_nn = tf.keras.losses.MSE(y_exact,preds_ode_shaped).numpy()", "_____no_output_____" ], [ "fig = plt.figure()\nplt.xlabel(\"$\\lambda$\")\nplt.ylabel(\"$q$\")\nplt.title(\"MSE=%.5f\"%(mse_fd_nn))\nplt.scatter(lam_in, y_out, label='Obs')\nplt.scatter(lam_in, preds_ode_shaped, label='NN')\nplt.legend()\nplt.show();\nfig.savefig(\"images/comp_fd_nn.png\")", "_____no_output_____" ] ], [ [ "## Polynomial Regression\n`np.polyfit`", "_____no_output_____" ] ], [ [ "######## With feature scaling ############\nmymodel1 = np.poly1d(np.polyfit(data_trans[:,0], y_out, 1))\npreds_fd_pr1 = mymodel1(data_trans[:,0])\nmse_fd_pr1 = tf.keras.losses.MSE(y_exact,preds_fd_pr1).numpy()\n\nfig = plt.figure()\nplt.xlabel(\"$\\lambda$\")\nplt.ylabel(\"$q$\")\nplt.title(\"MSE=%.5f\"%(mse_fd_pr1))\nplt.scatter(lam_in, y_out, label='Obs')\nplt.scatter(lam_in, preds_fd_pr1, label='PR (deg=1)')\nplt.legend()\nplt.show();\nfig.savefig(\"images/comp_fd_pr1.png\")", "_____no_output_____" ], [ "########## With feature scaling #############\nmymodel2 = np.poly1d(np.polyfit(data_trans[:,0], y_out, 2))\npreds_fd_pr2 = mymodel2(data_trans[:,0])\nmse_fd_pr2 = tf.keras.losses.MSE(y_exact,preds_fd_pr2).numpy()\n\nfig = plt.figure()\nplt.xlabel(\"$\\lambda$\")\nplt.ylabel(\"$q$\")\nplt.title(\"MSE=%.5f\"%(mse_fd_pr2))\nplt.scatter(lam_in, y_out, label='Obs')\nplt.scatter(lam_in, preds_fd_pr2, label='PR (deg=2)')\nplt.legend();\nfig.savefig(\"images/comp_fd_pr2.png\")", "_____no_output_____" ], [ "########## With feature scaling #############\nmymodel3 = np.poly1d(np.polyfit(data_trans[:,0], y_out, 3))\npreds_fd_pr3 = mymodel3(data_trans[:,0])\nmse_fd_pr3 = tf.keras.losses.MSE(y_exact,preds_fd_pr3).numpy()\n\nfig = plt.figure()\nplt.xlabel(\"$\\lambda$\")\nplt.ylabel(\"$q$\")\nplt.title(\"MSE=%.5f\"%(mse_fd_pr3))\nplt.scatter(lam_in, y_out, label='Obs')\nplt.scatter(lam_in, preds_fd_pr3, label='PR (deg=3)')\nplt.legend()\nplt.show();\nfig.savefig(\"images/comp_fd_pr3.png\")", "_____no_output_____" ] ], [ [ "# Stochastic collocation method\n\n\\begin{align*}\n- \\nabla\\cdot(A\\nabla u) &= (e^{\\lambda_1}\\lambda_1^2\\pi^2 + e^{\\lambda_2}\\lambda_2^2\\pi^2)u \\\\\nu &= 0 \\, \\text{ on } \\Gamma_0 \\,\\text{( Left edge)}\\\\\n(A\\nabla u)\\cdot n &= -e^{\\lambda_2}\\lambda_2\\pi \\sin\\lambda_1\\pi x\\sin \\lambda_2\\pi y \\, \\text{ on } \\Gamma_1 \\, \\text{( Top edge)}\\\\\n(A\\nabla u)\\cdot n &= e^{\\lambda_2}\\lambda_2\\pi \\sin\\lambda_1\\pi x\\sin \\lambda_2\\pi y \\, \\text{ on } \\Gamma_2 \\,\\text{( Bottom edge)}\\\\\n(A\\nabla u)\\cdot n &= e^{\\lambda_1}\\lambda_1\\pi \\cos\\lambda_1\\pi x\\cos \\lambda_2\\pi y \\, \\text{ on } \\Gamma_3 \\,\\text{( Right edge)}\\\\\n\\end{align*}\n\nwhere \n$$ A = \\begin{bmatrix} e^{\\lambda_1} & 0 \\\\ 0 & e^{\\lambda_2} \\end{bmatrix} $$\nand $(x,y)\\in\\Omega = [0,1]\\times [0,1]$, $(\\lambda_1,\\lambda_2)\\in\\Lambda=[0,1]\\times [0,1]$\n\n<font color = red>**Exact solution:**\n$$ 
u(x,y;\\lambda_1,\\lambda_2) = \\sin \\lambda_1\\pi x \\cos \\lambda_2 \\pi y$$\n</font>\nQoI is:\n$$\nQ(\\lambda_1,\\lambda_2) = u(x_0,y_0;\\lambda_1,\\lambda_2)\n$$\n----", "_____no_output_____" ], [ "$\\lambda_1,\\lambda_2\\sim U(0,1)$\n\n**In theory:**\n\\begin{align*}\n\\overline{u}(x,y;\\lambda_1,\\lambda_2) &= \\int_0^1 \\int_0^1\\sin \\lambda_1\\pi x \\cos \\lambda_2 \\pi y \\, d\\lambda_1d\\lambda_2\\\\\n&= \\int_0^1\\sin \\lambda_1\\pi x\\, d\\lambda_1 \\int_0^1 \\cos \\lambda_2 \\pi y\\, d\\lambda_2\\\\\n&= \\left(-\\frac{1}{\\pi x}\\cos(\\lambda_1\\pi x)\\biggr\\rvert_0^1 \\right)\\left(\\frac{1}{\\pi y}\\sin(\\lambda_2\\pi y)\\biggr\\rvert_0^1 \\right)\\\\\n&= \\left(-\\frac{\\cos(\\pi x) - 1}{\\pi x}\\right)\\frac{\\sin(\\pi y)}{\\pi y}\n\\end{align*}\n\n\nQoI is:\n$$\nQ(\\lambda_1,\\lambda_2) = u(x_0,y_0;\\lambda_1,\\lambda_2)\n$$\nso\n<font color=red>\n \n$$\n\\overline{Q} = \\left(-\\frac{\\cos(\\pi x_0) - 1}{\\pi x_0}\\right)\\frac{\\sin(\\pi y_0)}{\\pi y_0}\n$$\n</font>\n\nWhen $x_0=y_0=0.5$,\n$$\n\\overline{Q} = \\frac{4}{\\pi^2}\n$$", "_____no_output_____" ], [ "--- \n\n$\\lambda = (\\lambda_1, \\lambda_2)$, $x=(x,y)$, $L_k$ is Lagrange polynomial\n\n$$\n\\hat{u}(\\lambda, x) = \\sum_{k=1}^M u(\\lambda_k, x)L_k(\\lambda)\n$$\n\n**In Practice:**\n\n**Step 1:**\n- Use numpy.polynomial.legendre.leggauss\n - Quadrature point of $\\lambda_1:$ $\\left(\\xi_i^{(1)}\\right)_{i=1}^M$ $\\in [-1,1]$\n - Quadrature point of $\\lambda_2:$ $\\left(\\xi_j^{(2)}\\right)_{j=1}^N$ $\\in [-1,1]$\n- Then the quadrature points in $[0,1]$ are\n - $\\lambda_1:$ $\\frac{1}{2}\\xi_i^{(1)}+\\frac{1}{2}$, $i=1,2,\\cdots,M$\n - $\\lambda_2:$ $\\frac{1}{2}\\xi_j^{(2)}+\\frac{1}{2}$, $j=1,2,\\cdots,N$\n\nNotice \n$$\nL_{ij}(\\lambda_1,\\lambda_2) = L_i^{(1)}(\\lambda_1) L_j^{(2)}(\\lambda_2)\n$$\n\nwhere $L_i^{(1)}$ corresponds to $\\frac{1}{2}\\xi_i^{(1)}+\\frac{1}{2}$, $L_j^{(2)}$ corresponds to $\\frac{1}{2}\\xi_j^{(2)}+\\frac{1}{2}$\n\n\n**Step 2:**\n- For $\\lambda_1=\\frac{1}{2}\\xi_i^{(1)}+\\frac{1}{2}$, $\\lambda_2=\\frac{1}{2}\\xi_j^{(2)}+\\frac{1}{2}$, use FEM to solve for $u$ evaluated at $x=x_0, y=y_0$. 
Use notation $u_{ij}$ to indicate the value\n\n\\begin{align*}\n\\mathbb{E}[\\hat{u}] &= \\sum_{i=1,j=1}^{M,N} u_{ij} \\int_{\\Lambda} L_{ij}(\\lambda) \\rho(\\lambda) \\, d\\lambda\\\\\n&\\approx \\sum_{i=1,j=1}^{M,N} u_{ij} \\int_0^1 L_i^{(1)}(\\lambda_1)\\, d\\lambda_1 \\int_0^1 L_j^{(2)}(\\lambda_2)\\, d\\lambda_2\\\\\n&\\approx \\sum_{i=1,j=1}^{M,N} u_{ij} \\left( \\frac{1}{2}\\sum_{k=1}^M w_k L_i^{(1)}\\left(\\frac{1}{2}\\xi_k^{(1)} + \\frac{1}{2} \\right) \\right) \\left( \\frac{1}{2}\\sum_{l=1}^N w_l L_j^{(2)}\\left(\\frac{1}{2}\\xi_l^{(2)} + \\frac{1}{2} \\right) \\right)\\\\\n&= \\sum_{i=1,j=1}^{M,N} u_{ij} \\frac{w_i}{2} \\frac{w_j}{2}\\\\\n&= \\frac{1}{4} \\sum_{i=1,j=1}^{M,N} u_{ij} w_i w_j\n\\end{align*}\n\n**Step3:**\n\nCompare with $\\overline{Q} = \\left(-\\frac{\\cos(\\pi x_0) - 1}{\\pi x_0}\\right)\\frac{\\sin(\\pi y_0)}{\\pi y_0}$\n\n----", "_____no_output_____" ], [ "<font color=red>**Extra reference**</font>\n\nGauss–Legendre quadrature [-1,1]\n\n$$\n\\int_a^b f(x)\\, dx \\approx \\frac{b-a}{2}\\sum_{i=1}^n w_i f\\left(\\frac{b-a}{2}\\xi_i + \\frac{a+b}{2} \\right)\n$$\n\n$a=0, b=1$\n\n$$\n\\int_0^1 f(x)\\, dx \\approx \\frac{1}{2}\\sum_{i=1}^n w_i f\\left(\\frac{1}{2}\\xi_i + \\frac{1}{2} \\right)\n$$\n\nOur input $\\lambda$ is in a 2-dim space, and \n$$\nL_k(\\lambda) = L_k^{(1)}(\\lambda_1)L_k^{(2)}(\\lambda_2)\n$$\nFor example, we have\n\n| $\\lambda_1$ | $\\lambda_2$ | $f$ |\n| --- | --- | --- |\n| $x_1$ | $y_1$ | $f_1$ |\n| $x_2$ | $y_2$ | $f_2$ |\n| $x_3$ | $y_3$ | $f_3$ |\n\nThen \n$$\nL_1(\\lambda_1, \\lambda_2) = \\frac{(\\lambda_1 - x_2)(\\lambda_1 - x_3)(\\lambda_2 - y_2)(\\lambda_2 - y_3)}{(x_1 - x_2)(x_1 - x_3)(y_1 - y_2)(y_1 - y_3)} = L_1^{(1)}(\\lambda_1)L_1^{(2)}(\\lambda_2)\n$$\nwhere $L_1^{(1)}$ is the 1-d Lagrange wrt point $x_1$, $L_1^{(2)}$ is the 1-d Lagrange wrt point $y_1$", "_____no_output_____" ] ], [ [ "def QoI_FEM(x0,y0,lam1,lam2,gridx,gridy,p):\n mesh = fn.UnitSquareMesh(gridx, gridy)\n V = fn.FunctionSpace(mesh, \"Lagrange\", p)\n\n # Define diffusion tensor (here, just a scalar function) and parameters\n A = fn.Expression((('exp(lam1)','a'),\n ('a','exp(lam2)')), a = fn.Constant(0.0), lam1 = lam1, lam2 = lam2, degree=3) \n\n u_exact = fn.Expression(\"sin(lam1*pi*x[0])*cos(lam2*pi*x[1])\", lam1 = lam1, lam2 = lam2, degree=2+p)\n\n # Define the mix of Neumann and Dirichlet BCs\n class LeftBoundary(fn.SubDomain):\n def inside(self, x, on_boundary):\n return (x[0] < fn.DOLFIN_EPS)\n class RightBoundary(fn.SubDomain):\n def inside(self, x, on_boundary):\n return (x[0] > 1.0 - fn.DOLFIN_EPS)\n class TopBoundary(fn.SubDomain):\n def inside(self, x, on_boundary):\n return (x[1] > 1.0 - fn.DOLFIN_EPS)\n class BottomBoundary(fn.SubDomain):\n def inside(self, x, on_boundary):\n return (x[1] < fn.DOLFIN_EPS)\n\n # Create a mesh function (mf) assigning an unsigned integer ('uint')\n # to each edge (which is a \"Facet\" in 2D)\n mf = fn.MeshFunction('size_t', mesh, 1)\n mf.set_all(0) # initialize the function to be zero\n # Setup the boundary classes that use Neumann boundary conditions\n NTB = TopBoundary() # instatiate\n NTB.mark(mf, 1) # set all values of the mf to be 1 on this boundary\n NBB = BottomBoundary()\n NBB.mark(mf, 2) # set all values of the mf to be 2 on this boundary\n NRB = RightBoundary()\n NRB.mark(mf, 3)\n\n # Define Dirichlet boundary conditions\n Gamma_0 = fn.DirichletBC(V, u_exact, LeftBoundary())\n bcs = [Gamma_0]\n\n # Define data necessary to approximate exact solution\n f = ( fn.exp(lam1)*(lam1*fn.pi)**2 + 
fn.exp(lam2)*(lam2*fn.pi)**2 ) * u_exact\n g1 = fn.Expression(\"-exp(lam2)*lam2*pi*sin(lam1*pi*x[0])*sin(lam2*pi*x[1])\", lam1=lam1, lam2=lam2, degree=2+p) #pointing outward unit normal vector, pointing upaward (0,1)\n g2 = fn.Expression(\"exp(lam2)*lam2*pi*sin(lam1*pi*x[0])*sin(lam2*pi*x[1])\", lam1=lam1, lam2=lam2, degree=2+p) #pointing downward (0,1)\n g3 = fn.Expression(\"exp(lam1)*lam1*pi*cos(lam1*pi*x[0])*cos(lam2*pi*x[1])\", lam1=lam1, lam2=lam2, degree=2+p)\n\n fn.ds = fn.ds(subdomain_data=mf)\n # Define variational problem\n u = fn.TrialFunction(V)\n v = fn.TestFunction(V)\n a = fn.inner(A*fn.grad(u), fn.grad(v))*fn.dx\n L = f*v*fn.dx + g1*v*fn.ds(1) + g2*v*fn.ds(2) + g3*v*fn.ds(3) #note the 1, 2 and 3 correspond to the mf\n\n # Compute solution\n u = fn.Function(V)\n fn.solve(a == L, u, bcs)\n\n return u(x0,y0)\n\ndef exactQ(x,y):\n return (1-np.cos(np.pi*x))*np.sin(np.pi*y)/(np.pi**2*x*y)", "_____no_output_____" ], [ "x0 = [0.2, 0.2, 0.2, 0.3, 0.5, 0.5]\ny0 = [0.3, 0.5, 0.8, 0.2, 0.2, 0.5]\n\nM, N = 5, 5\nx1,w1 = leggauss(M)\nx2,w2 = leggauss(N)\n\ntab = np.zeros((len(x0),2))\nfor k in range(len(x0)):\n #### Stochastic Collocation Mean at x0, y0 ####\n uij = np.zeros((M,N))\n for i in range(M):\n for j in range(N):\n uij[i,j] = QoI_FEM(x0[k],y0[k],(1+x1[i])/2,(1+x2[j])/2,10,10,2)\n \n sol = 0\n for i in range(M):\n for j in range(N):\n sol += w1[i]*w2[j]*uij[i,j]\n sol /= 4\n tab[k,0] = sol\n #### Exact Mean at x0, y0 ####\n tab[k,1] = exactQ(x0[k],y0[k])\n\nprint(tab)", "[[0.2609293 0.2609164 ]\n [0.19351774 0.19350624]\n [0.07109627 0.07108757]\n [0.40917364 0.40915816]\n [0.59556611 0.59555097]\n [0.40529523 0.40528473]]\n" ] ], [ [ "# Further Discussion\n\n<font color=red> Scaling, number of layers, print out the MSE every step, how to piecewise\n \n- x_train, x_plt should have similar property. 
For StandardScaler(), var \\& mean need to be similar\n ", "_____no_output_____" ] ], [ [ "def model(x):\n if x<=1:\n return 15*x+10\n elif x<=7:\n return x**3-12*x**2+36*x\n elif x<=10:\n return 15/np.pi*np.sin(np.pi*(x-7))+7\n else:\n return -30*np.sqrt(x-9)+37", "_____no_output_____" ], [ "np.random.seed(12345)\nx_syn = np.random.uniform(0,15,100)\ny_exact = np.array([model(i) for i in x_syn])\ny_syn = y_exact+np.random.normal(0,1,len(x_syn))", "_____no_output_____" ], [ "fig = plt.figure()\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\nplt.title('Observations & Target')\nx_plt = np.linspace(min(x_syn),max(x_syn),50)\ny_plt = [model(i) for i in x_plt]\nplt.scatter(x_syn, y_syn, label='Obs')\nplt.plot(x_plt, y_plt, color='red', label='Target')\nplt.legend();\nfig.savefig(\"images/comp_target.png\")", "_____no_output_____" ] ], [ [ "## NN2", "_____no_output_____" ] ], [ [ "scaler_syn = StandardScaler()\nsyndata_trans = scaler_syn.fit_transform(x_syn.reshape(-1,1))", "_____no_output_____" ] ], [ [ "### One step method", "_____no_output_____" ] ], [ [ "############### With Feature Scaling ###############\nnum_neuron = 5\ntf.random.set_seed(12345)\nmodel_syn_all = tf.keras.Sequential()\nmodel_syn_all.add(tf.keras.layers.Dense(num_neuron,activation=rbf))\nmodel_syn_all.add(tf.keras.layers.Dense(num_neuron,activation=rbf))\n# model_syn_all.add(tf.keras.layers.Dense(num_neuron,activation=rbf))\n# model_syn_all.add(tf.keras.layers.Dense(num_neuron,activation=rbf))\nmodel_syn_all.add(tf.keras.layers.Dense(1))\n\nmodel_syn_all.compile(loss='mean_squared_error', optimizer=tf.keras.optimizers.Adam(0.01))\nmodel_syn_all.fit(syndata_trans[:,0],y_syn, epochs=1000, verbose=0)\n\nfig = plt.figure()\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\ny_pred = []\nfor j in syndata_trans[:,0]:\n y_pred.append(model_syn_all.predict([j]))\ny_pred_shaped = tf.reshape(tf.constant(np.array(y_pred)),len(y_pred))\nerror0 = tf.keras.losses.MSE(y_syn,y_pred_shaped).numpy()\nplt.title(\"MSE=%.5f\"%(error0))\nplt.scatter(x_syn, y_syn, label='Obs')\nplt.scatter(x_syn, y_pred_shaped, color='red',label=\"NN\")\nplt.legend()\nplt.show();\nfig.savefig(\"images/comp_nn_1step.png\")", "_____no_output_____" ] ], [ [ "### Split step method", "_____no_output_____" ] ], [ [ "## Step 1\nnum_neuron = 5\ntf.random.set_seed(12345)\nmodel_nn1 = tf.keras.Sequential()\nmodel_nn1.add(tf.keras.layers.Dense(num_neuron,activation=rbf))\nmodel_nn1.add(tf.keras.layers.Dense(1))\n\nmodel_nn1.compile(loss='mean_squared_error', optimizer=tf.keras.optimizers.Adam(0.01))\nmodel_nn1.fit(syndata_trans[:,0],y_syn, epochs=1000, verbose=0)\n\nfig = plt.figure(figsize=(13,4))\nplt.subplot(121)\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\ny_pred1 = []\nfor j in syndata_trans[:,0]:\n y_pred1.append(model_nn1.predict([j]))\ny_pred1_shaped = tf.reshape(tf.constant(np.array(y_pred1)),len(y_pred1))\nplt.title(\"Overall Fit\")\nplt.scatter(x_syn, y_syn, label='Obs')\nplt.scatter(x_syn, y_pred1_shaped, color='red',label=\"NN\")\nplt.legend();\nplt.subplot(122)\nplt.xlabel(\"$x$\")\nplt.ylabel(\"Residual\")\nmse1 = tf.keras.losses.MSE(y_syn,y_pred1_shaped).numpy()\nplt.title(\"MSE=%.5f\"%(mse1))\nplt.scatter(x_syn, y_syn-y_pred1_shaped)\nplt.show();\nfig.savefig(\"images/comp_nn_step1.png\");", "_____no_output_____" ], [ "## Step 2\n\ntf.random.set_seed(12345)\nmodel_nn2 = tf.keras.Sequential()\nmodel_nn2.add(tf.keras.layers.Dense(5,activation=rbf))\nmodel_nn2.add(tf.keras.layers.Dense(1))\n\nerror1 = y_syn - y_pred1_shaped\nscaler_syn2 = 
StandardScaler()\n\nmodel_nn2.compile(loss='mean_squared_error', optimizer=tf.keras.optimizers.Adam(0.01))\nmodel_nn2.fit(syndata_trans[:,0],error1, epochs=1000, verbose=0)\n\nfig = plt.figure(figsize=(15,4))\nplt.subplot(121)\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\ny_pred2 = []\nfor j in syndata_trans[:,0]:\n y_pred2.append(model_nn2.predict([j]))\ny_pred2_shaped = tf.reshape(tf.constant(np.array(y_pred2)),len(y_pred2))\nplt.title(\"Overall Fit\")\nplt.scatter(x_syn, y_syn, label='Obs')\nplt.scatter(x_syn, y_pred1_shaped+y_pred2_shaped, color='red',label=\"NN\")\nplt.legend();\nplt.subplot(122)\nplt.xlabel(\"$x$\")\nplt.ylabel(\"Residual\")\nmse2 = tf.keras.losses.MSE(y_syn,y_pred1_shaped+y_pred2_shaped).numpy()\nplt.title(\"MSE=%.5f\"%(mse2))\nplt.scatter(x_syn, y_syn-y_pred1_shaped-y_pred2_shaped)\nplt.show();\nfig.savefig(\"images/comp_nn_step2.png\");", "_____no_output_____" ], [ "# fig = plt.figure()\n# # plt.subplot(133)\n# plt.xlabel(\"$x$\")\n# plt.ylabel(\"Residual\")\n# plt.title(\"Fit Previous Residual\")\n# plt.scatter(x_syn, error1)\n# plt.scatter(x_syn, y_pred2_shaped, color='red',label=\"NN\")\n# plt.legend();\n# # fig.savefig(\"images/comp_nn_res2.png\");", "_____no_output_____" ], [ "## Step 3\n\ntf.random.set_seed(12345)\nmodel_nn3 = tf.keras.Sequential()\nmodel_nn3.add(tf.keras.layers.Dense(5,activation=rbf))\nmodel_nn3.add(tf.keras.layers.Dense(1))\n\nerror2 = y_syn - y_pred1_shaped - y_pred2_shaped\nscaler_syn3 = StandardScaler()\n\nmodel_nn3.compile(loss='mean_squared_error', optimizer=tf.keras.optimizers.Adam(0.01))\nmodel_nn3.fit(syndata_trans[:,0],error2, epochs=1000, verbose=0)\n\nfig = plt.figure(figsize=(13,4))\nplt.subplot(121)\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\ny_pred3 = []\nfor j in syndata_trans[:,0]:\n y_pred3.append(model_nn3.predict([j]))\ny_pred3_shaped = tf.reshape(tf.constant(np.array(y_pred3)),len(y_pred3))\nplt.title(\"Overall Fit\")\nplt.scatter(x_syn, y_syn, label='Obs')\nplt.scatter(x_syn, y_pred1_shaped+y_pred2_shaped+y_pred3_shaped, color='red',label=\"NN\")\nplt.legend();\nplt.subplot(122)\nplt.xlabel(\"$x$\")\nplt.ylabel(\"Residual\")\nmse3 = tf.keras.losses.MSE(y_syn,y_pred1_shaped+y_pred2_shaped+y_pred3_shaped).numpy()\nplt.title(\"MSE=%.5f\"%(mse3))\nplt.scatter(x_syn, y_syn-y_pred1_shaped-y_pred2_shaped-y_pred3_shaped);\nplt.show();\nfig.savefig(\"images/comp_nn_step3.png\");", "_____no_output_____" ], [ "# fig = plt.figure()\n# # plt.subplot(133)\n# plt.xticks(fontsize=13, rotation=0)\n# plt.yticks(fontsize=13, rotation=0)\n# plt.xlabel(\"$x$\",fontsize=18)\n# plt.ylabel(\"Residual\",fontsize=18)\n# plt.title(\"Fit Previous Residual\",fontsize=15)\n# plt.scatter(x_syn, error2, s=40)\n# plt.scatter(x_syn, y_pred3_shaped,s=40,color='red',label=\"NN\")\n# plt.legend(prop={'size': 14});\n# # fig.savefig(\"images/comp_nn_res3.png\");", "_____no_output_____" ] ], [ [ "## 3 \"1-5-1\" = \"1-15-1\"", "_____no_output_____" ] ], [ [ "num_neuron = 15\ntf.random.set_seed(12345)\nmodel_nn11 = tf.keras.Sequential()\nmodel_nn11.add(tf.keras.layers.Dense(num_neuron,activation=rbf))\nmodel_nn11.add(tf.keras.layers.Dense(1))\n\nmodel_nn11.compile(loss='mean_squared_error', optimizer=tf.keras.optimizers.Adam(0.01))\nmodel_nn11.fit(syndata_trans[:,0],y_syn, epochs=5000, verbose=0)\n\nfig = plt.figure(figsize=(13,4))\nplt.subplot(121)\nplt.xlabel(\"$x$\")\nplt.ylabel(\"$y$\")\ny_pred11 = []\nfor j in syndata_trans[:,0]:\n y_pred11.append(model_nn11.predict([j]))\ny_pred11_shaped = 
tf.reshape(tf.constant(np.array(y_pred11)),len(y_pred11))\nplt.title(\"Overall Fit\")\nplt.scatter(x_syn, y_syn, label='Obs')\nplt.scatter(x_syn, y_pred11_shaped, color='red', label=\"NN\")\nplt.legend();\nplt.subplot(122)\nplt.xlabel(\"$x$\")\nplt.ylabel(\"Residual\")\nmse11 = tf.keras.losses.MSE(y_syn,y_pred11_shaped).numpy()\nplt.title(\"MSE=%.5f\"%(mse11))\nplt.scatter(x_syn, y_syn-y_pred11_shaped)\nplt.show();\nfig.savefig(\"images/comp_nn_step11.png\");", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ] ]
d0a5bb76e6db00aef361b0b58641029a0a38b129
251,400
ipynb
Jupyter Notebook
MatplotLibExample.ipynb
ABPande/MyPythonRepo
51de39aee6c99b6ea2eb47fb199925ee63ca3750
[ "Apache-2.0" ]
null
null
null
MatplotLibExample.ipynb
ABPande/MyPythonRepo
51de39aee6c99b6ea2eb47fb199925ee63ca3750
[ "Apache-2.0" ]
null
null
null
MatplotLibExample.ipynb
ABPande/MyPythonRepo
51de39aee6c99b6ea2eb47fb199925ee63ca3750
[ "Apache-2.0" ]
null
null
null
158.912769
50,516
0.879089
[ [ [ "import pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt", "_____no_output_____" ], [ "df = pd.DataFrame(np.random.rand(5,5), index = \"a b c d e\".split(), columns = \"v w x y z\".split())", "_____no_output_____" ], [ "np.random.seed(101)", "_____no_output_____" ], [ "rand_mat = np.random.randn(5,4)", "_____no_output_____" ], [ "df = pd.DataFrame(rand_mat, index = \"a b c d e\".split(), columns = \"w x y z\".split())", "_____no_output_____" ], [ "df", "_____no_output_____" ], [ "df ['w']", "_____no_output_____" ], [ "df [['w', 'y']]", "_____no_output_____" ], [ "df['NEW'] = df['w'] + df['y']", "_____no_output_____" ], [ "df = df.drop('NEW', axis = 1)", "_____no_output_____" ], [ "df", "_____no_output_____" ], [ "df.loc[['a','c'],['w']] ", "_____no_output_____" ], [ "df = pd.DataFrame({'A':[1,2,np.nan],'B':[5,np.nan,np.nan],'C':[1,2,3]})", "_____no_output_____" ], [ "df.dropna(axis = 1, thresh = 2)", "_____no_output_____" ], [ "df.fillna(value = \"FILL VALUE\")", "_____no_output_____" ], [ "df = pd.DataFrame({'Company':'GOOG GOOG MSFT MSFT FB FB'.split(),'Person':'Sam Charlie Amy Vanessa Carl Sarah'.split(),'Sales':[200,120,340,124,243,350]})", "_____no_output_____" ], [ "df", "_____no_output_____" ], [ "df.groupby(\"Company\").mean()['Sales']", "_____no_output_____" ], [ "df = pd.DataFrame({'col1': [1,2,3,4], 'col2': [444,555,666,444], 'col3':'abc def ghi xyz'.split()})", "_____no_output_____" ], [ "df", "_____no_output_____" ], [ "df['col2'].unique()", "_____no_output_____" ], [ "df['col2'].nunique()", "_____no_output_____" ], [ "df['col2'].value_counts()", "_____no_output_____" ], [ "newdf = df[(df['col1'] > 2) & (df['col2'] == 444)]", "_____no_output_____" ], [ "df['col2'].apply(lambda x: x **2)", "_____no_output_____" ], [ "df.to_csv(\"C:\\\\Machine Learning\\\\df.csv\")", "_____no_output_____" ], [ "df = pd.read_html(\"http://www.fdic.gov/bank/individual/failed/banklist.html\")", "_____no_output_____" ], [ "len(df)", "_____no_output_____" ], [ "df = pd.read_csv(\"C:\\\\Users\\\\abhijit.a.pande\\\\Machine Learning\\\\Py_DS_ML_BC\\\\07-Pandas-Built-in-Data-Viz\\\\df1.csv\", index_col = 0)", "_____no_output_____" ], [ "%matplotlib inline", "_____no_output_____" ], [ "df", "_____no_output_____" ], [ "df2 = pd.read_csv(\"C:\\\\Users\\\\abhijit.a.pande\\\\Machine Learning\\\\Py_DS_ML_BC\\\\07-Pandas-Built-in-Data-Viz\\\\df2.csv\")", "_____no_output_____" ], [ "df['A'].plot.hist()", "_____no_output_____" ], [ "df['A'].plot.hist(bins = 100, edgecolor = \"k\")", "_____no_output_____" ], [ "df2", "_____no_output_____" ], [ "df2.plot.bar()", "_____no_output_____" ], [ "df2.plot.bar(stacked = True)", "_____no_output_____" ], [ "df2.plot.line(y = 'a')", "_____no_output_____" ], [ "df2.plot.area(y = \"a\", alpha = 0.4)", "_____no_output_____" ], [ "df.plot.scatter(x = \"A\", y = \"B\", s = df[\"C\"]*50, c = \"C\", cmap = \"coolwarm\")", "_____no_output_____" ], [ "df2.plot.box()", "_____no_output_____" ], [ "df2.plot.kde()", "_____no_output_____" ], [ "df1 = pd.DataFrame(np.random.randn(1000,2), columns = ['a','b'])", "_____no_output_____" ], [ "df1.plot.scatter(x = 'a', y = 'b')", "_____no_output_____" ], [ "df1.plot.hexbin(x= 'a', y = 'b', gridsize = 25, cmap = \"Oranges\")", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a5c86de0449889696f323c5afbfa0cccbf4c53
280,640
ipynb
Jupyter Notebook
notebooks/3 eda_all_sides_media.ipynb
sagarsetru/debiaser
023f7dd888d5ceaaf5454a1350e12fc86fa19f1d
[ "MIT" ]
null
null
null
notebooks/3 eda_all_sides_media.ipynb
sagarsetru/debiaser
023f7dd888d5ceaaf5454a1350e12fc86fa19f1d
[ "MIT" ]
null
null
null
notebooks/3 eda_all_sides_media.ipynb
sagarsetru/debiaser
023f7dd888d5ceaaf5454a1350e12fc86fa19f1d
[ "MIT" ]
null
null
null
126.414414
48,712
0.819527
[ [ [ "# EDA of All Sides Media ratings for 'debiaser' data product\n#### Sagar Setru, September 21th, 2020", "_____no_output_____" ], [ "## Brief description using CoNVO framework", "_____no_output_____" ], [ "### Context\n\nSome people are eager to get news from outside of their echo chamber. However, they do not know where to go outside of their echo chambers, and may also have some activation energy when it comes to seeking information from other sources. In the meantime, most newsfeeds only push you content that you agree with. You end up in an echo chamber, but may not have ever wanted to be in one in the first place.", "_____no_output_____" ], [ "### Need\n\nA way to find news articles from different yet reliable media sources.", "_____no_output_____" ], [ "### Vision\n\nDebiaser, a chrome extension that will recommend news articles similar in topic to the one currently being read, but from several pre-curated and reliable news media organizations across the political spectrum, for example, following the \"media bias chart\" here https://www.adfontesmedia.com/ or the \"media bias ratings\" here: https://www.allsides.com/media-bias/media-bias-ratings. The app will determine the main topics of the text of a news article, and then show links to similar articles from other news organizations.\n\nThe product will generate topics for a given document via latent Dirichlet allocation (LDA) and then search news websites for the topic words generated.\n\nCaveats: Many of these articles may be behind paywalls. News aggregators already basically do this. How different is this than just searching Google using the title of an article?", "_____no_output_____" ], [ "### Outcome\n\nPeople who are motivated to engage in content outside of their echo chambers have a tool that enables them to quickly find news similar to what they are currently reading, but from a variety of news organizations.", "_____no_output_____" ], [ "In this notebook, I will identify a set of news organizations across the political spectrum using data from AllSides media.", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport seaborn as sns\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport os", "_____no_output_____" ], [ "# make sure I'm in the right environment (should be 'debiaser')\nprint('Conda environment:')\nprint(os.environ['CONDA_DEFAULT_ENV'])", "Conda environment:\ndebiaser\n" ], [ "# get the entire news csv into data frame\n\n# get file name\nfname = '../all_sides_media_data/allsides.csv'\n\n# load into data frame\nall_sides_df = pd.read_csv(fname)", "_____no_output_____" ], [ "all_sides_df.head()", "_____no_output_____" ], [ "# get number of nan\nall_sides_df.isnull().sum()", "_____no_output_____" ], [ "all_sides_df.shape", "_____no_output_____" ] ], [ [ "There are 300 news media organizations represented. That's a lot to choose from.", "_____no_output_____" ] ], [ [ "# confirm all media organizations are only shown once\nall_sides_df['name'].is_unique", "_____no_output_____" ], [ "# plot histograms of bias across publications\n\nplt.figure(figsize=(10,5));\nsns.countplot(all_sides_df['bias'],order=['left','left-center','center','right-center','right','allsides'])\nplt.ylabel('N');\nsns.set_context('talk', font_scale=1.5);\nplt.xticks(rotation=90);\nplt.show();\nplt.clf();", "/Users/sagarsetru/anaconda3/envs/debiaser/lib/python3.7/site-packages/seaborn/_decorators.py:43: FutureWarning: Pass the following variable as a keyword arg: x. 
From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.\n FutureWarning\n" ], [ "plt.figure(figsize=(10,5));\nsns.countplot(all_sides_df['agreeance_text'],order=['Absolutely Disagrees','Strongly Disagrees','Disagrees','Somewhat Disagrees','Neutral','Somehwat Agrees','Agrees','Strongly Agrees','Absolutely Agrees'])\nplt.ylabel('N');\nsns.set_context('talk', font_scale=1.5);\nplt.xticks(rotation=90);\nplt.show();\nplt.clf();", "/Users/sagarsetru/anaconda3/envs/debiaser/lib/python3.7/site-packages/seaborn/_decorators.py:43: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.\n FutureWarning\n" ] ], [ [ "We see that most vote tallies somewhat disagree with the given rating.", "_____no_output_____" ] ], [ [ "# what are the 'allsides ratings?'\n\nall_sides_df_all_sides_rating = all_sides_df.loc[all_sides_df['bias']=='allsides']", "_____no_output_____" ], [ "all_sides_df_all_sides_rating.head()", "_____no_output_____" ] ], [ [ "These look like news outlets that cover all sides of a story.", "_____no_output_____" ] ], [ [ "all_sides_df_all_sides_rating.shape", "_____no_output_____" ], [ "# it's not so big, so let's just look at it all\nall_sides_df_all_sides_rating", "_____no_output_____" ] ], [ [ "These look like news/media organizations that try to show all sides of an issue. It may be worth looking at what these websites do, for inspiration. These could also be generically recommended to the user.", "_____no_output_____" ] ], [ [ "# let's see what the distribution of total votes is\n\nplt.figure(figsize=(10,5));\nsns.distplot(all_sides_df['total_votes'],kde=False)\nplt.ylabel('N');\nplt.xlabel('Total votes')\nsns.set_context('talk', font_scale=1.5);\nplt.xticks(rotation=90);\nplt.xscale('log')\nplt.show();\nplt.clf()", "/Users/sagarsetru/anaconda3/envs/debiaser/lib/python3.7/site-packages/seaborn/distributions.py:2551: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms).\n warnings.warn(msg, FutureWarning)\n" ] ], [ [ "There are lots of websites with very few votes, and a few with thousands. My guess is that the websites with thousands of votes are the most popular news media organizations. Let's take a look at those.", "_____no_output_____" ] ], [ [ "total_votes_threshold = 10000\n\nall_sides_df_high_votes = all_sides_df.loc[all_sides_df['total_votes']>=total_votes_threshold]", "_____no_output_____" ], [ "all_sides_df_high_votes.shape", "_____no_output_____" ] ], [ [ "There are 35 news organizations with more than the above threshold number of votes votes. What are they? 
What are their biases?", "_____no_output_____" ] ], [ [ "all_sides_df_high_votes", "_____no_output_____" ], [ "# plt.figure(figsize=(10,5));\nsns.countplot(all_sides_df_high_votes['bias'],order=['left','left-center','center','right-center','right'])\nplt.ylabel('N');\nplt.yticks(np.arange(0, 11, step=2))\nplt.xlabel('AllSides Media bias rating')\nsns.set_context('talk', font_scale=1.5);\nplt.xticks(rotation=90);\nplt.show();\nplt.clf();", "/Users/sagarsetru/anaconda3/envs/debiaser/lib/python3.7/site-packages/seaborn/_decorators.py:43: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.\n FutureWarning\n" ] ], [ [ "These look like larger news organizations that will more likely cover lots of topics, though there are more left and center-left news organizations than right and center-right. This is a good list of news organizations to include in the MVP.", "_____no_output_____" ] ], [ [ "# how often does the 'public' agree with these ratings?\n# plt.figure(figsize=(10,5));\nsns.countplot(all_sides_df_high_votes['agreeance_text'],order=['Absolutely Disagrees','Strongly Disagrees','Disagrees','Somewhat Disagrees','Neutral','Somehwat Agrees','Agrees','Strongly Agrees','Absolutely Agrees'])\nplt.ylabel('N');\nplt.yticks(np.arange(0, 11, step=2))\nplt.xlabel(\"Users' agreement with AllSides Media's rating\")\nsns.set_context('talk', font_scale=1.5);\nplt.xticks(rotation=90);\nplt.show();\nplt.clf();", "/Users/sagarsetru/anaconda3/envs/debiaser/lib/python3.7/site-packages/seaborn/_decorators.py:43: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.\n FutureWarning\n" ], [ "# look at those with agree votes and above\n\nall_sides_df_high_votes_agree = all_sides_df_high_votes.loc[all_sides_df_high_votes['agreeance_text']=='Agrees']\nall_sides_df_high_votes_strongly_agree = all_sides_df_high_votes.loc[all_sides_df_high_votes['agreeance_text']=='Strongly Agrees']\nall_sides_df_high_votes_absolutely_agree = all_sides_df_high_votes.loc[all_sides_df_high_votes['agreeance_text']=='Absolutely Agrees']\n\n\nall_sides_df_high_votes_agree_cat = pd.concat([all_sides_df_high_votes_agree,all_sides_df_high_votes_strongly_agree,all_sides_df_high_votes_absolutely_agree])", "_____no_output_____" ], [ "all_sides_df_high_votes_agree_cat.iloc[:,[2,4,6]]", "_____no_output_____" ], [ "# plt.figure(figsize=(10,5));\nsns.countplot(all_sides_df_high_votes_agree_cat['bias'],order=['left','left-center','center','right-center','right','allsides'])\nplt.ylabel('N');\nplt.yticks(np.arange(0, 9, step=2))\nplt.xlabel('Bias rating for agree and above')\nsns.set_context('talk', font_scale=1.5);\nplt.xticks(rotation=90);\nplt.show();\nplt.clf();", "/Users/sagarsetru/anaconda3/envs/debiaser/lib/python3.7/site-packages/seaborn/_decorators.py:43: FutureWarning: Pass the following variable as a keyword arg: x. 
From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.\n FutureWarning\n" ], [ "# now let's look at those with somewhat disagrees and disagrees votes\n\nall_sides_df_high_votes_disagree = all_sides_df_high_votes.loc[all_sides_df_high_votes['agreeance_text']=='Disagrees']\nall_sides_df_high_votes_somewhat_disagree = all_sides_df_high_votes.loc[all_sides_df_high_votes['agreeance_text']=='Somewhat Disagrees']\nall_sides_df_high_votes_disagree_cat = pd.concat([all_sides_df_high_votes_disagree,all_sides_df_high_votes_somewhat_disagree])", "_____no_output_____" ], [ "all_sides_df_high_votes_disagree_cat.iloc[:,[2,4,6]]", "_____no_output_____" ], [ "# plt.figure(figsize=(10,5));\nsns.countplot(all_sides_df_high_votes_disagree_cat['bias'],order=['left','left-center','center','right-center','right','allsides'])\nplt.ylabel('N');\nplt.yticks(np.arange(0, 9, step=2))\nplt.xlabel('Bias rating for somewhat disagree and below')\nsns.set_context('talk', font_scale=1.5);\nplt.xticks(rotation=90);\nplt.show();\nplt.clf();", "/Users/sagarsetru/anaconda3/envs/debiaser/lib/python3.7/site-packages/seaborn/_decorators.py:43: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.\n FutureWarning\n" ] ], [ [ "The disagreement is exclusively amongst those news organizations with left-center, center, and right-center ratings. Users tend to agree about those news organizations that are further left or right as rated by AllSides. \n\nAmong those news organizations where there is disagreement between users and AllSides, we see some big name media organizations like Fox News and NYT, which I would not want to exclude from this product. \n\nI'll make a judgement call here and, for the MVP, utilize the bias score given by AllSides, even as there may be some disagreement between the scores they give and what many users think. This is not an easy call to make, but I will put my trust in AllSides's ratings for now, acknowledging that not all users will agree with AllSides. Finally, I will also include an additional center-right news group, WSJ, to add some more right-of-center news organizations.\n\nTo quote Steve Job, \"People don't know what they want until you show it to them.\" For an MVP for open-minded consumers interested in diverse perspectives, I'm betting that the ratings by AllSidesMedia won't be crucial; the idea is to show diverse content across the spectrum, not to quantify where media organizations are on the spectrum per se.", "_____no_output_____" ] ], [ [ "all_sides_df_high_votes.to_csv('../all_sides_media_data/allsides_final.csv', index_label='index')", "_____no_output_____" ] ], [ [ "Separately, I added the domain names for each news organization as a column to the dataframe. 
Here, I check those domain names below.", "_____no_output_____" ] ], [ [ "all_sides_with_domains = pd.read_csv('../all_sides_media_data/allsides_final_plus_others_with_domains.csv')", "_____no_output_____" ], [ "all_sides_with_domains.head()", "_____no_output_____" ], [ "all_sides_names = all_sides_with_domains['name']\nall_sides_domains = all_sides_with_domains['domain']\n\nall_sides_names_domains = pd.concat([all_sides_names,all_sides_domains],axis=1)\nprint(all_sides_names_domains)", " name domain\n0 ABC News (Online) abcnews.go.com\n1 Al Jazeera aljazeera.com\n2 Associated Press apnews.com\n3 BBC News bbc.com\n4 Bloomberg bloomberg.com\n5 Breitbart News breitbart.com\n6 BuzzFeed News buzzfeednews.com\n7 CBN cbn.com\n8 CBS News cbsnews.com\n9 Christian Science Monitor csmonitor.com\n10 CNN (Web News) cnn.com\n11 Daily Beast thedailybeast.com\n12 Democracy Now democracynow.org\n13 FactCheck.org factcheck.org\n14 Forbes forbes.com\n15 Fox News (Online) foxnews.com\n16 HuffPost huffpost.com\n17 Mother Jones motherjones.com\n18 MSNBC msnbc.com\n19 National Review nationalreview.com\n20 NBC News (Online) nbcnews.com\n21 New York Post nypost.com\n22 New York Times (Online News) nytimes.com\n23 Newsmax - News newsmax.com\n24 NPR Online News npr.org\n25 Politico politico.com\n26 Reason reason.com\n27 Reuters reuters.com\n28 Salon salon.com\n29 The American Spectator spectator.org\n30 The Atlantic theatlantic.com\n31 The Guardian theguardian.com\n32 The Hill thehill.com\n33 The Wall Street Journal wsj.com\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ] ]
d0a5d6671a2bf9b371cb867ec7108d8b3a018418
22,531
ipynb
Jupyter Notebook
Task1_Word_Embeddings.ipynb
priya00priya/NNTI-WS2021-NLP-Project
5a0865c6b49c225c909c1a192887cb4b8c4c8656
[ "MIT" ]
null
null
null
Task1_Word_Embeddings.ipynb
priya00priya/NNTI-WS2021-NLP-Project
5a0865c6b49c225c909c1a192887cb4b8c4c8656
[ "MIT" ]
null
null
null
Task1_Word_Embeddings.ipynb
priya00priya/NNTI-WS2021-NLP-Project
5a0865c6b49c225c909c1a192887cb4b8c4c8656
[ "MIT" ]
null
null
null
34.241641
439
0.558564
[ [ [ "# Task 1: Word Embeddings (10 points)\n\nThis notebook will guide you through all steps necessary to train a word2vec model (Detailed description in the PDF).", "_____no_output_____" ], [ "## Imports\n\nThis code block is reserved for your imports. \n\nYou are free to use the following packages: \n\n(List of packages)", "_____no_output_____" ] ], [ [ "# Imports\nfrom pandas import DataFrame\nimport pandas as pd\nimport numpy as np\nimport os\nimport re\nfrom sklearn.preprocessing import OneHotEncoder\nimport nltk\nfrom nltk.tokenize import word_tokenize\nnltk.download('punkt')\nnltk.download('stopwords')\nimport math\nimport io", "[nltk_data] Downloading package punkt to\n[nltk_data] C:\\Users\\piyab\\AppData\\Roaming\\nltk_data...\n[nltk_data] Package punkt is already up-to-date!\n[nltk_data] Downloading package stopwords to\n[nltk_data] C:\\Users\\piyab\\AppData\\Roaming\\nltk_data...\n[nltk_data] Package stopwords is already up-to-date!\n" ] ], [ [ "# 1.1 Get the data (0.5 points)\n\nThe Hindi portion HASOC corpus from [github.io](https://hasocfire.github.io/hasoc/2019/dataset.html) is already available in the repo, at data/hindi_hatespeech.tsv . Load it into a data structure of your choice. Then, split off a small part of the corpus as a development set (~100 data points).\n\nIf you are using Colab the first two lines will let you upload folders or files from your local file system.", "_____no_output_____" ] ], [ [ "#TODO: implement!\n\n#from google.colab import files\n#uploaded = files.upload()\n\n#Get the data\n#os.chdir(\"D:/Saarland/NN TI/NNTI_WS2021_Project\")\n\ndf = DataFrame.from_csv(\"hindi_.tsv\", sep=\"\\t\")\n\ndef split_data (df):\n df = df[:20]\n return df", "C:\\Users\\piyab\\Anaconda3\\lib\\site-packages\\ipykernel_launcher.py:9: FutureWarning: from_csv is deprecated. Please use read_csv(...) instead. Note that some of the default arguments are different, so please refer to the documentation for from_csv when changing your function calls\n if __name__ == '__main__':\n" ] ], [ [ "## 1.2 Data preparation (0.5 + 0.5 points)\n\n* Prepare the data by removing everything that does not contain information. \nUser names (starting with '@') and punctuation symbols clearly do not convey information, but we also want to get rid of so-called [stopwords](https://en.wikipedia.org/wiki/Stop_word), i. e. words that have little to no semantic content (and, but, yes, the...). Hindi stopwords can be found [here](https://github.com/stopwords-iso/stopwords-hi/blob/master/stopwords-hi.txt) Then, standardize the spelling by lowercasing all words.\nDo this for the development section of the corpus for now.\n\n* What about hashtags (starting with '#') and emojis? Should they be removed too? 
Justify your answer in the report, and explain how you accounted for this in your implementation.", "_____no_output_____" ] ], [ [ "#TODO: implement!\n\n\n\ndef clean_data(sentence):\n\n hindi_stopword_file = open('stopwords.txt', encoding=\"utf8\")\n hindi_stopwords = []\n for x in hindi_stopword_file:\n hindi_stopwords.append(x.rstrip())\n \n text_tokens = word_tokenize(sentence)\n special_words_list = ['#', '?', '!', ';', ',','&' ,'+' ,'<' ,'>' ,'^' ,'_' ,'`' ,'|' ,'~' ,'..', '…', '....', '', ' ', ' ',\n ':', \"\\'\", '-', '=', '(', ')', '[', ']' , '{', '}','$','°', '¶' , '\"', '*', '@', ' ', '\\\\', '/', '.', '%', '।', '”']\n\n sentence = \" \".join([text_word for text_word in text_tokens if text_word not in hindi_stopwords])\n text_tokens = word_tokenize(sentence)\n sentence = \" \".join([text_word for text_word in text_tokens if not re.search(r'[a-zA-Z0-9]', text_word) ])\n \n text_tokens = word_tokenize(sentence)\n PATTERN = re.compile( #https://en.wikipedia.org/wiki/Emoji#Unicode_blocks\n \"([\"\n \"\\U0001F1E0-\\U0001F1FF\" # flags \n \"\\U0001F300-\\U0001F5FF\" # symbols & pictographs\n \"\\U0001F600-\\U0001F64F\" # emoticons\n \"\\U0001F680-\\U0001F6FF\" # transport & map symbols\n \"\\U0001F700-\\U0001F77F\" # alchemical symbols\n \"\\U0001F780-\\U0001F7FF\" # Geometric Shapes Extended\n \"\\U0001F800-\\U0001F8FF\" # Supplemental Arrows-C\n \"\\U0001F900-\\U0001F9FF\" # Supplemental Symbols and Pictographs\n \"\\U0001FA00-\\U0001FA6F\" # Chess Symbols\n \"\\U0001FA70-\\U0001FAFF\" # Symbols and Pictographs Extended-A\n \"\\U00002702-\\U000027B0\" # Dingbats\n \"])\"\n )\n \n sentence = \" \".join([text_word for text_word in text_tokens if not re.search(PATTERN, text_word)])\n \n text_tokens = word_tokenize(sentence)\n\n new_array = []\n for text_word in text_tokens:\n for word in special_words_list:\n text_word = text_word.replace(word, \"\")\n new_array.append(text_word)\n\n sentence = \" \".join(item for item in new_array)\n \n return sentence\n\ndef drop_empty_values(df):\n \n df['text'].replace('', np.nan, inplace=True)\n df['text'].replace(r'^\\s+$', np.nan, regex=True)\n \n df = df.dropna(subset=['text'])\n return df\n\n\n", "_____no_output_____" ] ], [ [ "## 1.3 Build the vocabulary (0.5 + 0.5 points)\n\nThe input to the first layer of word2vec is an one-hot encoding of the current word. The output od the model is then compared to a numeric class label of the words within the size of the skip-gram window. Now\n\n* Compile a list of all words in the development section of your corpus and save it in a variable ```V```.", "_____no_output_____" ] ], [ [ "#TODO: implement!\ndef building_vocabulary(df):\n \n sentences = [] \n v = [] #unique_words\n frequency_of_words = {}\n for line in df['text']:\n words = [x for x in line.split()]\n for word in words:\n if word != ':':\n \n if word not in v:\n v.append(word)\n frequency_of_words[word] = 1 \n else:\n frequency_of_words[word] = frequency_of_words[word] + 1\n sentences.append(words)\n v = sorted(v) \n return sentences,frequency_of_words , v", "_____no_output_____" ] ], [ [ "* Then, write a function ```word_to_one_hot``` that returns a one-hot encoding of an arbitrary word in the vocabulary. 
The size of the one-hot encoding should be ```len(v)```.", "_____no_output_____" ] ], [ [ "#TODO: implement!\ndef word_to_one_hot(word):\n try:\n ohe = OneHotEncoder(sparse=False)\n ohe.fit(word)\n ohe_word = ohe.transform(word)\n return ohe_word\n except ValueError: #Array_With_zero_sample\n pass", "_____no_output_____" ] ], [ [ "## 1.4 Subsampling (0.5 points)\n\nThe probability to keep a word in a context is given by:\n\n$P_{keep}(w_i) = \\Big(\\sqrt{\\frac{z(w_i)}{0.001}}+1\\Big) \\cdot \\frac{0.001}{z(w_i)}$\n\nWhere $z(w_i)$ is the relative frequency of the word $w_i$ in the corpus. Now,\n* Calculate word frequencies\n* Define a function ```sampling_prob``` that takes a word (string) as input and returns the probabiliy to **keep** the word in a context.", "_____no_output_____" ] ], [ [ "#TODO: implement!\ndef sampling_prob(word):\n probability = (math.sqrt(word/0.001) + 1 ) * (0.001/word)\n return probability\n", "_____no_output_____" ] ], [ [ "# 1.5 Skip-Grams (1 point)\n\nNow that you have the vocabulary and one-hot encodings at hand, you can start to do the actual work. The skip gram model requires training data of the shape ```(current_word, context)```, with ```context``` being the words before and/or after ```current_word``` within ```window_size```. \n\n* Have closer look on the original paper. If you feel to understand how skip-gram works, implement a function ```get_target_context``` that takes a sentence as input and [yield](https://docs.python.org/3.9/reference/simple_stmts.html#the-yield-statement)s a ```(current_word, context)```.\n\n* Use your ```sampling_prob``` function to drop words from contexts as you sample them. ", "_____no_output_____" ] ], [ [ "#TODO: implement!\n\ndef get_target_context(sentences,w2v_model,vocab_one_hot,vocab_index,subsampling_probability):\n center_word_list = []\n context_word_list = []\n for sentence in sentences:\n for i in range(len(sentence)): \n center_word = vocab_one_hot[sentence[i]] #vocab_one_hot\n context = [0 for x in range(len(center_word))]\n \n for j in range(i- w2v_model.window_size,i + w2v_model.window_size):\n if i!=j and j>=0 and j<len(sentence):\n # increase sampling chances of domain specific words in context\n #if subsampling_probability[sentence[i]] > np.random.random() :\n context[vocab_index[sentence[j]]] += 1 #vocab_index\n center_word_list.append(center_word)\n context_word_list.append(context)\n return center_word_list, context_word_list", "_____no_output_____" ] ], [ [ "# 1.6 Hyperparameters (0.5 points)\n\nAccording to the word2vec paper, what would be a good choice for the following hyperparameters? \n\n* Embedding dimension\n* Window size\n\nInitialize them in a dictionary or as independent variables in the code block below. ", "_____no_output_____" ] ], [ [ "# Set hyperparameters\nwindow_size = 2\nembedding_size = 100\n\n# More hyperparameters\nlearning_rate = 0.05\nepochs = 100", "_____no_output_____" ] ], [ [ "# 1.7 Pytorch Module (0.5 + 0.5 + 0.5 points)\n\nPytorch provides a wrapper for your fancy and super-complex models: [torch.nn.Module](https://pytorch.org/docs/stable/generated/torch.nn.Module.html). The code block below contains a skeleton for such a wrapper. Now,\n\n* Initialize the two weight matrices of word2vec as fields of the class.\n\n* Override the ```forward``` method of this class. It should take a one-hot encoding as input, perform the matrix multiplications, and finally apply a log softmax on the output layer.\n\n* Initialize the model and save its weights in a variable. 
The Pytorch documentation will tell you how to do that.", "_____no_output_____" ] ], [ [ "# Create model \n\ndef softmax(x):\n e_x = np.exp(x - np.max(x))\n return e_x / e_x.sum(axis=0)\n\nclass Word2Vec(object):\n def __init__(self):\n self.embedding_size = 100\n self.window_size = 2 # sentences weren't too long, so\n self.learning_rate = 0.05\n self.epochs = 100\n \n def initialize(self,Vocab_length):\n self.Vocab_len = Vocab_length\n self.W = np.random.uniform(-0.8, 0.8, (self.Vocab_len, self.embedding_size))\n self.W1 = np.random.uniform(-0.8, 0.8, (self.embedding_size, self.Vocab_len))\n\n def feed_forward(self,X):\n self.h = np.dot(self.W.T,X).reshape(self.embedding_size,1)\n self.u = np.dot(self.W1.T,self.h)\n\n self.y = softmax(self.u)\n return self.u\n \n def backpropagate(self,x,t):\n e = self.y - np.asarray(t).reshape(self.Vocab_len,1)\n dLdW1 = np.dot(self.h,e.T)\n \n X = np.array(x).reshape(self.Vocab_len,1)\n dLdW = np.dot(X, np.dot(self.W1,e).T)\n \n self.W1 = self.W1 - self.learning_rate*dLdW1\n self.W = self.W - self.learning_rate*dLdW\n return self.W , self.W1", "Word2Vec(\n (input): Linear(in_features=534, out_features=300, bias=False)\n (output): Linear(in_features=300, out_features=534, bias=False)\n)\n" ] ], [ [ "# 1.8 Loss function and optimizer (0.5 points)\n\nInitialize variables with [optimizer](https://pytorch.org/docs/stable/optim.html#module-torch.optim) and loss function. You can take what is used in the word2vec paper, but you can use alternative optimizers/loss functions if you explain your choice in the report.", "_____no_output_____" ] ], [ [ "# Define optimizer and loss\n#optimizer = torch.optim.Adam(w2v_model.parameters(), lr=0.01)\n#criterion = nn.CrossEntropyLoss() ", "_____no_output_____" ] ], [ [ "# 1.9 Training the model (3 points)\n\nAs everything is prepared, implement a training loop that performs several passes of the data set through the model. You are free to do this as you please, but your code should:\n\n* Load the weights saved in 1.6 at the start of every execution of the code block\n* Print the accumulated loss at least after every epoch (the accumulate loss should be reset after every epoch)\n* Define a criterion for the training procedure to terminate if a certain loss value is reached. You can find the threshold by observing the loss for the development set.\n\nYou can play around with the number of epochs and the learning rate.", "_____no_output_____" ] ], [ [ "# Define train procedure\n\n# load initial weights\n\ndef train(w2v_model,X_train,Y_train, Vocab_len):\n #loss_list = []\n print(\"Training started\")\n for x in range(1, w2v_model.epochs): \n loss = 0\n for j in range(len(X_train)):\n u = w2v_model.feed_forward(X_train[j])\n \n W, W1 = w2v_model.backpropagate( X_train[j], Y_train[j])\n \n loss += -np.sum([u[word.index(1)] for word in Y_train]) + len(Y_train) * np.log(np.sum(np.exp(u)))\n #print(\"epoch \",x, \" loss = \", loss)\n #loss_list.append(loss)\n #print(loss_list)\n print(\"Training finished\")\n return W, W1\n\n\n\n", "_____no_output_____" ] ], [ [ "# 1.10 Train on the full dataset (0.5 points)\n\nNow, go back to 1.1 and remove the restriction on the number of sentences in your corpus. Then, reexecute code blocks 1.2, 1.3 and 1.6 (or those relevant if you created additional ones). \n\n* Then, retrain your model on the complete dataset.\n\n* Now, the input weights of the model contain the desired word embeddings! 
Save them together with the corresponding vocabulary items (Pytorch provides a nice [functionality](https://pytorch.org/tutorials/beginner/saving_loading_models.html) for this).", "_____no_output_____" ] ], [ [ "#df = split_data(df) \n \n# converting upper case letters to lowercase\ndf['text'] = df['text'].str.lower()\n\n# data preprocessing\ndf['text'] = df['text'].map(lambda x: clean_data(x))\n# drop empty values \ndf = drop_empty_values(df)\n\n#building vocabulary and Calculating word frequencies\nsentences, frequency_of_words , v = building_vocabulary(df)\n\nsubsampling_probability = {}\n#subsampling\nfor words in v:\n freq_word = frequency_of_words[words]\n subsampling_probability[words] = sampling_prob(freq_word) \n\n# One hot encoding\nresult_one_hot_encoding = word_to_one_hot(np.reshape(v,(-1,1)))\nvocab_index = {}\nvocab_one_hot = {}\nfor i in range(len(v)):\n vocab_index[v[i]] = i\n vocab_one_hot[v[i]] = result_one_hot_encoding[i]\n\n \n#Creating object of the Word2Vec class \nw2v_model = Word2Vec()\n\nis_untrained = True # true for the new training of the model\n\nif is_untrained: # checks the flag\n w2v_model.initialize(len(v))\n\n#w2v_model = w2v_model.to(device)\n#w2v_model.train(True)\n\n# Define optimizer and loss\n#optimizer = torch.optim.Adam(w2v_model.parameters(), lr=0.01)\n#criterion = nn.CrossEntropyLoss()\n\n'''\nimport tensorflow as tf\nfrom tensorflow import keras\nfrom tensorflow.keras import layers\noptimizer = keras.optimizers.Adam(learning_rate=0.01)\ncriterion = tf.keras.losses.BinaryCrossentropy()\n'''\n\n#retreiving the target and context\nX_train, Y_train = get_target_context(sentences,w2v_model,vocab_one_hot,vocab_index,subsampling_probability)\n\n#training the dataset\nW, W1 = train(w2v_model,X_train,Y_train, len(v))\n\n## saving embedding weights\n\nout_v = io.open('vocab.tsv', 'w', encoding='utf-8')\nout_w = io.open('embedding_weight_W.tsv', 'w', encoding='utf-8')\nout_w1 = io.open('embedding_weight_W1.tsv', 'w', encoding='utf-8')\n\nfor i in v:\n out_v.write(i)\nout_w.close()\n\nfor i in W:\n out_w.write(','.join([str(w) for w in i]))\nout_w.close()\n\nfor i in W1:\n out_w1.write(','.join([str(w) for w in i]))\nout_w1.close()\n \ntry:\n from google.colab import files\n files.download('vocab.tsv')\n files.download('embedding_weight_W.tsv')\n files.download('embedding_weight_W1.tsv')\nexcept Exception:\n pass\n", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
d0a5dc98aac35f18d62c4200ae47097edd550e8c
2,567
ipynb
Jupyter Notebook
week07/my_index_of_pytorch_notebooks.ipynb
willianrampazzo/ia368z
9f825e77edfea508630a9b7bb73615db6e326646
[ "Unlicense" ]
null
null
null
week07/my_index_of_pytorch_notebooks.ipynb
willianrampazzo/ia368z
9f825e77edfea508630a9b7bb73615db6e326646
[ "Unlicense" ]
null
null
null
week07/my_index_of_pytorch_notebooks.ipynb
willianrampazzo/ia368z
9f825e77edfea508630a9b7bb73615db6e326646
[ "Unlicense" ]
null
null
null
22.919643
150
0.589794
[ [ [ "# Lista de notebooks do meu tutorial de Pytorch", "_____no_output_____" ], [ "Estude os notebooks da [LearningPytorchbyExamples.ipynb](LearningPytorchbyExamples.ipynb) e desenvolva os notebooks iniciais feitos no curso:\n- regressão linear - estimar comprimento da pétala a partir do comprimento da sépala\n- regressão linear com mini-batch gradiente descendente\n- regressão logística - 3 classes - Iris dataset\n- regressão logística - 10 classes - dígitos MNIST\n- rede com 1 ou mais camadas escondidas - 3 classes - Iris dataset\n\nColoque aqui os links para os notebooks feitos assim como seus comentários", "_____no_output_____" ], [ "1 - [regressão linear - estimar comprimento da pétala a partir do comprimento da sépala](my_linear_regression.ipynb)", "_____no_output_____" ], [ "2 - [regressão linear com mini-batch gradiente descendente](my_sgd_minibatch.ipynb)", "_____no_output_____" ], [ "3 - [regressão logística - 3 classes - Iris dataset](my_mapa_atributos.ipynb)\n\n", "_____no_output_____" ], [ "4 - [regressão logística - 10 classes - dígitos MNIST](my_logistic_regression_mnist.ipynb)\n\n", "_____no_output_____" ], [ "5 - [rede com 1 ou mais camadas escondidas - 3 classes - Iris dataset](my_logistic_regression_iris.ipynb)", "_____no_output_____" ], [ "6 - [cifar10](PyTorch-Cifar10.ipynb)", "_____no_output_____" ] ] ]
[ "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ] ]
d0a5f1eab5c244c1a1f5ee07afcf8412188cd5ba
21,760
ipynb
Jupyter Notebook
notebooks/Model Exploration.ipynb
dshirle7/capstone1
4c6f1757d351127a562b007d95a068fc6ceae2c3
[ "MIT" ]
null
null
null
notebooks/Model Exploration.ipynb
dshirle7/capstone1
4c6f1757d351127a562b007d95a068fc6ceae2c3
[ "MIT" ]
4
2020-03-24T16:45:51.000Z
2021-06-01T23:31:05.000Z
notebooks/Model Exploration.ipynb
dshirle7/capstone1
4c6f1757d351127a562b007d95a068fc6ceae2c3
[ "MIT" ]
null
null
null
30.055249
376
0.472656
[ [ [ "# The purpose of this notebook is to compare the\n# efficacy of various Machine Learning models on\n# the dataset.\n\nimport os\nimport numpy as np\nimport pandas as pd\nfrom sklearn.model_selection import train_test_split\n\nnp.random.seed(0)\n\nscript_dir = os.path.abspath('')\n\nfile = os.path.realpath(script_dir + '/../data/interim/train_users_2_2.csv')\n\ndf = pd.read_csv(file)\n\ndf.head()\n\nprint(df.columns)", "Index(['Unnamed: 0', 'id', 'date_account_created', 'timestamp_first_active',\n 'date_first_booking', 'gender', 'age', 'signup_method', 'signup_flow',\n 'language', 'affiliate_channel', 'affiliate_provider',\n 'first_affiliate_tracked', 'signup_app', 'first_device_type',\n 'first_browser', 'country_destination', 'days_thinking',\n 'number_of_actions'],\n dtype='object')\n" ], [ "# Prepare the dataset for our regression\n\ndf2 = 0\n\n# Remove undesired columns\n\n# Note we do NOT want date_first_booking in\n# our model because it is not included in\n# the test set of the competition.\n\ndf2 = df[['date_account_created',\n 'gender',\n 'age',\n 'signup_method',\n 'language',\n 'affiliate_channel',\n 'affiliate_provider',\n 'first_affiliate_tracked',\n 'signup_app',\n 'first_device_type',\n 'first_browser',\n 'number_of_actions',\n 'country_destination']]\n\n# Convert time-based columns to datetime\n# objects, then to numbers that the model\n# can use.\n\ndf2['week_account_created'] = pd.to_datetime(df2['date_account_created']).dt.week\n\ndf2 = df2.drop(['date_account_created'], axis=1)\n\ndf2.fillna(0, inplace=True)\n\n# Use get_dummies to convert our categorical\n# features to numerical features so that our\n# model can use them.\n\ndummiescols = ['gender', 'signup_method', 'language',\n 'affiliate_channel', 'affiliate_provider',\n 'first_affiliate_tracked', 'signup_app',\n 'first_device_type', 'first_browser']\n\ndf2 = pd.get_dummies(df2, prefix=dummiescols, columns=dummiescols)", "C:\\Users\\d\\Anaconda3\\envs\\capstone1\\lib\\site-packages\\ipykernel_launcher.py:29: SettingWithCopyWarning: \nA value is trying to be set on a copy of a slice from a DataFrame.\nTry using .loc[row_indexer,col_indexer] = value instead\n\nSee the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n" ], [ "# Create the training and testing set\n\nX_train, X_test, y_train, y_test = train_test_split(\n df2.drop('country_destination', axis=1).values,\n df2['country_destination'].values,\n random_state=0)", "_____no_output_____" ], [ "# Logistic Regression\n# Train the model, then score it on the test set\n\nfrom sklearn.linear_model import LogisticRegression\n\nlogreg = LogisticRegression()\n\nlogreg.fit(X_train, y_train)\n\nlogreg.score(X_test, y_test)", "C:\\Users\\d\\Anaconda3\\envs\\capstone1\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n FutureWarning)\nC:\\Users\\d\\Anaconda3\\envs\\capstone1\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:460: FutureWarning: Default multi_class will be changed to 'auto' in 0.22. 
Specify the multi_class option to silence this warning.\n \"this warning.\", FutureWarning)\n" ], [ "from sklearn.metrics import confusion_matrix\nfrom sklearn.utils.multiclass import unique_labels\nimport matplotlib.pyplot as plt\n\nylr_pred = logreg.predict(X_test)\n\nlabels = [i for i in unique_labels(y_test, ylr_pred)]\n\nprint(labels)\n\ncm = confusion_matrix(y_test, ylr_pred)\n\nprint(cm)", "['AU', 'CA', 'DE', 'ES', 'FR', 'GB', 'IT', 'NDF', 'NL', 'PT', 'US', 'other']\n[[ 0 0 0 0 0 0 0 16 0 0 16 0]\n [ 0 0 0 0 0 0 0 19 0 0 50 0]\n [ 0 0 0 0 0 0 0 20 0 0 28 0]\n [ 0 0 0 0 0 0 0 39 0 0 78 0]\n [ 0 0 0 0 0 0 0 75 0 0 166 0]\n [ 0 0 0 0 0 0 0 43 0 0 106 0]\n [ 0 0 0 0 0 0 0 58 0 0 101 0]\n [ 0 0 0 0 0 0 0 2166 0 0 1578 0]\n [ 0 0 0 0 0 0 0 18 0 0 25 0]\n [ 0 0 0 0 0 0 0 2 0 0 10 0]\n [ 0 0 0 0 0 0 0 1185 0 0 2361 0]\n [ 0 0 0 0 0 0 0 196 0 0 382 0]]\n" ], [ "# Random Forests\n\nfrom sklearn.ensemble import RandomForestClassifier\n\nrf = RandomForestClassifier(n_estimators=100)\n\nrf.fit(X_train, y_train)\n\nrf.score(X_test, y_test)", "_____no_output_____" ], [ "yrf_pred = rf.predict(X_test)\n\nprint(labels)\n\ncm2 = confusion_matrix(y_test, yrf_pred)\n\nprint(cm2)", "['AU', 'CA', 'DE', 'ES', 'FR', 'GB', 'IT', 'NDF', 'NL', 'PT', 'US', 'other']\n[[ 0 0 0 0 0 0 0 18 0 0 14 0]\n [ 0 1 0 0 0 0 0 28 0 0 39 1]\n [ 0 0 0 0 0 0 0 25 0 0 21 2]\n [ 1 1 0 0 1 0 0 54 0 0 57 3]\n [ 0 0 0 0 2 2 0 110 0 0 121 6]\n [ 0 0 0 0 0 0 1 62 0 0 86 0]\n [ 0 0 0 1 3 0 0 75 0 0 77 3]\n [ 1 1 2 9 21 8 4 2202 0 0 1460 36]\n [ 0 0 0 0 0 0 0 21 0 0 21 1]\n [ 0 0 0 0 0 0 0 4 0 0 8 0]\n [ 1 5 2 13 16 6 15 1458 3 0 1968 59]\n [ 0 1 0 0 3 0 2 254 0 0 306 12]]\n" ], [ "# AdaBoost\n\nfrom sklearn.ensemble import AdaBoostClassifier\n\nab = AdaBoostClassifier()\n\nab.fit(X_train, y_train)\n\nab.score(X_test, y_test)", "_____no_output_____" ], [ "yab_pred = ab.predict(X_test)\n\nprint(labels)\n\ncm3 = confusion_matrix(y_test, yab_pred)\n\nprint(cm3)", "['AU', 'CA', 'DE', 'ES', 'FR', 'GB', 'IT', 'NDF', 'NL', 'PT', 'US', 'other']\n[[ 0 0 0 0 0 0 0 15 0 0 17 0]\n [ 0 0 0 0 0 0 0 20 0 0 49 0]\n [ 0 0 0 0 0 0 0 20 0 0 28 0]\n [ 0 0 0 0 0 0 0 42 0 0 75 0]\n [ 0 0 0 0 0 0 0 83 0 0 158 0]\n [ 0 0 0 0 0 0 0 48 0 1 100 0]\n [ 0 0 0 0 0 0 0 61 0 0 98 0]\n [ 0 0 0 0 0 0 0 2232 0 4 1508 0]\n [ 0 0 0 0 0 0 0 19 0 0 24 0]\n [ 0 0 0 0 0 0 0 4 0 0 8 0]\n [ 0 0 0 0 0 0 0 1230 0 1 2315 0]\n [ 0 0 0 0 0 0 0 210 0 0 368 0]]\n" ], [ "# Looks like our algorithms are only ever\n# predicting NDF or US as the classifier.\n# I wonder if we can get a better result\n# by removing NDF entirely. If so, we can\n# just create a separate algorithm to sort\n# the data into \"NDF\" or \"Made a Booking\".\n\ndf3 = df2[df2['country_destination'] != 'NDF']\n\nprint(df3['country_destination'].unique())", "['other' 'US' 'FR' 'GB' 'IT' 'AU' 'CA' 'ES' 'DE' 'NL' 'PT']\n" ], [ "X3_train, X3_test, y3_train, y3_test = train_test_split(\n df3.drop('country_destination', axis=1).values,\n df3['country_destination'].values,\n random_state=0)", "_____no_output_____" ], [ "logreg3 = LogisticRegression()\n\nlogreg3.fit(X3_train, y3_train)\n\nlogreg3.score(X3_test, y3_test)", "C:\\Users\\d\\Anaconda3\\envs\\capstone1\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n FutureWarning)\nC:\\Users\\d\\Anaconda3\\envs\\capstone1\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:460: FutureWarning: Default multi_class will be changed to 'auto' in 0.22. 
Specify the multi_class option to silence this warning.\n \"this warning.\", FutureWarning)\n" ], [ "ylr3_pred = logreg3.predict(X3_test)\n\nlabels3 = [i for i in unique_labels(y3_test, ylr3_pred)]\n\nprint(labels3)\n\ncm = confusion_matrix(y3_test, ylr3_pred)\n\nprint(cm)\n\n# Nope. It's still just saying everyone travels to the US.\n# Let's try one more time, eliminating the US as well.", "['AU', 'CA', 'DE', 'ES', 'FR', 'GB', 'IT', 'NL', 'PT', 'US', 'other']\n[[ 0 0 0 0 0 0 0 0 0 33 0]\n [ 0 0 0 0 0 0 0 0 0 85 0]\n [ 0 0 0 0 0 0 0 0 0 40 0]\n [ 0 0 0 0 0 0 0 0 0 107 0]\n [ 0 0 0 0 0 0 0 0 0 238 0]\n [ 0 0 0 0 0 0 0 0 0 119 0]\n [ 0 0 0 0 0 0 0 0 0 144 0]\n [ 0 0 0 0 0 0 0 0 0 43 0]\n [ 0 0 0 0 0 0 0 0 0 8 0]\n [ 0 0 0 0 0 0 0 0 0 3574 0]\n [ 0 0 0 0 0 0 0 0 0 585 0]]\n" ], [ "df4 = df3[df3['country_destination'] != 'US']\n\nprint(df4['country_destination'].unique())\n\nX4_train, X4_test, y4_train, y4_test = train_test_split(\n df4.drop('country_destination', axis=1).values,\n df4['country_destination'].values,\n random_state=0)", "['other' 'FR' 'GB' 'IT' 'AU' 'CA' 'ES' 'DE' 'NL' 'PT']\n" ], [ "logreg4 = LogisticRegression()\n\nlogreg4.fit(X4_train, y4_train)\n\nlogreg4.score(X4_test, y4_test)", "C:\\Users\\d\\Anaconda3\\envs\\capstone1\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n FutureWarning)\nC:\\Users\\d\\Anaconda3\\envs\\capstone1\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:460: FutureWarning: Default multi_class will be changed to 'auto' in 0.22. Specify the multi_class option to silence this warning.\n \"this warning.\", FutureWarning)\n" ], [ "ylr4_pred = logreg4.predict(X4_test)\n\nlabels4 = [i for i in unique_labels(y4_test, ylr4_pred)]\n\nprint(labels4)\n\ncm = confusion_matrix(y4_test, ylr4_pred)\n\nprint(cm)\n\n# Our model is pretty weak. It looks like it just\n# assigns the data to the most popular destination in\n# the dataset. Let's remove 'other' and see what\n# happens.", "['AU', 'CA', 'DE', 'ES', 'FR', 'GB', 'IT', 'NL', 'PT', 'other']\n[[ 0 0 0 0 0 0 0 0 0 30]\n [ 0 0 0 0 1 0 0 0 0 90]\n [ 0 0 0 0 1 0 1 0 0 43]\n [ 0 0 1 0 5 0 0 0 0 106]\n [ 0 0 1 0 7 0 1 0 0 232]\n [ 0 0 0 0 2 0 0 0 0 98]\n [ 0 0 1 0 4 0 1 0 0 133]\n [ 0 0 0 0 0 0 0 0 0 40]\n [ 0 0 0 0 0 0 0 0 0 10]\n [ 0 0 1 2 9 0 2 0 0 599]]\n" ], [ "df5 = df4[df4['country_destination'] != 'other']\n\nprint(df5['country_destination'].unique())\n\nX5_train, X5_test, y5_train, y5_test = train_test_split(\n df5.drop('country_destination', axis=1).values,\n df5['country_destination'].values,\n random_state=0)", "['FR' 'GB' 'IT' 'AU' 'CA' 'ES' 'DE' 'NL' 'PT']\n" ], [ "logreg5 = LogisticRegression()\n\nlogreg5.fit(X5_train, y5_train)\n\nlogreg5.score(X5_test, y5_test)", "C:\\Users\\d\\Anaconda3\\envs\\capstone1\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n FutureWarning)\nC:\\Users\\d\\Anaconda3\\envs\\capstone1\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:460: FutureWarning: Default multi_class will be changed to 'auto' in 0.22. Specify the multi_class option to silence this warning.\n \"this warning.\", FutureWarning)\n" ], [ "ylr5_pred = logreg5.predict(X5_test)\n\nlabels5 = [i for i in unique_labels(y5_test, ylr5_pred)]\n\nprint(labels5)\n\ncm = confusion_matrix(y5_test, ylr5_pred)\n\nprint(cm)\n\n# Ok. It's turtles all the way down. 
Just\n# to be sure, try Random Forests and AdaBoost.", "['AU', 'CA', 'DE', 'ES', 'FR', 'GB', 'IT', 'NL', 'PT']\n[[ 0 0 0 4 25 1 1 0 0]\n [ 0 1 1 5 68 4 4 0 0]\n [ 0 1 1 1 40 0 3 0 0]\n [ 0 1 1 3 97 1 1 0 0]\n [ 0 1 0 12 215 2 9 0 0]\n [ 0 0 0 2 114 3 4 0 0]\n [ 0 0 1 2 140 3 5 0 0]\n [ 0 0 0 2 38 2 4 0 0]\n [ 0 0 0 0 11 0 0 0 0]]\n" ], [ "rf5 = RandomForestClassifier(n_estimators=300)\n\nrf5.fit(X5_train, y5_train)\n\nrf5.score(X5_test, y5_test)", "_____no_output_____" ], [ "ab5 = AdaBoostClassifier()\n\nab5.fit(X5_train, y5_train)\n\nab5.score(X5_test, y5_test)", "_____no_output_____" ] ], [ [ "Very clear what's going on here: the models have little ability to discern between the less popular destination spots. We were getting such accurate results in the first case solely because two features—NDF and US—absolutely dwarf the other target categories. To proceed, I will need to understand how to predict even despite very unevenly distributed target classes.\n\nPerhaps it is time to re-wrangle things and re-introduce users for whom\ndata is missing. It could be that a larger training set would allow for\na better result. I think I will also start performing these ", "_____no_output_____" ] ] ]
[ "code", "markdown" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ] ]
d0a5f379fd5d3d4c9bd373f1d337623539aef42d
18,490
ipynb
Jupyter Notebook
dMRI Reconstruction Project.ipynb
zhangerjun/project_template
7b7d2dc2fa389a7e237c9c0e1e5408263b2c8b92
[ "CC0-1.0" ]
null
null
null
dMRI Reconstruction Project.ipynb
zhangerjun/project_template
7b7d2dc2fa389a7e237c9c0e1e5408263b2c8b92
[ "CC0-1.0" ]
null
null
null
dMRI Reconstruction Project.ipynb
zhangerjun/project_template
7b7d2dc2fa389a7e237c9c0e1e5408263b2c8b92
[ "CC0-1.0" ]
null
null
null
33.255396
412
0.517685
[ [ [ "# dMRI Data Reconstruction\n", "_____no_output_____" ], [ "In this notebook, we will reconstruct MRI imgaes from raw data by using Python.This includes: 1. Data processing; 2. DTI reconstruction and 3. DKI reocnstruction.", "_____no_output_____" ], [ "## Data Preprocessing", "_____no_output_____" ], [ "Data preprocessing is quit important for dMRI reconstruction. Different data preprocessing may lead to different reconstruction image qualities, which will make the comparation of different reconstruct methods unreliable. Thus, here we first preprocessing MRI by following same steps: denosing, topup (susceptibility-induced distortion correction) and eddy current-induced distortion and motion correction.", "_____no_output_____" ], [ "### __Import python libraries__", "_____no_output_____" ] ], [ [ "import os #TO control directories\nimport numpy as np\n\nimport nibabel as nib # read and save medical images\nfrom dipy.denoise.localpca import mppca #denoising\nimport nipype.interfaces.fsl as fsl #topup\nfrom nipype.interfaces.fsl import TOPUP\nfrom nipype.testing import anatfile\n\nimport timeit #compute time, useage: timeit.timeit()\n\nimport matplotlib.pyplot as plt\nfrom mpl_toolkits.mplot3d import Axes3D\n%matplotlib inline", "_____no_output_____" ] ], [ [ "### Set data path", "_____no_output_____" ] ], [ [ "data_path = \"/home/erjun/githubEZ/dMRI_BHS/dMRI_data/dwi\"\nap_file = 'sub-032213_ses-001_dir-AP_run-1_dwi.nii.gz' # dMRI data\npa_file = 'sub-032213_ses-001_dir-PA_run-1_dwi.nii.gz' \nbvals_file = 'sub-032216_ses-001_dir-AP_run-1_dwi.bvals' # bval file\nbvecs_file = 'sub-032216_ses-001_dir-AP_run-1_dwi.bvecs' # bvec file\ndenoised_file = 'sub-032216_ses-001_dir-AP_run-1_dwi_denoised.nii.gz' # output file after denoising\ncwdir = os.getcwd()\nos.chdir(data_path) #directory setting", "_____no_output_____" ] ], [ [ "### __Denoising__", "_____no_output_____" ] ], [ [ "# Load data file\nimg = nib.load(os.path.join(data_path,ap_file))\ndata = img.get_data() \n# Use dipy to denoise\ndenoised = mppca(data, patch_radius=50)\n# Save data \nnib.save(nib.Nifti1Image(data, img.affine), os.path.join(data_path,denoised_file))\n\nprint('DONE')", "_____no_output_____" ] ], [ [ "### TOPUP", "_____no_output_____" ] ], [ [ "# Set default output type and test ExtractROI tool for Define b_0 image extraction function\nfsl.FSLCommand.set_default_output_type('NIFTI_GZ')\n\nfslroi = fsl.ExtractROI(in_file=anatfile, roi_file='bar.nii.gz', t_min=0,t_size=1)\nfslroi.cmdline == 'fslroi %s bar.nii.gz 0 1' % anatfile", "_____no_output_____" ], [ "# Define b_0 image extraction function\n\ndef extract_b0(inImage, outImage):\n \"To run this, please first make sure you install fsl and can run it\"\n \"One method is that run fsl and thi pre-processing code in the same terminal\"\n fslroi = fsl.ExtractROI(in_file=inImage,roi_file=outImage,t_min=0, t_size=1)\n fslroi.run()\n \n# Test\n#extract_b0(ap_file, 'bar.nii.gz')\n#fslroi.cmdline == 'fslroi %s bar.nii.gz 0 1' % ap_file", "_____no_output_____" ], [ "# Extract b0 images\nextract_b0(ap_file, 'epi_b0.nii.gz')\nextract_b0(pa_file, 'epi_rev_b0.nii.gz')", "_____no_output_____" ], [ "# Use fslmerge to concatenate images\n#merger = fsl.Merge()\n#merger.inputs.in_files = ['epi_b0.nii.gz','epi_rev_b0.nii.gz']\n#merger.inputs.dimension = 't'\n#merger.inputs.output_type = 'NIFTI_GZ'\nmerger = fsl.Merge(in_files=['epi_b0.nii.gz','epi_rev_b0.nii.gz'],dimension = 't',output_type='NIFTI_GZ')\nmerger.run()\n\nfile = open('topup_encoding.txt','w')\nfile.write('0 1 0 0.05\\n0 -1 
0 0.05')\nfile.close()", "_____no_output_____" ], [ "topup = TOPUP()\ntopup.inputs.in_file = 'epi_b0_merged.nii.gz'\ntopup.inputs.encoding_file = 'topup_encoding.txt'\ntopup.inputs.output_type = 'NIFTI_GZ'\n#topup.run()", "_____no_output_____" ], [ "#------------------------------------------------\n# FSL ApplyTOPUP \n#------------------------------------------------\napplytopup = fsl.ApplyTOPUP(in_files = ['epi_b0.nii.gz', 'epi_rev_b0.nii.gz'],\n encoding_file = 'topup_encoding.txt',\n in_topup_fieldcoef = 'epi_b0_merged_base_fieldcoef.nii.gz',\n in_topup_movpar = 'epi_b0_merged_base_movpar.txt',\n output_type = \"NIFTI_GZ\") \n#applytopup.run()\n \nprint('DONE')", "_____no_output_____" ] ], [ [ "### EDDY", "_____no_output_____" ] ], [ [ "btr = fsl.BET(in_file= 'epi_b0.nii.gz',#'epi_b0_corrected.nii.gz',\n frac=0.2, out_file='brain.nii.gz', mask=True)\nbtr.run()\n\n# total nuber of volumes in dwi data\nimg = nib.load(denoised_file).get_data()\nnvolumes = img.shape[-1]\n\nfile = open('index.txt','w')\nfor i in range(0, nvolumes):\n file.write('1 ')\nfile.close()\n \n#eddy = fsl.Eddy(in_file = denoised_file,\n #in_mask = 'brain_mask.nii.gz',\n #in_index = 'index.txt',\n #in_acqp = 'topup_encoding.txt',\n #in_topup_fieldcoef = \"epi_b0_merged_base_fieldcoef.nii.gz\",\n #in_topup_movpar = 'epi_b0_merged_base_movpar.txt',\n #in_bvec = bvecs_file,\n #in_bval = bvals_file, \n #use_cuda = False, \n #is_shelled=True)\n #)\n#eddy.run()\nprint('DONE')", "_____no_output_____" ] ], [ [ "## DTI Reconstruction", "_____no_output_____" ] ], [ [ "import math\nfrom skimage import io #用于读取保存或显示图片或者视频\nimport time\n\nfrom dipy.io import read_bvals_bvecs\nfrom dipy.core.gradients import gradient_table\nfrom dipy.reconst.dti import TensorModel\nfrom dipy.reconst.dti import fractional_anisotropy\nfrom dipy.reconst.dti import color_fa\nimport dipy.reconst.dki as dki", "_____no_output_____" ], [ "# Set new data path for DTI reconstruction\ndata_path = \"/home/erjun/Documents/dHCP/dhcp_dmri_pipeline/sub-CC00060XX03/ses-12501/dwi\"\ndwi_file = 'sub-CC00060XX03_ses-12501_desc-preproc_dwi.nii.gz'\nbrainmask_file = 'sub-CC00060XX03_ses-12501_desc-preproc_space-dwi_brainmask.nii.gz'\nbval = 'sub-CC00060XX03_ses-12501_desc-preproc_dwi.bval'\nbvec = 'sub-CC00060XX03_ses-12501_desc-preproc_dwi.bvec'\nos.chdir(data_path)", "_____no_output_____" ], [ "# Load data files\nimg1 = nib.load(os.path.join(data_path,dwi_file))\ndata = img1.get_data()\n\nimg2 = nib.load(os.path.join(data_path,brainmask_file))\nbrainmask = img2.get_data()\n\nbvals, bvecs = read_bvals_bvecs(os.path.join(bval),\n os.path.join(data_path,bvec))\ngtab = gradient_table(bvals, bvecs)", "_____no_output_____" ], [ "# DTI model\nten_model = TensorModel(gtab)\nten_fit = ten_model.fit(data, brainmask)\n \n# Save DTI parametric maps\nif not os.path.exists(data_path+'/DTI/'):\n os.mkdir(data_path+'/DTI')\noutput_path = data_path+'/DTI/'\n \nDTI_FA = ten_fit.fa\nDTI_AD = ten_fit.ad\nDTI_RD = ten_fit.rd\nDTI_MD = ten_fit.md\n \nnib.save(nib.Nifti1Image(DTI_FA, img1.affine), os.path.join(output_path,'FA.nii.gz'))\nnib.save(nib.Nifti1Image(DTI_MD, img1.affine), os.path.join(output_path,'MD.nii.gz'))\nnib.save(nib.Nifti1Image(DTI_RD, img1.affine), os.path.join(output_path,'RD.nii.gz'))\nnib.save(nib.Nifti1Image(DTI_AD, img1.affine), os.path.join(output_path,'AD.nii.gz'))\n \n#Save FA RGB map\nfa = fractional_anisotropy(ten_fit.evals)\ncfa = color_fa(fa, ten_fit.evecs)\nDTI_FA = np.clip(fa, 0, 1)\nDTI_RGB = color_fa(fa, 
ten_fit.evecs)\n\nnib.save(nib.Nifti1Image(np.array(255 * cfa, 'uint8'), img1.affine), os.path.join(output_path,'FA_RGB.nii.gz'))\n\nprint('Done!')", "_____no_output_____" ], [ "# DKI MODEL\ndkimodel = dki.DiffusionKurtosisModel(gtab)\ndkifit = dkimodel.fit(data, brainmask)\n \n# Save DKI parametric maps\nif not os.path.exists(data_path+'/DKI/'):\n os.mkdir(data_path+'/DKI')\ndata_path_saveImage = data_path+'/DKI/'\n \nDKI_FA = dkifit.fa\nDKI_MD = dkifit.md\nDKI_RD = dkifit.rd\nDKI_AD = dkifit.ad\n\nDKI_MK = dkifit.mk(0, 3)\nDKI_AK = dkifit.ak(0, 3)\nDKI_RK = dkifit.rk(0, 3)\n \nnib.save(nib.Nifti1Image(DKI_FA, img1.affine), os.path.join(data_path_saveImage,'dki_FA.nii.gz'))\nnib.save(nib.Nifti1Image(DKI_MD, img1.affine), os.path.join(data_path_saveImage,'dki_MD.nii.gz'))\nnib.save(nib.Nifti1Image(DKI_RD, img1.affine), os.path.join(data_path_saveImage,'dki_RD.nii.gz'))\nnib.save(nib.Nifti1Image(DKI_AD, img1.affine), os.path.join(data_path_saveImage,'dki_AD.nii.gz'))\n \nnib.save(nib.Nifti1Image(DKI_AK, img1.affine), os.path.join(data_path_saveImage,'AK.nii.gz'))\nnib.save(nib.Nifti1Image(DKI_RK, img1.affine), os.path.join(data_path_saveImage,'RK.nii.gz'))\nnib.save(nib.Nifti1Image(DKI_MK, img1.affine), os.path.join(data_path_saveImage,'MK.nii.gz'))\n \nprint('DONE!')", "_____no_output_____" ] ], [ [ "### Show basical output maps", "_____no_output_____" ] ], [ [ "# set plot background\nplt.style.use('seaborn-dark')\n\n# plot paramter maps \nfig, [ax0, ax2, ax3, ax4] = plt.subplots(1,4,figsize=(10,8),subplot_kw={'xticks': [], 'yticks': []})\nax0.imshow(DTI_RGB[:,:,30,:]); ax0.set_title('Color coded FA',fontweight='bold',size=10)\n#ax1.imshow(DTI_FA[:,30,:]); ax1.set_title('Fractional anisotropy',fontweight='bold',size=10)\nax2.imshow(DTI_MD[:,:,30]); ax2.set_title('Mean diffusivity',fontweight='bold',size=10)\nax3.imshow(DTI_RD[:,:,30]); ax3.set_title('Radial diffusivity',fontweight='bold',size=10)\nax4.imshow(DTI_AD[:,:,30]); ax4.set_title('Axial diffusivity',fontweight='bold',size=10)", "_____no_output_____" ], [ "# plot paramter maps \nfig, ([ax0, ax1, ax2],[ax3, ax4, ax5]) = plt.subplots(2,3,figsize=(10,8),subplot_kw={'xticks': [], 'yticks': []})\nax0.imshow(DKI_AD[:,:,30]); ax0.set_title('Axial diffusivity',fontweight='bold',size=10)\nax1.imshow(DKI_RD[:,:,30]); ax1.set_title('Radial diffusivity',fontweight='bold',size=10)\nax2.imshow(DKI_MD[:,:,30]); ax2.set_title('Mean diffusivity',fontweight='bold',size=10)\nax3.imshow(DKI_AK[:,:,30]); ax3.set_title('Axial kurtosis',fontweight='bold',size=10)\nax4.imshow(DKI_RK[:,:,30]); ax4.set_title('Radial kurtosis',fontweight='bold',size=10)\nax5.imshow(DKI_MK[:,:,30]); ax5.set_title('Mean kurtosis',fontweight='bold',size=10)", "_____no_output_____" ], [ "# Define a function named vol_plot\n# Visualization of MRI volume slices\ndef vol_plot(x):\n \"to create 3D MRI figure with slider\"\n vol = x\n colormax = vol.max()#获取最大array中的最大值,最后代表cmax\n volume = vol.T\n len(volume)\n r, c = volume[math.floor(len(volume)/2)].shape\n # Define frames\n import plotly.graph_objects as go\n nb_frames = len(volume)-1\n fig = go.Figure(frames=[go.Frame(\n data=go.Surface(\n z=(len(volume)-1 - k ) * np.ones((r, c)),\n surfacecolor=volume[len(volume)-1 - k],\n cmin=0, cmax=colormax\n ),\n name=str(k) # name the frame for the animation to behave properly\n )\n for k in range(nb_frames)])\n\n # Add data to be displayed before animation starts\n fig.add_trace(go.Surface(\n z=(len(volume)-1) * np.ones((r, c)),\n 
surfacecolor=volume[len(volume)-1],#np.flipud(volume[30]),\n colorscale='gray',\n cmin=0, cmax=colormax,\n colorbar=dict(thickness=20, ticklen=4)\n ))\n\n def frame_args(duration):\n return {\n \"frame\": {\"duration\": 500},# Duration can be used to change animate speed\n \"mode\": \"immediate\",\n \"fromcurrent\": True,\n \"transition\": {\"duration\": 500, \"easing\": \"linear\"},\n }\n\n sliders = [\n {\n \"pad\": {\"b\": 10, \"t\": 60},\n \"len\": 0.9,\n \"x\": 0.1,\n \"y\": 0,\n \"steps\": [\n {\n \"args\": [[f.name], frame_args(0)],\n \"label\": str(k),\n \"method\": \"animate\",\n }\n for k, f in enumerate(fig.frames)\n ],\n }\n ]\n\n # Layout\n fig.update_layout(\n title='Slices in volumetric data',\n width=600,\n height=600,\n scene=dict(\n zaxis=dict(range=[-1, len(volume)-1], autorange=False),\n aspectratio=dict(x=1, y=1, z=1),\n ),\n updatemenus = [\n {\n \"buttons\": [\n {\n \"args\": [None, frame_args(50)],\n \"label\": \"&#9654;\", # play symbol\n \"method\": \"animate\",\n },\n {\n \"args\": [[None], frame_args(0)],\n \"label\": \"&#9724;\", # pause symbol\n \"method\": \"animate\",\n },\n ],\n \"direction\": \"left\",\n \"pad\": {\"r\": 10, \"t\": 70},\n \"type\": \"buttons\",\n \"x\": 0.1,\n \"y\": 0,\n }\n ],\n sliders=sliders\n )\n\n fig.show()", "_____no_output_____" ], [ "vol_plot(DTI_MD[:,:,:])", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ] ]
d0a5f55634b1dae4004ed388b7266ebeee1e00b6
1,699
ipynb
Jupyter Notebook
util/metric-notebook-template/Template.ipynb
computationalmystic/augur-group7
facf2d5de08bf36148ab32d0df72f7dd5232622f
[ "MIT" ]
3
2019-04-11T19:48:57.000Z
2019-04-18T19:10:42.000Z
util/metric-notebook-template/Template.ipynb
computationalmystic/augur-group7
facf2d5de08bf36148ab32d0df72f7dd5232622f
[ "MIT" ]
8
2019-04-24T19:21:15.000Z
2019-05-08T21:25:28.000Z
util/metric-notebook-template/Template.ipynb
computationalmystic/augur-group7
facf2d5de08bf36148ab32d0df72f7dd5232622f
[ "MIT" ]
2
2019-04-09T19:44:02.000Z
2020-04-07T10:23:55.000Z
30.890909
471
0.550324
[ [ [ "import augur\naugurApplication = augur.Application()", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code" ] ]
d0a60565279a0401ba630a1297e03095e49cf551
521,504
ipynb
Jupyter Notebook
model_src/xgboost-feature-selection-dsbowl.ipynb
kurianbenoy/Data-Science-Bowl
46df1a04c495cda527a2d26beec4695d8616ec58
[ "MIT" ]
null
null
null
model_src/xgboost-feature-selection-dsbowl.ipynb
kurianbenoy/Data-Science-Bowl
46df1a04c495cda527a2d26beec4695d8616ec58
[ "MIT" ]
null
null
null
model_src/xgboost-feature-selection-dsbowl.ipynb
kurianbenoy/Data-Science-Bowl
46df1a04c495cda527a2d26beec4695d8616ec58
[ "MIT" ]
null
null
null
235.33574
125,196
0.875589
[ [ [ "<font size=6 color='violet'>Introduction</font>\n\n![](http://www.gpb.org/sites/www.gpb.org/files/styles/hero_image/public/blogs/images/2018/08/07/maxresdefault.jpg?itok=gN6ErLyU)", "_____no_output_____" ], [ "\n\n\nIn this dataset, we are provided with game analytics for the PBS KIDS Measure Up! app. In this app, children navigate a map and complete various levels, which may be activities, video clips, games, or assessments. Each assessment is designed to test a child's comprehension of a certain set of measurement-related skills. There are five assessments: Bird Measurer, Cart Balancer, Cauldron Filler, Chest Sorter, and Mushroom Sorter.\n\nThe intent of the competition is to use the gameplay data to forecast how many attempts a child will take to pass a given assessment. Each application install is represented by an installation_id. This will typically correspond to one child, but you should expect noise from issues such as shared devices. In the training set, you are provided the full history of gameplay data. In the test set, we have truncated the history after the start event of a single assessment, chosen randomly, for which you must predict the number of attempts. Note that the training set contains many installation_ids which never took assessments, whereas every installation_id in the test set made an attempt on at least one assessment.\n\nThe outcomes in this competition are grouped into 4 groups (labeled accuracy_group in the data):\n\n 3: the assessment was solved on the first attempt\n 2: the assessment was solved on the second attempt\n 1: the assessment was solved after 3 or more attempts\n 0: the assessment was never solved\n\n", "_____no_output_____" ], [ "<font color='blue' size=4>If you think this kernel was helpful,please don't forget to click on the upvote button,that helps a lot.</font>", "_____no_output_____" ], [ "## <font size=5 color='violet'> Importing required libraries</font>", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport os\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nfrom sklearn.preprocessing import LabelEncoder\nfrom sklearn.model_selection import StratifiedKFold\nfrom sklearn.metrics import cohen_kappa_score\nfrom scipy.stats import mode\nfrom sklearn.feature_selection import SelectFromModel\nfrom sklearn.model_selection import train_test_split\n\n\nimport xgboost as xgb\nfrom xgboost import XGBClassifier\nfrom xgboost import plot_importance\nfrom matplotlib import pyplot\n# import shap\n", "_____no_output_____" ] ], [ [ "### <font size=4 color='violet'> Reading and understanding our data</font>", "_____no_output_____" ] ], [ [ "os.listdir('../input/data-science-bowl-2019')\n", "_____no_output_____" ], [ "%%time\nkeep_cols = ['event_id', 'game_session', 'installation_id', 'event_count',\n 'event_code','title' ,'game_time', 'type', 'world','timestamp']\ntrain=pd.read_csv('train.csv',usecols=keep_cols)\ntrain_labels=pd.read_csv('train_labels.csv',\n usecols=['installation_id','game_session','accuracy_group'])\ntest=pd.read_csv('test.csv',usecols=keep_cols)\nsubmission=pd.read_csv('sample_submission.csv')", "CPU times: user 33.8 s, sys: 3.23 s, total: 37 s\nWall time: 1min 7s\n" ] ], [ [ "We can see that this data contains full history of the installation,ie each time a child has played the game a unique game_session identifier is generated and the attributes related to the game is stored.The atttributes are:\n\nThe data provided in these files are as follows:\n- `event_id` - Randomly generated unique 
identifier for the event type. Maps to event_id column in specs table.\n- `game_session` - Randomly generated unique identifier grouping events within a single game or video play session.\n- `timestamp` - Client-generated datetime\n- `event_data` - Semi-structured JSON formatted string containing the events parameters. Default fields are: event_count, event_code, and game_time; otherwise - fields are determined by the event type.\n- `installation_id` - Randomly generated unique identifier grouping game sessions within a single installed application instance.\n- `event_count` - Incremental counter of events within a game session (offset at 1). Extracted from event_data.\n- `event_code` - Identifier of the event 'class'. Unique per game, but may be duplicated across games. E.g. event code '2000' always identifies the 'Start Game' event for all games. Extracted from event_data.\n- `game_time` - Time in milliseconds since the start of the game session. Extracted from event_data.\n- `title` - Title of the game or video.\n- `type` - Media type of the game or video. Possible values are: 'Game', 'Assessment', 'Activity', 'Clip'.\n- `world` - The section of the application the game or video belongs to. Helpful to identify the educational curriculum goals of the media.\n\n We will not consider `specs.csv`,it contains description of events in natural language.", "_____no_output_____" ] ], [ [ "train.shape,train_labels.shape", "_____no_output_____" ], [ "x=train_labels['accuracy_group'].value_counts()\nsns.barplot(x.index,x)", "_____no_output_____" ] ], [ [ "## <font size=5 color='violet'> Data Preparation</font>", "_____no_output_____" ], [ "In this we will prepare the data and make it in a trainable form.For that we will do the following steps :\n- first,we will find the installation ids which are in `train.csv` and which are not in `train_labels.csv`.These installations won't be of much use to us because `train_labels.csv` contains the the target label,ie `accuracy group`.We will first identify them and remove those rows.", "_____no_output_____" ] ], [ [ "not_req=(set(train.installation_id.unique()) - set(train_labels.installation_id.unique()))", "_____no_output_____" ], [ "train_new=~train['installation_id'].isin(not_req)\ntrain.where(train_new,inplace=True)\ntrain.dropna(inplace=True)\ntrain['event_code']=train.event_code.astype(int)", "_____no_output_____" ] ], [ [ "<font size=3 color='violet'>Extracting time features</font>\n", "_____no_output_____" ] ], [ [ "def extract_time_features(df):\n df['timestamp'] = pd.to_datetime(df['timestamp'])\n df['month'] = df['timestamp'].dt.month\n df['hour'] = df['timestamp'].dt.hour\n df['year'] = df['timestamp'].dt.year\n df['dayofweek'] = df['timestamp'].dt.dayofweek\n df['weekofyear'] = df['timestamp'].dt.weekofyear\n return df", "_____no_output_____" ] ], [ [ "Next,we will define a `prepare_data` funtion to prepare our train and test data.For the we will do the following steps:\n- extract `hour_of_day` from timestamp and drop timestamp column,this indicated the hour of day in which is child playes the game.\n- We will do an on_hot encoding on `event_code` and group the dataframe by installation_id and game_session.\n- We will define an `agg` dictionary to define the the aggregate functions to be performed after grouping the dataframe\n- For variables 'type','world' and 'title' we will the the first value,as it is unique for every installation_id,game_session pair.\n- Atlast, we will join all these togethor and return the dataframe.\n", 
"_____no_output_____" ] ], [ [ "time_features=['month','hour','year','dayofweek','weekofyear']\ndef prepare_data(df):\n df=extract_time_features(df)\n \n df=df.drop('timestamp',axis=1)\n #df['timestamp']=pd.to_datetime(df['timestamp'])\n #df['hour_of_day']=df['timestamp'].map(lambda x : int(x.hour))\n \n\n join_one=pd.get_dummies(df[['event_code','installation_id','game_session']],\n columns=['event_code']).groupby(['installation_id','game_session'],\n as_index=False,sort=False).agg(sum)\n\n agg={'event_count':sum,'game_time':['sum','mean'],'event_id':'count'}\n\n join_two=df.drop(time_features,axis=1).groupby(['installation_id','game_session']\n ,as_index=False,sort=False).agg(agg)\n \n join_two.columns= [' '.join(col).strip() for col in join_two.columns.values]\n \n\n join_three=df[['installation_id','game_session','type','world','title']].groupby(\n ['installation_id','game_session'],as_index=False,sort=False).first()\n \n join_four=df[time_features+['installation_id','game_session']].groupby(['installation_id',\n 'game_session'],as_index=False,sort=False).agg(mode)[time_features].applymap(lambda x: x.mode[0])\n \n join_one=join_one.join(join_four)\n \n join_five=(join_one.join(join_two.drop(['installation_id','game_session'],axis=1))). \\\n join(join_three.drop(['installation_id','game_session'],axis=1))\n \n return join_five\n\n", "_____no_output_____" ], [ "\njoin_train=prepare_data(train)\ncols=join_train.columns.to_list()[2:-3]\njoin_train[cols]=join_train[cols].astype('int16')\n\n", "_____no_output_____" ], [ "join_test=prepare_data(test)\ncols=join_test.columns.to_list()[2:-3]\njoin_test[cols]=join_test[cols].astype('int16')", "_____no_output_____" ] ], [ [ "In this step,we will \n- prepare train by merging our train to train_labels.This will be our `final_train`.\n- prepare the test by selecting last row of each installation_id ,game_session as we have only 1000 rows in `sample_submission`.The last accuracy group for each installation id is taken as the accuracy group of the child.\n", "_____no_output_____" ] ], [ [ "cols=join_test.columns[2:-12].to_list()\ncols.append('event_id count')\ncols.append('installation_id')", "_____no_output_____" ] ], [ [ "- It seems that we have to group dafaframe by `installation_id` to form a proper trainable dataframe.\n- We will apply the same to form out test set.", "_____no_output_____" ] ], [ [ "df=join_test[['event_count sum','game_time mean','game_time sum',\n 'installation_id']].groupby('installation_id',as_index=False,sort=False).agg('mean')\n\ndf_two=join_test[cols].groupby('installation_id',as_index=False,\n sort=False).agg('sum').drop('installation_id',axis=1)\n\ndf_three=join_test[['title','type','world','installation_id']].groupby('installation_id',\n as_index=False,sort=False).last().drop('installation_id',axis=1)\n\ndf_four=join_test[time_features+['installation_id']].groupby('installation_id',as_index=False,sort=False). \\\n agg(mode)[time_features].applymap(lambda x : x.mode[0])\n", "_____no_output_____" ], [ "final_train=pd.merge(train_labels,join_train,on=['installation_id','game_session'],\n how='left').drop(['game_session'],axis=1)\n\n#final_test=join_test.groupby('installation_id',as_index=False,sort=False).last().drop(['game_session','installation_id'],axis=1)\nfinal_test=(df.join(df_two)).join(df_three.join(df_four)).drop('installation_id',axis=1)", "_____no_output_____" ], [ "df_two", "_____no_output_____" ], [ "df=final_train[['event_count sum','game_time mean','game_time sum','installation_id']]. 
\\\n groupby('installation_id',as_index=False,sort=False).agg('mean')\n\ndf_two=final_train[cols].groupby('installation_id',as_index=False,\n sort=False).agg('sum').drop('installation_id',axis=1)\n\ndf_three=final_train[['accuracy_group','title','type','world','installation_id']]. \\\n groupby('installation_id',as_index=False,sort=False). \\\n last().drop('installation_id',axis=1)\n\ndf_four=join_train[time_features+['installation_id']].groupby('installation_id',as_index=False,sort=False). \\\n agg(mode)[time_features].applymap(lambda x : x.mode[0])\n\n\n\nfinal_train=(df.join(df_two)).join(df_three.join(df_four)).drop('installation_id',axis=1)", "_____no_output_____" ], [ "final_train.shape,final_test.shape", "_____no_output_____" ] ], [ [ "Just making sure that all the columns in our `final_train` and `final_test` is the same,except accuracy_group.The instersection should return `54`.", "_____no_output_____" ] ], [ [ "len(set(final_train.columns) & set(final_test.columns))", "_____no_output_____" ] ], [ [ "YES ! It's done..", "_____no_output_____" ], [ "## <font size=4 color='violet'> Label Encoding</font>\n- We will concat out final_train and final_test to form `final`.\n- We will label encode the categorical variables.\n- We will split them back to final_train and final_test.", "_____no_output_____" ] ], [ [ "final=pd.concat([final_train,final_test])\nencoding=['type','world','title']\nfor col in encoding:\n lb=LabelEncoder()\n lb.fit(final[col])\n final[col]=lb.transform(final[col])\n \nfinal_train=final[:len(final_train)]\nfinal_test=final[len(final_train):]\n\n\n \n", "/home/kurian/Kaggle/DSbowl/.env/lib/python3.7/site-packages/ipykernel_launcher.py:1: FutureWarning: Sorting because non-concatenation axis is not aligned. A future version\nof pandas will change to not sort by default.\n\nTo accept the future behavior, pass 'sort=False'.\n\nTo retain the current behavior and silence the warning, pass 'sort=True'.\n\n \"\"\"Entry point for launching an IPython kernel.\n" ], [ "X_train=final_train.drop('accuracy_group',axis=1)\ny_train=final_train['accuracy_group']", "_____no_output_____" ] ], [ [ "<font size=5 color='violet'>Evaluation</font>\n\nSubmissions are scored based on the quadratic weighted kappa, which measures the agreement between two outcomes. This metric typically varies from 0 (random agreement) to 1 (complete agreement). 
In the event that there is less agreement than expected by chance, the metric may go below 0.\n\n$$w_{i,j} = \\frac{\\left(i-j\\right)^2}{\\left(N-1\\right)^2}$$\n\nWe will use `cohen_kappa_score` which is available in `sklearn.metrics` to calculate the score.", "_____no_output_____" ], [ "## <font size=5 color='violet'> XGBoost with StratifiedKFold</font>", "_____no_output_____" ], [ "Here we will use `StratifiedKFold` and `xgboost` model to train and make prediction.\n", "_____no_output_____" ] ], [ [ "def model(X_train,y_train,final_test,n_splits=3):\n scores=[]\n pars = {\n 'colsample_bytree': 0.8, \n 'learning_rate': 0.08,\n 'max_depth': 10,\n 'subsample': 1,\n 'objective':'multi:softprob',\n 'num_class':4,\n 'eval_metric':'mlogloss',\n 'min_child_weight':3,\n 'gamma':0.25,\n 'n_estimators':500\n }\n\n kf = StratifiedKFold(n_splits=n_splits, shuffle=True, random_state=42)\n y_pre=np.zeros((len(final_test),4),dtype=float)\n final_test=xgb.DMatrix(final_test.drop('accuracy_group',axis=1))\n\n\n for train_index, val_index in kf.split(X_train, y_train):\n train_X = X_train.iloc[train_index]\n val_X = X_train.iloc[val_index]\n train_y = y_train[train_index]\n val_y = y_train[val_index]\n xgb_train = xgb.DMatrix(train_X, train_y)\n xgb_eval = xgb.DMatrix(val_X, val_y)\n\n xgb_model = xgb.train(pars,\n xgb_train,\n num_boost_round=1000,\n evals=[(xgb_train, 'train'), (xgb_eval, 'val')],\n verbose_eval=False,\n early_stopping_rounds=20\n )\n\n val_X=xgb.DMatrix(val_X)\n pred_val=[np.argmax(x) for x in xgb_model.predict(val_X)]\n score=cohen_kappa_score(pred_val,val_y,weights='quadratic')\n scores.append(score)\n print('choen_kappa_score :',score)\n\n pred=xgb_model.predict(final_test)\n y_pre+=pred\n\n pred = np.asarray([np.argmax(line) for line in y_pre])\n print('Mean score:',np.mean(scores))\n \n return xgb_model,pred", "_____no_output_____" ], [ "X_train = X_train.drop('installation_id',axis=1)", "_____no_output_____" ], [ "X_train", "_____no_output_____" ], [ "xgb_model,pred=model(X_train,y_train,final_test,5)", "/home/kurian/Kaggle/DSbowl/.env/lib/python3.7/site-packages/xgboost/core.py:587: FutureWarning: Series.base is deprecated and will be removed in a future version\n if getattr(data, 'base', None) is not None and \\\n" ], [ "final_test = final_test.drop('installation_id', axis=1)", "_____no_output_____" ], [ "final_test", "_____no_output_____" ] ], [ [ "## <font size=5 color='violet'> Making our submission</font>", "_____no_output_____" ], [ "After making our prediction we will make our submission to `submission.csv`.", "_____no_output_____" ] ], [ [ "sub=pd.DataFrame({'installation_id':submission.installation_id,'accuracy_group':pred})\nsub.to_csv('submission.csv',index=False)\n", "_____no_output_____" ] ], [ [ "## <font size=5 color='violet'>Feature Selection</font>\n\nWe will use module of xgboost to plot the feature importances and see what features our model think are important for making prediction.\n", "_____no_output_____" ] ], [ [ "fig, ax = plt.subplots(figsize=(10,10))\nxgb.plot_importance(xgb_model, max_num_features=50, height=0.5, ax=ax,importance_type='gain')\nplt.show()", "_____no_output_____" ] ], [ [ "There are three methods to measure feature_importances in xgboost.They are :\n- `weight` : The total number of times this feature was used to split the data across all trees.\n- `Cover` :The number of times a feature is used to split the data across all trees weighted by the number of training data points that go through those splits.\n- `Gain` : The average loss 
reduction gained when using this feature for splitting in trees.\n\nWe used `Gain` in the above example and the model says when it used `event_code_2030` the loss on average was reduced by 8%.", "_____no_output_____" ] ], [ [ "fig, ax = plt.subplots(figsize=(10,10))\nxgb.plot_importance(xgb_model, max_num_features=50, height=0.5, ax=ax,importance_type='weight')\nplt.show()", "_____no_output_____" ] ], [ [ "When we considered weight,the model says that is used `game_time mean` 1035 times to split the data across the trees.\n\nhmmm...so how what can we conclude from above figures?\nWe will find out....", "_____no_output_____" ], [ "### <font size=4 color='violet'>Interpreting our model with Confidence</font>\n\n\n\n`SHAP` is a powerful tool for interpreting our model with more confidence,It makes the process simple and understandable.We will try SHAP in this section for interpret our model.", "_____no_output_____" ] ], [ [ "shap_values = shap.TreeExplainer(xgb_model).shap_values(X_train)\nshap.summary_plot(shap_values, X_train, plot_type=\"bar\")", "_____no_output_____" ] ], [ [ "For example,this figure expains the feature importance and it's influence on different class.\n- for feature `event_code_3020` its SHAP value is high for `class 3` means it influences predicting class 3 is more than any other class.", "_____no_output_____" ], [ "Summary plot for `class 3`", "_____no_output_____" ] ], [ [ "shap.summary_plot(shap_values[3], X_train)", "_____no_output_____" ] ], [ [ "Here we can see that the variables are ranked in the descending order.\n- The most important variable `event_code_3020`.\n- Lower value of `event_code_3020` has a high and positive impact on the model predicting `class 3` .\n- Lower value of `event_code_3020` the model tends to classify it to `class 3`", "_____no_output_____" ], [ "Similarly for `class0`", "_____no_output_____" ] ], [ [ "shap.summary_plot(shap_values[0], X_train)", "_____no_output_____" ] ], [ [ "Hope you can see the differnce between them.", "_____no_output_____" ], [ "### <font size=4 color='violet'>Select from Features</font>\n\nWe will use sklearn `SelectFromFeatures` to select relevent features.", "_____no_output_____" ] ], [ [ "X_train,X_test,y_train,y_test=train_test_split(X_train,y_train,test_size=.1)\nmodel = XGBClassifier()\nmodel.fit(X_train, y_train)", "_____no_output_____" ], [ "threshold = np.sort(model.feature_importances_)[40:]\nfor thresh in threshold:\n # select features using threshold\n selection = SelectFromModel(model, threshold=thresh, prefit=True)\n select_X_train = selection.transform(X_train)\n # train model\n selection_model = XGBClassifier()\n selection_model.fit(select_X_train, y_train)\n # eval model\n select_X_test = selection.transform(X_test)\n y_pred = selection_model.predict(select_X_test)\n predictions = [round(value) for value in y_pred]\n accuracy = cohen_kappa_score(y_test, predictions)\n print(\"Thresh=%.3f, n=%d, cohen kappa score: %.2f%%\" % (thresh, select_X_train.shape[1], accuracy*100.0))\n \n \n ", "Thresh=0.000, n=14, cohen kappa score: 99.83%\nThresh=0.001, n=13, cohen kappa score: 99.83%\nThresh=0.003, n=12, cohen kappa score: 99.83%\nThresh=0.006, n=11, cohen kappa score: 99.83%\nThresh=0.010, n=10, cohen kappa score: 99.83%\nThresh=0.011, n=9, cohen kappa score: 99.83%\nThresh=0.013, n=8, cohen kappa score: 98.10%\nThresh=0.016, n=7, cohen kappa score: 98.10%\nThresh=0.020, n=6, cohen kappa score: 95.27%\nThresh=0.031, n=5, cohen kappa score: 95.27%\nThresh=0.035, n=4, cohen kappa score: 
93.58%\nThresh=0.159, n=3, cohen kappa score: 91.49%\nThresh=0.290, n=2, cohen kappa score: 91.41%\nThresh=0.403, n=1, cohen kappa score: 63.35%\n" ] ], [ [ "We need to look into it further and evaluate...", "_____no_output_____" ], [ "### Under Construction!!!", "_____no_output_____" ], [ "<font color='red' size=4>If you think this kernel was helpful,please don't forget to click on the upvote button,that helps a lot.</font>", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ] ]
d0a611937998e4195cb9ace45432233241c612f8
714,428
ipynb
Jupyter Notebook
scripts/test_scripts/AVIRIS_test.ipynb
kessb/sagebrush-ecosystem-modeling-NEON-hyperspectral
92e5dadc4d36018300bec1fdf0ff14fb73813450
[ "BSD-3-Clause" ]
null
null
null
scripts/test_scripts/AVIRIS_test.ipynb
kessb/sagebrush-ecosystem-modeling-NEON-hyperspectral
92e5dadc4d36018300bec1fdf0ff14fb73813450
[ "BSD-3-Clause" ]
2
2020-07-04T07:43:48.000Z
2020-07-04T20:27:06.000Z
scripts/test_scripts/AVIRIS_test.ipynb
kessb/sagebrush-ecosystem-modeling-NEON-hyperspectral
92e5dadc4d36018300bec1fdf0ff14fb73813450
[ "BSD-3-Clause" ]
null
null
null
927.828571
628,008
0.953906
[ [ [ "# Import needed packages in PEP 8 order (no unused imports listed) (4 points total)\n\n# Import required libraries here\nimport os\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nimport requests\nimport urllib\nimport folium\nimport numpy as np\nimport pandas as pd\nfrom pandas.io.json import json_normalize\nimport geopandas as gpd\nfrom shapely.geometry import shape\nfrom shapely.geometry import Polygon\nfrom shapely.geometry import box\nfrom shapely.geometry import Point\nimport rasterio as rio\nfrom rasterio.mask import mask\nfrom rasterio.plot import plotting_extent\nimport rasterstats as rs\nimport earthpy as et\nimport earthpy.plot as ep\nimport spectral.io.envi as envi\nimport spectral as spy\nimport tarfile\nimport zipfile\nos.chdir(os.path.join(et.io.HOME,'earth-analytics'))", "_____no_output_____" ], [ "def open_ecosystem_structure(site, date):\n '''Uses API call to retrieve NEON ecosystem structure (CHM)\n data at a given site and date. Returns list of all rasters\n within data product. For more information on NEON ecosystem \n structure data and a full list of available dates see\n https://data.neonscience.org/data-products/DP3.30015.001\n \n Parameters\n ----------\n site : str\n 4 Letter site name. See \n https://www.neonscience.org/field-sites/field-sites-map/list\n for a full list of NEON sites\n \n date : str\n Date of data collection in yyyy-mm format\n \n Returns\n -------\n CHM_raster_tiles : .tif\n All raster .tif tiles associated with\n the site and date specified \n '''\n data_product_url = ['https://data.neonscience.org/api/v0/data/DP3.30015.001/'\n + site+'/'+date]\n call_response = requests.get(data_product_url[0])\n call_response.json()\n \n CHM_raster_tiles = []\n \n for i in call_response.json()['data']['files']:\n data_file_url = i['url']\n file_format = data_file_url.find('.tif')\n if not file_format == -1:\n CHM_raster_tiles.append(data_file_url)\n \n return CHM_raster_tiles\n\ndef high_res_imagery(site, date):\n '''Uses API call to retrieve NEON ecosystem structure (CHM)\n data at a given site and date. Returns list of all rasters\n within data product. For more information on NEON ecosystem \n structure data and a full list of available dates see\n https://data.neonscience.org/data-products/DP3.30010.001\n \n Parameters\n ----------\n site : str\n 4 Letter site name. See \n https://www.neonscience.org/field-sites/field-sites-map/list\n for a full list of NEON sites\n \n date : str\n Date of data collection in yyyy-mm format\n \n Returns\n -------\n CHM_raster_tiles : .tif\n All raster .tif tiles associated with\n the site and date specified \n '''\n data_product_url = ['https://data.neonscience.org/api/v0/data/DP3.30010.001/'\n + site+'/'+date]\n call_response = requests.get(data_product_url[0])\n call_response.json()\n \n high_res_raster_tiles = []\n \n for i in call_response.json()['data']['files']:\n data_file_url = i['url']\n file_format = data_file_url.find('.tif')\n if not file_format == -1:\n high_res_raster_tiles.append(data_file_url)\n \n return high_res_raster_tiles\n\n\ndef open_woody_veg_structure(site, date):\n '''Uses API call to retrieve NEON product data for woody \n vegetation structure. Returns pandas of merged apparent \n individual, mapping and tagging, and per plot per year\n documents, eg one dataframe with locational, species, \n and height data. Also returns a pandas dataframe of filtered \n plot data to facilitate geospatial merges and calculation of\n raster stats. 
For more information on NEON woody vegetation \n structure data products and available dates, see\n https://data.neonscience.org/data-products/DP1.10098.001\n \n Parameters\n ----------\n site : str\n 4 Letter site name. See \n https://www.neonscience.org/field-sites/field-sites-map/list\n for a full list of NEON sites\n \n date : str\n Date of data collection in yyyy-mm format\n \n Returns\n -------\n all_merged_df : pandas.core.frame.DataFrame\n Pandas dataframe of merged measurement, plot, and mapping\n tabular files from data product\n \n plot_df : pandas.core.frame.DataFrame\n Pandas dataframe of perplotperyear.csv locational data\n '''\n data_product_url = ['https://data.neonscience.org/api/v0/data/DP1.10098.001/'\n + site+'/'+date]\n call_response = requests.get(data_product_url[0])\n \n all_urls = []\n \n for i in call_response.json()['data']['files']:\n data_file_url = i['url']\n height_find = data_file_url.find('individual')\n plot_find = data_file_url.find('perplot')\n map_find = data_file_url.find('mapping')\n \n if not height_find == -1:\n apparent_df = pd.read_csv(data_file_url)\n elif not plot_find == -1:\n plot_df = pd.read_csv(data_file_url)\n elif not map_find == -1:\n map_df = pd.read_csv(data_file_url)\n \n apparent_df = apparent_df[[\n 'plotID', 'individualID', 'height']]\n \n plot_df = plot_df[['plotID', 'plotType',\n 'decimalLatitude', 'decimalLongitude',\n 'easting', 'northing']]\n \n map_df = map_df[['plotID', 'individualID', 'scientificName']]\n \n measurement_map_merge = pd.merge(\n apparent_df, map_df, on=['plotID', 'individualID'])\n all_merged_df = pd.merge(plot_df, measurement_map_merge, on='plotID')\n \n return all_merged_df, plot_df\n\n\ndef NEON_site_extent(path_to_NEON_boundaries, site):\n '''Extracts a NEON site extent from an individual site as\n long as the original NEON site extent shape file contains \n a column named 'siteID'.\n\n Parameters\n ----------\n path_to_NEON_boundaries : str\n The path to a shape file that contains the list\n of all NEON site extents, also known as field\n sampling boundaries (can be found at NEON and\n ESRI sites)\n\n site : str\n One siteID contains 4 capital letters, \n e.g. CPER, HARV, ONAQ or SJER.\n\n Returns\n -------\n site_boundary : geopandas.geodataframe.GeoDataFrame\n A vector containing a single polygon \n per the site specified. \n '''\n NEON_boundaries = gpd.read_file(path_to_NEON_boundaries)\n boundaries_indexed = NEON_boundaries.set_index(['siteID'])\n\n site_boundary = boundaries_indexed.loc[[site]]\n site_boundary.reset_index(inplace=True)\n\n return site_boundary\n\n\ndef buffer_point_plots(df, crs, buffer):\n '''Creates geodataframe from plot points\n within a designated coordinate reference system. \n Buffers plot points to a given radius. Compatible\n with most NEON tabular plot data files including\n northing and easting locational columns. 
Final product\n can be used to visualize plot locations or combined \n with other spatial data products.\n \n Parameters\n ----------\n df : pandas.core.frame.DataFrame\n df including Northing and Easting plot locations\n \n crs : str or rasterio.crs.CRS\n String of desired coordinate reference system\n \n buffer : int\n Desired radius for final plot polygons\n \n Returns\n -------\n buffered_gdf : geopandas.geodataframe.GeoDataFrame\n Dataframe with point plots buffered to polgyons\n '''\n buffered_gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(\n x=df.easting, y=df.northing), crs=crs)\n \n buffered_gdf['geometry'] = buffered_gdf.geometry.buffer(buffer)\n \n return(buffered_gdf)\n\n\ndef tiles_over_insitu_plots(tiles, plots):\n '''Takes a list of raster images and geodataframe\n of plot polygons within the same CRS. Cross references\n overlap between raster extent polygon and plot point \n polygons. Returns list of .tiff file locations that \n overlap completely with plot polygons.\n ----------\n tiles : list\n List of rasters\n \n plots : geopandas.geodataframe.GeoDataFrame\n Geodataframe with polygons of AOI plots\n \n Returns\n -------\n target_rasters : list\n List of strings with to raster locations\n '''\n target_rasters = []\n \n insitu_plot_size = plots.loc[0, 'geometry'].area\n \n for tile in tiles:\n with rio.open(tile) as src:\n extent = plotting_extent(src)\n raster_polygon = Polygon([\n [extent[0], extent[2]],\n [extent[0], extent[3]],\n [extent[1], extent[3]],\n [extent[1], extent[2]]])\n raster_polygon_gdf = gpd.GeoDataFrame(crs=src.crs,\n geometry=[raster_polygon])\n raster_plot_intersection = gpd.overlay(\n raster_polygon_gdf, plots, how='intersection')\n \n if raster_plot_intersection['geometry'].empty:\n pass\n elif int(\n raster_plot_intersection.loc[0, 'geometry'].area) == int(\n insitu_plot_size):\n target_rasters.append(tile)\n \n return target_rasters\n\n\ndef calculate_rasterstats_dataframe(tiles, plot_polygons):\n '''Creates a geodataframe object with lidar summary statistics \n using rasterstats zonal. Requires a pandas dataframe with plot\n polygons to cross reference with lidar calculations. Outputs a\n single dataframe with uniquely named summary statistics.\n Parameters\n ----------\n df : pandas.core.frame.DataFrame\n df including Northing and Easting plot locations\n \n crs : str or rasterio.crs.CRS\n String of desired coordinate reference system or \n rasterio CRS object\n \n Returns\n -------\n CHM_stats : list of geopandas.geodataframe.GeoDataFrame\n Returns a list of geodataframes with lidar max, lidar mean, \n lidar median and lidar min calculated in new columns.\n calculations. 
\n '''\n CHM_stats = []\n \n for tile in tiles:\n with rio.open(tile) as chm_src:\n site_chm = chm_src.read(1, masked=True)\n site_chm_meta = chm_src.meta\n site_tree_heights = rs.zonal_stats(\n plot_polygons,\n site_chm,\n affine=site_chm_meta[\"transform\"],\n geojson_out=True,\n copy_properties=True,\n nodata=0,\n stats=[\"mean\", \"median\", \"max\", \"min\"])\n site_tree_heights_gdf = gpd.GeoDataFrame.from_features(\n site_tree_heights)\n \n rename_dict_lidar = {\"mean\": \"lidar_mean\",\n \"median\": \"lidar_median\",\n \"max\": \"lidar_max\",\n \"min\": \"lidar_min\"}\n\n site_tree_heights_gdf.rename(columns=rename_dict_lidar, inplace=True)\n CHM_stats.append(site_tree_heights_gdf)\n \n return CHM_stats", "_____no_output_____" ], [ "high_res_list=high_res_imagery('CPER','2017-05')", "_____no_output_____" ], [ "# Download shapefile of all NEON site boundaries\nurl = 'https://www.neonscience.org/neon-terrestrial-field-site-boundaries-shapefile'\net.data.get_data(url=url, replace=True)\n\n# Create path to shapefile\nterrestrial_sites = os.path.join(\n 'data', 'earthpy-downloads',\n 'fieldSamplingBoundaries (1)',\n 'terrestrialSamplingBoundaries.shp')\n\n# Import insitu plot data for CPER and ONAQ sites\nCPER_insitu_df, CPER_plots = open_woody_veg_structure(\n site='CPER', date='2017-09')\n\n# Import CHM data and identify crs\nCPER_tif_files = open_ecosystem_structure(\n site='CPER', date='2017-05')\nwith rio.open(high_res_list[4]) as CPER_src:\n arr_dig=CPER_src.read(masked=True)\n CPER_crs = CPER_src.crs\n\n# Create geodataframes with buffered plot points\nCPER_insitu_gdf = buffer_point_plots(\n df=CPER_insitu_df, crs=CPER_crs, buffer=40)", "Downloading from https://www.neonscience.org/neon-terrestrial-field-site-boundaries-shapefile\n" ], [ "ep.plot_rgb(arr_dig,rgb=(0,1,2))", "_____no_output_____" ], [ "CPER_AOI_tifs = tiles_over_insitu_plots(\n tiles=high_res_list, plots=CPER_insitu_gdf)", "_____no_output_____" ], [ "with rio.open(CPER_AOI_tifs[1]) as src:\n arr=src.read(1,masked=True)\n extent=plotting_extent(src)", "_____no_output_____" ], [ "fig, ax = plt.subplots()\nep.plot_bands(arr,ax=ax,extent=extent)\nCPER_insitu_gdf.plot(ax=ax,\n color='pink')", "_____no_output_____" ], [ "tf_AVIRIS = tarfile.open(os.path.join('data','AVIRIS','f190821t01p00r12.tar.gz'))\ntf_AVIRIS.extractall(path=os.path.join('AVIRIS'))\n# tf_LANDSAT = tarfile.open(os.path.join(\n# ONAQ_directory,'LC08_L1TP_038032_20171010_20171024_01_T1.tar.gz'))\n# tf_LANDSAT.extractall(path=LANDSAT_path)", "_____no_output_____" ], [ "# Download shapefile of all NEON site boundaries\nurl = 'https://www.neonscience.org/neon-terrestrial-field-site-boundaries-shapefile'\net.data.get_data(url=url, replace=True)\n\n# Create path to shapefile\nterrestrial_sites = os.path.join(\n 'data', 'earthpy-downloads',\n 'fieldSamplingBoundaries (1)',\n 'terrestrialSamplingBoundaries.shp')\nCPER_site_outline = NEON_site_extent(\n path_to_NEON_boundaries=terrestrial_sites,\n site='CPER')\nCPER_site_outline.crs=AVIRIS_crs\nCPER_site_outline.crs", "Downloading from https://www.neonscience.org/neon-terrestrial-field-site-boundaries-shapefile\n" ], [ "AVIRIS_path = os.path.join('AVIRIS', 'f170509t01p00r05rdn_e','f170509t01p00r05rdn_e_sc01_ort_img')\nwith rio.open (AVIRIS_path) as src:\n arr= src.read(masked=True)\n extent=plotting_extent(src)\n", "_____no_output_____" ], [ "arr.shape", "_____no_output_____" ], [ "# fig, ax = plt.subplots()\nep.plot_rgb(arr, rgb=(40, 10, 20))\n# CPER_insitu_gdf.plot(ax=ax,\n# color='pink')\n\n", 
"_____no_output_____" ], [ "# CPER_AOI_tifs = tiles_over_insitu_plots(\n# tiles=AVIRIS_path, plots=CPER_insitu_gdf)\n\nwith rio.open(AVIRIS_path) as chm_src:\n site_chm = chm_src.read(1, masked=True)\n site_chm_meta = chm_src.meta\nsite_tree_heights = rs.zonal_stats(\n CPER_insitu_gdf,\n site_chm,\n affine=site_chm_meta[\"transform\"],\n geojson_out=True,\n copy_properties=True,\n nodata=0,\n stats=[\"mean\", \"median\", \"max\", \"min\"])\nsite_tree_heights_gdf = gpd.GeoDataFrame.from_features(\n site_tree_heights)", "_____no_output_____" ], [ "site_tree_heights_gdf", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a623b61224d406473f854c45adae042f3e8861
7,628
ipynb
Jupyter Notebook
notebook/introduction/choosing_algs.ipynb
KZiemian/DiffEqTutorials.jl
97c2bce7039bd976522f06d7e1c9900bb63c2aff
[ "MIT" ]
1
2019-03-22T12:30:52.000Z
2019-03-22T12:30:52.000Z
notebook/introduction/choosing_algs.ipynb
KZiemian/DiffEqTutorials.jl
97c2bce7039bd976522f06d7e1c9900bb63c2aff
[ "MIT" ]
null
null
null
notebook/introduction/choosing_algs.ipynb
KZiemian/DiffEqTutorials.jl
97c2bce7039bd976522f06d7e1c9900bb63c2aff
[ "MIT" ]
null
null
null
46.512195
1,966
0.607892
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
d0a62c7919f2f724a3b4d18af9f8399e65815c64
33,663
ipynb
Jupyter Notebook
batch-norm/Batch_Normalization_Exercises.ipynb
AlphaGit/deep-learning
1efa4a57b7c67c40d19ea31304cfeb3afdf0e08e
[ "MIT" ]
null
null
null
batch-norm/Batch_Normalization_Exercises.ipynb
AlphaGit/deep-learning
1efa4a57b7c67c40d19ea31304cfeb3afdf0e08e
[ "MIT" ]
null
null
null
batch-norm/Batch_Normalization_Exercises.ipynb
AlphaGit/deep-learning
1efa4a57b7c67c40d19ea31304cfeb3afdf0e08e
[ "MIT" ]
null
null
null
47.748936
586
0.600214
[ [ [ "# Batch Normalization – Practice", "_____no_output_____" ], [ "Batch normalization is most useful when building deep neural networks. To demonstrate this, we'll create a convolutional neural network with 20 convolutional layers, followed by a fully connected layer. We'll use it to classify handwritten digits in the MNIST dataset, which should be familiar to you by now.\n\nThis is **not** a good network for classfying MNIST digits. You could create a _much_ simpler network and get _better_ results. However, to give you hands-on experience with batch normalization, we had to make an example that was:\n1. Complicated enough that training would benefit from batch normalization.\n2. Simple enough that it would train quickly, since this is meant to be a short exercise just to give you some practice adding batch normalization.\n3. Simple enough that the architecture would be easy to understand without additional resources.", "_____no_output_____" ], [ "This notebook includes two versions of the network that you can edit. The first uses higher level functions from the `tf.layers` package. The second is the same network, but uses only lower level functions in the `tf.nn` package.\n\n1. [Batch Normalization with `tf.layers.batch_normalization`](#example_1)\n2. [Batch Normalization with `tf.nn.batch_normalization`](#example_2)", "_____no_output_____" ], [ "The following cell loads TensorFlow, downloads the MNIST dataset if necessary, and loads it into an object named `mnist`. You'll need to run this cell before running anything else in the notebook.", "_____no_output_____" ] ], [ [ "import tensorflow as tf\nfrom tensorflow.examples.tutorials.mnist import input_data\nmnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True, reshape=False)", "Extracting MNIST_data/train-images-idx3-ubyte.gz\nExtracting MNIST_data/train-labels-idx1-ubyte.gz\nExtracting MNIST_data/t10k-images-idx3-ubyte.gz\nExtracting MNIST_data/t10k-labels-idx1-ubyte.gz\n" ] ], [ [ "# Batch Normalization using `tf.layers.batch_normalization`<a id=\"example_1\"></a>\n\nThis version of the network uses `tf.layers` for almost everything, and expects you to implement batch normalization using [`tf.layers.batch_normalization`](https://www.tensorflow.org/api_docs/python/tf/layers/batch_normalization) ", "_____no_output_____" ], [ "We'll use the following function to create fully connected layers in our network. We'll create them with the specified number of neurons and a ReLU activation function.\n\nThis version of the function does not include batch normalization.", "_____no_output_____" ] ], [ [ "\"\"\"\nDO NOT MODIFY THIS CELL\n\"\"\"\ndef fully_connected(prev_layer, num_units):\n \"\"\"\n Create a fully connectd layer with the given layer as input and the given number of neurons.\n \n :param prev_layer: Tensor\n The Tensor that acts as input into this layer\n :param num_units: int\n The size of the layer. That is, the number of units, nodes, or neurons.\n :returns Tensor\n A new fully connected layer\n \"\"\"\n layer = tf.layers.dense(prev_layer, num_units, activation=tf.nn.relu)\n return layer", "_____no_output_____" ] ], [ [ "We'll use the following function to create convolutional layers in our network. They are very basic: we're always using a 3x3 kernel, ReLU activation functions, strides of 1x1 on layers with odd depths, and strides of 2x2 on layers with even depths. 
We aren't bothering with pooling layers at all in this network.\n\nThis version of the function does not include batch normalization.", "_____no_output_____" ] ], [ [ "\"\"\"\nDO NOT MODIFY THIS CELL\n\"\"\"\ndef conv_layer(prev_layer, layer_depth):\n \"\"\"\n Create a convolutional layer with the given layer as input.\n \n :param prev_layer: Tensor\n The Tensor that acts as input into this layer\n :param layer_depth: int\n We'll set the strides and number of feature maps based on the layer's depth in the network.\n This is *not* a good way to make a CNN, but it helps us create this example with very little code.\n :returns Tensor\n A new convolutional layer\n \"\"\"\n strides = 2 if layer_depth % 3 == 0 else 1\n conv_layer = tf.layers.conv2d(prev_layer, layer_depth*4, 3, strides, 'same', activation=tf.nn.relu)\n return conv_layer", "_____no_output_____" ] ], [ [ "**Run the following cell**, along with the earlier cells (to load the dataset and define the necessary functions). \n\nThis cell builds the network **without** batch normalization, then trains it on the MNIST dataset. It displays loss and accuracy data periodically while training.", "_____no_output_____" ] ], [ [ "\"\"\"\nDO NOT MODIFY THIS CELL\n\"\"\"\ndef train(num_batches, batch_size, learning_rate):\n # Build placeholders for the input samples and labels \n inputs = tf.placeholder(tf.float32, [None, 28, 28, 1])\n labels = tf.placeholder(tf.float32, [None, 10])\n \n # Feed the inputs into a series of 20 convolutional layers \n layer = inputs\n for layer_i in range(1, 20):\n layer = conv_layer(layer, layer_i)\n\n # Flatten the output from the convolutional layers \n orig_shape = layer.get_shape().as_list()\n layer = tf.reshape(layer, shape=[-1, orig_shape[1] * orig_shape[2] * orig_shape[3]])\n\n # Add one fully connected layer\n layer = fully_connected(layer, 100)\n\n # Create the output layer with 1 node for each \n logits = tf.layers.dense(layer, 10)\n \n # Define loss and training operations\n model_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=labels))\n train_opt = tf.train.AdamOptimizer(learning_rate).minimize(model_loss)\n \n # Create operations to test accuracy\n correct_prediction = tf.equal(tf.argmax(logits,1), tf.argmax(labels,1))\n accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n \n # Train and test the network\n with tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n for batch_i in range(num_batches):\n batch_xs, batch_ys = mnist.train.next_batch(batch_size)\n\n # train this batch\n sess.run(train_opt, {inputs: batch_xs, labels: batch_ys})\n \n # Periodically check the validation or training loss and accuracy\n if batch_i % 100 == 0:\n loss, acc = sess.run([model_loss, accuracy], {inputs: mnist.validation.images,\n labels: mnist.validation.labels})\n print('Batch: {:>2}: Validation loss: {:>3.5f}, Validation accuracy: {:>3.5f}'.format(batch_i, loss, acc))\n elif batch_i % 25 == 0:\n loss, acc = sess.run([model_loss, accuracy], {inputs: batch_xs, labels: batch_ys})\n print('Batch: {:>2}: Training loss: {:>3.5f}, Training accuracy: {:>3.5f}'.format(batch_i, loss, acc))\n\n # At the end, score the final accuracy for both the validation and test sets\n acc = sess.run(accuracy, {inputs: mnist.validation.images,\n labels: mnist.validation.labels})\n print('Final validation accuracy: {:>3.5f}'.format(acc))\n acc = sess.run(accuracy, {inputs: mnist.test.images,\n labels: mnist.test.labels})\n print('Final test accuracy: {:>3.5f}'.format(acc))\n 
\n # Score the first 100 test images individually. This won't work if batch normalization isn't implemented correctly.\n correct = 0\n for i in range(100):\n correct += sess.run(accuracy,feed_dict={inputs: [mnist.test.images[i]],\n labels: [mnist.test.labels[i]]})\n\n print(\"Accuracy on 100 samples:\", correct/100)\n\n\nnum_batches = 800\nbatch_size = 64\nlearning_rate = 0.002\n\ntf.reset_default_graph()\nwith tf.Graph().as_default():\n train(num_batches, batch_size, learning_rate)", "Batch: 0: Validation loss: 0.69148, Validation accuracy: 0.10020\nBatch: 25: Training loss: 0.36335, Training accuracy: 0.09375\nBatch: 50: Training loss: 0.32443, Training accuracy: 0.14062\nBatch: 75: Training loss: 0.32804, Training accuracy: 0.06250\nBatch: 100: Validation loss: 0.32516, Validation accuracy: 0.11260\nBatch: 125: Training loss: 0.32760, Training accuracy: 0.03125\nBatch: 150: Training loss: 0.32632, Training accuracy: 0.09375\nBatch: 175: Training loss: 0.32301, Training accuracy: 0.21875\nBatch: 200: Validation loss: 0.32559, Validation accuracy: 0.09860\nBatch: 225: Training loss: 0.32463, Training accuracy: 0.15625\nBatch: 250: Training loss: 0.32577, Training accuracy: 0.07812\nBatch: 275: Training loss: 0.32323, Training accuracy: 0.12500\nBatch: 300: Validation loss: 0.32581, Validation accuracy: 0.08680\nBatch: 325: Training loss: 0.32244, Training accuracy: 0.15625\nBatch: 350: Training loss: 0.32360, Training accuracy: 0.12500\nBatch: 375: Training loss: 0.32700, Training accuracy: 0.09375\nBatch: 400: Validation loss: 0.32488, Validation accuracy: 0.11260\nBatch: 425: Training loss: 0.32565, Training accuracy: 0.09375\nBatch: 450: Training loss: 0.32674, Training accuracy: 0.10938\nBatch: 475: Training loss: 0.32676, Training accuracy: 0.04688\nBatch: 500: Validation loss: 0.32532, Validation accuracy: 0.11000\nBatch: 525: Training loss: 0.32892, Training accuracy: 0.01562\nBatch: 550: Training loss: 0.32536, Training accuracy: 0.09375\nBatch: 575: Training loss: 0.32258, Training accuracy: 0.20312\nBatch: 600: Validation loss: 0.32513, Validation accuracy: 0.11260\nBatch: 625: Training loss: 0.32446, Training accuracy: 0.14062\nBatch: 650: Training loss: 0.32985, Training accuracy: 0.03125\nBatch: 675: Training loss: 0.32479, Training accuracy: 0.12500\nBatch: 700: Validation loss: 0.32452, Validation accuracy: 0.11260\nBatch: 725: Training loss: 0.32459, Training accuracy: 0.18750\nBatch: 750: Training loss: 0.32200, Training accuracy: 0.14062\nBatch: 775: Training loss: 0.32792, Training accuracy: 0.04688\nFinal validation accuracy: 0.11000\nFinal test accuracy: 0.10280\nAccuracy on 100 samples: 0.15\n" ] ], [ [ "With this many layers, it's going to take a lot of iterations for this network to learn. By the time you're done training these 800 batches, your final test and validation accuracies probably won't be much better than 10%. (It will be different each time, but will most likely be less than 15%.)\n\nUsing batch normalization, you'll be able to train this same network to over 90% in that same number of batches.\n\n\n# Add batch normalization\n\nWe've copied the previous three cells to get you started. **Edit these cells** to add batch normalization to the network. For this exercise, you should use [`tf.layers.batch_normalization`](https://www.tensorflow.org/api_docs/python/tf/layers/batch_normalization) to handle most of the math, but you'll need to make a few other changes to your network to integrate batch normalization. 
You may want to refer back to the lesson notebook to remind yourself of important things, like how your graph operations need to know whether or not you are performing training or inference. \n\nIf you get stuck, you can check out the `Batch_Normalization_Solutions` notebook to see how we did things.", "_____no_output_____" ], [ "**TODO:** Modify `fully_connected` to add batch normalization to the fully connected layers it creates. Feel free to change the function's parameters if it helps.", "_____no_output_____" ] ], [ [ "def fully_connected(prev_layer, num_units, is_training):\n \"\"\"\n Create a fully connectd layer with the given layer as input and the given number of neurons.\n \n :param prev_layer: Tensor\n The Tensor that acts as input into this layer\n :param num_units: int\n The size of the layer. That is, the number of units, nodes, or neurons.\n :returns Tensor\n A new fully connected layer\n \"\"\"\n layer = tf.layers.dense(prev_layer, num_units, activation=None)\n layer = tf.layers.batch_normalization(layer, training=is_training)\n layer = tf.nn.relu(layer)\n return layer", "_____no_output_____" ] ], [ [ "**TODO:** Modify `conv_layer` to add batch normalization to the convolutional layers it creates. Feel free to change the function's parameters if it helps.", "_____no_output_____" ] ], [ [ "def conv_layer(prev_layer, layer_depth, is_training):\n \"\"\"\n Create a convolutional layer with the given layer as input.\n \n :param prev_layer: Tensor\n The Tensor that acts as input into this layer\n :param layer_depth: int\n We'll set the strides and number of feature maps based on the layer's depth in the network.\n This is *not* a good way to make a CNN, but it helps us create this example with very little code.\n :returns Tensor\n A new convolutional layer\n \"\"\"\n strides = 2 if layer_depth % 3 == 0 else 1\n conv_layer = tf.layers.conv2d(prev_layer, layer_depth*4, 3, strides, 'same', activation=None)\n conv_layer = tf.layers.batch_normalization(conv_layer, training=is_training)\n conv_layer = tf.nn.relu(conv_layer)\n return conv_layer", "_____no_output_____" ] ], [ [ "**TODO:** Edit the `train` function to support batch normalization. 
You'll need to make sure the network knows whether or not it is training, and you'll need to make sure it updates and uses its population statistics correctly.", "_____no_output_____" ] ], [ [ "def train(num_batches, batch_size, learning_rate):\n # Build placeholders for the input samples and labels \n inputs = tf.placeholder(tf.float32, [None, 28, 28, 1])\n labels = tf.placeholder(tf.float32, [None, 10])\n is_training = tf.placeholder(tf.bool)\n \n # Feed the inputs into a series of 20 convolutional layers \n layer = inputs\n for layer_i in range(1, 20):\n layer = conv_layer(layer, layer_i, is_training)\n\n # Flatten the output from the convolutional layers \n orig_shape = layer.get_shape().as_list()\n layer = tf.reshape(layer, shape=[-1, orig_shape[1] * orig_shape[2] * orig_shape[3]])\n\n # Add one fully connected layer\n layer = fully_connected(layer, 100, is_training)\n\n # Create the output layer with 1 node for each \n logits = tf.layers.dense(layer, 10)\n \n # Define loss and training operations\n model_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=labels))\n extra_update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)\n with tf.control_dependencies(extra_update_ops):\n train_opt = tf.train.AdamOptimizer(learning_rate).minimize(model_loss)\n \n # Create operations to test accuracy\n correct_prediction = tf.equal(tf.argmax(logits,1), tf.argmax(labels,1))\n accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n \n # Train and test the network\n with tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n for batch_i in range(num_batches):\n batch_xs, batch_ys = mnist.train.next_batch(batch_size)\n\n # train this batch\n sess.run(train_opt, {inputs: batch_xs, labels: batch_ys, is_training: True})\n \n # Periodically check the validation or training loss and accuracy\n if batch_i % 100 == 0:\n loss, acc = sess.run([model_loss, accuracy], {inputs: mnist.validation.images,\n labels: mnist.validation.labels,\n is_training: False})\n print('Batch: {:>2}: Validation loss: {:>3.5f}, Validation accuracy: {:>3.5f}'.format(batch_i, loss, acc))\n elif batch_i % 25 == 0:\n loss, acc = sess.run([model_loss, accuracy], {inputs: batch_xs, labels: batch_ys, is_training: False})\n print('Batch: {:>2}: Training loss: {:>3.5f}, Training accuracy: {:>3.5f}'.format(batch_i, loss, acc))\n\n # At the end, score the final accuracy for both the validation and test sets\n acc = sess.run(accuracy, {inputs: mnist.validation.images,\n labels: mnist.validation.labels,\n is_training: False})\n print('Final validation accuracy: {:>3.5f}'.format(acc))\n acc = sess.run(accuracy, {inputs: mnist.test.images,\n labels: mnist.test.labels,\n is_training: False})\n print('Final test accuracy: {:>3.5f}'.format(acc))\n \n # Score the first 100 test images individually. 
This won't work if batch normalization isn't implemented correctly.\n correct = 0\n for i in range(100):\n correct += sess.run(accuracy,feed_dict={inputs: [mnist.test.images[i]],\n labels: [mnist.test.labels[i]],\n is_training: False})\n\n print(\"Accuracy on 100 samples:\", correct/100)\n\n\nnum_batches = 800\nbatch_size = 64\nlearning_rate = 0.002\n\ntf.reset_default_graph()\nwith tf.Graph().as_default():\n train(num_batches, batch_size, learning_rate)", "Batch: 0: Validation loss: 0.69141, Validation accuracy: 0.09900\nBatch: 25: Training loss: 0.59129, Training accuracy: 0.12500\nBatch: 50: Training loss: 0.47563, Training accuracy: 0.10938\nBatch: 75: Training loss: 0.40750, Training accuracy: 0.09375\nBatch: 100: Validation loss: 0.36580, Validation accuracy: 0.08680\nBatch: 125: Training loss: 0.33316, Training accuracy: 0.14062\nBatch: 150: Training loss: 0.33441, Training accuracy: 0.06250\nBatch: 175: Training loss: 0.34355, Training accuracy: 0.06250\nBatch: 200: Validation loss: 0.36882, Validation accuracy: 0.12000\nBatch: 225: Training loss: 0.34512, Training accuracy: 0.20312\nBatch: 250: Training loss: 0.26877, Training accuracy: 0.46875\nBatch: 275: Training loss: 0.16839, Training accuracy: 0.57812\nBatch: 300: Validation loss: 0.16411, Validation accuracy: 0.70360\nBatch: 325: Training loss: 0.08081, Training accuracy: 0.84375\nBatch: 350: Training loss: 0.08058, Training accuracy: 0.85938\nBatch: 375: Training loss: 0.08318, Training accuracy: 0.84375\nBatch: 400: Validation loss: 0.03204, Validation accuracy: 0.95360\nBatch: 425: Training loss: 0.02391, Training accuracy: 0.96875\nBatch: 450: Training loss: 0.02798, Training accuracy: 0.96875\nBatch: 475: Training loss: 0.03037, Training accuracy: 0.96875\nBatch: 500: Validation loss: 0.02716, Validation accuracy: 0.96380\nBatch: 525: Training loss: 0.03305, Training accuracy: 0.95312\nBatch: 550: Training loss: 0.00323, Training accuracy: 1.00000\nBatch: 575: Training loss: 0.03914, Training accuracy: 0.93750\nBatch: 600: Validation loss: 0.03455, Validation accuracy: 0.94720\nBatch: 625: Training loss: 0.02496, Training accuracy: 0.96875\nBatch: 650: Training loss: 0.03675, Training accuracy: 0.93750\nBatch: 675: Training loss: 0.01709, Training accuracy: 0.96875\nBatch: 700: Validation loss: 0.02385, Validation accuracy: 0.96880\nBatch: 725: Training loss: 0.03658, Training accuracy: 0.96875\nBatch: 750: Training loss: 0.02646, Training accuracy: 0.93750\nBatch: 775: Training loss: 0.06232, Training accuracy: 0.92188\nFinal validation accuracy: 0.96100\nFinal test accuracy: 0.95800\nAccuracy on 100 samples: 0.98\n" ] ], [ [ "With batch normalization, you should now get an accuracy over 90%. Notice also the last line of the output: `Accuracy on 100 samples`. If this value is low while everything else looks good, that means you did not implement batch normalization correctly. Specifically, it means you either did not calculate the population mean and variance while training, or you are not using those values during inference.\n\n# Batch Normalization using `tf.nn.batch_normalization`<a id=\"example_2\"></a>\n\nMost of the time you will be able to use higher level functions exclusively, but sometimes you may want to work at a lower level. 
For example, if you ever want to implement a new feature – something new enough that TensorFlow does not already include a high-level implementation of it, like batch normalization in an LSTM – then you may need to know these sorts of things.\n\nThis version of the network uses `tf.nn` for almost everything, and expects you to implement batch normalization using [`tf.nn.batch_normalization`](https://www.tensorflow.org/api_docs/python/tf/nn/batch_normalization).\n\n**Optional TODO:** You can run the next three cells before you edit them just to see how the network performs without batch normalization. However, the results should be pretty much the same as you saw with the previous example before you added batch normalization. \n\n**TODO:** Modify `fully_connected` to add batch normalization to the fully connected layers it creates. Feel free to change the function's parameters if it helps.\n\n**Note:** For convenience, we continue to use `tf.layers.dense` for the `fully_connected` layer. By this point in the class, you should have no problem replacing that with matrix operations between the `prev_layer` and explicit weights and biases variables.", "_____no_output_____" ] ], [ [ "def fully_connected(prev_layer, num_units):\n \"\"\"\n Create a fully connectd layer with the given layer as input and the given number of neurons.\n \n :param prev_layer: Tensor\n The Tensor that acts as input into this layer\n :param num_units: int\n The size of the layer. That is, the number of units, nodes, or neurons.\n :returns Tensor\n A new fully connected layer\n \"\"\"\n weights = tf.Variable(tf.random_normal([prev_layer.shape[0], num_units]))\n layer = tf.matmul(prev_layer, weights)\n \n # missing: ReLU\n return layer", "_____no_output_____" ] ], [ [ "**TODO:** Modify `conv_layer` to add batch normalization to the fully connected layers it creates. Feel free to change the function's parameters if it helps.\n\n**Note:** Unlike in the previous example that used `tf.layers`, adding batch normalization to these convolutional layers _does_ require some slight differences to what you did in `fully_connected`. ", "_____no_output_____" ] ], [ [ "def conv_layer(prev_layer, layer_depth):\n \"\"\"\n Create a convolutional layer with the given layer as input.\n \n :param prev_layer: Tensor\n The Tensor that acts as input into this layer\n :param layer_depth: int\n We'll set the strides and number of feature maps based on the layer's depth in the network.\n This is *not* a good way to make a CNN, but it helps us create this example with very little code.\n :returns Tensor\n A new convolutional layer\n \"\"\"\n strides = 2 if layer_depth % 3 == 0 else 1\n\n in_channels = prev_layer.get_shape().as_list()[3]\n out_channels = layer_depth*4\n \n weights = tf.Variable(\n tf.truncated_normal([3, 3, in_channels, out_channels], stddev=0.05))\n \n bias = tf.Variable(tf.zeros(out_channels))\n\n conv_layer = tf.nn.conv2d(prev_layer, weights, strides=[1,strides, strides, 1], padding='SAME')\n conv_layer = tf.nn.bias_add(conv_layer, bias)\n conv_layer = tf.nn.relu(conv_layer)\n\n return conv_layer", "_____no_output_____" ] ], [ [ "**TODO:** Edit the `train` function to support batch normalization. 
You'll need to make sure the network knows whether or not it is training.", "_____no_output_____" ] ], [ [ "def train(num_batches, batch_size, learning_rate):\n # Build placeholders for the input samples and labels \n inputs = tf.placeholder(tf.float32, [None, 28, 28, 1])\n labels = tf.placeholder(tf.float32, [None, 10])\n \n # Feed the inputs into a series of 20 convolutional layers \n layer = inputs\n for layer_i in range(1, 20):\n layer = conv_layer(layer, layer_i)\n\n # Flatten the output from the convolutional layers \n orig_shape = layer.get_shape().as_list()\n layer = tf.reshape(layer, shape=[-1, orig_shape[1] * orig_shape[2] * orig_shape[3]])\n\n # Add one fully connected layer\n layer = fully_connected(layer, 100)\n\n # Create the output layer with 1 node for each \n logits = tf.layers.dense(layer, 10)\n \n # Define loss and training operations\n model_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=labels))\n train_opt = tf.train.AdamOptimizer(learning_rate).minimize(model_loss)\n \n # Create operations to test accuracy\n correct_prediction = tf.equal(tf.argmax(logits,1), tf.argmax(labels,1))\n accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n \n # Train and test the network\n with tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n for batch_i in range(num_batches):\n batch_xs, batch_ys = mnist.train.next_batch(batch_size)\n\n # train this batch\n sess.run(train_opt, {inputs: batch_xs, labels: batch_ys})\n \n # Periodically check the validation or training loss and accuracy\n if batch_i % 100 == 0:\n loss, acc = sess.run([model_loss, accuracy], {inputs: mnist.validation.images,\n labels: mnist.validation.labels})\n print('Batch: {:>2}: Validation loss: {:>3.5f}, Validation accuracy: {:>3.5f}'.format(batch_i, loss, acc))\n elif batch_i % 25 == 0:\n loss, acc = sess.run([model_loss, accuracy], {inputs: batch_xs, labels: batch_ys})\n print('Batch: {:>2}: Training loss: {:>3.5f}, Training accuracy: {:>3.5f}'.format(batch_i, loss, acc))\n\n # At the end, score the final accuracy for both the validation and test sets\n acc = sess.run(accuracy, {inputs: mnist.validation.images,\n labels: mnist.validation.labels})\n print('Final validation accuracy: {:>3.5f}'.format(acc))\n acc = sess.run(accuracy, {inputs: mnist.test.images,\n labels: mnist.test.labels})\n print('Final test accuracy: {:>3.5f}'.format(acc))\n \n # Score the first 100 test images individually. This won't work if batch normalization isn't implemented correctly.\n correct = 0\n for i in range(100):\n correct += sess.run(accuracy,feed_dict={inputs: [mnist.test.images[i]],\n labels: [mnist.test.labels[i]]})\n\n print(\"Accuracy on 100 samples:\", correct/100)\n\n\nnum_batches = 800\nbatch_size = 64\nlearning_rate = 0.002\n\ntf.reset_default_graph()\nwith tf.Graph().as_default():\n train(num_batches, batch_size, learning_rate)", "_____no_output_____" ] ], [ [ "Once again, the model with batch normalization should reach an accuracy over 90%. There are plenty of details that can go wrong when implementing at this low level, so if you got it working - great job! If not, do not worry, just look at the `Batch_Normalization_Solutions` notebook to see what went wrong.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
d0a6469e0d504ef6aad52bcade55ec7721f6f04f
1,011,812
ipynb
Jupyter Notebook
draft.ipynb
JiqianDong/BDD-100-CV
99d4e20595ae667c79a3a6e78290425cbe60467f
[ "MIT" ]
3
2020-12-01T03:06:32.000Z
2022-02-06T04:08:50.000Z
draft.ipynb
JiqianDong/BDD-100-CV
99d4e20595ae667c79a3a6e78290425cbe60467f
[ "MIT" ]
null
null
null
draft.ipynb
JiqianDong/BDD-100-CV
99d4e20595ae667c79a3a6e78290425cbe60467f
[ "MIT" ]
null
null
null
626.508978
537,344
0.891962
[ [ [ "from imports import *\nimport pickle\n\n# device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')\ndevice = torch.device(\"cuda:0\")", "_____no_output_____" ], [ "2048*6*10", "_____no_output_____" ], [ "\ndef get_encoder(model_name):\n if model_name == 'mobile_net':\n md = torchvision.models.mobilenet_v2(pretrained=True)\n encoder = nn.Sequential(*list(md.children())[:-1])\n elif model_name == 'resnet':\n md = torchvision.models.resnet50(pretrained=True)\n encoder = nn.Sequential(*list(md.children())[:-2])\n return encoder\n\nclass DecisionGenerator_no_attention(nn.Module):\n def __init__(self, encoder, encoder_dims, device, action_num=4, explanation_num=21):\n super().__init__()\n\n \"\"\"\n encoder_dims = (F,H,W) \n F:Feature shape (1280 for mobile net, 2048 for resnet)\n H,W = image feature height, width \n \"\"\"\n self.encoder = encoder\n\n assert len(encoder_dims) == 3, \"encoder_dims has to be a triplet with shape (F,H,W)\" \n\n F,H,W = encoder_dims\n ind_dim = H*W*F\n self.action_branch = nn.Sequential(\n nn.Linear(ind_dim,12),\n nn.ReLU(),\n # nn.Dropout(),\n nn.Linear(12,action_num))\n\n self.explanation_branch = nn.Sequential(\n nn.Linear(ind_dim,12),\n nn.ReLU(),\n # nn.Dropout(),\n nn.Linear(12, explanation_num))\n\n self.action_loss_fn, self.reason_loss_fn = self.loss_fn(device)\n\n def loss_fn(self,device):\n class_weights = [1, 1, 2, 2]\n w = torch.FloatTensor(class_weights).to(device)\n action_loss = nn.BCEWithLogitsLoss(pos_weight=w).to(device)\n explanation_loss = nn.BCEWithLogitsLoss().to(device)\n return action_loss,explanation_loss\n\n\n def forward(self,images,targets=None):\n images = torch.stack(images)\n if self.training:\n assert targets is not None\n target_reasons = torch.stack([t['reason'] for t in targets])\n target_actions = torch.stack([t['action'] for t in targets])\n # print(images.shape)\n features = self.encoder(images) # \n # print(features.shape)\n\n B,F,H,W = features.shape\n\n # print(features.view(B,F,H*W).transpose(1,2).shape)\n # print(transformed_feature.shape)\n feature_polled = torch.flatten(features,start_dim=1)\n\n # print(feature_polled.shape)\n\n actions = self.action_branch(feature_polled)\n reasons = self.explanation_branch(feature_polled)\n\n if self.training:\n action_loss = self.action_loss_fn(actions, target_actions)\n reason_loss = self.reason_loss_fn(reasons, target_reasons)\n loss_dic = {\"action_loss\":action_loss, \"reason_loss\":reason_loss}\n return loss_dic\n else:\n return {\"action\":torch.sigmoid(actions),\"reasons\":torch.sigmoid(reasons)}\n\nencoder = get_encoder('resnet')\n\ndg = DecisionGenerator_no_attention(encoder,encoder_dims=(2048,6,10), device='cpu' )\n# params = sum([np.prod(p.size()) for p in model_parameters])\n# print(\"len of params: \",params)", "_____no_output_____" ], [ "def count_parameters(model):\n return sum(p.numel() for p in model.parameters() if p.requires_grad)\n\ncount_parameters(dg)", "_____no_output_____" ], [ "class MHSA2(nn.Module):\n def __init__(self,\n emb_dim,\n kqv_dim,\n output_dim=10,\n num_heads=8):\n super(MHSA2, self).__init__()\n self.emb_dim = emb_dim\n self.kqv_dim = kqv_dim\n self.num_heads = num_heads\n\n self.w_k = nn.Linear(emb_dim, kqv_dim * num_heads, bias=False)\n self.w_q = nn.Linear(emb_dim, kqv_dim * num_heads, bias=False)\n self.w_v = nn.Linear(emb_dim, kqv_dim * num_heads, bias=False)\n self.w_out = nn.Linear(kqv_dim * num_heads, output_dim)\n\n def forward(self, x):\n\n b, t, _ = x.shape\n e = self.kqv_dim\n h = self.num_heads\n 
keys = self.w_k(x).view(b, t, h, e)\n values = self.w_v(x).view(b, t, h, e)\n queries = self.w_q(x).view(b, t, h, e)\n\n keys = keys.transpose(2, 1)\n queries = queries.transpose(2, 1)\n values = values.transpose(2, 1)\n\n dot = queries @ keys.transpose(3, 2)\n dot = dot / np.sqrt(e)\n dot = nn.functional.softmax(dot, dim=3)\n\n out = dot @ values\n out = out.transpose(1,2).contiguous().view(b, t, h * e)\n out = self.w_out(out)\n return out\n\nclass DecisionGenerator_whole_attention(nn.Module):\n def __init__(self, encoder, encoder_dims, device, num_heads=8, \\\n attention_out_dim=10, action_num=4, explanation_num=21):\n super().__init__()\n\n \"\"\"\n encoder_dims = (F,H,W) \n F:Feature shape (1280 for mobile net, 2048 for resnet)\n H,W = image feature height, width \n \"\"\"\n self.encoder = encoder\n\n assert len(encoder_dims) == 3, \"encoder_dims has to be a triplet with shape (F,H,W)\" \n\n F,H,W = encoder_dims\n\n self.MHSA = MHSA2(emb_dim=F,kqv_dim=10,output_dim=attention_out_dim,num_heads=num_heads)\n\n T = H*W\n self.action_branch = nn.Sequential(\n nn.Linear(attention_out_dim*T,64),\n nn.ReLU(),\n # nn.Dropout(),\n nn.Linear(64,action_num))\n\n self.explanation_branch = nn.Sequential(\n nn.Linear(attention_out_dim*T,64),\n nn.ReLU(),\n # nn.Dropout(),\n nn.Linear(64, explanation_num))\n\n self.action_loss_fn, self.reason_loss_fn = self.loss_fn(device)\n\n def loss_fn(self,device):\n class_weights = [1, 1, 2, 2]\n w = torch.FloatTensor(class_weights).to(device)\n action_loss = nn.BCEWithLogitsLoss(pos_weight=w).to(device)\n explanation_loss = nn.BCEWithLogitsLoss().to(device)\n return action_loss,explanation_loss\n\n\n def forward(self,images,targets=None):\n images = torch.stack(images)\n if self.training:\n assert targets is not None\n target_reasons = torch.stack([t['reason'] for t in targets])\n target_actions = torch.stack([t['action'] for t in targets])\n # print(images.shape)\n features = self.encoder(images) # \n # print(features.shape)\n\n B,F,H,W = features.shape\n\n # print(features.view(B,F,H*W).transpose(1,2).shape)\n \n transformed_feature = self.MHSA(features.view(B,F,H*W).transpose(1,2)) #(B, H, T, 10)\n # print(transformed_feature.shape)\n feature_polled = torch.flatten(transformed_feature,start_dim=1)\n\n # print(feature_polled.shape)\n\n actions = self.action_branch(feature_polled)\n reasons = self.explanation_branch(feature_polled)\n\n if self.training:\n action_loss = self.action_loss_fn(actions, target_actions)\n reason_loss = self.reason_loss_fn(reasons, target_reasons)\n loss_dic = {\"action_loss\":action_loss, \"reason_loss\":reason_loss}\n return loss_dic\n else:\n return {\"action\":torch.sigmoid(actions),\"reasons\":torch.sigmoid(reasons)}\n\ndga = DecisionGenerator_whole_attention(encoder, encoder_dims=(2048,6,10), device='cpu')\ncount_parameters(dga)\n", "_____no_output_____" ], [ "24078915", "_____no_output_____" ], [ "classes = {\n \"bus\": 0,\n \"traffic light\": 1,\n \"traffic sign\": 2,\n \"person\": 3,\n \"bike\": 4,\n \"truck\": 5,\n \"motor\": 6,\n \"car\": 7,\n \"train\": 8,\n \"rider\": 9,\n}\n\nclass_2_name = dict([(value, key) for key, value in classes.items()])\nnum_classes = len(classes)", "_____no_output_____" ] ], [ [ "## 1. 
Load model", "_____no_output_____" ] ], [ [ "def get_model(num_classes):\n model = torchvision.models.detection.fasterrcnn_resnet50_fpn(pretrained=True)\n in_features = model.roi_heads.box_predictor.cls_score.in_features\n #model.roi_heads.box_predictor = FastRCNNPredictor(in_features, num_classes) # replace the pre-trained head with a new one\n model.roi_heads.box_predictor = torchvision.models.detection.faster_rcnn.FastRCNNPredictor(in_features,num_classes)\n return model.cpu()", "_____no_output_____" ], [ "model = get_model(num_classes)", "_____no_output_____" ], [ "checkpoint = torch.load('saved_models/bdd100k_24.pth')\nmodel.load_state_dict(checkpoint['model'])\n#optimizer.load_state_dict(checkpoint['optimizer_state_dict'])\n#epoch = checkpoint['epoch']\nmodel.eval()", "_____no_output_____" ] ], [ [ "## 2. Show sample plot", "_____no_output_____" ] ], [ [ "def get_preds(idx,img_datalist,threshold):\n im0 = Image.open(img_datalist[idx])\n im0_tensor = torchvision.transforms.ToTensor()(im0)\n pred = model([im0_tensor])\n total_preds = []\n for n,confidence in enumerate(pred[0]['scores']):\n if confidence>threshold:\n pred_update = {}\n pred_update['boxes'] = pred[0]['boxes'][n]\n pred_update['labels'] = pred[0]['labels'][n]\n pred_update['scores'] = pred[0]['scores'][n]\n total_preds.append(pred_update)\n return im0,total_preds", "_____no_output_____" ], [ "def plot_from_image_preds(img,total_preds):\n fig,ax = plt.subplots(1,figsize=(20,10))\n for i in range(len(total_preds)):\n xy = total_preds[i]['boxes'][0],total_preds[i]['boxes'][1]\n width = total_preds[i]['boxes'][2]-total_preds[i]['boxes'][0]\n height = total_preds[i]['boxes'][3]-total_preds[i]['boxes'][1]\n rect = patches.Rectangle(xy,width,height,linewidth=1,edgecolor='r',facecolor='none')\n ax.text(xy[0],xy[1],class_2_name[total_preds[i]['labels'].item()])\n ax.add_patch(rect)\n ax.imshow(img)", "_____no_output_____" ], [ "with open(\"datalists/bdd100k_val_images_path.txt\", \"rb\") as fp:\n val_img_paths = pickle.load(fp)", "_____no_output_____" ], [ "im, total_preds = get_preds(751,val_img_paths,0.6)\nplot_from_image_preds(im,total_preds)", "_____no_output_____" ] ], [ [ "## 3. 
Test", "_____no_output_____" ] ], [ [ "im0 = Image.open(val_img_paths[100])\nim0_tensor = torchvision.transforms.ToTensor()(im0)", "_____no_output_____" ], [ "model.backbone.out_channels", "_____no_output_____" ], [ "images, targets = model.transform([im0_tensor,im0_tensor])", "_____no_output_____" ], [ "images.tensors.shape", "_____no_output_____" ], [ "features = model.backbone(images.tensors)", "_____no_output_____" ], [ "proposals, _ = model.rpn(images, features, targets)", "_____no_output_____" ], [ "box_features1 = model.roi_heads.box_roi_pool(features,proposals,images.image_sizes)\nbox_features2 = model.roi_heads.box_head(box_features1)\n\n\nclass_logits, box_regression = model.roi_heads.box_predictor(box_features2)\n", "_____no_output_____" ] ], [ [ "## Test Multihead attention in pytorch", "_____no_output_____" ] ], [ [ "box_features2 = box_features2.view(2,1000,1024)\nbox_features2.shape", "_____no_output_____" ], [ "class MHSA(nn.Module):\n def __init__(self,\n emb_dim,\n kqv_dim,\n num_heads=1):\n super(MHSA, self).__init__()\n self.emb_dim = emb_dim\n self.kqv_dim = kqv_dim\n self.num_heads = num_heads\n\n self.w_k = nn.Linear(emb_dim, kqv_dim * num_heads, bias=False)\n self.w_q = nn.Linear(emb_dim, kqv_dim * num_heads, bias=False)\n self.w_v = nn.Linear(emb_dim, kqv_dim * num_heads, bias=False)\n self.w_out = nn.Linear(kqv_dim * num_heads, emb_dim)\n\n def forward(self, x):\n\n b, t, _ = x.shape\n e = self.kqv_dim\n h = self.num_heads\n keys = self.w_k(x).view(b, t, h, e)\n values = self.w_v(x).view(b, t, h, e)\n queries = self.w_q(x).view(b, t, h, e)\n\n keys = keys.transpose(2, 1)\n print(\"keys\",keys.shape)\n queries = queries.transpose(2, 1) # b, h, t, e\n print(\"queries\",queries.shape)\n values = values.transpose(2, 1) # b, h, t, e\n print(\"values\",values.shape)\n dot = queries @ keys.transpose(3, 2)\n dot = dot / np.sqrt(e)\n print(\"dot\",dot.shape)\n weights = nn.functional.softmax(dot, dim=3)\n\n print(values.shape)\n out = weights @ values\n print(out.shape)\n out = out.transpose(1,2).contiguous().view(b, t, h * e)\n out = self.w_out(out)\n return out, weights\n\nattention = MHSA(1024,10,num_heads=8)\n\nval, score = attention(box_features2)", "keys torch.Size([2, 8, 1000, 10])\nqueries torch.Size([2, 8, 1000, 10])\nvalues torch.Size([2, 8, 1000, 10])\ndot torch.Size([2, 8, 1000, 1000])\ntorch.Size([2, 8, 1000, 10])\ntorch.Size([2, 8, 1000, 10])\n" ], [ "score.shape", "_____no_output_____" ], [ "attention.parameters", "_____no_output_____" ], [ "model_parameters = filter(lambda p: p.requires_grad, attention.parameters())\nparams = sum([np.prod(p.size()) for p in model_parameters])", "_____no_output_____" ], [ "1024*4*80+1024", "_____no_output_____" ], [ "nn.Linear()", "_____no_output_____" ], [ "nn.MultiheadAttention()", "_____no_output_____" ] ], [ [ "## Test hard attention", "_____no_output_____" ] ], [ [ "box_features2.shape", "_____no_output_____" ], [ "box_features2 = box_features2.view(2,1000,1024)", "_____no_output_____" ], [ "box_features2.shape", "_____no_output_____" ], [ "attention = nn.Sequential(nn.Linear(1024,1),nn.Softmax(dim=1))", "_____no_output_____" ], [ "attention(box_features2)", "_____no_output_____" ], [ "score = attention(box_features2)", "_____no_output_____" ], [ "score.shape", "_____no_output_____" ], [ "box_features2.shape", "_____no_output_____" ], [ "_,ind = torch.topk(score,k=10,dim=1)", "_____no_output_____" ], [ "torch.index_select(box_features2,)", "_____no_output_____" ], [ "ind", "_____no_output_____" ], [ 
"torch.gather(box_features2,1,ind.expand(ind.size(0),ind.size(1),box_features2.size(2)))", "_____no_output_____" ], [ "box_features2[1,399,:]", "_____no_output_____" ], [ "box_features2[ind,:]", "_____no_output_____" ], [ "(box_features2*attention(box_features2)).shape", "_____no_output_____" ], [ "ind.squeeze(-1).shape", "_____no_output_____" ], [ "proposals[0].shape", "_____no_output_____" ], [ " boxes, scores, labels = model.roi_heads.postprocess_detections(class_logits, box_regression, proposals, images.image_sizes)", "_____no_output_____" ], [ "len(boxes)", "_____no_output_____" ], [ "box_features2_reshaped = box_features2.view(2,1000,1024)\n\n", "_____no_output_____" ], [ "box_features2_reshaped.shape,box_features1.shape", "_____no_output_____" ], [ "detections, detector_losses = model.roi_heads(features, proposals, images.image_sizes)", "_____no_output_____" ], [ "detections[0]['boxes'].shape", "_____no_output_____" ], [ "class MHSA(nn.Module):\n def __init__(self,\n emb_dim,\n kqv_dim,\n num_heads=1):\n super(MHSA, self).__init__()\n self.emb_dim = emb_dim\n self.kqv_dim = kqv_dim\n self.num_heads = num_heads\n\n self.w_k = nn.Linear(emb_dim, kqv_dim * num_heads, bias=False)\n self.w_q = nn.Linear(emb_dim, kqv_dim * num_heads, bias=False)\n self.w_v = nn.Linear(emb_dim, kqv_dim * num_heads, bias=False)\n self.w_out = nn.Linear(kqv_dim * num_heads, emb_dim)\n\n def forward(self, x):\n\n b, t, _ = x.shape\n e = self.kqv_dim\n h = self.num_heads\n keys = self.w_k(x).view(b, t, h, e)\n values = self.w_v(x).view(b, t, h, e)\n queries = self.w_q(x).view(b, t, h, e)\n\n keys = keys.transpose(2, 1)\n queries = queries.transpose(2, 1)\n values = values.transpose(2, 1)\n\n dot = queries @ keys.transpose(3, 2)\n dot = dot / np.sqrt(e)\n dot = nn.functional.softmax(dot, dim=3)\n\n out = dot @ values\n out = out.transpose(1,2).contiguous().view(b, t, h * e)\n out = self.w_out(out)\n return out\n\nattention = MHSA(1024,10,num_heads=8)", "_____no_output_____" ], [ "attention_result = attention(box_features2_reshaped)\nattention_result.shape", "_____no_output_____" ], [ "torch.max(attention_result,1)[0]", "_____no_output_____" ], [ "class DecisionGenerator(nn.Module):\n def __init__(self,faster_rcnn_model,batch_size=2,action_num=4,explanation_num=21,freeze_rcnn=True):\n super().__init__()\n\n self.rcnn = faster_rcnn_model\n self.batch_size = batch_size\n\n if freeze_rcnn:\n self.rcnn.params.requires_grad = False\n self.object_attention = MHSA(1024, kqv_dim=10, num_heads=8)\n self.action_branch = nn.Linear(1024,action_num)\n self.explanation_branch = nn.Linear(1024, explanation_num)\n\n def forward(images):\n\n images,_ = rcnn.transform(images)\n features = rcnn.backbone(images.tensors)\n proposals, _ = rcnn.rpn(images, features)\n\n box_features = rcnn.roi_heads.box_roi_pool(features,proposals,images.image_sizes)\n box_features = rcnn.roi_heads.box_head(box_features).view(self.batch_size, -1, 1024) #(B, num_proposal, 1024)\n \n box_features = self.object_attention(box_features) #(B, num_proposal, 1024)\n feature_polled,_ = torch.max(box_features,1)\n\n actions = self.action_branch(feature_polled)\n explanations = self.explanation_branch(feature_polled)\n\n return actions,explanations\n", "_____no_output_____" ], [ "class Self_Attn(nn.Module):\n \"\"\" Self attention Layer\"\"\"\n def __init__(self,in_dim,activation):\n super(Self_Attn,self).__init__()\n self.chanel_in = in_dim\n self.activation = activation\n \n self.query_conv = nn.Conv2d(in_channels = in_dim , out_channels = in_dim//8 , 
kernel_size= 1)\n self.key_conv = nn.Conv2d(in_channels = in_dim , out_channels = in_dim//8 , kernel_size= 1)\n self.value_conv = nn.Conv2d(in_channels = in_dim , out_channels = in_dim , kernel_size= 1)\n self.gamma = nn.Parameter(torch.zeros(1))\n\n self.softmax = nn.Softmax(dim=-1) #\n def forward(self,x):\n \"\"\"\n inputs :\n x : input feature maps( B X C X W X H)\n returns :\n out : self attention value + input feature \n attention: B X N X N (N is Width*Height)\n \"\"\"\n m_batchsize,C,width ,height = x.size()\n proj_query = self.query_conv(x).view(m_batchsize,-1,width*height).permute(0,2,1) # B X CX(N)\n proj_key = self.key_conv(x).view(m_batchsize,-1,width*height) # B X C x (*W*H)\n energy = torch.bmm(proj_query,proj_key) # transpose check\n attention = self.softmax(energy) # BX (N) X (N) \n proj_value = self.value_conv(x).view(m_batchsize,-1,width*height) # B X C X N\n\n out = torch.bmm(proj_value,attention.permute(0,2,1) )\n out = out.view(m_batchsize,C,width,height)\n \n out = self.gamma*out + x\n return out,attention", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a647cc676a87cd96024fb50996f4c0154a48f2
8,445
ipynb
Jupyter Notebook
3_deploy.ipynb
samraj1980/pegasus_text_summary
4379fe98cac6e51fd4f040b91d84914711b2c906
[ "MIT" ]
1
2021-10-31T11:39:11.000Z
2021-10-31T11:39:11.000Z
3_deploy.ipynb
samraj1980/pegasus_text_summary
4379fe98cac6e51fd4f040b91d84914711b2c906
[ "MIT" ]
1
2022-03-30T08:23:43.000Z
2022-03-30T11:13:46.000Z
3_deploy.ipynb
joaopcm1996/demo-sm-hf-summarization
07e85814f5989f2a21168beaca598b1a366d184d
[ "MIT" ]
null
null
null
29.527972
238
0.522321
[ [ [ "# Write custom inference script and requirements to local folder ", "_____no_output_____" ] ], [ [ "! mkdir inference_code", "_____no_output_____" ], [ "%%writefile inference_code/inference.py\n\n# This is the script that will be used in the inference container\nimport os \nimport json \nimport torch\nfrom transformers import AutoModelForSeq2SeqLM, AutoTokenizer\n\ndef model_fn(model_dir):\n \"\"\"\n Load the model and tokenizer for inference \n \"\"\"\n device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n\n tokenizer = AutoTokenizer.from_pretrained(model_dir)\n model = AutoModelForSeq2SeqLM.from_pretrained(model_dir).to(device)\n \n model_dict = {'model':model, 'tokenizer':tokenizer}\n \n return model_dict \n\n\ndef predict_fn(input_data, model):\n \"\"\"\n Make a prediction with the model\n \"\"\"\n text = input_data.pop('inputs')\n parameters = input_data.pop('parameters', None)\n \n tokenizer = model['tokenizer']\n model = model['model']\n\n # Parameters may or may not be passed \n input_ids = tokenizer(text, truncation=True, padding='longest', return_tensors=\"pt\").input_ids\n output = model.generate(input_ids, **parameters) if parameters is not None else model.generate(input_ids)\n \n return tokenizer.batch_decode(output, skip_special_tokens=True)[0]\n\n\ndef input_fn(request_body, request_content_type):\n \"\"\"\n Transform the input request to a dictionary\n \"\"\"\n request = json.loads(request_body)\n\n return request\n\n\ndef output_fn(prediction, response_content_type):\n \"\"\"\n Return model's prediction\n \"\"\"\n return {'generated_text':prediction}", "_____no_output_____" ], [ "%%writefile inference_code/requirements.txt\ntransformers\nsentencepiece\nprotobuf", "_____no_output_____" ] ], [ [ "# Deploy an endpoint with PyTorchModel", "_____no_output_____" ], [ "Once you .deploy(), this will upload your model package to S3, create a model in SageMaker, create an endpoint configuration, and deploy an endpoint from that configuration.", "_____no_output_____" ] ], [ [ "! pip install -U sagemaker", "_____no_output_____" ], [ "import sagemaker\n\nsession = sagemaker.Session()\nsession_bucket = session.default_bucket()\nrole = sagemaker.get_execution_role()\n\npytorch_version = '1.7.1'\npython_version = 'py36'", "_____no_output_____" ], [ "from sagemaker.huggingface import HuggingFaceModel \n\nmodel_name = 'summarization-model'\nendpoint_name = 'summarization-endpoint'\n\nmodel_for_deployment = HuggingFaceModel(entry_point='inference.py',\n source_dir='inference_code',\n model_data=huggingface_estimator.model_data,\n # model_data=f'{session_bucket}/{<insert_model_location_key>}/model.tar.gz', in case you don't run this notebook using the initialized huggingface_estimator from 2_finetune.ipynb\n role=role,\n pytorch_version=pytorch_version,\n py_version=python_version,\n transformers_version='4.6.1',\n name=model_name)", "_____no_output_____" ], [ "from sagemaker.serializers import JSONSerializer\nfrom sagemaker.deserializers import BytesDeserializer\n\n# Deploy the model \npredictor = model_for_deployment.deploy(initial_instance_count=1,\n instance_type='ml.m5.xlarge',\n endpoint_name=endpoint_name\n )", "_____no_output_____" ], [ "text = ('PG&E stated it scheduled the blackouts in response to forecasts for high winds amid dry conditions.'\n ' The aim is to reduce the risk of wildfires.' 
\n 'Nearly 800 thousand customers were scheduled to be affected by the shutoffs which were expected to last through at least midday tomorrow.'\n)\n\nsummary_short = predictor.predict({\n 'inputs':text,\n 'parameters':{\n 'length_penalty':0.6\n }\n}) \nprint(summary_short)\n\nsummary_long = predictor.predict({\n 'inputs':text,\n 'parameters':{\n 'length_penalty':1.5\n }\n}) \nprint(summary_long)", "_____no_output_____" ] ], [ [ "# (Optional) If you haven't fine-tuned a model, but want to deploy directly from HuggingFace Hub to experiment", "_____no_output_____" ] ], [ [ "# We will pass these as env variables, defining the model and task we want \nhub = {\n 'HF_MODEL_ID':'google/pegasus-xsum',\n 'HF_TASK':'summarization' \n}\n\nhub_model = HuggingFaceModel(env=hub,\n role=role,\n pytorch_version='1.7',\n py_version='py36',\n transformers_version='4.6',\n name='hub-model')", "_____no_output_____" ], [ "hub_predictor = hub_model.deploy(initial_instance_count=1,\n instance_type='ml.m5.xlarge',\n endpoint_name='hub-endpoint')", "_____no_output_____" ], [ "# You can also pass in a 'parameters' key with valid parameters, just like we did before\nsummary = hub_predictor.predict({'inputs':text}) \nprint(summary)", "_____no_output_____" ] ], [ [ "# Clean up", "_____no_output_____" ], [ "Use this code to delete the resources created in SageMaker Inference (endpoint configuration, endpoint and model). ", "_____no_output_____" ] ], [ [ "predictor.delete_endpoint()\npredictor.delete_model()", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ] ]
d0a65e894aaca53ae634cd3af80045234748dd80
6,028
ipynb
Jupyter Notebook
04 Pandas Certificate/Exercise Grouping and Sorting.ipynb
mateuszczubkowski/introduction-to-python
e670b38e95cb4e886d3a3bba485f6165c56cbe65
[ "MIT" ]
null
null
null
04 Pandas Certificate/Exercise Grouping and Sorting.ipynb
mateuszczubkowski/introduction-to-python
e670b38e95cb4e886d3a3bba485f6165c56cbe65
[ "MIT" ]
null
null
null
04 Pandas Certificate/Exercise Grouping and Sorting.ipynb
mateuszczubkowski/introduction-to-python
e670b38e95cb4e886d3a3bba485f6165c56cbe65
[ "MIT" ]
null
null
null
6,028
6,028
0.721466
[ [ [ "**[Pandas Home Page](https://www.kaggle.com/learn/pandas)**\n\n---\n", "_____no_output_____" ], [ "# Introduction\n\nIn these exercises we'll apply groupwise analysis to our dataset.\n\nRun the code cell below to load the data before running the exercises.", "_____no_output_____" ] ], [ [ "import pandas as pd\n\nreviews = pd.read_csv(\"../input/wine-reviews/winemag-data-130k-v2.csv\", index_col=0)\n#pd.set_option(\"display.max_rows\", 5)\n\nfrom learntools.core import binder; binder.bind(globals())\nfrom learntools.pandas.grouping_and_sorting import *\nprint(\"Setup complete.\")", "_____no_output_____" ] ], [ [ "# Exercises", "_____no_output_____" ], [ "## 1.\nWho are the most common wine reviewers in the dataset? Create a `Series` whose index is the `taster_twitter_handle` category from the dataset, and whose values count how many reviews each person wrote.", "_____no_output_____" ] ], [ [ "# Your code here\nreviews_written = reviews.groupby('taster_twitter_handle').size()\n\n# Check your answer\nq1.check()", "_____no_output_____" ], [ "#q1.hint()\n#q1.solution()", "_____no_output_____" ] ], [ [ "## 2.\nWhat is the best wine I can buy for a given amount of money? Create a `Series` whose index is wine prices and whose values is the maximum number of points a wine costing that much was given in a review. Sort the values by price, ascending (so that `4.0` dollars is at the top and `3300.0` dollars is at the bottom).", "_____no_output_____" ] ], [ [ "best_rating_per_price = reviews.groupby('price')['points'].max().sort_index()\n\n# Check your answer\nq2.check()", "_____no_output_____" ], [ "#q2.hint()\n#q2.solution()", "_____no_output_____" ] ], [ [ "## 3.\nWhat are the minimum and maximum prices for each `variety` of wine? Create a `DataFrame` whose index is the `variety` category from the dataset and whose values are the `min` and `max` values thereof.", "_____no_output_____" ] ], [ [ "price_extremes = reviews.groupby('variety').price.agg([min, max])\n\n# Check your answer\nq3.check()", "_____no_output_____" ], [ "#q3.hint()\n#q3.solution()", "_____no_output_____" ] ], [ [ "## 4.\nWhat are the most expensive wine varieties? Create a variable `sorted_varieties` containing a copy of the dataframe from the previous question where varieties are sorted in descending order based on minimum price, then on maximum price (to break ties).", "_____no_output_____" ] ], [ [ "sorted_varieties = reviews.groupby('variety').price.agg([min, max]).sort_values(by=['min', 'max'], ascending=False)\n\n# Check your answer\nq4.check()", "_____no_output_____" ], [ "#q4.hint()\n#q4.solution()", "_____no_output_____" ] ], [ [ "## 5.\nCreate a `Series` whose index is reviewers and whose values is the average review score given out by that reviewer. Hint: you will need the `taster_name` and `points` columns.", "_____no_output_____" ] ], [ [ "reviewer_mean_ratings = reviews.groupby('taster_name').points.mean()\n\n# Check your answer\nq5.check()", "_____no_output_____" ], [ "#q5.hint()\n#q5.solution()", "_____no_output_____" ] ], [ [ "Are there significant differences in the average scores assigned by the various reviewers? Run the cell below to use the `describe()` method to see a summary of the range of values.", "_____no_output_____" ] ], [ [ "reviewer_mean_ratings.describe()", "_____no_output_____" ] ], [ [ "## 6.\nWhat combination of countries and varieties are most common? Create a `Series` whose index is a `MultiIndex`of `{country, variety}` pairs. 
For example, a pinot noir produced in the US should map to `{\"US\", \"Pinot Noir\"}`. Sort the values in the `Series` in descending order based on wine count.", "_____no_output_____" ] ], [ [ "country_variety_counts = reviews.groupby(['country', 'variety']).size().sort_values(ascending=False)\n\n# Check your answer\nq6.check()", "_____no_output_____" ], [ "#q6.hint()\n#q6.solution()", "_____no_output_____" ] ], [ [ "# Keep going\n\nMove on to the [**data types and missing data**](https://www.kaggle.com/residentmario/data-types-and-missing-values).", "_____no_output_____" ], [ "---\n**[Pandas Home Page](https://www.kaggle.com/learn/pandas)**\n\n\n\n\n\n*Have questions or comments? Visit the [Learn Discussion forum](https://www.kaggle.com/learn-forum) to chat with other Learners.*", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ] ]
d0a662f4743ebfc10cde02657db96e7eb7ea3ee2
21,223
ipynb
Jupyter Notebook
miscellaneous_notebooks/Latex.ipynb
dcroce/jupyter-book
9ac4b502af8e8c5c3b96f5ec138602a0d3d8a624
[ "MIT" ]
null
null
null
miscellaneous_notebooks/Latex.ipynb
dcroce/jupyter-book
9ac4b502af8e8c5c3b96f5ec138602a0d3d8a624
[ "MIT" ]
null
null
null
miscellaneous_notebooks/Latex.ipynb
dcroce/jupyter-book
9ac4b502af8e8c5c3b96f5ec138602a0d3d8a624
[ "MIT" ]
null
null
null
28.718539
453
0.48994
[ [ [ "# An Introduction to $\\LaTeX$ (LaTeX)\n\nLatex is a typesetting language used for formatting equations (and much more) in the scientific communities. LaTeX is used very commonly in higher mathematical and computer science classes and academia. In Stat 140, we'll be using LaTeX to \"pretty print\" equations and answers for homeworks and labs", "_____no_output_____" ], [ "## How To Follow This Introduction?\n\nIf you're viewing through Jupyter, then you're all set! Otherwise, go ahead and download this notebook [here](#). This way, you'll be able to follow and modify the examples that we give, as well as test your own. To see the latex behind the math expressions, go ahead and edit that particular cell", "_____no_output_____" ], [ "## How Do We Use LaTeX?\n\nFor the purposes of this introduction, and for Prob 140, we shall use the \"math mode\" in Latex through markdown cells in Jupyter Notebook. Remember that in order to switch a cell to \"markdown mode\", simply change the option from \"code\" $\\to$ \"Markdown\" on the above toolbar.\n\n\nThere are two main ways to enter math mode\n\n $ You're in Inline Math Mode $\n $$ You're in Centered Math Mode $$", "_____no_output_____" ], [ "### Inline Math Mode\n\nWe use inline math when we want to put in equations and/or variables in sentences / the natural flow of text. Here's an example of inline math:\n\n Say you have $x$ cows and $y$ chickens on a farm, and you've counted $100$ legs amongst all the animals. What are $x$ and $y$?\n\nThis is what that'll look like: \n\nSay you have $x$ cows and $y$ chickens on a farm, and you've counted $100$ legs amongst all the animals. What are $x$ and $y$?\n\n*Note*: Notice that text while inside math mode are somewhat italicized; don't use math mode to write all your text, leave it just for variables", "_____no_output_____" ], [ "### Centered Math Mode\n\nWe use centered math mode, when there are particular equations that you'd like to highlight, or have stand out. Here's an example.\n\n $$ 1 + 2 + \\dots + n = 0.5n(n+1)$$\n\nThis is what that'll look like: \n\n $$ 1 + 2 + \\ldots + n = 0.5n(n+1)$$\n\n*Note*: Don't worry about `ldots` yet", "_____no_output_____" ], [ "## Superscripts and Subscripts\n\n### Superscripts \n\nIn order to write superscripts, of the form $x^y$, simply write `$x^y$`\n\n\n`$ 2^3 = 8$`: \n\n$ 2^3 = 8$\n\n`$ (x+y)^2 = x^2 + 2xy + y^2$`:\n\n$ (x+y)^2 = x^2 + 2xy + y^2$\n\n** Warning: ** Generally, the power can only be one character long; to write more, you must wrap it with braces { } . So for example: \n\nIf you forget braces, it'll look like this:\n\n`$2^10 = 1024$`: \n\n$2^10 = 1024$\n\nTo fix, simply add braces\n\n`$2^{10} = 1024$`:\n\n$2^{10} = 1024$\n\n### Subscripts\n\nIn order to write subscripts, of the form $a_1$, simply use the *underscore character* `_` and write `$a_1$`\n\n\n`$ 2^3 = 8$`: \n\n$ 2^3 = 8$\n\n`$ (x+y)^2 = x^2 + 2xy + y^2$`:\n\n$ (x+y)^2 = x^2 + 2xy + y^2$\n\n** Warning: ** Just as with the superscript, the subscript can only be one character long; to write more, you must wrap it with braces { }\n", "_____no_output_____" ], [ "## Expressions\n\nLaTeX provides commands for writing symbols in your mathematical expressions. In the last example you saw the `\\ldots` command; as you saw, this drew the dots like this: $\\ldots$.\n\nHere are some examples of such commands\n\n1. `\\sum` : $\\sum$\n2. `\\prod` : $\\prod$\n3. `\\infty` : $\\infty$\n4. `\\log` : $\\log$\n5. `\\int` : $\\int$\n6. `\\alpha` : $\\alpha$\n7. `\\cup` : $\\cup$\n8. 
`\\cap` : $\\cap$\n9. `\\ldots` : $\\ldots$\n10. `\\pm` : $\\pm$\n11. `n \\choose k` : $n \\choose k$\n\nWe can use the subscripting and superscripting from the last section with any of these commands as well; it all integrates well. Here's an example from the *Math Prerequisites* worksheet\n", "_____no_output_____" ], [ "**1.** Consider the sequence defined by $c_i = i$ for $i = 1, 2, \\ldots 10$. What is $\\sum_{i=1}^{10} c_i$?\n", "_____no_output_____" ], [ "\nEssentially, all the greek letters have such commands (simply go `\\yourgreekletterhere`): These we use a lot; here are some of the common ones we use\n\n1. `\\alpha` : $\\alpha$\n1. `\\beta` : $\\beta$\n1. `\\lambda` : $\\lambda$\n1. `\\mu` : $\\mu$\n1. `\\sigma` : $\\sigma$\n1. `\\pi` : $\\pi$", "_____no_output_____" ], [ "Some commands can take arguments (just like in Python). The way you pass in arguments into a command is by using the braces `{}` syntax\n\n \\command{Arg 1}{Arg 2}...\n \nThe most common such command you'll use is the `\\frac` command, which creates fractions. `\\frac` takes in two arguments, the first the numerator, and the second the denominator (`\\frac{Numerator}{Denominator}`). Here are some examples\n\n1.\n \\frac{10}{4} = \\frac{5}{2}\n\n$\\frac{10}{4} = \\frac{5}{2}$\n\n2.\n \\frac{x^2}{x} = x\n\n$\\frac{x^2}{x} = x$\n\nAnother common command you'll use is the `\\sqrt` command, which takes in one argument- the operand. Here's an example\n\n1.\n \\sqrt{9} = 3\n\n$\\sqrt{9} = 3$\n", "_____no_output_____" ], [ "### Example Combining Everything Together\n\n**Question:** Consider a polynomial $ax^2 + bx + c$. For what values of $x$ does this polynomial equal $0$?\n\n**Answer**\n \n`$$x = \\frac{-b \\pm \\sqrt{b^2 - 4ac}}{2a}$$`\n\n$$x = \\frac{-b \\pm \\sqrt{b^2 - 4ac}}{2a}$$", "_____no_output_____" ], [ "### Bounds for summations, products, and integrals\n\nFor summations, products, and integrals, we often want to define the bounds or limits. In LaTeX, the syntax for the bounds are the same symbols as subscript and superscript.\n\n \\sum_{lower}^{upper}\n \\prod_{lower}^{upper}\n \\int_{lower}^{upper}\n\nNote that the bounds will appear next to the symbol in inline math mode and around the symbol in centered math mode\n\n \\sum_{i=1}^\\infty a_i\nInline:\n$\\sum_{i=1}^\\infty a_i$\n\nCentered:\n$$\\sum_{i=1}^\\infty a_i$$\n\n \\prod_{i=a}^{b} f(i)\n$$\\prod_{i=1}^{\\infty} f(i)$$\n\n \\int_{-\\infty}^{100}xdx\n$$\\int_{-\\infty}^{100}xdx$$\n\n \\int_{-\\infty}^{\\infty}\\int_{-\\infty}^{\\infty}(x+y)dxdy\n$$\\int_{-\\infty}^{\\infty}\\int_{-\\infty}^{\\infty}(x+y)dxdy$$\n\n \\lim_{x\\to\\infty} f(x)\n$$\\lim_{x\\to\\infty} f(x)$$\n", "_____no_output_____" ], [ "### Align\n\nWhen manipulating long equations, it is often useful to show the steps in between. The ``align*`` environment provides easy vertical alignment of equations.Let's see an example\n\n \\begin{align*}\n 2x + 10 &= -4\\\\\n 2x &= -14\\\\\n x &= -7\n\\end{align*}\n\n\n\\begin{align*}\n 2x + 10 &= -4\\\\\n 2x &= -14\\\\\n x &= -7\n\\end{align*}\n\n\n \\begin{align*}\n f(x) &= \\int_0^1 g(x)dx = \\int_0^1 x^2dx\\\\\n &= \\left[x^3\\right]_0^1\\\\\n &= 1^3 - 0^3\\\\\n &= \\boxed{1}\n \\end{align*}\n$$\n\\begin{align*}\nf(x) &= \\int_0^1 g(x)dx = \\int_0^1 x^2dx\\\\\n&= \\left[x^3\\right]_0^1\\\\\n&= 1^3 - 0^3\\\\\n&= \\boxed{1}\n\\end{align*}\n$$\n\nNote that the block starts with `\\begin{align*}` and ends with `\\end{align*}`. Each line ends with `\\\\` for new line. 
The character that we want to align is denoted with `&`\n\n \\begin{align*}\n x&=y & w &=z & a&=b+c\\\\\n 2x&=-y & 3w&=\\frac{1}{2}z & a&=b\\\\\n -4 + 5x&=2+y & w+2&=-1+w & ab&=cb\n \\end{align*}\n$$\\begin{align*}\nx&=y & w &=z & a&=b+c\\\\\n2x&=-y & 3w&=\\frac{1}{2}z & a&=b\\\\\n-4 + 5x&=2+y & w+2&=-1+w & ab&=cb\n\\end{align*}$$", "_____no_output_____" ], [ "### Example (3c) \n\nLet $\\{c\\}$ and $\\{d\\}$ be sequences of real numbers such that \n$$\\sum_{i=1}^{100} c_i = 10$$ \n$$\\sum_{i=1}^{100} d_i = 20$$ \n\nWhat is $\\sum_{i=1}^{100} (4c_i - d_i + 5)$?\n", "_____no_output_____" ], [ "### Solution\n\n\n\n\\begin{align*}\n\\sum_{i=1}^{100} (4c_i - d_i + 5) &= \\sum_{i=1}^{100} 4c_i + \\sum_{i=1}^{100} - d_i + \\sum_{i=1}^{100} 5\\\\\n&= 4 \\sum_{i=1}^{100} c_i - \\sum_{i=1}^{100} d_i + \\sum_{i=1}^{100} 5\\\\\n&= 4 (10) - (20) + 5(100)\\\\\n&= 520\n\\end{align*}\n\n \\begin{align*}\n \\sum_{i=1}^{100} (4c_i - d_i + 5) &= \\sum_{i=1}^{100} 4c_i + \\sum_{i=1}^{100} - d_i + \\sum_{i=1}^{100} 5\\\\\n &= 4 \\sum_{i=1}^{100} c_i - \\sum_{i=1}^{100} d_i + \\sum_{i=1}^{100} 5\\\\\n &= 4 (10) - (20) + 5(100)\\\\\n &= 520\n\\end{align*}", "_____no_output_____" ], [ "## Further Resources\n\nYou've made it! These steps outlined above should cover all the LaTeX you need to write solutions. Sometime in your pursuit, you may need some commands or symbols that haven't been listed here. When so, the following resources are here to help\n\n- [DeTexify](http://detexify.kirelabs.org/classify.html): This site lets you draw a symbol, and will immediately find the corresponding Latex command. Similar apps for iPhone and Android also exist\n- [ShareLatex Tutorial](https://www.sharelatex.com/learn/Mathematical_expressions) ShareLaTeX is one of the main online work environments; they have a very simple yet comprehensive tutorial about LaTex. If you'd like to learn more about LaTeX, this is your gateway\n", "_____no_output_____" ], [ "## Exploration\n\nThe best way to learn and absorb LaTeX is to look at examples and code it yourself. Once you have the above guiding principles down, the rest of LaTeX distills into simply finding the symbol that you were trying to find. In that spirit of exploration, we've given you the source for the *Mathematical Prerequisites* worksheet. Try to understand what each of the symbols are doing, and try writing your solutions and explanations in LaTeX below.\n\nDon't worry about the `underline` command in the later questions: we only use it to print the blank spaces", "_____no_output_____" ], [ "#### Question 1: \nConsider the sequence defined by $c_i =i$, for $i=1, 2, \\ldots , 10$.\n\n1. Find $\\sum_{i=1}^{10} c_i$.\n\n`\\sum_{i=1}^{10} c_i`\n2. If possible, find $\\sum_{k=1}^{10} c_k$. If this is not possible,explain why not.\n\n`\\sum_{k=1}^{10} c_k`", "_____no_output_____" ], [ "#### Solution\n\n*Your Answer Here*", "_____no_output_____" ], [ "#### Question 2\n\nDoes the expression \n$$\n\\sum_{n=1}^{10} 2\n$$\n\n`\\sum_{n=1}^{10} 2`\n\n\nmake sense? 
If it does, what is its value?", "_____no_output_____" ], [ "#### Solution\n\n*Your Answer Here*", "_____no_output_____" ], [ "#### Question 3\n\nLet $\\{c\\}$ and $\\{d\\}$ be sequences of real numbers so\nthat \n$$\n\\sum_{i=1}^{100} c_i ~=~ 10$$\n$$\\sum_{j=1}^{100} d_j ~=~ 20 \n$$\nIn parts 1-3 find the value of the expression.\n\n\n1) $\\sum_{i=1}^{100} (4c_i + 5)$\n\n2) $\\sum_{i=1}^{100} 4c_i ~+~ 5$\n\n3) $\\sum_{i=1}^{100} (4c_i - d_i + 5)$\n\n`\\sum_{i=1}^{100} (4c_i - d_i + 5)`\n\n\n4) True or false:\n$$\n\\sum_{i=1}^{100} \\sum_{j=1}^{100} (c_i + d_j) ~~=~~\n\\sum_{i=1}^{100} (c_i + d_i)\n$$\n\n`\\sum_{i=1}^{100} \\sum_{j=1}^{100} (c_i + d_j) = \\sum_{i=1}^{100} (c_i + d_i)`\n\n\nIf you think the identity is true, find the common value of the two\nsides. If the identity is false,\ncan you find the value of either of the sides?\n", "_____no_output_____" ], [ "#### Solution\n\n*Your Answer Here*", "_____no_output_____" ], [ "#### Question 4\n\nLet $0 < p < 1$. Find simple expressions for\n\n1. $ \\sum_{i=0}^{100} p^i $\n\n2. $ \\sum_{i=0}^{\\infty} p^i $\n\n3. $ \\sum_{i=100}^{\\infty} p^i $\n", "_____no_output_____" ], [ "#### Solution\n\n*Your Answer Here*", "_____no_output_____" ], [ "#### Question 5\n\nThe sum $ \\sum_{n=0}^{\\infty} \\frac{1}{n!} $ can be expressed very\nsimply. Find that simple expression and a numerical value. \n\n`\\sum_{n=0}^{\\infty} \\frac{1}{n!}`\n", "_____no_output_____" ], [ "#### Solution\n\n*Your Answer Here*", "_____no_output_____" ], [ "#### Question 6\n\nRepeat the previous exercise for each of the sums\n\n1. $$ \\sum_{i=0}^{\\infty} \\frac{2^i}{i!} $$\n\n` \\sum_{i=0}^{\\infty} \\frac{2^i}{i!} `\n2. $$ \\sum_{i=0}^{\\infty} \\frac{2^{3i}}{i!} $$. \n\n`\\sum_{i=0}^{\\infty} \\frac{2^{3i}}{i!}`\n\n\nIf you had trouble with the previous exercise, this one might help.\n", "_____no_output_____" ], [ "#### Solution\n\n*Your Answer Here*", "_____no_output_____" ], [ "#### Question 7\n\nYou know that $e^0 = 1$. What we're going\nto need, quite often, is an approximation to $e^x$ for a small non-zero number $x$.\nA crude approximation is 1 because $x$ is tiny. But you can get a finer approximation\nby writing the first two terms in the expansion for $e^x$ and remembering that Taylor\nsays the rest is small compared to $x$.\n\n\n1. Explain why $e^{0.01}$ is roughly $1.01$ and $e^{-0.01}$ is roughly $0.99$.\n\n2. Use your reasoning in part (a) to explain why $\\log (1+x)$ is roughly $x$ for small $x$.\nIn this class, as in much of math, $\\log$ is taken to the base $e$.\n", "_____no_output_____" ], [ "#### Solution\n\n*Your Answer Here*", "_____no_output_____" ], [ "#### Question 8\n\nHow many different ways are there to arrange six people in a row?", "_____no_output_____" ], [ "#### Solution\n\n*Your Answer Here*", "_____no_output_____" ], [ "#### Question 9\n\nA committee consists of 6 women and 4 men. \nHow many different choices can be made if you want to select\n\n- a Chairperson and an Assistant Chairperson?\n- a subcommittee of two people?\n- a committee of two men and two women?\n", "_____no_output_____" ], [ "#### Solution\n\n*Your Answer Here*", "_____no_output_____" ], [ "#### Question 10\n\nLet $a$ and $b$ be any two real numbers.\nYou know that $(a + b)^2 = a^2 + 2ab + b^2$.\n\n1. Analogously, write the following as a sum of four terms: $(a + b)^3$\n\n2. Let $n$ be a non-negative integer. 
Fill in the blanks:\n$$\n(a + b)^n ~=~ \\sum_{k=\\underline{~~}}^{\\underline{~~}} \\underline{~~~~~} a^k b^{n-k}\n$$\n\n\n`(a + b)^n ~=~ \\sum_{k=\\underline{~~}}^{\\underline{~~}} \\underline{~~~~~} a^k b^{n-k}`", "_____no_output_____" ], [ "#### Solution\n\n*Your Answer Here*", "_____no_output_____" ], [ "#### Question 11\n\nCalculate the following.\n\n1. $\\frac{d}{dx} \\log (x^2)$\n\n2. $\\frac{d}{dx} xe^{-cx}$ where $c > 0$ is a constant\n\n3. $\\int xe^{-cx} dx$ where $c > 0$ is a constant (use part (b) or methods of integration)\n\n4. $\\int_0^{\\infty} ce^{-cx} dx$ where $c > 0$ is a constant\n\n\n", "_____no_output_____" ], [ "#### Solution\n\n*Your Answer Here*", "_____no_output_____" ], [ "#### Question 12\n\nLet $c > 0$ be a constant. $\\int_0^x ce^{-cx} dx$ doesn't make sense. Why not?\n\n`\\int_0^x ce^{-cx} dx`\n", "_____no_output_____" ], [ "#### Solution\n\n*Your Answer Here*", "_____no_output_____" ], [ "#### Question 13\n\nCalculate $ \\int_0^1 \\int_0^1 (x+xy+y) dx dy $.\n\n`\\int_0^1 \\int_0^1 (x+xy+y) dx dy `\n", "_____no_output_____" ], [ "#### Solution\n\n*Your Answer Here*", "_____no_output_____" ], [ "#### Question 14\n\n Fill in the blanks (it really helps to draw the region of integration):\n\n`\\int_0^1 \\int_y^1 (x+xy+y) dx dy ~=~ \\int_0^1 \\int_{\\underline{~~}}^{\\underline{~~}} (x+xy+y) dy dx`\n\n\n$$\n\\int_0^1 \\int_y^1 (x+xy+y) dx dy ~=~ \\int_0^1 \\int_{\\underline{~~}}^{\\underline{~~}} (x+xy+y) dy dx\n$$\n\n\n", "_____no_output_____" ], [ "#### Solution\n\n*Your Answer Here*", "_____no_output_____" ] ] ]
[ "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ] ]
d0a673a7912f018143483e216205662f1e8a2037
782,655
ipynb
Jupyter Notebook
Blood_Donation_Analysis.ipynb
rahuldiamond/Blood-Donation-Analysis-
d5cf4c2276032f471d7871590c01abd46a3267f6
[ "Apache-2.0" ]
null
null
null
Blood_Donation_Analysis.ipynb
rahuldiamond/Blood-Donation-Analysis-
d5cf4c2276032f471d7871590c01abd46a3267f6
[ "Apache-2.0" ]
null
null
null
Blood_Donation_Analysis.ipynb
rahuldiamond/Blood-Donation-Analysis-
d5cf4c2276032f471d7871590c01abd46a3267f6
[ "Apache-2.0" ]
null
null
null
277.537234
242,040
0.899841
[ [ [ "<a href=\"https://colab.research.google.com/github/rahuldiamond/Blood-Donation-Analysis-/blob/main/Blood_Donation_Analysis.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "![donate.jpg](data:image/jpeg;base64,/9j/4AAQSkZJRgABAgEASABIAAD/4Q9lRXhpZgAATU0AKgAAAAgABwESAAMAAAABAAEAAAEaAAUAAAABAAAAYgEbAAUAAAABAAAAagEoAAMAAAABAAIAAAExAAIAAAAcAAAAcgEyAAIAAAAUAAAAjodpAAQAAAABAAAApAAAANAACvyAAAAnEAAK/IAAACcQQWRvYmUgUGhvdG9zaG9wIENTMiBXaW5kb3dzADIwMTg6MDI6MDUgMTQ6MDg6NTkAAAAAA6ABAAMAAAAB//8AAKACAAQAAAABAAAC0KADAAQAAAABAAAA+gAAAAAAAAAGAQMAAwAAAAEABgAAARoABQAAAAEAAAEeARsABQAAAAEAAAEmASgAAwAAAAEAAgAAAgEABAAAAAEAAAEuAgIABAAAAAEAAA4vAAAAAAAAAEgAAAABAAAASAAAAAH/2P/gABBKRklGAAECAABIAEgAAP/tAAxBZG9iZV9DTQAC/+4ADkFkb2JlAGSAAAAAAf/bAIQADAgICAkIDAkJDBELCgsRFQ8MDA8VGBMTFRMTGBEMDAwMDAwRDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAENCwsNDg0QDg4QFA4ODhQUDg4ODhQRDAwMDAwREQwMDAwMDBEMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwM/8AAEQgAOACgAwEiAAIRAQMRAf/dAAQACv/EAT8AAAEFAQEBAQEBAAAAAAAAAAMAAQIEBQYHCAkKCwEAAQUBAQEBAQEAAAAAAAAAAQACAwQFBgcICQoLEAABBAEDAgQCBQcGCAUDDDMBAAIRAwQhEjEFQVFhEyJxgTIGFJGhsUIjJBVSwWIzNHKC0UMHJZJT8OHxY3M1FqKygyZEk1RkRcKjdDYX0lXiZfKzhMPTdePzRieUpIW0lcTU5PSltcXV5fVWZnaGlqa2xtbm9jdHV2d3h5ent8fX5/cRAAICAQIEBAMEBQYHBwYFNQEAAhEDITESBEFRYXEiEwUygZEUobFCI8FS0fAzJGLhcoKSQ1MVY3M08SUGFqKygwcmNcLSRJNUoxdkRVU2dGXi8rOEw9N14/NGlKSFtJXE1OT0pbXF1eX1VmZ2hpamtsbW5vYnN0dXZ3eHl6e3x//aAAwDAQACEQMRAD8A9VSSSSUpJJM5zWNLnGGtEkngAJKXWd1nKyMb7D6L9nrZlVVugMscH7me4O+lCBiY1/V6K8/LyL6qrx6mPi0WGkNqfBpN1mOW3W3ur99n6b0Wb/TZX/hH0+q0341/TaDc/IxXZ1Wx1zg6yt7fUd6fqRvvpsr37PV/S1WV/wA5b6v6COUjw2BQNUW5hwQGURMhKceLihXo9MZcURP9KUP/AGG7zPc4udwFJnucXnjhqymjJ6plX0syHY/T8V3oO9CW2W3N2vvJyNu6qimfQ/V/0tlvrfpfTT5eLk9KofnYWRdaygepfi5D3XiytgLrRTZcX305Gz+a22+jv9llX56dxda9I6sQ5cWIGYGWVVCjw+r5YTn+jP8AlN2Elh5GTnZPW2YmDkenRdiMvNoAcGM9R+6ylr2PY7IyGurrr9X9HWz1LPTs/m0d/RbqWephZuS3LYJab7n21PP7l9Fm9npP/O+zspsZ/gkOO7oXX8tEnlxHhE8ggZgSiKka4v8AOfuf891VB79undZzcjL6phYduO44lGXT619rC02MlrCzHp3t+k9z3frHp/4H/SW+pXVzcR/T8S/PxMnIL8Wt1rqr7n3V2NZ+ksre3I9T097GO2WU+n6b/wDMR4tLAsb3/BAwDiEJTEchPAIfNwy4uD9ZL9B0bc1tF2PU/cXZT3VsgCAWsfkO3/u+ypXFz/VOnNs6l00jKyWjJvsMNtIaz9Xvd+hbH6P/AM7VjJqyOn+niYWVbZmdQeK67MpxubU2tr7b8htft+jX+b+ff6HqIcZuVjQGl55eBji4Z+ucOM8QPD6Z5OOXF+7jhjdlJZZ6KC0vq6hmDImRf6xcN3niuH2Lb/wf2ZWOl5lmXjv9drWZOPY6jIaydu9h+mzd7tl1Tq72fyLURI3RFdmKWIcJlCXGImpacJje0v7jcSSSTmJSSSSSn//Q9VSSSSUpAzaDk4d+O07TdW+sO8C5pZP4o6bjVI6piTEgjcGw0ej3izo2LZG1zaWsewggtsYPRuqc1233VXMfWs/rlrBkdKoBHqPza3hvcMYHh1n/AG5ZVX/1xUsnr31RtuORR1KzEus1tdR6rA/T6VtLqn0vs/4T0vVSx+r/AFN3MDs71LBay43WutL3Prn0vUtsa39FXudso/mP+DUPHEx4eKHTXidKPK5Y5Tm9jmNeI8Hsz9Bn3n+7D/n/AOrdnorvTObhvAbdRlWvInUsyHuy6bf6rm2+l/xlNqsdWyG43Tcm5w3ba3BrBy57hsqqb/LtscytiqdUf0R+IOrX5HpVsAazPxnuDtrnBuz1Mbd6tXq/4J/qUqn0vK6B1DK/Vs63qWVSPUb65eRWAdnqVVOZTjts9+31fT9ZO4q9Fiz8uv6P91h9rjvmTDJwxP60DHLg9yPzx935YcX9b+bV07HsxusUYhdL6OlUVOI8WWOY4/8ARXRKpTiUfbTm7T6/pCndJjYHGwDZ9H6blLqWdV07AvzbvoY7C8iYkge1gn857vY1OiOEHsLP0YcsznyRoEzkIx/vZPBwul5eRZ0/pXS8az7O44NeRdfta5wYCyoV0Msmv1Hu+nZZXYyr/R/pFLrPTcaroufdkZORa5uPYQ+694bvLXNqb6NbqcXc+x3tZ6KB9WvsvWPq9hOJdXfhA0C2lxZZW5scWN+k26n0X2VP31f5i2KejYTiXZVlua8ggDJsL2gOBrdsoGyhr9jnN9T0vVUcYmUBsbiKJ2Gn7rczZY4eZlZljOPLL3Iwj+sySjk4v539ycWOcwjP6MSDPr2SfP7Lkf8AkUDrlPq9Q6da6yymkPtx3WVPLCHXNaadzx+Y+2j0f+OspWV03Nw8/rHUugXXW3UY2x2A974srso/R3mjIGy71GPd7Pc/9Eyz9+xbLsXpuJTYepZO+rJiuyzOtBa4CXNpY1/p0M/Of+jYkDxCR0q+/WH/AKKtyQODJiieL3I4+GMYx1nj5njnxx/rwjnn6P8AO41x0X2l323NAH/Du/8AIqz0qnEqrvGNa+8uud9osscXuNrWspe31Hf6NtbKvZ+4snf0CfTPXD9nmfS+3CI/d
9bf9q2/+hC28B/T3YzW9PdU7Gr9rPQLSwR+aPT9qdGr0r7eJhz+4MZEjkIJHzQ9qH+F/W/qtlJMDOo47FOpGopJJJJT/9H1VJJJJSkx4KdMeD8ElPiHSMfHy+o4uNk2elRdYG2WAhu1p5O5/satn6zdB6L0vFpu6d1AZVr7Nj6S+t52wXeqPQ+jsc3b7v8ASLD6RhftLPxcAP8AS+0vFfqbd22dd2yWbuP3lvfWL6jW9C6f9v8Attd7A9rHMLPSd7tG+n+kt9T+p/o1nRx3CXoB/rfuvZZubA5rEDzEsZP+QEeKOb+9P9FD0yy4/VHrbXbvQbZi+nzt9Q2fpdv5u/0xT6n/AFpX/wDFrLus5TRz9lJ/8EqVOjr12Z9Uuo9KvZW37KKLqXVVtrBaciquxtjKtle/c9nv2e//AAit/wCLFw/bmUSYAxD/AOfK0+ERx4q1oftk1uZyyPK88JARM53QNivbwxjL/C4X0Zu9p0H4Lg/8YnXy97eiUPkMIszCJjd9KnH/ALP89Z/1n9xd7k5ApxrbmQ91bHPa2eS0F21eU4/1ff1B2Pe+25z84Cy65zQ0+pcaSbfTf+nsprsyvfbTXfTb/wBycX1LPs1jOZGPDH9Lfycn4TDFHKc+U6YvkH+sP6X+Cz+p3Xm9Hzh67owskivK/kOB/QZP9Vm7Zb/wa7v639er6N0p1lZH2zJmvEGkgx7r9fzKG+//AIz0v9IvPsT6u0W2AOynje0WV1ivdYanOZivtfSz137sbKdkVXUen/2k/wAF6nrIbMfK6r01+Rda+wdLpfRjFrZYa8fbc5jvpWNc6u76dno/o6/+1H6T0IYTnCBjW/y+Hd0uZwctzHMwzk6RIGaNH9Z/m/8A15/UaPTc2/BzqcvGdtvpdurcZgn9yyC39Ha3dXZ/XXafXPqmN1T6p4ebjH2WZbA9h+kx4rv9Smz+XW7/ANKLnacDBZisyrqJa/F31nUtdZ6XvueX2NY+qq36dbP8PX/1lD6xjO6Zjv6a2191T7GXO3tc0B9LrsRtzNzK2vry6nb97N/83s9W70/VTIiUYSj+jINjLLFm5jBlFjLhlof34fpwP9xsfVj6rjr7clxy/sv2YsH836m7eH/8JVt27FWs/aX1X60+tlm3Ioj3NnZYxwFjQ5vt31WfnsXT/wCKzVvVJ7Oo/JasH695tGV9ZL30OD66WMqL2kEFzAXWQR+45/p/12JHFEYozGk73RDns2Tn8/L5KyctwD0GMajcYdf0uLi/SfU+n5jM7Ax82sbWZNTLQ2ZjeA7Z/ZVhZP1UofR9XOnV2fS9BjiDyN49Tb/Z3rWV6JJAJ7PLZoxjknGPyxlIR/ug6KSSSRWP/9L1VJfKqSSn6qTHUEL5WSSU+ss/xY/WRoEZGGC3giy0H8MdOf8AFn9ZXRuycR0cbrbnf9VQvJUlV/UeLv8A/Cd/5N9yr/xe5GN0HNxKciu3qWd6IdY8FlTGVWMvNTNrbbfc5vuf+f8Ao/ZWsY/4sPrEebsI/F9v/vOvJkkZezpvtpTFh/0lc64b4/Xx183DHb+rwcL67i/4s+vU5VNzrMItqsY8gOsMhrg+NcfyVX6y/UnqODn229PxnZWDa8uqFLS99QcS70bKW7rdjPzLGb/0a8sSTT7PCavdlx/6Q96PHwVR4v3OH/B/S/dfROnfVHr3UMgU14dlLTq/IvY6qtoPtLpuax9v9Spr10/1m/xfObhYtvRWerdiVNpvp0a60NJd9pZMfrG59m9r3++v+a/m9lniiSUPa4JXf7f8FPMffvvGPg4eGzXDxe3/AFvd4nvGfV3rb3tYzpuUbHEDaaLGwT+9ZYxlTf629dUz/Fz1L/m+KGWUs6lkZFd14sc4MZVWy1jMZtlbLd9m+7fZ/gv+2v0njKSEPa1u9l/N/ffR7fBXEL4eLf8AR4vc/RfWT/iw+sR5vw/+3Lf/AHnWn0X/ABZvryGX9ayK7a6yCMWgOLXR9EW22hn6P/gmVf8AXF4mknx9ix+1r5v9Ke3L5a6+38/0fqpJfKqSsuI/VSS+VUklP//Z/+0UalBob3Rvc2hvcCAzLjAAOEJJTQQEAAAAAAAHHAIAAAIAAgA4QklNBCUAAAAAABBGDPKJJrhW2rCcAaGwp5B3OEJJTQPtAAAAAAAQAEgAAAABAAEASAAAAAEAAThCSU0EJgAAAAAADgAAAAAAAAAAAAA/gAAAOEJJTQQNAAAAAAAEAAAAeDhCSU0EGQAAAAAABAAAAB44QklNA/MAAAAAAAkAAAAAAAAAAAEAOEJJTQQKAAAAAAABAAA4QklNJxAAAAAAAAoAAQAAAAAAAAACOEJJTQP1AAAAAABIAC9mZgABAGxmZgAGAAAAAAABAC9mZgABAKGZmgAGAAAAAAABADIAAAABAFoAAAAGAAAAAAABADUAAAABAC0AAAAGAAAAAAABOEJJTQP4AAAAAABwAAD/////////////////////////////A+gAAAAA/////////////////////////////wPoAAAAAP////////////////////////////8D6AAAAAD/////////////////////////////A+gAADhCSU0EAAAAAAAAAgACOEJJTQQCAAAAAAAGAAAAAAAAOEJJTQQwAAAAAAADAQEBADhCSU0ELQAAAAAAAgAAOEJJTQQIAAAAAAAQAAAAAQAAAkAAAAJAAAAAADhCSU0EHgAAAAAABAAAAAA4QklNBBoAAAAAA08AAAAGAAAAAAAAAAAAAAD6AAAC0AAAAA0ASABvAGwAaQBkAGEAeQAgAEMAaABlAGUAcgAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAC0AAAAPoAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAQAAAAAAAG51bGwAAAACAAAABmJvdW5kc09iamMAAAABAAAAAAAAUmN0MQAAAAQAAAAAVG9wIGxvbmcAAAAAAAAAAExlZnRsb25nAAAAAAAAAABCdG9tbG9uZwAAAPoAAAAAUmdodGxvbmcAAALQAAAABnNsaWNlc1ZsTHMAAAABT2JqYwAAAAEAAAAAAAVzbGljZQAAABIAAAAHc2xpY2VJRGxvbmcAAAAAAAAAB2dyb3VwSURsb25nAAAAAAAAAAZvcmlnaW5lbnVtAAAADEVTbGljZU9yaWdpbgAAAA1hdXRvR2VuZXJhdGVkAAAAAFR5cGVlbnVtAAAACkVTbGljZVR5cGUAAAAASW1nIAAAAAZib3VuZHNPYmpjAAAAAQAAAAAAAFJjdDEAAAAEAAAAAFRvcCBsb25nAAAAAAAAAABMZWZ0bG9uZwAAAAAAAAAAQnRvbWxvbmcAAAD6AAAAAFJnaHRsb25nAAAC0AAAAAN1cmxURVhUAAAAAQAAAAAAAG51bGxURVhUAAAAAQAAAAAAAE1zZ2VURVhUAAAAAQAAAAAABmFsdFRhZ1RFWFQAAAABAAAAAAAOY2VsbFRleHRJc0hUTUxib29s
AQAAAAhjZWxsVGV4dFRFWFQAAAABAAAAAAAJaG9yekFsaWduZW51bQAAAA9FU2xpY2VIb3J6QWxpZ24AAAAHZGVmYXVsdAAAAAl2ZXJ0QWxpZ25lbnVtAAAAD0VTbGljZVZlcnRBbGlnbgAAAAdkZWZhdWx0AAAAC2JnQ29sb3JUeXBlZW51bQAAABFFU2xpY2VCR0NvbG9yVHlwZQAAAABOb25lAAAACXRvcE91dHNldGxvbmcAAAAAAAAACmxlZnRPdXRzZXRsb25nAAAAAAAAAAxib3R0b21PdXRzZXRsb25nAAAAAAAAAAtyaWdodE91dHNldGxvbmcAAAAAADhCSU0EKAAAAAAADAAAAAE/8AAAAAAAADhCSU0EEQAAAAAAAQEAOEJJTQQUAAAAAAAEAAAAljhCSU0EDAAAAAAOSwAAAAEAAACgAAAAOAAAAeAAAGkAAAAOLwAYAAH/2P/gABBKRklGAAECAABIAEgAAP/tAAxBZG9iZV9DTQAC/+4ADkFkb2JlAGSAAAAAAf/bAIQADAgICAkIDAkJDBELCgsRFQ8MDA8VGBMTFRMTGBEMDAwMDAwRDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAENCwsNDg0QDg4QFA4ODhQUDg4ODhQRDAwMDAwREQwMDAwMDBEMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwM/8AAEQgAOACgAwEiAAIRAQMRAf/dAAQACv/EAT8AAAEFAQEBAQEBAAAAAAAAAAMAAQIEBQYHCAkKCwEAAQUBAQEBAQEAAAAAAAAAAQACAwQFBgcICQoLEAABBAEDAgQCBQcGCAUDDDMBAAIRAwQhEjEFQVFhEyJxgTIGFJGhsUIjJBVSwWIzNHKC0UMHJZJT8OHxY3M1FqKygyZEk1RkRcKjdDYX0lXiZfKzhMPTdePzRieUpIW0lcTU5PSltcXV5fVWZnaGlqa2xtbm9jdHV2d3h5ent8fX5/cRAAICAQIEBAMEBQYHBwYFNQEAAhEDITESBEFRYXEiEwUygZEUobFCI8FS0fAzJGLhcoKSQ1MVY3M08SUGFqKygwcmNcLSRJNUoxdkRVU2dGXi8rOEw9N14/NGlKSFtJXE1OT0pbXF1eX1VmZ2hpamtsbW5vYnN0dXZ3eHl6e3x//aAAwDAQACEQMRAD8A9VSSSSUpJJM5zWNLnGGtEkngAJKXWd1nKyMb7D6L9nrZlVVugMscH7me4O+lCBiY1/V6K8/LyL6qrx6mPi0WGkNqfBpN1mOW3W3ur99n6b0Wb/TZX/hH0+q0341/TaDc/IxXZ1Wx1zg6yt7fUd6fqRvvpsr37PV/S1WV/wA5b6v6COUjw2BQNUW5hwQGURMhKceLihXo9MZcURP9KUP/AGG7zPc4udwFJnucXnjhqymjJ6plX0syHY/T8V3oO9CW2W3N2vvJyNu6qimfQ/V/0tlvrfpfTT5eLk9KofnYWRdaygepfi5D3XiytgLrRTZcX305Gz+a22+jv9llX56dxda9I6sQ5cWIGYGWVVCjw+r5YTn+jP8AlN2Elh5GTnZPW2YmDkenRdiMvNoAcGM9R+6ylr2PY7IyGurrr9X9HWz1LPTs/m0d/RbqWephZuS3LYJab7n21PP7l9Fm9npP/O+zspsZ/gkOO7oXX8tEnlxHhE8ggZgSiKka4v8AOfuf891VB79undZzcjL6phYduO44lGXT619rC02MlrCzHp3t+k9z3frHp/4H/SW+pXVzcR/T8S/PxMnIL8Wt1rqr7n3V2NZ+ksre3I9T097GO2WU+n6b/wDMR4tLAsb3/BAwDiEJTEchPAIfNwy4uD9ZL9B0bc1tF2PU/cXZT3VsgCAWsfkO3/u+ypXFz/VOnNs6l00jKyWjJvsMNtIaz9Xvd+hbH6P/AM7VjJqyOn+niYWVbZmdQeK67MpxubU2tr7b8htft+jX+b+ff6HqIcZuVjQGl55eBji4Z+ucOM8QPD6Z5OOXF+7jhjdlJZZ6KC0vq6hmDImRf6xcN3niuH2Lb/wf2ZWOl5lmXjv9drWZOPY6jIaydu9h+mzd7tl1Tq72fyLURI3RFdmKWIcJlCXGImpacJje0v7jcSSSTmJSSSSSn//Q9VSSSSUpAzaDk4d+O07TdW+sO8C5pZP4o6bjVI6piTEgjcGw0ej3izo2LZG1zaWsewggtsYPRuqc1233VXMfWs/rlrBkdKoBHqPza3hvcMYHh1n/AG5ZVX/1xUsnr31RtuORR1KzEus1tdR6rA/T6VtLqn0vs/4T0vVSx+r/AFN3MDs71LBay43WutL3Prn0vUtsa39FXudso/mP+DUPHEx4eKHTXidKPK5Y5Tm9jmNeI8Hsz9Bn3n+7D/n/AOrdnorvTObhvAbdRlWvInUsyHuy6bf6rm2+l/xlNqsdWyG43Tcm5w3ba3BrBy57hsqqb/LtscytiqdUf0R+IOrX5HpVsAazPxnuDtrnBuz1Mbd6tXq/4J/qUqn0vK6B1DK/Vs63qWVSPUb65eRWAdnqVVOZTjts9+31fT9ZO4q9Fiz8uv6P91h9rjvmTDJwxP60DHLg9yPzx935YcX9b+bV07HsxusUYhdL6OlUVOI8WWOY4/8ARXRKpTiUfbTm7T6/pCndJjYHGwDZ9H6blLqWdV07AvzbvoY7C8iYkge1gn857vY1OiOEHsLP0YcsznyRoEzkIx/vZPBwul5eRZ0/pXS8az7O44NeRdfta5wYCyoV0Msmv1Hu+nZZXYyr/R/pFLrPTcaroufdkZORa5uPYQ+694bvLXNqb6NbqcXc+x3tZ6KB9WvsvWPq9hOJdXfhA0C2lxZZW5scWN+k26n0X2VP31f5i2KejYTiXZVlua8ggDJsL2gOBrdsoGyhr9jnN9T0vVUcYmUBsbiKJ2Gn7rczZY4eZlZljOPLL3Iwj+sySjk4v539ycWOcwjP6MSDPr2SfP7Lkf8AkUDrlPq9Q6da6yymkPtx3WVPLCHXNaadzx+Y+2j0f+OspWV03Nw8/rHUugXXW3UY2x2A974srso/R3mjIGy71GPd7Pc/9Eyz9+xbLsXpuJTYepZO+rJiuyzOtBa4CXNpY1/p0M/Of+jYkDxCR0q+/WH/AKKtyQODJiieL3I4+GMYx1nj5njnxx/rwjnn6P8AO41x0X2l323NAH/Du/8AIqz0qnEqrvGNa+8uud9osscXuNrWspe31Hf6NtbKvZ+4snf0CfTPXD9nmfS+3CI/d9bf9q2/+hC28B/T3YzW9PdU7Gr9rPQLSwR+aPT9qdGr0r7eJhz+4MZEjkIJHzQ9qH+F/W/qtlJMDOo47FOpGopJJJJT/9H1VJJJJSkx4KdMeD8ElPiHSMfHy+o4uNk2elRdYG2WAhu1p5O5/satn6zdB6L0vFpu6d1AZVr7Nj6S+t52wXeqPQ+jsc3b7v8ASLD6RhftLPxcAP8AS+0vFfqbd22dd2yWbuP3lvfWL6jW9C6f9v8Attd7A9rHMLPSd7tG+n+kt9T+p/o1nRx3CXoB/rfuvZZubA5rEDzEsZP
+QEeKOb+9P9FD0yy4/VHrbXbvQbZi+nzt9Q2fpdv5u/0xT6n/AFpX/wDFrLus5TRz9lJ/8EqVOjr12Z9Uuo9KvZW37KKLqXVVtrBaciquxtjKtle/c9nv2e//AAit/wCLFw/bmUSYAxD/AOfK0+ERx4q1oftk1uZyyPK88JARM53QNivbwxjL/C4X0Zu9p0H4Lg/8YnXy97eiUPkMIszCJjd9KnH/ALP89Z/1n9xd7k5ApxrbmQ91bHPa2eS0F21eU4/1ff1B2Pe+25z84Cy65zQ0+pcaSbfTf+nsprsyvfbTXfTb/wBycX1LPs1jOZGPDH9Lfycn4TDFHKc+U6YvkH+sP6X+Cz+p3Xm9Hzh67owskivK/kOB/QZP9Vm7Zb/wa7v639er6N0p1lZH2zJmvEGkgx7r9fzKG+//AIz0v9IvPsT6u0W2AOynje0WV1ivdYanOZivtfSz137sbKdkVXUen/2k/wAF6nrIbMfK6r01+Rda+wdLpfRjFrZYa8fbc5jvpWNc6u76dno/o6/+1H6T0IYTnCBjW/y+Hd0uZwctzHMwzk6RIGaNH9Z/m/8A15/UaPTc2/BzqcvGdtvpdurcZgn9yyC39Ha3dXZ/XXafXPqmN1T6p4ebjH2WZbA9h+kx4rv9Smz+XW7/ANKLnacDBZisyrqJa/F31nUtdZ6XvueX2NY+qq36dbP8PX/1lD6xjO6Zjv6a2191T7GXO3tc0B9LrsRtzNzK2vry6nb97N/83s9W70/VTIiUYSj+jINjLLFm5jBlFjLhlof34fpwP9xsfVj6rjr7clxy/sv2YsH836m7eH/8JVt27FWs/aX1X60+tlm3Ioj3NnZYxwFjQ5vt31WfnsXT/wCKzVvVJ7Oo/JasH695tGV9ZL30OD66WMqL2kEFzAXWQR+45/p/12JHFEYozGk73RDns2Tn8/L5KyctwD0GMajcYdf0uLi/SfU+n5jM7Ax82sbWZNTLQ2ZjeA7Z/ZVhZP1UofR9XOnV2fS9BjiDyN49Tb/Z3rWV6JJAJ7PLZoxjknGPyxlIR/ug6KSSSRWP/9L1VJfKqSSn6qTHUEL5WSSU+ss/xY/WRoEZGGC3giy0H8MdOf8AFn9ZXRuycR0cbrbnf9VQvJUlV/UeLv8A/Cd/5N9yr/xe5GN0HNxKciu3qWd6IdY8FlTGVWMvNTNrbbfc5vuf+f8Ao/ZWsY/4sPrEebsI/F9v/vOvJkkZezpvtpTFh/0lc64b4/Xx183DHb+rwcL67i/4s+vU5VNzrMItqsY8gOsMhrg+NcfyVX6y/UnqODn229PxnZWDa8uqFLS99QcS70bKW7rdjPzLGb/0a8sSTT7PCavdlx/6Q96PHwVR4v3OH/B/S/dfROnfVHr3UMgU14dlLTq/IvY6qtoPtLpuax9v9Spr10/1m/xfObhYtvRWerdiVNpvp0a60NJd9pZMfrG59m9r3++v+a/m9lniiSUPa4JXf7f8FPMffvvGPg4eGzXDxe3/AFvd4nvGfV3rb3tYzpuUbHEDaaLGwT+9ZYxlTf629dUz/Fz1L/m+KGWUs6lkZFd14sc4MZVWy1jMZtlbLd9m+7fZ/gv+2v0njKSEPa1u9l/N/ffR7fBXEL4eLf8AR4vc/RfWT/iw+sR5vw/+3Lf/AHnWn0X/ABZvryGX9ayK7a6yCMWgOLXR9EW22hn6P/gmVf8AXF4mknx9ix+1r5v9Ke3L5a6+38/0fqpJfKqSsuI/VSS+VUklP//ZADhCSU0EIQAAAAAAVQAAAAEBAAAADwBBAGQAbwBiAGUAIABQAGgAbwB0AG8AcwBoAG8AcAAAABMAQQBkAG8AYgBlACAAUABoAG8AdABvAHMAaABvAHAAIABDAFMAMgAAAAEAOEJJTQQGAAAAAAAHAAYBAQADAQD/4UAUaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLwA8P3hwYWNrZXQgYmVnaW49Iu+7vyIgaWQ9Ilc1TTBNcENlaGlIenJlU3pOVGN6a2M5ZCI/Pgo8eDp4bXBtZXRhIHhtbG5zOng9ImFkb2JlOm5zOm1ldGEvIiB4OnhtcHRrPSIzLjEuMS0xMTEiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp4YXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iPgogICAgICAgICA8eGFwOkNyZWF0b3JUb29sPkFkb2JlIFBob3Rvc2hvcCBDUzIgV2luZG93czwveGFwOkNyZWF0b3JUb29sPgogICAgICAgICA8eGFwOkNyZWF0ZURhdGU+MjAxOC0wMi0wNVQxNDowODo1OS0wODowMDwveGFwOkNyZWF0ZURhdGU+CiAgICAgICAgIDx4YXA6TW9kaWZ5RGF0ZT4yMDE4LTAyLTA1VDE0OjA4OjU5LTA4OjAwPC94YXA6TW9kaWZ5RGF0ZT4KICAgICAgICAgPHhhcDpNZXRhZGF0YURhdGU+MjAxOC0wMi0wNVQxNDowODo1OS0wODowMDwveGFwOk1ldGFkYXRhRGF0ZT4KICAgICAgPC9yZGY6RGVzY3JpcHRpb24+CiAgICAgIDxyZGY6RGVzY3JpcHRpb24gcmRmOmFib3V0PSIiCiAgICAgICAgICAgIHhtbG5zOmRjPSJodHRwOi8vcHVybC5vcmcvZGMvZWxlbWVudHMvMS4xLyI+CiAgICAgICAgIDxkYzpmb3JtYXQ+aW1hZ2UvanBlZzwvZGM6Zm9ybWF0PgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6eGFwTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iCiAgICAgICAgICAgIHhtbG5zOnN0RXZ0PSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VFdmVudCMiCiAgICAgICAgICAgIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIj4KICAgICAgICAgPHhhcE1NOkhpc3Rvcnk+CiAgICAgICAgICAgIDxyZGY6U2VxPgogICAgICAgICAgICAgICA8cmRmOmxpIHJkZjpwYXJzZVR5cGU9IlJlc291cmNlIj4KICAgICAgICAgICAgICAgICAgPHN0RXZ0OmFjdGlvbj5jb252ZXJ0ZWQ8L3N0RXZ0OmFjdGlvbj4KICAgICAgICAgICAgICAgICAgPHN0RXZ0OnBhcmFtZXRlcnM+ZnJvbSBpbWFnZS9wbmcgdG8gYXBwbGljYXRpb24vdm
5kLmFkb2JlLnBob3Rvc2hvcDwvc3RFdnQ6cGFyYW1ldGVycz4KICAgICAgICAgICAgICAgPC9yZGY6bGk+CiAgICAgICAgICAgICAgIDxyZGY6bGkgcmRmOnBhcnNlVHlwZT0iUmVzb3VyY2UiPgogICAgICAgICAgICAgICAgICA8c3RFdnQ6YWN0aW9uPnNhdmVkPC9zdEV2dDphY3Rpb24+CiAgICAgICAgICAgICAgICAgIDxzdEV2dDppbnN0YW5jZUlEPnhtcC5paWQ6RDcxMTlGMDRCODc1RTAxMTk1QUZCODkxOTFFNUIxODk8L3N0RXZ0Omluc3RhbmNlSUQ+CiAgICAgICAgICAgICAgICAgIDxzdEV2dDp3aGVuPjIwMTEtMDUtMDNUMTI6MDM6MjMtMDc6MDA8L3N0RXZ0OndoZW4+CiAgICAgICAgICAgICAgICAgIDxzdEV2dDpjaGFuZ2VkPi87L21ldGFkYXRhPC9zdEV2dDpjaGFuZ2VkPgogICAgICAgICAgICAgICA8L3JkZjpsaT4KICAgICAgICAgICAgICAgPHJkZjpsaSByZGY6cGFyc2VUeXBlPSJSZXNvdXJjZSI+CiAgICAgICAgICAgICAgICAgIDxzdEV2dDphY3Rpb24+Y29udmVydGVkPC9zdEV2dDphY3Rpb24+CiAgICAgICAgICAgICAgICAgIDxzdEV2dDpwYXJhbWV0ZXJzPmZyb20gaW1hZ2UvcG5nIHRvIGFwcGxpY2F0aW9uL3ZuZC5hZG9iZS5waG90b3Nob3A8L3N0RXZ0OnBhcmFtZXRlcnM+CiAgICAgICAgICAgICAgIDwvcmRmOmxpPgogICAgICAgICAgICAgICA8cmRmOmxpIHJkZjpwYXJzZVR5cGU9IlJlc291cmNlIj4KICAgICAgICAgICAgICAgICAgPHN0RXZ0OmFjdGlvbj5zYXZlZDwvc3RFdnQ6YWN0aW9uPgogICAgICAgICAgICAgICAgICA8c3RFdnQ6aW5zdGFuY2VJRD54bXAuaWlkOkQ4MTE5RjA0Qjg3NUUwMTE5NUFGQjg5MTkxRTVCMTg5PC9zdEV2dDppbnN0YW5jZUlEPgogICAgICAgICAgICAgICAgICA8c3RFdnQ6d2hlbj4yMDExLTA1LTAzVDEyOjAzOjIzLTA3OjAwPC9zdEV2dDp3aGVuPgogICAgICAgICAgICAgICAgICA8c3RFdnQ6Y2hhbmdlZD4vbWV0YWRhdGE8L3N0RXZ0OmNoYW5nZWQ+CiAgICAgICAgICAgICAgIDwvcmRmOmxpPgogICAgICAgICAgICA8L3JkZjpTZXE+CiAgICAgICAgIDwveGFwTU06SGlzdG9yeT4KICAgICAgICAgPHhhcE1NOkRlcml2ZWRGcm9tIHJkZjpwYXJzZVR5cGU9IlJlc291cmNlIj4KICAgICAgICAgICAgPHN0UmVmOmluc3RhbmNlSUQ+dXVpZDo0OTAwNDRDMUYzRkFFNzExODM0NUY5ODM1RkY1MTczOTwvc3RSZWY6aW5zdGFuY2VJRD4KICAgICAgICAgICAgPHN0UmVmOmRvY3VtZW50SUQ+dXVpZDoyQUQ5OTYwMEYzRkFFNzExODM0NUY5ODM1RkY1MTczOTwvc3RSZWY6ZG9jdW1lbnRJRD4KICAgICAgICAgPC94YXBNTTpEZXJpdmVkRnJvbT4KICAgICAgICAgPHhhcE1NOkRvY3VtZW50SUQ+dXVpZDowQ0ZEQ0UxMUFDMEFFODExQTFBN0VDQzcxODQ2RjdFNzwveGFwTU06RG9jdW1lbnRJRD4KICAgICAgICAgPHhhcE1NOkluc3RhbmNlSUQ+dXVpZDowREZEQ0UxMUFDMEFFODExQTFBN0VDQzcxODQ2RjdFNzwveGFwTU06SW5zdGFuY2VJRD4KICAgICAgICAgPHhhcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD54bXAuZGlkOkQ3MTE5RjA0Qjg3NUUwMTE5NUFGQjg5MTkxRTVCMTg5PC94YXBNTTpPcmlnaW5hbERvY3VtZW50SUQ+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgICAgIDx0aWZmOlhSZXNvbHV0aW9uPjcyMDAwMC8xMDAwMDwvdGlmZjpYUmVzb2x1dGlvbj4KICAgICAgICAgPHRpZmY6WVJlc29sdXRpb24+NzIwMDAwLzEwMDAwPC90aWZmOllSZXNvbHV0aW9uPgogICAgICAgICA8dGlmZjpSZXNvbHV0aW9uVW5pdD4yPC90aWZmOlJlc29sdXRpb25Vbml0PgogICAgICAgICA8dGlmZjpOYXRpdmVEaWdlc3Q+MjU2LDI1NywyNTgsMjU5LDI2MiwyNzQsMjc3LDI4NCw1MzAsNTMxLDI4MiwyODMsMjk2LDMwMSwzMTgsMzE5LDUyOSw1MzIsMzA2LDI3MCwyNzEsMjcyLDMwNSwzMTUsMzM0MzI7MDVFNEVFMEM0NUVFMkYxQ0U1NzgyOEU2Q0NFOEQyNzE8L3RpZmY6TmF0aXZlRGlnZXN0PgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6ZXhpZj0iaHR0cDovL25zLmFkb2JlLmNvbS9leGlmLzEuMC8iPgogICAgICAgICA8ZXhpZjpQaXhlbFhEaW1lbnNpb24+NzIwPC9leGlmOlBpeGVsWERpbWVuc2lvbj4KICAgICAgICAgPGV4aWY6UGl4ZWxZRGltZW5zaW9uPjI1MDwvZXhpZjpQaXhlbFlEaW1lbnNpb24+CiAgICAgICAgIDxleGlmOkNvbG9yU3BhY2U+LTE8L2V4aWY6Q29sb3JTcGFjZT4KICAgICAgICAgPGV4aWY6TmF0aXZlRGlnZXN0PjM2ODY0LDQwOTYwLDQwOTYxLDM3MTIxLDM3MTIyLDQwOTYyLDQwOTYzLDM3NTEwLDQwOTY0LDM2ODY3LDM2ODY4LDMzNDM0LDMzNDM3LDM0ODUwLDM0ODUyLDM0ODU1LDM0ODU2LDM3Mzc3LDM3Mzc4LDM3Mzc5LDM3MzgwLDM3MzgxLDM3MzgyLDM3MzgzLDM3Mzg0LDM3Mzg1LDM3Mzg2LDM3Mzk2LDQxNDgzLDQxNDg0LDQxNDg2LDQxNDg3LDQxNDg4LDQxNDkyLDQxNDkzLDQxNDk1LDQxNzI4LDQxNzI5LDQxNzMwLDQxOTg1LDQxOTg2LDQxOTg3LDQxOTg4L
DQxOTg5LDQxOTkwLDQxOTkxLDQxOTkyLDQxOTkzLDQxOTk0LDQxOTk1LDQxOTk2LDQyMDE2LDAsMiw0LDUsNiw3LDgsOSwxMCwxMSwxMiwxMywxNCwxNSwxNiwxNywxOCwyMCwyMiwyMywyNCwyNSwyNiwyNywyOCwzMDtDMUZGQkQ2RUZENDBGQ0Y4MTVFNDdFNERFNDAwMDc1RjwvZXhpZjpOYXRpdmVEaWdlc3Q+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczpwaG90b3Nob3A9Imh0dHA6Ly9ucy5hZG9iZS5jb20vcGhvdG9zaG9wLzEuMC8iPgogICAgICAgICA8cGhvdG9zaG9wOkNvbG9yTW9kZT4zPC9waG90b3Nob3A6Q29sb3JNb2RlPgogICAgICAgICA8cGhvdG9zaG9wOkhpc3RvcnkvPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAg
ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA
gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC
AgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgI
CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAKPD94cGFja2V0IGVuZD0idyI/Pv/uACZBZG9iZQBkQAAAAAEDABUEAwYKDQAAAAAAAAAAAAAAAAAAAAD/2wCEAAICAgICAgICAgIDAgICAwQDAgIDBAUEBAQEBAUGBQUFBQUFBgYHBwgHBwYJCQoKCQkMDAwMDAwMDAwMDAwMDAwBAwMDBQQFCQYGCQ0KCQoNDw4ODg4PDwwMDAwMDw8MDAwMDAwPDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDP/CABEIAPoC0AMBEQACEQEDEQH/xAEXAAEAAQQDAQAAAAAAAAAAAAAACAQFBgcCAwkBAQEAAQUBAQAAAAAAAAAAAAAABQIDBAYHAQgQAAAFAgMGBAYDAQEBAQAAAAECAwQFAAYQIBFQMRIyEwczFDQVMCEjNTYXQCIWYCQlQREAAQIDAgYKDQkHBAMBAQAAAQIDABEEEgUQITFBUbEgYXGBkSIyE3MUUMHRQlJystIjkzR0BjChYpIzQyQVNeGCwlNjRCVAouKzg6NUYHUSAAECAgcGBAUFAAIDAAAAAAEAAiARECExQXESAzBQUSIyE2GRobFAYPCBI8HR4UIE8WJSchQTAQACAQIEBQQDAQEAAAAAAAEAESExQRBRYXEg8IGRoVCxwdEw4fFAYP/aAAwDAQMCEQMRAAABn8AAAAAAAAAAAAAcS1HA7y5AtJ2lyAAAAAAAAAOJ1HacgAAAAAAAAAAAAAAAAAAAAAAAAAAAARDgesywnOV9NVqnBdzmURRF1O0AAAAAAAAFGUIBVFefQAAAAAAAAAAAAAAAAAAAAAAAAAAAQ31/r0sp7k/XVZFWV5wLSVRXgAAAAAAAHAoynOALfRkVHtu8VW6oAAAAAAAAAAAAAAAAAAAAAAAAAAAEN9f69Kae5NUVWhdzmW46S7H0AAAAAAAHwtx0nEAjxDdK3DJaRl9+MuIAAAAAAAAAAAAAAAAAAAAAAAAAAAIaa/12U89yeqqtVRcCnLe8uL3vAABxKQqXnY9AAAFE8pjregVJHKG6Vt6S0jLr8ZcgAAAAAAAAAAAAAAAAAAAAAAAAanwdp0jG7pcK7Hd7RW1WJFS/OojwXVpWz3J6uq1djWGFs2jY7cqz
232lP5ckPLc/zTIhxg+NNWa3mXq9Fx6iugdw8uUdN2S8xzjK78WPhH+K3zBMabe0db263MSRsxzfE8eW0lF77rPD2XMcmDu9eL2qs3yYXf8AKaHhWNMx4iugVPtocHskJfnmTXo0AAAAAAAAAAAAAAAAAAAaAi97x6zIScmea9nvgsdvMirC9Mt1GRLOf5JdKrWj4rfMaoy5HS/Obt759KKm7EeB6xvyU0TZOZrursLZtSYO1W7yuTs1zC91W+0tVvJhVrvZZo7Fxy83MSLMJ07YWXAbYkNQozia3wtk07H7lLGd5TcyMEN0rcMno+dXozsejXmJPx7it/l5Pcmq6rQt9F+IMD1qVk3y7Ir0eAAAAAAAAAAAAAAAAAAKCi/DuA65NPYuOAC3mLWZDz/1P6C9Ftw+cqmm7EWC6tLzYeP3U5gHT5VCnXeyzb2PjGr8LZooQfU58bRwfv8AfPoBrPD2PXOJsMkpfnehYved6ymjW86gCHuu9jmrsXG+57FqG6TvCS0nNr8YBCbXOzTR2LjlR7QALPby4xQ3SpazvKgAAAAAAAAAAAAAAAAABpeO3Hre7skdNA6i1Ag1q/dZx7Nw/U0duVXdwdx52rd4ABFyF6bviT0bGbMlrfD2KTMzzcAC2UZMToPqcxJ/kYoijPgBETXuwTS2DkFL75GSG6TvyS0rLL8WLNby4xQ3SpXTvKqEqisAIb6/16Y+wch+gAAAAAAAAAAAAAAAAAEaYfo24pDUc0yIcC2lOCImv9fmBPckjLDdLkTLc9ya7hAADScduVfVYq6rWC48xvmV0GuO0AoKL8SYLq0xdg5F0lveW+m/g+LOYHiz9ot5mnI7c/Q/auAWv3yOkN0rfElpOWX4sa6xNg0TG7ptmQ08DsKl72vY5xPQpp7DxupqtgAAAAAAAAAAAAAAAAARph+jbikNRzTIhxwLQARD17r8s5/ksb4bpMlZbn9+u4IAA0nHbncKsfrqsYbjzO+JXQqkuIBQUX4kwXVpi7ByLArErHKH6JdbmLsfM1vOMqCv93AiVr3YJgbByHl75GuG6VvKS0nLL8WNZYk/oSM3nbWfqQAFQVjza+dq3P3wAAAAAAAAAAAAAAAAADTEduHT5Xu2S0wUhQAEGNX7rN7ZuH6rjtxvdzE25n6oAAItQnTt6SmiYdZkcPx5nfEroV1O0AoKL8SYLq0s57k0Jdd7PMjYuM1VVoCsIha92CZOwcg5e+RhhulbyktJyy/F0Zj1GTGuF6ZKed5UB3nw6y7n0AAAAAAAAAAAAAAAAAAFDTehxr/Xpq7FxsW4pgY7ZkYF6r330W27527vLkQoDrM0di46AB0+Vwo13skxdj4xrXC2TD8eZkFK6FcAAUFF+JMF1bfUvz3UuBtki5jnAAuxAHVu9egG0cG5e+RmhukbpkdNyvIibUfXkOtd7LLzYOP1FVuuKwoCkNVR25bvkdNAAAAAAAAAAAAAAAAAAA0LF7zidiVlBNcztvtPEstrNjZDdL4PZcTnK7lVa0JF71jFmSk7M817faRb6L8RoLrG9ZXn+dZcBrfC2TD8eZkbK6F2gAoKL8Q4Pqso5zlUPNe7HMvY+Md3tA+U3Y7Q/SNV4O0To2XiNwrsatwtm17iT8h5jnVG8xqzJ0FGRHSH6PLie5NkFePwLP57HWI6Nl13A3vJ6OAAAAAAAAAAAAAAAAAAANXYWzaLjd1uNzC5++d3tEhpfnWmo7dJAyOlVNVsayw9k0dGbtdLmLx89o6b0g5bn17yIkYfjzFksyG5M7V/oALZRkw+guqy62DkOEYs5oSK3+8XMLl75zpuyEldBoKb8cYjodN5XM7YOPxoh+j43bzfvtOf5Wv7kktMx2xJx8iehXCq0U2mjJ3rJaVtDM10AAAAAAAAAAAAAAAAAAAAUhQAA7i6AAAHwtR1gFUXAAA09gbVD6C6t6E7Z8/fHoPO97cwDq8q8w9P+j/QfaOEZvkwtpPgBcyiOkHMux9AAOBzAAAAAAAAAAAAAAAAKam5j1rPFGUwB998u1VjJq8Xl74BjVmR6vKhSF6vRv33wCtK0AHSQb17sOgYrfPRfbfnvN8mGArCtPoNPYO1ee+r9zlTM81llO8sowCsK4pCgBcSpAAOo6SrAAAAAAAAAAAAAAAANaYexecep/QYAAHJ5e7uFvWT0iXM5yu+XMPy+036UxmzIjurtem24/NNxuYoFeVYBxMOt5nmpqP0NR03pBymhzN2DkY7TrPpUneQL1vtOkY/b71cw/Snbvnmvqs8T68ilA9axfHlhw98HPz2pqtZXfi93yOl7KzNd6ikLiAAAAAAAAAAAAAAAADWeJsPnDqnffoAAAKq5Z2RlQHons/B/MDUPo3GbMgB6fbn81XGvGAuBVA4FvIgQnT4yxHROTzt9p9GNp4LmeRE3kHSdBrjFm/OrV+78XvN5K2Z5tKKZ5sOTzzw1P6D19iT4AAH33zeUjpczti41ktdjkAAAAAAAAAAAAAAAADWeJsHm/q3d/oMuvxef5UGMZsyOuMSfArblr0U2XhcA9c7TjVmQHw9Qtw+crhcxQORVnUU5idmS84dW7vTU3PpdbnmxMnWp87Jxi8n0HE88db7TqLC2QVVXlfVb9ENm4VlV+M5PPO7Ve+6+xZ4D54H0ehX3LG483U577Hxj6AAAAAAAAAAAAAAAADWWJPeb+sdy+gkdK6FMSd5YB50at3fBMaZA9Adl4lCDX+vY7azxxe+p+3fPHfcxeIAB3EDNe7DpjB2gC+3PbbQmFN8rlDJ6SBH6O2+CMD1r6DkZBdqzW/rc+ti4z2+0+dmrd419jTgqPbfpptXA+32nU+JsMLYHq9ooyvrytro9Cdj4jtXM14AAAAAAAAAAAAAAAAaxxZ3ze1ntv0Eh5PSJdzXMxitqRgDrfZ7LbyxW1WvTLauCecesdyxy1nDi99Vts+fbxcxqIpzidpVmhY/boJQPVvrwCrqqvFV22+Yno/sfFs/yImw28rzM1nt1ht5YAutdzuVSVleaynluf8AnVrHctfY02Kj236m7b89cqqOBGaJ3+IsL034c3kpZbn8xprmgAAAAAAAAAAAAAAAA1hiznm7rfaOTwcXvwH05PPhfLmLNWb5fvzP1by21bu2OWs0cHvqxtfBL1XjAAY/by/MjWu12ajJAHwrPa+1Vlt6F9Kdk4xBiD6hoDA2wAAXH25xU+guw8bhfB9P15jzI7/aPUjbOAVvtFUazxJ7zk1zs/0G6M3WfQfYeQAAAAAAAAAAAAAAAADWGNN+bmudj+gAAFfVZ33IalLGU0XNr0d5Y6x3HHLWaOL31Z2nhN6rxgAIbRHQorRe8gAAVa5TqJPSekRfjN1+PRmF6Pq/bfcp2Vkw+icHZ+inM2dl63areXrrHmB3e0er208HrKrY1/jy3mZrfaeYNrZUD6NbFx4AAAAAAAAAAAAAAAAavxprzb17r3IGZ3Y/ZOTDi2UXtOYmwdKoXaqx6UbFx7zk1/ruOW8scPPfVvaOHXqvHAA8xdc7Js3Jh7Dbyav2irqt43bzMAx5Ta2VCV1
VrX2PK4FYk915uu3irHj1gbXsrJh/njKbuFt/LgdQ4k9c6rOlsTYr9Vj4LYkx3+0+sG0cLrKqBqjFnPOPXuugbrzdc9CJ7lIAAAAAAAAAAAAAAAA1fjTHm1r/AFv6CTMjp825jngEPore4mRe8AStk9JjrgbXjlvLHB76u7NxO9V44A+HlDrPbJBZ2r4zbzPrzbmVB6wx5jseZPcw8RtZ3R5VorC2XdWZr238qBh1Fb9sW/E9/tOQ14t3rsZlej8NtZ+BWJPFLOfj9GSO72n1i2biNZVQInxe7w/it+61IlbJ6VMeW0EAAAAAAAAAAAAAAAAavxpjzYgercgSSz9UmhLaCLdTdhJD9J0XiTnF4JMyGpaIw9jx23lDi9nPMc6uFVoACIMXvO0siHtdN6+V412qs6MxNi2XficEsyWc3o7BbMjr2xKSHztWvVePoXC2UbXyYXD7WdcKrXf7TeqsfUONO3mrHwSzJDt9p9L5/kvd7TqrGmYexHSce8o4vPh6Rz3KNp5MOAAAAAAAAAAAAAAAANXY8v5sQPU+QPhxejkqqFXQtATel9AiHGbrjtvKAAAHW9kbnaxHbB2cAAHp4eyokdOizHbhxAengB6ebeyYWz03dc2JQDre/SoV8nvQtDfmbrk+ZnnH0AAAAAAAAAAAAAAAAGrseW814LqPJ4AAAPhuvLgvQSZ5x5Z6/wBbx23kgAADir9MZ7lXnfCdMsFGQAABsG9G+ls9yuDUR0CO2FswAAHc89G5zmEJIre9c2JMAAD4SDzdcnLLaBcKrYAAAAAAAAAAAAAAAAGGWs2I0buX14AAO73zIa8bb+TDbiyoTmQqit5tNN0AAA95+XZmyuha3tZ8BYbpFloyKunJ7FfR7ap/bGc3Y/0OmubZhdwrbTc8+obpGnsaarKcmt8yuKm11R/fVbnNLaDIjN1qIEZueJ28sACr9oyu5ibwy4HYt+NAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAtBj9GRGqP2vWOLsFnoyshuYO7MzXJMZ2rVtVAHX57HnD2TQGFsOIW82s9t7UyImUUhqmbXcEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWw6Adzzv9prfKwAAAAAAAAAAAAAAAAAAAAB//9oACAEBAAEFAv5YjpRjcQ0QvFiI6imGpv4YiAV1CVqA7b7kuHCL1v4ChtcShoFKmwKHCH8IymQqmm2u5vrkzaN8Ei4COgb6TL/CEdKE3FQjri5dtWaaSqa6SQ/LbHc31yPzRwANApQ2olDiH+EI8YiOuTuUHCxhfs6XNtjuYOr5HwKSLgc3CGg0QvCGYR0Dq0BgN8E+phNkSDUe5nooX7OlzbSnLwi4QSXBes6cYLuAtXt3cRmVlfT5gsi8bvG3cn1iPgUAaBN3dFQghcV5TqnsXcBagie4bUErxuOGVh5+Nm08LjfOI2HtGVdzETMTLCISUu6fl1faO4LqvZ+4TUEbzuKGXiZqPmkMN1TV+smR01+4UuAwF+0dHuHGlir+AVCHKoWdWk28d7r3Eq5nlzOUWUnfRGYSvcOvdu4lW+/u9xI1MT8bBpHu+5ptYYfuE5oIfuE2pC8bghlouXYTDfZF6XQaKTtayiABSlIXB+wZSLcU3tiyfcRRNZyj4CQfO87pNGEtuzSJgmQMV26DpO4rXc28tbNwJz7Grz/HLbmkYOzoa3HNwroopokKXhCnjJq/Rk2L2x5mPfISTOr2uBwq4tq0WcKiYdTCOuFw240nG9jSzhq8TDU1dzPRwuvs5Q0DC5J5KBj7dtlzcS6KCLZPBy1bvEpeKe2VIRUkhLMdjOnCbRtarI1yXBkUNqMpHIyrGRcLikh4DtynHsbRYnnZoAEwlDhDE5CqkS1s68N9Xn+OWqyUnngJJlDJdscSSg+2z4VY2oS1HDKeMOgG+WS6NIy8CBoFdyR1ZwJdYjGSMN23gkkmgnkkGSUizsZyvGS+xr9cihb1hMgawGJh4QwvtqVvPNA1S7hOehA2cyBnAELwhl7lsgFvBufOQ95/jnbRApWGV6UDM+2I/wDrwMbLf/3sNypq7kehgvs2EisDdh21adRUygBQnMNEUyXFrFXxsbuYYQjbdICcHioOpsO5BQ8+z9N3MV1SiUgJHZr/AC8Vu2McT23ef4527PwwvUNRTAbI69L2x9ZRho3zFZw3bAtdlvIA47hxpaC7rlehPKzi0jroXfXckNGMF9mwu0wktyyp6Iion/X25X+vtyv9fbdBeNuhQXpblf7O26vSWjZSQR+aOxe5hRGNt04KQWAjoGPcgwDIIeB3JTEUIs4HjM1/m4bdscgktu7x1t7t79lpLmxdel7Y+sqYnGMMia4bouNZCwHLk7ey7dQBCMjmtBrXcT5ThgE1FT0rub6KC+zYXZqrb9m2/DS0X/irbr/FW3X+Ktuv8Vbdf4q26JZNtDQWRbZRAAKGxb8bC4t2wXoOoHBUfljfTsrmfQ8C/wBp1bfsh8D2383ct8AIRCAMYe7vx3t79loheEMXXpe2PrHS6bZCOarXrPIIItkcUgruJ99DdXc30UF9mpQ9PEQcM+3TrprYgmYaMThoB0HY7lum7b2u9PbNxYKj/bCVkUYlhItFStGgfRetU3zS0JA9vTmVRQiREBG8LvEdau78d7e/ZUi6jkdel7Y+sucwjBduAJ7fkD+pbtk05O4w3V3MIIx8AYDwpzcIYS5DWrdiSqa6VEJgqOCl4FLcGx70tc0ula96lIUpyKFEdRp/IM4xADPr6k+4qZExaemq87XGWTtm9ilAhyKFwcumzNK4Lld3Kvb8GlAsau78d7e/ZSBoXI69L21H/wBTlum7bQUge0ZtNRNYmBlUGxZq7V5dS54AtvFaLkctqvGONJQPb+dTWZn3V7tHjIzsMjOMIOeeWs5ZOGr1OhHQDmAoXFeRQqzbXGJT2ROWlFzlf5e74Q4SvcBGhc9wnoMrFXcKoIItUbwt6QnTIlEiWE5akXOV/lbuhFPde4SFGf8AcR3SNkTsqrFwsdBIYTzFeSiLRh3cOwyvTFIz7bKEB9U1AR86kFr3ZCqhK9wEqF13DeA3sWXkTxULHwyNwwiU9HMUL6twP9jcTaaAQMFwWIZdwR13CYAoHcGWLAWe2iFakodjNIrWVMxqpX/cNoAy3cFxX+XuuZPA2tGQobKV3ZCc3wDG4hxS35b4kgj4K1JEI2cyJhqbE5ikLMPxkZW25EJSGEdAyELwgcNDYFDiHNr89gqrJIJ++Q1e+Q9KTUOI+8xFe8xFe8xFe8RFIP2ChgEByHmYlM3vkPXvkPSk5EDXyEMUspzcNdwpQHcpVuyYS0PikIZL4lAjoKu20sBFVDZEwwUKI4pl0DKY3CCYbCvD8dzgYQpvJyDQ8V3DkWpo+RZyjepn7th/USs/R4pbsRHSpSQTjmLhdR0vXb+Y8o/w4DVoIUAiFArQKFGr8mAk5io58rGvmjhN62ENMA3zV7zUbLfsW4a/Ytw0PcOfGv2HPhX7FuGidyJwKadzT6xt3QUoNGNw0UOMdhXh+OfBTPpVtza0BJ
lMU5Zn7tiz9Hily4CIBRz8Vdw5kVFqD5iQ6iCkBLEmovEUyjXSq6JX2OJERMNAGtdupzEN91/kWcKt29n0Sds6QfIhu2FeH45jEQr6cW/X9w1+v7hqRtOdjEsTDxo2g5F1bsz92xaeixARCurQqGwm5ZGFjlllHC2CxOILPnvZZHIIgAXnPe9yuCI/UIoqxcwssjNx1Bvuv8ix1rXIQQUqzbgNDSG/Yd4/jePbX7lhvq5mSUfO4a1Y4GJbcv8AdcBpiXijxAQzAQRq9Z/3eRxSNxEWT4RsS4vOoY37cPtjHEB0EdDltaePASJTFMUN91/kWCXiow8UKJoOGOExYUM8K+ZOY11WtK/2LZ0sMtCbCvH8bx7cqpJSPnGdecZVJXLDRSUi+Vkn2Dduq7cRbEI9lMfdsB3R3oBABoyVcI1oNAQw0CYBV+3F7YyyJH4REAMCDhwwc25PIT8dUnIN4pjKSK8s/wAiCmtKk4y2HcnEAb7r/I8EvFSUEERMYcO48aQzXDX5dsHfC42FeX42G7HQK0CvlhrTCOfyatp2cSGoAAKmPu+A7o70GWUkW8SwkX7iUe5SLGJRtV6t2bWgJNFZNwlf1we5v8xXAhX1SntGcJPxl2fK5ME/ESARS6ZqKmAVeqILWzjYCvTuXYV5fjQbs7dydqpF9w3LMsNPRc2jUx93wHdHegy9y5gTrZ01RToR4ht+6/IWhqI4wDFGTmXsa1RuS5oplCvv8/bUIwfeTB2K4iSyZX2ueu78lwT8Vv4GF1fjobsLI/J9hXn+NBuwhIJ9Prfri4q/XFxU5sC5W5FE1ET4MXzqNdQEwlORkx93wHdHegy3G6NIXArb1twCcxb0YWJa25DsY5a2oWVjXsCzLbMayPJSB7STd3ELHt+RW4YBeAdajpHW5Foxf+bt+bZ1Z/5NcCYLXbIwVuw85daFrqAxtiNe03h7MmVHTZxESNzK+YnME/Fb+Bhe64IWzj2/RFW5dhXn+NBuw7Xfdce57BAg49rnSnXmPu+Bt0d6DK6MZN+rOWjc4S1rtU4tpa7VJjbIWyVKxliPkLQbBFBZCqLiM6HbbS65mFfxtR1sImjrVLaibw3PZ/5NNfmfcb8k7gciJYMbHR/XTda75NjLzEnr5zBPxW/gYdzZRPomDQ2Ha5pxL7CvP8aDdh2yVSSk/Os686zpeYimpbyuJK4Xwj88O14aysx93wHc07iW6g1/ZVt1+yrbr9lW3X7Ktuv2VbdTAA0nXLqyLgF9NQcbCry1sXMzh7gtCHMZdpDzN23LGyTS2588A8Ua9vV1Lhk4yQV4REoS1uT8TDz1oQDiXSjEXsK/LFy065tFd1eEqzmpi7JljLhA3CxQj0UrAj1Xy5JSUuZIG87gQfqI3HAlRNc9vlCY7jxTUirhw/dCOo4WdEDDwewrz/Gg3Y6BWgUAEChUMIY9tYozaOmPu/wDct5RAhF/Bt21Bf2gICUfgWRE+6T13fk2PCWuEtAYAo6hj42LbRpZ7sO8/wAZDd8DXSrXs15NrIopt0pj7x8AwCJG8c3lbYk41zEPs9tQK1wSSKSaCV/24Mc9zkIdU9oW+EBF3d+S/Atuw30mdq1QZIbDuCPWlYj9ZTtfrKdr9ZTtfrKdr9ZTtfrKdr9ZTtF7YzI0h2sU1jLEt6OoAAoU/wC3M06f/rKdr9ZTtfrKdr9ZTtfrKdr9ZTtfrKdr9ZTtG7azwpxzc7Rhc1tNrhbyEc9inNAgqNeWVoyShcIK3pC4HELCMoJnTto3fN7ntB5AK0mmB68uShbkofkKKK7lWz7KJEYTtgS8nL/rKdr9ZTtfrKdr9ZTtfrKdonbCWGmva1IBjLUgonbxvkaQjWMqhJ9tTar2ddbWggLoGm9kXU8qI7Zs0qbtkGiWJilOWY7cxT2nXb25G9GtW5SijZtzLGje2T9Q8Pb0VBk/4hTmwDlLv2Z//9oACAECAAEFAv8As3Y/MP8AmXm8P5wmAKAddtPN4fznm5Pl2y83h/Oebk+Xaai5SV1VT101q4VgorkQoB1p5vDFRcpK6qp66a1cCwV5g5KIqB8VTcJUDicp1AJXmDnrgWGumsFA4OSiKAfIo5AKAVj10lq0XLRHWComAONaljHGinW041q41qSMoI0oqBK65z101hrprBQODkohwPslwtw0i3yGKBq+aBne8MHC3DSLfIIa0qiKdIq8YU45EVOBNNEVKANMTFAaOUUTFNxBThWkUAJkVRA4NjiA4PNyfLiqpwAkj1KANMRDWjkFESH4g2MI6UiXqHynJxAcaCjDoDcvGbN4KlOOREvGOVcvEVob5UmhofKv/VXB5uT5cT/VUymLxA2HhNsZyOhGxdCZnQaHCnQ/0bl0JmeFpMdSuORmHyym3M9+d3zYPNyfLgYdAaBnV/qrsZ5uS5czzeFPKJuzOuRtyOORpy5R3M9+IjpQrkChdlrzBxpUT64PNyfLgvyNlClL5gleYJXmCV5gleYJXmCU4OBhDYzzcly5nm8KeUTdmdcjbkccjTlyjuZ76OoBK6qilA0EaBsQKAgBg75sHm5PlwX5G6RTl8sSvLEryxK8sSvLEryxK8sTY7kNSNTakzOjanCnQakbm1JmeGpMNCuORpy5R3M94jpRA65wDTK75sHm5PlwMGoMx2cIa0ibpnynPwgcvyCjBqCBumbMH1lKccjTlyjuZ71uRnuyrn4j4PNyXLip9JQB1zCv/fY7hHipFxkMcC181zPKDBwjxUi4yCOlKrCpSKXAFOORpy5R3M94hqCRuibER0pRfipZLp0A6hS5eIrVT5YdQNVU+ME1RSEBAcizim6PDslRAp66KhK41q1XGitdaANKcJCegxUQKeuioSuNYK4lhoGxjURMCYql4ioJiQMptzPfSiQHroKErjWrVcaBsY1ETAlKp8YFBVOuucDUq2oBWLX1jUk3AmB0wPQtjFriWCuNaugoek0Sk/4NwbQqBuE3wTm4jJG4i7SEdK6ha6ha6ha6ha6ha6ha6ha4gy9QtdQtdQtdQvw3R9RpI/EXO4Pwlpofaa/J8ADCFEcmCimA2CnNiXd8AxuEB+eDY+g53B+I1FHhEB1yKODFN5o9eaPXmz15s9eaPXmzUDyiLlNsZfk+CA0mfpmpTmxLu+A6PkTPxBlVPwlxbKZF+f4CTgS0A67EX5MSJievKnryp6MgYuT/APER1IfmxLuznPwgPzxGkFOEcq5+IcArdSZ+IMFuf4G+kFOAdhrcmLTfiqXQ2LfkPvxLuzrqcQ5W6muRdTQMqSnAOC3PiBArplo7Yo0YvCOA0ifiLsJblxa79QrUKMqUtGHiHAA1oocIH34l3Zl1NAzB8qTPxhRjcIGNxDmbq4Lc+AUGLouRqOw1uX4JSiakkeHA+/Eu7KY3CBh1HOmfhGlz6j8BI/ECvNmW5cW/NsJbl+AA6UVxpRTgbA+/Eu7K4N8IimhMSBqIl/soUCj0ylAcETaGV5sQxV5cUebYSvLiQgmry5q8uahQNkAdKIbiA2/Eu7KoOpumUtGTDQEwAOmAgJP6lDUelqbROjk4cCphp
0ymwS5j85iFKZQC0CYDXAQ1CHCKnNiGK3LigH9thK8uLffi4DI3Gjb8S7so7+Mh6Mn8gT+SfDSPzpINKR3aJ0oYBCip/JPhwS5jc6/MvQacH06VMBhNncGyNg2Gry4t9+ta1xBSynFkb7zb8QXLXXLXXLXXLXXLXXLRt48BqEwAHEU9FOQtcoqHAaIfhrROjiA4cRTAU5C0bSijoJ+GlTcQqGA1EOGn0wofmKm/HjCuoWjLhQjrkSLwhsJXl+GgX5G3/BVL8vgkT1L8FIuoqc3wUia7EV5fhJpcWBt/wdNSmLoOchOIaWJp8FMnCCnN8AiOtAGmxDhqHQGugNdAa6A10BroDXQGugNeXoEgDEURroDXQGugNdAa6A10BroDXQGugNBuUJxUIaYa1riUgmopeHAQ1o6fDjrWuKaWmBkREegNdAa6A10BroDXQGgb0CYBt8SgNGQoUTV0hoETUVDMZEBoUTV0zV0zUCFFIBf+6//aAAgBAwABBQL/ALO3SFMQ/N/zFt+GpzfzU0jqiYolHbNt+Gfm/m23zvfH2zbXhn5v5tt873x9psolVzQsGDUPPRpa8xGKUtCJqgqkZM1teGfmwZRSzmhj2DUAexha81GHo0Q1cg7Yqthwj0CrLyzUjdZq0UcmLENmwA7jCV5uMPR4ds5K6ZqNjYs4JRSjEjG9efjQoDxa9OoD5CGlMCInV8rGVGotCGWaxwn8pF15WMp+gyKlTRgq6EIlq2DzcYShdxh6PENnJXLRRubZENGdcZOZoR1xQcHRNxJyyVuFEpT81Q8Z1xk5oRrfiRQxBj5MrsJJgLRSof1UkzM6evZIjMpjCccEljpGbLklEF0TInqGYFKWRlTuRxjpI7Q060KonhbfiPfHxjmIu1JCSKzKc4nHBNQyYtHRJNN03M3U2MmmKhpRbyTbK1cGbqN0y6n5kUhVPML+UQygOgm/+iyqH9VKLA1JvyxTgUXFxI8KtPZQqrfLF/XY4W14j3x8W4e3sjGEw5EFhRPNplXR2NBJ8TmdW43GaBV42x+aAT4nEyt1XOa3Fv7PU+mtD+quM/1MqI6HuXkz2/6fC2/Ee+Pg3JxqXGroGaP+vH7GtsPqyA6uM1tD9M/NbQf2cjqrmgR/9M2GjqH9VcXj5Uua5eTEiZj0SJdHpO3Va9oaJUwK3KlhbfiPfHwig1dTTFddb2d3Xs7uvZ3dezu69ndV7O6qHaqoJn37FtsfqyAaOM1tB9I/NbRv7OQ0VzQIf+maHV1EequL1GVLmuXkpoyUdGCOZsinuApAUm3R6UcqqYW/6fC2+d74+EWOjqZkF263vbuve3de9u697d1727r3t1QzTrY8GpwOZ1HgcZoJLgbH5oBThczSHTc5rcR/s9U6i0R6q4vUZUua5eQhBOLlUsW2OcxxyW/6fC2+d74+CJ+A9xpal2amcSGk0QetsrRsZwo3VATn5kVRSPLIA7b5SgJhU/8AnMqiPVXF6jKlzXLyRvqbk16uWKbCi1wtsfqPw0XxaD7gyMUSjlJE/wDm2PDyflxk4bWhAQxQbqLm0TiUbcMI0fmqHk/LjJQ1CAhimkZQWEcRkWRfC7UqI9VcXqMqXNcvImoKZn7cJFuYolHAhDHFnFFbhGv/ADdKk4DVEOOi4nmQlPh5RXpMHpmqj5gR+RRI6Y4B86joWpeT8wOyGUqq1oZNi5DykYagSi0qWnSkA5zHGHkUmlGHUcGUoq1r3Rk5DysaagQjE6PNN0AdvVHI4MFyoLyzwjpXKiGp7lD6dMpBRoIyjJ0HlIw1AlFp0ebRRB08Uciwei1UWPHvK9panQphOcJTJRa1F9rb1ITBnAU1dqNjEm0FgFGLUry0YWvdGTYHsms6/wCDhkOq4lUOu3+AAa00R6KUgh0V9pFKJh8kvXkl68kvXkl68kvXkl68mvQt1AygzWGvJL15JevJL/DgW/TSqQbdBbPDt+qvVwNtQ2lE+p1rWta1rWta1wUQTODmCSPS7c6BqaD9HWta1HVfxPgN0RWUIUCFqda9RPPDNuijS6QLEUTFM2LGFQWR/wA+2r/Ptq/z7av8+2r/AD7aht5vStuBTiKcIbGivU61rWta1rWta1rWtGCn7QHSQhpTTwda1rWlvE+BBNdArWhADA9bC3Vyx7XzCuGtTjTJF+l1rWta1rWta1rWn8QmuCiZkzbDi/U61rWtOnpGwe+t699bUhKILDrWta0HyGTJwOGvha1rWtL+JnaNhcKFKBQxlGfmE8sW08ulicoHK7bC3UwjPTa1rWuTXAflUsxBwTYcZ6ita1qf8PGPWFVDWtcJgdXLXwta1wX588W06CeWXZdM2MSz6p8skz8wTCN9NrWtGH5GdK6g8WCm02qnSK5VS64BUm36K2wo31GM4AmT6R66R6QYLLCgkCRMDqAQq6vVO28LFbnzRLPqnyAOChAODxqLc9IpCqdBIESZAGhqXY4R3p9a1ow/I2/CCXEDa1rWtTxNQ2FG+ox1rWta1xVXIkEhJC4wb+Fitz5UERVOimCRcoGrfTxsC5DFEoxTXpEy61xUIa1INegpH+n1rWjD8jb8Iw3C41rWtalw1b7CjvH+AcvGC8OU9OGqiA038PFbnywyGgZwNpWtOmPUc4ulRTSTWMKDNcyxPNrrHT4uHjqSR6qLDwMB3G34MfHxk/T7Cj/H1rWtacOioB7ujXu6NElUDUBgGta1pRMqhXSAonb+HrWuCvPlaE6aIO11xbuz9Q7tQ5yvFUjpujddZTpkB+JUeq80augXLSrtQVPOLImp/wCA1No3ScrKpMTLhSj05KM4dI0Q4KkZhwpYDuNvwjS6r64yptENhMPHxmeTGHVHJMl+SHh4q8+UnIVu4bUg9ETnemEzvr6yIcAvj9WpEBA/E9pkgoQ9KvB43ouBLT/wG/porwYujCr5o3nDAwSMkmjy4DuNvwh0fmOMyp8thMPGxlgEScBq4DUVA5qjGYpBrjMD/RDw8Txawj7UtXtS1e1LV7UtXtS1If2SIVyhSaCqihUV25l2zhWtDKpsmp0zOm3XKB3YU1SOQNa6KyCi7dwuCBjiVwn1E25XAFYpGRTZIGSpy2MJxM7OCYdMjQ2qWA0Zqrr5RWkIs5qQSAgCOL5fqq7CY+Nk1rWtaE4jklVuI6Hh/BYr/wB/gunnA41+DILdNJl4OOta0BtKMcRxkHfTLsNj43wnb4qQCOoo+H8AKMoKaySoKlzunAIkEdRjnXGXOI6U9c9Y7PwfgOpEqdGMJh2G3UBM/uide6J17onXuide6J17onXuide6p0aWpWQVPinJJlL7onXuide6J17onXuide6J17onXuide7p6KG4jNHYoCmqVQKBI1dE1CmYMHDkqILrmWNRDiQWr0qwUQnFXQChRChoTAUHr7q4ISBCE90Tr3ROvdE690Tr3ROhlSUaWGlXaim301TJilK0STTr3ElKSadLShhoxhMORGSOWiyKI15tKheohSsoUKWcnV/7r/9oACAECAgY/AvnMfLQ+Pr32PjwhvofHhDetStok8UCHlVqtXMFVTNTK
rXKFarVzKqCqi2iTqOVWLmVQVisXNRWuUK1WrmCq3TIKboK14IUyFqm6HM2kqazOgrUwp0ZQvGHIaQhDmdDMWKe6MxikpcKJrMdiVK6OVEzFOkIQSiki3fM0EdgdgKQhATHPc4QjFAQ2J2BhtVSqC5qQhSVWrVarVarVahLdAjFAQ2J2BorXKuYwCkIUlV/IEkEdgaJmIUhCAjd+UxTU+NEllOxOwKKOyEE48u6Ji1SdBWvBCmYtUnQ5W0nYFSUjDlYhBlpkpLK5VQSapm3dXKrKJvNFUPKVZRzFVUyUjGaK1yqyjmKqUqK6JtpmbaK1ymixcx+T571tVqtVqtVqtitVqtVuzltJbzOyrVVB2s6ZbKcMo6xuc7U7XLBPZZYDsa9ymCrYDaz+AnSdlLchgPxcoJbEw2KpS3fOCW0lspqceU0GOcEt21bWanspbI/IstvKjmVVJjO/+ZTCm5Tap0SEE3LlgkUJqdyq3xP4CtTCm5GSyolGgSomVVCFXspb/wCCkFWpKpVU1UV0VUToCkdjVvSezB+Pn8jT+Dr+T6/kqqiyivZV/Pf/2gAIAQMCBj8C+c3zF6Pyy/FH46TBNSNu+n4o/HOT8d9PxR+Ocn470nY3ivyGZXR6Lpks3+d01ldUU/FGmYqHFfldMqpnoq2qeg5SeKWsdYVlZYpMCn/oeqmeirb6Keg5SeIM2pyj1Ujzeq6PRSlL0WbQMxwUipa1TV1+qPYM0czq8V1+q6/VT0nTdRyBT13Vqpnoq2ein/ncsrxunuP6R6rt6NQ4/tBmYZKVmq368lqA8UaO4/pHqu3oVN4/tBNpkuzr2+/8qVxsoasg4V4Lsf57rT9XqZrpzNMiix/UPqaLHWij/wCjVsu/dSFTYP8ArwQ/0M/5FLsE/GDLdeuzoW+38qbq6ZtMiu1q9X1Wix25w0XoaLLT9GIPajqNsfWigwXpugy/2/mKYU/7D3FDU7UHU6qJvjUg/jQ3S06uMTmHxFL8E/GDP/Y/rYpmEPFyb/obuceClwjlwRU+ATvCqN2n905vimprfCIYpmOwfj+lL8E/Glo8UzTEbmG6f77ndgn4xvxRTzgnYmMYJyahhEEzGDlE10LncAvyaqcNAzFL8E/GlmKBY2YkuhdC6F0LoXQnjUEtzuwT8Y34op4wTsTGME5MQwiCZjRlYp65zH6uUtHTXVJczjQ/H9KXYJ+NLMUGsMhJdS6l1LqXUupdW5x4qf8A5RjxrRUuIR8a43an2TneKYhhEEzFBovQazqPvxWZ1Zhfj+lLsE/GlruBTNUbuDhchrMtH0YgwJ2m2xkgig4XIazLvb+IpBS/sfc0MQwiCZimYpuEVdprpdgn4wds9Q+gpG0Rd5xlujI/pPou7oVjh+yrpysE1x1XfXktSaNHbf0n0Xc0Kxw/ZV0yaJrva9vt/KnddQxDCIJmKDhcm6mnb9VKRpk2td7/AE3XLU9MEWm6gE2Gpd1thtp7suVZhZeu9odXv/Kk4SMHc16m8P3XbZ0D13TIVjgvytl9cV1y+6tzLL/nbLxWZxmU7PfBIVjgvzNkfq9dcl1TUtBim80te6wLMzhEMUw+NHLZwX5myP1euuX3VuZS0GKbys6zOMj5InSr4Gjt6wmPq1TnlU+o+ayNGVlE2FS/0M/VVOkq3zX4WTP1euaocPkMeFacL7dk1nAJzd5yC6D5LoPkug+S6D5LoPkug+S6D5Ktp8oek+S6D5LoPkug+Wzzn+3tQ5uwHBtdA1RhvNmx5mhT0jI+iyvEjQ3CB2OxDBeg0XUdwWt9thM2uoLDei03QNe6cyr/ADV/mr/NX+av81aVyPVYq8NzN2f/AGFlDcIHY7Hum+ymRRb5RAXXwd0feBmGxmyp3usrqjuRsE33q9X+Sk01wuTcIDjsAwfQUhBV1CKu0wZTeiw0sw2NSzt6huRsDcYGuNsDk3CA7CZ6jF3G2H3gzmwe8VXULKWYQdRXUVz8w9VmbZAZWGvcbYGy4qwqwqQCDBdTmNyL+KbhAY85sEeV1ilddQGhBgj7rfv+9DMIzp3WwNduNuym4yWVvT70NwgMQaEGjYZb7lIrObT7bHwNiZG2A7jbsZKYcfvWuahuEBiOodkDcbftAXC5Z7TJTcJIjSAkEM1tHiE2NuMDtxtgm5Xq9cFVTldYi1NwgMTR4L8Qq8V2tQVos0RZeg3WFt6Ok77Iu4Iaj7TYs2UYKdhFHb0hWEBrCo3ihyafBZmgTmnZALUGSnqFZngEKYsKAjbAdxtgbjAWwNcm4QGIYL8VYWTUbJyLdFs0zuStTdYXJmmP7ey0w1WBOc++jJpNzFDuSlOhyH/qvun4p2S1SkFldsC+BrdxtgbirFYqgputMDcU3CCdSuVyuVyuUvspDmCGpq3WBHt1grObRcpaglNZtTALgQpZQfFc5mTQXadYKm665c4kUWrI4VSWVydmvK7mn1LLIDxUiZyQMHSV0lc9QXgICdxt2mXgm4bJ2n47IcBssU3ZZRadyN2ch1KZTcNkXDisw2E71NZDaNj4JuxkysqZ3IHFWFWFWFWFWFWFWFWFVNXCkCRVhVhVhVhVhVhVhVhUpIleCm2KtTNEwq7aLVaraJlZW2e9AaZqwqwqwqwqwqwqpqrO/wCbVzhdS6wrZrkqUzDzVrgupdS5Qub56//aAAgBAQEGPwL/AFc8O1hnE9H+kxxljL2buwMvuNAsqmEKKe+2oZ6NOqJZhhlgs8P+l4vDsJHJ2auvoV+VDAzltOrDPBOJxa4P9FMxoTGgaMJeq6hFM0O/WZQ280q206LTa9IMS0dmbr6FflQx0adWGWCWiJf6OfeiNrMNhdo/qq1RdfuyNXZq7OhX5UMdGnVgnowbZjJG3s5xkjF8jZGTTEsw2E9EXd0qtUXX7sjV2UUyT1qsA9mbzeMc0E3XS9WYyBaU4huqXFpd6JB23ZakxaRXJqAPuwsK1pEdU+JbvUwrJ1hCZcKc+8YbqaR0PMu8hxMXX0CvKhjo06sEoLTijU1f/wAzebxjmgm6qPq1PmUE5N1SotLvNKd12WpMWm7xS7LvA4FT+smA1f8AdxW34crCuHGDFuiemtP2jCsS072GsrKUgPMpmi0JiBV1hSXudWmaRISEc7XP82Pu2hjWvcEFn4eu4ob/AJpFpXDkEWl3ilv6POWdSYtN3kHJd6HLXlJgMX/Q84jOSmwveOQxz9C9bl9o0cS0bowzOQQulu1v8wqwbNofZg9uOca/AsrxpMg2JbU5mLRvVM+m/wCMWw+axAypSUudoQKW/afqzuQ1CBIT+kk5IS42oLQsTQsYwRC3Loa56utpCUStcWePFH6Z/wCr/lFOm/aXq7SVHmTYszPCYpkUtBapktpDCubypzZ4xXaT/wCL9sfpn/q/5Qlu+KLmKKwqa7FnHmz4Autd46vs6dGNat6C3cN3800O+laVvqMhFpy80on3vOWfJTFpu8krl3vOWp/WTAY+IrvKmj96E2VcOQx1ihe5xPfoOJSToI7Ei7aEzvCpTx1jK2k9sw3el9pL1S5x26ReMDbXpMBKEhKRkSM
QwrYrWEvNqEsYxjcOaEuJUqquGsNlQOb/AJCLndaUHGnKYqbWM4KoY6NOqJx+WXeZ3hUp46x90k9swm8r8Tz9W5x0Ui8dnbXpMBCQEtp70YhhUzUMoeaVlQsTEC+7hW4llpVp1pOVvupjncTdWzxapkZjpG0cF4+INcOVjvGX1haKdrw1nNBv34gWstPG3T0vhDtJgM07SWW05G0CQwrp6thL7SxIpUNUMVlE4pVE8ZtfST3zaop62nM2qhNpO1pG9gR8O3Uol50hNWpGUk5G+7CXn0JqbxWJuOqEwjaTG0nCviBqvQCWKhIxk+CrTD3w9WzAmrmArvHE5U78bmC7h/VVqi6/dm9WwVUGS6lzi0jJ75XcEKvy/VrWw4q002fve4mEs07SWWk8ltAkMKmKplL7S8qFicNXvdK1ru9xXpGjkH0FbWiGK6nPEeGNOdJzg9h36p0ybp0KcXuJE4q75reOzTOc7YOdZ5A3tjLRFRQviaXU8Q+CsZDFLd1SDz11FxnHotTlDHRp1RUVbpkinbK1RWX7W+kRTuW0g53DyR+6MEtgttxNpDgKVpOcHLAZCj1GpkJf03MnAYnF4+INcUV3P47uu4qqH0ZjM9s4oSAmSUiSU7WxrEEekYTzzB0KTFVRKM+qO2m/FX+3BXXreLqaxSuNSPZ7S8pI2hijFlzQBnz7CkrWhZLhaeMtM7J1RPTgu4/1V6ous6KZGrYIoQo9Tplc3i8BvlnhhtlpNhtpIS2gZgMmxqKJ8TbqEFJ2tB3ovD4dqlYgVKbB8NGjdHYd9KVWVVC0N708cNOy49YouqVpGQbNxaBIVSEukbeeGNptOqOaSqRqnkoI0pxkxScWTlXN5zfxD5o29lQXikSU0ssrV42Maou6oHfsJnujEdUXj4g1xX1EuO46E2toDZVSTkLK9UXqM3NN6zhnpybG7OhT/wBhgRZ4Yu3pV6ouz3ZvVhrHyZc0ytXzReV5OCa8TaFbZ4yo0mMsSVw7CgrUYg+WlE+NxFdh7vT4VQZ7yYupAyCnRsNzDdq86qcz3lQz0adUXW3mUpw8Eou8Zk0zVn6o2bv0XUGKEHvLaR9YxePijXD3vB1YO1sKnol6ovXom/KOCWmMW9FqofbYGlagIM7yQ4R3rYKjFmjo36pWafE7sH8uuCWhSkqV5sUTl+tcy+QnmUSA4lra24ntYLt086rVF2e7N6sN7KGXmdahDrFZWCnfcqFLIkTikBmj9TT9VXcj9TT9VXcj9TT9VXcjFeifqq7kY7xT9VXcj9TR9VXci53ruqRUc1MPEAiXGTLLDR0oGrsNd6/BqDPfTF1LGenRhJ2F2ozppzPfVDHRp1RdTuZKnEnflF3rGRVM15I2bv0nUCKEnv7ah9YxefijXD3vB1bKp6JeqL16JvyjgNRWOStYmWU8tW4ILNx05o6bwx/Es9qOdvm9lOk5UImo/WVOBOjNQfDdUe1KB1egYalnCBPhiQi6ugT/ANhhIETOOLu6VWqLs92b1Yb1Qn+TPgUIdfrabnX23yi1aUMUhoMewn1i+7HsJ9Yvux7CfWL7sewn1i+7HsJ9Yvux7AfWL7sBQocaTMekX3YCRkTiHYaoUlNpVOtDm9PHDbM/SUSy2pO1lThlp2DqEGYpUJantjLFP0adUFaUzVSvJcJ2sYOuKUTm5STZd3sY+Y7Ogu1Jmp1ZeWnxcQ1xd1MO8YTwkTOuLy8Ua4e94OrZVPRL1RevRN+UYdedMm2kFbh+inGYqKqsWU0bPGWkZkd6hMIp6dpLLDYkhtOIDYWouvoE/wDYYGC7ulVqi7Pdm9WCyN+KtgifPMrT80XldjmJZk4hO2nErYaME+xD1M8JtPoKFjaUJRVXPWcVipXzVo4pKHIVv4dzDUVzxxNJ9Gnwl5hFHeVTPrF6qcdM9FrLvwwdDadUVFI8Jt1CChW/FVcddxGqhywCrFJwck/vDZLdcVYbbBUtRzAR1gpPUaXH/wCNvk8JjUIvLxRrh73g6ono2NT0S9UXr0LflGL2I/kGLwI+0Lybe5Iy2O5FplVtmmKGEL02Tj+eBgoF5kvEHfEXWRk6uj5hsG7xaQeqVKudlmIVy0w2+yq208kLbWM4OCZ3sEsDdx09J1xGJC32zjC8+LJIdiBX0I/yFMMaf5idG6M0Iuu/SWX2uI3Vr1L24ttrC05lJMxBOBVRWvpZQnN3x2gITxVU1w0Spy091Ri6ENpCG221JQgZAAcUU/Rp1YBeFAn/ACNOnGgfeJHbEJuu/iWKhriIql59pegxabWFpORSTMYVPVT6GGkiZWsyhNyXChZp3FWXXMhc3dCYFMJLqXONWPaVaNwYLy8Ua4e94OqBsanol6ovboUeUYfpXfs6htTa/wB4Siru+8UlNM6ebdUM2PirG1CXWVpdbVjS4gzGEvVLqWUJx2lmUfkvwyhbq3zYdrJZvo6Btxc1njqUk9Ze8J0KmYp6hszQ82laSNsYKpttFt5iTzSRpTl+aPyeoXYqKSfVwrvkHNugwSf3cAuoVCVVpQV82MmLNPTC6RySXRxqV7wF/thdzX22vqiVcRWXm9tOlJhL9M+ioaUJpUgzwTgrcUEJzqUZCFXZcRNRWO+jVVIEwnaRpMKvCvE7zqRkOPm0nNunP2JLriTTVf8A9TWU+MM8E3TX88zmCVS/2KmIsquvn5d/zc9RiyimFDPPZSnypwKr4hvBdS5lNOhU+FR7UIp6ZpLLDeJDackUJoeb9AFBznFWcsNIOVCADvDDbeQWKr/6WsSj42mP8NePPM5gFS/2KmIsqusVEu/5ufkkRZRQ9Tn39gDypwHviC8yEzxthVtW9mEc1Qs2VK5bysa1bpw1lFTWeefAsWjIQukrLHOLdKxYNoS2VUpRklLS5nei8myeM4wmwNw48CU1SSh5v7KpRyh3RBNz1vPM5rCpcKFTEWVXVzx8Pm+4YsopRRbdlKfKnAe+IL1URnZQq2eE4hwRzNCwEeG6ca1bphdGtXNOg26d7LZUINLTUnXKafET9okbmQxSM3qBTNpWkVNGEy4q8UzlOKARjBhV43G71epUq2unJsi1pQrNAZVSqrAnIopC/nEo5pSTQNKxKyN/PlhNdVOmtvEYwvvUE6NvBzFaxzln7N0Ylo3DBe+H7zJTPE2VWFb+YxZXRdcl31gHyZRZTdfV/p83LyiYnfN48yznSVT/ANiZCOcZQX6k/wB07jV+7o7FgbEfKnZVCQfTVvoG9OPKYo3lmTTp5l7xV4p7Hc2ClrNlKBNSjoitrhxeedKkDQM0UVVPj2LDw0KTiME7Hd2EtnLP2CU684lptHKcUZAR+qUvrU92P1Sl9anuwP8AKUvrU92P1Sl9Ynux+qUvrE92P1Sl9Ynux+p0vrU92OJXU69xxJ7cTBmNOwUhd5UyFoMlJLiZg8MfqlL61Pdj9UpfWp7sSF6U3rE92AQZhQmCNgdjt5oboEGaLvT6Q/1F5Y3Io6qfpQnmqgfTTiOwOwqEAyervQNDxuV82Cqudw/a+npt0YlCLPDsJ4J6MM852ZUc/YK8+j7fyGIkQF01a8
0oZCFGG271aFWxkU+nE5u6DCaqheD7Ks4zHQRgvH3hzXh0KEUfQI1fITiqvB7k06JgaT3o4YeqXTNx9ZWvfwOXW8ZM3hja2nR3cOSMcYjGMYDTsqtU13eiSdK++wU1cz9pTLtb2ccEMVbGNqoQHE7+wrqKn6vzNOuyi0iZybsZKX1Z86MlL6s+dGSl9WfOjk0vqz50ZKX1Z86OOzTL/dI7cAVt2gpzuNK7REJQzV808vIw9xD3MG3Ez2DvPo+38lZVjSYaXbPUahQTVtZpad0QFpNpKhNKhoi8veHNewo+gRq2eONqGrlYV6Nj0lZLOvMnewShLjai260oKQsZQRFPWAjnpWKpHguDLsNEZYfqQR1hz0VKn6Ss+9BUozUozUdvAdqHLjqHNLlBP/cjt4RF69L/AAj5DFlhLFepdbQ5JK5be2DDdVTOh9l7GhY7CXn0fb2DrFCEFbKLa7arOLJHJp/WiOTT+tEGoqKS2ynluNG2Buy2G2mLscUbRS1zc/E4vai8veXNewougRq2GKMkaMD9c7yk8Wnb8Jw8kQ7UPKtuvKK3FHScNsZc8WXj+BrZIqPonvVwJGYOQ7Ak4gMphQZXOhopt0+2e+VhG3DNXTqsOMrC2laCIYr2uKpfFqGvAcGUYBF7dN/CPkbC8vemE0NWv/HVapH6CzkV3ewl6dH29heXuw8rDIiYOIiLwpWBZZSu02jRaxywmLuCsVpTihuFZi8veXNewo+hRqjHspnENMGmp1zu+7yUsyyLXnXsBtYjE+9VAuesXOrpR+FWfvG9G6Nh+X0q/wAbXghRGVDWc7+wB0RtKiy8o/l9VxapOjQsbkJWk2krE0qGcGBF7dN/CMLU/DTrhom7qeZQn7saIku7KZQ6MQpVB/jqnvbH2c9tMPUVWjm32DJQ7YwoXpyxTuOGdRTehf3U5Dv9g706Pt7C8S66hoGmEitQT323HtjHrE92PbGPWJ7sKcerW3nByKdk21KO9FVXvYl1KyqzoGYYWaVhNt6oWENp2zFHQoxilbSgq0nOYvP3lzXsKPoUaoxxxeCMmDRGPHH5bTLlW16SCod43nO/sceQxI54bqKdwtVFOq00sQipbMn0cSrZ8FfcOCor6kyap0zO2cwG7FReFQfSPqmE+CnMnY2DlzR9IQm4q53jj9OcVo/l9yBuxe3TfwjC146dcMj+mnVGM4KS9kok62vmHlaUnGJ7ksMs0XnQn7xKXk/u4jr7B3p0fbgfIhmgpV1CzoGIbpjrdWUv3koStDktDQnb24kIvT3pzXhMUfQo1bKor6lUm2Ez3TmEVFfVKtPVCp+KMyRubKWURNCMmUw1Ugnq6yEVzXhI7oht9lYcaeSFtrGcGPy2mcnRUB9JLIt3PwbPjCe3AqG5tFJC21DKCM8IW4fx9LJusG3mVvxe/TfwjC146dcNYu8TqwaYvMSxtoC07yhsKdP85pxB4J9rsHevRduB8gHW0tqUMziAscCoSzVXYwtgco0w5o8AxRboHwpSR6WnOJaN0YL096c14TFH0KNWyprlaPEa9PVbvej5CUpwSc8Xi0Vfi7vPN0WPGed5P1ZwSTMqM1HbOGhoKm1zFSohyziMP3Rz/VqNFXzHWF47KdJhNNQ13XmlNBalGRKTo4sUFT8RVFQ9VXi2HG6djvQceaKgXeVqorX4ZTnKs7cWZb8U9oyp678O9v8AJPDF8dN/CMLXSJ1wz4idWG9p/wAgwNzDdktK/IPYO9ei7cDC9T0JbC6dHOL5wyxEyjl0v1jHLpfrGC4GGqkJypaWJ/PCmnm1NOoMltrEiN7C1W0TvNVDJ4p07R2op69AsqWJPteCscoRenvTmvCYo+hRq2V5O8oqqOaaHi8UQwn4krn3q+oRb6pTd4NuPz+4a41NAF2H2HsS0GKa9PiW8FtCtFqloKfGsp3oqry+GK11S6EWqihqMsoofiGgddVziw3WtL705MW/FHQNmSqpwItaBnMVFzXVUKFPd6AbwrH+9OeUdQVeVYXp2DX4+btQ20t0VNNUp5yjqk98nuwRPEcohm+/iOvVTU1V7HRsY3FiKqo+Ga55NTRJtu0VTnGC6ekOqLyZViS9X82ojQpQEPXdeVbUtUSadt1p1ItKK1HJii5vzauqacJpQKPm0k2kSymK29FXiul+GaVVlqrcEnHTnlvx1G6bxqqSvM+rqqJ2HDvwumqBKooXk25ZOKQZiK98ff8ANr4Wk4WukR5QhnxE6sN5k5XEBCd0qGwYV/IacWdXb7B3r0XbgYb091T5ewu28UJsvuqUw7LvgBaGwvWin6Kwh8J2+TF6+9Oa8Jij6FGrZVS5yW3VOKB2wsww78QIeu+8WkWFVLXJVwT1Q5e9wXqbyuxpU6hk5UbcU14/E97G7Kd4fg6TlOFJ1Rf6LgVVOuCjPWXX+SRJUrMXp8M1R9BeLBcp9pYGOUX7f9WkWrlQthhJ/nZP2R8V1Ve45N8hVY639pYUFTlEut3hw/si56C6XnX/AMtJFp0caxKQx4Gb1+Ib0/Kruc9jZ5S1A5wM04r27jcq36rqi+dee5BTtQ546tcXR0h1RWf/ANNHliFe7M9uPhv3EaoudN8uvtUSnCTzGdy0eVDNQ1W3gl1haXGzPOkz0Q5XXeSphbKElREpqTOcLnlsNf8AWnC10iPKEM+InVho7mQqbq1h98aEjEme7OCNGG9a4/dpQwj97GdXYO9ei7cDDeZdcS0DTJkVGXf7ce1s/XT3Y9rZ+unuxbfvBhtOkrEMN0RJu6gmedOK2s5SIOG81ZhTJ8uL196c17CnZX1q202lKpNZwN2P7r1X7Y/uvVftj+69V+2P7r1X7Y/uvVftiofbbC2lvCrYbcGJSHOPjEJqqpT1wV1kB9DSRzapb0VVxfDnO1HXz+NvB7FPcigF91L11Xhd7fNW0Y0KG1wRUXZSIfFLWNlNVfDnKUrIBLRDFTclUurp6NaVtPrxFXhCG6G5UFtmoeNVeRlZtObe/DjpZ6zSVSOarKbSnSINUm86ukaUbSrvljG0MUU7d03eKKko0c2hw8t3bVCsXFHKO7F3UN+vvXbWXYmw1UN40qEKpaJNQ+1VIKKy+HMo0ADRCk3RVqraUi0XliXGJyRQXgtJW3TOTcSMtnPKPzigrH3rwqKpp5bB5KAFgrMpaINbQKK2Cy2iahLGmc4uYUS1L6lTBp+0JcaUPXDf1Oqpuh9RU24jlNKOPXCaz8wqrz5s22aGziJzBWKFvMUqaNuseSGqVGRIJAivYH3JbR9VpIwtHQtM+GGgb1p5hAnx9qJqvemA8eFN3WDeNR3q8jQ2554evGucLjrptLWdQ2oJ04aZlwSqH/TVPjLzb3YO9ei7cDYZIyRyAYs5E6BsKi83UyVeCvQ9GjF85i9fenNfyKtwxcN+NjiqpWqer3ZcQ/JXitSfxd5nnKOebmuR9YiClQkpJkobY+RpyoTp6D8Q9ujkjhi+Om/hGw5IjII5IJ24xnFowpvKqb/xtEqaZ/eOjIBtDsJevRduB8jjhuoqm1U11JNpbisSndpHdhthlAbaaSEto
GQARevvTmv5FzaTjikoKlM2qijbSdri4iIfoKpMnGTxVeGnMobvyDdMkEUjRC654d6jRumG2GUhDTSQltAzAQb2pW/wNaZ1Esjbv/L5BDTSC466QltsZSTkEIbcH46p9JWr+l4O9F8dMPJHyAGc5BCKm9UKobvEiGzidd3swhumpWkssMiy22nIB2EraCnUlD1SiyhS8ke10fCvzY9ro+Ffmx7XR8K/Nj2uj4V+bHtdHwr82Pa6PhX5se10fCvzY49dSJ3LR/hgdZveQzhtvumErNN118fe1HGx7QgBIkBkAwVtS3VUgRUPLcQCVTkr92Pa6PhX5se10fCvzY9ro+Ffmx7XR8K/Nj2uj4V+bHtdHwr82Pa6PhX5se10fCvzYLYqqJIO2vzYo6Vwgrp2UNrIyTSJRIkMV7E+qVX8KtqF0lcwWHUnFPkq20nPg5MZomU4tOANUiCin++rVDiJHbMIo6NG2+93zivCOB2kqmg9TvpsuNnPCnWUqqrrUZtvgTLY0L7uDG4E7UfaR9rKCMu3CaemZVUPr5LSBMwm8byCXrzUPRIypY3NvBX17FTTJZqnLSErKp5AMfFj2uj4V+bHtdHwr82Pa6PhX5se10fCvzY9ro+Ffmx6S8KZA+jaPaidbeq3E+A0iz884SqloUF5OSoc46+E9njHVrwpk1LXezyp3DCnLnrBZzU1R2ljuR7AXwM7KwruRIXRU/VHdjj0gpk/112fmE4S5fFQaxeXq7fFb385hDFMylhlvEhtAkNgULSFoViUk4wYW7dyjdtQrHZHGaP7sEtss1acxbXj4CIsm5aneA7sBIuhxuffOkJEBV61bdM1naY46j+9ilBTd9MELVy31cZxW/8A/jM8Dlb/AGN//9oACAEBAwE/If8ArAK0IljpscLWdOsACjARQFdCOy30nTH/ACdUVwBpA/W0YLki7mxErra2L2S3Tga45wSHDYd+CtN9/wDj1jZrKrlbeLLV5uX1rz7llK/gKcbFehpwJltFUrVl7bQ/4hnYinOPXm9Jogo04thlDHpzhByzaaZJa/rJ59yxKnV+24AqBqyq5OGKafdFH3QKAND/AIdMy4TU+wHE1xzh8iy+jgEtHM+s43oS+b8nCxvpo4YweRLdz6TUNWvjBltK7yRf8JVF5owalaHXr4Mp0+6ea8nAL5T6mwfGIZbbPymVAYGD6r0JkmM19lMW36zex95AB82UzuKd0BA7aLK/Dwb835ICoGrCIbQkpLK15Ni3r2mQcVjx6N9COTRzX2U0mtPwAEV83OicwxJ/3g9Zx1OJtKJuHLEds05U4xbKJl5kTu2rF3Oa6+aOubmxpe1SFNdK6elQRrftg7jcsDzHNHLjKBSsisfFdPaCZY7HvHeK2nF0ZraVdca5g0wHRSIBJGoWrfd1+kCy5mw0RldYF6zLkbQMq3si33LaHMHVVp2kyvpmu9AT1XthraJXw1S78P6wvNbB1Yw9WDjaXQe1Q41DuadKiW611PYqPJd4MdwX2sQ0fT+wZ/SSJ7kMMIHpTmllE5L89BxzhxHooB0Di47uFl31l2lqfYb+ZobDuTSDi1bCek835JatpghLnMbglV6XKOIzxpfJb7L6ysGtAgHIDECijQ4ESWjfaZYWZSjd/tH1hdjI92C9jh5JyQmixTTSrsNXpMkMLTl49oNYYgqLP0IVRq6vBO1LBV7rUepF4PscA6HVBwygvvMW51WHgia6eUiaeXOEKAERrW6Vz1g6n7Jc6bTTTEdqScKWOSfialYznFz7UxEB2yeD8p0R4C5gQxbd+OZC/WhfklzKZo5X6OhXpDo7QJehxCzVGDPK9HrKiRWLa7eq3P3m52pofdD9HtRKeoX2nXyICX0wX6eHEdNXeBzS5oMfUZlnU5Un7DPJ+SHiW7uhg9XEuD1Cfsyj4lANXeDQ8B1M6IHQe5AojZW/r2soIANjkSeSckvq7ZICdjsuEHKJgBoVADQrwGXfXE/JcszKHkV18uDLYd3Tpj8jEbmEYO4ffwHqjpPuEGg64oCuhGe3+0lw6Ue/gdSG1SXXzbHtAI6cgdA7HhssOtbJg6rJK/DDaMD6u/T6PU8G60o9iOknnwtV4L1324935sSn6pLfst9kdaG5sH2RK19Q6/sCa5q18VLRw3Dm7Lh5NXXR+9HknJPXre23u+IFLFTu42Xqq68bF6HbvFtV38Pz45NhHkHJDVfkPGlQAeo6+ZqeTu1vu2TEnYmoU6EsfY8CsffUT8L9HM7h3qf3CIr7N4MZtxEIYX1MoJ3fZFCdJ9hPvAAUIPHEl2/5PzEIu/aXfmeRckxMtgz3fpBMa7+B5hz8TGIOuPpvF0xgRiclvwlbjJ/nWARJ6Hofa8NLL/cFJsW/Yt2TzShraFUrlZXn/CnknLxVCsA9h+Yt6guWZw6eBIkCfGiTR93wDOzpbSF2kdYrDc/D6MZDBl3f1HAs+2rj0kPAINV9XPK+SAQy51DPtGVtA8cYV0/vl/EMyvbKvxGtN8XQGzsHg8w5+JizmvnF8mZY49JmDmeiAGbSbbroPiCwfc19GPiFjaSfuCxNETYDEFDlFKFirWG7LbTyXknknLxoffyWYq52FW+FBz8QkSJEib8bCF7YRNwzJm0QHQ+jVykOgI/ZjuLyGP7nGo8zwWDGuis+jBYGrX7I/TSrf4SZdR3z+xeOgUoagPnvMZLbOX3V4G1FCb7+DzDn4GOmdrA+wTPSfNFrlr/cBUoegdvBqrbBwr/EOHkvJPJOXhRzN0qaKHquvmW+qbta/j8GZ0dY1W7GIQ1INg8/o/rTb4JqD+xlPphr142U5OJmrBvoY+7Eeb9vKPyS9H+NBiEx6MPo5lk2k+xOTVHx4gZaG4Vq9iVbmG1gfBdLlYvQ05Bx2osHpo7+HzDn4EOSInbeDp6Pmp4IFtGrAALQMsa1nJszGdLM+IcAmZs64ftGLunZ2H7TI7uk11g0jyiClQCsGTzHPtC06YQdjw2/OzhanLl4IyRvvrrQ6t/SAOmP4Tzl5GJhTziXWA9QaX7w4j6AnqTrA8BlPd06OrVlgmtxXVbp8Aml/T4gDsQ11vsuFfStgOcA+17RjN1yYYN/qQVpORPU4qEUIFH3lTPwHJ9nrnX4j8qhfSHw+DawAHh8w55VTqzAH9MDI9JUKmSlNWq6MJi9AYepxEx6hnGusuqcehfDb3FPTMbwF0BAgNWgWUTwXdvXNuHqYzItMq/dL1Q45VKupaBgLoXC48QCMM9F+UzWO88hoZYLgIsr13rPNTUDEY7mp68DZbQAgyYXqwZJNUDD902lmWRa2Q2+kjFGcUHIsx93WFeVsZ85iBZ7C2b2iPymDc+8bESsGe5svRBgLT0P33mGiY1DsrDc2WxuYHHD1GKuwIn3dYMND+EvxkD3zDeeVtAU3mO05yHWUeXpKDBVnPn+AxxxBK2y3lnT9UZrXE0x4bV0uUXlCljmjvAIZHHOm2egyrOZ1KIExjFr37ZHNk558pRjW3PR+CgKlTzj9tJpgSHIr6O8Q8SpO6YhfKA+weHqgteswEOzqMtE1kK1
Bnz0h0PrH3qn1jjBF231e1lwkk9ad71LwciA3A5/jcTOkin/AC9oOFzq/nJIZPYeyiL6LL0+YiYYUgB2VX3fS33D4TfvfwKBboS5dtjwHNyPFSXQEaBkXYPmEgtRcGodtYlNcbZlnTLwC1U0wGVZcoOXS/jD+GonRn5nSQjh1vm+DWNcpfcnJxpfdAAA0PEkGtfQiTNZRuq8SyasMB4FkyZBsF0G3ZwPexAIJNBk8CFgH4NRHiTJtGneQQcZlhHSvAdfhpYytEsF8cyh9gEFETCrGZsh8lnPXXwAotL4KEbd9Qy9L8KhZqP9EU+kNw9Xgv7XHAgGeTjlniD6joRHdafoW65Ps/gTt3mNT+qKxlqaU7hUeRtMN2HU3tknDz7n42jGD6nh613m+AArQirU53Z9REtJNurvgGoKXY4PTjjjuHQKmUQi87qTAXT1lMAodN/6OOCb0AnPR6yYiF2daBaejiEqG8HDIHNhJrNemjlDz4xImrThpu7wkT4EvzsL1MD8znCO6afLn5TXJkYY5rQjrnD6H8B9kND+H2UnaWo9YKeKkAFjZgVkSedc/G/C0hpuOeItqomBgbTRKjPCa3ZrwFHM1MNwxF7EmOMR+SdeBoRB1LmxeiL298wMPKzpYb0zFsMJ1Vq8ExZpdTk1SHz0Ps9Znfh8gi9nC5cuXLly5azKmlROhq9fWWU6PpC1/Z69HkkFAqun0P4/7IaHbiYo1cLOheLDGo7Y5eesHgyjq0zJZAutgvOb+LpDk5/ZeBW1TDHOfSK0hVytvOI3Y8x+YZekfGuK1W3gNI8pQaQ9RKdy4zm4Ozh6MKbBQQ0RyPgRMDaaASwaq9r/AJ3B0DjR3BKZzJurgvAOYwe1y6cPkE+E8BTnKc/AJ+WzGBduTGPkLj3QQCNjkfofkeiGh24+V8vGgID28iOozBqGaE4uhfGwC8Os06AOoHgx0MMu5Q+mO0K8W0HlUTMireg63I6eDrKDAD8E1+V1Y83lPApEHo5Lu6D1mmOLG7rgqOQl7RDXnLue7pARiZwyBJ8wnw3FogLEke2PoIL0XSO2nUf0zR0neew2i9yc8mrE2TceAhE1NIwjFKj12K0aTs+h+f6IaHbibp04WuBRx/XjuA1OyGw9WHzbo02nsY4tvATdI1/TiD5mYBy43UnnnLAKFkQzkQjqydBmyU5sz2SVdpiZ0+k6CGMEuXLmPXP6dY7zGhhXn7Un4YWw5uzM49Q4Wi9LfS6pYlurkkel2JcuXLlCzD48pnA5PU5S+R1fqNU8/wCk+EnwXFVeXxhwD+nPspwqvMM076p8uN+mbJe7T3di+h/L9EXsS5cc6zue7PJbAGhUuYas1XmLBc1wQPnjF2vO5oNojg7lxw885fFeK+m+29VigEGOnwwY8RAivZ2lwReKStoq83LXqEzfiGB2JMqHzYOF1NJ4r0TU0YZWLs1iCqcWcsHrBr4zwvKcifeCjyHH875ZUKf14PtUyzlF1boe/iQbB5nFTmjrD6GvMdE+B/BpUF9lxIozMB8d8ZSuZ1O5+fXw1uXnnL4sTUK93Y/al9f4BGguY+otUQma04DpnR+yKbZGb5Hi0mRXAF4YKFlha8LGPeYQ1Uu0M6zKnIQGNGeB33gCdZgvmjDO0pmmb5XA5+mK9ZgfI8fzXlnl3JxpyX5yfHcbese30P8AKdE+BxF0S0GAqus8xfieYvxBdJfsCIuWWFMdyVnivy6HQbnuoDasjen7F6dPD/vPuXxHsUuloR6kNyKml3MG3WGg2yyNYulR1OWSet+B5LZ77EQWEdxC4UscY2jEFhiO3w0Ke0uCXc2/EZmxMY68xKAXWlNq5VV76TZa+fr1iKgI7U1zmQ6XcE1bfQ03gjjQVRFwpeaxUGwdOnDhc0PUBSnoxyCQVa1RxRLyQXGqqRpj2KUBAoE2w58pRg4CgmmG/LWAWz5tIqckg6KXv18FDy7k4r5XldNIFAcscSOWeyvod5TonwOPkHJ4CeJAVkB5p4CO9IV7Pt4dl59y+Fumtdod7KmzxCC5dazqFV5pwl6RHJsXN63sMS8sAbzrGrfoSzqReXLDz2l7U8daz2gZpFmXbLff3TbtIQ09+kNgq5pzcx9SsFa6cHNqtLy3V+kd4VNWDFrRhu+k8/5+OgvzfnPm/LHqRmuxrBxczYCFgg+Mvdl85RXqT1ovAUPLuTjQUe+pUO5b0nUtXEgWoPK2/Q/OdEXsS5cFvUGVppxJEjAe1T+yyjZQdMAXKsEJY0XEuXH2J3uw5a5cyQbxxNQYUBqerP1Z+rP1Z0xbxnioMiQB1rZpDycKrAo+lSw7vNQbhxeMSqxQGJBcHX2QQV/2gwJgC0wGpoYtWhttFldi5shzbpqq+VXfIRFIgHJlvVHrKLbCojAZ/vWKS7gHQ0FyvM4McF6Otabc4XJVToWc7f5jSLWBeU8wIacevljuVKDLcpkuS6aXMx2vd+h6x/RptgGOcz3oNcyDlqHbrEim4ijnAWXNGRHA07u9TSO97LiQhoWuQG4I2KKagi4g1aRVZEG27JZ7Cu8ee3F27BsNiX/uvgtC1fIN4BkfW1PY+h+c6J8A4oOpc6adNMgv6zT7sR4K9Xh30j7j/F959ylkOr9svq2n8RJCxikOx2gWw8/ZKT+Ft9y6OH11fpHa8z4P/AlH6pm6re32lPpNDg4/NZpvMLl9ppgwGh9D810TkNiek9J6T0npPSek9IgXgc2Fu4AlNpzTvAUjPoagOF3pPSek9J6T0npPSIQYZUubeN1TuBjA3RNLfTD+DEdATJofZmjhUgdARrfVo/Y+7+DPYC1ag7wKOHWaMB5fw+d5otaPVeQTvoMQKNXWc8oFxIVA+iZ3sAC2OaFgQGgeOhQoUKFCvYHqkTR8230QiqtFuxG+gQ4h6FQHQ4VDHpyWXWr+EKFChQoUKFNUTdZnqzrsGDBVXMzgqdPuP4jErCDSdmJmB3iPOdD3QfRtRMwJqcpW1p+AhczAk0+V/G3BHUMqCN+kq6J6VtA2WZHRgy1L3Q672/cBoe6oKBAOjea7ief0NDqwcJDoBMnNzvtwCpgyIaQjbxBQoUKFfOfqP8mDRxya/r/CN/XvFoKPWP8AUTRbmeSXJnnDTpAtg2FH5tFcnFjNzI/BjQ2+jkdFn4IFaKIB2PAeoaMA7IxZfLDY7qS+j6SpBOhs7b7y1k80HvHIwaXu2wkRRRT5WB7Gfl6hw/xX/idftx+TtpPKq+m//9oACAECAwE/If8A2bFDNH/mloP+4m1UAWfWviTQf94+N9a+JNB/3j431TGasNDol39pz9xGAzZpPiTQccRqw0qiL/24YIqRXEyDhgKtRRUnXcCIUYVfgYXNmhYnkMdxcz01BuWHVwwRSoAaHHbDWOAULuMiIVIa/pKvvPxO/coFcRKEq1q4xQ5TQcOqnxK955fvwAKY55T+oF+/FBe5xj4/O0AUcQajYZ5xCAb8GX3/ANQC3PgdZeHAPjeA799oy8r/AFAFHEzTw3gMfRyKu0utjxE6i0ONBBZbS32vv4kuGjs/Z4piejPitZc+Tgz
ZuXiwh38G+N4OzEACjwki3io/R7zrMxz8fqk0ErpzlV1z48BN86cFkvi1P4Z8Lwb43G5Ze+Nx/o5e9DXa8Z9qaCPAgrtfwCx4L7/i1vDB1NcGbIvB6Dv8A+NxVQpypvxpJJLzuaPowx7x32vG/amghwor7X8AKPBa/fxa3GF3F2jR53mQhtVzQg4fC8A+NxFyvgzc6M6M6M6M6M6M6P0e86TAcvH6BNBM5yld08eAmudOC1+/i1uEAWxU0POIAo08PwvAPjcbAlL9OSFMutB8ROolHrGggst5e7niWszU7H2OK1+/i1uE+BPleIMO3EYz4XgGPo+WALPFohf0insPmV87nBviTanbGCsZoOFPYfM7vBviZtiOz9/6gV778Vr9/FrcIbLeWun5zBEs4hqiP5J5HeVDwvYLZqcd3zA+NNoENtXxcTq750lPcfScxowdl57SuOVqIsiAKNJT8kwOOs6y9DrE5aoncjq4lUa+OYqUftcD4t5ee0rjlaiFyCoQJsEFkqTHpwtdJmGq46enxGNTgKqErlydy3knRHnaaLrz/wDB3/WUr/CtS4isfqYC2dNOmnTTpp006adFB9zwobJ006adN/H2lwqn+Cx68KF+uZpzOayGXwfI44SaPb+ElW0SreGe7/f+DEGhwcBtDIm/geCqJ2p2p2J2J2odGNDQn6gNpjdB4fI8Gh2/htaevEsZV+Jnt+NTeen8gqI1kQzZp9FFSpUcqOxOxCrSVKlRzwQ1u8qVKmh/AV6JVvETU9HxaPocdUy7IH8mhsYtrR+kT4XgJw8AoTX7+DQ/gwRoeAiSnfU8GKavh1ms6PihrKehFNk5CYljjnmVHT6Hq+BA75TrTrQjLHRb8VVErOSa/fwaHjxTV8JwS7IdnALEax4SJPwHhGqaOOE8HO/Q2rKlSpUqVKlcFKELN14aveVK4aHiKxFufwJdBssmGaH8PfzhqlQ1hpxNvhUqKvofV/iD4JNN4avfwaHiz/xbzmeAzGEcq5WBuUjqZW+Th694MazR41p/Q9fwK0cQHEri7smemv38Gh4rKOrZmZ8SyGsVk0h1MpiPQGs2re8d8AXpQVacNOG13loMTWm0vLSYSzEodorTxNZo4qn9HOv4Nbt4Dx4GZJr+DQ8Y8hYeTZBLFTD0YqK3mS7RCrOqwo2cByaJnc9IzRmv34X2IK9kKt2ywk1cTWGnHEeBlX6HqeBUpTnKc4lqwFjQ8GpNfwAPDVd6Y39Ys7lOf+IjSkxDfeWW7YbXql3pFM2xzCglSjMUjNHvCHdK9ibRzcC0mMbQGzgZLWLpQU+JDnzqQnNGVv0ddT+CvBV1Jr/xeif4r3r/ABelzW8dcb9tD6Jqfxpk6QKmv/EGp5RKn+DEQKlqmj/AFzvE1v4WywIAo+iW5O3O3O3O3O3O3O3OvDmZ17i9WdududududududududeGgQT1junwF8GMQxRwAUx+zgvgAXSa3VwZs7c7c7c7c7cOdOYzSz6+TTOagNuCRtAIANPDomICdCDbR40D/wB1/9oACAEDAwE/If8A2dZThqXtPmP/AJn4j7T5D/3XmrpHZ0MPf618R9p8x/7vjk+X+tfAfafMfDf/AB34vjk+X+qY/wB5+Dee+sfwQwjyd2W6V1VX2ZSeOS/nb1IiGjafBfafMeOFOq/BvPnU/g/c8gPlji0+lfZmMt9z21JosbJo+vgkDkODmZZubsd2Zbnloe2rMXZ7vuzS0+z7MwSPue2pMU3J2ez4CR/K9pvKPLSiaS/l3jWp63+xH9Xn4d47QpIJTC50ztDzn6iBarObxG+u7fVvtDyH6nnf0jbKDe8e3Co4N10Jm59h7awxLPd92YRD2fZmAV5an7JiSfh7P0kP7nfo3jK43CPtyERW5eIZCPOecE0HzP2viAXSVe9T5jwL+zn6IitRim/TkIqrcvG5RdMSjwow+WOybk6z8dzh8mAcJRXLy06yhZeHIfz9kvmrm54klBAgrU6OwmuweX14MNBm3TyqPXp0OfV/XgC5t68nU5M6Ed1uaPpx+Kh9/wCDYo5XI/bKpADL5Z7pbJTu54jWByiUgJh59HXmTVCPk2fo+sUq94OCDXoavXTxapx8m5DbefDX1nzGaxiqDhKK7DX3eJywJF55ue5w+TNlnHnoZ7xVW+Fg2ydmGFtz3P6rgJDyuhodlz4nruv2fmHD4CfJ+AlfM9dD2jJrXL38Otar/r1nRMvs/p+/0cHOxfjEt9hr9+NBW9PTafMZn28+ukXksXpr83463aJ8MP3nTZfOZ8mO7Qt7+Kx9H3g919v4PmPsjrw+InzP349QR95sa1/B+fHXkA9sPo9i+eZbOvx4Ll+M+Yzso/KIl8z46O8ndFfafJ8a/LPvPlvt4G6TsLmmI74icg+/6j+4B+5r4i3rX6jrw+InzP340HyxK+wD1tnUe5Oo9ydR7k6j3J1XuTqvj9zOdaezcFPv9GpPn+crnX48hz/GfMZ6C/KMF8z47O8nbFfafO8Y/LPvPlvtwqr3didhV/Q/Mq2O+D2Il9gPzNWXq/aVPmPsjrw+GnzP3433ywy/GW0HNvOeSH6nkh+p5IfqeSH6nkh+p5YfqIK+w/UW2/oxidifGJf7Rfrv43c730bT5jMs3j11icri/PzfjzrQPll+06rL4xPneMfln3ny32moSqO7BevAfco7SzVfD8x9kdeHw0+Z+/HpIPzNiGnvk/P07XRVnpMjgX6bPTX08W4+56G7NOu9SvxPmM1A1czrIv8AI9X58QjIspn+QfY4fO8Y/LPvPlvtw50drffxVLG1eunxHXgNcUJ5/Aavhr1NX4j86Snv4lXd608tutu30gv7afp3ipyGUffo6R2hTxNWj7HdihkJ57D5hKVqivWfMeAf2M/TvFVzOUffmOkdoU8bLk8peIroeWY27OByOfd4fO8Y/LPvPnvtNYtD7TQR5Pz3ShNJs8a0K6ZlwAMjq683oRnbGcOVKia4Se3AHlZHr/dSt1+2f0nHWXlV/nt1gPNoHM/ZtBzGGfweQjtwc+IVRljNYzNt/wADpC/sR+jb6Tmjqvxy+01VO34UxyzzOpErfeX7VK3zzFfH7uO7BqsIN9FUXLlN3jlnqtPTlPsSX8KY5Z7n7Jkn3f1U1XudUftnoeGx2OPWCKhS6imceJBnJ94rsBfJwuua1Wj+mU3uB+GY5R5nUi9vvL9qlG/dK/t95ku5Gx2IXBZonM/fKP8A3u9dSObynJufGdNIlMAtk3rjkN4xh9SfDGtGjv8A0i/pjd79OnDDNzNnuTX851T9nvMh7mvvc1/ZX+ifbor5WzCLotPXn/4O1Omb8SkdGHc/hRUas8iHOPtF2dn6nXm12J/op/op/op/pp/pp/pp/tppZ9USvATY49U/0U/0U/00RGn+K91n8NPzLibbdnZ/gtzzHpp8y5yCZfj8/U1738ABR1lOQdoa9BeVnzPJYbnD2lxKOifIfw7qz43+JpoCvaXKDrHly/g7pHpt+5c0jB/k1i1XFmKoZzj7Ty/6Ty/6Tz/6Tz/6Ty/6Rmg+p+prvQ
T8n6hC3jfI/fx9G+T/ABAC7JrGxXMfjsxFTF7LwD5j+G3POHZu+svgYBY6zbTr1HivXRnsP3DBRL4NM+n4P44ukflc3+AAxjX8Xd+46Gjb6J8nwArdSrGZ1/ZOpDH7yOL7X4QTGc798x+28A+S/g9SDpuhPEGCXLgzJ8ydeZ4QueoN/BLlxirRFM9OB5m3B0j8zn4BcuXwXN2KmfnNz9fRPny+I/P5eDZpe9Y4lwaHT7Re24lz5b/BjLm9DYly5cOGM+d0/R8GKfz/ANJcuXL4erjq6esRGnWMfkc+JkdoV87nNA96JV5DnA72uC46le5B6/Q/my5cuViXoztP8Jn+Ey4EObgJoOCXLjLCC4z26/1H7KXLgz5748A/nZcuXL4iULWsevuczhr5s0YT5d2XLly+AJRj7fL3jpH5nPiZHaa/d4uPUU77+/gHTez3z+PofyZcuX4WuXK8hNgPn3frg/YS5cGfJfFqZs0azzcuXLly5RF2x2tHsYgwJMY/j/tLly5cvgJrNmGXwc79PSP2PzxMjtNTu8e+WvjwDsRH8fQ/ly5cuXLly5cGxfo19ohlPJnWUg9nZ4P2kuXBnyXxbonB+ZcuXLly5fDNp3mfue/C5c1l4FV1Vc2Z99+dZTcWrd4/edaTRUtK5p+Y/b/MuXHl2mv38FLLlxfB9z6H8zwAtZTjHHZCre4hNqziMgtRW+nU2i9pxBnzXxF3B75jKsO+6Jwe1NGX4dy0gFeQRopzRvBR2QGOSN4JonujoGgTk8HV/UXQlEMC4/blGbX9ofesORRMt26+cqrlBoQjmzWpNnTOk1/dly48u01+/HsbPBcuUrmn7+h/Mly5cXmcvAy/QyS5cuDzmT8x+wly4M+Y+KiHMfaWQh9nWBb1o85cUmrtGkqNFa7ax9UuntO0dYF8xpel44aX4dPOXBLmF2P3EEijBrcHBF7EXvPtH801TsZGm/LpExUuIP1bfaL5v3ZcuPLtNfvxa7poTBqXLnyD6H8qXLlypi/6TrPadZ7RKlfSK+odDlHKXLnknKL2EuXLjMannPMZ5jPMZ5jPMYqk5q1czE0b271lJRsJQoffWUNNmD51g3cbPtHGRD0IQF6gYPmITPxg7TCIFNq6kx2HQfOs9CRKL1SVKBRe7jE3yW/M7nBBz0PZmsl1/SKdgNZ1qv7suXFhiFAeC1/lQvRWlLG5cuVRoYPT6H86XLly+IVgFbS5cuAB7+7F7CXLly/COZmXMn5ly5cuXLly5cpPRfXX2mUuXLly5cuXLStcD8z4X5ly5ctLQNk1GXLhXf4j6J8iXLly5cuXLl8ES39neO2oz4iXLly5cuXLhv0mqwKHdF+HlLly5cuXLjrc07x2WrM08jqf1Lly5cuXLgi3QmTNGD9+s+J+f4QP0J+46W1+iaZBPOH7nnD9zzh+55w/c84fuecP3POH7jsQD9jMDdOkW+CkwFTzh+55w/c84fuecP3POH7nnD9zzh+55w/c0hB6fuELuzmd6n5OsudZ514J24WZSXLNr5bz2RnI4AXpJUMOXn24BqpCIM4MuBRF2Pm9uB8WJ5w/c84fuecP3POH7nnD9zYy/cpye5GD6/fimbXrH6jjFe8ZAdX2TADucssBb4BTJMfh+fea6vcQfbNoPaDaz1xG8vTb/wAafTf/2gAMAwEDAhEDEQAAEAAAAAAAAAAAAABIBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADBAIIAAAAAAAABIAIAAAAAAAAAAAAAAAAAAAAAAAAAAABQABAAAAAAAAABIAAgAAAAAAAAAAAAAAAAAAAAAAAAAAABUJAIAAAAAAAAJIBDgAAAAAAAAAAAAAAAAAAAAAAAAAAAB8IIoAAAJQAAALQBKgAAAAAAAAAAAAAAAAAAAAAAAAAdBXsISvqAT6hSAf+imbvIEP6AAAAAAAAAAAAAAAAAAAALuARtGdBVwRNB3RoJHQIaB0gNMAAAAAAAAAAAAAAAAAAAmAAE1tAALSQgALR5ABqEgBiAASAAAAAAAAAAAAAAAAAAA6AIAkwAAAIUAABQAAAsEQDRAADAAAAAAAAAAAAAAAAAAAaAAAfkAABfIAAHhDHCMJQK/8AviIAAAAAAAAAAAAAAAAAAUgQAdZAAAR9iAB4XpZNC0BVJJd4AAAAAAAAAAAAAAAAAAdgQCfPAAAL9wAB7wAAJC0CkAQAAAAAAAAAAAAAAAAAAAAEgADDHgAAW9wABogAShDcQ5AACgAAAAAAAAAAAAAAAAAAB6gbXDYgMUfgABzWAWZi8GbIBQAAAAAAAAAAAAAAAAAAAAE8QxARFJARwAAYYerE3chBabIAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAD4gUADASAAASAAAAAAAAAAAAAAAAAAAAb9baFAA/rACAACpgAQANwAQQCAAAQAAAAAAAAAAAAAAAANptt4mgJxACACVpwSCRJmChI1WOCAAAAAAAAAAAAAAAAAS2222ycWkgCAQWGIAAOTmBRJJJV4AAAAAAAAAAAAAAAAAD/AKtX/wD7/wDAACS1tMQChvrBGSd6TcgAAAAAAAAAAAAAAAANtsAFvtt7QAAWAB5gDwCS5aQ8kSngAAAAAAAAAAAAAAAAV7PYXZJ7TwACHNs8Eftt3/SQ+CiEgAAAAAAAAAAAAAAAAbN5tNhyt8gAAZJLAepJJJqu0yT20gAAAAAAAAAAAAAAAAD7bbbyi7AAACySSTCWQXpYtL4BVJgAAAAAAAAAAAAAAAASk4n8lcEsgACh4jKbXo9xobZcAvYgAAAAAAAAAAAAAAAAN7UAXbPbYAAE6Uk7bTjZaEq28Bm0AAAAAAAAAAAAAAAAALtol5trNpSSGrNVJBY7J2+faY4zbgAAAAAAAAAAAAAAAATpNIRJSpJJJKW22yim2W2Vdt0wNiAAAAAAAAAAAAAAAAATNtttumNtttkC2228u222rFttt8YAAAAAAAAAAAAAAAAAVNttgahNtttfCjCh4fXdOgJttvMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAS7JxgBDj0gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAkAAAAAAAAAAAAAAAAAAAAAD/2gAIAQEDAT8Q/wCtOaC1jZsMco4X6Oa5vKAg0AEZKgtZvPMORtMs5nq6f8gdj3zNXvphQ3GlP1sQ+opEAAobsQ0hVarcqzJ+b3efAFALVQTYcMvN3eBUNl9jYgKgFrgIdPzPq/8AFplwESsvRtnQiNhN3PAsRFE0TEOKs0LWBEEbHR+tSOf03l+Vxo1s9/DTcGOrM7K2xq3LQ5vP/iTpQ+YsbvzPZSoBfi2783jclK3NM1bV0Jo5izC+iSWQ2vF8n60IU67F4QJWlBDH2Zeu/Dml5ef9IYNNXyIBKgoP+FQKaDKsRyxF/XVjsA0/KP3xBQMqA9YZMtvmW+ed8sQexb9PrIa9RPPA/HHdZlsd/C5oYunmwzC9o2zDMG+/x43ho+XSG8BeyaS4D1Er+EcVbSQfXkRoI07JbdX4B
UL0e7hf875Yqpzfqd8myQYsPdvIF0iWMceSChH19CPGsxUPoCHp5e1VJfogWLMJMXZDf0EHHe7K1HcNEc8V9xpWtBDcoGe+8ulhFJLttPYoaAlZKtH3T6Mrdfas9QEDKDKtNi69yA/Vkimr8RqFF85Q7YlLG+cVYRONIT1kpG72w85iPxRBXeF84JucEDcoCLRuodYLGoqxaueeTRzj9wPVqyeneLMToc5wo3K1sNQYLsxrQepcrAqACOl9q9kUefE7BFagDKq6BHjEQQ+EtkEQCAvdYgiGwOdStQsiLnaisQAnnFCrSuTGgtWtYqqNUKA4VXG2FhGMOLMSIsWVbeEg5zb4Syr4uANl1FQNMgqqYi7pmo1a5hur7eDBYwXIaS6tu3B8YDQKG9Ciy0DvLtFMbHIxbosc2Vdpk6+coFSunrPqZwg33iOMLsAq6Oagt1gbgg1gXlKqeacn6S8YYt3aJlrlLdajoOvXA2FXrZS8FlC/XRbAAO3G9vYZ6TxClEIVNkt1odCtgVrRwIo2Ze5KHhuuI+6MxGc5RD1ZqOjNZI5nbE52SVtVKLoLKXUD3aDg9CAAUFBwWEhVhEulBpwmSBPTSRcEyyBZrmJfUpdaZxXC5aInFBmg9YVFvRdexhUgZVGwpsSsB1dw/wAACgGiHq5mqLJzngJJRyAp9CkGOEaWoSOgEUNEdRmonvWjFPYvWOClKyCYZNa76aNIf1cA1ZcW66zvWJTqGjejz9iWKXTFte71YKrSjRMMoISJnIBlgW52OtigJGTEDqiDe3eZyGY+xwflrWdMEgAXWpHGaqebxvwFK4SxCNKKOhvGDSrJNGc2EK3yEJBYFwBgwtrLq8XSZxlkuhovApNoLghIs1qiqCsd9TbKsOHB1tPhU/R80rRBoAvdKHWY3i1Aih0dqbjwvZ/e/wBIlQhwUDf0gFEtTkAVzWI6QKYWohJ0zgUWVvY9UsxCulqo76I4gzme1bc1gzZ1ebz8BSPstgp1ERi6JhWa+WkNnkPOGSOLWI5EeCAXL5DjnQlsOuBLQjTaAYACC0Q5BXgawluoFZ1qkkTv3I19l+vDT+ClG7DnQ2h3S8Y684A9tHO3lXEURGkyPWGmY2ANv0ZTcA17OnxGSoLWclonIwyuF9/U/bwH+sakIE0CreUEermjJXIA8I/2OBYZPI9YmisAQkl6jXJ9HX0sDVA3dh6QpbwM2UvYvfwHuLB6sVVXKtrBpE1MkA6FBRX9QmWkX30xRbmyQIPS5FtiWGXY8bhjz5dPFp21S5VHQO8STKhuxtfVOCB0BOIwt1eTY+IFyTchwNpkttDv54lbequly9WPrpW+Cr1LIkAMAR7TRXqvscBpBFFld240prlpMRfRAsyzYoyOiNn21od2N2o2wEa6DHK95rpxfIkJNlc9aCII2ORPozpAv5lf5ikkcO935fBZF2zvvxFwJfOofDLfQ1vIJN6Gt9VInqnv4USX3XxiYOW7I3e0LIFg2ASGF7VkQSp0OdMhmDTQ7cS9KjranjB2WFafLlqXtLaijWzgmi48E6WHtHhJTcx5mYHkWVu9BVekyJFWIbNlPeVIEUkdKr6jcdLQV1awSypd+rFO5O9Lx+D9SSgSGviYXK0pq9/AqVCkbobpSGIE9n93DAgJsAoJtthojpLUnqp9GV08uRX+YuTMHqXHonEmGrk77S7tdXLxYKxvlYPgiQRQby8MkCtUcAA97wMSiOtx6OPGVYRrumr2hOFlrmGw1NoLcGGnERgDcvfTxA7KgK4DVjnyjQrU2jmsBByi2C+7G7CY9eTOQ3gXjEVJztMEqxVJXGvOWWLSsEDoFEEPNK+csPqVYLrEACHQaHhP/s3kATOU16ErIaTBMZq814unTp06XvMwun6Re9xzSgaaOSdJhJjQex9GqSFjUZOzL0gw1pq6x7F9uNBPM7GeIWgb4hQDG5X1SnpGAWCOrJawNbdL0uFJxy3G+NOsQxtCkOejtLjbSYQvZYnlvLwEBUAtWg6sBnXl9XxA7PPSdaKHWypmKc/WS915W1hYhKiK0pa6q5XwXpPyd2MpvJcQ8u5eE/8AsxcjFscpWMmOqhX0Eo7QDUYV0DwRBU++r2gyFlWYzP3phuQjdAP0cDDX7so604l+2ssksdWLmMHbjRFxQ93iMVbgHRf1fYmOftyQV5Jo7jC0kcOrI3pnWGCjnQO0a1YlccNwXkviNZiKnT3IFZuEb4dC1O63F5THisBYA0AnkvLwEABsebl4geK8oKdpVdrcT3GsuiI5oSD0y+BAC0oIeC+doKyqwdd3cvRFx3q55dy4WCOWS0PnDqMFLLH2lSyNOIvP+oqlVq2sQjVDUTbVsz16NmvWFP8AzWA7qnJs8CAzGV268Kn/AHGUuAtcB1hcbGoijAsjN1+kMtybA4jeLW+eSnG4D0VXBoitPJqmQuxEq8KjOpkkBWgtdAlOctZuY2GDFdZQ0ptTYAboUoU+YmAW0KFyAJbt3PCGYdQNUpWRbuu4xCR4FuiKAMFynetZmnf2hHR4nqEyMjQtroCwLtKcOpA2Cyr3cwHzL0yl60KDmrvw8l5YVALUAjkIWvNdXxA2PhQvSh+8vUc01EOtrIaIiobqTKBTZdyoRd4gwREaw78DODLyhr/iYGRcuukDoa252tlBaZgQMw7YiEd1LS3GoXBYecQjkxi6eDAW25oczp94VOojRdobI5lN6p02dIrm11iNMuRmJb6jFYFwFQ20N1HTy4SBRe9RSML6WwjpgLWA4u9YNLwiA6OoHIDw0mNBzZ7vjjE4RlM0CMYBdUwrUXMuAwDjsrYFbVV6/SVbEAD4YETvdCGLIygGw1GMXFy2iWw3v46YuakgdxERvqFz03aqelDoYUlNF4VfNO6Vd2Wv7WKinSJW5ndZYKe5xyGUHxmABjunJF082aau9Q6wuqaw5KMG9gzCVcVRB2THtDAMM2MoGnvk5RyKkK5MxjQOiWqq2uV4c9BMRrC1jpK9etWNatF5VAADAFB4SDq7oF7AEOZrVjsJwMNAIJNdCzNHaoahSIEKtUx5pDy9KNxi7zXaGiMUegKCW8oqBqe8xIWLQS0ZtMudlDYgLAKemcGqeCIpvE+YmvXQJ1ZV0iOI45CwoBWG2IEBvsCz3GFCVUubpKLajZbiMWL0UaF1DnZlj5DZcSuMByGZ54cCEE3WUWXbgjnXfWMUSr1TCz7i5ROQtIgaU5G5jpcVcVYy2vaYQsh1m94DtLXNkG9rTJ9hgs4KdGtAFdw7vpdfWT6QU0U7RzrnjX3Qv2fwIkrIsTM0w2x4LO0fV8X4m1ABZMKrJXq1dykIizW5pxA0U7SmyZXfbwL67ChVDYAuZpj1moA9AS3zrbpjOqB6oTG4y2K5LDm6xzly8+AXgyuhHW11LGmNJQwr5zWGNMdoq6q95WbQ5XQ1gJUFB4htuENjm/QsksmgVbwDu8ZTrDy5hq9PAbNmz9lyU/eDF8oTC8kMz0ESJ2TwLZoChQAiOo8XTqjDPfvKHx4QD7SMIngxdcDwhjaPqwVZouBD9N73FTpOSRsldFsZ
YRBqFadPBX4oRcWQRLGzmcaezI1gDTS5XzqGAIeLN1RUZWrRHVDF7b/o8BhTnqtuGmmN787nDcN3QlkPO6Gx4rs5lPZfxPoQGoJoMxwS5cuXLlwkY6OXuMew40MTS0L1IJUoKVrKm05VbnMzGTooKmb2R9LOFAQoIVyyWTOYGjNOHclFgo0PT4BXPPx4EZoLYAnJ+Fo9csYyxl289hexdHAsKTRqS7o486iIo4RpOAwFxyZLjFK+p+YngHWyEx8g9o8WNoCDvRi2HT0a0pscN97cGZBjAEHeX7gSXncupNwwL1S+AEtAH1YUsPX9+21txNm8ta55vmNOL3XAb0reu/ElJRLkJvVZ9sAf+rqGQSryC7BAIAsTIjPP17sfNVm9F5HQgVgwcvoTpZa5mhL7y+8vvL7y+8vvL7y+8BGy9SzuSh4SKppcotsTUw3HdQIDRA1EbIvTKF95feNDfU+8xLyV8Cg6qnfTi0CDbeKgOi3LzZhYxajuLzlDunKXECaSC9Yhv7aEe5iDKdYk5AATUDhDU7n3nxCA0A5JcyhdzjOqTYH6is91JYmlZGZHBsXato81blw6u1/mDDKK63cAa9cHNhs3a6NeHn3OYCTbmy3NlubLc2W5stzZbmy1shbwRNEYID0rjSnRGrNhkQ3wcOtqXYI6THSho0+h63lDftvtLlw1egAKNw26cIWvW6f2yjmkPPab1SVLvJkZfAx0K3ph9ogUbWMKPYglBswMLlx0vT7yrZZXb93g6zxye8HXPWx8y3KF5ZYja5jLBuO64OgORTkBYmAnCYivepfUmUDISUp1R+TuQrUFpuoDrdwirBn90AHkjcMB24uxY9QFqroBAtimwagnTs3gvqS+pKrZQU7kQpgdIwOzpHPJK3kCa2RW60eHn3OKA3Ll8B0EuXLSkwmjNXAPRbLGCotpVzWDYptpDLHBZEciP0PHsJv232ly474pqjABMGA4RIUs3AD63ZHSXLlCUPHnWksqGmdoD3uLym6XLi+L7xqYT5TDI42dns+I1SloGDlDLMjddbZWv5eAEreM7ae5AfKmjm3P1LpbZbT1rW2suUcvBR9Qa4axky37dEAADAUcdVQvRvHGJEeVlidpU97rsIfCItwiK++pVsaiM8u5xelCtlsI8OtYiKJ1i3YkqqjrBxIpeVABNvSlDrg2rLWzFo5iW2T8DyJLYzVKK6kz6mq2TPxCVa3RIBGmF9/oePYzftvtLly6vBATQi1scX59yQxnyxEkuqEBAoq1OZuVeiXLg4PXtor7BlZTus9Bvk0gzt0b9UuXH7P54HEQW2Yk5LdqdoHd1pi/tBMHtM/HwlTg93T2iYWtp+1bFfrsoDAGhO6d07ob1Ky+7aK4BYNnZGIOJKnmuYYTcmCdywxjZdx5NajwBqEqDMI6pBFBp21wBVwfNbO6d07p3Qub1u/6IaCQfuIz1BpArVdFX0dkKElICOo3FGrly5coJE1ZHOqzEKnJg4YwRtWVW9gGLly0N0FyUpgYMXndvWVXt9DdP0zTU2faW5y3OAqWxyU+08z9k73k6zOmuYZ95bnEC6Dmxav4czSYEbsJOvojl11vwAbg6Aau73npX3Etzluc3rb8+M5hSY8sKLi6QEQtMygxyFMdr1lnKWcpZylnKWcpshNrdjESAW2HkQg8BrEoFjVti2Cxph7kiTKMqdXyK0K7FqyzkyzlLOTLOTLOUKNgSw1GBORMXqJQimOMXMiASMP6JoGvLWL5kFa3q3qv5lnSWdIjbgkbDpk2cg81/ERMPu6e0M91CPs90PH4H3LlnSWdJXvJGww/Q/xkUeg+0slkslkslksjLyHH1vPxaWuyNgdUJWHWVoKwrRLsBh4I81qlksntH58ZzUX2sPGdKeyes9Z6z1nrPWesO7KgtIzE4qg0ztLsMbNXJ7EbCJezglCi9VnrPWDsJWVIp0zAlDI02YJc0LjnBZIFFAmEAxdMcxCiqXAworyECDAMSm0GhF2HPOJGOTHTtHLwl0LFDRQDooLDULPExKyfsePb611c8fzK+d0JiYnV33Z+h/BTW5s+0vrL6xjfa7JdDbbjEiOLrjqZbYuhmO8kR/qKD2l9ZfWGyo9d/lpcIxzWjKf1uBU60Yt3/VL6y+se5t+fG0GMdeb00o+seHXP82Tli62mMRXfHA0TOhlL3BNZoFGLBrMljUpbxHsWQ4VGQJWqnOI0pByB7Wh0vcRXAOi0GE68NvbsRBUjS5AO9rguM6Gnc9aL+Q9VRMUaC84DgBS1hKRRhQLKK5FiN6dIzE7YKssq4XRhsmJVdSQsEMVi8NQ6hSZWo7j1Ijz+ublWtI03FTrCdheTQ1DaY1h2ow49R5Kscy8qMeU3wK4lnAYmaEkUgCDDQ4aCzvz5gQcIRn3bq/Nlly4/Bgeqc5ueH9jOls9hUuXEVrHkJP0Tb8B9vGOMPvyy0+oKt2mmHCcbgr5ha7uC4uVx/wAD8+Lp2ufVtALaUNNN7WSrDzUN3fLEAy1ZzmamnuFy4O1FzCnoEgUWA00wYuoqfARAeADeVwS3MMHtfRo63Lju1tgWGnmiFzNBqxQCMm+USchSuYcsJg/Pwo5jaNNooCqAZWWJiVpoCgCyxrQRGUYl0EUuIq9FzEDAUE/KeePgmcwp6U28nugVUYV6s1hb4TW4LyXR6RLigxBZZ0ybzcO7a7fx4zB60+Z2mI6wHgYC0IoBzNb0Q+hqu2n7A26TyxPLEIwP7CIkC1xuXDZSugGXDPiFMmpGWTIdZl1xpAWpKK2nlieWIcdWq6I9jgf8sTyxC62GLmEsMciSllms/wADP+Bn/Az/AIGUxQFcNpoGCpLXulJL1h1Y4CEVFAUIw1FzKkKGAaAQJFaChbtlWzoyLQJuHQdFsKLyZZMC0wNdR1hEh21CMBfOjIR/Ga2NVbdV3SwhLrIWOMiJXZSZFN7gb7NkVgm5oLVzjMpATmgBQWXdrXYBkUFXohbWuJTGt0VpEMRdLR2S6nN0UhxlYgEauDiUXMPQLAzWsz8FZ09OoshEBhmrFQyIW5hsMrr5F1wAzA97q3sLrSxl8tCLUDpdGOV4GJ2UthRVqXkOY6TKOZnGgAbLoC9CCMBgMZ+SDLJZC9K7ojT0AuMHrg0AmYDEbsMdi2EGZqnYNQOxaMXF4BdLDgNImsH9zLJXCYbpWwHNjLGNQAix1SgPX6I6vIbS5cJAgaXP8if5Ec5Ip09oSaPNYnetZcuLQroZZd15MagHIA6VFL3Lly5bLZbLY/MbpZTWDJFnqD0OGOGJiYlHBoLdDWH4nSGLTpYryYA4OKWFHRJiYlHDExKOADNHW0LnMHYoy2qWdy5c/wA3+oIsJ5g/UrdkER8qlALZlHoS5ctxCAoMBOQLopzQAAAKDAB9ElTqP6JbzS3mlvNLeaW80t5pbzS3mjBg9UoPVie3SJWEGFoVWjcqk7UCONACJ0J7eaW80t5pbzS3mlvNLeaW807MbUJRKkQFh7q2pIwJ271ZDNGg40can8CEJ0oNjpCh66QXLX0YO5AVBqVAbcLUOqGqBW5/APbp0eB
NVIRI2qkQsO2lA5rE6Vy7eUt5S3lLeUt5S3lLeU01CEW7ADKuwSnpzqjgzKKsBXATTlkaADfm7/RLaifzaOqo2IkLAHpHjZMmTJkyDualL0ERn1eInSg96lDLTLC8c09MwCvBAmgFAHAYhzdVCoA1pf4MmTJkyZMnOQ5SHgGJlhD1ET/8KsAwNKYsgPXga6q15Tr6iWB4PgxUtGcN8ydkMDVBYTDqvKGaDYCHepzVG9h01mjrNRdnWo9BolgjNZdrUMHPgVxC7XJ2RBE0cy0LaxVhFRetBNaZRIAsGxOiRmhRfUc80RZfrDLzxXZl6ag0qbkclVdfPYN6A5wHdHNRg4e6pxktrwotYcu4uLei+LJkyZMnPqgF2A/MKYBdjuXEhe5qAtSlqx6BAAAKDQ+uujsn0czbzA4Wisc6NcyDzOGt8gBOV+qMAdZQxyDC9xeshl6sN2CgjU72LXKCDnWChUoWqwyXToNwGALcZXLv4HexFhUjIjuJGSvuGGRa23pIbb8VBaOAvlGRC0K/YVgcFU9Sln8R3IjrTJ7pQ1OuA1GmEM6AdP8AxPxmn546c7576af/2gAIAQIDAT8Q/wDZ28GHR6zT7H/mfiP3nxj/ALqEB1gBLHJ9a+U+8+Mf93zmfBfWvlvvPjHhr/jrxfOZ8F9Uyrsn5dop7Afljnvk9JUsPRd/cluA8w/H6Yfe1vPlvvPjHHKnoH55RL4T8s1HzdiGSW9b+5M0B7PvpLss7jqcdf0m4S0mBrkbvYmKI51b+iZWj1/RNSW9f2TLSez76S1t8zc7+B5L7X9zIYX0/bF818u0FvB0ph6g5n5IAEbGKQu5jXG8v/p/cDql4xUMrKFY294/5v7nkH9wiU1dq/PCzLLoGrKNQe776RyGnr+iGSW9f2TIUc9H9MvPZ8nf6Sn9DP27QAM6yL7vNgCjBxfkR84iYF8v3PmOpY2PefGODH+kfuCAbc22/JgAUYONeROssEgOTyzG2g1Pz2eHxItyNgObLGtOTm/UUAA5HFWQjByW9Opupo8jgP13Z5esHBZq8uh+/A2xR0fwx31a6JqcfnMXsvBvqtHN/qWeUXBCmIBscaIiPOFHVOT8PTlNLN+On0fRKF+0fIBX67Hp4tDNPZ2YnUbHzPjE02BcXNRX+Ht4gCORmkeY+zw+JN8xXn4gAUbeEzupk7kdzdjs/wB8E3OZ1d/x4hoa6/CPD5DPgvAkDyvQ1hoKDB4dEgVOt3Xc/Z9HYRuD5hluV/rxmqbB/c+MTBNwekHn8nnt47TajXv/AJOoo/U+JBN2viFl0ftF7J9/4PhPuw04fIZ8R9uPSIftLw10/L47Bb1+n6PUXliULo8eS5/lPjE7mX8QBHJ9vGLfclnRv7z4k1PE+I/afDPv4C7LuZrAe2YTCnt+4fpq/qM4rCjpcNOHzmfEfbjc+n5im2W+CdN7M6b2Z03szpvZnTezOk+f1MUhr7kVjt9Gufl+EsXR48Fy/KfGJ6ufxCUcn28bp9ydw3958Dxp8R+0+GffhefsbsfOi5r8Rm29s/LAPvLNP3oS58J92GnD5zPiPtxoPT8kYXhTV5E89f3PPX9zz1/c89f3PPX9zz1/cAbPm/uBRX0ZlG4fmU251+vGDhsD9z4xMA3D+IXOYPnt48O1W/bB950FH7nwPGnxH7T4Z94g0gt9ItFHL22EPnQ0Dw/Cfdhpw+cz4j7ces4/aWrrr7YfpyvSSveLhCV67Pi2sjHV2IDXVfn8z4xNFAVEwoVeuz6niMqwEvgeYPd4fA8afEftPhn3l8cEu3p9vFcsih7f3DTg1vX8RWhyeBADNfvogBLEs8QhrNLOf6PpDf1Y/fKADGMC+z+4AsbOKWsHv6EEGFfn1fiCgKAQnxjg39WP3yhAYBi389YAsbONsQDnKjNtXyxG4i1edjh8Dxp8R+0+OfeaAAT3i6sYfxF6LHc4n2wOsrwq4f06dYQpnGXnaGTog8HMFpk9P6l7a0Op/XHRxjdfjv0i5Yd3Jj43PHTqcyCAB5cUC3BBLyOLNvyYn9WH75/ScqOofnnNTJ3/AChjt/T9MS0eyH3uXYLkP5/UPHQ2I+66rtqCgdjjkBXMa+vOb0Hf8NkMJ9j9McE9r93NCDldv6JVzu7vHW3c4lotbxnxAp5P2gAbo+/CjYJomv8AkLC06P5Qx2/p+mJKPZD73Lc+w3/R7TGF13fWOE06jyYb98EMIVl05wbIzruVpno7QbJ6RhOxV7f2ghvtH98KG3ydztNNLldP6YYT7X6qaU9r9ss6Xd+GJnDfM19OX/g6NvgfmG9o4ez/AAgLdCdYmDv9U9z6nYCg3Z/oE/0Cf6RP9In+kT/SIf3RNEb1IN+BKkE6k/0Cf6BP9IgiWfxAZ0Py+TgG/wBU9z+Cob4n5+JUtHfJ+fqfwpUqVKlSpUMaRW3HvFAPU3/TLfWcB7iVKlaCknxX8O2k+do71luVLnQ0+XP+Dsg+u8qapCmgUL4kz3jGP7nnH9zzj+55R/c8o/uecf3B6i9P7mk9QfxKcoXZx/X0b4kqVKlSpUqVKlG4QQ3zjzygiWQe4lSpU+O/hpEwZ7uXpKgXEJMJB3zR7+KyGrB3/qOW2VC01l6/kcTWD3/wSpUqVKlSoLaZt/KdoFe19E+B4AmosLzidD3Toe6aCXMz4Au8R0PKvbEPuvAPjH8Dr0OrtFGRcyoEvLJhuSfw+FamH5A/LKlTGBDrDcFD1OTwNYff/EqVKlSpUqVGDc2Zcv8AE7P7+ifAlSpUHkc/BorX98ypUqNZ6/eD30qVKnxj+DInK6vOVKlR2StmVeR1P68GefwEqVKhhuICNV1X7giWaQg9z8SpUNjvKLlcpgH2CAX+F7R4NJKhiYCLs64Pp9D+PKlSpYoOrvOk9ydJ7ktQLyMs1SFKlQnmVqGJsg99KlSp8Y8f+EBKlSpUdRLIZSkgkampyeDDQIh1GVKlSo9pdLMnlt+oaw+5+JUqDDvNDscSSajT2lSptUo56n2/36H8fwhSU4lS0BZuv8Tt++B99xKnxjxINAi7UZUqVKlQUjmG9m50hlkGZ4/I/wBSpUqVKlQYWNmJWLyH9w33vxxDh3ml2OPb5fzK4lD1E+h/BlSpUqVKlSoisr1L+8Do10x/UOt+m5wHvpUqJPhHivA0Mv4/gHhX+gev68Gm0xZehS5gSVLIKF0Ql5cly5RXpg/iD3vxKlQ4d5oduPwJUqVB8/2fofxZUqVFVOC8zrnvOue8BsB7MVU4ZUqF3pIIn17w+6lSok+AeJF9a/EALFbG0LlrZHUhehsDWPy5hgU3k3FB3YKvjUYm672QyLY5HgEoDoGrLsL2PAe7KS7wkQUH1jS0bKNpZFBu6sZvGy9GbpFOtFfYlSocO80O3HvvHzKlSpcHkP0P4MqVKh87n4DVauJUqVG5PDD7rKlRJ8A8WKdxfvKlKBqQbrupyhSidDeABddV+sDSgs7zv3DvEZc61rWZ5T+o/a9XKVEDl83gJJNtdIMsPuw+Tzn2yaJUdpbbnCIWEY6dEPxPsSpUOE0u3EaOurKlSp7DPo
fx5UqVAttY/M6b3nTe8LsD1gM/cZUqVD7f5g91lSokIjeDlO77Tu+07vtO77TyiAcGLpfXM1Q3K0iW13LCJJK6S0BoZURUsWsO4xb7oytWsJFcI5QcfMHvwDidwTMFHVTLdITOgy25Qa9czYnROgKmOAt/EV0jQhKhRdJSuVfYlSoMkOGMQ2wesnxHaWsqVKlCOrl+h/HlSpUqVKlcFSuCsuztB7rKlSpUqVKlSpgnIP4/is2+j0095Vfw1t6ZP4nz/wAeCpUSFOL0n5H6J8OVKlSpUqVKlSokBX39oAAwEHusqVKlSpUqVGGdJERax/AxbDWAANCYs5nR/v8AgRAasx7qywe/+JUqVKlSpUqL/sGAAoPoig6s6vy/U6vynV+U6vynV+U6vynV+UNwfMb9JM/VusCuAsTLc6vynV+U6vynV+U6vynV+U6vynV+UzXT5/UdjYnJA0ZXmngglIF4VoY57T3+PPggCxjdmefl34IbS3KW5QY7Qtg777P74FFKWdX5Tq/KdX5Tq/KdX5fqbsQP0zPZObr9fqTZN30n9zmHtOqnLHeZ530NIbQo8CCUzPZviaKD2Ynvi2sjXIDpDsfXf/xr9N//2gAIAQMDAT8Q/wDZry5RR8hCBHmX/wAz5pzTy7m/9xhH2Fvryl+wochqfWvNOaeXc3/u8u5s8z5/Wlcb5dzfDTn/ABWfwgd4I6eHy7mzzPn9UGAEe783wOsCG91ke2n6vrMy09UW4a7lX3PtLnhmo32FPQepE6rpWv8AZ14S+Xc3iKEv3aee9T0OsCaVo6u2c9Yxqob5xTs33XT1ijLyb9JVd7PaHqS5rtc3Rp4lCqI006c4qJNUW265xMu81Mec0LYcJyTm5AvvYjlqm9resVF226qesV59gH01V3r9JquNDJ8/8HpxBWjLL2OXTqOzgpu32jfITlV9bQDUo7ZvItuPkH3IZ4BdgqdPHQId4lBVIlImyS4KI20DdTvB6fJFinFdC2NiFeRjYq+FdUFkHujyFGiiA6y8U4NMZyXed3oXD9q2v0g2+9xmxTe1vWKOQb2p6xdHHc+gjXez2nS4kyHPcPnp9JSlSYHQst/7MGlxZjYktYaMbFmeSEXOpqra93igErw4eg0HRglAsr7HXQJui8BgdQUT3nl3N4Uhp8DoWW/m5uNmAAzWJprTo6WZdkjh1NVbXuueIEB0Ur4ielJArGvyjTpma7ZS7m6+T0eHzX2YrT0MWr3dOoQGAWDfOz762aI5WdUV7vFwQXY/c0To4hURV1HoxTJ3NKgjVSevIdBSdHgNQCjoDUx15e/Wo7J1A09aa36DlfFLmcjAZUF+ia4w7krAWsLHyG2Xl2lcF5TdgaH+sAOJgXgv2D2D1gKoIOn5dVt1zG+pJFe7x00wKR/s6OJgtEKvD0RtYeVYgyZcOyaHRPo5S2Yd1U3rhmxHqJ9C+J0qXJzdTokaQpxc6nsZes8u5sPGzj11fQzNWpyY3XvZnvArwvgoETURsTswlQVr6ejem92IjTPmvszBmS9yjU9T0I4S1bXmur4Ta5qef6GmCXWHqw+TlwKalG3Tgd+4Yz4dYyihybFV9tEsGeHlPNnnfPwEQU6+VXYKNc7imlqOqm19XwrHRh1rVdBY9GHblAjc7L7VfRwqWetWF7sY9wicnX3Y8b62+imx6GJ5dzYIlZ2BUPuzOF1+kv5PGRFYgdV8gL7RHv0lT4Z819mX6yqdVr7HiYLU+yhWb/hfw+Nbh57zZ5zzcVNLqfC/iWgrJnQr2a8YZWR7A+U+jmrsPk/qMvq/c8au1D5M8u5sJ5Ee9vxNV5+58aGNx+z+ICzm/A/E+S+zFi6Pv4vKuSeQ8nFagh88z+kSKd3YPuvxBzcyrfzX5j438w+wuSmwuUunuhyaE1uApOf3Gec83E2NPwT+IyC9LDUJl6k8kfmeSPzPJH5nkj8w3/MdZ/sQTxjbI3g0L0lD5L7/AEYkdj4P7iL6n3PGonQK9DPLubCN6p7f2mu0fc+NHGy/Y/MM5yfg/mfPfafBffxeVck8h5OGQGtXB9XnyDLAxuYvL11vulcCbg6Wm/MYIDy+8C/MUW751exD4leUNTjW4eec2ec83E0tMPcITAemwDkLsTpJukm6SbpJukmN8TLlild+HZarf0ZjKPWbI9yIcYheej2Y8ZR0+oH7jM8u5sEmo7hgPsxHqqvUK+DxkQqEHu+CnvEa/SVPgJ899p8F9/F5VyTyHkgUWQOpR8wA2UJrS+frY50aRma7Ra/1yNvE41uHnnNnnPNxUxr4IX8Suli29D9OS9UTuKybJjzKoPVR4gC8lbYdToErmMI2oa9GHrPLubGuow9NvUxNRdwZtetdmPEEJWAGqrQerMHFaeho3rF3rnFVtnz32nwX38XlXJPIeSAbeX+opWmrvS/x4QVolGaANylB60HppNbgDcofZ/uJqT7rfFLKhaNR1TLXyT4MveADsGnw6TDxkHh6250Vy+kMnz4ebi/I0ecBA6Et3YVrybXoIEBsiPs8NIxFWtepoASjKE516a6jm/h0yoaqFV7s8u5vBymfVzdha/qa5hJmSF1euxucm1xQwNkR9niSQVAL/wA9YnFG9Qw25uhWm3OInf3LF8nocPnvtPgvv4vKuSKvI4TT+73V16x2BGp3sz8radQ53GrXUhSe/EAkNUH7JT8mTbTT7S7OrNLgHVaPVRXktQ7KSD1VwzyAjoFa+EY5AYZpKt5AM8xhmKGsQYBFhlu8DWumy0lzSeX9yLtJg0pX2ZnX5iB0pBX9PpxFGpsCvsRMRi0ppkfcbnRqAUpdTFGB+09fpNMTrqHfr5KlRvdbt5TeJYx2w+4yxOu0M80podiHfqodku0Wv6OQYJrEa5IU3kqFpZE9W+OKHUUe6/Q10jQ9UfImzMn77Y/c+8Qv12s/bDQDTV7r95MtQaOPOatvXjfDctFuSsEwGoqWu70z4jUtaPZB8as8qK4LnNyO8fcPW4G5sye1dOk0bOWH3GWJ9EFXUaxQ+ofMJbTyGPOa5esuZZl1by6NbQBh2XV6WErnr1iiLoqd2jSlLdURh1JVSihLdQwdr97jAsdQ+5A9JSZmHN7Y95DwJRyZobDdQO/DX+amfOalPWGgFNHsf3ojre1vsw6QTq/afmFF1A+T2QitANLT5LXdrp9Mr/nrp7ZjAe79o4mx6urXrpBvjXhNnaUHNYSuaB6tRSn5DOfk9P4a+lC26YFXsE82/iebfxHy59p5U/E8qfieVPxEsvlOk+TQfuRFSU+AKcLEQTmNTzb+J5t/EfKn2joKRpHUf4nDVieqe5fhEJTvKmZ/Lz20f4GDf4D604XNfiuX6NHufU6G88P8H8mgPePmLmP9i+tl5ty/JDBQc9E5m48PP+xxwLNr3GK/P5/h1oSHpqXoGEDRB2FcKgbw9T192e1/wOB1W+m36ny4detHZ3ejTB0py9N/Uz68VQxbM3QlrTBatJ1Pajqe1C+/tQDv7UdT2o+7QfhFiq+y9yN0LG+LEEr6K68/Rj/CAOSx8xnipdyz+hXOIApGkdknn
fY8A8z5/wAI6uVxodDuwPRl40hBBoHRHCRTjnZzdPbR8SGs13wfYSpBQFByJeNJc9ufl+7scdaV/wAAAAhnSOi1dh6QMD0V1uP9XSvucx2fojry9GOXETUUFNVXP9XP9lAl00Fa5aC9DwDYaMAah+L+0r83jiEDym/8GmwtvkavbB1h70wDkcSidcXovymTqERMJT4EVGVlImquifhMvVeIrIUlsDozIwGec6u9a9eGtK/K1eAXl/BQrY3IhxGzmerzTXdeN4iNP0NV5WjHPiXd593G0yNJkYytqi8yyerVvE1zlKD6gnlPbiGc8u5vjC4JVhG2vkV6+AKyKk3iOJvb+XN8/AeqsTltHbU+kv4SDgNc+T6bOTHQUGk5JqTQyvytXEcc5vswxMK38+8Ssb3/ALgbqd6DdNzWzKnhs5nMTZN+BuVLKQyVew6j0fobrzNGOfHOnR6Hc5TyV+J5K/EBrWpoOecvoTNiIvm7vq546yWiehMei1OQuHoTy3tx8s8i5vjDJlKPbOxqxv4PSxLJS8NB5edJkTWfYfU0evA47euxuvQMw6MOvuHd8P3YlhjWXHDdDn0eW81JX52rie9vsxWvkt4qRoDkKKdjnt4BSu5fg+hlXkaMc+OIlpaLeEtGJxzcvQNWP5yd9TnyHL3cPO+3Hyzzrm+IALWuxuvQIHdHXd3XVc+L64akTIaQnNZXlw6MNq0iOyYZogDHPY7Op8f1es1sFSO463FA66dN+77KlZeeXE9zfZnlnN4oo0RdlRo8QVP95+30N15mjHP+D+0UDzH7oY+O22p7vymJUdDPYfxrwp8/jj5Z5Vz8QiPse71wen8ABDVXEVsIIx6GDY9J7sEADAYPTiU6VCXk1jnuY2OgR5QEBkE5mrEzFKboU3xmtttYpiFO86RTih1X4LR6mfSef83H99fZnzX34pKOT8zK8e2/l9DCrztGOXEvvLTVmrnS9pOl7SCXc3SvcgkiaI2PqcTezDp1OSRWLDK5mV7a9Z5W24meeZc/FoECz3J9o4tNc50/r1lNTl5Ia9q5PPDBq21ih6X/AKw4wdaG/wBc9yCUUcgal30fcmVwRrm7HqwkaiezX+fiUYNb0Y687vprEgIt/B0+ZYt7wxPKdvXHrvtLVK0bD5c36RpLOw+8bXFbuFirigWgNc75i2YvMFW0M5IMKC1r5dvbPI3lLp0Vp9PvpzgFbTV65Ep6kVflexcf3x9mfJffitfRK9BjZ44cg3l6fQ/NdGOfHs7zwC3WR0laa6eBv5pb0jzdtx8pPJufhIVOgPRBFHKs0HvWa3HPKUU50Ojpe1LmA15bC/muqdIeug9Ju32bw0MfqJxfyR0vCUer+/SUMFw7Jpc3vP7yjAxpYyVxtwV3npUbLydXtcwENaDq1KrrPaj7E9jfeeZN0qPl0iruvvDgqPaOaNfirslc5QEFqG8qr8w6Vxb90fZnyX34vCwO8qLXavmZ/Jx6j7q/TB9/obrzNI58cwai0L+E/wB1+p/uv1LsLuhQBQ9HmzzZck0vj4Lmo80bccqzAYRO57Tzv6zzv6zzv6zzv6zzv6x6YDRZLMH0gaF5Uml+t+9zES/7H33vpGbDw4DnXO1+sObVOkDVb7Cz5hmBojIc3feLgD5brp5YxCbZdE8no/DmF0JjLXq5LirXWnw+VdIFHPaLN+3AnL0578oARwdJ1V+H4hNAMUbwGsVhVIXS9rlc9vmEC75utaQ4IXsN4pUYQC6hvFwxJqR0dPtjr0jbYVm27k1HgMq9WhfaK5zPduIyXR+zFgyLt1iVHtQ0/MNX0rQ757QeImHN59VitzPARcD7du9X6Gq8jSOXEGS3OW5zfF+rNMTkMHgDr2D7r4MTyBt4QuXLlzE7wL3J7vh9n1/iAAnPyv6j7wAEyOT1/hAKevz2r2x6x15erwDrTrTMVXrNX4NtuIhwLbf1e7t7/RPOdI5fwgA3ASNgDJ19XSPhaWrusfkNv4QBbppk+8eOlu+cj3izWGfkXZ/gAKHcDz5uxvGxtFXmsIcD7v2/Z/AASvQWrsG8ZE23p+X6R+dzS5cuXLly5eLiRjSvX9745x+irV+iE4q2hrH+sh/mQ/zIf5kP8yH+ZD/MgTVe37gTe8mgwpFzbH3dYity8LZglQVZ6z/Mh/mQ/wAyH+ZD/Mh/mQ/zIH9ZBrWZbRZ7xjQGS+rLCeT9h8w4NPc6DZlwW8TrOme8M0XPgWte0uX6OrGj/Y5B5zwSAqxIaYPVYOr9JcOsOjeJ/wA/uaIDvX7gEBut+ceCPVWiXzI6u/d9h78H8hpoK1XGZ/mQ/wAyH+ZD/Mh/iQHqO9H5Ywoh5q/ivzLoW3+AfX7n+lo9zRh0ZPly/cDWdBr9wJ5PtDK6IMfiWhx/gbHzHyk1Vt8DolJuSlMHnj93qesqbD0PcYFZ7r+ofb6gsoDc7AemV9yWK8NBgen7/wDGM1bfTf/Z)", "_____no_output_____" ], [ "**#importing library for reading, writing and perform basic operations**", "_____no_output_____" ] ], [ [ "import numpy as np\nimport pandas as pd\n", "_____no_output_____" ] ], [ [ "**#Importing library for visualization**", "_____no_output_____" ] ], [ [ "import matplotlib.pyplot as plt\n%matplotlib inline\nimport seaborn as sns", "_____no_output_____" ] ], [ [ "#**Importing library for splitting model into train and test and for data transformation**\n", "_____no_output_____" ] ], [ [ "from sklearn.model_selection import train_test_split\nfrom sklearn.metrics import confusion_matrix,accuracy_score,roc_auc_score\n", "_____no_output_____" ] ], [ [ "**#Filter the unwanted warning**", "_____no_output_____" ] ], [ [ "import warnings\nwarnings.simplefilter(\"ignore\")\n", "_____no_output_____" ] ], [ [ "**#Importing all the required model for model comparision**\n\n", "_____no_output_____" ] ], [ [ "from sklearn.ensemble import RandomForestClassifier\n\nfrom sklearn.linear_model import LogisticRegression\n\nfrom sklearn.tree import 
DecisionTreeClassifier\n\nfrom sklearn.neural_network import MLPClassifier\n\nfrom sklearn.svm import SVC\n", "_____no_output_____" ] ], [ [ "**### Reading the Data**", "_____no_output_____" ] ], [ [ "train = pd.read_csv(\"blood-train.csv\")\ntest = pd.read_csv(\"blood-test.csv\")", "_____no_output_____" ] ], [ [ "**#Printing the train and test size**", "_____no_output_____" ] ], [ [ "print(\"Train Shape : \",train.shape)\nprint(\"Test Shape : \", test.shape)\n", "Train Shape : (576, 6)\nTest Shape : (200, 5)\n" ] ], [ [ "**#Printing first five rows of data**", "_____no_output_____" ] ], [ [ "train.head()", "_____no_output_____" ] ], [ [ "**#Counting the number of people who donated and not donated**", "_____no_output_____" ] ], [ [ "train[\"Made Donation in March 2007\"].value_counts() ", "_____no_output_____" ] ], [ [ "**#Storing dependent variable in Y**", "_____no_output_____" ] ], [ [ "Y = train.iloc[:,-1]\nY.head()\n", "_____no_output_____" ], [ "train.tail()", "_____no_output_____" ] ], [ [ "**#Removing Unnamed: 0 columns**", "_____no_output_____" ] ], [ [ "old_train = train\ntrain = train.iloc[:, 1:5]\ntest = test.iloc[:, 1:5]", "_____no_output_____" ] ], [ [ "**#Printing first rows**", "_____no_output_____" ] ], [ [ "train.head()", "_____no_output_____" ] ], [ [ "**#Merging both train and test data**", "_____no_output_____" ] ], [ [ "df = pd.merge(train,test)", "_____no_output_____" ], [ "df.head()", "_____no_output_____" ], [ "df.describe()", "_____no_output_____" ] ], [ [ "**#Setting the independent variable and dependent variable**", "_____no_output_____" ] ], [ [ "X = df.iloc[:,:]\nX.head()", "_____no_output_____" ] ], [ [ "**### Data Exploration**\n*# Statistics of the data*", "_____no_output_____" ] ], [ [ "train.describe()", "_____no_output_____" ] ], [ [ "*#Boxplot for Different variables*", "_____no_output_____" ] ], [ [ "plt.figure(figsize=(20,20))\nax = sns.boxplot(x=\"Months since First Donation\", y=\"Number of Donations\", data=old_train)\n\n", "_____no_output_____" ] ], [ [ "***#Boxplot for Months since Last Donation**", "_____no_output_____" ] ], [ [ "plt.figure(figsize=(20,10))\nax = sns.boxplot(y=\"Months since Last Donation\", data=old_train)\n\n\n", "_____no_output_____" ] ], [ [ "**We see from the above boxplot that the maximum people have donated blood in nearby 10 months.**", "_____no_output_____" ], [ "**#Correlation between all variables [Checking how different variable are related]**", "_____no_output_____" ] ], [ [ "corrmat = X.corr()\nf, ax = plt.subplots(figsize=(9, 8))\nsns.heatmap(corrmat, ax = ax, cmap= \"YlGnBu\", linewidths= 0.1,fmt= \".2f\",annot=True)", "_____no_output_____" ] ], [ [ "**#Printing all unique value for Month Since Last donation**", "_____no_output_____" ] ], [ [ "train[\"Months since Last Donation\"].unique()\n", "_____no_output_____" ] ], [ [ "**Feature Engineering**\n\n", "_____no_output_____" ], [ "Volume donated is also a good feature to know wether the donor will donate or not.\n", "_____no_output_____" ], [ "**#Creating new variable for calculating how many times a person have donated**", "_____no_output_____" ] ], [ [ "X[\"Donating for\"] = (X[\"Months since First Donation\"] - X[\"Months since Last Donation\"])\n", "_____no_output_____" ] ], [ [ "**#Seeing first five rows of the DataFrame**", "_____no_output_____" ] ], [ [ "X.head()", "_____no_output_____" ] ], [ [ "**#Correlation between all variables**", "_____no_output_____" ] ], [ [ "corrmat=X.corr()\nf, ax = plt.subplots(figsize =(9, 8)) \nsns.heatmap(corrmat, ax = 
, [ [ "**### Model Building**", "_____no_output_____" ], [ "**#Splitting into train and test sets**", "_____no_output_____" ] ], [ [ "xtrain,xtest,ytrain,ytest=train_test_split(train,Y,test_size=0.2,random_state=0)\n", "_____no_output_____" ] ], [ [ "### <center><u>Steps To Follow\n</u>\n <br><br>\n<ul style=\"margin-left:40%\">\n    <li>Create the model object</li>\n    <li>Do the necessary hyperparameter tuning</li>\n    <li>Fit the model</li>\n    <li>Predict on the test set</li>\n    <li>Compute the roc_auc_score</li>\n    <li>Repeat the above steps for every model</li>\n    <li>Compare the roc_auc_score of all models and choose the best one</li>\n</ul>\n(A short illustrative sketch of these steps is shown below, just after the Logistic Regression heading.)\n</center>\n<br><br>\n", "_____no_output_____" ], [ "**## Logistic Regression**", "_____no_output_____" ], [ 
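"**The sketch below is illustrative only** (not part of the original notebook): it applies the steps listed above to Logistic Regression, tuning the regularisation strength `C` over a small assumed grid and scoring with probability-based ROC AUC. The names `param_grid`, `search` and `best_lr`, the grid values, `max_iter=1000` and `cv=5` are all introduced here just for the example.", "_____no_output_____" ] ], [ [ "# Illustrative sketch: the 'Steps To Follow' applied to Logistic Regression.\n# The C grid, max_iter and cv=5 are assumptions for demonstration, not tuned choices.\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.model_selection import GridSearchCV\nfrom sklearn.metrics import roc_auc_score\n\nparam_grid = {'C': [0.01, 0.1, 1, 10, 100]}\nsearch = GridSearchCV(LogisticRegression(random_state=7, max_iter=1000),\n                      param_grid, scoring='roc_auc', cv=5)\nsearch.fit(xtrain, ytrain)\n\nbest_lr = search.best_estimator_\n# roc_auc_score expects the true labels first, then the predicted probabilities\nproba = best_lr.predict_proba(xtest)[:, 1]\nprint('Best C:', search.best_params_['C'])\nprint('Validation ROC AUC:', roc_auc_score(ytest, proba))", "_____no_output_____" ] ], [ [ 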
"![logis.png](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAeAAAAFACAYAAABkyK97AAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAAAAB3RJTUUH4QgIFhg4Vrr+rAAAI+9JREFUeNrt3Xt8VPWd//H358zkxk1QuSi21m1RqaJY1HpNZgKCiJAETWu1urZ21ba/Wru1C3hZUGsB11W36m61umu1aktqQqBAQchMIl7W4qpV6731rhhULgGSzMz5/P7IqIgXEHMCJK/nw3kccmKSmfecc97zPXPOGQkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAuof9JU2V9KCkUJITCdAzGREAXcpZDwFIUkAEQJf6q6RLJR1IFAAAbL/RMLugAUbAAACAAgYAoBuLb68/7O526aWXlvAUoKeaMWPGe9Nem3/v6quvTrS2to7Y0u+48MILryNJ4NO9/fau8eb1e/eRpLXrc31iQbwgI4/1L9Gqu26Z9IaZbZe3grZbAc+cOXNYUVHRzSwa6OmKior+tPm8vffee9jjjz8+ZCt+dgIJYmcShmbZXDyWycbjuTAe5HJBkMnF49lcEMuFsSCXi8cyuSAehrEgm7OYe2DZnMVMpmw2FpekbBiLuWTymOXCICZJuTAWC91Nbh+a5/7JZxnsNuTvT8+cObNC0rPbI4vtdvrD7NmzbwzD8KZp06Y9zCL5gVmzZq2YOnXqoSTxoWVluLtfOHXq1NO72UPzT1kPh0ga8Gk/3KtXr2VBEGRaWlr2Zin50Dp0u5n9YsqUKU+RRjTblkRlqr9iGqJQu5uCQQp9iMv6KggHmFs/l/q6qZ9JfeXWT/L+kvrJ1Vem4k64C+skZSW1urQxP2+1yVzydknr8yvY2kDKhVJGUkvHyubrZMoqVPbLe77y2L5femPMlClTzulRI2AAn+rN/O0TBUGQISZ0lsSZqeJgdTg0tGAPSXvJNUQWDHX5IHMNdGmwmQa7a6CkQuXeexXp+ZeQLrl98KoyVKub1pp8nWSrXf6Sma2TfJ1LG8z8XZNtcLf1Jq1zC9daLtjgFm4w83dzFttQGARtlrHWjTnfWNyrNVxac9yaznzM42bOHLU9D4WigAGgB2htKy4om5ROBoENc+kLku8l2R6SD3VpT63WrqFiHz4x7oP9t+tNelOul2V62EKtdLOVZt4cuppN/kYYBmsKC4I1sSC2etWAwnUP33QoLxApYGCH4luYx1WxsM2OnrS8b2HQvm+oYF+T72eyfV22r+T7ph9RXwvU4B9u2Ba5XrVAj8n1mkuvm+x1efiaxfwND+21toKilQ/UHLWRdClgAMAMDxKPp/dVqJGSRrrbIZIONGX3dAX5V3GmUApN/rJLD+w1cNXI11YNvMzlz7rs1SAevpKuSbYQJgUM9BSMcPGZJM5MFftaGyHXyMB1iEsj9WjjQZL13mShapHrCZmWuPRsoPDZnMee7d2+/tlFi05okzoOwrrj1yffQKIUMADgY4yrvn/X1vb2MkkJBUpotb5q+W11fufxSsmWy8JHFQaPmOceTX0t+ZxmWEh6FDAAYGtHuJWp/hYGpR4oKXmiNdN+kFn+0FzXqybVyfWoZI/GPfboPfOOef0jv2QeOVLAAIBPNXHiil4tBWuTHlpSChKSj/TAY/lvvyHpd25KW85T6XnJ50mMAgYAfI7SXRtvmRC4qteqZYKFQf7yo75S8j8oX7ipeeXPkBYFDAD4HMaOXdw706t4gptXr/WWE8zVy6WcSfe6VBv3YNmy+tK/khQFDAD4nI6svr+ksL19TGCqbpeqJO/jrtDcHpCpJhfYnHtrS98gKVDAANAJyic1luYC/6Fl2ifKVOJSzqVGM68pUmHtkrlHv0VKoIABoBOMH7+waGNx71Pc/bxQ/jVJoWQpuWoKg3jdkjpKFxQwAHSasVX3DWoPs9/ZGPiP5D7UpHUu3RSE4dUcRAUKGAA6WemkxkMs8HPbPXO6TCVyvSC3qbLwxsa5ydUkBAoYADpJdfWcWHP7wCqZ/VjyY9RxMaqlFoT/kToouYirT4ECBoBOVlaRrmrO6AqZhuc/v/bGWBj75funDtWSEShgAOg05ZMaS8PAZ0k6UtIayS8pLij6z8U1R71DOqCAAaCTlVY0jjDzWaH8BLlaZf7vMcvNXFY35m3SAQUMAJ1s7NjFvdt7Ff7M5dMkxU2qyYU2pWl+4u+kAwoYADqdW6Kq6dR293+TtIeZUpYNf5SaX/4k2YACBoAIJCc2HOCxxuvlSkh6w9xOS80tu5NkQAEDQARGnb2ioM9bLVND6WKTAjNdU9xaPGPRoiPWkg4oYACIQGlF4wh7q+VWk74maUUsDM5aNq/0LyQDChgAIpBIpOLaJfipzC+VFMh89rqBfS95+KZDM6QDChgAIpCc1LCfB8Edko+S7GFZeGa6LvkEyWBHEBABgG458q1InRwGwUMuHyGzi7U6PILyBSNgAIiIe2DpRw76Z5mdZdJrbjo+XVf2AMmAAgaAiJSfuGzo8sfax7a2FQ+UKRXLZb+1bN6YlSQDChgAIpKsaijLeTBnfWvJ7r2K2v5z4fCxP+LTirAj4z1gADu9RGXjWaEHS0yKHfjlF1JlX3v0esoXFDAARMYtUZmaIfnNJr1sYXj0XgObXycXUMAAENWotzrVJ1HZVCfZdElLJT8sNa/8GZIBBQwAESk/cdlQZYK05BXm+vW6QX1OSM9NriYZ7Ew4CAvATmV0RdNXsxbeY/LBLp2Xrk9cRyqggAEgQsnKhlE5hX8yqbcFYUW6tnwBqYACBoAIJapSx7jbHyXFTeHEVG35MlIBBQwAESqblE7KNU9SRoGPSdWWP0gq2NlxEBaAHbt8K9ITzbRQ0vpYGCTStUnKFxQwAEQpWdVQYaZamVaGMTuWz+8FBQwAEUtUpMa4B7936fUgyJU13V32HKmAAgaAKMt3cuoImdVJWmsxH9dQO/olUkF3w0FYAHYooyc1HZQNwwUm5cLQxjXNTTxNKqCAASBCpSc1Dstlw8WSimU+rmle4hFSAQUMABEqn7xs7zDnDTLtolAT0vOSy0kFFDAARChRmeofui2QNNiCsCo9tzxFKujuOAgLwHY16uwVBZLNkesAmZ+X4vKSoIABIHp9m9ddJ+k4uV+Rrkv+ikTQU7ALGsB2U1aZvkSuc1ya01ifuIREwAgYACKWqGj8pkmXSlreq23DGZI5qYACBoAoR74V6WNl/huXnotZtnLRohPaSAUUMABEOfKdnNpLpj9IWhdT7oRldWPeJhX0RLwHDKDLjB+/sGhDaHebtJt5OKGhfvQLpAIKGAAitrG41/XmOlyyC1L15YtJBBQwAEQsUZU6V67vuVTbOLf0ahJBT8d7wACiL9/JqSPkdq2kx4s2tHHEM0ABA4ja6ElLB8vtD5I2KPTJS5aMW08qALugAUQ58k2k4rnAaty1R+A2MTU
v8TypAIyAAUStfzBd0rEmvzxVX7aQQAAKGEDEklUNZZJPc1PTwILmy0kEoIABROzY6qaBoQd3SlpbkI2fXlPzjRypABQwgEi5BdnwZpP2MAu/s3T+MS+TCUABA4hYsqrxJ+aaJNl/pOrK60kE+HgcBQ2g88q3smFU6Jop019sl3AaiQCMgAFEbOzYxb1dwe9MavPATk7fmmwlFYARMICItZUU/cKkr8jszKa7y54jEYACBhCxsqr0kXL9P7ktSM8t+w2JAFvGLmgAn8v48QuLzHWLSesUC88lEYARMIAusLGo5ApJw9313cba5KskAjACBhCxZEXD1yU7X7JljfVlt5IIQAEDiNj48QuLPAhucWl9PBf7Lh8xCHw27IIGsE1ai3pNl+sAczuXq10BjIABdIFEZWqkSxfIlErXl95EIgAFDCDq8k2k4pL9t0uZwHP/xK5nYNuwCxrAZ9NfF0s6RNJ5DXNHv0AgACNgABEbPanpIJdNk/RA48iyG0gEoIABRCyRSMVzQXiLSR6anaUZFpIKsO3YBQ1g6wwIpsj9UMkuaKore4pAAEbAAKIe/Z6U2l+hXyzZ/w4sWHktiQAUMICozfBA2eBmmcxyubNqar6RIxTg82MXNIBPlXys8Xw3HW2ui1Lzy58kEYARMICIHTdp+Z7umiHpUV/jV5IIQAED6AIZy1wpqY9ZeH46ncySCEABA4hYcnLD0TI71aTfpurKG0kEoIABRKy6ek7Mw+AGSS3xMD6VRIDOx0FYAD7ircygH5h0sJl+es+8Y14nEYARMICIja5auptJ013669qBfa4jEYARMIAukPPYbEm7BR6e9vBNh2ZIBKCAAUQsWdkwKpR9R9KcdH35YhIBosMuaAAdZnjgit0gqTUW5P6FQABGwAC6QNljjWdJ+nrguqihdvRLJAIwAgYQsWMm3DvAXFdIesEH+NUkAjACBtAVG4J49nLJBloQfid1a3kriQAUMICIlVY0jpD5OW6al64tX0AiQNdgFzTQo7lZ4NdLynlgF5AHQAED6ALJiqbTzVXqrtlNd5c9RyIABQwgYkdPWt43NJ/p0stFG9v4qEGgi/EeMNBTV/4gO8OkPd01ecmScetJBGAEDCBiyUkN+5n0I5eWNNYn6kgEoIABdAEPglmSzM3OJw2AAgbQBconNZZKqjTp1011ZU+RCEABA4h+7Gth4FdJavECv4w8AAoYQBdIVDWdKukwmc1K1yTfJBGAAgYQdfmemSqW+xUyvdY32/saEgG2L05DAnqKd4Mfy3xvyc6cP//QDQQCMAIGELFx1ffv6ub/Iumx9MGlt5MIwAgYQBdoz7TPMGlXuX9TMywkEYARMICIlVcu+3IonSPpj+n65FISAShgAF0gtNi/mRSzXDiVNAAKGEAXKKtKHylXpdx+nZpf/iSJABQwgMi5yXWVpPUqDC8lD4ACBtAVo9/KxlNMOkry2Vx0A6CAAXSB6uonC026XKbXCje0c9ENYAfEaUhAN/RWtvnHJn1Z7t/hs34BRsAAusAxE+4dINdUSY+lRyZuIxGAETCArlipC7LTJdvVTadw0Q2AETCALjDmpHv/QbJzXVrYWJe4h0QAChhAF8iGudmS4mY+hTQAChhAFyivbDpcrpNk+p90XfIJEgEoYABdILRwlqTWMLTLSAOggIHupEjSDEl/k9SWn86QVPgZfod/wu1zSVY1VMiVdNM1TfVlr/BUATs+joIGtt6dkiZv8vU+kqZLOlDSydvrTlVXz4k1Z4MrJK0qiGeu5GkCGAED3cnx+fJdm/93SX66VtJJksZ+xt9nm9222arM4LPkOsBMly+tOW4NTxVAAQPdyWn56UxJiyW15qcz8/O/vT3uVCxebC6/RNLfi1s33MjTBFDAQHdzWH66YLP5Czb7/tb6q6SNku6XdOy23qkhX6ruJ2kvuU1btOiENp4mgAIGupuh+enfNpv/t82+v7WGSyqWdKSkeySN/Kx3qKBoQDDoi5P6SlqRri+dw1ME7Fw4CAvYOr3y042bzd+42fe35A5Jl0t6RdLhkm6UtK+ki/XhA7mOlfTVTx/9fmv3WFASmMKpkjlPEbBzse31h2fNmvV7SSMkbeBp+MjI6Cli+JBiSXtI+vv2ugOXXHLJIZlMJrj00ksfKSoqev/6ym1tbcH06dMPKSwszF122WWPftbf+9xzz/W55ZZb9ispKclOnz79sffm33XXXXs/9thju39iIL320OFjbtOGdU+GJ49rfYRF5EP2kfSGOt6nB9uWLb2wfnzq1Knf7FGPevbs2TfOnDlzFM//R16YrCCFjywrw2fNmnX7dr4bT6vjfN0Rm80fkZ+/rRu2Pvmfz2w2/4uSRn3S7ajxtUvKKhp88J5fe4Ml5CPr0O2zZ88eThJsW7Zk5syZo2bPnr3dDl5kFzSwdf4saT9JEyQ9vsn8Cfnptm7cDs1Pmzeb/3L+9hFlk9OHWagx77zZuH792mfbeWqAnRMHYQFb5878dJo6zvktyk+n5effsRW/Y76kpKTeknaRNEnSb/Lf+9PW3hELdaWk1tdeuH01Twuw82IEDGydRZLmSqpUx/m/m6r7mAJ976CoTY+zODF/29wbkv51a+5EWUV6oqSE3Ga1bXzzVJ4WgBEw0BOcoo4jmF9Sx3u2L+W//tZW/vzofFm/vcnP/0odu6Ff3dIPV1fPiVmgmZLezWYDLjkJMAIGeoy2/Eh1a0arH3eGQUP+tk1WZQafJfkBkp2/fMGx7/bp04dnBGAEDCBKR1bfX/LeJSdL2tb/ikQARsAAukBxtv1n3nHJyVO45CTACBhAFxhbdd+g0PVTlx7ikpMAI2AAXSTjmRkm9QvMpnHJSYARMIAuMLrq3n1d+p6b5jXUlTWQCEABA+gCOeVmSQqCbHghaQAUMIAukJicOkKuSpduSc0vf5JEAAoYQJcMf4OrJLVa4JcTBkABA+iK0W9F6mSZHy35lena5KskAlDAACI26uwVBTL7hUtvlbSVXE0iQPfEaUjADqbvW+vOkWyYpB8sWnTEWhIBGAEDiFiiOtVHsotderZlUJ+bSQRgBAygK7RrqkyDTX7uwzcdmiEQgBEwgIgdN2n5njI7X64H03MT9SQCUMAAukAmyFwmqbfFwgu45CRAAQPoAqMnNR0k2ZmS3Z2qLb+PRAAKGEAXyMXCa13KKQynkgZAAQPoAomK1MlyJc38mvS85PMkAlDAAKIu3zNTxTK7UtLKeDw7k0SAnoPTkIDtabUukLSPu767tOa4NQQCMAIGELHyE5cNlWyKS//XeEjZb0gEoIABdIEwHpslqbdc52uGhSQCUMAAIpaYnDpC0mku3dVYn7iXRAAKGEDk3BQG10pqjQW5C8kDoIABdMXot6rpDMm/bqZZDbWjXyIRgAIGEHX5Vqf6uPsvJL3SJ9vnKhIBei5OQwK6kGXsQkl7muyb8+cfuoFEAEbAACI25qR7/8FdP5Hbfam5pTUkAlDAALpANsxd5aZCeXg+n3YEgAIGukB5VWO5XFWSbk7PS64gEQAUMBCx6uo5sZz8GknrrM
CnkwgAiYOwgMg1Zwefa+4HSXZBuibxJokAYAQMROyYCfcOkPsMSc+XtK2/nkQAMAIGumIFi2cvl2x3C8IzFy06oY1EADACBiKWmJw6ws2+L+mPqdryBSQCgAIGoi7fRCqu0P7LpI1hzs4jEQCbYxc0EIX+mipppJl+3DS/7O8EAoARMBCx0VX37iu3iyT9eff4WzeQCAAKGIicW9Zz/yVTPAztnJqab+TIBAAFDEQsUZE+y6RyN13ZNK/sERIBQAEDUZdvdWqIzK6U9Fx7vPDnJALg03AQFtBJLGO/dKm/Kax+oOaojSQCgBEwELFkReMJLlVL/t+pueXLSAQABQxEbOzYxb3d/AZJq5TTVBIBsDXYBQ18TpleRTMlfUlup6TnJ1aRCABGwEDEyiubDnfpB5IWpevLfk8iAChgIGKJRCoeKrxRUmuYsx+SCIDPgl3QwLYaEEyR+0jJzudykwAYAQNdoPSkxmEK/WJJfx5YsJLP+QVAAQPRcwtyfpNMsVgYfI/LTQKggIEukKhM/1BSwqSrls0r/QuJAKCAgYiVVjUOl+xKmZ70/n4ZiQDYVhyEBWztyDeRisv9N5KCWC44ddmtpa2kAoACBiLmA+xScx1mpp+y6xnA58UuaGArlFc0HmWuKZLuTR1cdi2JAKCAgYiNHbu4d2h+q6SWIMidrhkWkgqAz4td0MAWtPUq+qVJw2T27Yba0S+RCAAKGIhYsrLhJJe+69KcxrqyO0gEQGdhFzTwCcacdO8/uIKbXXq5uKDw+yQCgAIGIjbq7BUF2Vzut5L6BEF46uKao94hFQCdiV3QwMfos7LlKpmOlPvPUrXl95EIAAoYiFhZRXqimX7k0sLG+sS/kwiAKLALGtjEmInLvyjTrZJetZz/o2ROKgAoYCBCo85eUZCNZX9nUj8Lwm+l5ydXkQqAqLALGsjru3LdNTLjfV8AFDDQVRIVqe/J7IeS1afry3jfF0Dk2AWNHq+8ovEomV0v6al4QTvv+wKggIGoHTdp+Z458xqX1iv0SUtrjltDKgC6Arug0WMdWX1/SSbTPtekwTI/MT0v+TypAGAEDETKrTjTfoukw1z6Sbou+ScyAUABAxErq2yc6tK3XPrvxrmJ60gEAAUMRF6+6W+ZdIVL9/dq2/ADEgFAAQMRS1Y1lJn0P5L+VmQFVYsWndBGKgAoYCDK8p3YcIB7UCdpXRiz8Uvqjn6LVABsLxwFjR7huEnL92wPsgslFVngo5vuTjxHKgAYAQMRGj/+wX6ZILvQpKGBhaema5MPkgoAChiIUOLMVPHG4ta5kg520/dTdeX1pAKAAgYiLl+9a3VyJU26vLEu8WtSAUABAxGqrn6yUKutRqbjzXRNam7iX0kFAAUMRFy+zZnmP0g60UzXpOoS/0wqAHY0HAWN7lq+EyW7NlVXRvkCYAQMRGn8+IVFze2rat8r3/Tcsp+QCgBGwECExo5d3HtDUVGtycdSvgAoYKALjK5aulubxxeadLjcZqXrSy8kFQAUMBChYyc37ZELw8UmHWjSlFR92ZWkAoACBiJUOrFxnyAM75H0JbmfnapP3kwqAChgIELJyoZRLl8gqb+7qhvrk3WkAmBnwlHQ2OkkqlKnhAqaJPUKzE5orE9QvgAoYCA6bonK1Ay53WnSO6Yw2VBX1kAuAHZG7ILGTiGTLShIVDbVSVYhaXkszJ68bN6YlSQDgAIGIvLcy3vt82rzwPGS95P7DesG9/3JwzcdmiEZABQwEJFkZcNJz79m/yOpj0vnNdYnryMVABQwEJGJE1f0Whtruc6l71rgbxwy7Ln7r559NuULoNvgICzscEqrGoevi7U8aNJ3Jas/5CvPTx404J1mkgFAAQMRSVQ0nhO4PyzXMMl/lJ5bVjl4t3fWkAyA7oZd0NgxRr0TG/exmP9K8rGSnpH5Kem5yUdJBgAFDESgunpO7K32QeeZ+eUulUh2vQrCaemaZAvpAKCAgQgkqlIHNmeCm83865KeN/k/pecm0iQDgAIGIjCm+p5dspn4JXI7T3KZdHlx24YrFi06oY10AFDAQCerrp4TW5UZfFY24z+XNFBu94XS95vqyx4nHQAUMBCBRGUq0ZyxayU/WKbXLLR/TNWX3i6Zkw4AChjo7OKtSh3jbtMknSBpvZmm98n2uWr+/EM3kA4AChjoVG5llY3HmzRNrmNNyrrpNjO/KFWbfJV8AIACRmea4UHZI40TzBovkXSYS+2BdHtgsZ8vqzv2WQICAAoYnai6+snCVe2rTvFHGqfJtL+kFnf9MpbLXdnwx9GvkRAAUMDoRGPHLu7d3qv4e82Z5p/K9AVJqyS/tKig6JeLa456h4QAgAJGJyqbnD7MQjujTX6qyXeV9Ipk5/fN9f41B1cBAAWMTlR+4rKhYSz+bTc/w0J9NX8BjUfcdcGgwoF31NQc0E5KAEABoxMkzkwV+xqbaK4zQul4yeOS3nHpplgQ3NhQW/p/pAQAFDA6SbKyYVTowRlardNM2k1Sm6Q/uem2loF95j5806EZUgKAnbSAFy1aNHzDhg2HSHqYp+F9Q5qamnaX9BVJz3fZSHdianeP2XEmjZU0zqU9zCRJD8j8tmx7/PfLFxz77vYK5Xe/+93IAQMGDJMUk5RjMemQzWZ7kcJHxJYsWTLs3XffHSnpKeJ431fy25Yhkt4kjg51dXWH9OrVa/j2+vu2HR+79+/fv2716tWTWQzeVyqpUdLpkn4b1R8ZdfaKgl3eXH9kaOFYmY1z6WsmBZJc0mNyX2Dut6fmlT+zI4TSv3//21avXn26pN6SOMgrLwiCNkkKw7CINN7XS9L6/v3737569eoziON935Z0u6QySU3E8f62pXb16tVV26sL2QXdU5r9pMZhlvMxJhunt1rKw0B9JZNLb5n7XW62OB5mlyybN2YlaQEABYxtMK76/l3bsm2Hm+xwD+1wmR+unA+UJJe3m3S/zBcHFlvSUHvsI3wgAgBQwPiMOq5CtfKQ0ILDTTrcpcPbMu3DJMu3qre69H8mu8PdG6zQU+maZAvJAQAFjK0wfvzCotbi4v3lsf1kvr+HGi7T/s2Z5uGyoCj/Boab9LTkt5nbQ7Lwf9cO7vcXjlreoRRJmibpDElDJb0m6TZJv5DEudQABYzt8mTEi4NYvI8GfWHMvl8+4PtnybS/Qg2X+f4bpS/JFZO84z9TKOlFmRab7CGXP1QQzzy0tOa4NSS5Q7tT0qYHHu4jabqkAyWdTDwABYwIjB27uHeupGTvrIV7B7IvuIdfNLMvhqa9zfVFue+ljvN/LnmvaCW1SnpG8j9IeloePCULn7H+ejp9a7KVVHcqx+fLd62kb6jjiPcySXMknaSO08CWEBNAAWMLqqvnxN7UkF0L2zUwDLID5bHBoXxwIA1080HyYIhcu8t8kKQh7VJfKZRJcrlkJpdkrpWSXmlrffvl5tfTpcW9Bv924JDSu2Lx4OmlI455UTMsJO1u4bT8dKakxfl/L85/PVMdp4pQwAAF3P2NH/9gv40lG/uFHvSNhbl+LuvrgQ0wt37u3k9Sf0n9F
XRMzTXg/XlS/+aM+sYUKmeSvOM02o5yVce7sfKMm5pNapbrPpmvlPSiWfByKH8lyIUv+6720iYj2ffOA14saaEk6W4W0m7ksPx0wWbzF+QL+DAiAijgLmCKFwwoOGbC/AElMSvxAi+WpNC9v4dZCy0otNB7S5IFQb9QHpMrHsj65v+/3iYrdPPAzHeRJHPrF3ZcLalPIBWEUm+TCuVWIlOxzIvd1dekfpL6b1SrFJoCudyCjnvV0Z75PcF5HbuCN0panb+9KukJk1aH0rsmb5a0Sq43Aw/eysbUXBIvWMlH8mEzQ/PTv202/2+bfR9AD7Bdrv6RrGwY5QpWRP13spn1kkJlMxvknlMut1FhtlXZ7AblMuuVzbR0/Dt/y2bWK5vtmN/xdUvHrX2dwpADiQGgu+ndu3fz+vXrB/WYEbAFvspza1JtLU8d2THCzLp7xxkYHraGkuSekXnGJSnMbXSZS55zD9s6xqje5h5mJbk8bHUplIcbO3423OBbetURf+/Bx/K3j1zMr0DSgPwN+Pwef/zx3cIwtAMPPPDtWCz2/jKay+XsiSee2C0IAh8xYsTbkvTKK6/0eeedd4q39DsPPvjgVSQLbLu99trrmwsWLOg5I2Cgh3pa0n6SDpL0+CbzR0j6S/77710YfpikPbbid3JdX2AnxVHQQNf5c76AJ2xWwBPy003flnkufwMAAJ/TeHUc0rdGHef8FuWna/LzjyciAACiUZcv281vtUQDAEB0iiRdJulFdVz7+cX813ymLwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA+IhD1PEx9M2SWiUtlzSRWD6kRh9cQakn+rqkGyW9IKlN0svquMJUT/hA+yJJM9TxGcJt+ekMSYU9eH3oycsD2w66ptNMUsfVgT7uUn3ocFp+I9OTc/FPuOUkTe7mj/3uT3jsf+jB60RPXh7YdtA1nWKIpNX5ABZJOiD/an+UpIWsO5KkoZLelXRxD19Y7pN0pqQv5JeRkfl5Lumpbvy4j9cHH+YwTlJxfvrehzmMZXnoUcsD2w66ptNcmg/kGXFd3E/yJ0kPq+NjJHkV+2H75PPY2I0f4+35xzh1s/lT8/NvYzHoUcsD2w66ptP8bz6U84jiY52rjt1HI/JfU8AfNjCfx+Pd+DE+nX+MIzabP4LRXo9cHth20DWd5t18KIdJ+o/81+skLZV0dA/P5suSWiRdtMk8CvjDZuXzuKgbP8Z1+cfYe7P5vfPz17IY9KjlgW0HXfMR/hlum8rm5/3+Y/6/dknH9MBMJCmQdK+kFerYfdSdVqLPk8umTpUUSnpWUkk33nDk8lkEH7OMeH4dQs9ZHrakO287Po/u3DXbvFF979X9EnW8KV6cn96Tn5/qoUXzM3XsPjrwE35nTy/gU/MrVIs+umuWEXDPLN+esjxsSXfednTGetQdu2abPZV/8EM3mz80P399D11YWjthhNhd/VN+VJiVVNEDHi/vAbM8sO3oRl0T7EChPJSf2mbzbZNXbT0RR+l9vPMl3ZRfPs6WVN8DHvOf89MJm81/7+sVLA89anlg20HXdJox+Qd+j6Sv5ncLfHWT3QKLWW4+pCePfC/KP/Ywv7HtKcbrg/OAx+Y3sGP1wXnAx7M89KjlgW0HXdOp7tTH7yJpUcdlw8BKJG15l9ru3fix133CY65lXeiRywPbDrqm08QlTVHHCdJtkt5WxyX2DmCdYSVigyvlR72XSXpRHUdsvpj/uoh1gQJm20HXAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADQ3fx/QmkBn0cs+cQAAAAldEVYdGRhdGU6Y3JlYXRlADIwMTctMDgtMDhUMjI6MjQ6NTYrMDA6MDCel7BGAAAAJXRFWHRkYXRlOm1vZGlmeQAyMDE3LTA4LTA4VDIyOjI0OjU2KzAwOjAw78oI+gAAAABJRU5ErkJggg==)", "_____no_output_____" ], [ "*#Building the model*", "_____no_output_____" ] ], [ [ "logreg = LogisticRegression(random_state=7)", "_____no_output_____" ] ], [ [ "*#Fitting the model*", "_____no_output_____" ] ], [ [ "logreg.fit(xtrain,ytrain)", "_____no_output_____" ] ], [ [ "*#Predicting on the test data*", "_____no_output_____" ] ], [ [ "pred = logreg.predict(xtest)", "_____no_output_____" ] ], [ [ "*checking accuracy_score*\n", "_____no_output_____" ] ], [ [ "accuracy_score(pred,ytest)", "_____no_output_____" ] ], [ [ "*#Printing the roc_auc_score*", "_____no_output_____" ] ], [ [ "roc_auc_score(pred,ytest)", "_____no_output_____" ] ], [ [ "**## Support Vector Machine**", "_____no_output_____" ], [ 
"![sv.jpg](data:image/jpeg;base64,/9j/4AAQSkZJRgABAQEAXgBeAAD/4QEKRXhpZgAATU0AKgAAAAgABwENAAIAAAAXAAAAYgEOAAIAAABsAAAAegEaAAUAAAABAAAA5gEbAAUAAAABAAAA7gEoAAMAAAABAAMAAAExAAIAAAAMAAAA9gITAAMAAAABAAEAAAAAAABTVk06IHNlcGFyYWJsZSBjbGFzc2VzAABTdXBwb3J0IHZlY3RvcnMgdW5pcXVlbHkgY2hhcmFjdGVyaXplIG9wdGltYWwgaHlwZXItcGxhbmUuIM+BLiBtYXJnaW4uIE9wdGltYWwgaHlwZXItcGxhbmUuIFN1cHBvcnQgdmVjdG9yLgAAAAAlAAAAAQAAACUAAAABU2xpZGVQbGF5ZXIA/+EMGmh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8APD94cGFja2V0IGJlZ2luPSfvu78nIGlkPSdXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQnPz4NCjx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iPg0KCTxyZGY6UkRGIHhtbG5zOnJkZj0iaHR0cDovL3d3dy53My5vcmcvMTk5OS8wMi8yMi1yZGYtc3ludGF4LW5zIyI+DQoJCTxyZGY6RGVzY3JpcHRpb24gcmRmOmFib3V0PSIiIHhtbG5zOmRjPSJodHRwOi8vcHVybC5vcmcvZGMvZWxlbWVudHMvMS4xLyI+DQoJCQk8ZGM6cmVsYXRpb24+DQoJCQkJPHJkZjpCYWc+DQoJCQkJCTxyZGY6bGk+aHR0cDovL3NsaWRlcGxheWVyLmNvbS9zbGlkZS81MTE3NDc4LzwvcmRmOmxpPg0KCQkJCTwvcmRmOkJhZz4NCgkJCTwvZGM6cmVsYXRpb24+DQoJCTwvcmRmOkRlc2NyaXB0aW9uPg0KCQk8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczpwaG90b3Nob3A9Imh0dHA6Ly9ucy5hZG9iZS5jb20vcGhvdG9zaG9wLzEuMC8iPg0KCQkJPHBob3Rvc2hvcDpTb3VyY2U+aHR0cDovL3NsaWRlcGxheWVyLmNvbS9zbGlkZS81MTE3NDc4LzE2L2ltYWdlcy84L1NWTSUzQStzZXBhcmFibGUrY2xhc3Nlcy5qcGc8L3Bob3Rvc2hvcDpTb3VyY2U+DQoJCTwvcmRmOkRlc2NyaXB0aW9uPg0KCTwvcmRmOlJERj4NCjwveDp4bXBtZXRhPg0KICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI
CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgPD94cGFja2V0IGVuZD0ndyc/Pv/bAEMAAgEBAgEBAgICAgICAgIDBQMDAwMDBgQEAwUHBgcHBwYHBwgJCwkICAoIBwcKDQoKCwwMDAwHCQ4PDQwOCwwMDP/bAEMBAgICAwMDBgMDBgwIBwgMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDP/AABEIAtADwAMBIgACEQEDEQH/xAAfAAABBQEBAQEBAQAAAAAAAAAAAQIDBAUGBwgJCgv/xAC1EAACAQMDAgQDBQUEBAAAAX0BAgMABBEFEiExQQYTUWEHInEUMoGRoQgjQrHBFVLR8CQzYnKCCQoWFxgZGiUmJygpKjQ1Njc4OTpDREVGR0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4eLj5OXm5+jp6vHy8/T19vf4+fr/xAAfAQADAQEBAQEBAQEBAAAAAAAAAQIDBAUGBwgJCgv/xAC1EQACAQIEBAMEBwUEBAABAncAAQIDEQQFITEGEkFRB2FxEyIygQgUQpGhscEJIzNS8BVictEKFiQ04SXxFxgZGiYnKCkqNTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqCg4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS09TV1tfY2dri4+Tl5ufo6ery8/T19vf4+fr/2gAMAwEAAhEDEQA/APz+1v8AYY/4eUf8HHPxc+Cn/CUf8IX/AMJp8VfG/wDxOf7N/tH7H9ln1O9/1Hmxb932fZ/rFxv3c42n7/8A+IGP/q6L/wAxv/8AfSvn/wD4Jp/8rkniP/sqvxH/APSbXa/p+oA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDM
b/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99KP+IGP/AKui/wDMb/8A30r9/qKAPwB/4gY/+rov/Mb/AP30o/4gY/8Aq6L/AMxv/wDfSv3+ooA/AH/iBj/6ui/8xv8A/fSj/iBj/wCrov8AzG//AN9K/f6igD8Af+IGP/q6L/zG/wD99K+AP+C53/BDH/hy5/wq7/i6P/Cyv+Flf2t/zLf9j/2d9h+xf9PVx5nmfbP9nb5f8W7j+v2vwB/4PnP+bXf+5r/9wtAHz/8A8E0/+VyTxH/2VX4j/wDpNrtf0/V/MD/wTT/5XJPEf/ZVfiP/AOk2u1/T9QAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFfgD/wAHzn/Nrv8A3Nf/ALha/f6vwB/4PnP+bXf+5r/9wtAHz/8A8E0/+VyTxH/2VX4j/wDpNrtf0/V/MD/wTT/5XJPEf/ZVfiP/AOk2u1/T9QAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFfgD/wAHzn/Nrv8A3Nf/ALha/f6vwB/4PnP+bXf+5r/9wtAHz/8A8E0/+VyTxH/2VX4j/wDpNrtf0/V/MD/wTT/5XJPEf/ZVfiP/AOk2u1/T9QAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFfgD/wAHzn/Nrv8A3Nf/ALha/f6vwB/4PnP+bXf+5r/9wtAHz/8A8E0/+VyTxH/2VX4j/wDpNrtf0/V/MD/wTT/5XJPEf/ZVfiP/AOk2u1/T9QAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAV84f8FB/+Cp/wZ/4JmeAoda+KnilbC81CN5NM0KxhN1q2r7Dh/Jh/u/7blIx/fFfR9fyR/CTwhrn/AAcYf8Fy7mHxj4kvbXw74j1O7u3eGXz20jw/ZiR4bO234CZQJHvA/wBZO8xR/nyAfefxP/4PfdB0/VriDwX+z1rGqaf0gvNb8XJYT/8AA4IbWcf+Rq7T9nz/AIPXPhZ4z1tbb4l/B/xh4Bs5HCJe6Pq8PiCOPP8AHIrR2sgX/cRz7V+hnwJ/4Itfsqfs6+HLPTNB+A3wzuG09dsd/rWhwaxfv/tPc3SPJn5v79eSft3/APBuP+zH+2P8LNU0/QPh34X+FfjAWso0jXfCenR6Ulpclf3fn20GyGePeF3q6byPuuh5oA+wf2e/2mPA37W3wn03xx8N/FGk+L/CuqDEOpadOJE38bo3T78ciZG+Nwrp3FfJP/BX/wD4LoaH/wAEfPGvgux8UfDPxR4s0rxxZTT2WqabewwRxzwSIs8DI/V0SSF/+2navxo/4Nr/ANqXxp/wT3/4K+3X7P8A4mubiPQfGWrX3g3XdLE+63s9ZtPMSG6Qf3/MgeDPdJznOxMfr5/wc2/sQr+2V/wSs8XXmm2f2rxR8K3HjLTNi/O6QI4vU9ebV532d3jjoA+sP2E/2wvDn7fv7JXgv4ueF4ZrXSPGNm9x9kmdJJ7CZJHhmgcpxvjkjdOPSvZK/CP/AIMsP20B4g+GnxN+AmpXmbrw9dJ4x0KJ3/eNbT7IL1F/2I5ltn+t09fu5QAV+e3/AAV3/wCDg74e/wDBI34x+GfBOu+ENf8AG2ueINIfWpotKuoYf7Pg854Yt+/++8c308v3r9BJZUtY2dmVEX5mZu1fykaBYv8A8F//APg46mlZH1LwDqniczyHGI08M6X9zP8Ac8+GFE/66XVAH9DPx7/4KT2v7N//AATAb9pjxJ4H1y3tY9B0zXp/DIuY/t9ul9NBHHCzn5N6faU3/Q1
8K/s//wDB4r8KPjj8dPBvg26+FfjTw1D4u1q10c6reahbPb6ebidIRPJt/wCWab8vjtX0z/wcwRLF/wAEOfjsirtRbPScL6f8Tqxr+Ui3/Z41ST9kj/hb1nJL/Zmn+Lh4Vvgv/LvPJZi7tWz/ALfk3PX/AJ5rQB/dXRXzn/wSi/a2X9uP/gnf8J/idJcLcan4g0GCHV5PXUoP9GvP/I8UmPavoygDx79uv9sHw7+wP+yd40+LniiKa60fwbZC6NpC6JNeyvIkMMKF+N8k0iICf79fn1+wx/wdd/D39uj9rbwP8JdH+E/jLQdQ8b3zWUOoXeo20kNviF5N7onP8FeN/wDB5x+1vcaX8LPhT8AdDmkn1LxlqMniXV7O3y8klrATBZoU/jEk7zOP9u0Ffmd/wRp+Dt9+zr/wcM/Df4f6tJHJqngfx9qXh+9dPuPNapdwPj/gaUAf190UUUAFFFFABRRVa9vIdNtJJp5Y4YYV3O7tsRFoAs0VzPhX4teFfHV9NbaH4m0DWLq2bZNFY6jDPJE3+2qNXTUAFFFVb7UIdKtHuLqWOC3hTe8sj7EQe9AFqiuX8LfF3wn4+vZrPQfE3h3XLq3+WaGx1GG6eL/fRH4rqKACiiigAormX+LHhmPxSugt4k0FNcY8aa1/D9rPzbP9Tu3/AHuOntXTUAFFFFABRXG3nx78CWOtJpNx428Jw6pJ9y0fV4EuH/4Bv311lvcJdQLNEyyJIu5WX+OgCaiiigAorD/4WFoX/CXf2B/bWj/255fnf2b9sT7Xs/v+Tnfj8K3KACis/XvEFh4ZsDc6hqFpptuvWa5nSGP8Wasbwd8ZvCPxFlaPw74p8N65InVdN1OG6/8AQHNAHU186/8ABTz/AIKCaR/wTF/ZK1X4sa54f1LxNp2l31rYvp9jOkM7mebywwZ+K+iq/M3/AIO1v+ULvjH/ALGHRf8A0rSgD6T/AOCUX/BSvQ/+Crf7MVx8T/D/AIZ1Twpp9vrVzohsdRnSecvBHC5fenGP3wr6fr8of+DN3/lEjqX/AGP+qf8ApLY1+nGlfFjwvrfiSXR9P8TaDfaxb8TWNvqMMlxF/vRht4oA6avI/wBt39qPT/2I/wBlLxv8VtW0u712w8D6Y+pTWFrIqT3Kh0XYjPwPv969cr4//wCC+v8Ayhu/aE/7FSX/ANGR0AUf+CPn/BZHwz/wWC8F+Nta8M+Ddc8HxeCb22sp4dSuYZ5LgzI7h18v02HrX2dX4Y/8GQX/ACQP4+f9jBpP/pNPX7nUAFFY/i3xjo/gPSWvta1TTdHs4+PtF9dJBGP+BuaPCPjjRfHmli90TWNL1q16edYXaXUf/fSEigDYooooAKKKKACiub8TfE3w34DuLaHWvEGiaPJeOscC317HA87twoTe3z/h6VuW11Hd26TQuskbpvV1PyPQBi/E3xtF8M/ht4i8STRvcQ+H9MudTkhUfNKkMLyFB7nbXw3/AMEhf+C/ng//AIK+fFvxV4S8O/D/AMReD7nwrpCavNPqd7BcRzq06Q7F8vofnzX2J+1n/wAms/En/sVdT/8ASSSv59f+DJP/AJPQ+MX/AGJMP/pdDQB/SZRRWfr3iCw8M2BudQ1C0023XrNczpDH+LNQBoUVy3g74zeEfiLK0fh3xT4b1yROq6bqcN1/6A5rqaACiisnxV4u0nwPozX2s6rp+j2Uf37i8uUhiH1dyBQBrUVj+EvGui+PNM+2aLq+l6xatx51jdpcR/8AfSHFbFABRRXNeL/it4Z8ATW8PiDxJoOhzXWfJS/1CG1eX/cDuM/hQB0tFVNN1C31ezW4tZobi3kG9JYn3q/5VboAKKK5nxd8W/C3w7mih8QeJvD+hzT/AOrS/wBQhtXf6b3GaAOmoqppuoW+r2a3FrNDcW8g3pLE+9X/ACq3QAUUVy+ufF/wn4S12HS9U8VeHdN1SYfu7S61GGGeT6I7bzQB1FFRRSpdRq6sro3zKy96loAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAK/AH/AIPnP+bXf+5r/wDcLX7/AFfgD/wfOf8ANrv/AHNf/uFoA+f/APgmn/yuSeI/+yq/Ef8A9Jtdr+n6v5gf+Caf/K5J4j/7Kr8R/wD0m12v6fqACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACv5K/wBtX4E/F7/g3Q/4Kzx/ELwdp7L4fj1e51Pwbqd1A76Zremz7/M0+fYU+dI3eGRPkfPzpgeW9f1qVxPxl+Bng39ov4f3nhXx34V0Hxd4b1H/AF+m6xZJdW8nbfsfOHHOH++hNAH5Q/sm/wDB5f8AAz4oWdrafFnwf4u+GOr/ACia6sk/tvSe3zl02Tpzn5PIf/fNfop+zH/wVM/Z2/bGktbb4b/GPwL4m1K84h0xNRS11OT/ALc59k//AI5XxD+0/wD8Ggn7LfxomuLzwTceNPhNqUw3RxaXf/2hpqP6mG5Dyf8AAUnSvzn/AGtP+DN/49fBfSrrVvhb4u8K/Fq0tRvSy8p9E1aQdfkjkd4GP/bcUAf0aWH7L3w10rx9c+KLX4f+CbbxVeXX22bV4tCtk1Cefdv855gnmF9/O8nNdpquj2uu6dcWd5DDdWd0jwzRSrvSRH++rD3r+bX/AIIO/wDBeb4sfsn/ALVWh/s6/tBahr+qeD9W1SPwzAfEJk/tbwVqDSeTHG7yHzPI8zZHJHIf3A+dNgR0k/pYoA/k8+CL3P8AwQX/AODjWHRLuWax8F6P4sfSJ5ZnPlzeHNT4hnk/v+TDNDMf+mlvX9Ydfgb/AMHpH7Ef2zR/hr+0JpNkN1m7+DfEbonRG3z2Ujcdn+1IXP8Az0hHpX6Zf8ELP20v+G7f+CXXws8ZXV0LrxFp+nf8I94gO/dIb+y/cPI/+3MipP8A9t6AOV/4OJP20v8AhiP/AIJV/ELVbG8W18TeOIP+EO0Ml9r/AGi9R0mdP9uO1W5kH+3GlfDv/BmB+w+3g/4M/EH4/wCrWZjvPGF0PC3h+SRP3n2C2cSXUiH+5JceWn+/Y14L/wAHdX7S+q/tTf8ABQL4Z/s1+DUk1W68IpAj2MP+su9c1Z4xDAfdIPsuz/r7ev3s/Yd/Zc0v9if9kb4d/CnRWjks/BOjQ6dJMi7ftc4XfPP/ANtJnkf6vQB80/8ABzN/yg8+PH/XrpP/AKerCvyD/wCCJP7Hzftwf8EHf2xvA9nZ/bNdXU7PWtBVFzI2oWVr9phjj/25Nhh+k9fr5/wczf8AKDz48f8AXrpP/p6sK+Mf+DIz/k1f43f9jXZf+klAGD/wZW/tjLrnwx+KnwH1K4DzeH7lPGOjIzZJtp9lteJzwEjkS1b63T1+7lfy9/D61b/giN/wdJNpbBdK8C694peyQLmO3Gha7zBz/wA87WSaLP8At2Zr96v+CvP7YK/sI/8ABOX4sfEuK4+y6vpeiyWWiOv+sGp3X+jWh/4BNMjn/YR/rQB+MXwzP/D5/wD4OvNQ1hmOp/D/AODmovcwHO+E2WhOI4NnYxz6k6
SY/uXD14V+yF/yt3ap/wBl08W/+lWpV+hH/BmV+xxJ8OP2RPHXxq1S3b+0vidqw0zS5ZB/zDrAujyI3X95dPOj/wDXqlfnv+yF/wArd2qf9l08W/8ApVqVAH9Vlfzs/wDBDb/gjB+2Z+yZ/wAFS/hf8Qvit4F1jR/APh/+1f7TvJvGOmaikXn6TewQ/uILySR/300Q+ROOvav6JqKACiiigDzP9rz9o3S/2Qv2YvHnxO1qOS50/wAC6Hda1NbRNsku/JQukKk8bpH2Jzxl6/m1/Zr/AGYf2lP+DqP9ojxv4w8afExfCfgHwzexs6TCe703QmmDmGy0/T96I7pGnzu8iE8F3d35/o+/bf8A2abX9sn9kP4j/C66uxZJ448P3Wkx3eN32SZ0/czY/wBiTY+Pav5bf2cv2qf2pv8Ag2e/aX8TeHdS8K29jb65Kkeq6NrtjJPoviNIDJ5N1aXEZQ/xybJI3/jxIh27KAPsT48f8GY3jf4V/Dm48UfCP43DxR410S2+02uk3ug/2O9/NGpYiC6S6k8uR/4EdMc/PItekf8ABrH/AMFkPid8dvi7q/7N3xl17UfEl9p+mTaj4Y1TWJC2qwSWros+nTSP883yFpF8z508mQFyNgTsP2Xf+D0n4R+ODBZ/Fj4Y+MPANxIQjX+i3UeuWMf+26fuZ0X2RJDX6UfsLftGfs8ftY+Ebvxd8BNS8A6xZNL5movotiljfW003z/6VDsSeF3+Y/vEBfB5NAGl/wAFDP23vC//AATv/ZG8YfFnxYk1zp/hu2H2Wxhk2XGqXkj+XBbJ/vyMnzYOxN79Er+c74PfDX9r7/g6Y+PWu6jrHjJ9D+GPh+7jW9aaaSHw34c8z50trazR83NxsTOeX+4ZJEDpX2x/we7fGK+0X4A/AfwLDKy6b4o17VtauVH8cmnwW8Mf/pwkr54/4JI/8HNPwr/4JifsKeE/hLP8H/GGuaxpM95fatqtnf21vHqdzPdSSCTY3PyRmGH6QigDrPjV/wAGY3xG+EngdvEXwn+OGn+LPGWjw/aoNOvNEk0FruZDkLBcrczhJDgbN4Qb+roOU9I/4N4/+C6fxMuP2m2/ZR/aWvNUvPE6XNzpPh7W9WyNWs9Qttwk0u/c8yH93IEkf94JAUYvvTy+h/4jdfhX/wBER+IH/g3tP8K/Ib/gob/wUd0P9pn/AIKm/wDDSXw58Naj4KmOpaVrsdjeyxySR6hZLD++zHx8/kxv/vl+vWgD+oz/AILgX8+l/wDBI/8AaCurWaa3uIfBt66SQvsdPk9a/mn/AOCZ37S/7UH7QPgSb9k/4C6xqGn6t8UdefV9a146i8M9vYR2saOklzy1taoEZ5Hj/eSfJGn9yT+lD/guLcpd/wDBHn9oSaNt0b+CL10Yd/kr8wf+DJL4HaS3h/45fEuaxhl1n7Tp3hmyumX95bwbHurlE9pH+zE/9cEoAyvEf/BkhqEnwua6sPj/AA3nj5Y3meG88MumlXU+3iHzftJmQF/+W2x+P+WdeWf8Ekf+CuPxs/4JG/t3/wDDM37SWqaldeC49Vh8P30euXj3cnguV8fZrm1my+bKQPC+z/V+XIJE2c7/AOlqv5u/+D174G6L4S/ae+Dnj+xt44NY8aaHqGl6myJsFx/Z8tuYXb1fZebMn+CNKAP6KPHfjTTPhn4I1jxJrFxHY6PoFlNqN9cP9yCCFC7ufoik1/MT4w/aq/ai/wCDnn9ujU/hz8P/ABDdeCfhjaCe+h0n7bNaaTo2mJJsS51DycvdXL7405D/ADyEJsj3kf0K/sTeJIP2sP8Agm/8JdW8YWNn4mi+IXw50qfXbTUrVLu31P7Xp0BuknR8pIjl3Do4IcE181/Er9sD9gT/AIIgeOPEC2jfD34e+NPESQxavofhDTDd6lJ5G8xpNBah0tsb3/1nl7y/egD4Lvf+DHq6j8Os1t+0lBNqygOI5fA3l2shwMpvF+7jnd8+w/7lfG/wh/aH/aQ/4Npf+Ciem+AfGHiDUpfB9leW93rfh+2vJLrQ/EejTPh7qzSTGyQokmyQIkiPGUcY3of1B+KX/B6T+zn4ZimXwp4B+Lfim8i4jNza2Wm2kv8AwP7TJIP+/Nfi3/wWl/4Kg6l/wVv/AGnPDvxK1DwDH8Obax8Nx6FYWf2575ru3jurqYTmYwxhvnndPkTHyfWgD+yLRdUttf0i1vrWTzra8hS4hf8Avo/zIfyNfx6/Bn/goD8UP2QP2+vjRdfDubXNa8deM5tZ8G+HUSeS6ksL291SNUngh53ziNHSNAP9ZInXGyv65f2ev+SBeB/+xfsP/SZK/lq/4Im/DC1+Kv8Awco6Ba30K3FnpHjLxHrTr6Paw308D/8Af9ITQB21z/waZftla/4ct/H99q3gG48XanOmo3Omy+Krltetp3bezzTND5BmQneXF0/OOSa/Xn/gs3/wUlvP+CJn/BNzwlpvh3UbrxP8TtUtLbwp4cv9dd7yV2trZBc6nclz+/dE2H5yS808ZfeN4r9Ga8V/bB/Zd+B/7Q/gldQ+OXg34f8AiLw/4WhnnTUvFVrB5eiQvsMzpczcwK+xN5DoCI0/uCgD+e39hj/ghH+0V/wXh8DD45/GT416loOg6/NOdH1HW4Jtf1LVAj7GkitfPhjgtfMR0TEif6v5I9mw11/7Xv8AwZ2/E79nH4c6p40+EfxXs/iNqvh2GTURo7aLJoupSpGjuRbOk8yvNwNifu95PXtX6F+L/wDg5d/Yf/Yg8M6f4B8Ga3qniTR/B9oml2Vh4J0N57G3ihXYkUM0zQQOMD78buh/v188/GT/AIPVfBMrLY/Cf4FeNvE2o3jeVbHxDqMGmkOfufubb7UZP9wOlAEP/Bpp/wAFePH37SPjbxR8Bfit4m1XxhfaPo/9veFNV1GR7m9SGGRI7m1mmf55P9dC6F848uTn7gr6c/4O1v8AlC74x/7GHRf/AErSvyL/AODQ2X7R/wAFjGkESQeZ4N1d/KTpH88HyV+un/B2t/yhd8Y/9jDov/pWlAH4f/8ABLqy/ak/4KFfCm3/AGR/gjrz+FPh6upXPijxZqcUz2cIScQQf6bOnzvCPJUR2yD947uXD7AY/sb4x/8ABlv4/wDhx8LG174c/G3TfFnjrS4BdQaNc+H5NHjvJ0O/ZDefapNj9NhdEG/q6DlPrX/gzc+Bum+Af+CZniDxotlCmtfEDxhc+fejiSe2s444YIP9xJDdOP8Aru9frvQB/Pv/AMG5H/Bbr4oeBv2rYv2UP2itS1rUpbm6m0bw7qGvu76toOqQEp/Zly7/ADvG+x0TzMvHIET7j/u/1I/4L6/8obv2hP8AsVJf/Rkdfg//AMHP/h//AIZA/wCC6dv4+8LKtjqmq6bofjqMr8m29gkeEOP+2mnq/wDvk1+7X/Bee6S//wCCM/7QE0beZDN4RkdG9f3kdAH57/8ABkF/yQP4+f8AYwaT/wCk09fXH/Bwd/wWcb/gk7+z7pen+FI7O8+LnxCM8WgR3EfmW+k28ewTX0yfx7N6JGh4dznlI3Q/I/8AwZBf8
kD+Pn/YwaT/AOk09fBP/B158X7j4jf8FqvFOi6rLM2l+ANE0bRIFhP7yOCS1j1B9m7jfvvXoA9I/Yk/4IIftKf8Fw9Fi+OHxq+LmpeG9B8SK8ukarr6Sa5qeqRFyN8Nr50aQWvy/J86dE2R+Xsetj9sv/g3Q/aM/wCCPHhm6+N/wH+Lmo+KLLwajX+q3WhwPoes6VZRrved4RLIlzAg3+YN/wBznyym/Z9LeFv+D0P4N+CfC2naLpPwF8c2Ol6PbJaWdtFqlmkdtCihI0T6IAKsat/wes/CXWLCa0u/gT45uLa5RoZYZdUs3SRG++rL6fWgD6g/4N4v+C01x/wVZ+A+raD44hsbX4u/D5YE1lrSLyrfXbST5YtQROkbmRXSVE+RH2MmxJBGn6TV/Jx/wa5fF1PAH/Bc/wAJaf4fE1n4f8eWWu6KYpnzILMWU99AjH+/5lnB+Nf1j0AFfhF/wcc/8F5fiJ4N/aA/4Zf/AGcdS1LTvFCy21h4j1/Rt51iS/n2eTpdg6cxv86b5I/3m9wi7Cj7/wB3a/lg/wCCB9rD+2n/AMHH0/j7xBjU5E1PxF44HmjKyTP53kv/AMAe5jdP+uaUAe4/AD/gzZ+KXx58Gr4u+Nnxsj8IeNdej+13Omro7+IryCaT/n6umuo0eT++E3j/AG68H+J/gj9qf/g1Z/av8Ptp3jJ/Enw18RXBvoIbaWZPD/iuGNwJ4Lm2fItrvy9vzpvePemyR6/qmrx79rj9hz4U/t1eEdI0P4s+C9P8baRoF/8A2rYWt5NNGsNyEkTf8jpu+R3Gx8pz04GADmH/AGjNB/a8/wCCZWqfE/wuzHQfG3w9vdWto5TmS38yyk3wP/txvvR/9tDX4d/8GSf/ACeh8Yv+xJh/9Loa/djx/wDAjwj+zT+wR408E+A/D+n+F/Ceh+FtXWx0qwj8uC3EkM8jhE9XeR393evwn/4Mk/8Ak9D4xf8AYkw/+l0NAH6r/wDBf/8A4Kv3P/BKf9jmHWfDVva3nxJ8bXb6T4Zjul8y3s2VN897InRxCmzCd3lj42B6/Hj9hj/ghH+0V/wXh8DD45/GT416loOg6/NOdH1HW4Jtf1LVAj7GkitfPhjgtfMR0TEif6v5I9mw1/Ql+2D+y78D/wBofwSuofHLwb8P/EXh/wALQzzpqXiq1g8vRIX2GZ0uZuYFfYm8h0BEaf3BXxP4v/4OXf2H/wBiDwzp/gHwZreqeJNH8H2iaXZWHgnQ3nsbeKFdiRQzTNBA4wPvxu6H+/QB+en7Xv8AwZ2/E79nH4c6p40+EfxXs/iNqvh2GTURo7aLJoupSpGjuRbOk8yvNwNifu95PXtXv/8Awaaf8FePH37SPjbxR8Bfit4m1XxhfaPo/wDb3hTVdRke5vUhhkSO5tZpn+eT/XQuhfOPLk5+4Km+Mn/B6r4JlZbH4T/Arxt4m1G8byrY+IdRg00hz9z9zbfajJ/uB0r4h/4NDZftH/BYxpBEkHmeDdXfyk6R/PB8lAH7Yf8ABwT/AMFR9U/4JZfsP/2/4Vt4JviD421D/hHPD7zLuj0t3ilkkvXTo/kog2J/z0kjyHQOK/In9gT/AINyvjF/wWP+Fdh8fvjF8cL7RbbxkWutNm1K0m8Ra1qdskzpvcyTxpAj4fZy/r5YBr9Vf+Dk/wD4Jl+KP+Ck/wCwpZ2Pw9hXUPH3w91ca9pums6J/a8PlOk9sm7jzNrq6epj2fx1+MX/AATr/wCDij4/f8EhvCy/Bnxh4Ht/FHhnwrcyRxeHvEEU2ja14f3u8j26TbDsQu7ttnhcpn5MJ8lAHd/8FDf+De342f8ABE74eyfHr4QfGjU9d0jwzcQvqV9pFrPoGtaJHJJ5aSbI55Emg3OqOd6n95/q9m/H63/8G6f/AAVI1/8A4Kf/ALFVxqXjp7WT4h+BdQGha7cWyJCNTXyUkgvzGv8Aq3kTcj4AQyQOU2A7B4p+z3/wdr/so/tR6O3hn4reH/Enw7/teMwXsGv6YmuaFcb/APlmzw73dP8ArpAiV+nvwG8Q+APGPwx0vXPhnceFb7wjq0PnWF34eEP2G4T1R4fkoA/HP/g6R/4LC/Ej4J/Ezw7+zT8G9S1Tw74h8SWNtqHiHWdLne31B1uZJI7bT7WTgxF9geR4/wC/Gm8fvAfLfhD/AMGV3irx/wCDI9b+KHx8h0Pxlq0TTXun6f4cfWEtbl/my95JdR+d/t/uxk9Hrov+DsT/AIJM/En4nfGjRf2k/hpomreJbGx0SHTPE9npMUk99pklq7yQXqxp+8aExuFd0/1fkBzwcjyn9jr/AIPOfib8LPDem6H8ZvhzpfxF+wp9ml13Sr7+x9SnHP7yaHy3geT/AK5+SOPrQB4v+1Z+z7+1B/wa5ftLeGdV8IfEqbWPA/iiR7jTri286PRdf8h086y1DT3d0jk2OnR3+STMc28OE/pd/Yo/ac0z9s/9k34e/FTSYvs9n460S21T7Lv3/Y5nT99Du7mOQOn/AACviT9mv/g4i/Yr/wCChutaLoviK8tfCfiIXAfT9O+IeiQpHHcOdn7m5zNbRuc4GZEd8jjrX6S6bZW2m6fFDaQww2saYjSJNiItAH4W/wDBx5/wX38ffDX45T/sy/s86hqWl+Jrd4LTxP4h0sF9UN7OEMen2DJzHJsdPMdP3m9wiGMo+/yz9nf/AIM2/il8evBaeLfjb8aIfBfjPW0N1NpselP4iu7eR/u/abp7mNHk/vom8f8ATSvzv/Yy/wCChuh/BX/grPD+0p8RvDup+N4YvEmreKJ9Os5USSe9uhcGF978fu5pkk/7ZrX7A/8AEbr8K/8AoiPxA/8ABvaf4UAfFnx8/Zt/a6/4NaPiroXjLwn40XxJ8MdcvY4BdWrSf2Dq8+Xkey1DT3c+TM8aSbHQ52bvLk3hwn9DH/BN/wDbm8M/8FHf2PvCXxW8LxyWtvrkDw6hYSOJJdJvojsntXP8Wx87Hx88ZR+N+K/DX/gqb/wdD/Cn/goz+wj48+EY+EPjDSb/AMTQwSafqF5qFpPHYXMNzHOknTd1TZ8nOyR/pXtP/BkJ8WLzU/hL8f8AwPNN/wAS/Q9X0bXLWI/89LyG6gnb8rKCgDhP+DgD/gpt8Zv2tP8AgolY/sYfAfV9S8OWcGqW2g6hPp+oNZz+I9Rukjd45pkw8dpAJCrp0cpI7bx5eNDwh/wZA3uo+Eo5PEX7R0Np4iuI1eSOx8FG6tLd8/ON73qPN1+9sjrzD/g42/4Jw/Gb9jT/AIKNXn7WPwvstYuvDeqalZ+JU1vS7c3MnhLVoEj3faU2HbC7w71dx5fzlH7b+5/ZW/4PW/EejWllpvxn+Een69s2pPrfhTUfsNww/vfY5w6O/wBJ40oA+a4vi5+09/wa7/t4aX4J1jxZP4n8AXAh1OTSVu5JdB8T6U8mx3hhk/49rpPLdMp8yOn/AC0jPz/1L+C/F+n/ABD8IaTr+k3C3mla1Zw6hZTr9yeCZA6OPqjA18M/sff8Frv2M/8Agp1490OzsdW0Oz+I3+p0vSPG+jQ2mrKX/wCWNtM+
+F3f/nnBO7n0r77ihWKJVVdqr0WgCSiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACvwB/4PnP8Am13/ALmv/wBwtfv9X4A/8Hzn/Nrv/c1/+4WgD5//AOCaf/K5J4j/AOyq/Ef/ANJtdr+n6v5gf+Caf/K5J4j/AOyq/Ef/ANJtdr+n6gAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigDxn9v3wh428d/sR/FXS/hxqWsaT8QrrwtqH/COXOmXL212moJA72wjkXBVnkVFzx96vw2/4NY/+Cw3iWb9r7xd8MPjp8UPF/iS6+JlvZR+G7/xdrtzqH2fU7V5E+wo9y7+W9wk3HTe8Eacu6Cv6L6/Cn/guB/waw6x8cvilrXxc/Zrh0eDWNcdrzWfA9zNHYx3F2xzJPYTPiFDJne8MhRA29kk5EYAP3Wor+WjwJ+21/wVS/4J16avhe+0T4zX2j6QnloviPwa/iG0jRAPkTUHhkLxxpj/AFc+xB6Va8Rft2/8FVf+Cidq3hbRdE+LOnaTrKbJD4f8G/8ACO2+xzgb9R8lGRPv/wDLdQfmoA4X/gv1quhftO/8HAWraP8ACXydS1y81XRPDc1zpiCQXmuJ5MD7Nn35I32Qv/00gev6x6/G3/ggX/wbXX37DHxB0/40fHObS9S+JlnDv0Hw/YkXFr4XkdCjzzzD5J7rYxQbMxx5dw8j7Hj/AGSoA8F/4KSfsdWf7ev7DHxK+E9wtu1x4r0eZNMmnI8u01BP31nMfZLhIT2+XNfiV/waHftvxfsz+J/2gPhD4+uZdE0vR9Jm8dmK5/d/2dLpw8nU/l/v+R5DY9LR6/ovr+Zf/gun/wAEdfj58Ov+Co/xD8afAP4d+PPEnhP4qafNqlzc+GtOkukt31CGSDU7KYoP+Wj+dJs/uXSCgCb/AIN3PhXrH/BVb/guH46/aP8AGFsZ9N8GXt14xnST54o9SvXkj0+2H/XBPMdP+vFK/pir88/+Dan/AIJ3ar/wT8/4Jx6db+MNFuND+InxB1GbxD4htLqHy7qwGfJtbZ+/yQRo+w/ceeSv0MoA+D/+Dmb/AJQefHj/AK9dJ/8AT1YV8Y/8GRn/ACav8bv+xrsv/SSvu7/g4C+Dnir9oD/gkN8ZPB/gjw/q3izxRrFvpqWOl6bbPPd3ZTVLKR9iJy+I0d/+AV8q/wDBo5+yP8Uf2Rf2d/i7pvxO8A+KPAV/q/iOzurG21vT5LSS6jS12O6B+vPFAHgn/B6r+yB5mn/CX4+aZbss1q8ngrWpkxnZ+8u7FvwP2wf8DSvEv+C5X/BUPUv+CiP/AAT2/Y1+G/h24k1jxh8SLGHxF4ls4T+8uNVhd9IhQf8AXS9TUDs/2I6/cL/gsR+x5/w3d/wTd+Kvw3trUXWtaho0l7oMYH7xtTtP9KtUT/fkiSM4/gkevwl/4N9v+CJ3xpg/4KgeA/FXxe+FXjbwZ4N+Gsc3iRJvEGkSWsFzew/8ecEbOPvieZJ8f9MHoA/oi/Yf/Zm079jX9kb4c/C3S/JktvA+hWumSTIMfa5kT9/N/wBtJi7/APA6/mr/AGQv+Vu7VP8Asuni3/0q1Kv6rK/m/wD2Zf8AgnL8efDX/BzlqXxM1H4Q+PrH4eyfGHxLrC+JZdFmXTWs5rm+eCbzsbPLkDph/wDboA/pAoor+cH/AIIP/Aj9ubwT/wAFW/hTqXxh0v8AaOtPhzb/ANrHV38UXOqPpK7tIvUh84TOY+Z/JCb/AOPZjnFAH9H1FFFAHkf7af7YXhT9gv8AZ0174qeOo9ak8K+GfJF6dLsxdTx+dNHBH8m4f8tJEXOf468e/YX/AG/vgX/wXB/Z/wDFN7ovhWbxB4W0LWTpGpaF410azm81hCjpP9m8yeMwurvsL4clJPk9ff8A9ov4B+HP2ovgh4s+Hfi61e/8M+NNMm0jUIUk2SeTMhQuj4Ox0+8jdnFfzi6r/wAE2/27v+Dfr9p/VvFXwG0jX/iL4R1d2tkv/DejPrdvrVkkhkSPUtPRXmhdR/GB8hZwk/JoA/V79qX/AINb/wBkP9pa1uLjTfA978MdauOVv/CN+9rGh/69ZN9qF74SNK/Ef/gnRpPjP/glD/wcS+HPhd4f8TS639h8fQeAdYls8xwa7pl1PHA5mi3/APLNJEm2Hd5ckI67K+ote/4OGP8AgpJ8bdLuvCfhT9na10XxJcR/ZnvtG+G+tXF/p0j5KOiXM08CHZ/z2jcff6V7V/wQD/4N6viN8Kf2lo/2l/2mGkj8bRyz6lonh+8u/t2of2hPv8zUdQk+dPMw7OiB3fe+99jpsoAsf8HsPwK1DxV+zF8GfiHbxPNY+CfEOoaRe7Iy/kjUYYJEdj/Cm+w2fV0+lewf8G23g34Aftof8ErvA8mpfDH4U67428DtP4a8SSXfheynvPOjneSB3d497+ZavC+8n7+/+5X6Gftd/sseEP22f2cvFnwt8eWf27w14usxbXCqdskDh1eGeJudskciI6H++gr+dPVv+CbH7eH/AAb7/tHap4r+Btn4k+IHhG6fY2o+GtKfVbHXLVC5RNT0xN8kboHb58Hy977JuSaAP6Hv+HfHwD/6If8ACH/wjdO/+M1+f/x1/wCClX/BP/4G/tyXvwA1H4FeH9W8Z6bqtrohm0f4caTeWM+oT7Atqjg+YZEd0Rxs4k+XnD18NfED/gvL/wAFK/2n/A994N8H/AvUvDmrSp9kvdW8KfDnWH1S2b7rhDNJPHDnjL+XvQ5dHTjb77/wQP8A+Dbnxd8Cfjjp/wC0B+0cIl8Y6XMNS8O+Gzefbbi2vWzm9v5l+QzJvyiI7/Od7kOmygD9FP8AguZEsX/BH39oZFXasfgq9+X/AIBX5c/8GSv7Rujx6Z8avhHeXFvDrVxPZeK9NhZv3l3Dse1uv+/ZFr/3/r9Sv+C6P/KIH9on/sSr3/0Cv5l/+CVP7An7Qfx58D+Mvjh+zTrl5b/Eb4KatZMmm6fN9nvruG5hnLvbu58uY4iKPbP/AK5JCPn/ANW4B/YxX80v/B5J8e7P43ft7fDH4T+HZBq+seANFdL23tjvkjv9UmjdLXH/AD08iG2f6TpW54g/4L7f8FMvFvh+4+H9n8CbzTfGXltBJqNh8MNXOtRHOzzPJd3gR93/AEw2Z7V7P/wQu/4N0/iN4f8A2lbT9pP9qhrh/F1nqDa1pHhzU7tNR1C81B/n/tHUJt8g3pI3mRx7zJ5gDvs2bHAPuf8Abo+KXiL/AIJG/wDBBe6u/DciHxZ8KPAOh+FLC6VBJHDef6FpSXPPyfu3k8z5/wC5X4x/8G4P/BHrwP8A8FbfiN8SviV8btU13xFo3hS8gWfS47+SO48Sahd+dNNNdXKP5+xQgPyFXkeX/WDYQ/8ARt+2l+yxoX7bX7K/jj4UeJJprfSfG+mPp73EQ3SWj53wzJnq8ciI/wDwCv5v/gt8C/2+P+Dcv9oTxBN4J+HOreNPDeuKq6hLpuhXWv8AhnxBBGzeRM7W2JLWZC77PM8mQb3GHQ8gH9A
Hwc/4I+fsufAJIW8LfAL4X211b/6u7vNCh1C8T/dnuRJIP++6/nz/AODwD4t+H/iD/wAFSdH8P6BcW0x+Hvgaw0LUEt9uy0ujc3d15OF6bILmDjtmvqS2/wCCzn/BSz/goFo0fhX4S/s7L4DutWBhfxOnhu9gjsz9x3S51B/ssOMv1DuMcc18vf8ABRj/AINmPj/+zz8MvAPi+zHiv49fE7x3fahceOY9Cs5tRGj3H7mSB/NP7+48zdc+ZM4QFwg92AP6Z/2a79NT/Zz8A3Ef3Z/Dmnun420dfzNf8G+X/Ky03/YV8X/+iL2v6Jf+CafiDxL4h/YE+EM3jDwvrvg/xRaeFrLT9Y0jV7V7e8tLq1iFtNvR/nw7wl0zyUdK/EP/AIIk/wDBOz47fBX/AIL5SePPF/wj8eeG/Bf9o+JpP7b1HR5rexKTQXXkfvGGz596bOe9AH9Glfzzf8HnX7bfi7Rvib8Pv2fdOvr7T/Bt1oKeL9agi/dprEz3c8FrG5/jjg+yu+z7heTPJRNn9DNflN/wcp/8ERPE3/BT/wAFeGfH3wu+y3PxO8B28mnjSbq4S0TxDYO5fy0mc+Wk0cmSm8ojCSTL8JQBkf8ABJ//AINmf2b/AAV+yr4E8XfE/wAJxfFDx94o0Sy1q+m1S8m/s2we5gSb7NBbQyeS6JvCeZJ5jPjeNgfYPve1+CXwH/4J2/CfxH440L4d/Dv4a6N4T0efUNT1DRtBs9OkFtCju+540R3zg9T8xPfNfhB+yP8A8Fk/+CgX/BN/4XaH8KPEv7PfiTxdp/h+AaZof/CSeDdXgv4IYx5cUCTxbEmhTCBPkc7OA+MV6lq/wS/4KHf8HC95aeH/AIraTH+zr8CZruObULN9Ln0v7YifOh+yTv8Aa71wQHQSOkG/5+MCgD5Z/wCDRTXDc/8ABZKGWZf3moeEtY+7/e/cyf8Astfr1/wdrf8AKF3xj/2MOi/+laV+dH/BKb/gmZ8cv+CYP/BejTZP+FVfEjXvhPpmvan4Yg8WLo8z2dxpl0kkNrfPMieXs+aB5P4E+f0r9Rf+Dl34D+Nv2lf+CUvinwn8P/Cuu+MPE15relTQabpFq91dSJHdI7sET+4KAPA/+DNf4/aP48/4JxeJvh8t5CfEPw/8V3E9xabv3gs72OOSCb/deRLlP+2VfsBX8vH7J3/BJX9tz/gnp8FPBv7SnwP0XxhpfxEje/0vxZ4Iu9NxqQtkm/dt9jf/AI/bWZEQmMDzEkjR0znMfd/F3/guR/wUs/az0K6+Gvhj4J6t4H16/i+xXt34U8A6vDq0Zfr+8upJ/svyEfvMI6cv5ifwAHjn/BcPxHB/wU4/4OFh4C8GzSa1bnWNJ+HUUkJ4V4ZNl7t/2I55bn5/+mbnpX73/wDBfX/lDd+0J/2Kkv8A6Mjr42/4N3v+DejVv2E/Fx+OPxwjtbj4sXMDw6Lokc6XSeF0mT99NPMMrJeyIzx/uyUjR3+d3k/d/cP/AAWk+GHiP4z/APBLH42eFfCei6l4i8Sa54bkttP0zT4Xmu7yTenyIicuetAH5s/8GQX/ACQP4+f9jBpP/pNPXxf/AMHUXw4v/wBnX/gt3/wsK4021vrDxhp+h+J7SO6g8y1vDZxpZPA+/wCR+bJN6f3JEz1r9Fv+DRb9kL4pfshfBn4zWfxQ8B+KvAN7rmtadNYQa5p8llJeIkE4d03/AH8Eivqz/gtp/wAEfvD3/BXX9m620OS+h8O/ELwe8174V12ZC8du7p+8tZh/zwn2Rh8fOhSN/n2bHAPSPgR+zD+zH+0X8H/DPjrwj8H/AIO6p4Z8XabDqunXKeDtP+eCZRImf3PyP/fTqj5B5Fb3ib9h79nPwZ4e1DWtW+DvwY0/S9JtZLy9upvCOnJHbwxoXd2byfuImT9BX8+f7P8A8Qv+Ckn/AAQNOo+CLD4aeI/E3gOCeSeCxu9BufEvhqMly7z2tzZvug3/ADuY/Mj673jD9NL9pD9sv/gpR/wWz8Nw/DOx+EHiLwf4J8SOIL6DQvCt7oek6im/en2zUr13AT/YEyRvx8jnFAH6af8ABLL/AIKjfsXftyftQ2vhn4LfBf8A4Rfx9Y2t3qEN63w/07T2srZN0ck32q2dzGjb0TqM/aETHL4/Tyvzr/4IG/8ABD2y/wCCS/wt1LVfE19Y698X/F8KW+s6hZu72OmWytvSzti4Qsm/53coN77OyCv0UoAK/lX/AOCNeuQ/8E2/+DkNvBHi6RdNtf8AhI9c8AzTyjy0LzNIlm4/2Jp0ttn/AF2Ff1UV+O//AAcQ/wDBu9q/7eXi4/HH4Hrp8PxYht44Na0SedbRPFCwoEgmhmchI7pERI/3hRHjRPnQx/OAfsRXxT/wW5/4K22v/BIX9mzw/wCMI/Dtj4y17xRr6aRZaHPqP2HzoRDJJPPv2P8ALHsjQ/J1nSvyE+Ef/BbD/gpd+xH4btPh94s+C2u+OL7TR9gsrvxh4F1e61KTsg+020kP2r/fO9367zVX4f8A/BK79tr/AIL7ftX6V4+/aas9f+HPgTRZY7eT+2tObR5bKz373ttM02RN/mPn/XzJs4+d5CgjoA/Y/wDZl/bi1D/go9/wSF8RfGG98Dt8Pl8VeGdf+yaS2q/2k3lQx3MHned5MP32jY/cr8f/APgyT/5PQ+MX/Ykw/wDpdDX70+NPgZpXwn/Yj134c+BdF+x6Tovg260XQtLs1MmxUs3jihQZ3u5492z71+OX/BpL+wX8Z/2Sv2rPinq3xP8Ahf428B6bqvhSG0s7nW9KltI7if7ZG5RGfHz7MnFAHLf8HnX7bfi7Rvib8Pv2fdOvr7T/AAbdaCni/WoIv3aaxM93PBaxuf444Psrvs+4XkzyUTZ9U/8ABJ//AINmf2b/AAV+yr4E8XfE/wAJxfFDx94o0Sy1q+m1S8m/s2we5gSb7NBbQyeS6JvCeZJ5jPjeNgfYNf8A4OU/+CInib/gp/4K8M+Pvhd9lufid4Dt5NPGk3VwloniGwdy/lpM58tJo5MlN5RGEkmX4SvgD9kf/gsn/wAFAv8Agm/8LtD+FHiX9nvxJ4u0/wAPwDTND/4STwbq8F/BDGPLigSeLYk0KYQJ8jnZwHxigD937X4JfAf/AIJ2/CfxH440L4d/Dv4a6N4T0efUNT1DRtBs9OkFtCju+540R3zg9T8xPfNfzmf8GimuG5/4LJQyzL+81DwlrH3f737mT/2WvqbV/gl/wUO/4OF7y08P/FbSY/2dfgTNdxzahZvpc+l/bET50P2Sd/td64IDoJHSDf8APxgV55/wSm/4JmfHL/gmD/wXo02T/hVXxI174T6Zr2p+GIPFi6PM9ncaZdJJDa3zzInl7PmgeT+BPn9KAP2R/wCCnH/BY74T/wDBJiw8MTfE+x8a3jeLo7p9LTQdLju/tDW3liZN8k0aI/76Ph3/AIvrXVaT8PfgP/wVl/Zj8E+Pte+HvhTx94T8Z6LDqulr4j0W2uL/AE6OZN/l7/n8idGLo/lycPvAc9+N/wCCzf8AwSx0n/grD+yHe+BZb6PRfF
2hz/2x4V1eZSYrS/WOSPy5tvzmGRH2Pjp+7fDmMIfxF/Zv+IP/AAUo/wCCCN3qngHSfhP4g8XeBLeaS4hs7rw1d+JvDqPJ/wAt7a8sWR4S+N/kmZOT88e/NAH3D/wU1/4NQv2fNZ+B3jjxp8Jf7c+Fvijw/pN1qttZnU3vtCu3gjed0mW53yQh9mN6TBE/uEfIfmr/AIMp/wBpHxYv7Q/xU+E73l5deCbnw3/wlcds7l4LC/hure1LIM4R5o7n58ff8hP7lcn8ev8AgpH/AMFKP+CwPw5vPhX4e+CereEvDPilJLTU5fDfhS/0tNQtWfy3t7nUL+Z40j6o+x4d48xHzHvSv1G/4ICf8ET1/wCCTPwa1zUvFN7p2tfFj4gGFtbmsvmtdJtoS5jsoHYb3H7wvI+AJH2do0egDov2jP8Ag4P+A/7K/wC3FZfs/wDi618eQ+NrjWtP0S5uotJhXTNPN6I3huJZnnQ+Tsmjd3jRyBn5OK9q/aj/AOCUP7N/7aEtzcfEj4N+CfEWpXv+u1RbL7Dqcn1vLYxz/wDkSvhH/g40/wCCAOuf8FJNQ0v4t/B8WL/FbQ7NdK1TR726S1t/EdnHveDy5G+RLqMvsHmFEkR+XTy0D/Evwe/4LO/8FLP+CfPw6t/A/jL4H694ysdAUafaan4z8B6tJdRon7uNEvLZ4UuU+587+Y7/AN85zQA3/g4Y/wCDdn4Y/wDBOj9nOP4zfCXXtesdJXWYdM1Hw5rVyl0qCcSbGtJtgk+TZgpJvODv3/Jg/o//AMGof7R/iv8AaH/4JNafH4uvr7VJvAXiO78J6Xd3jl5JLCCC1mhTeT86xm5eFPRIUT+CvzC+Mnww/wCCi3/Bxb498OaN458A3Xw6+H+j3hmSPUNEuvDnhvS59mx7vZcl7q6k2ZQcz+WXk2CMPJn9+v8AgnZ+w34W/wCCcv7IvhH4S+E3lurPw5E73V/cR7LjVLyZ/MnuXA/vyMcLzsQImcIKAP5lP+CVeieDv2Gf+C9sHw1+Mmh+HdS8K2/iTVfAepR+INLgubOOR/Nhsp9kybFR51tv3h/gkLdK/qB/4d8fAP8A6If8If8AwjdO/wDjNfmr/wAHD/8Awbpal+394uk+NnwV/s+D4pfZY7fXdBupktY/FCQpshmjmchI7pERY8SYSRET50KfP8ZfBv8A4LIf8FKv+Cevhaz8B+MvhDr/AIyXR0+wWVz428E6peXnA+RUvbV4/tQyv33eQuM/P/dAP2H/AG+dN/ZJ/wCCb/7M2r/FT4jfBX4ZtoOkzw2qWmm+CdLmvtQnmkCJDAkiRo7/AH3ILqAiO3Y1P/wSG/bG/Z2/bb8BeLPFX7PXw7i8D6Vp97DpmrzR+FLbQ/tsyI8kceYOJvLSQPjPyeen9+vxH+If7Ln/AAUP/wCDh/40aX/ws3wjrHw/8H6NJ59ode0W58NeHdBR+He2gnBnupCP4/3zjhN6JX9A/wDwTo/YL8G/8E3v2UfDvwr8Fi4ksNI33F7fXPFxq99J/rrqQdMvjhf4ERE/goA8H+H/APwcJfAf4k/8FBLX9m+1s/HVj4+k1u98OyXN/pMNrptvf2vm74XkefzMvJC6R7I33uU9a9D/AGn/APgib+yv+1ubq48afBPwZJqlzlpNT0a2bR7+V/77zWflvI3/AF0L1+cP/BwR/wAG8HxC+N37R037Rn7N0b3Xi+8ePUPEGgQ3wsb9r2DZ5eoae7FE8zCIXTej74w6b3civEvB3/BwH/wUo/Z+8OWHhvxh+z3deIdWhRbWHUfE3w11u01K9c/c3i2kgjk/4BGu/wDOgDwz/g4j/wCCG3gz/gkjdeBfGHw18V61d+GvG19c2Y0vWZkfUNJuYFSQPDKiJ5kJ345TemxPnffx++P/AAQx/aB8UftUf8En/gt428aXFxfeJdQ0eazvbu6Bknv/ALJdz2STu5PzvJHAju/8ZfNfii3/AAT2/bs/4OJf2lvD/ij47aTqPwx+H+kjZBc6vpEmj2Oh2byB50sNPkxPNO//AD0kz5myPfPsSPH9Fn7OvwH8Ofst/A/wn8O/CVm1j4b8GaXBpGnRPzIY4U2b34G+R+Wd+ru5J60Ad9RRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABX4A/8AB85/za7/ANzX/wC4Wv3+r8Af+D5z/m13/ua//cLQB8//APBNP/lck8R/9lV+I/8A6Ta7X9P1fzA/8E0/+VyTxH/2VX4j/wDpNrtf0/UAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQB8n/8ABdH/AJRA/tE/9iVe/wDoFfmj/wAGO/8AySz9or/sKaF/6Jvq/aL9pD9n7w5+1X8CPFXw58XQ3c/hnxlp0mmailrOYJ5IXxv2P/Ca8h/4Jz/8EofhF/wS10TxTYfCez16zt/GU9tPqP8AaWpve73gWRYyhP3P9a+aAPpqiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAr8Af+D5z/m13/ua/wD3C1+/1fgD/wAHzn/Nrv8A3Nf/ALhaAPn/AP4Jp/8AK5J4j/7Kr8R//SbXa/p+r+YH/gmn/wArkniP/sqvxH/9Jtdr+n6gAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAK/AH/g+c/5td/7mv8A9wtfv9X4A/8AB85/za7/ANzX/wC4WgD5/wD+Caf/ACuSeI/+yq/Ef/0m12v6fq/mB/4Jp/8AK5J4j/7Kr8R//SbXa/p+oAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigA
ooooAKKKKACiiigAooooAKKKKACvwB/4PnP+bXf+5r/APcLX7/V+AP/AAfOf82u/wDc1/8AuFoA+f8A/gmn/wArkniP/sqvxH/9Jtdr+n6v5gf+Caf/ACuSeI/+yq/Ef/0m12v6fqACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAr8Af+D5z/m13/ua/wD3C1+/1fgD/wAHzn/Nrv8A3Nf/ALhaAPn/AP4Jp/8AK5J4j/7Kr8R//SbXa/p+r+YH/gmn/wArkniP/sqvxH/9Jtdr+n6gAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAK/AH/g+c/5td/7mv8A9wtfv9X4A/8AB85/za7/ANzX/wC4WgD5/wD+Caf/ACuSeI/+yq/Ef/0m12v6fq/mB/4Jp/8AK5J4j/7Kr8R//SbXa/p+oAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACvnX/AIKR/t86Z/wTu+BOn+KpdBvPFniLxVr1n4U8KeH7aaO1bW9WvC3kQGaT5IUwkjs78AIfavoqvjX/AILc/wDBNXUv+Cnf7G6+FfDOtL4b+IHhHWYPFPhTUXd40TUIEeNInkT540dJn+dP9W4jfnZggGf8SviX+298EPh1ceNLjwn8B/iouk2P23UPBXhb+19L1k7fvxWV7O88d1IOeHgg3hPkG8gV7h/wT2/aT1L9sb9ir4a/FLVtMs9F1Lx5okGrzWNpI8kFqXH3EL8/nX5a/sif8HHnxG/Yl+JNn8E/29vAOueDfElkiQQ+N7ey3R3aD5POuYY/3c0eR/x82O9Cf+Wf33r9Bv2n7r/hVX/BLrxR4x+AnjqbwppngjwVqXi7wvfaTDZapY6nDDZz3sMcn2qGffbPxzGUkx0f1APq6ivhL9hP9on4oftl/wDBE3wj8VNS+IN74d+JuqeHtS1ebXdM0nT2jee1uLuONXtpoXg8s+Um8IiPxw6V8jwf8FaP2mdT/wCDbpf2oNP8ZeD/APhPNL1Ce11a6vvDUcs08MmtJp0BtlR44IZIxMj/ALyGYPs96AP2lor8mf2nPH3/AAUK1T9iTS/2hvh/8QvAvh2x0Pwra+IrjwEPDdrqOpa7YpapPPd3NyUdPtUnzzfZrTYiJhEd3Hzn7Sv/AAWT+KHxP/4N/Lf9q/4QXvhHwfr2nwW9p4lsNR0mTUJLe9bUYdOm+xs02xNk0vnJ58cwMboD89AH6zUV+TP7Tnj7/goVqn7Eml/tDfD/AOIXgXw7Y6H4VtfEVx4CHhu11HUtdsUtUnnu7m5KOn2qT55vs1psREwiO7j5/RNQ/wCDgHw/pP8AwRH0f9qqTw6lx4o1wr4dtfDKSMkdz4i8ySB4Efl/I/cyT9d/kJ/foA/SKivgP9q7w7+1l+zF+x5q3xf0T41Hxl8Q/Bel/wDCSeIfA974Y0lPCmoRQp5l7a2bQ2yajHsjWXy3e7meTywMAv8AJwf7W/8AwVr+IXxY/wCCGP8Aw1Z8AdS8NeEbpdMSfVrHWtHfVrqym+3R2U8NtJ5iRo8EhmffPBMkiBP3ab80AfpxXG6D8YfD3if4v+JvA1nfRzeIvCdjp2qanbJ/y6w3r3SW25h/G/2OY7TzsCHo4r4U/Y80n9tz9o3TfgH8Urz4weA/Dvww8UeCrO48SeGZfDsM2rStNp/7vUxcCN0e6nmdLnyQ8MMAcRlJ/LfzPn7/AIIX/Cfx9f8A/BTP9uCGT4zeNrqbwt8QtMtdXvLvT9PupvFkcE+ooiXLvbfuf3cezZa+RsSTCY2JsAP2eoqOUeZGy7in+16V+Ynhz9qT49fAT/gvTon7P/xO+MOoap8KfG/hufxD4HuH8PaRa3OtXKY3afdTx2ycp5N1/qAjviD7vmUAfp/RXyl8add+JPjb/gpF4B8D+CfiRq3hvwtpfhq58U+PtMh0zS7tI7bz0t9Mihea2eaGS7nS83ne6+XYybNj/PXzTo3/AAVH0/8AbbsvHWvaP+1XoX7Omi6bq95oXg60tNO0jUr3UEtpPIOp6h/aEM3yTyI5jtYBA8cflu8j7+AD9Q6K/LH/AIJ3f8F3tW8Uf8E+Pjv49+OmnWtz4m/Z0lNvqWp6JbNaWHjKGRnjsp7bIwsk88bR/IPL+eN/kD7E9C+L2j/tmeKf2FIfjR4B+JkzfGC90+z8SWXwwtPD+kP4akgm8uR9L86eH7dJMkDt++F2nmPHhETfQB9sfHL4x+H/ANnr4SeJPHHirUE07w74VsJtV1C5b+CGNS52LzvfjCp1d+Bziu0r8U/+DlrWvih8Tf8AgkZ8IvHHivUPEfw11HW9Z0ODxP8AD6EwSWC388U8zmabYZpDA6Jsh37ARl0d0BT7e/b1/bQ1H/gkD+wb4m8f+MPFetfF/wATyahDpnhi31SzstOkvtQuU2QWv+hwwxmNPLmnc7PM2I4/uUAfZ1FfAf7V3h39rL9mL9jzVvi/onxqPjL4h+C9L/4STxD4HvfDGkp4U1CKFPMvbWzaG2TUY9kay+W73czyeWBgF/k4P9rf/grX8Qvix/wQx/4as+AOpeGvCN0umJPq1jrWjvq11ZTfbo7KeG2k8xI0eCQzPvngmSRAn7tN+aAP04or88f2HU/bS+P9z+zv8WtZ+KPw90/4R+IPCOn3vivwbc6GkuragJNPRkvftcaZa5nkZJyiPBHB5mzZP5Z34v7GX7XfxA/4Kd+GPipqvhD4/v8ADT4peE/EWraRYfDiPStFuLTQo7WZ4bV9SgubWTUZ1nGx5JoZ4U3740CGN94B+lFFeb/sm3vizU/2XfhvcePo7iPx7N4W01/EiToqSJqf2WM3W5E+RW8/fwPp2ri/+Cj2v+MfAv7EvxK8ZeBfGmoeDfE3gTwxqfiWymtbKyvYbt7SzmnSCeO5hkBhcoM+X5b/AO3QB75RXwl+wn+0T8UP2y/+CJvhH4qal8Qb3w78TdU8Palq82u6ZpOntG89rcXccavbTQvB5Z8pN4REfjh0r5X8Pf8A
BU79pjxb/wAG3z/tMaZ428HQ/ELR5r86vc33hhJ5LmH+1xZQ/ZkjdIIJI0ff+8gnSTZg7OXoA/ZWvH/25h8YD+y54o/4UKfC6/FTbB/Yn/CR7/7P/wCPiPzt+Od3keds7b9meK/PX45/GH9thP8AglR4b/aY8PfGbwf4NPhf4e6b4ou/CB8I2uqS+K4RZQz3N1e6hN/qZ5kLzeRawIkf3PMc/vK7D/gol/wUx+Munf8ABCnQf2qPhFqnhbwTqF/4e0bVNWstQ0RtUurd7+6tbVxZzPN5CeXNN/y3gm3p/cNAH6MfD3+3/wDhANC/4Sn+z/8AhKP7Pg/tf+zt/wBj+17F87yd/wA/l+Zv2b+cYzXRV8e/Hv8A4KLp+xh/wS98EfFzxJbzeLPGHiDQtEtNI0tpFt38Sa7f20fkwb9uyPe+93b+BEfHofOv267H9qb9lT9hzxf8aLP9oKG88c+BtGm8Q6t4V/4Q3TD4QmjjQyT2sH7n+0U8tS/lzSXb7/LTeg3mgD9B6K/Kj9oX9tz44+Dv+Ddnwn+07oXxO1DTfiVp/hPSdW1HfomlzWGtTXuoWsEjzQyWvybEmcp5Dxp/sPxXtf7YH7RXxO0L/gixZ/HDwv4+vfC3j/Q/hzY+L7maHR9Pu7XV7l7KCadJoZ4X2Jl3/wBSU2b/AOPGygD7sor839Q/4K2at8P/APgnF+yvrmveIvDem/Fb9ozTtPgOv61aGLSdGBtRdajqk0MexHECY2Q703yTx/wB68R/ap/4K6+Lv2C/2gvhd4i8J/tDWH7UHwu8U6omkeNPDcujaYmreHE+T/iYWc2mWsGY+ZPkn3/6tE3v5nmIAfQn/BQ//goF8bP2XP8Agpj+y78L9Mt/Aun/AA3+N3iOXT5rxY57vW5UtXtfPR9+yCFJPtSfcR34PzpX6FV+V/8AwXH/AOUwP/BNb/sdtd/9D0etb/gt9+0n+0J+wJ8U/hd8RfDPxXudD+AXinxXYeHfG8I8NaZd3fhCCaSPfdW08ls/7t4kn5n8zZJs+/5iIgB+nVFfJ/8AwVF+KXjr4Zfs0+F7P4WeN7zRPih428VaP4V8KXQsbK9h1O5u5/3zXKSwunkx2iXV0/khD/ovXZlD9JeBdA1Hwr4Q0/T9T1u+8T6hawLHcapeQQQT3zj+N0hRI0z6IgFAG9RXzp/wU5/boh/4J8/ssXHjSLR4/EXibVtUs/DnhXR5Jfs8eq6vePsgjd/4EHzu567I3714J+2v4b/bS/Zo+EXhn4hfCv4gah8dvGOn6vbf8JX8P5fDekWGi6hZvu8/+z2SFL6Hy5NgTzLqd9jlzv2UAV/+Cpv/AAUF+N37Iv7df7LvgPw3a+A7P4c/Gjx3Z+H728KT3WtGJLqySZMPsghR0unX5BI/yZ3pX6HV+SX/AAcH67rMX7YX/BO/UbHR47jxAfiek8GkT3qwRyXPn6XiB59j7Pn+TeEet39tH9qn9qv/AIJj/tW/AbxR48+K3g/4jfC34xeNYfCOueFLHwbDpUHhd7lx5b2d1ve6n2Rs/wA00gz5PKfvMIAfqhRXxx8WvGH7QXxo/wCCkjfC3w+3in4S/BHw74PTxDeeP9K0iyvbvxFqck6Rpp0M1/BPawoiF3dfIeTEZ5TzENeYf8Euv2/PiR8cv2uP2nvgH4g8aeH/AIiX3wfubaXwp46GnQf6fDdRybEvYbLyYJHgcIH8jyd5877mBQB+i1Ffjj+yv+1D+3J+3J+0B+158K/DvxU+HvhnxF8J/EdrpOl+Jz4Wi/s3S/LmvY/ItrKTz3D3fl75Jrh7ryEg2JvMm9PqD4keM/2tIfiT+zb8GEmW2m8TaNdXfxW+LPhvQobm10y4tLbPkWaXULWsDzzbEEk8D/6zKQfI6UAfd1FfnL+zH+3b8QvA/wDwWt8X/sr+JviDH8YPCUng7/hJ9I126ttPtda0S6jcJNY3X9nwwQSfx/8ALFHT93+Pong39pXxx/wUD/a2+L3gH4b+N9R+FvgD4FahD4e1fxDpGmWV9rWv646O88EJ1CCe2gtbXGx8wPJJI42OiJ+8APtauM+HHxi0D4qa74vsdDvlvpvA+uf8I/q20/Jb3otbW5eINjDbY7qLODw+9ONpr5K/Y3uf2sPGes/Hj4S/GS+vtGsfD14kXgD4uadp2nw32t2c7ybHa22G1N0kapk+QiI8n3D8mfmH/g1C8B+Mte/Ze8c+Jpvih4pn022+KuuR6potxaWd1FrdybGy8y6nuZIDdeY8jo/yTY/djrvegD9hqK/LzRv+Co+n/tt2XjrXtH/ar0L9nTRdN1e80LwdaWmnaRqV7qCW0nkHU9Q/tCGb5J5Ecx2sAgeOPy3eR9/HOfsOf8Fa/jz+1r/wTd+P2pG48CaT8Z/2e0vJbrX7jw/PdaL4qs4bS6mhuYIUmg8uSR7aQeYBJGPkfyMSbEAP1mrwfxN/wv7/AIb58N/2V/wgf/DOf/CNT/215/mf8JD/AGzvk2eVj935ePI6/wDTfPOyvg3/AIJx/E79u7/gob+xv8A/i1o3xc+HvhvSZtWuf+EntNY8PwzXfjOyj1SZJ5i8EOy22RoYIYIUhf8Ac73n/efJ6TrP7Zfx28I/8HDvgX4A+IfFnhu8+F2veB73xbbadpHh/wCxTSL/AKbDCl1NNNPI8iPbMd8bxo+R+7FAH6Q0V8weNPhP8evH/wC0Z461OH41Xfwx+GGkmyg0bTrHw1pV3cXn+hpJdXT3V1HJsTz5CgGz/lg/tXyf/wAEs/2vfj9+3/8At8+ONS8L/FjUfE37KPwxun0ka7qnhvS4LzxvqmzJjtngto9lqm4Sbx8+zyf+e/7sA/U+ivy80b/gqPp/7bdl4617R/2q9C/Z00XTdXvNC8HWlpp2kale6gltJ5B1PUP7Qhm+SeRHMdrAIHjj8t3kffx6L/wQd/4KbeOP+Cgnwj8eaL8TLPTW8f8Awq1pNJvta0m1eDTPEdtJvEN7Cv8At+TNnZ8hARwE37EAPv6iivz9/Y6/ao+If/BYHxB8UvF3gH4max8I/g/4D8UT+DfDM3hvTNLvtX8T3FtDG8+oXL6ja3UCWr+dD5KQIj/f3v2oA/QKivzz/YE/4KK/E7xH+2h8YP2TPjBL4dufjD8ObJ9X8MeLoLDybHxRpj7PJnns0f5J0S5gkdI5E373T5PL8x/D/wBiL9r/APbW/b6+Kv7Tnw30vx98OvC2rfC3x9N4c/4TBfDCTWGjJayXUHk2Gnu7yTSTyQh993O6QRp/y0eT5AD9fK43QfjD4e8T/F/xN4Gs76ObxF4TsdO1TU7ZP+XWG9e6S23MP43+xzHaedgQ9HFfnP8A8Esv+CiHx/u/24fi3+x7+0trWlXnxW8M6U+reGPGekaZBbx6nbFY/n8lESF/knhmj/cp9ydH5TFeV/8ABC/4T+Pr/wD4KZ/twQyfGbxtdTeFviFplrq95d6fp91N4sjgn1FES5d7b9z+7j2bLXyNiSYTGxNgB+k3ib/hf3/DfPhv+yv+ED/4Zz/4Rqf+2vP8z/hIf7Z3ybPKx+78vHkdf+m
f2NYfbLULCk17eCCby0hcunlwzB0/ffI5ym8x7U3oPMQ1u6bpdtoWnwWttDDbWtuiwwxRJsSNE+6qr7VfooAKKKKACvM/2q/wBp3wb+xn+z74n+JvjzVo9I8LeFLNrq7mB+eQ/cjhhT+OaSTZGid3cV6PLKlrGzsyoi/MzN2r8yvCFsP+C7f7b1v4ruo/tP7I/wB1p18O28nNr8T/E0HyPesn8en2n3U/gkfP3w8kaAHoX/AASz/Zi8YfGD4r65+198ddLNr8UviNZix8G+HJyX/wCFdeGcl4LNAfuXU+8yTP1+f/lnvkjr74oooAKKKKACuf8AGPiL/hG7NfJVLjULqQRWVszmMXMzdB8iO4jzy7hG2IHfohrW1C+h0q0kuLqVILeBNzu77EQVzvgiO81F21i+W7t5tQyLay82bZZ22f3fmI6oBM4w75TejyPHvdEDUAX/AAloEmgaMtvcXl5fXLANNcXEu+SeTPzvjomT/AmEToordoooAKKM0ZoAKKKKACiiigAooooAKKKKAArntRiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAr8Af+D5z/AJtd/wC5r/8AcLX7/V+AP/B85/za7/3Nf/uFoA+f/wDgmn/yuSeI/wDsqvxH/wDSbXa/p+r+YH/gmn/yuSeI/wDsqvxH/wDSbXa/p+oAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKAPn/AP4Kw/8AKLP9pb/slXin/wBNF1Xz7/wa6/8AKCz4G/8Acf8A/Uh1KvoL/grD/wAos/2lv+yVeKf/AE0XVfPv/Brr/wAoLPgb/wBx/wD9SHUqAP0CooooAKKKKACiivlX/gqX/wAFAJf2HPg3pNj4T0v/AIS74z/Eq9Ph74d+FlO6TVNTkx++kHX7Nbhg8jcDlE3pv3gA8Z/4Kd/G3xR+2x8d7b9iz4O6pcaZqXiC0TUfi54stM/8UZ4ck/5dUfH/AB+3yHYif885Mn5JDIn2x8B/gd4Z/Zt+DfhnwH4P0u30Xwx4TsU07TbSJNvlxJ6/3nPLu/V3JY8mvF/+CXP/AAT/AIP2B/gTc2+sao3iv4o+Or5/EnxA8VTDdca/q83zSfN/zxj3FI06dXxvdyfqCgAooooAKKK5PxFeyanqEGiaf+7kvGMt/MssiSWlrjBKMiffkfaiAvGdnmOj5h2EAhtJv+E/8SzlGlGj6PceWHhuZE+33Gfnzs4eGMYTG90dzIjohh57KqWl2UWmWMNvCu2ONAi7mLf+PH71XaACiig9KAOf8ZeLNN8A+FdU17XL620vRtFtpb7ULq4fZDaQwoXkmduioiKWJr8hfiV/wd5eD/DXxvvtJ8P/AAl1TxN4Hsr9LWPxE3iD7DPeW3G+5jsmtXH9/Ykkyb/k3+Xk7P1e/aN+Dlt+0Z+z943+H91eTabZ+NtAv9BnuoAGkt0urd4GdP8AaXeT+Ffzd/ET/g2d/au8K/FTUND0TwPY+I/D0F6Le18QQa9ptvb3cJwBP5M1ysyD1XZu/wB+v6G8A+G/D3NFi/8AXivGm4qPs4yqeyi0780ub3eaSe0b/wDbrPJzSti48v1ZH9IH7PH7QHhb9qP4M+H/AIgeCtUh1rwz4nthc2V1Ec7sOY3Rv7rpIjo69UdHB5Fd92/nXgf/AATX/ZAb9g39ijwL8KpNY/t268L285ub0J5azT3N1NdS7B/zzR5nRO+xBXvS/Ln0r8Jzujg6WY16OXTc6EZyUJP7UFJ8r+cbM9Snz8vv7kgOaKKK88oKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAK/AH/AIPnP+bXf+5r/wDcLX7/AFfgD/wfOf8ANrv/AHNf/uFoA+f/APgmn/yuSeI/+yq/Ef8A9Jtdr+n6v5gf+Caf/K5J4j/7Kr8R/wD0m12v6fqACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigD5//wCCsP8Ayiz/AGlv+yVeKf8A00XVfPv/AAa6/wDKCz4G/wDcf/8AUh1KvoL/AIKw/wDKLP8AaW/7JV4p/wDTRdV8+/8ABrr/AMoLPgb/ANx//wBSHUqAP0CooooAKKKKACvi39k/9gnxheft3fEb9o745Xmkan46uJpvDXw+0vTp3u7DwX4dR/kaF3RP9KuuXkcICA7px5joPtKigAooooAKKKKAMvWG1CHRrptPgs7q9SJ/s0VxcG3hkfB2K7qkhROnzbHI5+U9Kr+FNC/4Ry2k824kvLy4bzLm4dpCHkPXaHd9ieiJ8ic1uUUAFFFFABRRRQAUYoooAMUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUVwfxx/aI8E/s2eD7fXviB4q0fwbod1fRaYmparcC1s455siNHmf5Ez/fchfegDvKKoabqltrunwXVtNDc2twizQzRPvSRH+6yt71foAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAK/AH/AIPnP+bXf+5r/wDcLX7/AFfgD/wfOf8ANrv/AHNf/uFoA+f/APgmn/yuSeI/+yq/Ef8A9Jtdr+n6v5gf+Caf/K5J4j/7Kr8R/wD0m12v6fqACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACvz1/wCCiv8AwSA+OX7Z/wC0ndeNvAf7afxW+B3h640+2s4/C+gpqBtI5Y1w837nU7ZN79T+7/Gv0KooA/Cz9tn/AIILftJfBj9jL4ueLtd/4KFfG3xpofhfwXrWr6j4fvo9UFrr1tBZTTSWcu/WHTZOiPG++Nxhz8j9D5p/wRt/4Iu/Hz9rn/gm18OfiB4J/bi+Lnwc8M68dS+x+EdFTUTY6R5OqXUEhjMOqwR/vJIZJjiFPnmfqfnP7Y/8FG/AGsfFj/gnt8ePC3h3TbnV9f8AEnw78QaVplhAP3t5czaZcRwwoP77u6qPXNeNf8G+fwD8Y/szf8EhvhH4J+IHh/UfC3i7RBrP2/Sr9NlxbedrV9PHuH+3HIj/AEcUAfKf/EON+1N/0k3+P3/frV//AJe0f8Q437U3/STf4/f9+tX/APl7X6/UUAfkD/xDjftTf9JN/j9/361f/wCX
tH/EON+1N/0k3+P3/frV/wD5e1+v1FAH5A/8Q437U3/STf4/f9+tX/8Al7R/xDjftTf9JN/j9/361f8A+Xtfr9RQB+QP/EON+1N/0k3+P3/frV//AJe0f8Q437U3/STf4/f9+tX/APl7X6/UUAfkD/xDjftTf9JN/j9/361f/wCXtH/EON+1N/0k3+P3/frV/wD5e1+v1FAH5A/8Q437U3/STf4/f9+tX/8Al7R/xDjftTf9JN/j9/361f8A+Xtfr9RQB+QP/EON+1N/0k3+P3/frV//AJe0f8Q437U3/STf4/f9+tX/APl7X6/UUAfkD/xDjftTf9JN/j9/361f/wCXtH/EON+1N/0k3+P3/frV/wD5e1+v1FAH5A/8Q437U3/STf4/f9+tX/8Al7R/xDjftTf9JN/j9/361f8A+Xtfr9RQB+QP/EON+1N/0k3+P3/frV//AJe0f8Q437U3/STf4/f9+tX/APl7X6/UUAfkD/xDjftTf9JN/j9/361f/wCXtH/EON+1N/0k3+P3/frV/wD5e1+v1FAH5A/8Q437U3/STf4/f9+tX/8Al7R/xDjftTf9JN/j9/361f8A+Xtfr9RQB+QP/EON+1N/0k3+P3/frV//AJe0f8Q437U3/STf4/f9+tX/APl7X6/UUAfkD/xDjftTf9JN/j9/361f/wCXtH/EON+1N/0k3+P3/frV/wD5e1+v1FAH5A/8Q437U3/STf4/f9+tX/8Al7R/xDjftTf9JN/j9/361f8A+Xtfr9RQB+QP/EON+1N/0k3+P3/frV//AJe0f8Q437U3/STf4/f9+tX/APl7X6/UUAfkD/xDjftTf9JN/j9/361f/wCXtH/EON+1N/0k3+P3/frV/wD5e1+v1FAH5A/8Q437U3/STf4/f9+tX/8Al7R/xDjftTf9JN/j9/361f8A+Xtfr9RQB+QP/EON+1N/0k3+P3/frV//AJe0f8Q437U3/STf4/f9+tX/APl7X6/UUAfkD/xDjftTf9JN/j9/361f/wCXtH/EON+1N/0k3+P3/frV/wD5e1+v1FAH5A/8Q437U3/STf4/f9+tX/8Al7R/xDjftTf9JN/j9/361f8A+Xtfr9RQB+QP/EON+1N/0k3+P3/frV//AJe0f8Q437U3/STf4/f9+tX/APl7X6/UUAfkD/xDjftTf9JN/j9/361f/wCXtH/EON+1N/0k3+P3/frV/wD5e1+v1FAH5A/8Q437U3/STf4/f9+tX/8Al7R/xDjftTf9JN/j9/361f8A+Xtfr9RQB+QP/EON+1N/0k3+P3/frV//AJe185/8FPP+Def9q6x/ZlRbH9qj42ftQ3d9rdlaReBtShvfskruzYupHudTngjSHrvkQKM/fSv6CqKAPyv/AOCBP/BGz9oD/gm/oVvffEn46asNFuISzfDLR3XUNHtJHIO+SedX2Sc/OLRI8un+ukTg/qhRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAV+AP/AAfOf82u/wDc1/8AuFr9/q/AH/g+c/5td/7mv/3C0AfP/wDwTT/5XJPEf/ZVfiP/AOk2u1/T9X8wP/BNP/lck8R/9lV+I/8A6Ta7X9P1ABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAV+AP/AAfOf82u/wDc1/8AuFr9/q/AH/g+c/5td/7mv/3C0AfP/wDwTT/5XJPEf/ZVfiP/AOk2u1/T9X8wP/BNP/lck8R/9lV+I/8A6Ta7X9P1ABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAV+AP/AAfOf82u/wDc1/8AuFr9/q/AH/g+c/5td/7mv/3C0AfP/wDwTT/5XJPEf/ZVfiP/AOk2u1/T9X8gWt/tz/8ADtf/AIOOfi58av8AhF/+E0/4Qv4q+N/+JN/aX9m/bPtU+p2X+v8AKl2bftG//VtnZt4zuH3/AP8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wD
vXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFFfgD/xHOf9Wu/+ZI/+9dH/ABHOf9Wu/wDmSP8A710Afv8AUV+AP/Ec5/1a7/5kj/710f8AEc5/1a7/AOZI/wDvXQB+/wBRX4A/8Rzn/Vrv/mSP/vXR/wARzn/Vrv8A5kj/AO9dAH7/AFfgD/wfOf8ANrv/AHNf/uFo/wCI5z/q13/zJH/3rr4A/wCC53/Bc7/h9H/wq7/i13/Ctf8AhWv9rf8AMyf2x/aP277F/wBOtv5ez7H/ALW7zP4dvIB//9k=)", "_____no_output_____" ], [ "**### SVC classifier**", "_____no_output_____" ] ], [ [ "SVMC = SVC(probability=True)", "_____no_output_____" ] ], [ [ "*#Fitting the model*", "_____no_output_____" ] ], [ [ "SVMC.fit(train,Y)", "_____no_output_____" ] ], [ [ "*#Predicting on the test data*", "_____no_output_____" ] ], [ [ "pred = SVMC.predict(xtest)", "_____no_output_____" ], [ "accuracy_score(pred, ytest)", "_____no_output_____" ] ], [ [ "*#Printing the confusion matrix*", "_____no_output_____" ] ], [ [ "confusion_matrix(pred,ytest)", "_____no_output_____" ] ], [ [ "*#Printing the roc auc score*", "_____no_output_____" ] ], [ [ "roc_auc_score(pred,ytest)", "_____no_output_____" ] ], [ [ "**## Random Forest**", "_____no_output_____" ], [ 
"![rf.png](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAABJIAAAKYCAIAAACnxEUUAAB6s0lEQVR42uzdD1RU173w/U3CbeZtSMIATx+S0pTCtCUrZEnbMQ/txVWSmMJVmrAu5GWMJtFXb3QobbW1oiCvcaEimMasmAKmcWme6MNwxTd4FSPFlfBGWtNI4nBDr7RhlERieN4OgoHezEpI5126e/c9d/4xDH/m3/ezuppxODNzztnnd/b+nX3O3rFOp1MAAAAAAELVDewCAAAAACBtAwAAAACQtgEAAAAAaRsAAAAAgLQNAAAAAEDaBgAAAACkbQAAAAAA0jYAAAAAIG0DAAAAAJC2AQAAAABI2wAAAACAtA0AAAAAQNoGAAAAAKRtAAAAAADSNgAAAAAAaRsAAAAAkLYBAAAAAEjbAAAAAIC0DQAAAABA2gYAAAAAIG0DAAAAANI2AAAAAABpGwAAAACQtgEAAAAASNsAAAAAAKRtAAAAAEDaBgAAAAAgbQMAAAAAkLYBAAAAAGkbAAAAAIC0DQAAAABI2wAAAAAApG0AAAAAANI2AAAAACBtAwAAAACQtgEAAAAAaRsAAAAAgLQNAAAAAEDaBgAAAACkbQAAAAAA0jYAAAAAIG0DAAAAAJC2AQAAAABI2wAAAACAtA0AAAAAQNoGAAAAAKRtAAAAAADSNgAA3JhMppiYGJPJxK4AAIC0DQAwrcwqJiYmISGhq6vL5a+VlZXqrwF8+ejoqPp/AABA2gYACERhYaF8MTIysm3bNu2fLly4sGPHDvn63nvvZV8BAEDaBgAIApPJZDQa5ev29nZth9vTTz+tXtfW1s7xipWWlsbExJSWlobCXjIYDDExMRaLhQMGAEDaBgAIgt27d6vX69atky8uXLjQ0NAgX5vN5nnz5s3xWl24cEH9f9DZbDYhxAcffMDRAgAgbQMABEFOTk5eXp583d3dLfuUtF1t69evZy8BAEDaBgAIJu09kJs3b9Z2tdXW1qalpU36DcPDw6WlpQkJCXL8ktLS0uHhYY9L9vT0VFZWzp8/Xw12YjKZtDdn5ufnx8TEtLe3y/s2Y/5DT0+PXODEiROlpaXyxsWYmBiDwVBaWureLzc8PFxXV6cWM5lM6hu0y1RWVqpl5s+fX1dXp11z+b58XV5ePp0BWgAA8CbG6XSyFwAAkyotLVWpWnp6urwtUK/Xv/fee4mJib4/29PTc999942MjGjf1Ov1CQkJNpstLy/v5MmT8s2urq4FCxbIv8phTt566y35wba2tkWLFgkh6urqXnvtNfm+WkwIUV9fn5aWVldXV15eLlfSYDDI1E5+YXd3t8owh4eH8/Pzu7u75TcMDw93d3fL2x3VMmq11a/IrzIajSdPnpRbbTKZRkdH5fvqF+Pj43nODQAwk5wAAPhB5mkuamtr/fmsvMdSr9e3tbU5nU673a7tvsvLy1NLNjU1paenNzU1qXfsdntJSYlMlty/U/tZyWw25+XlnT59Wrvm6enpQoiKigr1ZmNjo0y07Ha7fMdqtRqNRqvVqn5XfspsNqtlbDabHKDFbDZrf3RKewMAgKniJkkAgF/S0tLMZrNLd9mGDRsm/WBPT4/sjKqvr5fdZYmJiRs2bLBarXq93mVhk8nU39+vnYA7MTGxrKxMPlbnz3rW19efPHkyJydHu+ZPPvmkEOLtt99Wb169elUIUVJSoroK582bd/bsWTW2SktLi8z36uvr1TJpaWlygJaGhgZvN3kCADDjSNsAAP46e/as9p8jIyPuE3C7UzcQapMxmSYFcba32267TWZf7s+zSa+88ooQQuZ7WiohPH/+PIcEAGBuxLILAAD+sFgs7v1d27ZtU4+leTMwMCBnNvP/t2QH3TvvvDM6OiqfQ5vq2p44caK3t/e1116T/+zv73dZoLi4+MUXX+zu7s7KyiopKXn88cdlT6BLtvnCCy+oLwEAgLQNABDSNm/eLF+kp6f/4Ac/kMOTyAm4tXckupvq1GrasU8CoMYa8b1YYmLiyZMnq6qqGhoamq9LT09/7rnnXJI323Uev+GWW27hqAAAzA1ukgQATK6urk5lL9u2bdNO1LZ8+XLfn42Pj5/SDzU0NOj1+sbGRpvNJp/DPn36tP/fsHTp0u7u7vT09La2NjWUiHYEFG3mVl9fb7fb5TgoNptt8eLFJ06c0C7jY5SRuZ9hHABA2gYAgGfDw8M7d+6Ur/Py8kwmk3Z4EpvN5nuw+29/+9seb1P0SP7Qxo0bV69e7c90cC7U8CcHDhxYtGjRpDMTyORNjoMix6t87rnn5PtyxEg5cgkAAKRtAICQtm/fPjXlmrpVUtvhtnnzZh+Pn2VmZsrszmX8EovFIlMsLflD8iPKxx9/7P61999/v/ubY2Nj8sVdd92lfd+f7Euml8oPfvADIURzc7M/u0jORgAAAGkbACAILly4IGevlsmJeozNpcNt37593r5h0aJFcgK0hx56SGZuw8PDdXV1S5YscV9Y9nE999xzKg88ceLEsmXLvH15e3u7HAqyp6dneHhYZWvPPPOMWqaurm7Hjh0uH5w/f35dXZ36leHh4cOHD8vtku+sXLlSr9fbbDaTyaRNSoeHhy0WS2VlpfvKvPDCC3JJfwbYBABgCpi6DgDgg3auNu0c1i4TcOv1evUgmTuPD6cZjcaKigqXKbPb2trUF+bl5cl8L/06lzrLfaQQuXryO+Wn8vLy5NRw7nNzq/6xvOvUj6oH6uTc32pmObmYzCrdp/luamqiegUAzB7qFQCALypvKSkpcf+rypGEEFar1cf3WK1W+fCYTKhqa2vlWCDu33z69Gn3JWVm5fKdbW1tKo8qKSlReWNTU5N632g0NjU1Xavw/muuZbfba2tr1WLp6ekVFRXumafNZjObzTJpVMlkY2Oj+5K1tbVyMb1eX1FRwZEDAJhBMbImAwAAAACEJp5tAwAAAADSNgAAAAAAaRsAAAAAkLYBAAAAAEjbAAAAAACkbQAAAABA2gYAAAAAIG0DAAAAANI2AAAAAABpGwAAAACAtA0AAAAAwlAsuwAAEBYcDkd/f39fX19/f/8f/vCHM2fOfPjhh1/96lcXLFjw1a9+1XBdampqUlIS+woAEGFinE4newEAEGqGhoZ6e3v7+vpsNlvfdQMDA/58MD4+XqVw6enp8nVKSgq7FABA2gYAQIDGx8dlN1pfX98f//hH2Z82Pj7uslhcXJzBYLj55pvPnTv37//+71/84he///3vnzlzZnR0VAhx++23JyUl/fnPfx4aGnL/CZ1OJxM5g8GQnp4uXxgMhthY7joBAJC2AQDwX/Vfp7rRent7PSZaqampGRkZBoPhm9/8psFgyMzM1Ol0ZrO5paVFCJGbm7t///7U1NTR0dFnn322trbW4XAIIUwm06ZNm/7617/29/cPDAzYbDb5c9566lT+pm6zNBgMcXFxFBMAgLQNABAVRkdHZYb2hz/8QT2WJvMrrbi4OJmh3X333fJFRkaGTqfTLtPS0mI2m+12u06nq66uXr9+vf
avQ0ND27dvb2xsnJiYiI2NXbNmTWVlZXJyslpgYmJCJm/9/f0yl5Ov3VdGCJGUlKRus5RJY2pqqvbbAAAgbQMAhB+ZF6luNPlYmt1ud19SZmWyGy3jOt8Z0ejo6Lp16w4cOCCEyM7O3r9/f0ZGhscl+/v7q6qqLBaLTAV//vOfr127Nj4+3seXDw4ODgwMyHWWiVx/f7+869I9t9TeZqleU/QAANI2AEAostvt6lE01Y02MTHhslh8fLxMzGSGJruwXLrRfDt+/LjZbB4cHJSdbGvXrp30aTSr1fqLX/zi1KlTQojk5OTy8vI1a9ZM6UftdrtK4d5//325dR7v5IyNjVW3WWofmZvSzwEAQNoGAJgWOey+zM3keCF9fX3u/VGxsbHu3WjTGYvf/042jzo7O9etW2e1WmX/XmVl5fLly2dkP8hcTj0y556synRR7g1mJgAAkLYBAGaYHHa/v79f243mvlhSUlJGRkZmZqa8XVBmazM4KmMAnWwetbS0bNq0SW5CVlZWTU1Nfn7+zO4xlb9phz9xHwaTmQkAAKRtAIApk8PuuwwZ4p5vyKHzZW6mutF8PzA2zbX68Y9/LDvZsrKy9u/fn5WVNZ0vnJiYePHFF7du3SrvcszNza2pqcnOzp7t7FfmbzL7lbdcenzSj5kJAACkbQCAv5EDb6hutN7e3sHBQffFkpOT1dNoqhttzlays7NzxYoVAwMDsbGxlZWVmzdvnqnsxeFwPP3007/85S/l7Z2FhYW7du2a4wFF5LiaajRLWSIeS4GZCQAApG0AEOHUsPtq6uq+vj73ke51Op0abf/uu++WL4KVGIyPj2/atOn555+fqU42b3tm69atjY2NDocjNjZ2+fLl1dXVwR3Z3+FwaIc/UY/PeXxkjpkJAIC0DQAQlmRiJrvR5HghHsc8TElJyczM1M5eHTpPVc1eJ5tHg4ODVVVVBw8enJiY0Ol0a9euLS8vn73bPgNeSVmy2uFPmJkAAEjbAAChzm63y/sbbTab6kZz75bRzl6tbnQMzfvr5qaTzaPe3t6qqqrW1lY5WEhlZWVZWVmID9wvDwCXp+aYmQAASNsAAMExMTGhEjM5e7W3MS1UYpaeni5HdwyXu+a6uroee+wx2cm2du3a6urquU8qurq6qqqqOjs7ZYdkdXX1smXLwms4EDm0jHpkTr32uDAzEwAAaRsAIEBDQ0Py/kZtN5r7YnL2apdutHDsP3E4HFVVVc8+++zExERGRsb+/ftne2hH31pbW7du3SonecvIyKipqSksLAz3tN9l+BPZTef+iCMzEwAAaRsAwEPGohIzNWSIt9mrZW6Wnp6emZk5zdmrQ8ebb765YsWKvr6+IHayecxzDh48uHXr1oGBASFETk5OdXV1bm5uhB1+g4ODcgRLbS7n8ZE5ZiYAANI2AIgWg4ODLrNXy6zAhRwhMDMzUzvsfuS1j0Otk83jGjY2Nm7fvl3ej1pYWFhdXZ2ZmRnZR6kcelSNZimPUmYmAADSNgCIQOPj47LrTE1d3d/f7z57dWxsrBp2X85ebTAYouHhItXJJoRYv359iHSyeUtjnn322draWjlPwLJly7Zs2ZKamhpVx7PD4VDDn8hH5uRrZiYAANI2AAgbKjFTs1d7HNDPZfZqma1F276amJjYunXrzp07JyYmUlNT9+/fHxY3Hw4NDW3fvr2xsVHOE7BmzZotW7aE2jwBQTny3Yc/cb88wcwEAEDaBgBzanR0VI4Xoh5F8zi0g3b2atWNRivfarWuWLFCjvZRVlZWU1MTXjfU9ff3V1VVWSwWOXTHz3/+8/Xr1zOMvnuKq6YlUI/MeRz4lJkJAIC0DQCmS46/p7rRZLbmsfWpZq+Ww+5nZGRE2010/uzMbdu2bd++Pbw62bwln7/4xS9OnTole1C3bNmyatUqBueY9HqHSuHkCCh9fX3eHpljZgIAIG0DAM/sdntfX5929ur+/n6Ps1dr+9BCefbqkMpzwrqTzaPOzs5169bJjTIYDDU1NcXFxZT1VJN57fAn6rXHR+bkzATa2yyZmQAAaRsARDI5voIaMsTbsPtyoDyZm4Xd7NWh0y5XnWwpKSn79+9fuHBhJG1gS0vLpk2b5HzWRqNx165dkTdPwNwbHByUI1hqhz/xNjOBx9ss6fwEQNoGAGFmaGiot7dXO3u1bGS7kNfyMzIywn326tDR19f32GOPdXd3CyGWL1++e/fuiHy6b2Ji4sUXX9y6dasch2bhwoW7du3KysriAJhZdrtdjXqinprzOPaPvG9ZhrD2NkseLgVA2gYAIWF8fFx2o2mHDPE4rp0aL0TOXm0wGOhGm9lM5tlnn62qqnI4HCkpKQ0NDQUFBZG9yQ6H4+mnn/7lL38pO4XkPAGMlDg3Ia9Gs1SPz3lc2GVmAtk1R+ADIG0DgNklRzVQ3WjeRjhQs1fLGx1ltsY9VLOnr69vxYoVb775ZmR3snk0Ojq6devWxsZGOclbWVlZeXk5icHcXzVwGf7E24iv7jMTqNss2Y0ASNsAIJDWsOxG085e7d4IU7NXGwyGb37zm7IbjQHo5rK5HG2dbN4uKGzduvXgwYMTExPx8fE//elP165dyx16QSdnJujr69MOf+LxkTntzATqNkvumgZA2gYA/4XMymQ3mnwszeOw+3KUcJduNPZeEEtNdbIVFxf/+te/jvJEpbe3t6qqqrW1VR6r5eXlZWVldPOGGnlJSN5pKfvtBwYGfM9MoB3KkpkJAJC2AQgnx48f7+7ufuqpp6b6QTnMQG9v7x//+EfVjeY+/LdOp1PDhNx9993MXh1qnn/++U2bNo2PjyclJTU0NDAUvtLZ2bl169bOzk45T8CWLVuWLVvGbglxcrBZ9cicej1nMxPs3LmzrKyMmUUAkLYBmMn2zS9+8Yvnn38+Pz//1VdfnbQlJHMzNXu1xzuUkpOTMzMz1bD7zF4dygYGBlasWCHTkuLi4oaGBjof3LW2tlZVVfX29gohsrKyampq8vPz2S1hx+PwJx4HPZrmzAR/93d/l5KS0tTUlJ2dzW4HQNoGYLqsVutjjz0mG6OpqakXL15UfxoaGlIjOqpuNPdvcJm9Wr7mGnO4oJPNfxMTEwcPHty6devAwIAQIjc3d9euXUajkT0T7oaGhlymJejv7/d4X7efMxMMDAx87Wtfk4/YrV+/vrq6mntrAZC2AQjcs88+u2nTJu24INXV1WrIEI9XoLWzV8vH0qZ5KxGChU62wDgcjsbGxq1bt8p+5uLi4pqaGpdxC+UlD6btDmtyZgJJjoDibahb95kJ7Hb7unXr1F+zsrJefvnlzMxM9ioA0jYAUzM4OLhixYpTp075WEbNXq3tRmMctsigOtni4+P37NnDw1pTNTo6+uyzz9bW1sp5ApYvX15dXa3mCZDB9e677/L0ZoSZmJjQDn+iXnucmUBLp9NVV1evX7+efQiAtA2Av1pbW//pn/7J4y1ABQUFixcvlv1pzFUVqRm72Ww+fvy4LO6Ghgb6SwM2NDS0devWF198cWJiQqfTrV+//uc//7ndbr/rr
rsmJiaKi4sPHz7MXoqSsJIjWMpcrqura2hoyH2x3Nzc/fv386AvANI2AH7p7++3WCxtbW1yqHetmpqajRs3sosi1YEDB9atWzc6OhofH7979+7ly5ezT2YkoKqqqiwWi+yjTk1NtVqt8k8vv/wyPZlR6JFHHmlpaXF5Mysrq6ioaNmyZaRtAEjbAEzN4OBgS0vLkSNHurq65DvLly/fv38/eyYiy5pOtllltVrXrVsnnxVU4uLi3n33XZrp0eZb3/qWSt1ltmYymVwegARA2kbaBiCQNv3x48ePHDnicDhOnz7NDokwqpMtLi5uz549dLLNnvnz53d3d2vfyc7OPn36NAMJRhW9Xp+UlGQymZYuXZqRkcEOAUDaBmCGjY+PM3Z/JLHb7StWrJCdbDxXM9tOnTr14IMPur+/efPm6upq9k/0sFqtWVlZ7AcApG0AAA8ZmssI/i0tLWaz2W63x8XF1dTUlJWVsZdmlfbWOK3Y2NjXX389JyeHXQQAIG0DgOh14MCBI0eOHDt2TKVwZrNZDopAJ9vcGBwcfPHFF9U/P//8c+0cX1/96lefeuop9hIAgLQNAKLU+Pj417/+9aGhoT179pSVldHJBgAAaRsAILRUVVVt27ZNjlu4cOHC1tZWIUROTs7LL79MJxsAAKRtAIAgGxgYuOuuuxwOh3pHp9NVV1evXbuW0QsBAAhN1NAAEF2qqqq0OZsQYu3atevXr2fPAAAQsuhtA4Ao8uabb373u991eVOn0507d47ZogAACFk3sAsAIHqsW7fO/U2Hw7FixYqJiQn2DwAAoYmbJAEgWhw8ePDNN99U/0xJSUlNTTUYDOnp6ampqQ6Hg5nTAQAITdwkCQDR4umnn9bpdDJVS01N1el07BMAAEjbAAAAAADTxbNtAAAAAEDaBgAAAAAgbQMAAAAA0jYAAAAAAGkbAAAAAIC0DQAAAABI2wAAAAAApG0AAAAAQNoGAAAAACBtAwAAAACQtgEAAAAAaRsAAAAAgLQNAAAAAEjbAAAAAACkbQAAAAAA0jYAAAAAIG0DAAAAAJC2AQAAAABpGwAAAACAtA0A5sTw8HBpaWlCQkJMTExCQkJlZSX7BAAAkLZFtZ6eHpPJFHOdwWCwWCzsE8C3+fPnx/yH0tJS9wXq6urkX/fu3RvA9y9durShoWFkZEQIMTIysmPHDvY5MEu4SgKEhRMnTqjKd/78+T09PewT0raQO0ZV69BgMHR1dbnXN6qyCay6uu+++5qbm+U/bTbbgQMHOLYA37q7u9XrhoYG99zstddeky+uXr0aQNS3t7ezkwGJqyQA7dWurq7Fixeryre7u5uKkrQt5PT29qrXNpvtoYceclng/PnzqrIJ4PufeeaZwD4IQNm1a9csRb3VanU6nadPn56lNTcYDLKWpZsdIYurJADt1XXr1rGTSdvCzMjIyMy2rt5++235Ij093X5dbW3tbKy5xWJR12A4dhEx8vLyZBXlfmVx+oxG47x584QQOTk5s7T+NptNvvjggw8oTYQFrpIAUdheVZdvSkpKnE6nzWYrLi6ejTUvLS310bcP0rYptA6FELN0E2NJSUnidbKZOONUo1A1E4EIsHz5cvli27ZtM/WdqusgMTGRPQy41INcJQGiub36+OOPCyHSrpuN779w4YLLC5C2Tdn999+fnp4uhGhvb5/BGkvdHHLbbbdxSAFTZTKZZjAw5UU+FZXt7e3uT+x0dXWZTCb5hIB8LNv9nrHh4eG9e/dqF0tISDCZTCdOnFDLyAv/6p/l5eUu1xfVSEUuIzRUVlbK900mk3pT/pDsS6+rq5P/1F6qvHDhQmlpqeptMBgMlZWVw8PDLmtusVjU77qvM7hKwlUSIArbq9rvufXWW9nJYckZBdQti7W1tU1NTepKhlpAezvHlL5ZBpU7eZeI0+mUN0wajUb5vl6vN5vN6q+KzWarra1VF1fk/ZbaJdVqu7PZbE6n02q1qp+Q76hv1uv1LmvV2Ngo32lqarJarfJ39Xq9dsWampq061NSUtLW1uay2na7vbGxUW2dyzoDvmmDTh2T8s4NSR2BtbW1/n+t9rjVUl9SUVHhcQGj0Wi32yeNbu1X+bhQ6rIy2hOOt/fVx81ms/tXNTU1qVjWcolc7Wej7VQPP8NNHdunT5+eZrh5O94aGxsDqE1KSkrUQa7X612W9BaSZrNZLqA+6/L9qpJSa6VqTLPZbLfb1VY0NTVpGwba9TEajdqN0m5dSUmJt3UGQqG96u0WLb1er13GbDarKEtPT6+oqNDWiWoFKioqVEzJ0NAu6e2ckJ6e7tIENRqN2m9ua2tzXysZXLKaa2trk6unvmpKzeyKigq1dS7rHE5n8mgLA+2ZXZVrwGmbt0abrAutVqu3akZbN/i+41F+lY+H5eQC2k3Q1sQe31ffpq2T1AJ2u91bw1fVjnIxbdy6tzIBP8PHarXa7XZ1KKrrDrPRjlQVxqRHuO8LXvJ07zEEtN8TWNrmHlCqlem7OvSxGMcb4Tb3V0n8rE1m6iqJ+0e8va9qRuN1AV/c4SoJwqK96uNZ0yldFvTRhaDX62VoeAt59VvaBq3Hbde+r77NJdZUledPM9tqtXrcuimd60JEdN0k+c477wghNm7cKP8Z2DDHLq0lj+/fcsstw8PDRUVF3lKyJUuWqPt9L1++7OMn5LA/mZmZ3ha44447Al7/5uZm98GIfvSjH3kbE6yhoUE9HdvS0qIdmgwI2NjYWGJiogrMp59+ejrfVl9f7zEvWr16tRBi06ZNqvmlrlOo+qChoUEFprpwrlpp2pTv/PnzQoizZ896bBTW19dPc5+UlJRoxzeqqalRa6VqI7U+NptNBqaKXNW4tNvtTU1NPtrEiDY9PT3FxcWyEdPc3DzNJ0+8PRgjHxnwszbx/cB2eXm5vBPY21WS6Tyc032dy5t79+71NoFBd3d3VVWV2pMNDQ0cUQj99qq3hqI8D/T09CxZssTj0JQjIyNFRUXqnz4eKB0ZGXnmmWeEEN/5znem1GD2k3us+d/MrqmpiZjx3qMrbRsdHRVCrFy5Uh6p2iZaYPr7+z022ubNm9fS0qIOptraWtmEOn36tMr4Vdv0lltukb26qjdM246UNcqiRYu8XYqY5uOker1e3tFhtVrvuuuunp4eNQFdSUmJ7Pew2Wyqvty8ebN88corr7hcNJW3esbHx3PCRQBmMDC96erqUufu3bt3y1ETEhMT6+vrVWC+9dZb8sWVK1csFsuiRYvU4zoy8ZMGBwdnb1cYjUaLxaId30hF5caNG9XjcKtXr1Z3Z73xxhtCiDvvvFOdN1paWoaHhxMTE00mU39/PwcY5vgqif+1SdCvksivUqPq+Xlxh6skCJf2alpamsfbr65cueL/ZUF5OSY9Pb22tlZ1wWkjWg6rvn37do/nhOlXQ0ajUTtQrf/NbHUiUltntVq9dZWTtoWcGayxfFBZTV5e3oYNG2TLLycnR4XHb37zG/li3rx5V65cqa+vV0NvJSYmPvroo+qrZnUcnoMHDy5atEiuRmJioqqH9Hr9r371
K5kTpqWlvfjiiyqA5fqodNFiscgBD9LS0jZs2MCgzJh+YLa0tPi4aBew3/3ud9rXdRpq6lLt1cQTJ06UlpZq5ylWf5rVUeweeeQRl2xTvR4YGNCutqza1VniwQcfVDXWmjVrvv71r5eWls7GgIHgKsmk/K9NgnuVpKKiYsOGDWpUPf8v7nCVBJHRXvXzsqB8s7+/f8OGDWq89LS0NFVhzeqRr9frm5ubtQPV+t/MVldSfvnLX/b09MgWb319vQz88BIbtTXWzp075YQY1dXVd9111+zVWHJQOG2rS9VY6s3h4eGOjo7W1tbu7m73Dt/Lly/P0gitsh9P+081IFhCQsK+ffs8fkSuz6OPPir7rEdGRhYvXpyenl5SUrJy5crZW1VET2Du3Llz5cqVszo0a3l5ue8FSktLQ/AOKN+rlJiY+Prrr6v7RkZGRhquM5vN0++RQOQ1B2UUtLS0bNiw4Tvf+c7MTpntf22irpIcP3787Nmz7nctzupVkn/4h3/wcXFH+8+EhASZ0cn1kVdJ5Dtr1qzZtGmTyWR69NFHZ2/yA9BenfH2qvtlQfVPl8uC6nVHR8crr7zy1ltvudx5OKtzU917770uzUv/m9lPPvmkPNd1d3dnZWUZjcZVq1YVFxeH48i3sVFeY42MjOzbt29WE+7263wsMDw8nJ+fH2rPidlsNt/t2pycnKamptLSUhm3Npttx3VNTU3aMc2BwAKzo6MjKOsgnyPdu3evSpDMZnNqaqqfyV6wqJ7JefPm9ff3WyyW1tZWdQ21oaEhNTU1HK8sIgKukkxam4TsVRLfq81VEkRYe3XSGOzp6bnvvvtC7TmxSZvZGzZsuHr1qnpgVT7RumnTptdff32WplmePTdEc40lb3h44YUXgrIC6m7gH/3oRzJn0+v1FRUVtdeF7E23er1ePdtqMpnee+897RwA8jFQbsrC9ANT+9zLzKZkkreRf2X/s/buC3krhRTwT7vMruY+2ZoP2sur3sYW3759u/YjJpPJYrHYbDZ1c4jq+gC0zUGZbwTlKomqTVyuktT+h5DddepMIq+SaOcAkG1f7bV/INzbq+qyoMrZ5BNuko+hI4NL2zTdvn27yxwAIyMj991335TqYtK2kKixtE9bzsbhUlFR4bGZdfbsWbmA9q7i7du3y9ah9tm2qfr44489vp5ScLrMp6FcuXJF20+dmJi4evXqs2fPah911d5VAgQcmC6ZRk9PT2lpqZyEOiEhweNM0779j//xP9Tr/Px8eY+7TKLkY2xyqmut/v5+eX9IT0/PpN3Ir7322vB16pYSNUJPd3e3HArswoULU+1dT0xMVOeTZcuWaefO7urqqqysTEhIkL+4d+/e/Px8i8Ui98xtt93mbfA9YLavkvhfm8z4VZKrV68GdolkShd3uEqCCGiv+n9ZUPvY54EDB1Sc3n///QH/ujY8tWE7s81sKS0tbfv27f39/eqS0MjIiBzuiLQtzGqsAwcOuP/1xIkTBoMhJiYmPz8/gC//wQ9+IF/s2LFj79696tDs6enZu3evwWBwj7133nlHLnbixAk57r8Psumm2p233HKL+tOWLVtUG27ZsmVTWu2///u/V63M0tJS1fq8cOGCxWLJz88vLS1VFVVpaalqQd5xxx0Mn4WZDUztbQ8XLlzIyspqaGiQ1cbIyMiOHTuWLl061cpPTcck73GXA40kJSUtXry4oaFB3QqvWpyyKRYTE5OVlaWusLhQ1xrb29uTrlNhUlhYqBZbs2ZNTExMenp6AE8Qbd26VVUzixcvVkOkLFiwYMeOHSMjI3IekatXr7a3ty9ZsiQpKUlul1rn6dSsiMKrJPKELw8zg8EQQPeR/7XJTF0lUUOGNDQ0yJs+urq6tBdrZvbiDldJECLtVdkkk9E61adU/L8sqCWvzg8PD1sslp07d/r4/rfeeksFtXxHez92VVWVjOLKykpvs25Mv5ltMBgqKyvVCnzve98L40MhqqYvdJ8J2v02DDmeqbbPd9L5o9WS2pn7bDabx9n93Bee9ESvJgbwOGGi+uukWZP7dNseDwDf66P2ho9uce1838CkgeN+wLgEphxuWA3G7c98o97mufYx/6923mrt9N9a6k1tsLe1tbkspp3F2GNAqbugPU637XEOUG/z/6r00sdk4i5zBINw00aEy6FeW1vrcVJdH1PTegs3P2sT38e2y0+7x6/6nkkfLnCfbtvj+WfS9fHWfvBnXwEz2F51nwXbR5PV22HvXoV5bNTZ7Xb/cwqP0aGax/5/j7dzy5Sa2d4WUFOEM912+F3A0Lp8+bK8W306N+ympaVpxwt2p8YO3r17t7fjyeWdnJwc91pQ9bM999xzHptrU73+9+KLL/rIANUdkt7mZzObzYyjBX/II1yv17sPjbVhwwYVfXq9PjMzc968eWfPnpWHVk5OzqSxqbrLXA7UxMTEkydPymdRVIjp9fq8vLzGxsbf//73arH33nvPbDarWMjLy2tra1MjC2vvpFq0aFFbW5sKNKPRWFZWpv568uRJs9msNtZsNttstvXr18t3tKunfkudHLS2b99++vRp7SrJtaqoqLDZbDIwi4uLGxsbXa46NTY2nj17NhyHzMJcdrhpXb16taSkRDtFUmA3/vlZm/zsZz/zfZVE6yc/+YnLOyqIVFi5fIn2wTN//OxnP/N9cce908Cl2l25ciWHFuagvSq7vGTH9VSPc20V5vtShXwGNTEx0WMy5nGt1Jz+7oulpaV5/LmpjungfzPb2zL19fXhVzNGw9ULdQXa4wWwtrY2VaIu2bw8cU/a26aaa42Nje4XAyoqKrSJk9Fo1M6srSb+U41IWcfId+Q/5XV07Uzc2iagy7Zo245yYkEVHmp6RHVhRvUtuLDb7S6NPzm+v5qp0H3T1GSpXC3DHFAhwK4AphQyHq8ua6+SuJ/GJ72Kr2oZbT+z/7WJqtpcrpKouttllVxqOm19qqpONWqC3W5XnQmqjrZarWqHqJrRhbeLO2oHum+aXICDDXPZXpXZjhzQznecarun3A97H5cFXdZBHfPp6elms1n12Ls8xeoSjC6hUVtbq36rpKRERrGMa23NrnI5l+bulJrZLpsmV9tb4Ie4GB+9h8jPz29vb8/Lyzt58iR7AwgRe/fuXbNmjazYGNQemJtwY3IXINR0dXUtWLBAr9e/9957+/btKy8vp8ka2W5gFwAIIz09PZs2bZJX1MjZgFlSV1cnRzhQl0jI2YBQs3z5ciHEwYMHuQ2etA0AQi5nk/PGGI1GLigCc2bnzp2zMVMOgIDt3btXDqHnMiMFSNsAILRyNi4uArNnw4YN6rEQo9E4MjKyZMkSNYI2gKCTEx62t7fLjvHy8nL1T0KVtA0AgmZ4eHjVqlXkbMAcy8nJeeSRR+TrsbExdggQIrwN6A3SNgAIppaWlu7ubjl1r5xLWgpgFmAAvplMprq6OjlJ7oULFw4fPizfV/PNAAg6i8XiPrGbHEly3rx57B/StmjR1dUlW4Tt7e2qxzkhIYE9AwSLtymSAMy4hISE8vLy9PT0mJiY9PR
0ecXEbDbTFgSAIIplFwAIfabr2A/AHKiurk5NTX3hhRdsNpu8fr98+XICEAhl8uImd05GNuZtAwAAAICQxk2SAAAAAEDaBgAAAAAgbQMAAAAA0jYAAAAAAGkbAAAAAIC0DQAAAABI2wAAAAAApG0AAAAAEOli2QUhoru7++jRo11dXQMDA/KdlJQUo9FYVFSUk5PD/gGIRIAYZP8ABGPUinE6neyF4BocHDSbzcePH/e2QG5u7v79+1NTU9lXAJEIEIPsK4BgJG3DXDtw4MC6detGR0dF/JfEgiIx7/siOU18QSc+dYihC+L870Vnsxi+HBcXV1NTU1ZWxh4DiESAGGSPAQQjaRvmzmOPPXbw4MFrrxYUiTXPiJtv87CQ4y9i3ybR8T+FEAUFBceOHWO/AUQiQAyy3wCCMarc+NRTT7EXgnUxo7q6OibuNrH2BVGyQXxB53m52C+Ie/9BGLLEv77xp389l5SUdO+997L3ACIRIAbZewDBGD3obQuOwcHBe+65Z3R0VGx4SXzvYb8+031SbDPFxcW9++673EwMEIkAMUgMAgRj9GACgOAwm83XAmNBkb+BIYQw5ovFT46Pj69YsYIdCBCJADHIDgQIxuhBb1sQdHd3z58/P0b/353Pv+X5pmFvHH+J+en3nP/7/Y6OjoULF7IngelHooj/kvjVWSIRCK8YFD/5rvj/PiAGASrE6EFvWxAcPXpUCOHM+cepBYYQQnez874lQojm5mZ2IzAjkSgWFBGJQNjFoLj/UWIQoEIkbcPsOnXq1LX/zPt+IB/+1gNCiK6uLnYjQCQCxCC7ESAYSdswW4aGhq79JzktkA8nfVkIMTo6ym4EiEQgEmLwy98I5MO3p8lBFNiNQJCDkQqRtC2CDQwMXPvP/xEXyIcT7/jP6AJAJALhHoNfujOQD9+aKIQYHx9nNwJBDkYqRNK2CJaSknLtP586Avnw8GUhRFJSErsRIBKBSIjB69E0ZX+5KoTQ6XTsRiDIwUiFSNoW+bHx4Z8C+bD9QyFEfHw8uxEgEoFIiMEP/i2QDw/+6T+/AUAQg5EKkbQtgmVnZ1/7z/nfB/Lhnk4hRG5uLrsRIBIBYpDdCBCMpG2YLUVFRdf+09ksHH+Z2ic/dYg3DgshSkpK2I0AkQgQg+xGgGAkbcNsycnJyc3NFcOXxb5NU/vk/9ouBv+UnZ3NJQ2ASASIQWIQIBijR4zT6WQvzL2BgYF77rlnfHxcbLYIY75fn+ntitlSeNPfxZ47dy4jI4N9CBCJQNTG4Bdib7RarcQgQIUYPW586qmn2AtzLz4+/tZbb3311VfFv74h7rxLzj/jOzDE0/+X+GSspqbmoYceYgcCRCIQzTG4c+dOYhCgQiRtw6wbHx+32+1dXV1jfx4S/+8/i7Er4u7vidgveFj0U4d4eWvM3vXik7Hbb7/90Ucf/cY3vnHDDdzdCgQtEr/xjW/ceOONN9xwg8FgIBgBYhAgGAnGOcBNknPKbrcfP3786NGjnZ2d2dnZRUVFdrt9+/bt4+PjMbenOb//f4pvPSCSviwS7xDDl4X9Q9HTGfPGYefgn3Q63caNG2+//fbDhw9brdbCwsKlS5dyGzEw95FYXV29bNmy48ePNzc3E4zAnMWgeOOwIAYBKsRo5sTsu3Tp0p49exYuXBgfH28ymV5++eWxsTH114sXL/o+xLOzs8+fP6/9tt27dxuNxpSUlLVr1547d449DMx9JBKMADEIEIwE45yht20W9ff3WyyWtra2/v7+wsLChx9+eOHChTqdzuPCp06dam5u7uzsHB8fHxoaSk5OjouLy83NLSkpyc3NjY2N9fb9L730khBi6dKly5YtMxgM7HZgjiORYASIQYBgJBhnG2nbzOvu7j569KjFYnE4HMXFxQ8//HBOTo63g3tGWK3WQ4cOWSyWpKSkJ554ori4+G+z3QNE4hxGIsEIEIMAwUgwkraFtImJic7Ozra2tpaWlvj4+MLCwqKioqysrDlejc7OzkOHDrW2tmZmZi5durSwsDApKYnSAZE492tCMIIYJAYBgpFgJG0LFQ6H4+TJk21tba2trRkZGXl5eSaTKeh9wRMTE8ePHz9y5Mjx48dzc3OLiooKCwvj4uIoLxCJBCNADBKDIBgJRtK2aDE+Pt7a2nr06NFTp04ZjcaioqKCgoIQ7PmV63nkyJHOzs6CggK5nrPdJw4QiQQjiEFiECAYCUbStqAZGhqSg5y++eabCxculMdZfHx86K+53W5vbW09dOhQb29vYWHhE088kZOTQ4GCSCQYAWKQGATBSDCStkWI/v5+eQGjr68vPz+/qKgoPz/f26g7IW5wcLClpeWll16y2+0mk2np0qVBucsZiPJIJBhBDBKDAMFIMJK2zQyr1SpDwm63yz5cH6OdhmPAHzx48NChQ0KIJ554IkTuewaiLRIJRhCDxCBAMBKMpG2B6Orqko9LCiFMJlNeXl5k99hardaXXnqppaUlOTl56dKlDM8KIpFgBIhBYhAEI8FI2haKHA5HV1dXc3Nza2trSkpKUVFRcXFxRkZGVO0ENTxrVlZWSUkJw7OCSCQYQQwSg8QgCEaCkbQt+MbHx48fP97W1nb8+PGsrKyHH36YbN59eNbi4uLwvVsaRCLBCBCDxCAIRoKRtC0syYFrjh492tXVlZOT8/DDD3MhzeOJo7W1tbm5uaurq6CgoKSkJD8/n+FZQSQSjCAGiUFiEAQjwUjaNovkMDVHjx61Wq0FBQWLFy8uKChgmj8/TyUvvfRSX18fw7OCSCQYQQwSg8QgCEaCkbRt5vX19bW0tBw5cmRwcLCwsLCkpCQnJ4ebHAI7uVgslkOHDjE8K4hEghHEIDFIDIJgJBhJ22ZAV1dXe3u7xWIRQsgxUrkkNlP6+/tfeukli8USGxu7dOlShmcFkUgwghgkBolBEIwEI2mbvyYmJjo7O+UDi0lJSTIkuAA2e7q7uw8dOtTS0pKSklJSUsJTsyASCUYQg8QgMQiCkWAkbfPM4XCcPHnyyJEjJ0+ezMjIkM9xcrlrLp06daq5ufn48eOZmZkyQuLj49kt0YZIJBhBDIIYBMFIMJK2uRodHZWDgZ46dSo7O7ukpKSgoCA5OZnDNOgnKTk8q5xeg3u1Ix6RSDCCGAQxCIKRYCRtE7K78+jRo6dOnRq87r/9t//26aefOhyO7373uytXriwsLGTUnZAyPj6+e/ful19++eLFizqdbnx8POW67Oxs7uQOdyoYh4aGBgYGbrrpps8///xrX/uayWTasGEDkRhq3njjjWeeeeaNN964evXqX//61zvuuOPOO+8kEiMjBmWFSAwSg6BpimgJRmdou3TpUkFBgY/1z83NvXjxohMUGShZUF6UKWVKeYGSRYQWWUj3th04cGDdunWjo6PJyckmk+mBBx4wGAw6nc7hcPT39//2t789ePDg4OBgXFxcTU1NWVkZFxJCp8hE/JfEgiIx7/siOU18QSc+dYihC+L870Vnsxi+TJGFb8kSjEQiKFNwzqRkKVlOnvS2/adly5bJNTSZTCMjIx
6XGRsbW7VqlVysoKCAywkhUmTXouLQ+6J11MP/LB+KBx+nyMK0ZAlGIhGUKThnUrKULCfPoAjRtG3//v1CiPj4+MOHD0+68LFjx+Tjnnv27OEADW6RxcTdJja85DkqtP/bbBHxX6LIwqhkCUYiEZQpOGdSspQsJ88gCsWbJAcHB++5557R0dHDhw8XFxf785Hjx4//8Ic/jIuLe/fdd1NTU+kRDlaRXQuM7z3s12e6T4ptJoosXEqWYCQSQZmCcyYlS8ly8gyiG0Jwncxm8+joqMlk8jMwZJ9mWVnZ+Pj4ihUrOFKDVWRiQZG/gSGEMOaLxU9SZGFRsgQjkQjKFJwzKVlKlpNncIVcb1t3d/f8+fOTk5PPnz8/pUnxxsfH77nnnoGBgY6OjoULF3K8znGRifgviV+dFTffNoVPOv4S89PvOf/3+xRZKJcswUgkgjIF50xKlpLl5Bl0IdfbdvToUfms51QnMo+Li3viiSeEEM3NzRyvc19kYkHR1AJDCKG72XnfEoosxEuWYCQSQZmCcyYlS8ly8iRtc3Xq1CkhxAMPPBDAZ/Pz84UQXV1dHK9z6W87fN73A/nwtx6gyEIWwRiO5UUkUqaUKedMULIUWUSePEMubRsaGhJCZGRkBPBZg8Egn0HkeJ1LAwMD1/6TnBbIh5O+LIQYHR1lN4YgGYwyrAjGcCkvIjECy/TL36BMw6i8aMBQslopKSlEIifPmRJyz7bFxMQIIT777LPY2NiAPx7Kc4hHnq985SvXapp9/yYS7wjk84XxFFloktF06dIlWesQjGFRXkRiBJbpEbu4MZYyDZfyogFDyVKynDxnScj1tskGYmAXnGRmrNPpOF7n0t/OYp86Avnw8GUhRFJSErsxBMlgdDgcBGMYlReRGIFler10KNNwKS8aMJSslvwUkcjJM5LTtt7e3gA+29fX959FhTmOjQ//FMiH7R/KySvZjSFbsjKsCEYiEUEr0w/+jTINo/KiAUPJuqdtRCInz8hM27Kzs4UQv/3tbwP4rHwAMTc3l+N1LhmNxmv/Of/7QD7c00mRhSyCMRzLi0ikTClTzpmgZCmyiDx5hlzaVlRUJIQ4ePDg+Pj4lD7ocDgOHTokhCgpKeF4nfsiE53NwvGXqX3yU4d44zBFFuIlSzASiaBMwTmTkqVkOXmStrnKycnJzc0dHBxct27dlD5YVVXV19eXnZ3NJY2gFJkYviz2bZraJ//XdjH4J4osxEuWYCQSQZmCcyYlS8ly8gy6mBAcJmVgYOCee+4ZHx8/duxYQUGBPx/p7Ox88MEHY2Njz507F9gIrZiRIhObLcKY79dnertithTe9HcUWXiULMFIJCJYZZqZec9f/kKZcs4EJYtoP3ne+NRTT4XaOsXHx996662vvvrq66+/npmZOem0UZ2dnUuWLPn4449ramoeeughjtQgFpn41zfEnXeJ29MmDQzxy5Xi38cosnApWYKRSARlCs6ZlCwly8kzmJyhSvVOlpWVjY2NeVzmk08+Wb9+vRyAPiMjo7Oz04kQKDKx+Elh+VC0jnr43z8PicIfixtulGOZfPbZZ+y3MCpZgjEiI/Gb3/wm5RVhZUoMhtc5kxikNgQVoj9CNG376KOPUlNT//Ef/zEuLk4IYTAYtmzZcubMmZGREafTOTIycubMmerqatmDqdPpdu3aderUqZSUlPPnz3OMBsX58+dTUlJWrlx5883XikzcniZMG0Vth/iftmshcej9a68frRQp3xBC3HSTbsmSJcnJyRcvXmTXhThvwfjnP/+ZYAynSDz0vqdIvGnXrl0HDhwgGMMiBv0vU2IwFGLQ/wYMMRjWtSFNUyrEqE7bPvroo4yMjN27dzudzosXL/p+KDA7O1vFw+HDh1NSUi5dusSRGpTAOHz48JSKbMuWLZmZmd6uV4FgxNxEIsFIDIIYBJFIMIZ+MIZc2jY2NpaVlbV+/Xrtmx0dHatWrTIYDPIKR1xcnMFgWLVqVUdHh8tddg0NDRkZGR999BHH65w5ffq0CgxvRabT6TwWmclkys/P51bJ0ORPMHorWYIxNCPRx8mTYAzTGKRCJAZBJCJKglGEWmDk5OSsXbt2Ol+ya9cuo9HIJas5C4zk5OTXX389sI9/8skn2dnZ0yxxEIyYZiQSjMQgiEEQiQRjiAdjCKVtn3322cKFC5ctWzb9r1q7dm1OTg7hMTeBcfr06el8ibxZvKGhgf1JMCKIkUgwEoMgBkEkEoyhHIwidAKj+LqZ6pFcvnz5DH4bZi8wnE7nu+++Gx8f39HRwV4lGBHESCQYiUEQgyASCcaQDcZQSduWLVu2cOHCGTyUZbDNyAUSzHZgOJ3OV199NSkpidGWCEYENxIJRmIQxCCIRIIxNIMxJNK2Weo4/uSTT6Z/OzLmJjCcTueePXsMBoMcVh4EI4IViQQjMQhiEEQiwRiCwRj8tG3jxo2z95imHPynpqaGA3qmdHR0zFJgOJ3ONWvW5Obmcv8AwYjgRiLBSAyCGCQSiUSCMdSCMchp2+7du2d7UFQ51caePXs4rKdPTj9y5syZWfr+zz77LD8/f9WqVexqghFBjESCkRgEMUgkEokEY6gFYzDTtj179szNRBYXL150n70BgQXGbN/gOzY2lpGRwVUoghHBjUSCkRgEMUgkEokEY0gFo4j4HS2dP38+OTmZoZnCorzkuezYsWPsdoIRwS0sgpEYBDFI4RKJlFeIBKOIhh0tzd7TigTGbBRWUlLSuXPn2PkEI4JeWAQjMQhikMIlEimvoAdjENI2eYyePXt27n+6o6ODQXXDIjCcTufLL7+ckpIyBzcqRDOCkUgkGIlBSoEYBJFIMIZFMIqgBEYQryvIkn7vvfc46EM8MJxO55YtW7Kzs2dpKCcQjEQiwUgMEoPEIIhEgjFcgnFO0zZ5F++rr74a3PKegzGCIsPu3btTU1ODewXIdB1lQTASiUG/FkswEoPEIDFIJBKJBGMQg1HMZWCEzpg51dXVmZmZXLUK/TPIJ598kp2dvWXLFkqEYCQSCUZikBgkBolBIpFgjNpgnKO07aOPPkpNTX355ZdDp+xnaf57AiMaDp6wRjASiQQju5EYJAZBJBKMYXf8iLnZsIyMjN27d4faEWAymQoLC0Nh1nMCw7d33303KSlpVudSjJ5aimAkEglGYpAYJAaJRCKRYAy7YJz1tG1sbCwrK2vXrl0heBB89tlnhYWF3Cke+oHhdDpfffXVlJSUS5cuUUYEI5FIMBKDxCAxSAwSiQRjtAWjmO3AyMnJWbt2bSiHboivIYGh7Nmzh9u+CUYikWAkBolBYpAYJBIJxigMRjGrFwwWLlwY+ofd2NhYZmYmz/hu2bIl9EcxWrNmTUFBATcPEIxEIsFIDBKDxCAxSCQSjFEVjDHXUrdZMDExsWTJEiHE4cOHRcgbGhq67777Vq9evXbtWhGV1q1b193d/corryQlJYXyek5MTPzwhz/MzMzctWuXAMFIJBKMxCAxSAwSg0QiwRgdwThjaVt3d/fRo0dPnTo1eN0Xv/hFnU63bNmyRx55J
CcnJ5T3tVzzY8eO9fb2fv755ynXZWdnFxUVhfiaz0hJpaSkfP755zExMQcOHHjwwQdDf+UPHz68Z8+eL37xi8PDwxFfWDNSvg6HQ6/X//rXv/7+978fFisfhcE4NDQ0MDAQFxcXExPz+OOPm0ymsDhttre3nzt3bmJigkgkBiOjsIjByC5fmqY0TcM+GKffYXfp0qWCggIfP5Gbm3vx4sUQ7NMM3zWPwu2NtsKifAlGTpsUKyvP9lJSlC+RyGlTmW7atn///vj4eCFEcnLy2rVrjx07dv78+c7OzrfffvvYsWMbN25MSUkRQsTFxe3Zsyek9rXHNW9vb+/t7Q3xNZ/m9or4L4kfmsVmi3j+LfHCv4otR8SGA6JonUi8I2S3172w/uVf/qWvr+/8+fMRWVgzdTxfvHixvb398OHDIb6LojYYw/e0+V9OI1v+H7G769rr0D6NEIOBlOxzZyKyZCOv6WK1WqkNaZrSNI34YJxW2rZs2TKZTZpMppGREY/LjI2NrVq1Si5WUFAQIvs6fNd8mtsrFhSJQ++L1lEP/7N8KB58PAS3N9oKK9p2UdQGY/ieNsPxNEKxUrI0XQhGypemabgXlphOlnktPY6PP3z48KQLHzt2LDk5WQgRCuly+K75dLY3Ju42seElz1Gh/d9mi4j/Uuhsb7QVVrTtougMxvA9bYbpaYRipWRpulAhUr40TSOgsAJM2y5duiR7Bv1ZY7Xesq8wuLeohu+aT3N7/QoMFR5C3Hxz8Lc32gor2nZR1AZj+J42w/E0QrHORskSg6w55cuaR+eZJ7iFFWDaJp/Am+ok7mVlZfL5vCDu7vBd8+lsr1hQ5G9gyP8tfjIUtjfaCivadlF0BmP4njbD9DRCsVKyNF2iJG2jfGmaRnxhBZK2nT17Vj6B5+1uTm/GxsZSU1OFEB0dHUHZ1+G75tPZXhH/Ja83DXv7n+VD8aU7g7u90VZY0baLojMYw/e0GaanEYp1lko25r9/lRhkzSlfyjfazjxBL6wbApgz4OjRozLR/FsXp9/i4uKeeOIJIURzc3NQplYI3zWfzvaKBUXi5tum9kndzeL+R4O7vdFWWNG2i6IzGMP3tBmmpxGKdZZK1nnfEmKQNad8Kd9oO/MEvbACSdtOnTolhHjggQcC+Gx+fr4QoqurKyi7O3zXfDrbK+YFNMHrtx4I7vZGW2FF2y6KzmAM39NmmJ5GZpvctLCOwegpWZouVIiUL03TcC+sQNK2oaEhIURGRkYAnzUYDEKIwcHBoOzu8F3z6Wyv+PI3Avlw0peDu71y5eVuj4bCmuPjWU4nQvmyvf6eRpLTAvnw7WmRHYkDAwMBF6uMwdHR0fCtIIK48kE5YQZre8N3zdlLlG+EnXmCXlgxTqdzyp+JiRFCfPbZZ7GxsYH85PWPB/C70xe+az6d7RVH7OLGQLZXFMYHcXvlyl+6dEke6BFfWNF2PEdb+Ybv9v7tNLLv3+S0p+F1GpltX/nKVwYHB8P0MA7rCiKqTpjR1nShfGmahnjTNIiFdUPA+WJgF1BllqnT6YKyu8N3zaezvWL4ciAfvv6pIG6vXHmHwxElhTXHx7P8FOXL9vp7Gvk0kDUXf7ka2ZEoa+7AilXGYFJSUvhWEEFc+aCcMIO1veG75uwlyjfCzjxBL6zA07be3t4APtvX1/efZRak3R2Oaz6t2Pjg3wL5sP3D4G6v/Gm526OhsOb4eJanD8qX7fX3NPLhnwL58OCfIjsSp1OsMgan+lx7SFUQQVz5oJwwg7W94bvm7CXKN8LOPEEvrEDStuzsbCHEb3/72wA+Kx/my83NDcruDt81n872ivO/D+TDPZ3B3d5oK6xo20XRGYzhe9oM09PIbDMajeEeg9FTsjRdqBApX5qm4V5YgaRtRUVFQoiDBw+Oj49P6YMOh+PQoUNCiJKSkqDs7vBd8+lsr+hsFo6/TO2TnzrEG4eDu73RVljRtouiMxjD97QZpqcRipWSpekSDbUh5UvTNFoKK7Dp3mSyuGrVqil9av369TJV/eyzz4I1U174rvl0tlc8+PjU5jQs/HEobG+0FVa07aLoDMbwPW2G6WmEYqVkabpEQ21I+dI0jYbCCjBtu3jxYlxcnBDi2LFjfn7k9ddfj42N1el058+fD+LuDt81D3h7b7752vaKzRZ/A2PbcXHDjTfddFPQtzfaCivadlEUBmP4njbD9zRCsc54ycbcSAyy5pQvax6lZ57gFpYI+JN79uwRQiQnJ7/66qv+rHFycrIQYteuXUE/XMJ3zaezvSL+S+L/bvGrsaX/76GzvdFWWNG2i6IzGMP3tBmmpxGKlZKl6UKFSPnSNI2AwhLT+XBBQYG807KsrGxsbMzjMp988sn69evlKMkLFy4MkZ7c8F3zaW6vWPyksHzoOSr+eUgU/ljccGOobW+0FVa07aKoDcbwPW2G42mEYqVkaboQjJQvTdNwLywx/YxT9hUaDIYtW7acOXNmZGTE6XSOjIycOXOmurpaTiWu0+l27doVUoeX+5r/+c9/Dos1D3h7/9YlfXuaMG0UtR3i0PvXQuLQ+9deP1opUr4hhLjpplDc3mgrrKiKxOgMxjDd3rA+jRCDM1GyN0VqDIZv0yWMDjP2EuUbSWeeoBSWmP5XXLx40fdwltnZ2aF56234rnkUbm+0FRblSzBy2qRYWXm2l5KifIlETpszmbZJHR0dq1atMhgMMvWMi4szGAyrVq3q6OgI8esBLmuu0+nCZc2jraSisLAo36gKxjDa3rA+zNg5lGzkNV2IQcqXkoqGlY+5lroBAAAAAELVDewCAAAAACBtAwAAAACQtgEAAAAAaRsAAAAAgLQNAAAAAEDaBgAAAACkbQAAAAAA0jYAAAAAIG0DAAAAAJC2AQAAAABI2wAAAACAtA0AAAAAQNoGAAAAAKRtAAAAAADSNgAAAAAAaRsAAAAAkLYBAAAAAEjbAAAAAIC0DQAAAABA2gYAAAAAIG0DAAAAANI2AAAAAABpGwAAAACQtgEAAAAASNsAAAAAAKRtAAAAAEDaBgAAAAAgbQMAAAAAkLYBAAAAAGkbAAAAAIC0bQYlJCTE+GHv3r1CiBMnTnj86/z58ysrKy9cuODy5fPnz/fxnV1dXXIxb18r5efnT7oVJpMpJibGYDC4bJfJZHJfuKenJz8/X365xwWAoOjp6YnxT09Pj7f4SkhIMJlMFovF5csvXLjgO9jVkpWVlT4Wq6ur83Mr1Drs3btXu9pqferq6vLz89VaGQwGj6cRYM6oY9W3hIQEH3Wctxj0v6bzs+qcZgyeOHGitLRU+1vz58+vq6sbHh7mSEBkt079rOb8rzdnpHUqN0T7i5WVlRwJpG3/xcjIiD+LXb16VQjR29vr8a/d3d07duwwGo0u1Ul3d7c/X+7ta/1ns9mEECowhoeH5XZ9+9vfdlmyrq4uKyurvb1d/nN0dJRjACFibGxsSkt6jK+RkZHm5uYlS5a41AqXL1/2M9jffvvt6WzFhx9+KF+kpKTIFx988IF8MW/ePFUzpaenl5eXt7e3q7Wy2Wzy
NKJtWQJzSdZ0/tebU4pB/2s6P6vO6cRgXV3d4sWLGxoatL/V3d1dXl6en59P5obIbp36Wc35X29Ov3Uq//qTn/xE+4vTrI5J2yKQXq/3Z7HbbrvNnxh76KGH/D/d33LLLf4sFh8f72cll5aWJv95/vx599Xu6ekxGAzl5eUUOkKTnxHh55LNzc2T9oxN9Tzgz6lAVZ933HGHtuLR/oSP9uvIyEhRUREHA4LCn5rO/3iZUgz6U9P5Gf7+xOBrr73moz6tqqriYEDUtk5n8DzgT+tU2bdvn0zzQNrm1ZUrV5z/4fTp0+r92tpap8bq1atdPqj+ZLVajUajio2Wlhb3X3H5Nkld9vP4tYr7rSYuVPd3amqqfPHxxx/LF3fffbdarLy8XMZDY2Mj5Y4QNG/ePO2RX1tbq/50+vRpH7Gj4stut2sP7507d3r8IZdvczqdV65ccV8sLy/PPR7dTwUuBgYG5AtVUcnK8t5779VWiunp6U1NTTabTa52SUmJ+qvNZpv0NjBgNqxevVp7tOfl5Xmsm9zjZaox6GdN53/VOdUYTEtLy8vL08ag2WxWf/3Nb37DwYBoaJ36X835WW8G0DpVC8t+Bb1er1YbpG2z0tZsbm5W//TzJpMZIe9mTk9PV4mZvBt48eLF8p0FCxaohxCEEEaj0Wq1TtruBMJUYmLi6tWrVfNrmnd3TJV8SKahoUH+U92dLy83tre3q+cQVq9e3d/fbzKZZLMyMTHxV7/6FcUHYnDOYrC+vv7kyZPaGKyvr9deOqEoQet0zlqnQoiKigr5or6+PjExkeIjbZtF6pLeHPPnUQFVa9bX1589e3bS65RAuFMX9uaYPw/keKs4qaVADAY3BrW3kGm7GQFap7PaOhVCdHV1yQwzPT2dofJI22ad9o4mP+8JnhGZmZmTLqNuPg5W9AJzTD214v8TazPCn/s6vJ0fXJ468P8xP4AYnH4MnjhxQjuU5U9+8hMKEbRO56Z1KoRYt26dfPHcc89RaqRts+vEiRPqgBNCPPjgg+7LqA5ixdsTay6LaUdNdbdo0SKn06l6ll0eSEhPT/fn5mMgYshR9dVAqd4u2i1YsMAl0DwOuy9vqdIqLS318etnz5612+3ytXr8QD2T0NTU5OPRuH379qnXeXl59IojsmPQz5rO/6ozgBjUzjiyePFi2VOn1+ubmpoWLVpEUSIaWqf+V3N+1psBtE4tFouMvry8PELPo1h2wfR5nLCiqanJz04tNR6xb/7cYS/HyFL3EMs41I64CkS28utc3jQajdXV1X5+w+XLl/2J3EknVVPDZN15553yxe9+9zv5Qo1F7vFrtSM3bN68mTJFVMWg/8+STVp1+h+DHmccMZlMPkIViPjWqf9zh05ab/rTOh0eHlaJInWfN/S2zTyj0Xj69Gn/b8lVNYpv2sPdm7feeksbBirk7r//fsoFUUiv11dUVJw8edL/B8bUQOG+TVrt/eEPf3BpIKpB7XJycjx+ZHh4uKSkRN3lX1FR4W1JIFJj0J+azs+q0/8Y9HgrckNDw4IFC3z3qwMR3Dr1/4GaSetNf1qnzzzzjKz+SkpKqPtI2+aOzWbz8TiK+yjG3kLIZbH+/n4fPyrnkpdHvOrpdhm6h6l7EW1GrvPRXnQfyNhjReU+MrJ2rDkXe/fujYmJWbNmjfynup9EO6idx6jPz89X4yjk5eVt376dEkTEx6CfNZ3/VWcAMaidccRqtWon4WhoaGASDkRD69T/as7PenNKrdPh4eEdO3bIN9ULkLbNCnnUNjU1yacqR0ZG7rvvPv9nM5ypqnHSZTzeBwJEEtm2s9lsquHV0NDg/zy/M8KfsZVHR0dd3iktLVU5m9FoPHToEKUJYnAuY1ClcBaLRTuA5PPPP0+xgtbprLZO1S3NsstdPTWnHo6VKR9jS5K2zRiTyVRTU6MOU+24AnPAn0G6GJIOUSItLe1Xv/qVup63c+fOuayo/BmkKz4+XvvPyspK1Q9gNBqndFcnQAxOPwZdaO/d8pbgAbRO57h1SjCSts2k1atXB6uWunLlSltbm3yt+q9ra2vlO3a73el0MiQdokdiYuK2bduCUlGtXr3a6XTK8ce1t53Iv1ZUVDidTu0geBaLRd0TQs4GYnDuY9Cdegpu0gQPoHU6/dapn0+VE4ykbTPsySefVLVUR0fHXP60mtNQXbd45513VPVJ0SDaPPjgg+o63wsvvDDHvy7veFR1jHqy1KUf4MSJE0uWLCFnAzEYrBhMSEiorKxUD7DJeQtU77cQorCwkKIErdNZbZ2mpaU5PVG3K8vrL76vtpC2YcpWrlypXnscwNR98hlv88+4L+Z7HH91N7/qVZO9ye6zjpaWlqrvVG9qp+zwf9RXIGQlJiaazWb52mazeYwy9/lnPB7/7hPa+J66TV3L/Pa3vy1fqCdLXeYe1c4o2t3dnZSU5PIrCQkJFCUiOAb9rOn8rzqnGoMjIyM7duxQp4L09HTtBAZGo5HHaRANrVP/qzk/680AWqcgbQtyLeXnCFQzMnWbnBZDSw7J5X7xftKs7PLlyxQlIqyiOnDggJ+f8vP49xFH2qerpcHBQfni1ltvndIm+PM8NxBJMTgjU7f5H4M+phyQHeAUIqK2dTqlqdum3zoFaVuA7rjjDnVrh8fJYdSb7mf8Rx99VH1WzRvj+6KC+jaXq4AufE9oI28F0Y5cLCee/853vuOypPs7Lu666y6OAYQOFRd6vd7js8sqNFyiNS0tTUWEnDfGJbq9hb8/keJjyGO1kuoGD3m5Ua/Xu9zBP+nEOP48zw3MNhUL3qqhKcWg/zWdn1XndGLw97//fWNjY15enoq19PT0kpKSpqams2fP0rJEZLdO/azm/K83p9M69bEC/k8iF9li1EO6AAAAAIAQRG8bAAAAAJC2AQAAAABI2wAAAACAtA0AAAAAQNoGAAAAACBtAwAAAADSNgAAAAAAaRsAAAAAkLYBAAAAAEjbAAAAAACkbQAAAABA2gYAAAAAmFmx7ILgeuqpp3z8EwAhCRB0AIhHxDidTvZCMAsgJkb7T4oDICQBgg4A8QgX3CQJAAAAAKRtAAAAAADSNgAAAAAgbQMAAAAAkLYBAAAAAEjbAAAAAIC0DQAAAABA2gYAAAAApG0AAAAAANI2AAAAAABpGwAAAACQtgEAAAAASNsAAAAAgLQNAAAAAEDaBgAAAAAgbQMAAAAA0jYAAAAAAGkbAAAAAJC2AQAAAABI2wAAAAAApG0AAAAAQNoGAAAAACBtAwAAAADSNnYBAAAAAJC2AQAAAABI2wAAAAAgEsWyC+bY888/b7fbvf31qaeeUq91Ot3GjRvZYwAhCRB0AIIbj/Hx8WvXrmWPkbZFkYmJia1bt3r7q/ZPxAZASAIEHYBQiMctW7awu4Irxul0shfmksPh+NrXvjY0NOR7MZ1Od/HixeTkZPYYQEgCBB2AIMZjfHz8xYsX4+Pj2WNBxLNtc02n05WXl0+62Jo1a6irAEISIOgABD0ef/rTn5KzBR29bUEw6YUNLjEChCRA0BF0QCjEI11tIYL
etiCY9MIGlxgBQhIg6NhRQCjEI11tIYLetuDwcWHj/2fvXqCsKu+8QZ9T51BV1AWCFAgawk0D4dIaYlBiPhCXDhjxY+JKa6dnPnEiyZrP9Lck2qs7MysRcTLT6V6NgenEno6X1mR1jCZj2hY7MLoEaR2RqK3h0hAFQYKFUIRAUWVVUVXnW2YXb23PjUPVAUp5nuVyHar2ue3a7373b//f/W6nGEGTBI1Oo4OB0B6V2gYO1bYzo8iJDacYQZMEjc4qgoHQHpXaBg7VtjMm74kNpxhBkwSNTqMD7ZEsqm1nTN4TG04xgiYJGp2VA9ojWVTbzqSsExtOaYAmCRqdRgfaI7lU286krBMbTmmAJgkandUC2iO5VNvOsHBiwykN0CRBo9PoQHskL9W2Myyc2HBKAzRJ0OisENAeyUu17cxra2v71Kc+9eKLL2oeoEmCRgdoj4htA9SuXbvGjRtnPYAmCRodoD0itgEAAHzIuLYNAABAbAMAAEBsAwAAENsAAAAQ2wAAABDbAAAAxDYAAADENgAAALENAAAAsQ0AAACxDQAAQGwDAABAbAMAABDbAAAAGBjSVkEhS7e1rd7fubOlu6kjk0gkxtVUzByWWjqpekp9T9Zdsqlt5c72RCLRUJk8cM2QE77gdS+1rNrXGT2uSSUfn1kzb2T+9b9mf+d332jf1dq9q7U7WnjakIpZw9Irplf3YbFCtjZ3/3BXx9MHju1qzbR2vf8dp9RXXD1iUIlPh7IrZZsc8csjUZO8bULVCbfVNfs7r9/YGr1UIpFYMCr95KW1hRZesqkt/tbjaiqmDan4s/FVWe30nh3tj+49FvYMDZXJaUNS141K3z6xqsTmH9SkkuNqkhNq87wL6PjK2PEt3dZ29/b2vA3wc+ekvzGxKnxBGJjW7O988O2OzUe6tjb3bvk3nj8odD0P7+m49fW21q5MTSp570XVi8ZUnrCxxxvFnZOqlk2uLtQ1L9veFt466przNpwSu9E+739IZjIZayG3bdy++b2wdWbt5UNjuPHl1sf2Hot+nlk49ITHo1OfbY7/ZPHYyvsuHpz32DHqFHPFn1LiYoXcs6P9r37THjWMLONqKp66rFYj4TQrcZtMPnE4+uEN5w969JKa0g8Zo/bbsmBI3uZ57YaW6Cgwt8nHnzL3hZZ1TZ153+uhGYOLdJNTn23Ou0sJFoxK//WUwdodOr5T0fHFP7Zejw+dr7723k9+eyycgoy7oiG99vLaqA+9Y3Nb9MPl06qLn0lMJBLjn26O93ozh6Veml2Xu1hIg7m/ij+l9G60P/sf7KTydDPXb2wtdIDV2pW5962OPrzsD3dlP+uZA515N9xCfVIikTjSmTmpxYocH9+xuS3v8XEikdjV2v29He22BE5zZjsV2+SG33Vltd+l29pyF/v6r9/L29lET4kfWRbKbIlE4mBHv06BrdrXee2GluLRDnR8fe74itjV2v31X79nM2BguvHl1vt3d+QNTolEYl1T58N7Ovpwmiar19t4qCvvHuDPN7cVeuujsUZXYjd6Ovc/YttZ4Y9/1RK2sHE1FXdOqsosHJpZOPShGYNnDktlbaale/FQZzg5EfqJNfuzO7DvvtHbJy0eW7nlyvrMwqGrZ9UuHlvZUJkcXVVxUosVah7f/o/2+MmSh2YMjr7jnZOqxtVU9LP/gz4cMp6KbfLhPR1hHGP0IolEYv3BrtzeK4SxcTUV8be+oiHdUJkMS/7Tb3t6jppUMr5nWDAqXZNKTq1PlfKpptRXRE+MnnvD+YPix45/udWxIzq+8nd8caEB3jahqiaVDMe+NgMGoHt2tMcLxTecP2j1rNrMwqFbrqwPG3AfThr+/J1jWW0z73mWJZt6T6de0ZCOv/WU+oqRxxtd6d3o6dz/fCS5miK7eYS431CZjI+aWDSmctGYyiWb2kI/dFJHpRsP9Rws/uOna8KgkZ+/cyxrvO/+9u6w4YbxHvNGpueNTMeHf5S4WF7LtveeOMmqiS+bXL1scvXcF1qGpJM2Bk6bU7RNhm7psnNSNalkdCIwtMRgS3PvT24aMyiMxMgd5R96rytHpMJvoz1D37549NwvnNs7BGXVvs41+ztd54aOr7wdX14rple/fqRLYGMge2B3R/xsRdjUp9RXrJhe/bVxldduaBleedL9Y6h7zx+ZPtqZifYAuc28sb23AvaDP+oZxh+9dSJR3Ydu9LTtfz6qVNs+4MnYZTC3jq/MHem+Ynp13rG/xYUTGDOHpabUV4RzG3mHi0TynpLs82Jxz8be9B8/nefSoLWX1/atC4S+OUXbZBgh+aXzBn3h3J7DxELjJCO5tbjir1wWi8ZULhiVzk2boOMrV8d3QqHsBgPHmv2dIdLEz1YEU+or3rq6/mRPHYYRkjWp5LLJ1Z87p6cDKjROMqtFF1diN3qq9z9i21khnBqMNuVyvewT+3qOw2YNS4f/R31P1ojkkbFhHtdvbF2yKf/xZYmL5QrDxkJX6o/OmXWKtsml29rCCMnojF0Yp7H6gwd88cGN65o6L11/tNARYXiFpo7M+Keb7ynfJaBLJ/XubX59pMtWgY6vjB1fIV997b1QaoufOoEBIn4W76oRZdtEv/9We+hzozOb4VdZl5HHxx6v3Nl+48v5r0ArvRs9bfsfse2jb2tzd2xwbbKMJ0vCZZrX/OGU/9fGVeZtk4lE4psX9s7809qVWbmzfcQvj3z1tezZdUpcLNfOlt4FwtU+cAadom0ynPCbNqSnO7nsnFToJOItZd7I9BUN6XgXMv/FlrkvtOSmsv/p470td1dr9x2b26Y+29z/Y8fojGn47gbxo+Mrb8eXJfnE4ei/+/8w/KwmlSxlWlo4/fbFxih+qnzn2cOAkYv+0D/OG5kOHVBWMXzF9Or4lWmP7T322eeOXvdSS1YqK70bPT37H7HtrLB6/7FTerJkXE1FNKB/Sn1FKClkjbaaNzJ924QPzNna1JG5f3fHZ587Gh/ZVeJiubYdNU8dA8sp2ibDCbzZw3vS2txYp5I12OMHfzQ4KzGua+q8Y3PbpeuPxo8IV0yvjvdMUZezcmf7+Keb+zCRV5aalG0BHd8p6fiKa+3KbD7S1f8mDGUXP61ZLvERLuFkSpFJg/52WnV8CHFrV2bVvs75L7bc+HJrH7rR07P/Edvou3DqIl7gDiOJmzoyWb3FiunVq2dl30CmtStz9/b2+Hn9EheDs9DSbT0zfMQHXdw+sXfWuKzrm6PLAxaPrcy6vmXjoa6sSfnXXl67fFp11rxYu1q7b329zWEffEg7vq3NmjBni/gckqEphcu/c4vhi8ZU/mpOXe4o4sf2Hpv7QksfulHEtvLow1Q8JxQfKBIfPRx/nDsDwbyR6S1X1q+eVZt1av++3R1Zg7tKWSzOFJEMNKdimwwjJONTG8f/mffC6/suHtyyYMhtE6riqWxXa/ey7R84HLx9YtWBa4Ysn1YdP3Zs7cr8zRv9utRtf7uxkej4TknHlyXcAGD1rNpwMNr/JgxlV3cK+sdQ6w6Xm0bZLHR8uZMGTamvePLS2i1X1t9w/q
B4KlvX1Jk1DLLEbvSU7n/EtrNFfCqech1CxXum+S+2hCH1819syW1CuX3Y2strV8+qDXXn1q5M7kw+JS4WiY+NdpjIQHAqtskwQnJdU2dodMknDsdn+i7URlZMrz5wzZDFY3v3Bs/mm/ju9olVW66sXx4bPbK1ue+z221t7g6jVkZW2S2j4ytnx1fIvJHpJy/trd0pCDDQxIcdvt1ahuYZHyG5cmd7vH8MPy80U+uU+opHL6n51Zy6+BmT+DyQJ9uNnqL9j9h2NraQpo5MWaaJ++fGEw/bLf5e80ambxrTe4Yyfg+NPiw2f+Sg+KGtXoozruzb5JJNvXeBK+LpA8Xa5n0XD47vDQotdvvEqitH9Bb04veuOSnxM5EXDXGVGzq+cnZ8xU3T4hioJtdVlNhnleiht0t6kQffLngGZEp9xQ/+KM/dFPvcjZZ9/yO2nUXio/D/bmdHP48g42c1ist7uiKv0aWdiS+0WPzmOa1dma//+j1/dM6ssm+TJd6Xs/TiWEPJozjikyCf1I5i1fE9QE0qGZ9wD3R8/e/4inOOnwFr2eQPDOjo/8wFYShKcc8eKLVtljg8pEg3Wt79z0eeG5V8wDcmVv3kt8eiU/W7Wruv3dBy05hB0ZQGW5u7H93bsf5g1/727i1X1pfyamGgSE0q2bJgSO4CI355JOreQkOqXXVk5rDU7OGpG8/vuefg0m1tP9rTe3Ykmkm5xMXyuvH8QfEhZJeuP3rr+MqoTr1mf+cv3+18+sCxaUNSZkPmtCnjNrm1uTu81OKxlbk3J12yqW3lzvbQQueNTN/4cuvGQ11XjUh/fngqvOmDb3eEi3Oik/H37Gj/q9+0X3ZOam5D+vaJVdF7/XBXx7MHukK3FM2Yd1IHuP/6bueqfZ2hPLhgVNrdFNHxlb3jK7S7WLa9LYyd1vQYgBaMSj+2t2cjX7mzvbG9+yufqIz6mof3dDx/sOufG4/9b5+sinql4sJkXYlE4qEZg3Nv0j33hZaoOUSTBi0aUzn12ea6dHLWsPQ156bDm977Vm8tLhoeUmI3ehr2Px95yUzGeabszfru7cWqtFPqK6Kt58aXW0NbyrV6Vu3//Epr1Dld0ZBee3lt7jLXvdQSzrLfOalq2eTq5BOHi7feJy+tjW47U8pihcTfNy83seE0K32bLLLxX9GQvmhIKqSy1bNqc3PU1ubuqc82R4/H1VS8dXV98YZck0o+PrNm3sj0PTva79hc7Exn1IQL/Xbqs80nPIkY9i2g4ytvx1f8Y0eWT6su5dgXTrPxTzeH/FNk0y3eSd02oer1I11RKmuoTB64Zkjx/UDUoIr3XFEfesL2FbrR/u9/cG4p27LJ1bdNqMqawLQPHtrTHgaKFLpYJT6tVpj7rsghafEwVvpiT15amzuXK5xBZdkm97d3hxGShWpf8TGZhS68jnc2915UXUoN7bYJxTJbKRaMSuuW0PGduo6veEu/bUKVzMbA9NRltf0vBTe29w5FKVT7io/JLDRpUDyzPXVZbSmN64TdaLn2P2eD1F133WUtZJl/bvqLowd1dCfaujMHjvdANankhXUVC0cP+sbEqgtq328/m490PVe4y/l4dcWbf7hPYk0q+Q8XDx5RlWdzvHho6uE9x35/LLrBVOLr46uGDEp2dL//OLxvQ2Vy5rD0f5tQ+Q8X9Q73KnGxIv7045Wzzkn/vjPT0Z2IPkD0ItOGpL50XuUdF1Tl/cBw6pS4TYYmk2vqkNS/H+6Ofrlg1KA/jh0dxv32ve7Qcj/9sdTisVVRY88kEq1dvelu4ehBD3665urjw+7f/2zHEplEYlBFMnyAcTUVnx+e+r+nD/6zCSe4IG31/s7f5NxYvKEyOa7m/Tf6v6ZUf3tStW0AHd8p6vgKfewp9RVzR6S/O6X6hE0YzpQRVcnQTI52ZkI/Na6m4uKhqZvGDPrfP/l+93GkM/F4Y2eB7jFx/uCKbcf7oP824f3eNu9iGw71dFWtXYm7JldfUJv6fWcmnUy0dCWiV65JJT/9sfc75dWzakMDn1qfKqUb7f/+B4MkAQAABjThFQAAQGwDAABAbAMAABDbAAAAENsAAAAQ2wAAAMQ2AAAAxDYAAACxDQAAALENAACA/NJWAQBwxt11111ZDwBtkyCZyWSsBQDgDB+RJJPRA0cmoG2SS7XtzHMOAzRDAIBi+VluPvN/A+cwQDMEzVAzBG2TwkxJAgAAILYBAAAgtgEAAIhtAAAAiG0AAACIbQAAAGIbAAAAYhsAAIDYBgAAgNgGAACA2AYAACC2AQAAILYBAACIbQAAAIhtAAAAiG0AAABiGwAAAGIbAACA2AYAAIDYBgAAgNgGAAAgtgEAACC2AQAAiG1WAQAAgNgGAACA2AYAAPBRlLYKBo5kMmklgGYImqGVAAPNokWLrIQzS7UNAAAo5uabb7YSxLaz3dKlS+fMmWM9AAAAeSUzmYy1AMCHvj87PrJOvwbAR49qGwAAgNgGAACA2AYAACC2AQAAILYBAAAgtgEAAIhtAAAAiG0AAABiGwAAAGIbAAAAYhsAAIDYBgAAgNgGAAAgtgEAACC2AQAAILYBAACIbQAAAIhtAAAAYhsAAABiGwAAAGIbAACA2AYAAIDYBgAAILZZBQAAAGIbAAAAYhsAAIDYBgAAwGmWtgoAyiWZTFoJ/gpnrUwmYyUAnCKqbQAAAAOaahtAmak5cLZR4QQ41VTbAAAAxDYAAADENgAAALENAAAAsQ0AAACxDQAAQGwDAABAbAMAABDbAAAAENsAAAAQ2wAAAMQ2AAAAxDYAAICzSNoqABggbny59bG9x8I/Zw5L/eOna6bU5z+/dt1LLav2dYZ/3nD+oEcvqcldbOm2tru3t4d/3jmpatnk6kIfYMmmtpU731+4JpV8fGbNvJHZfcSa/Z3ff6t9Z0v31ubu6CcNlckJtRWzhqVXTK/OXfj6ja2tXZmsnzdUJqcNSV03Kn37xKr4z+e+0LKu6f1vdEVDeu3ltQ/v6bj51fdKWW//w4j0/3egs9AXjH+MxWMr77t4cNbTw/sW+taJROLhPR23vt4WvchDMwYvGlN5suvk0vVHNx7qOuF3mTks9dLsuvB2NankvRdVh7cb/3Tzrtaed4nWUqFNKO83jbaH9Qe7drV2h9cZV1MxbUjF3IbsPwcAA4pqG8BAsfnIBw7rNx7qWra9Le+S9+xoj2e23OcGP9pzLP7P1fs7i3yAxvaeQ/nWrsyW5q7coDj/xfezYsgniUSiqSOz8VDXyp3tI3555OE9HfHltzR35Wa26Cnrmjrv2Nz21dc+kMr2H3/36MHBjkyJ621cbW9fdu9bHVm//e4b7eFjfCpfBr7g+NNbuzI/f+dY3rf4+TvHwot89mPpPqyTo50lfZ1osYMdmejtWrsy8fUQslYikVjX1Jm1AuObwZGct3t4T8f4p5vv3t6+rqkz/jq7WrtX7Xv/z3Hp+qPaIIDYBsBJe/ZA/pT15L7OUp6+Zv8HDtCjKBgPGKWb+mzzqqJv2tSRufX1tqzkVtz9uzvu2dFe6LfDK5Mlvs7nz
kld0ZAOH2PJpt6su7W5O6qkJRKJKfUVeQtKXzpvUHj86wLpd/ORnpU2c1gq1D9Pap3UpUv6OiUuFvnnxmMlLrlkU9vNr76XtTFk2Xioq8ifAwCxDYAPqEklw3F/bhCKR5GGotkm1I5mDkuFH/5wV8fJfp4bX24NYa8mlbxtQtWWK+szC4duubL+tglV4TO0dmX+5o38x/3Lp1VnFg7NLBy6elZtiFiJROLRvQWDx6IxldFTov9uOH9Q7qtF/y0aU/nNC3vz2BP7el8zXq68ZWxl3jeaNzI9rqaiSKyNp99Zw9J9Wycvza6Lf+bw4lPqK+I/f2l2XSl/kSg6NnVkSglaa/Z33re7I/7cOyf1fNrMwqF3TqqK/0UAENsAKMm4mmQIEg+9nR1sQu4aV1MxsqpYbHvmeLFu/sh0qBG9eKjzpD7M1ubuUFOKLrVaMb06erUp9RUrplc/9/m6kDO3NncXL7jNG5lee3ltSDUlDh08oXkj0yF77GrtjsJM/JMXKrVFrhqRLhJr4yMnvzausuzrpA+uHtETYh/YfeJXvn3ze2GE5xUN6S1X1i+bXB22h2WTq9deXvvQjMFhkwNAbAOgJCFI5M5j8fSBY7lhI1eoEdWkkssmV3/unN4XPKlxkj/c1REO+q8ckYrPxhES0Z9+vLcU9q/vnjgWFk+bfRMvuEVFvPgnL1Rqi8THSb6eM07y//9dzzcKIyRPxTo5KSumV0fRd2tzd/GC25r9vdfdTamvyJ3FJLJoTOVbV9eblQRAbAPgJIQg0dqVWbqtLe9ReDxs5Pr+W+0hbGQt/L2TuYQpHmMKvePnh/cOwiw0OUqwtbl7V2tP5jmpS7mKixfcNh7qWrO/M4yWLF5qyx0nmfVpwwoPIyTLvk764H8cPSieUQuJlwqLbzAAiG0AnHQICUFi/cGu3KPwcTUVeaeqDzb8rudZFw1JZb3gMwdOovgTJniMajJ5lyn081wP7+n4X/69964AIQiVRbzgdue2tnBBWvFSWySULrNycnzMZDRCsuzrpA8e3tPxjYlV0TjMKKMWWvLNlt6PWuTeDwCIbQD0RQgS65p6K2xh8sCFowYVP6xvOj5xfAgbYWKSXa3da/Z3ns7vcsfmtuQTh5NPHL751fdCOeuKhjx3e+tn1o0X3KIHJyy1ReKVqHhODpcCxueQPOMOdmSm1FcsGNXzZb/7RnspqTtYs78z+nPE/6tddeQ0bxUAiG0AH3rxIBHVfPKGsbzic0iGsPGFc9O5C5wRDZXJxWMrC11n1R/xgluklFJbVjUyPqYxxL/yFgbLYumk6qjgtq6p86QSV+5N+QrdrA8AsQ2AUoNEVPMJs0qesPITRkjGw8aiMZVhCseTGidZdk0dmdpU8hSttPh09iWW2iKhvBnuuxAfLVk8J58R8YLbg2+//4En1JbUrU+tT2lfAGIbAOURn09yzf7OEis/8aLcyp3t8YFw4ed9GydZaCL7+GSGI6vy9CzRndaWT6sOQXTlzvZTdHPn60b1rpxpQ04in8TLm1E1MoyWLJKT+7xOyiIU3Fbt69za3F2TE4bjayCk0Hkj0+FOccunueANQGwDoB/iQSLcfasmlSxe+cm91VteUX2mkOHH63Lx4/7nD+YfRLe2qTcBXlQ4Kd0+sequyb21r1LuOXY6xcubUbmyUE4u4zrpp1Bwa+3K5L2R+uS63o5+tUvXAMQ2AE5FkAjziIRZSU44QjL3Vm95PVt0nGSYCDF+RdxPfnss955vD+/pePZAzzueMFIuGlMZBjGe8J5jp198nOTSbW1hxsusL1XeddJPoeD2T7/NE9uWTa4OI2M3HupasqlNswIQ2wAos9zxkLOHFyvdxMPGQzMGh7Fw4b+QmqIruNbs7xz/dHN0O7i8QW7RmMqQElu7MnOeP7p0W1sUVLY2dy/Z1Hbr673vuGBU+oTTLcYHMT65b2DVf+Llzb99s6NQTi77OumPUHBr6sjkvUHcreN7Q+PKne2Xrj8aRktube5+6ZBpSAA+BNJWAcBA9rVxlSt39pakalLJ4nffCpdjNVQm8946bPbw1Lrj4/d+/s6xuQ3pXa3d819siS8TSnyRv7iwKuSQpo7M3dvb797enjc/PHpJzQm/0e0Tq/5uZ0d0U7VoCsTiN6A7naJxktFnK35zufKuk35aOql61b7O1q5Mbt0vKri9erhr1fGEvPFQ18ZDXXk/LQADlmobwIA2pb4iHqKyAlWuMEKy0GwcyyZXh4krNvyuK++kgjee/4Gbwi0aU3nvRb1j7fJaMCr9s8+WOqF//KZzxS+xO/3COMl4cs5drOzrpJ8byYJRxaLvk5fW3jahquZEs3cOr0xqcQBiGwDF1KV7DpqzpnGfPzIdHXDXpJI3f+IDgSosGT334T0doUZ0XeHj+CtH9ES1po7MvJHp+ASPM4ellk+rzp00f9GYygPXDLltQtXMYal4VokCw+pZtU9eWps1FHB4ZTJ87Kw88LVxvbciiEpb8ekW8867GKbWyH213OwR8kl8Qo4Sfem8QfF4U2SIYx/WSRBWeN4vW2jVhR/OH5l9s/VHL6kJ71WTSuZ+8RXTq381py73046rqZg5LLV4bOXqWbV5y7MADATJTCZjLQCUZ5eafP9o2H4VWz4A5aXaBgAAILYBAAAgtgEAAIhtAAAAiG0AAACIbQAAAGIbAAAAYhsAAIDYBgAAgNgGAACA2AYAACC2AQAAUF5pqwCgvJLJpJUAAJSRahsAAMCAptoGUDaZTMZKOLNCqdPfAoCPEtU2AAAAsQ0AAACxDQAAQGwDAABAbAMAAEBsAwAAENsAAAAQ2wAAAMQ2AAAAxDYAAADENgAAALENAAAAsQ0AAEBsAwAAQGwDAABAbAMAABDbAAAAENsAAADENgAAAMQ2AAAAxDYAAACxDQAAALENAABAbLMKAAAAxDYAAADENgAAALENAAAAsQ0AAIBeaasAoLySyaSV4K9w1spkMlYCQNmptgEAAAxoqm0Ap4SaA2cbFU6AU0e1DQAAQGwDAABAbAMAABDbAAAAENsAAAAQ2wAAAMQ2AAAAxDYAAACxDQAAALENAAAAsQ0AAEBsAwAAQGwDAAA4i6StAoCz3JdfXv7Tvc/n/VVdurr52kcSicTjjRsWvbryaGdbXbr64Rm3XT/6stznzhg64XvTvzJ7+NT4Kzyw+5nFr/0gkUjMHj71uc9/J/ctlr/5xJ9veSj882+n3nzHBQtzF1uw4TtPvftKIpG49tzPrLrsWyf7Hdcf3PLjPc+9+vsdrx7eGX2vGUMn3jTmilvGXhWWOW/NVxrbDiUSiTsn3bBs8pdP+ILXbvjO0c626J9/cv7nH7nkjkILL932yKp9L/+m5Z1o+QtqR1/ysYn/dfz83HX1oz3r3mh5J/oYo6uHzRg64YujL4t/SADENgDORr85+k6hX4VYcqjjaPT4aGfboY6jeZ/76uGd3/6PR7Ky2YZDvzn+Uu/lfYv/Z9ea+D//Zd+v8sa2KMnEH5TuG5sfvH/30+G7RN9i/cEt6w9u+UXjhhACwyu/U8Jb/P1bq+Mv
uOrdl/MutqNl3/wX736zpTH+wzdbGt9saVz17stRJM7KpfGv/FTbK0+9+8qwyrqQkwE4OxkkCUBBdenqk1o+ykIntXxWpFl/cMuOln1l/Apffe3eFTuejEesuKfefeXxxg19eNnnPvg1j3a2LX/zidzFbtt0f9YXzI3EUTkuK7PFxXMyAGcn1TYAeswYOuGVK5b3+bnR+MMf73kua+xfEb9ofCl6MHv41JD3frRn7QnHKJbogd3P3L/76fDPxWOv/i9j5swePnVHy74f7Vl7z45/ySoelujxxg1hHGNtqjoKZmubNmXVCdcf3BLC2AW1o/966k1R0Wz5m0+sbdoUra7Ifcc/ZF26+q5JfxK9zuONG36294VV7758Yd1oGyeA2AYA/bVg1CVRDvnp3n/75oXXT6wdVcqzVu3rGVv4n0d99mjne9ErrGvaUq5Pde9bvwyPl0y87nvTvhI9nlg7atnkL980Zu78F+8eVll3si/7s70vRA/mDJ9alx4cxbbncsqMbxztrbP9r+PmhYGOd1ywMCvghfGZC869JPzq+tGXGRsJQMQgSQDK4BODR1x77meisX/ffePxUp4SRkjWpavvuGDh7Iap4edlGSe5/uCWUNG6oHZ0yGzBxNpRb1x1bx+iUUhof3z+5decOyN6XGicZGRt06aTemUAENsAKL+vHJ/w8Kd7/62U5f/+rdXRgzl/GFT5xdGXhl99/61/7f/nCSMwo2Jgub7m8jefCCMko4LY6Oph0a/+Zd+v4kvGBzc+9e4rc57/VqEL/8IrNLYduvCZWx/Y/YzNCQCxDYAy29z89vWjL5sxdEJUd1q67ZETPiVUlj7zsYnR5W0X1PbknDB4sj/2xSaEnFb/iXJ901A3i75siJ25dcLZw6dGFcjw2znPf2vBhu/kprKvjr06PH6zpXHxaz/4zLo7SlmHAIhtAJxdXj28M/nEF+P/zXm+1DukRfMi3jr+muif98UmAskrzOqRSCRuGjM3enBFw7QQXdb3e7hgkRsb9EdIm3MbpkcPrhpxUfjtj/asjS+8cvrikEUjT737yuLXfjDn+W/FA96yyV+OB7zob3H39scufObWvk10CYDYBsDZotDN1gq5ZexVUQ2qse1Q8WJRmNVj9vCpYf6ScJ1Y1hDHgWP5m09EATW6Hi9863CnhKz5VKLL55ZMvC7rVgrrD26Z/+Ld8eS26rJv3X/x18NoyRBfF726UnIDQGwDoKC69OCTfUoouEUDHSfXnZ93sVCzuqKh924B8evE+j9Osg8f/oTCCMk5H7zJQaFxkpHvTftK87WP3Dnphngqe7OlMWvullvGXvXOvAfvv/jrYfhlVMb8P7f/zKYIcJZzAwAAevTnvm3x7HHvW7989fDOVw/vfGD3Mx8bVJu7THyE5N3bH7t7+2O5y0TjJEu/BVyuT9adF0Zavv3egbKsopA2n3r3leQTX8y7TKH7zi2b/OVlk7/8jc0Prtjx5PEXeTnvCrxl7FUP7H5myeYHosreq4d39nNVAPBhp9oGQJmFgtsvCozue7C0mRJ/vOe5/nyMeKGvLHOcLN32SJSjiiv+Xt+b9pVwtVtjbNKU3PC24Nze2S/jt4ADQGwDgP4KV7g99e4rectcJd6dLG8xqnR3XLAwXFH26uGd/Z+YscT7gEfFsVKWzLqSrYj4vQQAOAsZJAlA+d06/prFr/0gb+kpzOqRSCT+35l/mXu36wUbvvPUu69ExajHGzf04XbYwZ+c/5/uPz6n5d3bH3un7dB/GTMnGm34eOOGfzu49dG9z/8fk//0luN3nCtiR8u+EMaWTLwu9+bdS7c9EkZ7/qLxpdnDp371tXvXNW1eMOqS/zR8SvQt1h/c8uM9z0U3GQ+3EHhg9zPf3vaTOcOnXjXiouiT7GjZ96M9a1cdT62jq4cZIQkgtgFAmcWvcMv6VZjVI7pdde5z5zZMj2JbNOFk1jLRXQriPxldPeydeQ/m/Rj3XXzruqbNISbdv/vp+090Z4K8i1177meim8tF4ncGD24aMzfEtlX7Xo5y3ZstjSt2PBkuZourS1f/xYU9X6Sx7dBP9z7/073PR1k3yx0TF9qiAM5yBkkCcEqEK9yyhBGS8fkSP5BSYoMbSxlOWeQKsUQisXrWnYXeqHSNbYfCCMlCta+JtaPCz09437m6dPXDM24rpYZ256Qbwp0GADhrqbYBnO1GVw9LHE4Uv9RqWGVdXbr6aGdbXbp6WGVdLH70TLJ/2bBPZj3llrFXbTj0m1C2mvGHatXjjRvCCMkvFh79uODcS3669/l4JPtk3Xm5hbvez1/YxNpRr1yx/IHdz/yiccOrh3eGF7ygdvSkuvPmNkwPIyQvqB0d6nK5bxEC5LWxmUKy/OdRnw1p7Y2jjd+88Pq6dPX6pi2N7YfC+84YOmF2w9Q/G/+FcLe6W8Ze9fZ7B9Y1bXmn7XfhA1xQO/qSj038r+PnGx4JQCKRSGYyGWsBoJw71mQykUjYu2LLB6BcDJIEAAAQ2wAAABDbAAAAxDYAAADENgAAAMQ2AAAAsQ0AAACxDQAAQGwDAABAbAMAAEBsAwAAENsAAAAor7RVAHAqJJNJKwEAKAvVNgAAgAFNtQ2gzDKZjJUAAJSRahsAAIDYBgAAgNgGAAAgtgEAACC2AQAAILYBAACIbQAAAIhtAAAAYhsAAABiGwAAAGIbAACA2AYAAIDYBgAAILYBAAAgtgEAACC2AQAAiG0AAACIbQAAAGIbAAAAYhsAAABiGwAAgNgGAACA2AYAACC2WQUAAABiGwAAAGIbAACA2AYAAIDYBgAAgNgGAAAgtgEAACC2AQAAiG0AAACIbQAAAIhtAAAAHz7/PQAA//9A2/fKwHZoZAAAAABJRU5ErkJggg==)", "_____no_output_____" ], [ "*#Buildin the model*", "_____no_output_____" ] ], [ [ "RFC = RandomForestClassifier()", "_____no_output_____" ] ], [ [ "*#Fitting the model*", "_____no_output_____" ] ], [ [ "RFC.fit(xtrain,ytrain)", "_____no_output_____" ] ], [ [ "*#Predicting the test data result*", "_____no_output_____" ] ], [ [ "confusion_matrix(pred,ytest)", "_____no_output_____" ], [ "accuracy_score(pred, ytest)", "_____no_output_____" ] ], [ [ "*accuracy_score(pred, ytest)*", "_____no_output_____" ] ], [ [ "roc_auc_score(pred,ytest)", "_____no_output_____" ] ], [ [ "**## Decision Tree**", "_____no_output_____" ], [ 
"![dtree.png](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAskAAAHzCAYAAADSPIC7AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAgY0hSTQAAeiYAAICEAAD6AAAAgOgAAHUwAADqYAAAOpgAABdwnLpRPAAAAAZiS0dEAP8A/wD/oL2nkwAAAAlwSFlzAAAASAAAAEgARslrPgAAgABJREFUeNrs3Xd4VGXax/Hv1Ex6L4RAQu8tdKmCgEoRRRR7xe7u2rGuu2LB1XeLDbuoqyyCCEqVKk16rwESSALpvU1//wjnMDOZhIDABHJ/rmuuZPqZk8nMb565n/vROJ1OJ0IIIYQQQgiV1tcbIIQQQgghREMjIVkIIYQQQggPEpKFEEIIIYTwICFZCCGEEEIIDxKShRBCCCGE8CAhWQghhBBCCA8SkoUQQgghhPAgIVkIIYQQQggPEpKFEEIIIYTwICFZCCGEEEIIDxKShRBCCCGE8CAhWQghhBBCCA8SkoUQQgghhPAgIVkIIYQQQggPEpKFEEIIIYTwICFZCCGEEEIIDxKShRBCCCGE8CAhWQghhBBCCA8SkoUQQgghhPAgIVkIIYQQQggPEpKFEEIIIYTwoPf1BgghxMXidDrdjjscDiorK6moqKC4uBiHw4HZbMZut2Oz2bDb7fj7++Pv74/JZMJkMqm/63Q6NBqN2+15HhdCCHHpkpAshLiseAZhp9NJeXk5RUVFlJaWYjabcTgcAFitVrKzs8nKyuLAgQNYrVYKCgqwWCyUlpZitVqJjY0lJiaG6OhooqOjiYmJISYmBj8/PzUUa7VajEYj4eHhhIeHYzQa0Wrdv6iTAC2EEJcWCclCiEuaZyi2WCxUVlZSWVmJw+HAZrOxY8cOfv31VzZt2sSxY8eorKys9fqep3kLtxqNRr2MRqPBYDAQHx/P6NGjGTNmDE2bNsVoNAJgNBoJDAzE39+/xm0IIYRouDROb+8QQgjRgHm+bNntdux2Ow6Hg/379/PDDz8wd+5c8vPzcTqdbudrNBr0ej16vR6tVovBYECr1aLX69Xgqvx0Op3qweFwqLflcDhwOBxqWYbNZsPhcKDX69HpdGophkajoUePHtx3332MHTsWQL2Ma0iWwCyEEA2PhGQhxCXBWxmFctqMGTP48ssvOX78ODabjcrKSqqqqtTAajAY1GBsNBpJTEykU6dOJCQk0LFjR4KCgmjXrp06+lvXfaekpJCXl0d2djbHjh3j0KFDHDhwgIKCAmw2G1arVQ3NNpsNnU5HYGCgWtP88MMPc//99+Pv768GaU8SmoUQwvckJAshGjzXlymn00lBQQHLly/nu+++Iycnh5ycHHJzc3E4HBiNRgwGg1ojPGDAAAYNGkS7du0wmUxoNBr8/PwwmUwYjUb8/f3R6XTqeWdSVVWlhmGltKOqqkodYc7JyWH9+vVs2rSJQ4cOUVBQgNVqxWq14nA41JpmvV7PsGHDuPnmm+nSpYvbfUhIFkII35OQLIRosJSXJ+XnokWLmDt3Lnl5eWRkZHDkyBFsNht6vV6tC+7atSu9evWiXbt2+Pn5ERsbS2xsLKGhoeh0ugu+zVVVVWRnZ5Obm0txcTEnTpxg586dbN68mQMHDqgh2263Ex8fT6tWrWjatCn9+/dn8uTJAFKKIYQQDYCEZCFEg6WUVHz//ffs37+fTZs2sXnzZiwWi1o+ERUVRc+ePenatStJSUkkJibSokUL4uLifL35AJSWlpKens7Ro0dJS0tjz549bNy4kYyMDHUEWqfT0aZNG0aOHEl8fDyTJ0/GZDIBEpKFEMJXJCQLIRocp9OJ1Wpl9uzZFBUV8fXXX7N//35sNhsGg4HQ0FASExPp1q0bLVq0oGfPnnTu3JmwsLCLMlp8ro/JbDZz4MABtRRj//79HD58mOzsbMxmM1qtlvj4eB5++GEiIyMZMmQI8fHxGAwGQAKzEEJcTBKShRANhtPppLCwkK1bt1JcXMxzzz1HdnY2DodDDcdJSUl07dqV5ORkRowYQUJCwiUZHvPz89m0aRPr169n+/btHDx4kOzsbCwWCxqNhoiICB544AF69OhBt27daN68uXrdS/HxCiHEpUZCshDCp5SXoPLycvLy8li3bh1TpkwhLy8Pp9OJXq8nICCAJk2a0LlzZ0aOHMm1115LdHS0rzf9vKiqqmLbtm3MnTuXtWvXcvToUcrKyrDZbACEh4fz8MMPc+uttxIVFUVYWBggQVkIIS40CclCCJ9wnZRXVVXF6tWr+eSTT1i4cKFbOA4JCSEyMpLbb7+dG264gWbNmvl60y+I8vJytmzZwt/+9jfS0tIoLi6mqqoKq9UKQHJyMg8++CA33ngjgYGBNXo6CyGEOL8kJAshLjrXgOxwOHj++eeZMWMGRUVFaLVaTCYT/v7+BAQE8NRTTzFhwgRiY2N9vdkXhcViYfXq1bz++uukpKRQVlaG2WzGbrej1Wrp3r07K1euxM/PT5a+FkKIC0hCshDigvO2EIjNZuPrr7/mk08+4cCBA1gsFgwGAyaTiZiYGCZNmsSDDz5ISEgIRqOxRiC8XCn7pry8nFWrVvHxxx+zbds2ysvLsVqtaLVamjVrxn333cf9999PeHi4el0ZXRZCiPNHQrIQ4oLyXAgEoKioiL/85S/8/vvvZGZm4nA4MJlMJCYmMnr0aLX+NiYmplEHvpKSEnJycli3bh1z5szht99+w2w243A4iI2NpUmTJnz22Wd06NBBXb1PgrIQQpwfEpKFEBeM52IgZrOZgwcP8sYbb7BixQrKysoICAggKSmJkSNHMmzYMFq1akWrVq18vekNSl5eHqmpqWzevJl58+axbds2iouLcTqdXHnllfzlL39h4MCBNWqVJSgLIcS50/t6A4QQlydlIRDl97y8PNavX8/MmTP5+eefcTqdBAcH06NHD8aMGcOoUaNo3bp1g+1z7EtRUVFERUXRsmVL4uPjmT9/PkuXLiUvL4+VK1cCkJmZyfDhw2nevHmNcCxhWQghzp7u1VdffdXXGyGEuHy4BmPlkJqayoIFC/j2229ZuHAhOp2O2NhYBg4cyI033siYMWNo0aJFo6k7PlcBAQE0b96c2NhYtFotxcXFlJWVceTIETIzM9FoNERFRREREaFeR8ovhBDi3MhIshDigjpw4ACzZ89mzpw5HDx4EH9/f5KSkujbty8TJ06kX79+hISE+HozLxkBAQH079+fpk2bEhERweLFizl06BC7du2itLSUoqIibrzxRrp06eI2Ku90OiUoCyHEWZCRZCHEBeF0OsnIyGDatGl8/fXXnDhxAn9/f9q2bcv48eO55ZZb6Nu3LwEBAb7e1EuOVqslLCyMtm3b4u/vT1lZGYWFhWRmZrJnzx4yMzPp1q0b4eHh0iZOCCHOkUzcE0KcN0p5hcPhoKSkhAceeIBff/2ViooKgoKCaN++PRMnTmTChAk0bdoUvV6+zPqjioqKWLRoEbNmzeL3338nPz8fnU5Hp06dmDNnDvHx8eqIskz
mE0KI+pOQLIQ4LzwD8i233ML69eupqqoiPDyc7t27M378eCZOnEhkZKTUH58nTqeTrKwsNm/ezLJly1i5ciX79+9Ho9EQGBjItm3baNasGRqNBq1WK0FZCCHqScothBB/iGcf5Pz8fO644w42bNhAVVUVsbGxjBo1ijvvvJORI0cSFRUlAfk80mg0BAQEEB8fT2JiImFhYZSVlZGTk0NVVRVHjhwhOTmZiIgIt3AsQVkIIeomIVkIcc48O1lkZGTw+OOPs2bNGioqKoiPj2f06NHceOON9OvXj8jISGnxdgFotVr8/PwICwsjKioKg8FAaWkpGRkZnDx5koqKChITE4mKipL2cEIIUU8SkoUQf4hrQP7666+ZOXMmZWVlhIeHc+211zJhwgR69epFeHi4BOQLSKPRoNfrCQwMJDIyEo1Gw6FDh8jPz+fEiRNUVlYSExNDXFxcjbZwEpSFEKImmTUjhDhrniPIxcXFbN68mZkzZ1JaWkpQUBD9+vVjzJgx9OjRg4iICCmxuAiU0ovWrVtzzTXXcOzYMWbOnElWVhbz588nIiKCiIgIEhMT1b+HRqOR9nBCCOGFvGsJIc6K6yIhTqcTu93O3r17mTlzJgcPHsRgMNC6dWvGjx9P7969JSBfZBqNBpPJRPv27bntttvo3r07gYGBZGRk8Msvv7Bo0SLy8/Pd/oZCCCFqkpFkIcQ5UTpZpKSk8MMPPzBv3jx0Oh0JCQmMHj2aYcOGERsbK23efMRgMNC/f38mTZpEVVUVu3fvZseOHRgMBqKiopgwYYLa7UIIIURN8u4lhKg319FHh8PByZMnee+99/j8888BiIiIYNiwYdx///3Ex8fLCLKPaTQa7rnnHsrKyigrK+PAgQNs3ryZ6dOn06dPHxISEgDcSi+EEEJUk3cwIUS9eAbk8vJy3nrrLWbNmoXD4cDf35/OnTvz6KOPEhcXJwG5gdDr9dx+++1cddVVREVF4XA42L17Ny+++CLl5eU4HA4pvRBCCC/kXUwIUW9KQLbb7cyePZsNGzZQUlKCRqOhRYsW3HbbbbRv315KLBqYqKgo7rzzToYPH47BYKC4uJh169Yxd+5cbDYbdrtdwrIQQniQkCyEqJO3iXoWi4U5c+Zw4MABHA4HCQkJXHPNNUyYMAGDweDrTRYeNBoNHTp0YMKECQwZMgSArKwspk6disViUUOyEpSFEEJISBZC1INrSLZYLDz77LNs3boVs9lMaGgo11xzDY8++ijBwcG+3lRRC6PRyNChQ7n99tvp3LkzdrudjIwM7rvvPrXbhRBCiNMkJAshauXZD9lqtXLgwAFWrlxJYWEhGo2Gq666ijvuuINmzZr5enPFGYSGhnLVVVdxzz33EBwcjN1uZ9myZcyfP5+8vLwa3xoIIURjJiFZCOGVZ0B2OBxUVFTw/vvvc+LECex2O82aNWPo0KF0797d15sr6qlJkyYMHTqUgQMHAlBaWsqMGTPIzMzEbrdLUBZCiFMkJAsh6qW4uJgffviBOXPmUF5ejlarZfDgwfTu3ZuAgABfb56oJ41GQ/Pmzbn55psJCAjA6XSydetWli1bRmZmJg6Hw9ebKIQQDYKEZCFErZQRZIfDQU5ODq+99hqVlZVoNBpiYmIYMWIEnTp18vVmirMUHh7OkCFDSE5ORqvV4nA4mDFjBrt27XKbwCcjyUKIxkxCshCiBs+v3EtLS0lJSSE3Nxen04lOp2Ps2LEkJycTGBjo680VZ0mj0RAZGcmf//xn/P39ATh69CiHDh2ioKBASi6EEAIJyUIILzwXDtm+fTtPPvkkdrsdrVZLREQEd9xxB+3bt/f1popzFBQUxJgxY+jRowdGoxG73c6MGTNYtGhRjdpkIYRojCQkCyHcePZEtlqtFBcXk5mZidPpxGg0ctttt5GYmIhOp/P15oo/wGAw8NRTTxEZGYlGo+HIkSMcO3YMi8UiZRdCiEZPQrIQwo3rCLLD4eC3337j/fffx2q1otPpaNmyJXfddRdxcXG+3lTxB2m1WkaMGMGwYcMIDQ3FarUyb948vv322xor8QkhRGMjIVkIofIss3A4HBw7doxt27YBEBgYyOTJk0lMTJSV9S4TgYGB3HvvvbRp0waDwcDhw4fZu3ev2uVCRpOFEI2VhGQhhMozJK9evZqFCxdSVlaG0WikS5cujB07VibrXWaSk5MZNmwYcXFxVFZWsmnTJmbPnu21b7KEZSFEY6H39QYIIRoWJSDb7Xa2bt3K77//jkajISoqiokTJ9K8eXP0et+9dBzbsYrHX3iD0kprzTO1fvTo05vefQZx9YhBhAf5+3BPXjpCQkK4+uqr2b17N3l5eRw6dIilS5dyww03qC3iNBoNGo3G15sqhBAXjYRkIQTg3hPZ4XCQlZVFamoqBQUF+Pv7065dO8aNG+fTgAywdPb3/Lzo11rPX7ViCQAJHfrw3iefMX5gF59uLwBOO3M/fYePf1yJRuPHpEemcNfY/r7eKpVGo6Fbt2706dOH/fv3k5aWxrFjxzh27BgtW7Z0C8kSlIUQjYWUWwghgJoT9jZs2MD+/ftxOp1ERkYyZMgQkpKSfB6S7E73FeFqC28Z+zdxyw3Xs+FQrk+3FwCHmdmffsmSJUtYvHg+/527ytdbVENwcDB9+/alXbt2aDQacnJyWL58udvkPSm1EEI0JhKShRA1ak4dDgdr1qxh37596HQ6kpKSGDt2rM8DsietLp4D+afaldmtpOzaxM1Xnx6hrco9wrvvf+vrzbwkKKPJXbt2JSgoiNzcXFatWuW1LlkIIRoDKbcQopFTQo9rQDabzRQUFFBWVkZkZCRdunRp+MtPa3S07tKbTz+dzoZu/TleUAHAhh2/Y6Pmi92J1P2sXL2GTes3U2yxEhARR9+eyQy+8ipaxEfUeVcFJ1P5dfkqtvy+idyySnRB4Qzo3Yv+g6+ifYtYXD9KpO3bzv7UY5woLldPy8s4xKJFiwCITGhF7y5t8fXHD61WS1hYGF26dKF9+/Zs2rSJffv2UVlZiV6vR6PRoNVq3YJyQ/vQJIQQ55OEZCGEyrU3cnp6Ok6nk6SkJK644gqMRqOvN69egmMSaBEUpIZku82O69inpSyXV5/+E+9/8xOlFVVu1/0IDYHBsdw/5e+8+ex9+Ovdv2yzVRXz79de5K0PviLPJfQCfAEYTeHc8thz/PNvTxAeYARnFa8+dCsz1hxwu+z2X7/i2l+/AiAx+Tp2b/iRYKPvv9jT6XR06NCBrl27smnTJgoLC5kxYwaTJ09WJ/Bptb7fTiGEuBjk1U6IRszbKLLD4eCHH37g8OHD6HQ6OnbsyPDhw329qfVWnHWc1NIy9Xib5q3V0QCHuZjJN4/lrY9n1gjIp/YI5aVZ/PvlR3nwb5+4hWunrYKXH72TZ9/8sEZAVliqCpnx7gvc9pc3qLSd7jNcF4fd7utdplJKa7p3705sbCxFRUV89tln6gp8rs8RKb0QQlzuJCQL0ch5q0fOyMigtLSUhIQEOnbsSGRkpK8388yPw2EjM3U/z/zlL6QXVo8ia3
UB3HTTeLWUYe7Hb/DNwo3V4VdjYMzdT7Jpxz6OpaWybP5/6dmqafUFHVb++/YLrNx/etLf2h8/5V8zfsbhdIJGx8Cxd7F6006OHUtjw4r5XNmtjbIhLP7iHWav2gcaPx5+6Q2mvTWV7gnR6m216XU106ZNY9q0abz59ycJbACjyFBdPmEymUhKSqJr1644HA6Ki4vVyXuy+p4QojGRcgshGjHPEWSHw8G2bdsoKCjAbrfTvHlzWrdu7fO2b7Vx2HMY1bsjRp0Gp8NOQV4O+UWlAGh0RiY8+gL3je1TfVlLMe+9/4U6Otx52K18+9E0Qk3Vj615YhLfhhnpO/xmSqwOHJZCvvnfzwx79V5wVvHR+x9TZa++dtPOw5n5zXSahpqqr9s8kf/NjqRz1yHkVNpw2sv55vs53HZVZ/pfPYH+IyrYOfsbdmRUh+6kLoN59tlnfb37vNLpdDRr1oxu3bqxdOlSrFYrW7Zs4YorrsBgMKjPGalHFkJc7hrG8IUQwqdcR5J//fVXCgsLAWjZsiVt27b19ebVwUbakRQOHTpEyuEjakBu1q43733zIzP+MYUAQ/XLXGnGHjam5KnXnHT3/WpAVrTtM5L+nZqpx3ds2ILDCeb8NNbsSlNPH3PL3WpAVkS37seYgR3V44e2bKPMUnepRUOk1WqJiYmhXbt2GAwGLBYLS5cupaysTMoshBCNioRkIRopb/XIdruddevWUVxcjNFoJCkpiebNm/t6U+ugI7ZJPE3jmxAU4KeeWlZShNYYjMmgU087cXAvp6uQ/ejcuUWNW9P6+dMxMU49np6Ths3pJP/4EYorzKdO1dOlc2sv26KlU9vTt5lfkk5plc3XO+isaTQaAgICiIuLIzo6GpvNxtq1aykvL6/RM1kCsxDiciYhWYhGyDMgKyHZZrORmppKVVUVkZGRNGnShKCgIF9vbq20ulh+23uMjIx0tq37lX7tEgAoPJnCkw/cycrdGeplLeUVLtc0ER5g8naL+IcGq8fMNjNOJ9iqqnCqE/AMhAcGet2egLDT17XZLdgcl2aA1Gq1hISE0L59e+x2O2lpaZjNZgnHQohGRUKyEI2ct7pkgMTERGJiYi6N2lONjjbdB/HFx+8Q7Fc9elxVcIyXX/8PtlNZTufWwq6CnBIvHSqcDkrzitSjMSGx6DSgPdUnuJqFnJISr5tRkluo/h7iH0WAsWHWcte5K0+tXhgaGkqXLtVLeivfMnh2tpCgLIS4nElIFqKR8hxFttvt2Gw2NQQ1b9780gnJp3QYOI4JA7qqxzcv+IZ1+7IBiG3VmtPFF1Z2bDtY4/rWikK2HjymHm/driM6rYbwps0x+Ssh286Obfu97FALG7fvU4/GJbUn1F+HNw2p7Zs3ykiyUo/udDqx2Wy1lltIWBZCXI4kJAvRyHiGY+VgsViYO3culZWVAMTGxhIWFubrzT07On8mP3wPxlO53lqexXufzwQgokV3OsedLh35/qvp5JdbXfcM6+b/l81HlLZvGsZeNwYNEBjbhl4tY9VLLpj5OUfzK9zu+sCan1i8JU09fvW4Mep2oNFgcGnzdnj/bswNcE6fMoqs0Wjw9/cnLi4Og8GAzWbj22+/JT09XVrBCSEaDQnJQjRySli2Wq0sXLgQi8UCQHR0NKGhob7evLPW5+ob6dX69OS7Rd99yoHcSnQB0dx/+3Xq6Uc3zGXkuFv4ZuZsfpk/j2kv/4WJD7yAEpsTuo3hzmu7VR/RB3Hf3ZPQnQq9eSnrGDVqHJ998z9++eVnPpj2MiOvv5+KU8ExomV/Hrp52OmN0hpp2u5014xjm39g6Kix3HLzjYy48VEqbA0rcGo0GvR6PQEBATRp0gSn08nevXspKSlxG00WQojL2aVXMCeEOK9cQ/KWLVuwWq0EBQURFhaGyWT643dwkemDmjD5rhtY/9KHAFRkH+CLmUt5+/HreODlfzBv1SaWbUkBnGxbMYc7V8ypcRuhMR35Zsb7hBhOl5pc9+Bz3L58DTN+2QDA4a3LmXzn8hrX9Q9pxkeffEyLqACXU3WMvmYMb3+5lOp+F05+X/YLvwOxna7CYnMQoPdemuELGo0GrVaLyWQiLi6OzMxM9u3bR2lpqbSBE0I0GjKSLEQj5a3soqSkBIfDQVhYGEFBQQ1yEZHYuHj8/aq3KyouHj99zZrpsbfdS5fmUaeOOdj4e3WwNYY0Ye6SFUx54DaiQmt2qND7hzDq+ntYtnYZQ7s19zgvgunfzeftKY8SHxVWc8MM/gy8agK/rF7JxOFdapzd97p7+evjt+NvdA/DLVp2xN/Q8F6KtVot/v7+al16WVkZVqtVfb5IPbIQ4nKnccornBCNimfLN6vVitlsprCwkI4dO2Kz2ejVqxfPP/88I0eObHAt4Ow2C3l5edjsTvwDQwgPC6ZmTHZSUlBAaWV1Z+Sg0AhCg/xdznaQm5XJ/j272LbrAHYgsV0XunbuRMtmTdDr6gitTieFedkcPLCHTZt2YgXiktrSo2sX2rRohqGOEWGH3UpqygFW//YbhaVVtOral0F9k4kMCaChUCZxVlVVcfToUT766CO++OILAL777jsGDBiAyWTCaDRiMBjQ6XRotdX761Ka5CmEEGfS8IaJhBAXRW2t3wDi4uIICQlRw09DotMbiY2LP8OlNIRERBJS69laops0I7pJMwaPGH12G6DREB4dR7/oOPoNuuqsrqrVGWjVvgut2nc5q+v5gkajwWQykZCQgEajcXueeOtuIQFZCHG5kZAsRCPmWXKhiIiIwN/fX4JPI2c0GomIiFCfB2VlZVgslkuyVl1c+nJzc/nxxx/JyckBIDw8nIkTJxIbG/sHb1nUl91u59dff2Xz5s3qaZ07d2b8+PGX5fuFhGQhGhFvI4BOpxO73a7WmwKEhoZKSG7klA4XSrmN0+kkNTWV7t27ExwcLDXJ4qL7/vvv+fOf/+x2msVi4cknn/T1pjUKGRkZvPjii3z33XfYbDb19OTkZMaMGYPBYPD1Jp53EpKFaKRcQ47NZuPkyZNq4AkODsZkMqHTNZyOC+LC8wy+Go2GgIAANBoNTqeTgoICKisrpVey8AnXb7vqOk2cXzabjVmzZjFlyhTS09N9vTkXlYRkIRohz5Fki8VCVlaWer7JZGqQnS3ExaPRaDAajW4TN6UmWYjGJTU1laeffpqffvqpUX4gkXdBIRo5pUdydna2OjIYGBiIn5+fBJ9GzHWU2GQyYbFYKCwsVEeSZRRZiMvb8ePHGThwICdOnPD1pvhMw5u6LoS4oLyNAlqtVreRZH9/fxlJbsRcnxtarVb9wGS327Hb7VKPLEQjcODAAbeArNPpGD9+vK8366KSd0EhGjHXkKzMGIfqkUODwSAjyY2Qt/Cr0+nQaDSUl5djsVhqDchSdiEuBbm5uaxdu5adO3eSlpaGxWIBqgcHWrduzeDBg+nXr1+NORkFBQVs3LgRq7V68fro6Gj69u1bZ6vMTZs2qQMQgYGBXHHFFfj7+9e43NGjR1m0aBE7duygvLwcqJ5A3aVLF0aPHk1iYmKdj
2fz5s3YbDYCAwPVXuY5OTl8+OGHHDp0CH9/f6677jrGjh1b7/9R5XFptVquueYaXnvtNRITE/npp598/Se8aCQkC9GIuQYd1xd6ZbRQAk/j5PlNg7fzPS8nREO3c+dOXn31VZYuXUplZWWtz1udTkfv3r356KOP6N69u3r6yy+/zEcffaReT6/XM2/ePK699lqvt7Nv3z4GDx6M2WwGquv8//Of//DYY4+pl8nJyeG5557j+++/Vy/n6emnn+bee+/l9ddfJzQ0tMb5zz//PF988YX6mv3VV1/RoUMHbrzxRo4fP65ebvXq1QwfPpzAwEDqo0uXLjz22GOMHz+eK6+8Eq1WS0FBgQ//gheflFsI0cgpX6kHBwerp1VWVrq1hBONh7cArHxoiouLU99g5bkhLiXl5eXcdttt/PTTT1RUVNT5/LXb7fz+++9ce+21HDp0SD29VatWbtez2Wz897//rfV2fvjhB7fgq/wPKdLS0rjyyiv56quvag3IUP16/MEHHzB27FiKi4trnK98u6Pcx4EDB2oEZID8/Hyqqqrqvc9iY2N57733GD58eINcWOpiaJyPWgjh9mKvdDJQVFRUYLFYGuVsZnGa0h6wsrISQA3JdY0uC9EQFRYW1mhfFhAQQNu2benQoQNJSUk1guDJkyd566231OM333yz2+skwLJly9xK1VzNnz/f7Xh0dDQDBw4ETof2ffv2uV0mJCSE9u3b06ZNmxr3tWbNGl5++eUzPtZ//vOfNQIyVI98N9awe65kbwkh0Ol0xMTEqMdLS0upqqqSLgaNTG2LzCjfKmi1WinBEZekoKAgQkND0Wq19O7dm+nTp7N//362b9/Otm3b2LlzJ5988gl+fn5u11u8eLFas9y0aVOuusp9KfqcnByWLl1a4/5SU1PZsWOH22lDhgxRVwf8+uuv2bBhg9v5t912G7t27WLbtm1s376dVatW0aZNG7fLfP7552fsNuE6WhwQEEDr1q0JCgoiOjpaVss8SxKShWiEPL9S1+l0REZGqudbrVZsNpuE5EbGc5lyi8VCUVERDocDjUZDeHi4+iYrtcjiUhIaGso//vEPfvzxR3777TcefPBBmjdvTkBAACaTiZCQEO69916GDx/udr3i4mIyMzPV43fccUeND4qzZs2q8b8wf/78Gt/E3XbbbWg0Gqqqqvjkk0/crtOtWzc+/fRTEhMT8ff3JzAwkP79+/Phhx+6jf5WVFSwcOHCej3m/v37s27dOnbu3MmuXbuYPXu210mDonYSkoUQwOkOBlBdA2c2m93afYnGQwnJNptNHUVTQrK8yYpLkUaj4eabb+a6666rdTRVo9HQq1cvt9OsViuFhYXq8dGjRxMdHe12mTVr1riVcjidzhqlFnFxcQwaNAiorkXes2eP2/m33nqr1/+tXr160bRpU7fTtm7desbH27VrV+bNm0f37t0JCAigRYsWdOzY0Wf7/1Il3S2EEPj5+dG+fXs1JBcXF1NZWamGZHH58yy1cDgc2O12NSQDUm4hLnuuE5gVriPCwcHB3HDDDUyfPl09raioiIULF/LQQw8B1S3Z1qxZ43Ybw4YNU7+t2759Ozabze38gwcP8q9//avGfVut1hqT+rKzs+t8DEajkf/85z81wrw4exKShRBotVpMJpMagHJzcyktLcVut/t60xq8tYu+5Y13PqPCDldOfJSXH5mI9hLOkcqHIofDQVVVFTk5OWq5hRCXOqfTSU5ODitWrGDr1q0cOXLEbaTYc3KfN7fddhufffaZW9CdOXMmDz74IBqNhgULFqi9lBUTJ05Uf8/IyKhxm1988UW9H0N+fn6dLToNBgPNmzf33U6+jEhIFqKRc32hVX4vKCigrKzMrdxCQpI3Tjas+IVFK1YDkONI5IWHb0R7Ce4rbysxms1mcnNzfb1pQpwX+fn5vPLKK3zzzTeUlpae8+3069eP9u3bu5VMbNmyhUOHDtG2bVt++eUXt8vHxMQwZMgQ9XheXt4fehzR0dHyenyRSEgWohHzDMjK8fz8fHUkWcotGg/PzhaVlZVkZ2d77Wwhb9LiUlJYWMjo0aPZuHFjjfM8n8tnes3T6/XcfvvtTJkyRT2tvLycn3/+mcmTJ/Prr7+6Xf6aa64hLCxMPe7ZQQOqyzGUzhdnuu+nn37a17uz0ZCQLEQj5BqIleNarZaEhASOHTumllvYbDa1PtXzOuLy4q0euaKiQg3JMTEx+Pn5uT0P5PkgLhXvvvtujYDcv39/pkyZQvfu3QkKCiIiIoJ33nmHZ5555oy3N3HiRKZOnUpZWZl62nfffUdSUlKNUeqbb77Z7X8lIiKixu098MAD3Hzzzb7eTcKDdLcQohFTAo9Go0Gn03HllVdiNBopLi6muLiYqqoq6XDRCCjBWDnY7XZ1EZHs7Gw0Gg39+vUjLCzM6wcs+QAlGjK73c68efPcTmvdujVLlixh3LhxNG/e3GtwrUvLli3VbhWKvXv38vrrr7ud5trVQtGqVasat7d3715f7ybhhYRkIRox14Cj1+sZPHiwuspTXl4excXFaq/khhiSreYKdm35nRUrVpF2Mo/6bKLDZiHjyCFWrljBihUrOHA0nSqr7cxXdDrITk9l9YoVbNq+t37X8bjf4yn7WLFiBStWrONkfhENZY9662xhtVqpqKggJycHjUZDz549CQ0NdXvOeAZjCcqiIaqsrKSkpMTttDZt2tToZOF0Ot16Ip/JXXfd5fact1gsNRYQufbaawkKCnI7rUuXLgQEBLid9u233/6hOmlxYUi5hRCNjGfAcR1J7tSpEwaDAaiu4SspKXELyb6awFd6Yj/XjZvA3vQCek94lLnvPcuS7z7hpb+/y4G0DMw2B4EhIQwefScf/vt1kqJrtnGylBfy1fR/8/7HX5GamUNZRfVSy/6BQUTENOeuyY/zl8fvJjqoZg/VzINbePKJp1i8ZiulZeXojCZatu/Jq2+9c8aga7dWMOerD3n7X9M5mJZJWUUVoCUkLJQufa/m9Tf/zuAerfFlvHQNx8rBbDZTWlpKeXk5RqORqKgot3ILGT0WDcG8efPq7EgxZswYBg0ahE6nczt906ZNpKSkqCvaVVRU8Pbbb/Phhx/W+76vuuoq4uLiOHnyZK2XufHGG2uclpCQwIABA9xql1NTU7n33nv5z3/+Q5MmTdwub7PZOHz4MMuWLaOysrJe5SDng9PpZPv27W4r+Hl+2IDqeuwNGzag11dHSq1WS9euXWt8ELgUSUgWohHzDDw6nU6doHXixAny8/Ox2+04HA60Wq3PQnJFfia7d+0nzwpbtmzknRcf5q/vzsBqO92/tLykmEXfv89NxWZ+nfMhoabTb4qVhZnce8sE/rdkY41QW1leRmbqPt548VEWr9rALz98TJOQ00E5O2UT11wzlt2pOeppNksVh3at464bR5EYH1brdtvNJbzy2F28/eV8bHbX1bcclBQVsm7J94zeupav5izixsGdLvp+VSgB2TUsl5SUcPjwYfVvHhkZidFoVJ8fEpBFQ7B27VrWrl1b6/kn
TpxgxIgRdO3aldTUVPX0/Px8rrjiCq666iqMRiMbN27k0KFDZ/WNWWRkJGPHjuWTTz7xen5CQgJDhw6tcbper+fZZ59l9erVbn3IZ8+ezapVq+jbty9RUVFA9WDFvn37OHHiBBUVFURGRl60kHzo0CEGDRpERUVFnZc7ePCgW/cOjUbDjBkzuOOOOy7Kdl5IUm4hRCPkGoy1Wm2NA0BKSgrHjh1zq0luCCUXuTt+5ZV3vsJq15DQojXt27XBZFQCsZPNi7/lp/UH1Ms7HRbeevYRZioBWWOgz7AxvPzq33j5hWfp16V19Quh08G2X7/lydc/Ox2knVamPvNnt4AcGt2Ejh07EBcZirWihMOHj9eypU5++epd3vliHja7g+DIFvzrq9nsP3iI7Zt+47ZR/dEA5XnpPPXn58gps+ALnmUWSk1yUVGR2uJKq9Wi1+vVVRmV54mMKIuGTlmI4/HHH6+x0l5eXh4zZ87k66+/5uDBgzidTgwGw1mtKnn77berI6iexo4dW+ttDRs2jClTprgtOa1s04IFC5gxYwYzZsxg/vz5HD58WA2qF/N/raCg4IwB2Run00lWVtZF284LSUKyEMIt7PTo0YOgoCByc3PJzMysUZfs66DssFpw6AP509Tp7Nmzh7179jD7o9cwKit4OCpYu/L0sq1Z+9fx/rcL1ONj7nuFlYvn8fe/vsLfX5/G8pUruLpX9VeuOB3M//wDDhdUv7GWpG3h20Wb1Ou27TeWdVt2smfPXnZs3cD944fV+iJqLT3BtHc/weJwgsbIE298wJ/vmkD7tm3o3nsQn333DW2iAwE4vnM5izam+GR/ugZk5afFYqGwsJDDhw+j0+lITk7G399fArHwqeDg4LN+/inBeNiwYbzzzjteV9RTREdH8/HHH5OcnKyepnworE3v3r3p3Lmz1/O8lVootFotr7zyCh9++GG9Wr8B6HQ6BgwYUON0b3MDPMtLzoWfn1+tHwDO5HJZvl7KLYRo5DxHBgcMGMCuXbvIzMwkMzOTjIwMoqOj1TZwvl+aWMddT7/Du8/fj/7UZlx7+4Mkv/YPfk+rXjnrWNYR9dKrf5lPQdWplQMN8Ux97UkCDKff9AIim/HsUw+x6JancAIVhWls3H6MNsPbsvW33yixnCqT0ATxyuvT6NS8eqnX2MQOfPjtLMrGDWfmip01tvLojvXsOlq9fKxfWBzXjxnsdr4pogW9u7Xg0LI94Kzi9zU7uWv4xS+5UEaPXVu/lZaWkpmZSXZ2NgaDgYEDBxIQECClFsKnbrrpJrKzsykoKKj3dcaOHQtUv849+uijDB06lC+++IJVq1apyzvHxMRwzTXXcN9999GyZUvi4+NZtmwZTqeT2NhYOnbsWOvtm0wmXnvtNcaNG+c2gBAXF8fgwYPr3DadTseDDz7I+PHjmTt3LitXrmTPnj0UFxerl4mKiqJ169b06NGDwYMH07t37xq38/jjj5OQkIDZbEaj0dC5c2cSEhL+8P7u2LEjL7/8stc65LpotVp1v1/qJCQL0Ui5Bl7XoJycnKyO2Bw/fpwjR47QuXNn9Hp9A+mZrOeKPn3UgAygMYbRvGWcGpLLzBU4AQ12du49vSpWaPsudIqrOZmkQ7eeRBsgxwo4LBxJSwXacvDoQZRKYv8mrRiY3MLteobACJKT23sNyUdS9lJur37TtJcX8ODEq/HTue+zw/tOl2rkFV78ryc9J+spXS1ycnI4fPgwVVVVBAcH06NHD/z9/WuUWQhxMQUHB/PCCy/8odvo1KkT7777bp2XGTVqFKNGjar3bSq1+65uuummeo/CxsbG8tBDD/HQQw+d02Pq1asXvXr1+kP7xRuTycQrr7xy3m/3UiIhWYhGxnMhCNeQrNPpaNKkCSaTCY1GQ0ZGBikpKZSVlbl1NvD9aHKNR4X+VFcOd3aKs07XEzeJjMfbl5Cm0HD8A41QZAEc5JZXh+2SUyNNABHhcYSajPXeosL0062kbJYyNq1fe4ZrXPz96dkb2W63Y7FYyMzMZN++fWi1WgIDA2nSpAkGg8Gtbl3qkYWornmeMWOG22larZaJEyf6etPEeSAhWYhGRKPRuI14eHa3UAJQ06ZNOXr0KLm5uRw6dIjMzEzCwsLUDhe+H00+N5WWKq+nO6xWHGqnDA3BxurRZo1LLWKVtRKbw0F9p3JotKf3jdE/gjHjrsZf731/af0CmXzfhIu+PzxHkm02G2VlZRw/fpzDhw9jNBpp27atOmHP20GIxmzt2rXs3r3b7bQWLVrQv39/X2+aOA8kJAvRSHmGYyXwaLVa7rjjDnJzc9m8eTNHjx5l586dtG7d2i0sNbzRZG90hMXHqMcy01Mpd0CQR87NP5lBYcWpxUE0RlontAQgLD5evUxhVjonCiuICghxu669lkVFwpo2Pb0VAeG89p/P6BjTcCazeJZZKCE5KyuLw4cPU1BQQHR0NHfddRd+fn5eS3MkMIvG7rPPPsNut7udduONN56XiXPC96S7hRCNmGvg0el06qF169aEh4ej0WhIS0tj3bp1VFZW1uil6+tOF2emo0eXbuoxW/o2Fm865nEZBwt++B9lpwaS/ULi6de3etnYrm07ozbNKDnG97/85tZneffqufzrywVe77lt2y4EnapBrizIYMmq7b7eGSrXSXquP61WK/v27WPXrl04HA78/Pzo2LGjWmqhPD8kGAsBx44dY9GiRW6nGQwGbrrpJl9vmjhPJCQL0Uh565PsGpQTExOJjo6mpKSEffv2sW3bNmw2m9vIY8MPyTBk7Hgi1YVFLPz5wbtYu/0wDqcTS1Up87/6F6998qOyVxgw7jY6RFeP+HYfNoL4AKUO2cF/nnuIN97/kl+XLuH/Xn2a4dfdQXaJ9xKOpB4D6dnh1Giy08zrzzzIdwvXUF5Z3Q/ZabeSlZnGLz/9wNRp71FqcXCxOJ3OGgHZZrORn5/Pnj17SElJwc/Pj6ioKPX5oPRJlppkIaqdPHnS7TUwKCiIiRMn0qNHD19vmjhPpNxCiEbGNdTUtaBI//79OXjwILm5uWRnZ7N27VquuOIK9Hq9ej2HozrY1dVH1NdiWvflxcdu4cl3vgXgxK7VDL+iG8GB/jgddkqKi7E5qt/oopon84/XnlBHD/yi2vGXB27gmX/OrG4PV5zJS4/f63b7cS1aUZZ6hDKP+9UHxvDKC08w5q5nqbTayT++hzuuG05IcDA6rQacTiyWKsrKKiC6LTfedx/toy78Mq6uk/SUgKwsQ33w4EFSU1Mxm820bNmSSZMmeX1uuC46I0Rj1bdvX44fP47NVl1ypdPpzqmXs2i45FVOiEbMsy7ZdSS5VatWxMfHo9FoKCgoYO3ataSlpamjyZ5LGV/Q7TzL+metxuWlTWvgsdfe49VHbyfAVN0Bw1JVQX5+PgWFRacCspZ2vUfy4/w5JCdGuN4zj/31/7hrZD+0nnev0dFz2I3Mn/0VYbVsx5U3Pcbn/3yV+PDqBQwcNitFhQXk5+eTX1BAaVl1q7qQkAhC/C78mIW3lfXsdjs2m42Kigo2b95
MSkr1oiZhYWF069YNvV6vjiLLBD4hTtNoNISGhhIZGUlkZCRhYWFSi3yZkZFkIRqpugKyTqcjKCiIxMRE4uPjycjI4OjRo/zyyy889NBD6HQ67HZ7jZB0ocJSZKs+fPjpxxzKyMU/vAljB7X1fDRMeeUfdBm8BYfGj+HjJrg1VDOYwnjlP18y6f6H+fb7H9m+ZRPHc4vQ+gXQu3cfBl41mutGDiUsyK/GffuFNuHjuYsY9e0Mfpy/gAPHs2japgvjrruJWydeQ7DBxjsfv8eRvBI69R6F3iVNa3RGbnnkRYaMvpF5P8xh47ZtbNtbHUKDw5vStXMbWrXrwqirR9EkuP7t5c6VUmbheTCbzezatYsdO3aQm5tLeHg4LVu2JCQkpMbzQgnKQghxudM4L4WiQiHEeaX827uOKNpsNqqqqjCbzVRWVlJZWcnevXuZOXMmP//8Mzqdjo4dO/Lee+/Rrl07TCaT2wijhKeGz2azYbPZsFqtWK1WbDYbFouF7OxsPvzwQxYuXEhOTg49e/bk7rvvpm/fvgQGBhIQEICfnx9+fn4YjUa3v/el0eVECCHOnpRbCNHI1TaSrNPpaNOmDb179yYuLg6r1crBgwdZtGgRJSUlXutaRcPlWWLhunjI7t272bRpE3l5eQQHB9OhQweSk5NrPB+k7ZsQojGRkCxEI1RbXalrJwPl0Lp1awYMGIBWq8VisTBr1iyOHz+O2Wyu0WdXvphqmGors7DZbJSWlvLLL7+QkZGB3W6nTZs26jLknvXI0tlCCNGYSEgWQnhddU8JRUlJSfTv3x+TyYTD4eDYsWOsWLGC/Px8t1FkZUKYaFhcJ1h6fqixWq1qH+zi4mL0ej09e/akf//+aLVa9Hp9jUVEJBwLIRoLCclCNGKegUcJyMrooTKSmJCQwLXXXotWq8Vut/P9999z7NgxrFar29f4UnbR8CijyEo9ss1mU4+Xl5fz7bffUlxcDECzZs2IjY11G0F2/d3bCLIEZiHE5UpCshDC68p7er0eg8GAwWCgadOmXHPNNeh0OpxOJ8eOHePHH3/k8OHDauhyPUjZRcPgWmbhGpCtVitlZWXs2LGD+fPnU1VVhU6nY8KECQwdOlT9uxsMBjUkK88RIYRoLOQVT4hGzLMm2dvKezqdDoPBQGRkJCNGjECv12O321myZAn79u2jqqrKa72rBGXf8tYPWTlYrVays7OZOXMmFkv1CoAJCQlER0fj5+dXY7Keay2yZ12yEEJcriQkCyFqncTnOnkrPDycsWPHYjQa0Wg0ZGVlsWHDBg4dOuRWduFaoyxB2XeUkKyMIrsG5KKiIvbv38+GDRuA6pXCrr/+etq1a+d1wp60ehNCNEYSkoVo5LyNJruOJCplF35+frRo0YJrr72WwMBA7HY769evZ9u2bZSWlspocgNR1wiy0gs7PT2d5cuXU1hYiFarZfz48fTu3ZuIiAjpaCGEEKdISBZCANQouVAm8CkHg8GAyWTi2muvpVmzZhgMBtLS0tiwYQP79++vtSWcBOWLq7YRZKUnck5ODtu3b2f9+vVoNBqio6MZNWoU0dHRNeqQXUeRPUssJCwLIS53siy1EELlOZLscDgwGAzYbDZ1NLlZs2YMHTqUwsJCcnJy2LJlC3FxcYSHh9OqVSv8/PxqjDrKV/UXh+cKiq6T9SwWCwUFBWzfvp3ly5eTlZWFwWBg+PDhxMbGYjKZakzY87aICEhAFkI0DjKSLIQAqNfCIkpQHj9+PJ06dSIgIID09HSWLl3KggULyM3N9drtQhYaufCUUXuHw+EWkJVlqEtLS9m9ezcLFy5kw4YNGI1GWrVqxe23305QUJBbQPYMyVJqIYRojCQkCyFU3mqT9Xo9RqOxxuGmm25SV2ZLTU1l3rx5rF27lpKSEgnKF5lrOLZarTUCckVFBSkpKSxYsIC1a9fidDqJi4vjxRdfrPF3rWsUWYKyEKIxkZAshFB5C8hKYPIMUS1btqRz587ExsaqvZO/+uor1q5dS2lpqRrWJChfWEqJhedCIcqhqqqKgwcPMnv2bNavX09FRQUhISF06tSJ6Ohot37YnjXJ0u5NCNGY6V599dVXfb0RQoiGx7X+1PWrfNew26xZMwoLCzl48CA2m42KigqqqqoIDAwkLi4Og8FQ621L6PrjaqtBVj6gWCwW9u3bx8KFC1m/fj1ZWVloNBo6duzIn/70J4KCgjAajZhMJkwmE35+fvj5+dU6miyEEI2JTNwTQrjxnHDndDprtIJTwlhQUBBJSUnodDpsNpu6iltQUBAxMTF07NiRwMBAt9t1Ha2W4HVuPD+0eNYgKxP1srKyWLlyJZs3byYrKwu73U6PHj249dZbCQ0NVUtplHBsNBrVcOw6iiyEEI2RhGQhRK0828IpX8ErIWr37t189NFHWCwWtFotDoeD7Oxstm7dSmRkJFqtlrZt2xIcHAycDndOp1O9LQlhZ8dbH2TPkGw2mykoKGDDhg2sXr2aI0eOqEtP9+vXj5YtW6p/T8/VFV1bAAohRGMmIVkIUYNrcPW2wIhOp2Pnzp28+OKLVFZWAtVf/Wu1WqxWKxkZGaxfv57w8HB1ERJlRNn1dp1Op9S81pPrBwzPBUJcDxaLhcLCQnbt2sWSJUs4cuQIFRUVarj+3//+R5cuXQgNDVWXHPcMyzJZTwghZOKeEKIePCf0bd26laeffloNyHB6hNPpdFJZWcnRo0dZv349mzZtIj09naqqKq+r8ikr88mEvtp5lld4C8euAXnfvn0sWbKE7du3U15e7rZ/c3Nzefnll8nIyFBDsmtQltX1hBCimowkCyFqUMKR0+msEZh+//13HnvsMcrLy2tcTwly/v7+lJaWsmfPHreAnZCQQEBAgHofruFNCWfyNb8715ULPUeQlZ9Wq1VdLGT//v0sX76cNWvWUFRUhEajqbH6YU5ODk8//TQfffQRnTt39rr8tBBCNHbS3UIIUSvP+tdVq1YxefJkSkpKar1OkyZN6NGjB0VFRRQWFpKbm0txcTEajYaQkBBCQkLqFYQlqOG2xLfn6LtrSDabzWRlZbFjxw6WLVvG8uXLycvLU/eht9Z75eXl/Pbbb1x55ZXExsbW6GYhgVkI0dhJSBZC1KAEKtev+VesWMG9995LUVFRrddr3rw59913H23btqW0tJTi4mLKy8vJycnhxIkTmM1mYmNjvQZlZWTZ9XhjDWjeyitqC8hVVVVkZ2ezatUq5s2bx8aNGykqKsLf35+kpCQqKysxm81e76esrIyVK1cydOhQt6Aso/pCCCEhWQjhQQmqrqOYy5Yt45577qGwsLDW6yUlJfHggw8SFhaGXq+nRYsWABQVFVFWVkZ+fj5paWkUFhbStm1b/P391SWPXe/XW31yYwrLriPHyiIhtQXlqqoqTpw4wZIlS1i0aBF79+6lvLyckJAQ+vfvz8iRI+nYsSMHDx6kqqrK6/2VlJSwYsUKhg4dSkxMjFvHkcb8QUUIISQkCyFUtQXku+++m4KCglqv16JFC5544gkiIiLU1mJGo5FmzZoRHB
xMcXExhYWFVFRUcPz4cfbt20d0dDTR0dE1FhzxDMhKaHYNbpcb1w8IykRGz1Fjzwl7VVVVHD9+nG+++YZffvlFbfMWGxvL8OHD6dmzJ4GBgURERNC+fXv27dvnNtHSVXFxMcuXL2fIkCHExMTUGEW+HPe5EEKcicYpU8qFENQMyHa7nWXLlnHXXXfVGZBbtWrFlClTCA0NxWq14nA43G6voqKC9PR01q1bx7p16wAICAggMTGRJ554gr59+xIeHo7BYHDr0evtAFxWnRdc27p5Ts6rrdTCarVSVlZGeno606dPZ9OmTRQUFOBwOOjcuTOjRo0iNjYWPz8/t44kJ0+e5D//+Q/5+fm1bk9SUhKzZs2iS5cu0ulCCNHoyUiyEI2ct/pjh8PB8uXLufPOO+sssWjVqhWvvvoqUVFRbu3EjEajunqbv78/4eHhxMTEYDKZOHLkCFarleLiYo4fP47D4SA0NJTAwEB1hb/aRpO9uRTDm2cwdi2vqK1NnjJ6XFBQwPbt2/nqq6/YuHEjBQUFOJ1OevXqxahRo2jVqhWBgYE1/hZRUVF069aN7du3U1FR4XW7ioqKWLZsGUOGDCE6OrpGQL4U97UQQpwrCclCNGLeArLdbmfFihXcfvvtdU7Sa9WqFW+88YYakJUSC2WZYyUk6/V6/Pz8CA0NJTY2FpPJxOHDh7Hb7eTm5pKdnU1FRQUBAQHqhD7PMOYakD0D86U0yuk6Wu/5oaS2iXlK94ry8nJOnDjBunXrmDt3Lhs3blS7jPTt25cRI0bQrl07AgIC3P4Ofn5+ah/kyMhIevTowebNm+sMyr/++ivDhg0jOjrabT+7/hRCiMudlFsI0UjVFpBXrVrFpEmTKC4urvW6rVq1Ytq0aYSHh7uVQ7guYe258IXVasVut1NQUMCcOXPYunUr+fn5OJ1OmjZtSp8+fejfvz9dunShdevWash2vV3Pn95alTWkEOe5jz1LWpTyirpavZnNZnJzczl48CDbtm1j48aN7Nq1C4fDQUxMDB06dGDYsGG0atUKPz+/GuUpyui8sh1arZZjx47xwgsv1Fl6kZiYyNy5c+nQoYOUXgghGiUJyUI0Up5f9zscDlavXs2NN95IaWlprddr1aoVb7/9ttskPWUkWa/XqwFWCXyui11YrVZsNhsVFRXMnz+fQ4cOcejQISoqKjAajSQlJTFw4EAGDRpEmzZtiI+Pd6tVPlNIri3EXYxQ5zna7bmPPeuOgRplFq4/zWYzxcXFpKWlsWvXLtavX8/27dspKirCaDTSrl07unbtyrBhw9S/hfJ3UPodK4/d9W+sbMOxY8d49tln6wzKzZo1Y968eXTs2FHtRCJBWQjRWEhIFqIRcg3Hys9Vq1Zx4403UlZWVuv12rRpw1tvvUVkZCRarVb9Gl+pfzUYDGqAdTqdWK1W9WA2m9WgrITn/Px8vv32W/bt20dJSQk2mw0/Pz+aN2/OmDFjuPLKK2nevDnBwcFew3JtIVnh7fTzFe5qKwFxXbjDW0BWwqpyWc9aZKvVSkVFBZmZmezZs4cVK1aoo+4ajYaAgABatWrF5MmTiYuLc/uQ4m2JaWU7bDab2wcXu93O8ePHefrpp+sMygkJCcyfP5+OHTt63d9CCHG5kpAsRCPjrYvFypUrmTRpUp0r6bVt25bXX3+d6OhoNSC7TgxTyiOUcOZwOGqEZCUoK6PLNpsNi8XCmjVrWLVqFampqVRWVqq336FDB2655RaSk5Np0qSJOiHNs+vFmUKy6++e57v+PNM+qy0Ynykke47iun5AcQ3HVVVV5OXlceTIEebNm8f69espKirC4XBgMBiIiooiOTmZW2+9FX9//xoBWalD9gzJnqP6ysHhcHD06FGmTJlCbm5urY9fCcpSeiGEaEwkJAvRiNTWB/lMk/TatWvH1KlTiY6OVkOZMjnP9aB8za98xa9MPLNYLGpA9hxRtlgsWCwWCgsLWbVqFXPmzFFHWpXJgN27d2f8+PH069eP6OhodSESzxKM+gRl15+ev3s7XtsiJ7WF5LpGj11/Vx6j3W6nsrKSkpIS0tLSmD9/PsuWLaO0tBSr1arWEV9xxRXcfPPNREZGYjKZ1PIWbx9WXEf1lftQPrQo+1v5GzgcDo4cOcLzzz9PTk5Orc8BGVEWQjQ2EpKFaARc/809+yCfqc1bu3bteO2114iJiXFr82YymdyCsmctrDJKqoxaugZk15CmhGWlzCA9PZ1ly5bx22+/4XQ60el0GI1GwsPD6dmzJyNGjKB3796Ehobi7+/vVuJRW2lFXSHZ23HX02sLw57HvY0m1xWSlf1RWVlJWloaq1evZvny5WRmZmI2m7HZbGg0Gnr27Mnw4cPp0KEDQUFBGAyGGqPHSjB2HUlW9otGo3HrlOH5t1D2/+HDh3nxxRfJzs6u9bnQvHlz5s6dS6dOnRr8pEkhhPijJCQLcZmrrYtFfRYK8RaQlSDmGpKV4Oa5zLTr1/yeAdk1KLsGOLPZTEFBAQUFBfz222+sX78es9mMTqcjODiYuLg4OnbsyODBg+natSvh4eFqqzMlqJ8pLNf1e332YV2nuZZSeIZki8VCVVUVZrOZ0tJS0tPT2bx5M1u3biU1NZXCwkIsFgsGg4E+ffowYMAAEhISCA8PJyAgQA2/ygcHbyHZ9e/hWm7hLSQrB6VeOSUl5YxBOSkpidmzZ9OpUye30XxlP0pQFkJcLiQkC3EZq6029tdffz1jQO7SpQsvv/wyMTExbl/tKwHZZDKpwcw1lCkhybXm1rMWVgnDrl//u15GGVk+ceIEJ0+eZNeuXezatYv8/HwMBgMhISHEx8fTrFkz2rZtS4cOHUhISCAsLIzAwED8/f3V8o+6Ju65BnpXrpfxNnLsrZOFt2DsughIeXk5ZWVlZGRksH//fo4cOUJGRgYnTpygoKCAqqoq/P396d69Oz179iQ+Pp4mTZqopSWeE/OUDyuufwPldNe6bWXbXEeTPUOysv/tdjuHDh3ipZdeOmNQ/uGHH9SV+aTrhRDiciQhWYjLmLfQVp8R5O7du/Pqq68SFhZWIyArByUkK+e7jiIrZQqAGr48w7JrSFaCm+fIsnK99PR00tLSSElJ4dChQ5w4cQKtVou/vz8xMTHEx8fTvHlz9ZCQkEBUVJRbOYi3IF9bGK4rJLvuV9ffPUOoEkTLysrIzs4mLS2NjIwMDh8+TGZmJjk5OZSVlWG324mIiKBt27a0b9+eFi1a0KJFCzXoKvvXdbEWz5DsGp6Vg+vj9AztyiRB15DsWqN8tkFZapSFEJcjCclCXKZqm6R35513njEg/+1vfyM0NNStBtk1ILvWvnp2UvAMmLWtKOcakl0n8LkGZWVUWQmhJ0+eZN++faSlpVFeXk5KSgolJSVoNBpCQkLUwNykSRPi4uIIDw8nOjqayMhIgoODCQgIqBEqlVFa1233FvZdP3C4li641lxXVVVRWVlJfn4++fn5FBQUkJeXx8mTJ8nIyCAnJ4fc3FwcDgdBQUEkJiYSGRmplpC0atVKDZxK+Yrr9rqGZOVv4
vlBxXMBFtftV8ouvJVcKH8PJSinpKTwwgsv1DmZr7bSCwnKQojLgYRkIS5D3kaQlaWm65qk17VrV6ZOnUpISIjbUtOeAdn16/0zjc56bofy07UFnGdI9mwTp4Q71+vm5eWxcuVKTpw4gdVqdRuZ1ev1BAYGEh0dTZMmTYiNjSUkJEQtxwgICCAoKIiAgAD1d9dg6fqYXHsMK4+jvLyciooKysrKKC0tpbKykrKyMkpKSqioqCA3N5esrCxyc3MpKCigsrJS7XHctGlTjEYjcXFx9OnTh8TERPz9/d3CsesIcm2t9pTTXEeOlet7BlXXD0zKBxXlw0htQdlut9er64USlDt37iwjykKIy4qEZCEuM976INdnqekuXbrw5ptvEhQU5DZJzzMke1uwwlvNb20t0ZTtUsopXBcX8RxNdq2VdV0EQwnbyu0VFxezYsUK9u/fT2lpqVoHbLFYcDqd6jaFhoYSFBREYGAgISEhBAcHExwcTGhoqPo49Hq92jVDq9Vit9upqKjAbDarj6G0tFQNxUVFRWoLt5KSEsxms7ofXEfgTSYTSUlJjB8/Xi1jcV0URRk59jaC7G1SnusIsucCK94Cqrf971oT7lmjrPxdjh49yrPPPkteXl6tz53ExER+/PFHdWU+CcpCiMuBhGQhLiPeSizqs9R0ly5deOONNwgODq7RB9lbQPb8Wr+uMFRb9wfX8gvXEWLXJaw9V+hzPbh+CHBtsaa0VNuyZQt79uyhuLjYLbgrobm2bTYYDAQGBuLn56e2TysrK6OqqsptoRDl9pT7VkKqcnpwcDCtW7emV69edOjQgeDg4BqhWKPRuAVjb6vnuU7MU4Kx63Lg3kJpfT6weJaM1BWUU1NTz7gyX/Pmzfnpp59kwREhxGVDQrIQlwFvfZCVpaYnTJhAeXl5rdft1q0bU6dOdQvInuH4TCPI9W2h5m0bPbtgeK7G5xmSlfOUcOz6u+dCHcp9KwtmHD58mF27dpGWlkZVVRVQvwVGPPevZxu4wMBAevXqxbXXXkt0dLS6T5TQ7DrC6xpslbDsra7YcyTZs/67tlH8+nxg8fygcqYRZYfDQVpaGk8++eQZl7CeN2+e1z7KEpSFEJcaCclCXOJqG6VduXIlN998c50jyN27d+fvf/+7WoPs2gfZMyB7hrRzCT/e2qm5hmTXDgyudcveQrIymuxat+x6G7W1Y1NCt+e+U2RmZqojxxUVFej1ekJCQtBoNERERBAeHq5O9IPTwVTZd56TGD1rjV1HjJUR4dpCsutpnuHaNYS6qs8HlvoG5aqqKrca5dTUVJ555pk6Sy+aNWvGTz/9JKUXQohLnoRkIS5hnp0LlJ/Lli3jtttuq7MGuUePHmqbNyWseSux8FxNz9vEsD+y3Z61sq6jwq6t4DzDsmu4cz2uHDz3iWcoV3geVyYSupZRKI9ZCa3eyhpcR49df69tQp7nwTNEuy7Q4hmOXUepz+XvUNsHCM/2dUqbONeuF0eOHGHKlCn1XsL6TL2qhRCioZKQLMQlqraFQpYtW8Ydd9xRZxeL5ORkXnnlFXUCmU6nqzF67G0E2TMEns/t93wcroHZtaTCcwKfazmGZ52z5yi15315WxzEtVTDlRJK4XTI8wyq3oKs66Q8byHZc8KeZzB2DcieYdx1W85133srd1GCshKSXSdTKl0vXnjhhTr7KCsjyq6lF39km4UQ4mKTkCzEJaiupabvvPPOegXk8PBwtFqt1xIL11Zjf7TEor6Pw/Px1Da67NkOznNk2VtQdg3M3kaVvS0S4qm2kWPP4Ort4DrRsbZw7K0sw1vHivM1IuttpcC62sMpo8vKvj58+PAZl7Bu3rw5P/74o9pHWafT1diXQgjRUElIFuISU9so4NKlS7nnnnvqXCikR48e/PWvfyUsLKzONm+urca8LTt8oUKy8rvnyHJtI8zeRpY9Sy48R5XrE5RrU1c4Vn73VjfsGYhdQ3Jt1/G8beX+vf08388lb8tXK7+7jiinpKTw8ssvk5WVVet9JCYmMnv2bFnCWghxyZGQLMQlxltZwpw5c3jkkUcoKSmp9XqeAflMNcjeVtK7UMGmrqWfXR8v4DYxz3Ximbe2ct4OZwrJntvj6mwDsuviJJ4lFPUJxp77+0LU9NZWtuPaXUQJx0rv6bMNyrKEtRDiUiQhWYhLiOfKaQ6Hgx9//JEHH3yQsrKyWq/nudT0ua6kdzECTW0lGK6/1zbZzzMce36Y8BxRPt8h2bM1W13lF7WVUdTVzu1C7X9v+9e1b7XrstveRpTrU3rRokULZs2aRefOnaXrhRDikiAhWYhLhLeV9H788UceeOCBM/ZBfu211855qWlfdSSorfzBM9i6BmHP32s7fi4h2duHBc82bJ4T1LyNFHv+rK384GLvd2+9tl0nR1qtVreQ7NlH+fDhw/VawlpGlIUQlwoJyUJcArwF5Llz53L//fdTUVFR6/WUpaaDg4Pd2rxdqD7IF/Kxe57mrZ7WszSjrhpk1/PPNSTXNqrsretFXSPGrp0zPO/PF/v6TO3hvHW9UNrDPffcc+Tm5tZ6H0lJScyePVudzNeQnmtCCOFKQrIQDVhtoWXevHncfffd6qpx3rgGZKUG9lILyHXtE9fj3sJubUHYWyCuz6Q9qH11vrrKJc50GW+33xD2r2epiusS1q5dLzz7KB89erTeS1i79lGW9nBCiIZGQrIQDVRtnQfmz5/PHXfcgcViqfW6Xbt25Y033lBHkGtr8+bn5+fWaaGhB+Ta9pHr8do6ZpzNz9rUtYR1XfXDte3ThrqP6+p64S0oKyPKSpeR1NRUnnrqqTqDcrNmzZg3bx4dO3aUPspCiAZJQrIQDVBtI8g//fQTd999N2azudbrdu/enddee43g4GB1hPhMNciXS31obSUT3sJ0Xdc9E2/7p64Jdhdj8t35VlfpRV1BWRlRTk1N5emnn65zCWtlZb4OHTpckh/ShBCXNwnJQjQwtYWTuXPnct9991FZWVnrdZWlppUuFt5GkC/FEovztU9rO17babU525B8qTqbPsq11SjXdwlrCcpCiIZGQrIQDYznBDSli8XkyZPrnKSXnJzMX//6V0JDQ+s1guytxAIun4B3Ns7lZbCx7KfaPrTVtTKf5xLWZ+p6oQRl19ILCcpCCF/T/vGbEEKcL54jd65t3uobkM/U5s1bDbJCAkn9NKb95Flz7brMtsFgqPFthecHsVatWvHGG28QGxtb631kZGQwfvx49uzZo/a69jbJUgghLiYZSRaigfDW5m327Nk8/PDDdS4UkpyczCuvvFJjJT0ZQRbnk7duIcpEPWXk2HWhEdfOFw6Hg5SUFF566aV6LWEtC44IIRoCGUkWogHwtiRwfQJybUtN13cEWQKIqC9v7eyU59OZRpS1Wi1t2rRh6tSpdY4oHzt2jIkTJ7J7924ZURZC+JyEZCF8qLYllufMmXPGgNy9e3deffVVwsLC1K++6wrIjWWSnrhwagvKnqUXSlj2DMqtW7fm7bffJjExsdb7SEtL46abbnILygoJykKIi0nK
LYTwkdoWbTibpaa91SCbTCY1oDS2Lhbi4jhT1wtlCevaul5kZGTw/PPPc/z48Vrvw3UJa1lwRAjhCxKShfCButq8nWmp6a5du/L6668TEhLiVmLhOVFPGdlTvhL3XB5ZiD/CtYZeeR7X1R7OdWU+u93OiRMneO6550hPT6/1PhITE5kzZw6dO3eWrhdCiItOQrIQF1ldS03fc889dfZB7tKlC2+88YYakJUgbDKZ3L7i9vPzUwO0jCCLC8U1KCs/zzYoP/vss2RkZNR6H82bN2fu3Ll06tRJ/bAnz2UhxMUgIVmIi8w1HLsuNX3nnXfWuZKestR0UFCQWgPqWv8pXSyEL/zRPspZWVk89dRTZGZm1nof3pawlqAshLjQJCQLcRF5TtJzOBzMmzePO++8E4vFUuv1unfvztSpU90CsjJRz2g0qiPJdU3SAwnI4sLwFpSVLi1KazjXUWXluMViwW63k52dzZNPPllnUE5ISFCDsnw7IoS4GKS7hRAXibeJenPnzuXuu++uMyD36NGD1157jeDgYLdJeiaTST3Up4uFBAlxoXg+z5QFR1w7X3h+4+FaMx8XF8e7775Ls2bNar2PjIwMrrvuOvbt2yft4YQQF4WEZCEuMNc3cdc2bz/++CP3338/VVVVtV43OTmZv/3tb2oNsufkPNfQUVcfZCEuBteQ7NoeTvkQV9sqkFqtliZNmjBt2jSaN29e6+0rQXn//v3q/5JrUJawLIQ4nyQkC3EBeRs9Ptelpl1rkD3bvLm2epOALHzJtbTHc0TZW1BWnr9arZb4+HjefPPNOvson2kJawnKQojzRUKyEBeIZ0BWJjPNnj2bhx56qM4+yEpAdl0oxDUgu07QU0aQJSCLhsJb6YXyIc51VNn1oJReJCQk8MYbb9QZlI8fP86ECRPYs2ePW0gGJCgLIc4bCclCXAC1jSDXZ6lp14CsjCAro27KCLLnYiHSGks0RN5W5vNc+EZ5TruOKCckJPD666+TlJRU623XtoS1EEKcLxKShTjPPEe0lDZv9VlqukePHjWWmq5t0pOspCcasjMtYe05kuwZlJs1a8bUqVPrDMppaWlqUPZWoyyEEH+EtIAT4jzzDMf1XWq6e/fu/P3vfyc0NLTGV9PKqJtnVwC9Xl+jxZuEZNGQ1LaEtWsfZYvFQlVVldomzrWPcmZmJi+88ALHjh2r9T5cl7CWPspCiPNFQrIQ55HnQiE2m42ffvqJyZMn1xmQz3ahEKX+WPogi0tBffooK+HYtZeya1B+/vnnOX78eK33oSxhrazMJ0FZCPFHSUgW4jxxbfOmjCDPnTuX++67r86lprt27cqbb75JYGCgWxcL16Wmla+ipcRCXKq8lSEpy1crS1V7LmPtGpRPnjzJM888U+cS1q5BWZkwKP8jQohzJSFZiPPAWx/kefPmcdddd9V7qWnXgOxaXuEakKWLhbiU1bWEtbeV+TyDcn2WsG7evDk//fQTHTp0kA+TQog/REKyEH9AbW/68+bN44477sBqtdZ63W7duvH666+rAVmv17v1j/VWgyxv+uJy4DqarHywtFqt6qiya0hWgrIy6pyTk8MTTzwhS1gLIS446W4hxDmqrc2bstR0XQG5e/fuvPbaa2pAdi2rcG2L5dnmTd7sxeXAs4+yVqtVS4k8u7l4fpMSGxvLu+++S0JCQq23L0tYCyHOBwnJQpwDbwHZZrPx448/ct9999W51HSPHj3429/+pnax8OxgUVs4loAsLje19VGuLSgr7eHi4uL4xz/+cVZLWHsuOCKEEGciIVmIs+RtApJrm7e6JuklJyfz6quvqktNewYCzw4WMoIsLlfe+igrI8re+oO7fnjU6XQ0adKkXktYX3fddezdu7fGgiMSlIUQZyIhWYiz4BmQ7XY7NpvtrJaaVgJyXSFACQIygiwuZ8rzWXmeuy5h7bnQiOeHSJ1OR9OmTXnjjTfqXHAkPT2d66+/Xl3CWkovhBD1JSFZiHryLLFQZuXXZyW95ORkXnnlFbeA7LrctGsAkBFk0Zi4jii7BuXa/k88R5SbNm16xpX5jh8/zoQJE9QlrO12uwRlIcQZSUgWog6ub6Ses/HPZqlpZQTZ9evkupaadh1BFuJy51l64TqiXNcHSiUoJyQkMHXq1DpLL44dO6YuYS2T+YQQ9SEt4ESjV9e/gGf9YmpqKhkZGeTm5rJhwwY+//xzKioq1PM1Go3b7bVo0YI//elPNGvWjPDw8BpfH3u2eVNGkF0XQVBuV4jLnev/juuHUaWPsmt7ONd+ylarFYfDQUZGRr2WsJ41axZdu3aVciYhRJ0kJItGx/Mp73m8oKCAgwcPsnv3btLS0igsLFQvV1hYSElJCcXFxezZswebzeZ2G64hWavV0q5dO0JDQzGZTAQEBKgBWKfT0b59e7p3706rVq1o3ry5Wl7hWWahkDdw0RjUFpSVPsm1LWFts9mw2+1kZGSccQnrpKQkfvjhBzp37uxW+w/yfyaEOE1Csmh0XJ/yTqeTRYsWkZmZSXFxMVarlZKSElJTU0lJSSEzM5OKiooaM/BrG+X1/PrWW7mGUrKRlJRE27Ztadq0KXFxcept9+nTh969exMcHOx2+/LmLRoLby0WlbCsrMBnsVioqqpSQ7JyusPhIDMzk+eee4709PRa7yMxMVFdmU+v18toshCiBr2vN0CIC80zFOfn57N3715OnjyJ0+nkq6++4vDhwxQUFGC1Wt2+gtVqtQQFBdG0aVMCAgIIDw8nICCAoKAgAgMDgdpDstPpxGKxUFpaSmVlJaWlpZSUlJCbm0tpaSknTpwgIyPDrS0VwMiRIxkxYgRhYWHExcXRrl07mjRpgtPplMAsfMrbh8CLdX+u39C41iwr5Rg6nU79PS4ujmnTpvHMM8/UujKfzWajoqICq9WK0+n0+u3NpcDzA/ultv1CNGQykiwuW65P7ZycHE6ePElZWRn79+/n22+/ZfPmzW6jR35+foSFhREZGUlERIRaHuHn50evXr2IiIigRYsWhIeHEx0dTUxMzBm3oaKigpMnT5Kfn09WVhaZmZns37+f3NxcdRSstLSU/Px88vPzqaiocJtQ1LNnT66//nr69u2LwWCgSZMmJCQkSBmGuOicTicnT57k888/Z9OmTTgcDvX0i3X/Ctf/EddJeJ4t3sxmMykpKZjNZrfb8vPzo3PnzgQHB7tNkL2U/pdctzUoKIiBAwfywAMPYDQavV5GCHH2ZCRZXFZcR7isVit5eXmYzWZ+/vlnvv32W/bt26deVq/XExQURGRkJMHBwYSFhdG1a1cGDRpEz549iY2NrXH7Z/umExAQQKtWrWjVqpXXMFFSUkJKSgrr1q1j7dq1nDhxguLiYoqKiigtLWXbtm1s3boVrVZLeHg4t99+O5MnT8ZoNBIREYHJZFJrKc91G4Wojee3MFVVVRw5coS1a9f6etNq3U5vp3v+jwDs37//svhfcTqdhIeHk5SUpH5wcT3vcniMQviKhGRxWVFGlioqKjhy5AgPP/wwR44cwWw2Y7PZ0Ol0+Pv7Exg
YSFBQEN27d+fGG29k8ODBBAUFqcvjXoivLb3dXmhoKMnJyXTv3p2HH34Yp9PJhg0bWLBgAevXr+fEiROUl5dTWVlJXl4e//nPf/joo49o0qQJb7/9Nn379iUsLAyDwSBvhuK88jbB1W63U1VVRXl5OXq9npCQEK8BVFwcZrOZiooKKioqsFgs6ki668i4BGUhzp2EZHHJc61XLC8vZ82aNbz88stkZmZSVFSEXq/Hz8+P4OBgIiIi6N27N+PHj6dv374EBQWpLdh89UbiOrMeYODAgfTp04fKykqOHDnC0qVLWbhwoRr2LRYLqamp3HPPPRiNRh599FEefPBBIiIipAxDnHee/cGV41arlVtvvZXOnTtjMpl8vZmNTlFREUuXLmXx4sVu5SaunXaEEH+MhGRxyfLsIDFz5kw+/vhjMjIyyM7ORqPREBgYSEREBD179mTkyJEkJycTGxtLSEgIAQEBDfKNRFlQxN/fn6CgIFq3bs2kSZNIS0tj+fLlzJkzh/z8fKqqqigtLeWDDz5g5syZ9OvXj9dee43Y2Fi3VnQN8TGKhs1zcp5rv2LldJvNRt++fRk1ahShoaHyPLuI7HY7x48f58iRI+ppDodDbYPn+cEb5HVAiHMhIVlccjxn1v/zn/9k06ZNHDhwgKNHj+J0OjGZTAQHB9O7d29uueUWWrduTZMmTdTShEuBMplQmVDYpEkTWrZsyTXXXMOCBQtYvHgxmZmZlJaWUlhYSF5eHunp6URHR/Pmm28SFxeHXq+Xr1vFOfMMyK4hGXAbXZbn2MXhcDjUlnd2ux04XQqjhGSFrNopxB8jIVlcksrLy/n444/JzMxk8eLFpKen43A4MBqNBAUFkZiYyPjx4+nduzc9e/YkMDDwkn6z0Gq1BAYG0rJlS1q2bEl0dDQ9e/Zk/vz5bNy4kby8PEpLS1m9erW6et/o0aO58soriYiIkBAjzolnb2+bzSZLOPuYsqiK5yQ9z5Fk5e8m//dCnDsJyeKS4PrGnJ+fzxdffMH06dPJysrC6XRiNBoJDAwkKSmJ/v3706NHD0aOHElMTMxlObGoQ4cOtG3blpiYGDp27Mjvv//Ojh07KCoqwmKx8P3333Ps2DH27NnDuHHj6NChg1o3Km+a4mx4jiYrq0yKi08ZuffGdbRfWUXQlQRmIc6ehGTR4CkjIiUlJRw5coQNGzYwdepUzGYzfn5+BAUF0bx5c1q3bk3fvn25+uqradOmja83+4LT6XQMHTqUbt260a1bNxYuXMj+/fs5fPgwRUVFrFu3jnXr1nHixAnGjh1Lly5daN68uXp9ecMUdXEdQVbCV20BTVx4SklFbec5nU51JFkZTZb/cSH+GAnJokHy7M+am5vL2rVr+e677/jll1/QarWEhYXRtGlTWrduzdChQxkxYkSjCMeewsPDGT16NP3792f9+vXMmzeP/fv3c/ToUYqLi5kxYwa///47EydO5KabbiIxMVEWHBB18rasureaZNFwuC7f7drlQibwCnHuJCSLBsf1xd1isWA2m/nqq6/45z//SUFBAQaDgZiYGHr27MmVV17JhAkTiIuL8/Vm+1xERARjxoxh5MiRLF68mHnz5rF582bS09M5cOAAU6dOZfny5Xz44Ye0aNECo9Go1i7KG6jw5PlB1bUNnGh4PFcfdO1QIoQ4NxKSRYOivKgrL/SzZs3i/fffZ/v27QAEBgaSmJjIqFGjeOaZZ4iMjLwsa47/CKPRyLhx4xgzZgyff/45s2fPZufOneTn57NmzRpGjx7N66+/zrhx4/D395feyqJWnhP3XEcoRcMiH2CEOP8kJIsGwfPrXbPZzLvvvss333zD8ePH0el06ujxDTfcwPjx4wkMDJSAXAetVsudd95Jx44dmTt3LosXL+bQoUNkZGTw7LPPkpqayuTJk4mMjKxxXQnLwhsJyA2b64ca+VsJ8cdJSBY+5Vk353Q6KS4u5vHHH2fdunVkZ2ej0+lo164dw4cP5+qrryY5OVldQlrUzc/Pjx49ehAaGkqLFi2YP38+69atIycnh48//piDBw/ywgsv0KpVK/UDhyxlKzxJ8Lo0yN9HiPNLQrLwGW+Tg3Jzc/nrX//Kr7/+SnFxMX5+fgwYMICrrrqKK664gk6dOhESEuLrTb+kBAQE0KZNG0JCQtTVBleuXMnJkydZvHgxVquVP/3pT/Tq1UuCshCXKAnIQpx/EpKFT3gLyMePH2f69OnMmTOHsrIyQkJCGDJkCKNHj6ZPnz40a9ZMAvI58vPzIyEhgYCAAIKDg4mMjGTJkiWcOHGChQsXYrVaefDBBxk6dGiNyXwSlIUQQjRGEpKFz7gG5IMHDzJjxgy+/PJLSktLiY6Opm/fvtx888307t2bmJgYAgICfL3JlzStVktUVBSDBg0iICAAjUbDihUrOH78OPPmzcNms6HVahk8eLBbQJYRZSGEEI2RhGThM0pATktL45tvvuFf//oXGo2G+Ph4hgwZwrhx4+jTpw+xsbFufX3FH2MymejcuTO33XYboaGh/Pzzz6SmprJgwQKsVitBQUEkJycDskqXEEKIxktCsrjoXEeQc3Jy+PLLL/nss89wOp1ERUVx7bXXcvfdd9OyZUvCwsLQ6+Vpej5ptVrCw8O54ooraNasGX5+fsydO5eUlBSWLFlCeXk5n3/+Oc2aNZOSCyGEEI2W9M8SF5VrH+TS0lLefPNNPvroI4qKioiIiGDAgAE88MADJCcnExUVJQH5AtJqtSQmJvLoo49y880307JlSwC2bNnCQw89RH5+vrrCmnQ3EEII0dhISBYXnd1ux2Kx8Je//IWZM2dSWlpKaGgoPXr04KabbqJLly4Sji+i6OhoxowZw5gxY0hMTKSyspItW7bw5z//mYKCAnUVL4UEZSGEEI2BhGRxUbiu2mWz2fj3v//NunXrKCoqws/Pjw4dOjBhwgSuueYadDqdrze3UdFoNHTo0IHRo0czZMgQQkNDKS0tZeXKlUybNo38/Pwaq61JUBZCCHG5k+E6cUF5tnqzWq0sX76cmTNnkpGRoX7lf80113DdddcRFBTk601ulIxGI8nJyRQWFpKZmcnq1aspLCxk1qxZmEwmHnjgAZo1a6ZeXmqUhRBCXO5kJFlcMJ61rA6HA7PZzP/93/+RkpKC3W4nPDyc4cOHM27cOGJiYny9yY1aUFAQ/fv3Z9KkSSQlJaHT6cjKymLWrFmcPHlSHUmW+mQhhBCNgYRkcUF4C8gFBQVMmzaNtWvXYrFYCAwMZNCgQYwZM4a2bdv6epMF1fXJw4YNY9KkSURFRaHRaEhLS+PTTz9l165dMpFPCCFEoyEhWVxwDoeDvLw8fv75Z9555x2sVitarZbOnTtzww030Lt3b/z8/Hy9mYLqMoqmTZsyadIkhg8fjp+fH06nk//+97/8+OOPpKenu9UmCyGEEJcrCcniglJGkQ8fPsy///1v7HY7Go2GyMhIbrjhBgYOHEh4eLivN1O40Ov1JCUlMXnyZNq1a4fBYMBms7Fw4UJ+++03zGazTOIT4pTU3eu4/9
YbaJYQT1LLzjz4zDTKrPI/IcTlQEKyOO88yywKCws5cOAABw4cwOl0EhAQwHXXXcf111/vNhlMNBwmk4nevXvzl7/8hYSEBLRaLXv27GH16tUcPnxY7Xbh2hpOiAvK6eTE8SNs376d/SnHsDeAD2f5RzYxctRoPv9+LhmZJzmWupcZ33xKRnGVrzdNCHEeSHcLcV65BmS73U5FRQU//PADzz//PHa7Hb1eT58+fXjppZdo2rSprzdX1MFkMjFp0iT27dvHN998w8mTJ/nf//6HVqvl7bffJiQkBI1Go44kS8cLcSFZyjIY16cbW7PL8fNvwpoDB+ndPNin2/T9h//k8MliANr0vJJbRg+ioNRGhL/B17tLCHEeSEgW541nuzeHw8H8+fP56quvqKysRKvVEhERwbvvvktsbKyEqkuA0Wjk8ccf58iRI/z666+UlJSwatUq/vWvf/Hiiy+i0Wjc/o7yNxUXitNho6y0HABzZQkVFruPt8jBms1bT/3uz9R/fcJNA1v7eJuEEOeTlFuI88p1FNlut3P48GG1zCI0NJSbbrqJdu3ayYp6l5C4uDgmTZpEp06d0Ol0nDhxgm3btql/Y8/WcEI0DlZy8/JP/R5Gy6ZRvt4gIcR5JklFnBfeWr59+eWX/PTTT1RWVhIQEEDnzp154IEHMJlMMuJ4CdHpdAwdOpStW7eSkZFBeno6mzdv5rHHHuODDz5Aq9Wi1Z7+vO10OuXvKxoEp8PGifRjbNqwnpMFpTRp3o4+fZJpEh2G1stztKqijOPHjrBp7QZKrA6im7akZ49uNE+IQ6/VuF2uoqqYinKzck+UFBdRUODAPzAYfz8ptxDiciAhWZw3rgE5PT2d3377jUOHDqHRaGjSpAnjxo2jXbt2EqAuQREREQwfPpz9+/eTnZ1NYWEh69atIy8vj9jYWBwOhxqU5e8rGoLirKM8/9Sf+HLmAqpc5pf6h8Vwz59eZtqLDxNk1AHgsFXy+f+9xjsffsWhYyfdbkdnDGDMLQ/xwb+m0jTMH7AzdfI4Xv9upculshjeowUAD/x1Bh+/eqevH74Q4jyQcgtxXilBefHixezduxeLxUJQUBCdO3dm9OjRGAwywnIp0mg0dO/encGDB5OYmIjT6aSwsJBPPvkEi8UipRaiQakqymTiNSP56LsFVDn19B92LbfecjOdkuKoLMrhw78/wZ9e+xwlO2ftWc5Dz73JoWMn0Wi16HQ6dDodGo0Gu6WCeTP+yV9enY4TwOmkoqS81vsur+M8IcSlRUKyOC9cR5HLyspYvHgxqamp6HQ6WrZsyYgRI2jXrp2vN1P8AZGRkQwcOJDBgwcTFBREUVERH374IYWFhTVqk4XwHSczP3iDX3ccATT86e3/sm75Av773Uy2bN/MwI7xgI3vP3ibnenVnSn8gsJo3qoDU6b+k50p6VitNixV5cz/7C0CDVrAycq5c8mrcoBGz+NvfcisWd/QLtp06j7DeeuDr5g1axYv/HmCr3eAEOI8kZAs/jDPWuSNGzdy/PhxzGYzYWFhDBw4kLFjx8rX8JeBDh06MGrUKDp27IjT6cRisbBq1SoqKirc+iZLUBa+Yq8qZMb38wDQBrXl6YcnoLzymMISuOeGqwGoKjzO+m0HAYhsPYDDB3fz5ot/oUvLeDQa0Or9GD3pLjrGRFRfvjKH4gobAC069WTixBuIClQqFv256trxTJw4kY6JMb7eBUKI80RqksUf4hmQLRYLf//73zly5Ij6Ff3IkSNJSEjw9aaK8yAoKIjevXszduxYdu7cSWVlJY8++ig9evSgVatWaks4nU7n600VjVRZdiqHDp4AwKkp5q9/eQjXZ+OxfVtO/WblQKZSf1z9nM09nsJvGzazb+d2jucWgcNCZrGUTwjRWElIFn+IEo4dDgc2m43U1FQyMjIwm82EhoYyYMAABgwYIKPIl5G4uDgGDRpEz549Wb9+PWazma1btxIVFUVkZKRbyYX83cXFVllaTKHtVM/20iy+/OyzWi6pwXiqFaXdXMZ7f3+av//7awrLK339EIQQDYSEZHHOvK2u9+CDD5KfX9079Morr2To0KGEhYX5elPFeWQ0GuncuTOPPPIImzdvxmKxMGXKFJo1a8YVV1yhjia7toWTsCwuFo1Go5ZXaCJb8saUh72+0Rn8QrlhwhDAyfxP3uDpNz/G7oSQ6ObccuskWiVEo3FU8vGb/+BwUamvH5YQwgckJItz4llmoYwkHzhwAKvVSmRkJOPGjaNPnz5uYUlcHE6HnYy0oxRXmIlqkkhc5Plbvlej0RASEkLv3r0ZM2YM8+bNo7CwEIvFoj4XlKAs4VhcbAHhUcQEaEmrcKCxBXD3Y08SZ6rjNchWyff/m4vdCWDk/dlLuWPwqUnG1kIWffCBhGQhGilJL+KcuQbloqIivvjiCyorK3E4HPTt25d27doREBDg681slPKOrKdLly506dKFEbc8gc1xfifS6XQ6oqKiGDVqFDqdDqfTyZo1a0hLS5MV+MRF4CDz2FEOHz7sdsgvKScwMpGOHZKqL1VygA+/Xozn099us5CdnYuT6gVHCsorTp0TROc2zdXLFeVkkl1Z5esHK4TwEQnJ4pwpo8h2u52ioiK+//577HY7UF1qkZiYeN7uKz9lC9dfPYxevXpxzzPTqLBK+KqL3VJFcUX1amDFxUVciKwaEBBA3759iYmpns3/yy+/cPDgQWkFJy6CSm67qidt2rRxOzwx9b9o/YJ59IE70WkAp403/zyJG255gGn/+AfvvPMPpjz5OEOu6MMVwyaRX+VAY/AnuX3LU7dbxP+99T5Hjh1n29rFTLzhZvbmFvv6wQohfERCsjgrnstPK0G5srKSlJQUHA4HTZo0oUePHkRFRZ23+y1K28uCpSvZunUrv8xbQLnF/kcfCYu+nFbd93fktfy27+QfvL3GRaPRYDAYaNq0KT179sRoNHLw4EFycnLUnskyoizOK40Gjbbu8p3yslJAw6i7nuT5u8ei12mxVZUy73+fMuXZZ3nmmWeZ9s/3Wbd5J+VV1WVBaPQ8+OenaBLqDzj49j/P0jopkZ6DruH3w+XccuPVvn7kQggfkZpkcU48Sy12796tjiL369ePhIQEjEajrzezrkfAhqULWLduHQC/78xkcMcmvt6oS4pGo8HPz49Ro0axZs0azGYz6enpZGVl0bRpU7RaLU6nU+qSxXlhDIpnypuvs25Xaq2XGTZuCAA6v2D+/ukchl0/m8+/nc32zZtIzysGrYkevXrSrl0Xrr/lFiL8qp+brfqNZtXKRbz99r9Ysnw1ZRp/Rlx9HU898xwx9lTCI5phimtDdKBrMzk/HnrmBTruSCU4pi2J0f6+3kVCiPNMQrKoN2U00HMUOSUlhX//+984HA60Wi1XX301ERERvt5a7DY7DqcTnU6PVvtHg5rz1AipE9Cg1+vqFf6cTic2mw2tVodOV/sXNw67HbvDgUarRV9Hj2Glk0j130KDTq9D68MQajQaGTRoEGFhYRQXF7NkyRLatGnDhAkT3BYXUSZvSmAW5
0qjNXLXY89zV30vrzNw5dhbuHLsLfW5NG17DOGz74d4OS+RDz4e6uV0Hbc/8jy3+3rHCCEuGCm3EGfFMyDb7XaKi4vZv38/TqeTgIAABg8efFHbvpVmpTD5rtuYOPEuNh/IoSwvjeceupOW8dEYjYEk97+Kz+auUCfvHN20iAkTbuSHtfvV2/jmn88yYcIEJt76AEey3RcPyDq6kyfuu5XObVpgNBoJiYhj6LU38NW8lW4T4px2M++/8TwTJtzIv75YQmVRJo/cOoYgfxPthk7gRKkFp6Wct156ggkTbuLzH9bisFbw5f+9Qq+OrTEajbRo3Z2n3/iIUrPNbRuqSvP5dvo7jBk5nBbxkRiNRgLDohk0ZATTPppJhfWPlp+cPWXRkKZNm9KtWzeCgoLYvXs3KSkpbstUCyGEEJciGUkWZ8217Zvdbsdms2G329HpdAwePJjo6Gj0+ov31MrPOMBnX38H6GjToQ8PfPlXdmTkq+fv3LSCBydtpOKbJfzppgEcWLOGH3+c63YbezavZM9mgGDueuJFWsUGAk62Lv2e0RMnk11SoV62qiyf3xb/xG9LFrLhxQ/44O/3o9eA01rF999/zfo9J8iqbMKmb6bw/aodABw5uJ9Si40Yivnm22/YdyyfUpqz4bOn+XzpRvW2M9L28O6Lj5B6opSZ/3kWgxbslhJuHT2YuWv2uW2ztaKI9WuWs37NCtbtOMLsj17EeJE/9mq1WvR6PQMHDmT79u2UlpZ6rUkWQgghLjUykizOiucCIps2beLrr7/G4XCg1+vp37+/D2uR7bz9+p/ZkZFPi47JXDVsKJEhJgAclnLee/c9yuxOWvfry8iRI2gZF65es22XfowcOZJrx42nXbPqUpHS7EPce89jZJdUEBTTntnLN1FeXs7R3Ru4smsiOC18/s4UFm45WmNLfl/yETNX7SAoOoHhw4fRvX1r/A3uZRTLf/oXXyzdSGhsIleNHEHrZjHqeb98PZ1dxwtPbXslaZknCYtpxq2TH+P96Z/y+eef8/zjdxOo1wJOFv73I7al5nOxaTQa9Ho93bt3JyQkBKfTyZYtW1i5ciV2u10tDZGgLIQQ4lIjI8mi3lxrkpWRwqysLPbtqx7h1Gq19OnTx6cT9uwODXc9/U/+89ojBPvp2bLwK668YTLlFgcZRzdzLL+KTgOuY8mSsbxyy1Bem7kGgPuef49nb+nldltLZn7JrhOFgJZHX36LCcN6A9Cicz9ef+UZhk58DEtVPj/MXsq43g+5XdfhsBPWrAsLf11Mv7ZNsFqtGIxG7KWul3HQbdBNzJn1MS3jwig5uZ8Bvfqz90QxltJ0Nh9Io2dSOIbAKP7zxRxadU4mLjJUXU0M5x3kpuzis8XbsJfnczgti36tIi/6PtdqtbRp04bw8HB0Oh0ZGRkcPnyYYcOGqc8TZRIfSF2yEEKIS4OEZFEvrgFZ+el0OikpKSE7OxudTkdsbCxdunTBYDD4bDvju1/DR2/+GX99dRDrNWw0HSLD2XIynypzOQVlVRBTj1noTgsrli6v/l2nJyRQy6ZNm9SzS2wa/Iw6LGY7Ow7uwg64Rz8dr/7rM/q3iwfw+sHBGBDDh19+RKu4MABCm3Rg3MBk9s5aCdjILDjVn1WjY+CQKwGwmivJyzpJenYeACZ94Klbc2C1X/y6ZKgOvYGBgSQlJbF7926ysrI4duyYlFwIIYS4pElIFmfFc9Ke1WqlqqoKPz8/OnbsSEREhE+XofYLClYDMlTPcA902Z76RjWntZLDx3Kqj9gtvHjvOF6s5bKVVWYcgHsxhT/9+nSu8z40WgMhwe4rEgYZ/Vz29enTi7PT+Ne7bzNz1s/kFBZTUNJwlsnVaDTqaHJkZCRHjhyhuLjYbfKecpBRZCGEEJcKCcnirCkh2Ww2U1VVvWSrn58f3bt392lAPr8P0oH5VPsyNBriE1sS7Oft30XH2JHD0FP/AH62KvKPMenaq1i87QgAIWHRtGkbj1YDBVmZ5BaX+XpvodVqadWqFeHh1XXeFRUV5OfnExcXJ6PJQojzwm63c+LECSoqqidS+/n50axZM3R1tM0U50dhYSEHDx5k586dZGVlAaDX62nRogV9+vShZcuWl8/7vwsJyaLeXEeRnU4nJ0+eJD09HafTidFopEOHDpfNP4lGZyDS/1SJhM6P//vfKm7uk1D3/rlA27J69pcsORWQO4+4hR8//T9aNo9Dp4F/3Hsdz34537f76lQruISEBMLCwtBoNOTn55OSkkJMTEyNFRqV6wghxNn43//+x4MPPkhZWfXAgL+/P9OnT+fOO+/09aZdlpxOJ9u3b2f69OksWrSIzMxMr4MdAQEBXHPNNUydOpX27dv7erPPq8sj0YgLyttS1E6nkx07drBp0yY1JCcmJl5S4UfjEujLiovdz9T7075Lm+rfbRZWL/vNR1vpZNehlFMBXMett06mTWJ1QAYnVrvtD936H6X8vbVaLaGhoYSHh2MymcjLy+PgwYOyLLUQ4rzJyclRAzJAZWUlOTk5vt6sy9Z7773HoEGD+PTTT8nIyKj1dbyiooI5c+YwZMgQ1q5d6+vNPq8kJIuz4lqPnJ6ezrFjx9DpdAQFBREREXEJhWQN4c1Ojwx/8/7f+df7H/Haq6+TcqIE0HHDDTdg0mgAB19Me5p/fPIDGVm5FBUVkXnsKAtm/5dHH3iIzYdzL+h2xocrreoc7Nm9B5vDidNhZ/Ov/+P9X3wV3j228tQS1ZGRkQQFBZGZmcnvv//utS5ZCCFEw/d///d/amlLfeTk5HDfffdRUFDg600/byQki3pzLbdwOByUlJRQUlKCn58f8fHxBAQE/PE7uWg0DBkwGCXTp+39jScef4RX/jaNg5nV/Yl7XH0LD94wCA1gLjnJsw/eRLMmMYSHh5OQ1IoxE2/nw0//S0Z2yQXd0uHXTyAmwAA4+f6fzzLoyqu4athgrhx9G05TGAadb/+NlYl7JpOJqKgogoODqaysJD8/X/1AJUFZCCEuLcqcI4Ver8doNGIwGGodEDt06BA///yzrzf9vJGQLOrFMyC7Hvz9/UlISLigrd/8I+KICq9ud9a8VRv89KefuqagMGJDAgENzZo2c7+izkhiqyYAhEXGEWY6XYbfY/Rd/O3BG9z+2f1Dm5AQE3rqqsH8Y8Zc3nzuYaJCvH0AMNL7yqvp3LJ6ERCNVkdiXCwAIZFNCTXVnEyi0RlpHhsNQHh0AkF+7v+CCa0S0Ws1GPxDaBoZAkB8pyF8/9V/SIwKxums4vffVrBi9Ub6jXmAhfO+IT7EiNboR3CgSb0dv4BQmoQFVd9mfDMu5AC/sv/0ej0REREEBweri814BmSFhGVxaXBwaMfvzJw5kxVrd2Bz+Hp7hLh4lNal7dq14//+7//YuXMnmZmZpKam8u9//xt/f+/tVFesWOHrTT9vZOKeqDfXkUDX4GMymYiPj7+gITm+5yhO5Hvv4hDXfhBZtXV40AUxY/VOZng7T+vHyx/N4ZGXj7By3Vb0/uEMHHQFUaGB6kUMgRE899aHPP33d0jZ
s4edKakAJLbpSoe2SYQGnX6R0BiD+O7XbXxXx+PQBUSxaOP+Ws+/89Xp3PnqdM8NZdjEhzg89g62bFrPsZMFdO47hI5JcWiAtILKGrcT3qIPJwovTps4JezqdDoiIyMJCQlRT6/tcOmU5YjGzFmRw6QxY9iemY8+vAkb96SQHB/4x29Y+FxVVRWHDh0iPT0dq9UKgMlkokWLFrRp08Ynk9AzMzPZu3evWuIQHBxMhw4diI+PP+fb3LVrF0ePHsVkMtGnTx8iIiLqfd0HHniA6Oho7rnnnhq9/h9//HGqqqp49tlna1zv2LFjF33fXSgSkkWdvAUc10UiAAwGA+Hh4ej1+ksy/ETGt+LGia3qvIzOGED75D60T+7js+3UmwLpN3gE/Xy2BbVzOp3o9XoiIyPVkWRlNFkWFRGXLKcT2wXrWyMutsrKSn788Ue+++471qxZQ0VFBXaXRZiUTj1NmzblkUce4bHHHnMrI/znP//JX//6VywWCwBNmjRh0aJFtXZ0yM/P54orrlBDY3BwMF988QVjx45VL+N0OlmwYAFvvfUWmzZtwmazuXUBMhgM9O/fnxdffJERI0Z4vZ/XX3+dt956C6vVSnBwMN9//z19+vTh8ccf57///S92ux2NRkOPHj1Yv349fn5+1MdLL71U5/mjRo1iypQpOBzuX7HU9/YvBVJuIerNW1CG6pAcERGBXi+fuRoj5QVdq9USEBCgvkCazWaysrIkIAshfM5ms3HHHXdwxx13sHDhQkpLS90CMlS/ltlsNo4dO8aUKVOYMGGCWzeNzMxMSktLMZvNmM1m0tLSmDFjRq33+dNPP5GSkqJePi8vj8zMTPV8s9nMY489xvjx41m3bh1Wq7VGWZrFYmH16tWMGTOGv/3tb15fRw8ePEhZWZl6H3v37uX222/n66+/Vh+j0+kkPT39rCbinas/MvLd0EhIFmdFCTtms1n9ikqv1xMYGCghuRFTSihMJpMakisqKkhLS5OALITwuezsbJYvX17v1yKn08mSJUv44IMP1NNuvvnmGmUYc+fOpby83OttzJs3z+3+/P39GTp0KAAOh4Pnn3+eDz/8sEZY98ZisfDaa6/x9ddfn/Gy7733ntfJc1artcao7x+xZ88er7eXnJx83u7D1yQki7PmdDopKiqirKwMp9OJVqutc7aruHy5vgFotVoCAwMxmaonENpsNoqKiiQgi/PCbqni8P69bN++m6Kyyjov63TaKcg5wfbt29m+fTuZOQXYHWd+HlYwISZuAABxCUlEQVSUFLBz+3b2HTyK2Xbm4ALgdNjIOZHO9u3b2bXnAMXllfW6nri4DAaDWlfr7+/PiBEj+OCDD1i3bh2bN2/m119/5frrr3d7H3M6nXz99ddqEOzVqxcdO3Z0u93Dhw+zZcuWGvdXUlLCsmXL3E7r0aMHrVu3BuC3335zC+AALVq0YPr06WzcuJE1a9YwZcoUt8lxdrudV155pUbXCU9HjhzxenpYWFiN2uJz5XQ6mT17do3Tg4KCuPrqq8/LfTQEMvQnzshbXXJhYaH6NZTRaCQyMvKyWW1P/DEajQaNRoPT6VS/PpTJe+JsHNq8iEn3Po1VG8uXs/+HJv03nn7uVTbvPki52Ul8UlseffFvPHPPBAw6l+eR087m5fP427T/Y+vO/WTlVvdrjYiOo3XnZJ598VXGD+uFzuO5ZzeX8cU7r/Lm9P+SlpGFwRREl35X8o+/P1NHRbKTXWsX8NJLU/l99wFyC4rR6f2Ia5bImEn38/fnHiMm1B/RMERERHDNNddgt9t58skn6dq1a43lrK+44gr69+/Prl271NOOHz9OTk4OcXFxaDQabr/9dqZMmaKeb7fbmTVrFkOGDHG7raVLl1JZ6f6B6eabb0av12O323nnnXfU2mZl+5YuXaqGaIABAwYQExPDk08+qZ6Wnp7O0qVLGTdu3Bkfc1hYGE899RRDhw5l165d2O12AgPPz8TT7du388svv9Q4/brrrqNVq1bncIsNk6QacVaUgFNUVOT2FZMEnsbLs/+xn58fJpMJi8VCfn6+12WphajLyeNH2L5nH3t2beHLd17j6rG3smrLHsrNVsDGibR9vPTIXXz+s8sIntPB3E/e5Koxk1iwbJ0akAEKcrPYtHIht4wexb+/W+kWfJ0OK/+e8iAPv/QuqRlZOAFLVRlbV/3MuLHXkZJd7GULnayZM51BIyfw8+qNlFk0tGjRkrioYE6mHeLjN6cw/ranKKqq32i0uPD0ej1fffUV33zzDT169KgRkKF6eeXhw4e7nWa1Wt1W9bv55ptrtD5bsGABxR6rts6bN6/GbY8ZMwaoLv347Tf3haDuuusut4AM1e+rt9xyC5GRkeppTqeT9evXn/HxhoaG8uOPP/LSSy8xcOBAHnnkER5//PHzMphVWVnJn/70J7XkUhEdHc0rr7xyWQ2YXT6PRFwQtY0CKuUWUP01VkhIiNcXHXH58wy/Wq1WfZGs7fkjRP2U8uEn71Hi9GfspLv50yOTaX6qj7nTWsGn07/EfKqMInPfWv78/BuUmK1ojYFMfuZN1qz/nd9WLuXe8UPRaMBqLuTVKU9xMOt0e8QTe35j6kf/ozrOauk2YBRPPPFnrh7cG3Npkdeyi8r8Izzw8LOUVFoIb57M4rVbOHzkCAf37eaxicMAB78v+pxPfzpzmBENi+ekM4fD4Tbim5SUVGPU+Pjx427LMVdWVrJo0SK3y/Tp04fExEQAduzYQWmpe4vOtm3bcuLEiRqH8vJywsLC3C6bmppa52PQarW88cYbXHnlled9/zgcDt544w3WrVtX4z5fe+012rZte97v05ek3ELUm7c+yVD9z3G+6pzEpcXbAiGeIbiuYCxlF+JMTEGx/HvGD0w+tfrlvaP70GP0ZJzAwcNbKamyER2gZ+anH5JeWP319rBbn+GjaVNQKjH69+nOoR5dWHsom9KMXcxdvp3nbxsMOFk4538UmquDcKt+N7Dy1+8J99fjsFby8d+e4Im3PsFsd3/+/jrraw7kVg8S3POXlxjcrfrr5cDwOJ589ilmzFlBsd3CL/MW8cykQb7ehcKDsmLsoUOHOHz4MEVFRep59RmlvfPOO1myZIn6uuZ0Ovn+++8ZPXo0AOvWrSM/P9/tOjfccIM6kJSSklLjNh9++GEefvjhem2/8g1dba+d/v7+XHPNNed9vzmdTr777jumTZtW47zbb7+de++997zfp69JSBZnxVvgsdlslJWVqcFZQk/jUlu9sV6vJzQ01Gt4lueJqK9e197B/acCMkDXISOIBzKB8vIKKmwOcFSwYc2m6gto/Jk08XpcS5X1AdHcNHoIaw/NAhzs3LwDbhsMTivbNiglGxpuvXcy4f7Vb4tagz8PPvcyH3w5i70nCk/fmNPOuhWr1KM5mfv54ovT51cVZWDw00OFjX1H92MFLtwyS+JsOBwOVqxYwXvvvcfKlStrjObW17XXXktMTAzZ2dnqab/++it5eXlERUUxd+5ct8ubTCa33sieAfps+aqT1MKFC3nwwQdrlFk
MHDiQ999//4IuKOYrEpJFvXm2slFafdntdrUpu4SfxsXbQjNWqxWLxaJO6HQNyFJqIc6W3mBwqwvUaHU1QqejooRDuSXVR0wmmrZoUuN2Ylo0V3/PKMzECWhsFjLyTtWSanW07tTB7ToarRat5+uZw8axk3nq0W/ffZFva9l2m82OrGTdMCidIaZNm1avlmt1CQ0N5frrr2f69NOro+bm5rJ8+XKuu+46Fi5c6Hb5gQMH0qxZM/W42WyucZtarbZe750Gg4Gbbrrpor/Prlu3jttvv71Gn+VOnToxe/ZsgoODL+r2XCwSkkW9eIYbPz8/9VOjzWajvLz8vPZfFJcWJSDb7XYsFgtWq1VdZMbbZZWf8oFKnA8Ohx3rGT6AuU4m8tP7qSPTDtfr1fNDnM15+rWux+BRtIwO8nIpDf1H38nls/bYpW3WrFm89dZbbu9TgYGBXH/99XTv3p3g4GDCw8P5+eef+eabb854e7fddhuff/65OqqqlCK0aNGCtLQ0t8vedNNNbnN2vL0uzp07t14dK3xh165d3HDDDW5lKQCtW7dm4cKFxMbG+noTLxgJyeKsKKOCBoNB/crHZrOpqxdJe6/GS5ngorR90+l0BAWdDg8yiiwuFL0piIQAEwcAqqrIPpYDHaLdLpN56Kj6e1Jci+pfdAbiI0Kqf3fYST90FAacHnF2OhzuIRpAqyM+9PTz+q6n/sGfx3Xx9S4QZzB9+nS3gBwSEsKyZcvo3bu32+WUJaTPpF+/frRv357du3erp61cuZLQ0FC3y/n7+6tdLRSuo8pne78X29GjRxk/frxbhw+o7um8fPlymjdvfo63fGmQ7hbinISEhKj9Fh0OB1VVVWpIFt7t/m0Bjz70AI889gxbj5z09eb8YZ61xna7nfLycvWrRGWRGSEuOGMwyb1PlUo4K5k9b75bmYO5OIPvF6yuPqIJ4MoRfat/1xpp1639qUs5+farzyg5NYnPbi5l2gvPcuhkkft9afR07nN6RbGf58zCWo+FSoTvlJeX1xjd7d+/f42ADNSot62NXq/ntttuczuttLSU//73v26nDR48uMZIa+fOnWt0g5o7d26D+zb25MmTjB8/vkY3jYSEBH7++efLPiCDjCSLcxQWFkZgYCAajQar1UpxcbGMJJ/BT99+yoefVvfOjOwznJ6tmvzBW/Qtb/XIZrMZi8WCRqPBaDQSHh7u680UjYKOm2+/jff+t5JKm4PFX77Jo2Fabrr6CmyVBXz89mtsPpILQNt+YxjdT2lTpWHsdeN56d//w+yAA799z5BheQzo0ZI9G39jzdZ9NUeSgfGTbufFt74gz2xnxX/f4VZ9FXffdDWhgX5UlhWze9smFi9ezv2vfc5NQ9r5eudc1lJTU93ar3lq3bq113rZ9PR0zGazOrcG4Pfff3erMz6TG264galTp6rtUIEaQXfChAk1+ga3bt2a9u3bs3fvXvW01atX8/nnn3P//fd7ff90Op2kpqZSXl5Oly4X/psLi8XCXXfd5TZSDtC0aVMWLFhAp06dLvg2NAQSksU5cR1JNpvN5Obm1lhdTYKyu8txrMn172232ykpKVEndsjfX1xMPa6axMsPLeavH/2A1VzK9LemMP0t98vEJvXgk4//Qbjp9FtfmyvG8shNw/nnzOWAgx3rl7DjVBewsPg2XJscz3e/rHa7nZh2A/jn3x7j/hf+jdlexewv3mH2F+94bJEfN5XIEtUX2ocffsiHH35Y6/n33Xcfn376KU2bNuX48ePq6fv37+fGG2/knnvuwc/Pj5UrV/Lpp59SUlJS7/tu3bo1AwYMYMmSJV7PDwgI8FpnHBAQwP33388TTzyhnuZwOHjkkUdYvHgx1113HVFRUQAUFhayc+dO1qxZw549e4iIiLgopRkbNmxgxYoVNU4vKChQW93V5cUXX+Shhx664Nt5oUlIFmdFCT7K0sMajYaqqioyMjKwWCxSbtGIeI4kW61WsrOzKSws/OM3Lhq1+BbtGdgzmSKzg0F9e7ifaQxh5NhhrE/NIbbzEIKN1V9ba/T+PPvul7Tu0Z9/ffglBw4fpaC4DPR+JDZL4IprJvLSM3+hY5L7V99aQyBvfPwdIXHP8fl388nIKSAoNJLeA6/hrbdfx35gIQeO5+PXtB3RgafKhzQ6bnvmH8S27sY//v0Rew8e5WROPv/f3n2HR1GubQC/Z3uy6Y0QQhMIhN6RXgIiAmooomLDeo5iQTo2OB6P9bOjBARRUEEgoYp0qSIl9BIgCZBAEtL7ltmZ748ww+xkdpNAwm6S53ddeyXZOtsy9777vM/LA/D2D0ZIUDB6DhiOwT1bVuLekpp048YNceW6gwcP2k0c3rhxo+LSygzDVGpfxjAMnnrqKWzdulXx/FFRUQgJCVG87AsvvID169dj165d4nEsyyI2NhaxsbEOb/NuLdqVkJCg2AWktLQUqampFV5+27ZtFJJJ/SEEYunvwsFsNiMjIwNmsxkcx4HjOLvzk7pHXmYhTNrLyspCYWEhGIYRWxopHQhxplXXodh75KjyiWofxKzfoXySzhPjn30DY558GRkZacjNLwLUOjRpHA5vo4fD2zP4hGDuF0vwxux0XMssC8mNwhpAo2KAtv/C0THld/aMSothYydhaPSTyM3KQnrmzZDsF4TgoAB46Kkev7rdznLHwmWee+457Nixo9xy0fLzjhkzBgkJCeXKDBwZPnw4GjZsiOvXr5c7bdy4cQ7/3xmNRqxYsQKTJk3C5s2bKz3A1LDh3SnTk5ah3I7K1na7OwrJpMqkYRkATCYT0tPT7UaS3abkgueRk52J4lIL/INC4OWhQ3FeJv7+axeOXbyKe1p3xaDBvRHorbwD5WxWXE2+hKNHDiEp5QbUnr7o26snWke2gZ/R4OA2OaQkXcCuXTuRUcCjX9QQdG9fcV0iZ7PiyqULOHjwAFJv5CGkUQvc27sXWjYNg1rlXsFSqR7ZZDIhNzcXRUVF0Gq1Ys26y18DpN5Ra3UIC2+KsPCqXIqBX0hD+IVULYQwKg0CQkIREBLq6rtd5913333o3LlzlRbj6NevH4CyEodff/0V3333HWJiYpCamgqTyQSgLBC2aNECU6ZMwZNPPom5c+ciPz8fPM+jYcOGit0oBIGBgZg8eTLmzJljd7x8ARElISEhiIuLQ1xcHBYuXIiTJ08iPz/fLmDq9Xp4eXkhPDwcgwYNUlzV7t5778XBgwdhMpnAMAy6dOnicAS7sgYNGoSuXbsiMzPzti4/ZMiQO7p9d0Ehmdy2xo0bo1mzZkhKSkJeXh6ysrLQvHlztyq5sBakYkDP7jiTnI1X3vsZz/bV4PnJb+LYhWvieZq074Nff/8dfSMb2V0268pZzJjyBlb8sQulZtbutKbte+K///sSj4/qDWl+5aylWPTfGZj52SLkl5R1eWB0BkRPfB0Nioodbmd+RiJmvv4qlq3ZUraC2E1aox/GPPUavv3kLQR5udfS3/J6ZLPZjJycHBQVFSEgIACtWrVyenkKz4SQqmjTpg2OHTt225f39PTEtGnT8O
qrr9otR+3r64uIiAjodGX/Yz/88EN8+OGHlb7etm3blivRGD58eKUmLut0OkyYMAETJkxAVlYWUlNTUVx8a1/h4+OD0NBQBAUFOfyf+fLLL+Pll1+u1se6adOmOHr06J1fUS1HIZlUiTAyqFKp0KJFC0RGRiIxMREmkwlXrlxBly5doNFobutrsZrA2yzIKyoBYMNfcd9j9aeHkVFiv9rR1dMH8OK/Z+OfbUvhpS3b7sKMi4gecT/2nUsBAOj0HjDotWCtZpSUmnHl9CE8++jD0GzaiUcHCbN8eayL+S9e/c+3kH7RxFtMiP3xY4fbyBbfwOMjR+CPoxehUuvR//4RaBMegDOH9+Hvkxew8vv/opjVIS7mLWjcKFcKZRZCSC4uLkZubi5MJhMiIiLQpUsXhyPJFJAJIa6i1+urrTsDz/NYvHixXUAWaqCrKigoSJywR9yDeyQZUivI60obNGiAsLAwsQ1cUlISWJYt1z/XXZw5uQ85NgNenPEB1m/cgHcmPy6uunV272acuCJMOLPhuw/miAE5ot9DOHTyEvLz83H10ik8NbIvAMBacgPz3vkARday+2nOv46PPl0gBuQug8ZiVdx6/LL4W3Rt0cjhdq1d9Bn+OHoRAPD83IXY9UcsFi5ajL/278foe9sA4PDn8q+xL+H2vvaqbvJSCyEkZ2Zmiq0Ag4KCEBERUWEtMoVlQkhtlpCQYDf5Digb/R0xYoSrN41UAwrJxCl5wJH+DAoKQkhIiDh57+zZs+XawLkTtdYb7y9YiQUfz8HokaMw78tFGNruZv0hl4UTl9MBAObcq1geuw0AoNL54tMvvkGniDAAQGBYK3z19f8hxKPsa7kLR//CieSy+rirp/fj6NWcstvyb43Ytb9i3MOj8fizr2Dnnh0Y1K58XRvPluKn5b+X/eHRDFP+NQ7qm4+x1hiESePLWu2wpdnYe/C4qx/Csm2+WYMsrUdmWRZXrlxBfn4+gLKRGh8fH/EbBZq4Rwipi5YtW2bXJxkARo4cCR8fH1dvGqkGVG5BKk2pS4FarYZarYbFYsG5c+dgsVjsOlwIHQ7cgWdAY4x/aLA4esyoPdC5eTNsO1O2+l2JqWwM+EZSAlLTysKeT2hH9OtkP/vH756O6NEyEJtOpYErzUVSSgb6RgTh0qFjEBrm9IqKRjPfWzXEvmGtMbhPV/x1JsXuuiyFqTh14mbPS6YAb01+DnpJkfONK2dv/mbDBYXZ064g7WghHFiWRWJiInJycqBSqaBWq8Xn3p1eA4QQUl0KCgrw66+/2h2nUqkwYcIEV28aqSYUkkmlSeuRhd8DAwPRvHlzJCQk4PLly0hPT4ePjw/UarXdaHJtCknZuZkouDl3Tt0gEEatfNt18A72AZAGwIS0gjwAwPX8PPEcwU7KK6RsxYXIEuYEluQgduUKB+dk4KFz/cQ9pdZvNpsNVqtVHEk2Go3iKIrSa4ZGkwkhdUF8fHy55a4bN26MoUOHunrTSDWhkEwqpDSCrFKpoFKpxJB84cIF2Gw2HDhwAOHh4dBqtWAYBhzH3bXm59VFrVJDBYADwJdaYOMB2GU6HjZWGDPWIdDTCwBgUN/qi2ourORKWwxzq+bJrwk+mTdDnDxot00aIx4YM7py11mDhHBss9nsRpGvX7+OGzduwGKxIDIyEm3bthVfI/KATAghdUHPnj2xYsUK5OSUldn5+vpi0KBB8PT0dPWmkWpCIZlUiTwsN2/eHP369cO2bdtgs9lw7NgxjBo1Cj4+PuVqk2tLQApp0Aj+OhUyLRxM1xJxLc+CVv63RnG54gycP1dWv8zo/dCicVl/1CZNb/VXPRcfD5aH2I2Ct5mRmZlT7ra0vsEI91HjXIENsGjw4OOT0DrIPf/BKk3YExYROX/+vLica2RkJLp3704LiRBC6jRPT08qrajjaOIeccpR0BFGCI1GIwICAqBSqcBxHI4cOSI2Q5fWrNYmgc3aIKJ52dK1JXmJ+OHnDeCEOYg8h9gl83Euu6yPZbMOvdGpRTAAoEXXHvBRl4XAK4c34Pvfd4PleJQUZOL/Zk/G4g37y92WxjMEXbtFlv1RchlfxqyCxWY/4ZFjzbhyNfXWNriIvMxCGEW2WCw4deqUuNKet7c3/Pz8FEeSCSGEkNqCRpJJlUgDsnDw8/ND27ZtcfLkSVy5cgWJiYlo0KABtFotOI6DSqWqVaPJGmMI/v38ozg44wvYeBZfzH4OFw9vRadWYbh+8Th+XbMJLMdDpTXi1alvwl9f9lkzNKInht/bGqv2nwdsxZj69Eh8815jWIvzcPVaOlRaLVQcZxd2GZUOr03+N37/6xVYeQ6L5v0b5/dtQr+ebaFRATkZ1/H3gX24mu2Jo+f/QSOja0pXlAKyEJJLSkpw6tQpFBUVISIiAs2aNSv3GqH2b4QQQmobCsmk0uSjyEJni4YNG+L+++/HqVOnUFJSgoMHD6JNmzbw9vaGWq0WO124Y20yo7jcM4NHXn4HZ88n4dOlG2AtzUfcLwsRJzmH1sMPr777OV4e3188TqXzwQeff45DDz6GKxn5sJqLcTHhPADAv0ELzJn+LD6e/RayZEva93z4OXw2/TBmfbkMpZZS/PXnKvz1p/15fEK6u3xpaqEWWahHFlbZS0pKQnJyMsxmM3r27In27duLrw2hywVN3COEEFLbUEgmlSIPyNKgHBgYiJ49e4qBeM+ePRgxYgTCwsKg0WjKdTi4m7ReoZj31ts4fTkdYfd0Q6iXVnIqg8denwbLPXug9fTDqK7Nbl3O0x/vL1iBodHrsHzFWhw4+DfyiszwC2uGPn364cmnJ2FAt0ioZPenVc8R2LVzG7788kts+mMnrIYARA0fiddfn4LWoQz4/AKklfJ4sHfHWxdS6fHaRz+gz8jxWLp8FeIPHUZyRjbAaNGhSzdERLTD6PHjEOLhuuoo+SiyEJZLS0uxZ88eFBcXQ6VSoUmTJggNDbX7EEXlFoQQQmojCsmk0uQ1ydIQpNFo4OPjg5ycHJw8eRKXL19GREQEdDqdWG7hisVFGJ0Rz70+0+HpnYdEo/OQaMXTVBoDBo+cgMEjqzYxo3nbHvhq4S/4SuG06f/5yMGGqtF9wAPoPuCBu/4YVUSpzILjOFitVhQVFeGff/6B2WyG0WiEwWBQDMc0ikwIIaS2oYl7pELyYCMdRRZ++vv746WXXoJarQbLsti3bx+uXr1abmW22jaJj9iXWQgHlmVRXFyM5ORknDhxAlarFQ8//DAiIyPFkKzRaBTDMiGEEFIbUEgmlabU4UIIRN7e3hg8eLDYH3n//v1ISEhAcXFxuTpWd1uumjgm7YssPVgsFmRmZmL9+vWw2WxQqVTo1q0bGjVq5LAWmRBCCKlNqNyCVJnS5D2tVgudTidOzktJScGRI0cQGRmJyMhI8XIAauUCI/WRo4DMsiwKCwuRmJiIPXv2gOM4BAcHw9PTE1qt1m7SnlqtpnKLespVJVZ1Db1nCHEdCsmk0pT6JEsPBoMBzz//PJYsWYLCwkLs3bsXLVq0QIMGDRAcHCx2uRB+qlT0R
YY7ky89Lfw0mUy4evUqtm3bhqysLKhUKjzxxBMIDw9XHEWm0eT6R3jdUEi+c66a9EwIoZBMqshZyYVOp0NUVBRWrFiBoqIiXL58GXv37kXLli0xYMAAMRQLAYrnefrH76aEUUD5KLLVakVmZiaOHj2Kv//+GyzLQqfToUuXLvDz86uw9Rup26SvGQrI1Uf4P0sIubsoJJNKEQKO0miydKJWSEgI+vfvjx07diA3NxcnTpzAgQMHEBkZiYYNy5ZtFkYkAdAIiRtSGkGWjiJfunQJe/bswfXr16HVajFixAgEBQWJ5Tby7hY0klw/CAFZWG2TVB/hgwd9+0bI3UUhmVSZfARZfoiOjsbp06eRl5eH9PR0HDp0CN26dUNgYCB0Oh1sNhuVW7gxR6PILMsiIyMD8fHxOH78OGw2G4xGIyZOnAhfX1/F1wI9x/UDz/PiEuU0glwzqEyNkLuPQjKpEoZhxDIJ6WiyRqMRD02bNkVERATS0tKQl5eHCxcuYPv27ejQoQPCwsLsRirddSW++koakFmWtQvJJSUlOHHiBP7++2/k5eVBq9WiefPmCAgIgE6nE59/YbKeUncLGk2ue3ieh8VigcVioRHku4DeQ4TcPRSSSZUpTd4Tyi20Wi00Gg2mTJkCs9mMbdu2ITMzE/v370fnzp0xYcIEMTgJJRc0OuI+nHW0uHr1Kvbt24cTJ06A53kEBwdjzpw58PDwsHvuHS0mQuoejuNQXFwMlmUpIN9FDMOAZVkatSekhlFIJpUmX1CE53m7r9alo8kajQZBQUHw9fVFbm4uUlJSsHLlSgwdOhTBwcF21yX8TkHZtYSAzLKsOIos/F5UVISdO3fi0KFDMJvNYm/skJAQMRxLR5KlE/eoJrnuunDhAjw8PGAwGFy9KfVOYWEhMjMzKSgTUoMoJJMqk351Lh1Fttls0Gq10Gq1YFkWjz76KMxmM+Li4lBaWoqEhASsXLkSzz//fLmQTCOOriUdQRaCsRCUzWYzjhw5gt27dyMlJQUqlQoRERF4/fXXodfrxedcHpTpQ0/d5uXlhfXr12P37t1UMuUCNpsN6enp9NgTUoMoJJMqUQq30qAsfO3Osiz8/PzQqVMnXLhwASdPnkReXh6WL1+O4cOHo1mzZuIIo1CfLIQqCsp3l3TZcHk3C2H56e3btyMxMRE2mw0tWrTAAw88gObNm4sBWVhExFEtMqkbhOdUr9dDo9EgNTUV169fp+fZRWw2m/h/l95vhFQ/CsnktjmrS2ZZFlqtFu3bt0dSUhLOnDkDlmWRmpqKuLg4PP300wgODhavRzgIQYvcHUJAlpZXCD+tVitKSkpw4MABxMfHIzc3FyqVCk2aNEGvXr3g4eFhF5KFnbVSmQU9p3WDwWBAZGSk+DqRLxpCX/3XPPk3eSqVCp6enmjRogVNkCWkmlFIJrdFOtlOrVaLI5FCSLZardBoNAgMDESfPn2QmZmJTZs2wWq1Yt26dWjRogUGDhyIoKAgxSBFX9XXPHk/ZGmZBcuyMJlMuHjxImJjY5GSkgKbzYZOnTphwIABCAoKcliLLF+KmtR+wnPp7e2N/v37o02bNigtLRU/TAmBmcJyzZL2HVer1dBqtdDpdPD09ESDBg1okIGQakYhmdwxR2UXwqF58+a47777kJSUhLNnz+LSpUtYuXIl/P390aNHD/j6+totMCK9TlJz5BP1pAez2Yz09HTExcXhn3/+QVFREZo2bYqBAweiW7du4ochaUcLYRSZnru6R3hODQYDWrZsibCwMJSUlMBiscBsNtuFZOFAqpdS202dTge9Xg+DwQBPT0+7+mR6DxJy5ygkk9umtDw1x3GKC0qEhISgWbNmSEhIgM1mw759+9C0aVP4+fmhXbt28PT0LHfdws6AVC9ni4UIE/Wys7Oxb98+rF+/HoWFhVCpVOjZsye6du0Ko9FoV3/uaBlqAe2say9HK2wKAU36WqKQXLOkzwPDMHZtFx2VORFC7gyFZHJHpP+4eZ63G+WQBqj4+Hhs374dKpVKHL38448/EBgYCC8vL7Ro0QJardbueqWLlpDqI+9kYbVaxRFki8WCvLw8HDlyBMuWLUN+fj4AIDQ0FO3atUNYWFi5OmStVltuBw3QtwG1nVI/dPn7WwjDNpvNLiBTSK5+SoMSOp0OOp1OfC9SUCakelFIJndMaaRJOrq4c+dOfPXVVzCbzQAgTuzLycnBmjVrYDAYMGHCBDRo0MAuYAkhmersqo90oRB5iYUwUe/s2bP45ZdfcPnyZXAcBz8/P0yaNAkdO3YUR62EHbPSAiLSoExqJ+H9J/wuHUGW1h4L70+qSa550n7y0ppkoS5Z2mVGPmmW3o+E3B4KyeSOyGdTS3eoDMNg69at+Pjjj2EymcTLCCUZHMchLS0N27ZtQ0BAAB588EH4+vqKO1ihvo7nebHFEbk90kl68oVChENpaSnOnz+PzZs349SpUzCbzdBoNHj55ZfRsWNHscxCOoIs74tMO+a6Q2nkUvi2R9qNRvjQJR1BppBc/eTf0Mjbbko/sNIHVUKqB4Vkcscc1Z9u3LgRc+fORWlpqd35bTabWGvMsiwuXryI7du3Izg4GAMHDoSHh4fiP3gapaw6aQ9kZyG5tLQUV69exd69e7Fv3z4UFxeLpTG7d+9G586dxdFieW9k+nq37pIHZY1GY3eaEJLlZRYUkqufPCRLv7WTfmiluQGEVB8KyeSOSb96FXaUsbGxmD17drmALOA4DiEhIbBarSgsLMSpU6fg7e0NjUaDXr16wdvb225HK28PR//wKyaEY+kkPaUlp00mE65du4YdO3Zg27ZtSEtLA1D2mAuTLLVaLWbNmlWuc4nSTpnCct0hD2TS44RwplSLTCG5+sm/tZOWXSi1YCSE3DkKyeSOSHeMQr3r0qVLMWfOHLsSC7mOHTtiyJAhOHjwIM6ePYusrCz8888/UKlU8PX1RWRkJIxGo91lhB01z/O0I6iAvAeyfKEQ4WdJSQmuXbuGXbt2YdeuXUhOTgbLsgBuTcYCgF27dkGtVmPevHl29cc0klx3SecFCBNzheNVKpX4+uA4DgBowt5dIg/JQlB2tIgPvScJuX0Uksltk44eCWHshx9+wIwZM8SgpaRLly6YOHGiOFLJsizOnj2LjIwMHDp0COHh4WJ/ZR8fH7vbk/6kFnHKpC255OUV8tX0rl+/joMHD2L37t1ISEiAyWSyC0BS27dvh0ajwaeffmpX90jLidc90oAskC4SI235KH2dUEiuefKyC6U2ffL3Ir03Cbk9FJLJbZEGViEgL1q0CNOnT7dbFESuR48emDRpEhiGAcuyaNWqFYCy2uQzZ84gPT0d27dvB8/z6NOnD9q3bw8/Pz+ny63SaEkZpQ8tzkaQExMTcfToUezatQvnzp1DcXExfHx8UFRU5DDs/Pnnn9BoNJg/f365dm/Sn6T2k7dh5DhODGHU7s31lMKy9G/574SQqqOQTKpMKYwtWrQIb775pvjVq5LevXvjX//6lxiQhT6rHTt2hE6nQ3FxMZKTk3H+/HmUlpYi
NzcXNpsN7dq1Q0BAgLh4gbQ+Uvi7Pn+1KH0u5AFZHo6FgHzp0iXs2rULe/bsQUJCAkpLSxEYGIh7770XDMNgw4YNDp/LjRs3gmEY/PDDD3YLSgjhqT4+B3WVtBWctOSCao/dizwYS48nhNw+hqf/cqQK5AGZZVksWrQIU6ZMcRqQ+/Xrh9dffx1qtRpWq9XuOgRJSUmIiYlBZmYmOI5DcHAwevTogVGjRqFHjx4ICQmBXq8vVwsrP9SnHYP8w4pQaiEEY+nBYrGgsLAQycnJWLt2LXbu3In09HQAgI+PD4YMGYIePXpArVbjwIEDiI2NdfqcPvjgg1iyZAmMRqPdcwLQzrmuotIK90TBmJCaQSGZVJo0IAvBa+HChZg2bZrTGuSBAwdiypQp4qij0kQf4Tpv3LiBTz75BBkZGbBarfDw8ECbNm0wevRoDB8+HOHh4dDpdE5Dcn2YRCb/sCKEZGkNsjwgZ2Zm4vjx49iyZQv27duHnJwcqFQqNGvWDPfffz9atGhhtyDM3r17sXLlSqflM9HR0Vi0aBG8vLzsHn+AdtZ1Fe0y3Be95wipXhSSSaXIQ5nVasXChQsxY8YMWK1Wh5cbPHgwpk6dCr1erzjjWj4qzbIscnNz8ccff2DLli0oKiqCXq9HUFAQBgwYgCeffBJt27aFTqerMCTX1ZFlac9jeXmFPCRzHAeTyYT09HTs3r0bGzZswIULF1BYWAi1Wo3+/fsjKioKQUFBduFW6GCxc+dOLFu2zOmHoLFjxyImJgZeXl6KCxnUtcefEEJI/aCeO3fuXFdvBHFvSiPICxYswKxZs5wG5CFDhmDatGnw9PQUm93r9XrxoNPpFJdT1el0aNasGfz8/HDt2jUUFBSgtLQUGRkZuH79OgwGA4KDg+0WNnDUp1X+GbC2Bjb54y8dPZaGYunvVqsVRUVFOH/+PDZt2oQ//vgDFy5cQHFxMTw8PPDQQw9h2LBhCAkJgcFgEJe3lT4nbdq0gbe3N06cOOGw9OLcuXNISkrCsGHDoNVqFesja+vjTgghpP6ikEyckgdklmWxYMECzJ49GxaLxeHlhgwZgunTp8NoNEKtVkOn09kFZCEkywOyQKvVIigoCEFBQcjKykJ2djbMZjMyMzNx5coVFBYWIiwsTKxRlocwee1kbQzL0m2Wjxo7CsbCKL/ZbEZBQQH27duH2NhY7Nu3DykpKTCZTAgKCsKzzz6Lbt26ISgoCB4eHmI4Fp4T4adarUabNm3g4+ODY8eOOQzKZ8+exeXLlzFs2DDodDrFx7o2POaEEEKIgEIyUSQdiZWWQ1QlIHt6eoqrQclDsjSQCV/tA/aBSq/Xw9/fH2FhYTCZTEhNTYXZbEZOTg5SUlJw9epVWK1W+Pn5ieUc8u2X3gel3wXuEOCUtllaWlFR3bHQ/7iwsBAnT57E+vXr8eeff+LkyZO4ceMGWJZFo0aNEB0djV69eomPm/x50ev1dh9e1Go1WrduDR8fH8THx1cYlIcOHWo3okwhmRBCSG1EIZk4JP+KPyYmpsKAPHjwYMycObPCgCyEMGE5VXmYEgKjXq9HaGgofH19odFowLIssrKykJubi8uXLyMtLQ3Z2dmw2Wzw8PCAh4eH3eXl90fpd6m7HeSUPowodatwtHqetLRC6Fyxfft2bN68GX/99ReSkpJQXFyM4OBgdOrUCQMHDkS/fv3g5eUlflDR6/UwGAwwGAzicyQ8NxqNRqzvbt26Nby9vZ0G5XPnziE5OdluRJmCMiGEkNqIQjIpR6nNW0xMDGbNmuU0IA8aNAizZs2Ch4cH1Gq1XQ2yPIBJA7I0JDua8BUSEoKmTZvCaDSCYRjk5ubCZDIhLS0NFy5cQHp6OkpKSsDzvBj85CPL8kBalQUR7jTcKY0SOxotlgdipRFj4XkR6o5TUlJw+PBhbN++HevWrcPx48dRWFgIjuNwzz33oE+fPoiKikL37t3tRoylAVl4joQPMRqNptzkRyEoHz161OFjdvbsWSQnJ+O+++6zK72goEwIIaQ2oZBM7CgF5IULF2LmzJlOJ+kNHDgQc+bMsQvI0pFK6U9pQJa3bBMOSgHMy8sL99xzD9q0aYO0tDRYLBaYzWaYTCZcvXoVZ86cQWpqKgCIAVAI4BUFZKWR3KqsJibt1uHsdqSjwhWFY2l5hfw4q9Uqfkg4c+YMtm3bhtWrV2PPnj3Iz88HAPj6+orlFYMHD0ZoaKg4sq8UkoUPMDqdThxFlo/yAxBLL44cOeI0KCclJeG+++6DVqst9zhRUCaEEOLuKCQTkdJKegsXLsT06dMr7IM8Z84c6PV6sYuFvJOFwWCwq0FW6mnsbMlp6WkeHh7o2rUrNBoNMjMzUVJSIrY6u3LlCo4cOYKUlBRxUppQMlDZx8DRT6UOGkp/SwO2tFWbo7+d1R/LQ7LQ8zg/Px9Xr17Fhg0bsHz5cuzatQtZWVkAylZG8/b2xtChQ/H000+jdevWdiP40kl68hpx4TmUjvBLR5KFFfWEEeXDhw87DcqJiYkYMWIEBWVCCCG1DvVJJgCUA/IPP/yA119/3emqawMGDMBbb70ljtoK4VgpgAkjyPLRYmkoFDozCKPEFovF7mC1Wu2WWeY4Dt988w2OHTsGk8lk1yPZ29sbo0ePRnR0NJo3by7W4SqNXDsr93AW6pSOc9aKTqm8Q6n0Qh6ghXBsMpmQl5eHXbt2IS4uDomJiTCZTOL5hcVXnn76aYSGhor3V/jwIu1e4Wh0X7odQkmHcNvC8yA89rGxsfj222+dvkaio6Px448/wmAwUB9lQgghtQaFZOJwJb0pU6Y4LTcYNGgQZs+eLQZk+eiks0l68tuXtjZjWdYuJEuDsxDYhEBts9lQWloKlmWxfft2bNiwAcXFxeKCGDqdDiEhIRgwYAAGDhyI1q1bi10d5KHdWUiuKMjJyy2kj2tlfxeCqfA7y7Iwm80wm80oLi5GZmYmTp06hdjYWCQnJ6OkpEQc4ff19UX37t0xcuRIBAUF2T0n8vIX+WiycJyw0p50W+QhWfrBRQjKcXFx+Pbbb52uzPfQQw/hp59+sgvK8seYEEIIcScUkus5pYAcExODadOmOQ09QhcLeQ1yRW3ehFFe+TbISwyEYGa1WsXfpQchvElHlvPz85Gfn4/Lly/jl19+QVFREQBAp9PBy8sLAQEBaNKkCTp06IDu3bvjnnvugZeXl7iQhrxXc1W6MjgKyfLHWH68fIlu4QOCyWRCaWkpbty4gXPnzuH48eNISEhAeno68vLyYLFYwHEcQkJC0Lt3b7Hnsa+vr11NsfC7UkiW/pRO0pNum/S5EAK7cJA+9nFxcZg/f36FS1gvXrwYRqNRcTSfEEIIcScUkuspZ32QZ8yY4bQGWd4HWRgplnawEAKZENakNciA44lu8hFl6UimNDALo6zC8dK2aEVFRUhKSkJpaSmOHj2K+Ph4mM1mqNVqeHp6wt/fH6GhoWjWrBkiIiLQtm1bNGvWTFwZUFqTeztLW1cUlKWBWFo6ItzXa9eu4cKFC0hKSkJSUhJSU1ORnZ2NgoI
CmM1mAEDDhg3Rt29f3HPPPWjUqBECAgLEx1v6nEhH8uVlL87a8AnbLO2/LA/J8vKXuLg4fPfdd05fO2PGjMGiRYvEoKz0miCEEELcAYXkekg+qikNyBV1sVAKyEL4krYQUwrIjkYN5SvLKS2aIQRkRyFZCGvC+YXLJicn4/Lly0hISEBSUhJyc3PBsiy0Wi28vb0REhKChg0bIiwsDA0bNkTDhg0RHh6OBg0aiKsFCqPMwrYr1dRW1G5OqaWbMDqbm5uL69ev49q1a7h27RquX7+O9PR0ZGdnIycnB6WlpeB5HkajEb1790ZoaCj8/PwQEREBf39/u1AvhF75SLH8d+Ey0i4jFZXBSEfylUaUWZbF2rVrKwzKY8eOxYIFC+Dt7V3utikoE0IIcRcUkusZ+Vf/0oBcUR9kRwFZCMZCB4uqBGTpdgkcdXkQwphQkyyf0CeEZ6FWVtodIjExEUlJScjJyRFX7MvMzITVaoVKpYKHhwcCAwMRGhqKhg0bIjg4GF5eXvDz84Onpye8vLxgNBrF0gxPT0+xF7Nw/+T1vDzPw2q1oqSkRKybzs3NRUlJCYqLi1FYWIjCwkLk5uYiPT0daWlpSE9PR1FRkViGIfSH9vPzg7e3txiShfph4fEVAq9SBwvp8dLSCqUuI0qvE+mHFWmNuLOg/P333zv9sDVu3Dh8//338PHxKXf7FJQJIYS4AwrJ9YjSV/5Wq7VSS01HRUVh2rRpigFZaTlj+Vf4lak7dVSaIA270tFiaTh2NKIsHVkWAt+NGzdw4sQJJCUloaSkBOnp6SgoKBDvv7CdOp0OwcHBMBqN8PPzg6+vrxiOvb294eXlJYZktVoNg8EAjuPEsM7zvNiurbi4GCzL4saNGygqKkJBQQHy8vJQVFQknhcoa2/n5+cHf39/aLVatGrVCl27dkWjRo3Ex1AYNZYe5EFYOpIsPV1e+iL9ACO970r9o6WlF/LRZGmteGVHlMePH4/58+fDx8enXL06BWVCCCGupnH1BhDXEGpiK7vUdEUBWf5VfmVGj+WctVhjGAYcx4lBiuM4sbWZcJCWZEjLNISRZSHohYaGokGDBmJw3rFjB86ePYucnBywLAuGYcQR4Bs3boijzY6oVCrodDr4+PjAZDKJXSectX9Tq9XQ6/Xw8fERHy+NRoNGjRqhU6dO6NChg10Ilx6kddPSn0ohWdq1Qj6yLw/Ijp4L6XmcrVAoPf7hhx8GAKdBefXq1QAgBmXhMVapVGI/ZkIIIcRVaCS5nlD6+rwyJRZCFwuDwWDXJcFZmzdnI5RV2V7p7/L+wfJ6ZWm3C+lEPunIsrwuWBqc5UE2LS0Nf//9N5KTk3H16tVygV/amULaz1jo/yw9r9B7WHqfGjRogMjISERGRiI8PBxeXl52j5m0jEJeGiHUHDsLyUIdtby0oqLe0I6eB+movvRxdzaiXNnJfNIRZZrMRwghxF1QSK4HlAJyTEwMZsyYUamlpg0GgzhaWtk+yNXR2stRCzV5vbI8LAvhWN4lQwjHjuqWlcoLpL2LpdsjTLgTDnl5ecjPz4fRaIRGo4GnpycMBgO8vLzQtGlT+Pj4ALgV+qRhUP6YSY+Xl1RIJ+dJj1c6yGuOlT64VOb5kU/yrErpRWXbw40bNw4LFiyAl5cXTeYjhBDiFqjcoo6T1/ZyHFelpaaFxR8qM4JcU3WlSmFJHsSFYKXRaMCyLNRqtd2osVJIlh/vaKlopcfSYDDA29sbjRo1sgvtStul1GJN+lNe/iAdPZaHY2koVgrQ8r+VOnJU9XmRro6o1Ofa0etOMGbMGDAMg2+++cbhynyrV68Gz/NYuHAhjEYjAIjbTKUXhBBCXIFCch3lqAXZokWL8Oabb1Z5qWlHC4XIW4hVZ0B2dB1Czar8OJVKBZvNJoZEeUCW92CWh2Slkg6lBT+UlpV2tu1KI+vykWPpBEBHI8jS2mVprbH04GjkWL5Iyp0+F9JFRyrCsiyio6PB87zTJazXrFkDjuOwZMkSeHp6irdLNcqEEEJcgUJyHeSoD/KiRYswZcoUpwF54MCBlQrIwuixvAYZqJmvx6W1wNKSBeE0nufLhU0h+Ep/F0K0cJww8lxRSFbqDCJ9vB1tr/C7o4M80ErDrrykQnq8Uhs36Uh+RUtt38lzIBC2pTLXybIsxowZAwBOSy/i4uLAcZy4hDVN5iOEEOIqVJNcx8hDnDBKunDhwgpHkAcPHoxZs2bZlVhotVro9Xq7hUKcTdK7WyFGWp+sVK8slJfIW8hJ28jJa5rl/ZnlZRfysKy0PQJHIVlpcp6zkCztTCEvw5Bf9nYm5t3pa6wy7eGEPsrCedauXYtvv/3WaY3yQw89hKVLl8LDw8PuA4D8sSWEEEJqCoXkOkZeVytM0quoBlnoYuHh4VFuJT0hJNdEF4s7uZ+OfsofA0eLkyiFZKXzO6pTlt+2nKPSCmdBWTo6LH2MKyqncFTSUVPPSUVBWZjAJ5/QJw3KVVnCmvooE0IIuduo3KIOUQotwlLTzsKIsJKeo4AsXSTEWR/kuxlc5LclneAlPAbSfsryEWFpSNZoNIqjxs5CckXlFtLtchSSAZQrk1AKy45auCmVuNyt58PRSLmz+84wjNhuMDo6GoDzPsqxsbFgGEZcwppKLwghhNxNFJLrCEdt3mbNmuW0zVtUVBSmTp1aLiBLV9ETQrLQm1f+9T7gupE9pVpl6fHS0CwNzmq12mlphaNgrBSShdt3tH1KIVk4zdHIslK9stJ1ye/v3XwuHNUoV/aywoIjzpawXrNmDRiGwXfffSe20RNui4IyIYSQmkQhuQ6QB+TKLjU9ZMgQTJs2TTEgKy01LXzl78oRZEccbYM0JAuUeiIrTcyTnyZ/rOXPgaPbd9TVQmlEWKkjhfw6pOev6P7f7cddqKt2RvpYViYor169GgzDiAuOSFvSufr+E0IIqbsoJNdy8j7IQolFZQLy9OnTHS41XZmFQgD3DCfSbZKvfiec7ij0yicBKp0u/Sm9HUfbUZlOF45Oq+i63E1l+ijLH6vKlF6sWrUKwK0lrKWPFyGEEFITKCTXYo6Wmq4oIA8ePBgzZsxwOIJsMBjE46Ttxqqj3+7dJt9GeWh2NPEPuLWctPx46fmrug3OJtVV5gOIuz7mjvooK1F67KoSlIXSC+nIOpVeEEIIqW4Ukmup261BHjhwYLk2b8IkPXl5hVBiIa2NBdw3qFWG0iiz0vHy3283ICvdrvTvyowM15bHu7JBWfr4SS8jBGVnfZRXrVoFnufFJaylt0VBmRBCSHWikFwLyRcKsdlsiImJwYwZM6p1qWl5R4W6pjL3ST7iLLjdkWRHx9WVx7cyQdnZqHx0dHSFS1ivWbOm3BLWwmuUgjIhhJDqQiG5llFq8xYTE4Np06Y5XZzBUUBWOiitpOfONcjVrTITw5TO4+z66hNHo+PSD3fO6q8ffvhhp0tY8zwvLmG9ePFieHp62k2ApKBMCCGkOlBIrkWUAvKiRYsqXEnPWUCWjyLX94AscHZfhbBXlcejPj128vvrqJ+zs8dFGFGeP3
++w29H4uLiwPM8li5dCoPBIF4flV4QQgipDrTiXi2h1MXiTpealh7kfZDrc0Am1Uupft5ms4lLWFssFphMJrsV+oTX+I4dO/Dxxx87nYgqX8K6rtTPE0IIcS3VnV8FqWmOSiymTp3qNCAPGTIEM2fOhF6vFxd6kI8eK/VBpoBMqpO8J7TwOhNej9LFa4SDcJ6oqCjMmDEDOp3O4fWvW7cOzz77LIqLi8WVFKVLh9M4ACGEkNtB5RZuzlEf5JkzZzqtQZYuNa1SqRyWVwgT9dx5oRBS+0lfR5VdbETo0jJ06FDwPI/PPvsMZrNZ8TJxcXFgGAYxMTHw9va2u12qUyaEEHI7KCS7MXlAlvZBdtbmrSoLhQiLhAgBmb6mJjVFXqcsVVGLvaFDhwKA06AcGxsLlUpVro+ycNsUlAkhhFQFhWQ3JF/AoioLhdzOSnpUXkHuFmdLawsclUdUJiivXr0awK0FR+T1yRSUCSGEVBaFZDcjXwK5JgKy0kp6dbUXMnFv8hFlZ631eJ6/raAsvR16jRNCCKksCsluShqQv//++ztealoekKUT9JRadBFSU5wtOOKoR7X09Tl06FAwDINPP/20wqD8/fffw9vb2+7yNJpMCCGkMigkuxFpeQXP85UeQR40aBBmzpxZLiDLD0J5BfVBJq7mbDlurVZrd5r0NSq8R4QR5U8++cThe0MelIUwTn2UCSGEVAaFZDfhqM3bzJkznU7SGzhwIGbPni32QZa3eZMGZGkNMgVk4mpKC45IX4+OVuRjGAYsy2LYsGFgGAYfffSRw/fI6tWrwfM8YmJi4OXlZXd7FJQJIYQ4QyHZDSgF5IULF1Z6qWl5H2SlEWQKyMQdKZVeCC3ilHocS/9mWVYcUf7www8drsy3Zs0a8DyPRYsWwWg02t0WBWVCCCGOUEh2MaXVyKqy1LRer4darXbYB1kakKkPMnFXztrDOVPZoBwbGwuO4/Djjz/Cw8PDYe0zIYQQIqCQ7EKOlpqeMmWK01XCBg0aZFdiUZk2b9QHmbg7aXs4+XHSmmQ5aVB2Vnqxdu1acByHn376CR4eHuB5ntrDEUIIcYiWpXYBaThWWmraWUAePHgwZs2aZVeDrNVqK9UHmQIycXfS5auFgzDZVFpCJF8QR6VSISoqCrNmzXK6hPX69esxadIklJSUiEtYy9suEkIIIQCNJN91zvogz5gxo8KlppXavClN1FPqYgFQQCa1h/SDncBZiOV5HkOGDAHP807bw61duxYMw2DRokXiZD76AEkIIUSOQvJdJB09BmDXB3nWrFkO6ymBWwuFeHh4lBtZUxpBFvog02IhpLZx1kfZ0XmkNcZRUVHged7pgiNxcXFQqVRYsGABvL29AcBuwiC9VwghhFBIdhGO42C1WsU+yM7avEVFRWHq1KniSnq3E5AJqU0cBWVnH/iko8yVWZlvzZo1YBhGXJlPWupBQZkQQgiF5LtEXoMsDcjOFgqJiorCtGnTFJealtdmUps3UpfIeyMLB3lNvyNRUVEAKl7C2mq1Yv78+QgKCqIFRwghhIgoJN8FSm3eKhOQhwwZgmnTpjlcalr4vaIRZNrRk9pKaURZo9E4PI8QmoXjKjOivG7dOpSWlmLJkiUICgqyuy0KyoQQUn8xPE3nrlGORpBnzZrltMRi6NChmDZtml2bN2kHC+qDTOobpZaJNpsNVqsVVqsVZrMZJpMJZrMZFosFZrMZLMvCZrNhx44dTpewBoBhw4Zh6dKlCAwMtOuuAdD7iBBC6iMKyTVIaae+YMECp0tNMwyDkSNH4o033hBHh4UgbDAYKh2QaadO6iKl1olCSJYGZYvFApPJBKvVaheUP/74Y6cfTocNG4affvoJAQEB9K0MIYTUc9QnuYY46oM8Y8aMCgPylClT7AKy0kIhQkgWSiwoIJP6QP46F8ovhA+MSvX6wofIoUOHYtasWeXKNaS2bduGJ598Enl5eeA4TuyjTAghpP6hkeRqJu2BLK1BXrhwodOlpqUBWdipS/sgC6PIzvogU0Am9YGjxXhYloXVaoXFYhFHkoWyC2GkmeM47Nixw+kS1kDZfIDffvsNvr6+VHpBCCH1FI0kVyOlgCwsNe0sIAPAqFGjHAZk+egxBWRSnymNJgutEeUr88nfNyqVCkOGDMHs2bOh1Wod3sbOnTsxYcIEFBQUiCPK0tX5aGyBEELqPhpJrkaOVtKbOnWq0xHk0aNH47XXXitXYqG0kh61eSPuiOd5FBcXo7S0VFw1sqb/tchXr5RP5rNYLGKdsjC6bLVaxeWod+7ciYULFzodUR44cCDmz58Po9FoV/cPuP/7Tdg+rVYLo9EIvV7v9ttMCCHuxG1bwCntYN09zyt1ssjNzXW43UJAnjx5srgDlo6ICXWW0sl50tXHpAFBuuJYTVHawdJOlwivuw0bNmDLli1IT093yTYofZMj/JT+Ln2fNm7cGKmpqQ7fO/v378fQoUPRpk0baDSaWvmNTfv27fHoo4+ia9eu4nG17T4QQogruGVIFnZYK1aswOHDh5Gfn+/qTarStksPVqsVrVq1QnJyst2OmGEYBAcHw2Qy4auvvhJ3vmq1WgzD0r7HSqPGrtrRtW/fHtHR0WjSpInd/SH1i/T1zHEc0tPTER8fj0uXLolLPLtaZT44Ki17LZWbm4tDhw7Vyte4zWaDWq1GQUFBucelNt4fQgi5m9wqJEt3aDzP49KlS9ixYwcyMjIq3JG5C/lOWQjLWq1W/BoaKAuVZrMZ8fHxil/fOvpK19U7NmGClLBIg/R+unrbyN0jf68K5Q5COYNer4eXl5erN7Ney8vLE3tFC6Pp8pUL6T1LCCGOuU1Ilu90OY5DaWkpcnNzkZeXJ3Z3qK2qGhikXx27A47jUFJSArPZjKKiIrAsW25knHa69Y+0nEEoZWBZFq1bt8Ybb7yBhg0bunoT6x2WZZGcnIzp06cDsH+OhG+kCCGEVMxtQrJAWk8o7HRtNhtat26NqKgoeHt7u3oT6x2r1YorV65g9erVMJlMAEA7XSK+N4UuLsLkVJ7n4e3tja5du6Jly5a15lugukD4MOvh4SE+7sLzxLKs2CNa6ApCCCHEMbcKydLJNcJXhEDZV/xt2rTBSy+9hCZNmlAou4t4nkdBQQF2796NuLg4ABBDkVA+Qv1j6xf5B1khgEkX3pBOkiN3hxCGhS4e0uOFVQelcxmE54bet4QQoszlIVk+I10IX/Kv86WjVe4yKaiuE3auJpMJZrNZPF74ECNtnSUdmaKdbv0g7wdOodi1hPeldO6DcLzwf1WlUonfABFCCHHObf5Tyts20U7X9Sra6dLiCvWXvN2h8Dqh14FrOBu5l472Kz1H9JwRQogytwnJQPmRKfloMrl7pF+nywnPjbDTlZ+HnrO6TWlJaEevFVLzKiptkU+ulM73IIQQ4phbhGRHo1K003WNytSTSlc3k44ok7pNXgKlNIeAuBdpGZs8INN7lhBCHHOLkAzYBzNhdIqCl3tSGpGinW79obS6HZVHuS+l/6n0v5UQQirmFiHZ0U6X/om7J
+nIFO106y/5e5ZeA+6JnhdCCLk9bhGSBfL2UTQy5Z6ovRcR0GugdqAPMoQQUnVuFZIB+mdem9DzRAB6HdQG9PwQQkjVuV1IJoQQQgghxNXcMiTTqId7o5FDQgghhNR1bhmSCSGEEEIIcSUKyYQQQgghhMhQSCaEEEIIIUSGQjIhhBBCCCEyFJIJIYQQQgiRoZBMCCGEEEKIDIVkQgghhBBCZCgkE0IIIYQQIkMhmRBCCCGEEBmNqzeAEELqJg6JZ45j059bkZKeh8hOvTBq9AiE+BpcvWGEEEIqgUJyLcKxFhw/fADbdu1BqVWPvkOGYnC/btAwrt4yQog9Hn8u/wJPTX4XmfklN49T4b0lf2DupOGu3jhCCCGVQCG5luCsxfhw+ot4/7uVMFttAAD1hwHYn3gVvRoZXb15hLjUtp8/w/T/+xk26PHvtz7By48Mdun2lN64gKkz3kdmfgkYlRoNGzWGFjaoVLyrHypCCCGVRCFZkRkfvPoC1h48Bw+fcHyxMAbdWoS4dItObv8d/5u/AmaWg1pnQIPgQLA2BlaWc/WDRYjLndi/CydOngIA/HXwhMtD8rnD+3EpPR8A0LrPOOzb8jO8GCtY+pdLCCG1Bv3HVsKbsX3tWhxJLQRwCqeTb7g8JG/ftAklLAdAhSemf4Ef/vMSinNz4Onv7epHixAik5p2BZabg8Zduw1CoKcOgA56V28YIYSQSqOQXCvYcOl6ys3fDbhv4EBoVAx8AwNdvWGE1GI8rBYLzBYrwKhgMBigUaucnp+1WmEyWwAAGq0Oep0WDFN+UkBxXq74u15LE/UIIaQ2ohZw1YnnYbOxsFqtYG028BWVH/I8OJsNVqsVVisLjnN0ARuKcwtu/q6GXkvjUYTciezU85j5r6fQpUMb+Hh7wz+gAe4dOAwfLfodpVZZCRNvQ/yezfj3M4+hZ9cO8Pfxhre3DyLadUb0xBdx8PQV8aw5lw4h+qHR+HTJBvG47bFfYtSoUXj8xTkoYqkmmRBCagsKydWCx9nD2zFp/IO4JzwUOp0OIQ3vwcgJz2DzwdPlzm2zlmLzqh/x2JjRaNuiMXQ6HQweXuh27wC8+d7nyCqyiOfdE7sI48Y9ij1nrt08phSfvfcKxo0bh09+WO/qO05IrZN4ZAu6dumFT2KW48yFy+ABsJYiHN2/E7NffAzRk96FSfKBdW/sd+g1eCQW/LQSx05fQFnO5ZGSeA7rfvsBg/r3x94zGQCAtHMnsGnDRhw7d1m8fErSCWzatAkbVq9HTinNISCEkNqCQvId47Hm+3no2XcElq7ZiKvp2QCA3Myr2LzqJ4zu3w9f/PqX3fnff3UCRj7yLFas3YSEK2kAAM5mxvHD+/DFf6ai7/AnUWQt20kf3fYH1qyJw9XMwpuXZ3Fwz59Ys2YN1m7c7eo7T0itwpmz8cyjT+FqVgH0vo2xZN0emCxWZKZewMRh3QBw2LriUyxcd0i8zLVrKWChQ6+okfjo82+wfPlyLIn5BgM6tgAAmPNS8Ol3ywAAYe264uGHHkTXts3Fyzdu0RmjR4/GQ+MfQqAH/cslhJDagv5j36Gr8Vvw0pv/Q7GVRbOOUdgTfx5FRYU4smMNIkJ9YWPzMefVl3Ahx3TzEjwuJSdD7xWIB8Y/ic+/no/Fi3/Ax3OnI9TbAwBw4cAqrNhxAQDQZeBQ3DcsCg39hTZvGnTq3g/Dhg3DsKh7XX33CalVDq7/FfsTbwAAJrz8Np5+sD/0Wg2CGrXCBx/Og7cK4G0WrFqxTrzMiPEv4cCx09i3dT1mTpmMiRMnYtKLk/HDdx+JE/Eunj8HAPC/pxt+j1uHaZNGiZcfGv061q9fj+UxH8BITc0JIaTWoIl7d2jJd98g22QFGCPe//Jr9O/SGgDQbUg0Zv9rGybNXQBTzkWs/eMIZjzRD4AKMz5YgLeDWiCiaQOoxEk/HDxLbuDVT34CwOP82TPA/a0x6NFXMOjRF/Dk4E5Y/td5AB5459OfMHbQPa6+64TUMjz+2rYFQiGF0VuN/Xv3iqea8vLg66VFYYEVpy6ehBWAFoBvwxbo3RAAzyEvOxNJFy+h2MqiIDULHgDMAFgb6+o7RwghpJpRSL4TXCF2bNtf9rtXAHSWLBw6dOtrWrWX183feMQnnALQDwDQsXtfAICNtSArKxNXrl6DDQDHqsXLsjarq+8dIXULzyExKVX88/s5z+N7B2c1myywoSwk85wVW1f/hE++mI8TF66gIC8fVo5qiwkhpK6jkHwH+KIsJKYVlf1RmIIJ9w90eN5ik1n83VyUg6ULvsIPP/6GKxlZyM7OBe1yCal5Fs4m/h7WPAJB3krt2Rh0HzlKLKVY9fXbeGbGZyi1clBrPdC0RSt4eehhM5cgIeESaAyZEELqJgrJd4JjIUZftQ7NmzeFVlW+5pBhdBg1oBcAgGeLMP2ZMZgfuxscD3gYfdGsRUto1CqU5GUj9Ua2q+8VIXUUgyCvW0u4v/F/v2F6dFfnFzFn4qOPF6DUykFlDMJ3y1dj4v29YTTokHvpH7RpdS9uuPpuEUIIqREUku+E3hMBWgY5VgA+TbFx/zG0DTE6vUjSwa1Ysm4vOB4IadMba377CT3b3wOdRo2tX7+L4a+/7+p7RUjdxDCI7NIB2PAPAGDvn1sx5aEu0KgcT6az5F3H+axiAEBY8x6YMKI/jPqy+c6s1QpbxbdKCCGklqLuFneA8QhGmzahZX/kpmLPofMVXuZi0qWby0sDg4c/in6dW0GnKatFttLkH0Lu2M4VX6FPnz52h/se/hcKWGD0mEfgc3NVvc0/f4QZ78/H+cQryMjIwJWki9i05je89sLTWH/gEgBAa/RHA0PZ+XMzknExJQs8eBRkXsXst94Bfe9DCCF1F40kV8iKj6c9h58C7EeIez7wPD6a9jgmjH0Yf8R/Cw6leO+152C0fo7hAzpDr1EhNzMD/+zbhc1/ncGnC75CsEGFYD9/aBjAygNJCWdRaGbhrVPh6rlDeHfBz66+s4TUetlpl/F32mW74wxemcgu5tC80xC89+p4zPhqJVhTPr6Y+yq+mPuq7Bo0aDX03wBagvEKx+PRA/G/ZdtRnHkeDwzsg7YR4bh26RyuZJkQHhaE1OtZrr7LhBBCagCF5ApxOHfiEM7Jjs3Tt8VH0x7H+Jdn4OffN2Lbicu4kXwCT42JKncNKnUY5nzxBYINKnQYcB/aNw7EsavZOPznD+jd5yQaBRhw7J+DMOv94OupQX4JjSgTUhVtuveA57KdsDlY2t07KBTeBhUABlM++xEBjdvggy++x+X0bLBsWdGESq2GRq1Du+4D0atLy5uXVOHtL5cgq2QSfl6/G5nXE7EnPRkNGrfF98u/Qd7+BXjru7UICgm0u73QRk3h7eUJC69Fk6Yhrn54CCGE3AYKyUoYA9p0jsRfqYccniUgKAgAoPdrjNV/bsc7s6bjh183oMRqH3BVKiOixoxDmE/ZQ63za4pff1+GJ598AUcuXsOZ+L9xBgza9rwfCxZ8iLmPD8PO85nw8fORXgvCwsMBnIfeNwABfgYQ
Qm4Z9cJcFL8wt1LnZdQeeObNuXjqtTlISbqIE2cvggMQEt4SkRHN4e9j/62RR0BjxKzehv+mJuKf+DPQ+zdA357d4KnXAmMHYern5W9j8GNTUfDYVFc/LIQQQu4AhWRFOny/4R+HPVTlfEJb4Kulsfjo23ycPRaPi9cyAEaDyI5d0bJ5YxgNWrvzt+k1Av+cTcK5k0dw6sJVNI3sjB4d2kCjAnacU5orr8HHy7bh42WuflwIqTtUGh2aRrRD04h2lTg3g+DwlhgV3rIS5yWEEFIXUEiuRh5evujWfzC6VeK8Ko0O7br2QbuufVy92YQQQgghRIa6WxBCCCGEECJDIZkQQgghhBAZKrcghBBCCCFO5eXlISMjAyUlJQAAlUqFwMBANGzYEGq12tWbVyMoJBNCCCHE7e3fvx+zZ8/GjRtlE9z9/PzwySefYMCAAa7etDorLy8PsbGxWLNmDY4fP46cnByYTCYAZSHZx8cHrVu3xrPPPosnnngCnp6ert7kakUhmRBCCCFu7/Dhw9i7d6/dcYcOHaKQXEO2bt2KyZMn4+LFi4qncxyHvLw8/PPPPzh06BBiY2Pxyy+/IDAwsIq35L6oJpkQQgghhNh58803HQZkOZ7nsWXLFrz++uuw2Wyu3vRqQyGZEEIIIYTYycrKEn/39PRE165dMXbsWDz44IMICwtTvMyaNWtw/vx5V296taFyC0IIIYQQYsfLywsWiwUvvvginn/+ebRseWsxpfz8fLzwwgtYtWqV3WVMJhMOHDiAdu0qs0iT+6OQTAghhJB6h+d58DxvdxzDMGAYps5sj/Q6q3pdv//+Oxo1aoQGDRqUO83X1xfz58/HunXrYLFY7E67evWqSx6/mkDlFoQQQgipFzIyMhATE4Nx48YhIiICBoMBarUaarUaRqMRHTp0wOTJk3HixIlyl929ezeeeOIJPPLII3jkkUfw+uuvIzc31+FtWa1WTJs2TTz/c889p1iKkJeXh08++QR9+/aFj4+PuD3+/v7o378/Pv/8c+Tl5Tm8nc2bN2PixInibVy8eBE8z+P3339HREQE1Go1vLy88NJLL4Fl2Uo/Vl27dlUMyILg4GAEBQWVO95oNN6tp7PG0UgyIYTchtKCLPx98DDMnAH39u8Df6Pe1ZtECHHiiy++wDvvvIPi4mLF000mE86cOYMzZ85g8eLFmDt3LmbMmCGOvi5fvhy//PKL3WUiIiLwyiuvKF7ftm3b8MUXX4DjOPG4tm3bok2bNuLfGzZswHPPPYfMzMxyly8oKMD+/fuxf/9+fP3111i2bBn69+9f7ny//fYbfv31V/HvHj16YO3atZg9e7Y4ia60tBSbNm1CcXExfH19q+XxLC4uVgzv7du3r5brdwc0kuw2eGSmJmLHjh04ePgUrBx/51dJCKkxK7+ci6H3P4AHRgzFO4vWuHpzCCFO3LhxAx9//LHDgCxnMpnwzjvvYO3ateJx999/f7lyhRUrVjjs5rB27Vq7gKxSqXDvvfeKf69atQrjxo1TDMhyV65cwYMPPoj4+PgKz7tx40bMmTOn3HYVFxdXaSS5Ip9//rm4sIigUaNGdvextqORZHdhM+H1xx/Gb3tPQ63RY8OZ6xgREeDqrSKEOGCymFFW6sfBytadlkeE1EUsy9rVzvr5+eHee+9Fp06doNPpkJeXh7Vr1yIlJUU8j9Vqxddff43o6GgAwKhRoxAcHCwuZgIAR48eRUJCAtq2bVvu9jZs2GB3XKtWrdC5c2cAQFJSEl555RW7bdJqtRg+fDg6deoEi8WCnTt3Ij4+XqwpzsvLwxtvvIHdu3c7rS3etGmT4vFGoxEaze3HPpZlUVhYiOTkZCxZsgQxMTF2p2s0GsydO1exBKO2opDsLngeRaVlq9jYWDOsNhpJJoQQQqqDv78/mjRpAh8fH7z88suYOHEiwsLC7MLma6+9hn79+iEjI0M87vjx48jLy4Ofnx/0ej0eeeQRfPvtt+LppaWliIuLKxeSDx48iPT0dLvjHnzwQbFe9/PPP7cbQdZoNFi+fDnGjRsHlarsS/7i4mK89NJLdiUe+/fvx5EjR9CjR48K77NKpUKfPn3Qu3dvnD59Gl5eXlVeES8tLQ3Dhw9Hbm4uOI6DxWJBQUFBucl6QUFBmDdvHp599tm7+8TWMArJhBBCCKnTPDw8sHXrVmi1Wvj7+yuep2XLlhg5ciSWLFkiHmcymZCeng4/Pz8AwMSJE7FgwQK7soXY2FhMnz4dOp1OPG7dunV2161WqzF+/HgAQG5uLuLi4uxOj46Oxvjx4+1Cu9FoxLvvvovY2FiUlpYCKFvlbvv27RWGZLVajXfffRczZ86EXq8Hz/NgWRZarbZKj1t6ejpOnTrl9Dzh4eF4//33MXbsWDHg1xV1694QQuohHgXZmbh06RLSb+SC451/C8OxVmTdSMOlS5eQmHwZhcWlqOh7G461Iv1aCi4lJiO/2FTpLTOXFuNKchIuXUpEZk5+hdtGCKk5ISEhDgOyoFWrVnZ/22w2FBUViX/37Nmz3Kjx6dOncfLkSfFvpVKLyMhIcULbuXPnkJaWZnf6iBEjFEsoGjVqhPDwcLvjzp49W+F9feaZZ/D2229Dry+bUMwwTJUDMlAWtisKvqmpqXj22WfRu3dv/Pnnn+Xa2NVmNJKsgOdsyM3ORonZCr+AQHh5Gpyen7WakZuTA7PVBo3egAA/P+i0zh9a1mJCZlY2eJUWQUGB0KFyrBYTcrJzYLVxMHr7wtfHGyrXtHQk5K7jLUWY9srz2HM8CaOfehvTn+qF//vPHCz+bQOupGXCL7ARoqIfx/999C6aBHrZXZZjLdgauwyffRuD0+cuIiMrD4xGh2bNmqFX1Gi8NXM62jcv3+4o8eguTJ31Lvb8E4/cIguatWmPyW++CzXLOdzOkvwbWPTlR/hxxQacT0yG2cohJKwxuvQZgrffm4t+7Zu6+qEkpN4rLi5GUVERrFar3XHOqFQqPPHEE5gxY4Z4nMViwapVq9C9e3cAZSE4ISHB7nKjRo2Ch4cHgLJQLQ+S//vf//Ddd9+Vuz2O4+zqpAEgOzvb6TZ6eHhg+vTp1TKq27JlS8ybNw/5+fmwWCzIzc1FamoqTpw4gZycHPF8PM/jzJkzGDNmDJYuXYpHHnnkzp8gN1DvQ3Je6hk8/cIbyClW4/2vF6ON9w3MmTEHm3bsw438UjRv3RHPT5mFqc+Ph15tn0YtxblYuuBL/PDrOiQknEdBsRkePgGIaN0aY576F9549lH4eNrHX55nsWv1j3jv429w+MRZcBpP9Bg4DP+d+47T0SxzSR5+W/gVYn5ahVPnLqDYzKJBo6bo1mcw5rw3F33bNXH1Q0lIjeOsJdixaydOJGZCF7Aex3+Zjbh/bo2q5GZfw+ofPkVmvg2bf/0MHpqy9yxvM+OzmS/g7c+XwSq5Pp61IPnSBSRf+j/s+GMn1m7ZhD6RDcXTU07uwvDhDyExu1A87vK545jx78cQ7O+luI2mvGuY+OBIrNt7AlCp4evrC28VkHfjGrasXor9u/Zj3Y4
dGNKpsasfTkLqnZycHKxZswbr1q3D+fPnkZ6eXumOF4Lx48dj3rx5dpdbu3Yt5s2bB4PBYNcRAygL1uPGjRP/VupmcenSpUrffmlpKXiedzh5T6VS2ZV+3AlPT0+8/fbb5Y7PyMjAZ599hi+//NKu9KS0tBRvvvkmBg4c6LTHcm1R78st8jOSsf7P7di3dzvilv6EAb0H4cfYP3EjvwiADckJx/D25Kfx2U/b7C5nzr+GR+4biH9N+w8Ox59AQbEZAFBakIMTh//Ge69OwshHpyDfZD/rffPij/HQ4//GvqOnYGZtsJoKcWBLLEbffx8OJFxT3EZrcRb+PWE0np0yFwdPnAWn1sHby4js9Kv4Y9WPuK/fIGw9esXVDyUhd9WBrYsR9885hEd0waTnn0ffrq0h7DL2bfgNhy7cmoH+9/rFePfLsoCs8w3BWx9+h23btuP3ZTHo3rosrGamHMPkqfNgFgaIeRYfzJkhBmRP3xCMf/xpPBo9Ej56ICNTaTSHR8x/Z2Dd3hPgGS1efOtrJKdcR0b6daxb8gmMGgZF2Rfx6vR5YOvON5KE1Apbt25F165d8eKLL2LTpk1ITEysckAGgGbNmmHQoEF2xyUlJeHgwYNgWRYbN260O61du3Z2yzQ7W4CkIgzDoGvXri5bFVDQoEEDfPrpp3juuefKnXbt2jVs3rzZpdtXXer9SPItNnz91VtgGBU69BmKDk38sXvHn7iWWQieNWHR19/h1aeGwUfDALwNX7z1GtYdKCtmD23RFW/NnoLmoT44e2gXPvzse+SWmLFv40J8tPh+fPjKaACAJTcJb8z5CEU320WFNG2DqL7dcP3SSew5fBrFDup4fv1iLn7auA881Jjwyn/w7QdTEGhUY+eqBXj4yTdQlJeMV6e+hWPbf4anpt5/7iH1BoM+I1/A+pXfINCog7UoHYO7dcH+C+mwmTJwJCERA9s2AM8W46vPvxHD7xv/W4r3Xx5xM1BHoVe7ZujabxSyS6w4tXstDiX+D/1bBcCcdR4rNx8pu5DaB/NX/Iln7u8CgMep3esw7pGncOFGod0W2QpTMH/pOvAAQtsMxH9mvQg/z7J/s8Mf/zdGxyzCiv3ncXbHJlzIsaBtYPWM9hBCnIuPj8cjjzyC/Px88TiGYRAQEIB27drB29sb/v7+OHv2bKV6ET/11FP4448/xLIJlmWxatUqNGvWDEePHrU775gxY2Aw3CrbDAgo39517ty5YrmGMxqNBgMGDHD1wyl6/vnnsXjx4nL9l48ePYpnnnnG1Zt3xygk29HitQ8W49NZT0DLMEjYH4ueQyegwMQiLeUYLmeVomOoJ4ozzuDLH27OXNU3wC8bNmFIZCgAYOTI0Qj3YvD4jC8AnsWyH5di3iujoQOwb+MaJGaWTQDwb9INO/ZtQ/twf/A2M37+eBb+9c6XMMnKHLmSdHyzeCU4AAFNuuGT/05DkE/ZjjVqwr8xYfFiLN5+Ehf2bsWZa4Xo0bR6VtIhxN1p9EH44vvPEGgsez9ovUIxondX7L/wBwAOGXll77XSnBQcPXax7ELqMDzzaBSkYzBNOvbFoLaNseZIEtiSXJw6lYj+rQJw6e99yLv5fmzcaRDGDOl48xIMOgx8GK9OGotXP15qt01XjuzHldyy5vp6by12boqDWjyVh1V3c1U+Lh3xyVloGxjm6oeRkHrho48+sgvIKpUKH3zwAaZOnWo3oe2zzz6rVEgePnw4GjZsiOvXr4vHrVu3DmFhYXaLeDAMY1dqAQCBgYHlrq9p06YYOXKkqx+mKvPx8VGsfZbWeddmFJIlwruPxmeznsTNMka07t4fbYP8cDA1CxaLCTnFJgCeSNi3CxnmsjdB657D0TdCWnfDYPiYCQh79ytcN3HIPHcMV/OsaOmnRfyBfRAy8P0TJqFdeNksW0atx1PT38fK1euw+Viy3TZdP3MUF6+VFcfr/L1xdM8W3FpRnofN4+Ya6Vwm4pMzKCST+oNRlZsgq1eX/5dWkHkdKcU3d1phTdHYXzZ6qzagYbNQ4EgSACtSsst6pKZev1U3GB7ZDkad2u5iSk35U66lgb25WuaVQ5vx2COOv3K0WKtv5StCiGMmk6nc6G7fvn0xa9as275OX19fREdHY/78+eJxaWlp+OSTT+zO16lTJ0RERNgdJy29EOzevRtPP/20y8soqurUqVOKgVjekaO2opAsodHrxYAMAFCpYWDKf0I6l3xrpmlYZCvoZBP6PAMawDfQE9evFYFls3E9rxQt/bRIuXarRrJ1p052o1kMo4JGrS53W+nX01BqLYvW6Sd2YMyDOxxuv5l2uoSUw7IWiO8MBii/C2KgUqvE37U3gzYrWU4WldxxSS/j3aAp7u0YoXg+o38TREXW/kkthNQGLMtWajnmvLw8bNmypdLX+/jjjyMmJka8bo7jUFBQYHeehx9+uNwkurZt2yI0NNRusZGVK1di6tSpYps4V9u0aROmTZuGqVOnYsKECfD29i53nuvXr+O9994r16lDrVa7VUnInaCQfBtKLGbxd0ah/xrDqMTjGUYF9c3frdytr2BUlawdZjmbOPrs4R+K7h0ioLS79vAJw9D2deOTGyHVycs3EIEqIJMDcD0NN4psaO4t+UBqMyHt8s2dFaND05Cy7hYNg271U81Kugwzy8NT8ila+pWqIMi3rCUjxwNNu47Axk3fQ1e7BoYIqVXee+89fPjhhw5Pf+211/DWW2/By8u+G83ff/+Nr7/+Go899hi0Wi3i4+MxY8aMciPOznTv3h3t2rXDiRMnFE+Xd7UQBAQEYMyYMXYt30pLS3Hffffhgw8+QFRUFHx8fAAARUVFSEpKwr59+8Tey3///XeNP65JSUk4f/48XnzxRbz77rt44IEH0KdPH/j4+MBqteLYsWP4+eef7VYnFPTo0QO9evWq8W28Gygk34Ym/rfqiXKSroHlAK0k85bkZaIgp2x1HI22ARr5l/VGbBjgJ57n+oVkAH1uXYjnwXHl+676+XhDxwClPBDaYSi2/rUMBtrpElJpXiHN0KpFMDIvZgLsVazdcgRTxt36B55y+gD2nC37dsjg1wi9epQtJtC8UwdoAVgBJB7fgb+OJeOBHvcA4HHu7z/x/U/ryt3WPZ07w6jTIN/MIvnobpxJyUGXJgGV2EpCyO0oKSlBSUmJw9Pj4+Oh0Whw33332S3CwbIs3njjDcydOxcqlQr5+fmVGm2W0ul0eOyxxxyG5O7du6NNmzaKp82cORPr1q3DtWu3ulqlpaXhueeeg5eXlzj6bLVaUVRUJOYDITzXNKGcjOd5pKWlYfHixVi8eHGFl/Pz88Pnn38uLmJS21ErhNvQoXc38dPFuaM7cSlDOsOdx94/1+F6cdmbrXnPvmjsXXbulp3ai6PAm9etQFp+2cpdvM2K9T9+jb2nr5a7rcat28LXu2xWbPqpv3E8KROEkMrTeAbisTEP3PyLw/tvPI3vl6/DiRMnsX39Cjz25EvIKDQDYDB0/HNo28ATAODXsicGtyubkMuVZODp6JF4beoMTH7hKQx+YD
zOpOWUuy3vxp0xdkAHAEDxjfN4fOxj+C1uC06cPIkTx+Pxx9qVeOPlZ/Hkqx9WuMofIeTOCaUAb775Jpo3b17utNzcXGRnZ4sBuXnz5lWqpx07dqxiKQJQttS0WqGMEgCaNGmC3377DaGhoeW2qbCwENnZ2cjOzkZBQYHdAJqj26pu7dq1q/IKfeHh4fjtt9/Qu3fvu7KNdwOF5NvQoN0ARHUqW7zDnJ2Ap556CTv2H0bC+bNYuej/8PI7X4IDwKg98NJLz4mz24cMfwDeNyf/XDn6Bx586BG8N3cunn30YTw6+R0UWMp/ijU2isTYQV0BAKW5SXjm0YlYuX4HzickICHhHHZv3Yg5017F06+8DycLgBFSj6kwafp7eKB72Qhx7rUEvPzkw+jcuROGPfQY9p9KAsCgbe/R+PL9yeI/RUZtxHv/eUecsJd17Ty++fxTzP9hOW4UWDB0zFiEyju4qfR495PP0KahLwAe549sxeNj7kfnTp3QuUs3jIx+FF99/yPOXEh19YNCSK0TGRkprlpXWULgbdy4MTZu3IioqCjFSbd6vR5jxozBjh070K1bN/F4T09Pp8G0RYsWil0pVCoVxo4d63Tb+vfvj/379+OJJ56An5+f0/MaDAZERkYqLuwREBAghnGGYRAYGFiuvKSq+vXrhxUrVqBPnz4wGo0Oz8cwDBo1aoSpU6fiwIEDuP/+++/odt0NlVvcBrXOF//35Wc49dBTuF5gwpHtv2Ho9t/szsOotIh+YRZeGnOveFyDdgPxxpMj8P6SjeB5Dkd2b8CR3WU1RjpPP0QP7YK4zbvsb4zRY+YH/8Puo9E4fS0XCUe24dGHtpXbpsh+T4HjeQBUi0HqLkZjQK8uXZDHJiCoUScEetr/C4vs2R0tdp0EtEa0b3prhMYY2Bwr/9yJTz+Yhx9XbMSN7CyYLSw0eg+ENGyE0Y89j3fefBmNgux3hn2iX8KKJRzmzPsECUmpYKFBaHhzvPjGu5j8eB9MSjmPk5klaN381oqXTTsPwZbtWzD3P//Flr/+QVZWNiw2Doxah8BAf4Q1jsALz42ldyohVXTffffh2LFjTssr5Jo2vbUEfNu2bbF582YcO3YM+/fvFyfOhYSEICoqCh06dIBarcbixYsxd+5c8DwPLy8vtGzZ0uH1MwyDf/3rX1i5cqXdBLYuXbo4vZzgnnvuwbJly5Camor4+HicOXMGeXl54ulBQUFo2bIlIiMj0bx5c8Uyhg8++ADPPfccWJYFwzAICQlBcHDwHT3WKpUKY8aMwUMPPYTk5GScPn0a58+fFxdCYRgG4eHh6NSpE9q1a6fY+7kuqPch2ejXAB1b3YOMghJ07iCbVao2oEuvjjhnOQW/Ju0RYrw1bNRu0Hjs2hGA6bPfw97Dp5BXUAie52Hw9EJo01Z48dUZeP258fCQTtBjtHjrqx/Beb6JmOVrkZVbCLXOgGYRnfGfj79AF48rOHbiLCwGfzT0vvVGaNx+IP7Y9ifefW8eNu34G3n5+bDaOIBRw9fXB8GhzfHM02OhUdNul9RtKr0PYlY5nn0+8qV3MPKldxRP8woMx7zPF2H2+4VITLyIgmIzDD4BaNW8Gbw8HdTPMWqMmjgZQ6OfwoWz52GCDhGtW8PPu2w0a/2h04oXa9K2F5as2IDighwkXUxCkcUKtdaIe1o0RYC/L32FR8htYBgGrVu3vqPr0Gq16NmzJ3r27OnwPIGBgYq9jB1ZtWpVuQ4PEydOrFI7t/DwcISHh+PBBx+s8n0yGo3o0KHDHT0ujqjVarRs2bJSgb8uqvchOahFD5y4kKh8osoTn6/ahs8dXDaiexTWbRuC3BvXcfr8Jdg4HsGNmqNls3Dotcp1SDpjEN7/+ie8MfsqziYkw+AXgvZtI+Ch0wC4F8nXJihernFkT/z4+yYU5efg4rnzyDdZwKg90CayFYKDAminS0glGYzeaNexa9Uu4+mDjt17VukyAGD0CUCHbnVzhIUQAmRmZiI2NtbuOK1Wi+joaFdvGqkG9T4k3zkG/iGN0D+kUZUuE9iwKfo3bFqFy5Tx8g1Al3v7VPlyhBBCCKlecXFxSEtLszuub9++aNasmas3jVQDGoAkhBBCCKkim82GJUuWlDt+woQJt3FtxB1RSCaEEEIIqaL4+Phyi48YjUY8/PDDrt40Uk2o3IIQQgghpIpCQkLwyCOPICenrGe6r68vxo8fX673Mam9KCQTQgghhFRR06ZN8csvv7h6M0gNonILQgghhBBCZCgkE0IIIYQQIkMhmRBCCCGEEBkKyYQQQgghhMhQSCaEEEIIIUSGQjIhhBBCCCEyFJIJIYQQQgiRoZBMCCGEEEKIDIVkQgghhBBCZCgkE0IIIYQQIkPLUhNC7iqe58HzvKs3o05QqWicgxBCakqdCMm0w60+DMO4ehNIHcZxHDiOo/dsNeF5nt6zhBBSQ2p9SOZ5nna61UilUtFOl9QIm80GjuNgs9lcvSl1Bsdx9J4lhJAaUqtDsjAqRSG5+vA8T1/hkmpns9lgtVopINcAhmEoKBNCSA2otSGZ53lYLBZwHOfqTalTbDYbGIaBWq2mDx6kWthsNphMJvowW4OEoEwIIaT61MqQzHEcTCYTjUrVIJvNRjtdcsdsNhtKS0vpw+xdwDAMGIahDyKEEFJNal1I5jgOZrMZLMu6elPqPGGnS0hV8TwPs9mMnJwceq/eZTabjQYQCCGkGtSqkGy1WlFYWAiWZWm05C5hGAZWq9XVm0FqmdzcXBw+fBj+/v6u3pR6x2azITMzkz6cEELIHao1ITknJwenT5+Gn58fBeS7iGEYWCwWpKSk0OgUqRSdTof09HTMnz8fWq3W1ZtTL7EsS98EEULIHaoVIVmr1SIxMRE///wzDAaDqzen3uE4Dnl5ea7eDOLmGIaBVquFTqeD2WxGamoqhTQXUqlU0Ol0YucLei4IIaRq3DIkS/+hazQa6PV6ZGVlIS8vjyaTuYjQj1Wj0dBOl4iE1wHDMAgNDUXXrl3RpEkTu/aMAvoGqGZJ348qlUp8n3bo0AFBQUHieeh9SwghleOWIVng5+eH5s2bIz8/H8Ct5WxpZ3t3SYMQx3Fo0qQJjegTMWwJr422bdvC398fRUVF4uRaq9Vq976l927NkL5HhQ+zWq0Wer0eDRs2RIMGDSgcE0JIFbldSJb+I+/QoYO40xUWIpBO2qO2UjVHOuKkUqmgVqvFUf1WrVohICCg3PlI/dakSROEhISgtLQUJpMJFotF7GUu9EimkFz9pO9Toce5TqeDTqeDwWCAh4cHfaglhJDb4FYhWT4y1a1bN7Rt29Zup8uyLO10a5h0VEppp+vp6QmDwUDhuJ6Tj16q1Wrxw5RWqxXfo9L3Kr1fq5/8/6b0OdBoNFCr1XblF/S+JYSQynGrkAzcWjlKOnqp1Wphs9nA87z4lT+F5JqjFJKFHa50p0s73PpJHo6VArLwvlSpVBSSa5ijkKzX66HT6RTDMi1jTQghFXOLkFzZna6wA6CQXLMchWRhNJl2uvWXsKKb/PUhvFeF96Nwm
kajsVuOmt6vNUMalFUqFbRard1BeL+q1Wq7D7f0niWEEMfcIiQDt/65C10UKtrpCvXItNOtfvKRKaGVlNJOVx6Oaadb98mDsvAhSjhN+Cn/QAvQ+7UmyN9/8m/hpB9sqeyCEEIqzy1CsnxkQ9jpCjti6fG00615SjtdodRCHpRpp1s/ST9A8TwPtVpt94FWeM3Iv/Gh92v1k79fpSUX0p/CgT7YEkJI5bhFSBbIJwFJV+uine7doxSS5UHZWW0y7XTrPnlJjkajKVd+QbXId5/w3hPem9JwLC21oA+1hBBSMbcJydKADJT9kxeOlwZkqkW+u6QhWXgepCNSVItc/wjlFtKFfaTBSzqyTCH57pKGZPl7VzhIz0fvXUIIccztQ7Lwj10+KkU73btD+rwIo4TS32lkikjft/IATd/43F3y+QTy9y+9TwkhpPJcHpKFf9jS+mP5Dlf+1a1wflKzKtrhKu14aQdcv0hHj4Wf8hplgN6vd4v8/Scvh6L3KSGEVJ7LQ7KU9B+38M9dutMFaGTqblPaucp3ujQ6Vf8oPd/S4+j96R7kzxOFZEIIqTy3CcnyiV9KYZh2vK7naJIe7XTrJ3re3ZujkEwIIaRiDO+myVO+WW66mfUO7XSJI/QedV/0PiWEkKpz25AM0E7XndFOlxBCCCF1mduUWyihIEYIIYQQQlxBdedXQQghhBBCSN1CIZkQQgghhBAZCsmEEEIIIYTIUEgmhBBCCCFEhkIyIYQQQgghMv8P4xW+CfnafvQAAAAldEVYdGRhdGU6Y3JlYXRlADIwMTYtMDItMjVUMTM6MzY6MzgrMDA6MDDbAJwVAAAAJXRFWHRkYXRlOm1vZGlmeQAyMDE2LTAyLTI1VDEzOjM2OjM4KzAwOjAwql0kqQAAAABJRU5ErkJggg==)", "_____no_output_____" ], [ "*#Building the model*", "_____no_output_____" ] ], [ [ "model = DecisionTreeClassifier(max_leaf_nodes=4,max_features=3,max_depth=15)\n", "_____no_output_____" ] ], [ [ "*#Fitting the model*", "_____no_output_____" ] ], [ [ "model.fit(xtrain,ytrain)", "_____no_output_____" ] ], [ [ "*#Predicting the test data*", "_____no_output_____" ] ], [ [ "accuracy_score(pred, ytest)", "_____no_output_____" ] ], [ [ "*#printing the confusion matrix*", "_____no_output_____" ] ], [ [ "confusion_matrix(pred,ytest)", "_____no_output_____" ] ], [ [ "*#Printing accuracy score*", "_____no_output_____" ] ], [ [ "accuracy_score(pred,ytest)", "_____no_output_____" ] ], [ [ "*#Printing roc auc score*", "_____no_output_____" ] ], [ [ "roc_auc_score(pred,ytest)", "_____no_output_____" ] ], [ [ "**## MLP Classifier**", "_____no_output_____" ], [ 
"![mlp.png](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAABrsAAARvCAIAAAAnp8hUAACAAElEQVR42uzdCZxcZZ3v/+c559Ta1dn3TsLOiAOIICChM6b5O1aLetm8LvfqXHCZua//eFl0Jg2SMYlmRrqVYXTGCzozf3EURWbcYNRuHTvEbgKBBAJERUAgJOkkJOlsvdRyznn+rz5P1amTqu7qTqc7qar+vHMoTtdyuuqpSlH15fc8P0spJQAAAAAAAADAYzAEAAAAAAAAAHwkhgAAAAAAAAAKSAwBAAAAAAAAFJAYAgAAAAAAACggMQQAAAAAAABQQGIIAAAAAAAAoIDEEAAAAAAAAEABiSEAAAAAAACAAhJDAAAAAAAAAAUkhgAAAAAAAAAKSAwBAAAAAAAAFJAYAgAAAAAAACggMQQAAAAAAABQQGIIAAAAAAAAoIDEEAAAAAAAAEABiSEAAAAAAACAAhJDAAAAAAAAAAUkhgAAAAAAAAAKSAwBAAAAAAAAFJAYAgAAAAAAACggMQQAAMCkaGtrk3nd3d1juUlzc7N/k5EubWtrG/dd8g8+7oOc+BFOCf9uNzc388oEAACjIjEEAABAFWhra+vo6BBCtLS0jDF/BAAAwPiQGAIAAKDKbNy4kUEAAACYPCSGAAAAqALLli0bdh8AAAATjsQQAAAAVaCxsTGZTAohkslkY2MjAwIAADB5LIYAAAAAVaG9vZ1BAAAAOAmoMQQAAAAAAABQQGIIAAAAAAAAoIDEEAAAAAAAAEABiSEAAACqQHd3t8zr7u4uf83m5mb/ys3NzW1tbcf7u078CG1tbcEjlDlI8JrD3gEpZVtbW/lHfRK0tbWN8V4Fr1P+bgeHaCy/VA/jSMcM3rHSZ+F4n0QAAKYyOp8AAACgRnR3dy9fvrzozA5PZ2fnqlWrTs4R1q1b19HRUXQEfaoPUr7Xc3Nzc9HNhRAtLS1CiK6urlPSJ3rYu+Tfq2QyWdSUprW1VV8khNi4ceNI9zkY4V111VWll/oHKXou9K9YuXLl8T6VAABgjKgxBAAAwKRbvny5HINhY6kxKp8QdXR0jJofTdQRyjwKfZAyZXdtbW1lbl7+tpNk1Oelo6OjqEIwmOWVpn6+zs7OYW+iM8oyN9SHHaksUR+ZuBAAgBNBYggAAICqVxr2tba2dnV1tXqSyeTJP4K+ucoLHmHdunUjHcSPyZLJpH8HglfYuHHjqRpk/y5pwUfU0dFRNOc3eLdHSjn9ILJoeIMljfqX+sPY1dUVvPmoR9b3ROOvCQAAx0EBAAAAk+AEM5qiowWjomASV5rHJZPJUe9MMISaqCMELy29h0VHCN68dKCKbh587OP7AF/+oY1622QyOewjKnPHgheVjlXRox7p/JHu7Ui/tDTi5K8hAADjQ40hAAAAJp2OnEY1voN3d3cHa8qK1tTTVq5cWeb4E3KEYEA27Mp9wYm3wQm5pXFh0c0bGxvHUrI3ec9de3v7sI+ozB1rbGz0Q9iWlpbS++yPQFGBYXBkhn0ixljAWObmAABgVHQ+AQAAwKQbtd2Hlkwmx7GUYXCibplQr8wdmNgjtHjK3+eRHmYymTwlvU3KKJ+7LVu2rMyT7j/M0v4n/kVFPU+CI+M3jy7/7A87YuMOoAEAAOsYAgAAoOoF47nxxW0Te4QxGrY4rrRlcKVp8zR7pJRlGowER7JofIKLHgZLL4sWQxyLkao1Ky14BQCgulBjCAAAAFSKMiV7p1B3d/e6devGUf7Z2trqZ4Xd3d1+ijfSlOSJMkmHBQBg6qDGEAAAAJgwY++2US1FcM3NzcuXLx9HXFhUPxicuD3SlOSgYXutlBp20nTll2oCAFDhSAwBAAAwJZx4w5CxHGF8yVrFam5uDj6iZDLZ6vHjvFE7YvtX8IsN/anHyWQyGCkWCSaMAADgJCMxBAAAQHULTkEtsxBemQTqpB3huK5TCfy4UNdOtre3r/SMvUAyOM9aR67+lOTSSsDglceyNGS3h78CAABMOBJDAAAAVLdg8DRSH4zu7u4yCdTEHqGlpaVMjNXd3d3c3DzSb6kowUcx0jzfUR9IY2OjX2aoI1c/hSwtMCwKIpubm8scua2trUzfFQAAcCJIDAEAAFDdgsFTR0dHaczU3d1dPlqakCMEywyXL18+bBWhDrmqcebysMlg0ZzlkfiVgy0tLf7YjtScpKurq+i5KI1fdeo6jv7UAABgjOiVDAAAgKoX7Mnb0dEhpQyurzeWaOnEj7Bq1apgfNbiSSaTfnVehSRcY5wQXTT1WId3q1at0me2tbV1dnYWxYXr1q1btWpVaalg0XH0zkhFi42Njclk0r9ahye4HmLp7wUAABOOxBAAAABVb+XKlUVBUmk8F8wEJ+MIjY2NXV1dRaWIfuBVRCdrJ99I92fYAdEVf/4jGva2RUlrR0dHMpks7V9cOjJlep60t7eXVi8OO/LJZLJaWk4DAFBdmJUMAACAWtDe3j7SRFedWJWJqCbqCDo0LHMQHXJ1dXVVS86lH9GJjIl/nOCPo3ZYbm9vH0sX5tJoEgAATAgSQwAAANSI9vZ2pVTS45/Z2trqJ3Tls7wJOUJjY6NOu4oO4h+nvb29usridGgYzO/0Q1NK6QcSjBTLjE/wCGPJGVeuXKl/b9Exk8lka2urUmqMYSUAABgHqZRiFAAAAABMKn+i8bDTlgEAQEWhxhAAAADA5Gpraxu15wkAAKgc1BgCAAAAmFzBTiZ8AQEAoPJRYwgAAABgEnV3d/tx4agLQQIAgEpAjSEAAACACdbc3Lxq1SohxMaNG1taWvzz+fYBAEBVIDEEAAAAMNFfM6QsPbO1tZUGxwAAVMd/ykkMAQAAAEzw14ySxJAWyQAAVBHWMQQAAAAwwVpbW/39ZDLZ1dVFXAgAQBWhxhAAAAAAAABAATWGAAAAAAAAAApIDAEAAAAAAAAUkBgCAAAAAAAAKCAxBAAAAAAAAFBAYggAAAAAAACggMQQAAAAAAAAQAGJIQAAAAAAAIACEkMAAAAAAAAABSSGAAAAAAAAAApIDAEAAAAAAAAUkBgCAAAAAAAAKCAxBAAAAAAAAFBAYggAAAAAAACggMQQAAAAAAAAQAGJIQAAAAAAAIACEkMAAAAAAAAABSSGAAAAAAAAAApIDAEAAAAAAAAUkBgCAAAAAAAAKCAxBAAAAAAAAFBAYggAAAAAAACggMQQAAAAAAAAQAGJIQAAAAAAAIACEkMAAAAAAAAABSSGAAAAAAAAAApIDAEAAAAAAAAUkBgCAAAAAAAAKLAYAgAAAKCMnr0Hd+87tGvvISnkU8+/1vPGISGkEGLX3oPe5d7+G4eUchvmzmhYMEMopZS7aN4MIVTD/FlCuIvmzXzbhWc2LJjFYAIAgKoglVKMAgAAAODr2Xtwy7btm7dtf2rb61Jau/YfEdLwZufoTR57dTW06T+FE1cob0+4Q5+3XVspW6jspeefsXDe9MXzZzTMn/G2C88mQwQAAJWJxBAAAAAQOiLc/PzrPfuO7jrQJ6UpREgYppQyY
YmEqepMNdMSi2OO8ELCREjMCzn6tkvjwpKi3xF7UsqLDEW/Y+wZHNpRSr06IDOO25tVGUf1pl2hXOU6StjCtZWbveyCpZdeeNa177qsYcFsngUAAFAhSAwBAAAwdT3S+dx9D24QRrRnf5+UlpCmkDJiyjPj6rw6e1ZIzQqLmSFVb4p5ESdhCksWPjzn9gKfp9Wxp/7OgC12p73TlHgjLf7Qp/am3Ff6lNLpocoKJ71oTuK65KUN82ZcetHZDQvm8NQAAIBTiMQQAAAAU04uKDSju/cNCCMUNuXssJwZUm9KqHfNzi6MuHHT1SsUSikNIaRh6P2hHenNSvZOc/s6HwzyfxTC1YRw8zFi7lIhfnNYPnvYfXy/25d19g44XnqYVk768gtOv+wtZ12bfHvDQqJDAABwCpAYAgAAYKrwgsJfCzO6e3+/MMJCGOfVu29OuOcl3AVhd0nUqTOVjggNj2VZQ/8yTWEY0jCEjgu99HDoU3Q+LhzaUUq5rnBdPRVZBKJD13Fsx3HcvKEreqf5ZQ77bbE7pXYNimcPuc8ftHf0OUMXumnXHrj5z5qvTV5GbggAAE4yEkMAAADUuJ43Dj3S+fzDG363e39KGKGEJROWvHpO5tyEe2bciUqRsIQppWHmGJYl9WaaQm9eXDi06XzQiwhzm84N9b4XGg5t/qVCKMdxbVs5Qwpxocd29Lm5CsS0qzKueO6w2tLr/P6w84fDtnCzrjO4aG7shne9/dp3X7F44VyeTQAAcBKQGAIAAKBmbdn22sOd2zb/bs/uAykhQ9GQceUM50+9eceLI7YhpSGlZVmhcNgIhWQoJE1TWFZu0/uGkQsNdVDoT0kOZIK5Uz8u1Imh4+Quchxh2/rUdV3lOMJLDB3XtW3bdhzHth3vx6Ez85OX96TE7484z/XaP9+ZFkMXplx78JYb331t87LFlBwCAIBJRmIIAACAGrRl2/bPffXhngMZacbqw+bZCfHeOdnz6u2IVLMsZRqGaZrhcFjmNxEOF7JCvflZoa4xLEoGiyJCfY6mE0N/x3WF4wyd6txQb0q5jqPsHCc/bdnxN6UcITKu6s3Ix/dnn3gj88rhzNF0Rjmpy85f3Hrnx6k3BAAAk4fEEAAAADXlkV89e9/3u72sMCoN811z7eQc++yYXW8JwzBCXkWhDIUKQaG/hUK50sJgSqgDPp36Bfd1gBhMDP1lDf3aQx0aGkauJlHTR9CbbSvbdm3bsXNLHdr+bGXXdRzHzlcd9mXVc4ed/3w99ez+tHKyyu6/+X+9+7qrl5EbAgCAyUBiCAAAgBrR88ahT676zu7erDCjC6LGRdOc6xfaCdNdGHJNwwiHw1YkIqNRGYkUIsKixNCfR6ynEuvNzwr9xDA4JTlYY+hPW9bn66N5zVKOyRNzn8RloQLRm5us1zp01NC//SUObZ0bep1SXCGe2G8/8trgy4fTfYMp1+6/5aarr7+6cfGieTz7AABgApEYAgAAoBZ8/cFfP7zh97sPZCOW9d552eRcZ2nUiUhlmWYkHDaiUSMaFf4WzApDISFlLhzMZgtBYWlW6JcHBlcw9DbXdaX0QsGihQ71vr8MYjAx1DGinyR6B/cWOMyx9fRkb9KyrQsPvanKe1LqiX2ZJ/am9val9x4dcLJ9t3zsfTe850/IDQEAwEQhMQQAAEB1e+RXW+976PHdvdn6cDg5T71rTmZ2yJ1lqZBlhYNZYSxWSAxLg8JMZug0mBgemxW6uZUGXaW5rtcIWbdDVq5SOi7MncpcfqiEMLx9wzCMwJlSCCPYf/lYyvt1dr6ZcrDecOjUyw0dITbtTT+8feCVQ4NHBwca5sauf/cVt/75B3k9AACAE0diCAAAgGrV88ahT9z5bz29tmHF5kfkhxvsq2Zm6i1hmWY0GpU6KwwGhfpH0xSOk8sH/aDQ3/KJocrX+rleZuc6jpvPB3MfoP1/+5+tC5+ypf9jPj/Mh4amqc83pDRMU58e8wE9f3PXX9PQuwO2P0PZ23GFsIXYk3I27U1v2jv43N5+1+67IXnpLZ+8YfGi+bw2AADAiSAxBAAAQFX6+vd+/fCG3+0+kE2EreZ56rp5qdlhETVENBq1YjEjHhelcaFh5FLC0qDQO8fNZh2/ebEuKvQmF6sT/tDsz1mW+QxRB4iGRwYqDQ1vqrJ/daWU3wXFzndHKaxv6BUb/uGo/eBLR185NLjnSF/DnOgtH7vm/de8k1cIAAAY/0cXEkMAAABUnas/+dWeXsewomfVif+1OLNsum1KGQ6Hw9GoEY/LeFzE4yIWKySGSgnbFum0yGRyp4G4UGWzdnZIJpt1HUfng5P9OVkng4XQsJAoBkJDnTAahl4qMZcV6h1/oUPXtb3c8Fc7BzbtHXx852GVOXrLJ97HDGUAADBuJIYAAACoJg//6pn7vv/47l47bIU+eZq9YlZ2miXjpozGYoYuLQxukYhQKldCGIwLvS0YFDo6KDwVn411JmgcuwaiT6eKekd49YZ+blioPfSKDW0hUq66++n9LxwYPNjXt3hO5HvfWMsMZQAAMJ7PJySGAAAAqBb3fffRh3/94p7ezPyIXPsm+7SoGzNEJBIJ+VlhLFaIC4UoDgrzcaGdTmc8Tr6TSUV8NNc1hnrO8nDRYa5xip6nHKw31DteR5QjGfe5A6nvv3jold6jbvrIbZ+87tb//WFeOQAA4Pg+lpAYAgAAoCpc/fF7eg46kVB0+Wz5wQb7rKhtmWY8Hjf9acj+ZlkimxXptEilhk7zcaHKZOxMJp1OZ7PZ3BqFlfkZ3a819GoPgy2YRT5VzPVFCcaFesebofz8gdQ3t/W+3DvgZo/+9+bLvvyFW3n9AACA4/g0QmIIAACACtfzxsFP3PlAT68dDYU/vtR577xsRKpIKBStqzPr6oTedHVhJCIcR2QyuaxQn2YyShcVptNZ2/aiwir4DBwsOSxNDPWe67qOUsW5oVds2Jty/+PlQ+tfP3ik7+jlFy69+/O3LGlYwGsJAACM6XMIiSEAAAAq2cP/9cx9Dz3R05s9PWHefLq9NObMCYloJBKuqzPq6kQiIerqRDw+dCpELiXUiaEXF6pUKu3FhVnbrpwJyMf3kT0QHfpLHfrRoVLK8QsM/cTQKza0hejccfTb297oPXp08ezIg//6d4SGAABgTB8/SAwBAABQse594FePbHippzfz5unmzWdkzom5lmnGolGrrs7QWaHewuFcK2QdFOYLDNOZTHpwMOs4ynWr/VNvaW4o9I73g+MXG/olh0rp0PCJ3f0/eHH/i3sONcyJ3P2Fm6+49C28rgAAwCgfPEgMAQAAUJnufaDzvoc2yVB8xRzx/55uzw6JkGXV1dUZ8bjUQaEuLTTN3GKF/kzkVMpJebWF3nqFNfOJN58UFmYoe+dKfZrriOLnht6ODg0PpOz7ntmzacd+N3X47s//5QevezevLgAAUIa5Zs0aRgEAAACV5t4HOu996EkjFL9+ofjz0+yZIRENh+N1dWYiIfVkZF1jKIQYHBQDA0Obt+MODqYGBvoHBvSShbU6PoX51frf3qlh
GNLPEPPpohAiahlnz473Z8X2Pqfjvx5TTnrZZRfxGgMAACOxGAIAAABUmlV//x8Pb3jZCMWuWyg+utieZoloOFzoc5JIiFhMRKMimy1MQ/a2dDpdM9OQR6K8fFDq1FBngkqJ/I6U0vKyw6Ez/UFQalHc+uAfzxuw3U2vi3u+8QMh1Gc+dROvNAAAMCxqDAEAAFBZVv39Dx7e8LJhxa5fJP5Mx4WRSNSrLhSJhKivHzoNh0U6nSst9DZ3YMhgKmU7ztRZeCdYUVgoLQwsehi8Zl3IqzS01fYj9sYnnl68YOb5553D6w0AAJSixhAAAAAVxKsufMmw4h9dqq5f4NT7caFfXahnIg8MBEsLs+l0KpXKZDLuFFukW3kLkwsdF/rTlL180BDCFEIFQkOl1MK49dEL5kcs4+cvuLf9zVeE637w/e/lVQcAAIrQ+QQAAACVYtXdD3mTkePXLZQfXeJMs1SsKC6Mx4VSIpPxs0KVSmVSQ2zHcafwJ1tdbOg/fqU3pVzdAkUpR7dO9k4PZ5wfvrDvh9t2OKneH36r7YrLLua1BwAAggyGAAAAAJXgJ7/c/PD6F2Qo9q554iOLs9NMtxAXxuNDWzQqXFcMDor+/qHNm4mcGhjo9xYudKf2/wjX+aDXDyVHr3VoSGlKaXmnpld1aAoxLWy+79w5FyyaZYSnX//RT2/c9DQvPwAAEERiCAAAgFNv8/N/uPd7j8lw3bvmio8sdqZZIhqNRuNxMx4XsViuz4njBBcutL2FCwdSKWcqLVw4qmMmIXu5oZEPCs1Abjgrat14UcPlZ8wzIjNuvb11x84ehg4AABQ+UfDpCgAAAKdc8sa7dx9yr5xrfuYsV3dGjsfjlq4uTCRENCqUys1ETqeFnomcTmezWZdPs8NR+ZbKKv+jk5+ebOt9IWwhdvVnvrbpted37l08O/yD73xlyeIGhg4AAFBjCAAAgFMv+b9adx/MXjnX+IvTVb0lQpYVjcVMXVoYj4tIJFddODioN71wIXFhGTK/sqFfclgoM/RPhVhUF77x4iXnLpizY1//zSvXMW4AAEAjMQQAAMCpdNNf37e715kfD//lmWpBRIVMMxaLWdGo1Imhjgv9tsjpdCaVGkylsrZNXFheMDTUO4aUlhBWIDq0hDhrRuyTl54+KzH98adf/D9/9TnGDQAAkBgCAADgVLrzy99/6ndvJKLhDy6Ws0PCMoxoNBqOxQx/7ULX9bNC5ZUWDhAXjllRpaGRb4RiHhsanjY9+q7zGoxQ/fd//KuNT2xm3AAAAIkhgJOqra1N5nV3dzMgDBGAqWzz8y//pHObYUU/slS+Y7ZjSRmNRCKxmBGNimhURCLCdfWqhSKVUqlUOp1OpVI2fU6OR+n0ZCMwK1k3RYkZ8j3nzGk6Z6EZmfZ//vrzr+/YxbgBADDFkRgCAADg1Pjsl35gWPEzE+Y1C9xplohEItFYzIhERCQiwmEhZS4uTKfdVCrtFRgSF46DDOSGhhceGkXLGgoxLWz9z4uWvLlh3s59/Tf/1WoGDQCAKY7EEAAAAKfATZ/5p57edCIc+tgZ0pAyFAqFw2EzHJY6LjRNv7rQ1dWF6TRx4bgVQsP8j4W5yd4WEmJWLNT8Rw1mqG7jlhc+9ek7GTQAAKYyEkMAAACcbJufe/nJ3+42zPBdF4hLpruWYUTC4XAkIsNhEQ4LyxKZTG7twnQ643GIC0+M1C1Q8mWGUi9rGNhCQly5dOZnmv64PjHrwR/9YuPjTzJoAABMWSSGADBh/CUIm5ubGQ0AKOOzX/p3acUvnmmeGVeGEOFgXBgKCdsW6bTIZEQmk8lmM5mMTauTCf3ob3hlhoYuMwwUG1pCXNww882L5pjh+tZ/+AYjBgAAHxsAACeqs7OTQQCAUd346X/adSC9IGZ96mwppQwXzUd2XZHJiGzWjwuz2Sxx4cTIlxn6+8axqxmGhIgb8gMXndYwa/bGzb9pu/sfGTMAAKYmEkMAAACcPJufe+nJ3+6SZuidC+SCiLIMI+wxQiFhWUJKnRUKLyjUcSGTkSfy039wbnJ+x/DON/IB4pkz4yvObTCsROs/fOP113cwaAAATMXPDAwBAAAATpo72h6SZvyaxaH/sUQa+QLDXFxoGLm4MJu19eKF2azruuSFE/4FQLdAkcEyQ28z8nOTG8+Y8yfnLjHC09r+/v8yYgAATM0PDACAidHR0cEgAEAZn2399q79qfqIuWy2NIQIWVY4HLYsS1qWsKzcfORMxtG1hcSFkyRfZpjrnuzt+HGh4SWGC+oil58x3wjVffeHP3ts4+OMGQAAUw2JIYBK4bcNya2vJER3d3dzc7MMaGtr6+7uPq6bB49Q5ubDHqH8dfxDld6qo6Oj6G6f/MFsa2sLPvbm5uZh70Z3d/cY72fwaMMOY3d3d3B89OCPdMzS0S66w/yNAGrSU8/vlGbo7bOMC6YL0zBClmWZpmGawrKGLvbiQtfLCm3bpjny5PHjQiNfZijzE5P96PCyJbPe+ebTzFD9XXdTZggAwJRjMQQAKlNzc3NpyV5LS4sQoqurq7GxsfzNu7u7ly9fPuzNk8lke3t7rY5bW1tbZ2dn6dB1eFpaWopGL7jf0tKybNmykcbWP2YymSy6Tnd397p164p+qf6xo6Ojs7Nz1apV5Z+yYZ9uADXGKzDsP3tG/JoGQwphmablkZYlTFPYtshmlV1AXDipDCFcKZVSUgglpaGUKYQSws2XGbpCNJ298MU9+x/bvO2uL91z+1/fxqABADClPioAQMVpa2srkx8tX768TKngSHGhr6Ojo7m5uVbHraWlpXz0Vjp6ra2t/v7GjRtHOrK/f9VVV5WOdplf2tHRUf4pIy4Epognn98hzdCHTzPPTEhdYBiyLEPPR3YcYdvCth0dFjqOS1442WRuRrKRrzfMVR0Gygz/aG79DRefG4lOu+vurzFgAABMKSSGACqRLgbU5Wytra1dXV3BVKtMsKX5ceFIN+/o6JjAmcIqzz8nmUyqgJUrV578MdQPXCt6+EVx6rJly0pHvkhnZ6e/H3w4ReGs/qX+A08mk/5F69atG+muBqsXWz3BGwKoDX926z09+wbOrLcumJFbwdAKhXIFhoahE0PHcWxvYz7ySfomkF8Dwl8PItj/RLdAuWDhjNPnzjUi0+760j8wYgAATKHPCQwBgIrV1dXV3t6+cuXKxsbGlStXdnV1+ReNFGyVuXlRgNXS0lK+ULF66bxSP3Ct9OEHA9PGxsZgpDjssARDveD5wbiwq6tL/1L/nPb2dv/Io6a0/vO1cuXKGp42DkxNu/bs79k/KEzzw6eb9ZY0DUPPRzb0fGQvLnT9AkPmI5/cLwPSW77QX81Q7xj53HBa2HrPhWdEwvVf/PI/MlwAAEypDwkAUIlKFyscS7BV5uY6wAoGXuULFat33EaK21atWjXSrYJlhqXDMtKU5OBT0NraOuxKhcGCxGChYpGRbg6gNtxx17d79vWfNS10Vp0phbC8+ciWaUrTHLo4Px856xUYMh/5pPLLDPOlhqVlhufMqT9j/mwjnPhi65c
ZMAAApggSQwCVqLS3xkTdPJiajaVQsboUlfgVCV5UFN4V9T8puuFIU5KD2WJLS4scgX+dkRYrTCaTp2TiNoCTY9eeAz37BoRhfug0K2EpyyswNE3TMAx/PrKt40L6I5+i7wP++3VRjaHeFtbH3n5Wg2nFNz7xNMMFAMDU+YQAABWnqLfGBN58ShWyteU1e4LhXangpO+i+k0/6StaD3EckeuwlaEn+HQDqHB33HX/rn19Z04LXTjdqLekmY8LpWEIpXLdTry1C13iwlPi2M4nepKyEeiCYgnRdO6imdOmdT/57Hce+B4DBgDAVGAxBAAqUHCS7DiUL1hLJpO13Zm3ra2ts7PzeB9jMEvduHGj/2NwSvIkVQKe4NMNoMLteqNPGuaHTrcSIWkYhpnfpFdg6AbiQuYjnyqGlI6XDyohpFJFZYamEPVh679dfM796w8++NCPP/I/P8yIAQBQ+x8PGAIAqBnd3d1SypaWlmHjwlEbEPslhMG2MP6U5DI3L+oNXcawNZ6sYAjUsB91PL7rjaNn1FtnJSwphGkYpq4w1AWGgazQcQkMTyV/NcNh5yZbQpy3YNa8GTO7ntza3f0YwwUAQM0jMQQw5ZxIgWGZ3h2nXHd3d7BzcdLT2tra1dWl07pRGxAP2//EH64yc4dru2YTwIn40c+fFIb1wTMidaYypDS8uDBXYOi6ruO4Hr18IXnhqf1WUFh/Nj8xOVhmuDARPW3ebDNU93d33c1wAQAwFT4bAMAUEpxjW6ZobqRGzJUcja1bty740No95XuhFAl2o9ZrFJafkhwcwOA1xzL4AKaCXXsOPPncq/OiVn3Y8Kcka1II5bo6L9SnBIan2LGdT4KnOjGcFg5deXZDOFrX/eRWRgsAgJpHYgigBo2U9xUVCR5vw43m5uZqGYFhywnHEtgFywy7u7vLT0kODmBwIvOwz0hzc3MlV2gCmAx33HW/MEIXzAqdEZeGEGaeIaXwpiE7+biQvLBCvhjozici3zrZPLbMcNkZ8xbMmGGE6754F/8HCACA2v9gAAC1Zvny5c3NzaUBVnNzs18kmEwmi4rmgmFZcHqv1tbWVkVzb0sfe3d391haGzc2Nvrh4MaNG/2HvGrVqtIrr1y5MpgkLl++fNhQsq2tbfny5cxcBqagXXsPC2meOc1KhKT0SgtNw9CxlJvrd+JRShAYVoJ8UCiHW9DQ9LbGcxsMI9L12CZGCwCA2kavZAC1qcPjz7Et7R1cGoEVzd6VUpa5+bD8LswdHR3Nzc36V+gFAcfXZXijZ9SrLVu2rLGx8aqrrvLv5PLly1tbW/X53d3d69atK7r/HR0dfqpY9MBXrVqlr+wnjMlkcqSpzf6VtRZPMpn0yw/HElMCqEk/6nh8194j9bG6y+eGDN3zxDB0JKW8AsNcc2QvLiQvrBCFoFBHh0oZgTMtIS5aPO9n9dO6n3iasQIAoLaRGAKoZSPFVV1dXcNGYF1dXcHqwtKbt7a2jjEC05Glf6uJvf+l96qxsXHlypXBZLP0troeMJgq6odcdLXSkSkzfbuxsbFo0Ioee9CwhYoAatWPfr5RSOvc6aGEKaSUpmEY+X4aujOyPx2ZwLBy6KDQEEIpVdQuWUeHiUho0expBw7G/u6LrZ+9g/8nBABAzWJWMoAaVJqC+ZLJ5EhxYdGc3GEPG5y5XGrUZsSTrb29faT7r3uhjHHpxqKIs3yBpA4Ny4zbqMMOoCbt2nu4Lmw1LgiFTWFIGezD69i267quF0u5xIUVRuZPgzt6M4WYn4i+aeFcw4p2MzEZAICaRmIIoAY1NjYqpbq6uoLJVzKZbG1tbW9vL59btbe3D3tDpdRYAi+lVNLjn6NnB5+0xz7S/ddp5sqVK4suGvZBBSPC8lGgP+Dt7e2tra1Fj10//K6urlGHHUCN2bz1xZ17D8ctedmcUMSQhixsynWVzgp1gaEQRIYVpTArOb+sYbDG0BTiT998mmFEup7YwlgBAFDLHwmYBgKgNrS1tfnzcHlnm8DBbG1tHd8ijACmss/edf8PfrH1PadP+4vz6kNChEOhiGVFLStsWUII13Ecd+jEdl0dIKKiuF7tp5PfbCGy+S0txIAQLQ89+srOl1tu/hgTkwEAqFXUGAIAinV2duqd0o7SADAWm7a+JKRx4eyIrlbT1YVevVpuJrKi50kFK52P7NcY6qbJl561yDCZmAwAQC0jMQQAHKO7u9vvWzLGdQ8BIGjXnv07dx8SQp4+3ZJCGLrnSaBLsp6S7NDzpFLlFjEMTEwO5oamEGfPmxkOx7sef4qxAgCgVpEYAgCOsW7dOn+fAkMA4/Dksy8KKRcnrIgMrIinV8NRSriu4y1hKIgLK1b+Kcv9dGyxoSnErHj0tHkzpRn9znceYLQAAKhJFkMAAFNcW1ubEGLZsmUbN27s7Oz0CwzH0vMEAEr96GcbhTBOS4TCXu5kSGnkJ7q6uXYnTEmudHKEWcl6f259dHpdTJqhHTt7GCsAAGoSiSEAQPh9ToJ0e2UAOF479xxUUp4/J5IIHVOqpoNCXV3IhOQKV+iYrNNe7+nyA8REODS3Pi6NUFf3E3cwWAAA1CJmJQMAhtHV1cUgABiHnXv279x9YEbEWhw3ZaAwTadOflzIQFU6KYUX9pZ2QdFP69vPWSINa3sPNYYAANQmEkMAmOqWLVvmT0BOJpOtra1KqcbGRkYGwDg8tfVFIeSMqHnuzFAwYxJCuILiwmoih4sLRT4xnJeIS8PcsWMPAwUAQE0iMQRQI1auXKn8tbFwPBobG9vb2/XQtbe30+0EwInY0bNPCOPNM8NhQ5eo5UrVhLeKoZ7c6uopygxWxZPH7ge3GfHIwpnThBH6zgPfY6AAAKg9JIYAAACYME9tfVFIsSAR0nGhvxxeLijUUSFxYTUYdj5ysBHK2QvnSmnR/AQAgJpEYggAAIAJs2tPrxCqIRGSSuVaZ0hZCAqVcvNtNFDhCiWi3lYaGp41f6YwzK7HNjJWAADUHhJDAAAATBidE9aZUhy7+J3wF46gwLBaHJsSipLEsGFmQhrWdmoMAQCoRSSGAAAAmBg79+zftWf/jKg1PWIECwx1UaHyeiXruJDQsCrIY0+LEsOwZSWi0R2v72agAACoPSSGAAAAmBhPbf29EKIhEU5YhdnIhbhQX4kSwyr8thAMDf2dkGFGI5aQZlfXYwwUAAA1+RkAAAAAOFE7evZ5caAsviA/H5khqi5yuH0/NJwZj8yZlhBSbn99B2MFAECNITEEAADAxNi954AQoqHeiphSBvJBJYSrKw0Zo+olZdE85ZAp6yIRKY0dO3cxPAAA1BgSQwAAAEyMnbv3CyFmRMywIQvlaZQWVi2puyTrdsn+mfnTukh48ZzpSuRXqwQAADWExBAAAAATJiTl3JjFONSYonUM/f2IZQlhbN/OrGQAAGoNn+cAAAAwMZQQsmTmca7vid8xmWGqKkURYdGOkFIY8vWdOxkoAABqDDWGAAAAmBi7du8fNhAkLqxqsuS06CKeVgAAag+JIQAAACaIEmHTmBE1S84nLqxBUojp8a
gQ4nVmJQMAUHNIDAEAADAxdu7ZJ4QKmd4nTN0OQyk9JZm4sCa/M8SjIe/fdD4BAGCq/NcfAAAAOE5q5J+UUjRNrkJjmndMr2QAAGoOiSEAAABGt2bNmkc9I11h5+59IyZLhIU1asG0aV5YSGIIAECtoVcyAAAARrfW4/+4YsWKd7zjHXrHPxVCWaacETGFUrruTAeFxIW1bccO1jEEAKDWSP6PLwAAAEbV1NRUpsBQi02bHwqFfnDbzYvCIiRESEpLypCUMj9rVTJ9tdq4Stne5nhbRgi9ZYVICfFSb1/L/f8+eODVL7V98dZbb2W4AACoGcxKBgAAwOhWr1496nUGj+w9cmDnwUFbSKkCBYb+/6Dm/1VXNXXsvhLiSGpQKLe+ftptt902aqAMAACqCIkhAAAARpefdzy6TD4XVEK4OmYKJoWEhlVFlewEL+pPZ5RQDaefc9FFF42lChUAAFQLEkMAAACMydhDQ1WaNNEruWqpY5/Wok16T+s999yjp66vWbOGEQMAoAbQ+QQAAACjOK7asUODtpgZLuSGSinWL6xF3vM79AwvXbJ4xYoVq1ev3r59u26PQ24IAEC1IzEEAADA8HRQ6LdIvuaaa8YSHWYcNzgHWUnpt05G1Snqdl1UYHikLyWE0E/tmjVrmpqa7rnnnttuu43QEACAakdiCAAAgGMUBYWrV6/W85GbmprGcnNdd6aGW/lOn092WF1KO9j4Z/ans0qJJUsW+y+VtWvXrl+/Xr9UCA0BAKheJIYAAAAQZYJC3/r162+66ab7779/1EMF5yMrpVwpDaYmV7PSLsnBWcn+87pixYq1a9c++uijhIYAAFQ7EkMAAIApzQ8KH330Ub0aXZkOJ9/85jdHTQwPZexcUOj1SnYDvfYoMKxeariGNrbjSCFOO22Jf7XVq1c3NTWtWLGC0BAAgKpGYggAADAVlQaF69evH+OtyhvIOoWl7rwVDJW/jiELGlaVoSfu2A7XResYHjh0tGj2uX4t6bnJhIYAAFQvEkMAAIApZHxBoU/Xjt12221bt24d6Tr7+7MZJUL5H3We5ArBxOSq488u15sfFLre6dF05mD/gJDiyisuD95Kt0DRLzBCQwAAqhSJIQAAQO07waCwyNatW9evX6+PVnrp/lQ246i4UThHCSHz1YXBfVQ+VbK5+Z3+dPZw/4AUYunShqJb6bnJSilCQwAAqhSJIQAAQM2a2KBQa2pq0msdrlixYthGKPv7M7aSQgqVr0oT+aTJzO+TF1aFYJfkorjQFaI/nTnqJYal9Iutqalp/fr1hIYAAFQjEkMAAIBaMxlBoabjQj/3uf/++5VSTU1Nr3n0mYdS2d19mVkzQrnOJ16v5EJWSIFh9VD53jX+FowO+zP24YHB009bvHTJktLbBucmExoCAFB1DIYAAACgNjzqafKsXbt29erVSild5DUhx1+zZs073vEOP/FZs2bN6tWrhRDr16//5je/qZTSP0qhDqQdJaUqabBb1EYD1aWo2HDngcNCqSuvuGyk6+sWKHp/xYoVSqkNGzaQGAIAUBW81nUAAACoWqUVhRMVEQbpoCcY90g5zCfJO/72Gz/ueKLx9DmfumiB5TiWN6XFEsL0mp4MnUoppDQoM6x4SilHCFspvWWVygjhb4NCfO2XTzz23G/vXPmplZ/+y5EO0tTUFEyZhz0HAABUIGYlAwAAVKXJm3pcas2aNRs2bAge3y8wLLJw/mwplBSqqMbQVcqUsrCUIX2TK57ynqaCYwsMXSF27T8kpVj29kvLHGT9+vVSSj0x2T+nqalpjYdBBgCgYpEYAgAAVJOTGRRqa9asWbt2bVE5Yek52uVvPe++f3tk/0C2N+3OsaSrlCuEE2h1opSSXoEiT2WFU8cuXFja/OT1fb1Dz+toz6Ruqx2seyU0BACg8rGOIQAAQBWY7DUKy/zetWvXFoWSIxUYCiEaFs4Vyh3MOkezTtEihn6pGqqCGq7ziZs/8+nXdivlGEIsXdxQ/jj6JVoUDq5fv541DQEAqGSsYwgAAFC5Ts4ahWV+e1NTU2kuOewKhr43LfvI9PrEe89Z8N/OqLdc1/JmIutTw1vH0D/l+a1Y5RcxTAvxyLMvP9C58cwlC57Z9Mtxv5BY0xAAgIpFjSEAAEDFOVUVhUWamppKM8oyBYba9e9Z3p929g1mhWEE69QKJYdKCcoNK5teelJPKndLZiWnXPe5V3dK4X74Q9eO5Wg67Pb7JvuoNAQAoGKRGAIAAFSKCgkKNR0XlqY5a9euLR/xLJg/Wwj3cCqbEdL1up0UZYUuz3TFKwSFXrKrn0d/6+1LHe4flEKWb3sSpF8zumY2iNAQAIDKRGIIAABwilVUUKitWbNm2OmioxYY6uYnynWe39t3NKNEUcfk/HWC6xuiYh3T8yQQGh4eGOw9etSQ6rTRFjEMGrbMkNAQAIDKxDqGAAAAp8apXaOwDJ3dDJvglF/B0PdHy/5HJDbtpouX/D+L4tK29TqGevMXMTS8o0lWM6w8bn4RQ0cvYqiXL1Qq7a1gmBbiW91bf7H5+f/5gfd97Z6/Pa4jl1m4kDUNAQCoKBZDAAAAcDKVBoVFnYhPrTVr1mzYsGHYuzSWAkPtLz92wz9/t/3l3oF3LqlXjqNntqr86nimLjAkK6xUauRGya4QjhD7DvcbUnz4A9ce75HXr18vpVzhKb2oqalpjYenAACAU44aQwAAgJOhYisKg9asWbN27dqRPh+OscBQCLFpy28+dtuXL1g899bLFsecrOl1TM7VGObLDE06JleqXIvk/I6uMUx77ZLTQuw6OvBX9z9suJmtj7cvWdIwjr8Ia9euHSklp9IQAIAKwTqGAAAAk6gC1ygsc1fLRDljLzAUQlx+yR87TrY3ld22LyUNQxem6c1PHHUjFP7vdaVx/YJQ76kprTR8/cBhodylSxvGERfqvskjzXlnTUMAACoHs5IB4ET17D24e9+hXXsPSSGfev61njcOCSGFELv2HsztDJ0jlHIb5s5oWDDD+4bsLpo3QwjVMH+WEO6ieTPfduGZDQtmMZhAzajwqcfD3mHdHHmkKLNM7eGw/vKm6//1+x1bdh9967y5lnRE/rZMTK5wrhCO189aHTsT2T997DevSqWuHHOX5FKrV69uamoadm4y05MBAKgQzEoGgOO2ZdtrPXsPbd72+lPbtksjtGvfYSFNLxw0h6vd9mo19J/CiSuUv6KXEq43/UtlLz3/jIXzpi2eP7Nh/oy3XXhWw4LZjDZQdXSlXiVPPR5W+dmgZXqhjGTTlt9+8q/+fsnsmbcvW1KvskZgYrJ17MRkup9UDtf7z1JhSrIQWa/hiT8r+Q8Hjnz5x+uPDBzd+tjPxldj6L+iRloucywvSAAAMNlIDAFgTLZs2/7wr57teeNIz/6+Xfv7pDSFDElpCCljMSMaFtGonDHNmDEt97U3FhUz6nPp4cL5ISFUKuX2HnL0NLxUWg3te9nh7n12KuUODtqptDswYAvlKtdRwhaurdzsZecvvfQtZ177rstJD4EKV6VBoTZqOjP2F
QyDzmv8s1hi+g1vmvee0xLStoOJYS4uFMIkM6wkbj4rdLxKQ38Fw3Q+MfzBU7/70RNbz1iy4OnHfnbir7ryf1PGkVMDAICJQmIIAOV8/cEND3c+v+vAgJQhKUPCiwgty1g03zxzqRWNyFkzzEhY1CeMRJ2IRqQZKDFU4tg3WJVbtMs/0ZcrIVJpdeiQnUqJ3sP2/l573/7sG/uz+w5kvOnLQ1/ZlJNumJO4Lnlpw7zpl150TsOCOTw1QIWo6qBQGzWXGXdw89m/+0bHhq3nzK6/44rFIp02vcnIfv8Tk/4nlUfXFTpeYqh7nuis0G978lf/9tP9hw8//NA3Gk9gVrL/d6epqan8lxFCQwAAThUSQwAYhh8UGkZEGGHLMqYljETCOGOJeeGbw3NmyZAx9BaqpyAbQ192h77tet95/VIZ6f3Jvdnq1f1dpXLr/LtCeTtCCcd1XNf1lozKvSWrfKq4fUd22+8HXns9NTiY7T2Y9dLDtHLSl19w+mVvOeva5NsbFhIdAqdGDQSF2qiTQ8ddYCiE2PT0b//ir+85c96sD50379x6gzLDCucGskJdaegXGGa8002v7f7/Oh4fzKSeeeynS5YsmpCX36iBIKEhAACnBIkhABR8/cFfB4LCkFdLaJ15mrV0oZlIGAvnG6YpdERomEN/hliWoUlpeH+kYejUrxAXDlGuq1zX1bGhCnAd13EcLzdUQ9fIUd4Kh0Pv0YMp99Ahe99++5XXU7//Q//Bgxlv6cO0aw/c/GfN1yYvIzcETpqaCQq1scSFJ5jX/OkHPtM/4FzzpvlXn5ZwUildY2gKEaLMsPL41YX6NBOsLvQSwwcee65720vXXfOuf7z78xP1S0edm0xoCADAKUFiCABiy7bXHu58fvPv9u7ePyCMUCxmzp1tvuW80NlnWnNnSK+GUBiGaZqGZZqmNfQnd5Ln5YX6HyOfEOYKCXVkqIRwA4KJoeM4ju1o3mXeH489dL6tHOXqo3k1iXv2ZZ7c2r99x0DP7pRwM66TWjQ3dsO73n7tu69YvHAuzyYwGWosKNTWrFkzlvbH4y4w1H74s651X/neGbOn3X7F0lA2JR1nmDJD6XWPoszwlCoUGHrVhW6wutDb6Tky2Prjzt7DR37y0D9f+fZLJvYv16idxAkNAQA4yUgMAUxpW7a99rmvPrLrQMYwY8IwY1HzkgtC555hzpppzpo59NXVNEwrZIVCoXA4bJmWZekTy0sLdXmhaUpTCsOQhiGk8L7u6ogwNxFZr14ohDu07zpeHOjkCw6DiaFt246r5yh7waE7dI7tpYb6bDV0mpvLfOiI/fqO1CuvpzZtPqhcW3klh7fcePW1zcsWU3IITGiWUWNBof/Qmpqa1q9ffxJqu85f8fG6RP2Hz1/U1BBzUym/xpDVDCtKocDQ28nks0K/S/K3H3v+F8/87owlC7Z0PzKxv3qMbZEJDQEAOJlIDAFMUVu2bf/cV/9z94G0MGMzZ5rz54YuuSCyZJFhWaIuLk3TsCwrHA57UeHQP+FQ2AqFLMuyhGUZliEMSxqmsExhGsIwRK7jiRr6ruUo4SohXOG4QuVPXX35MQFiblKyx3G9cHDoH32WzhD90FAXILr6347rOEMHcGzxxoH0q9sHf/NC3+49A/0DaeUMXnb+ktY7P069ITBuNRwUFj4CSrl69epRw5cTLDDUvnb/j7710Po3zZ/2f9660MqmDNcdvszQqzCkzPCUCBYY6tAwrZRfXZhRavfRwbt+smH/ocMPf//rE1hg6P+NG0t+PcZ59AAAYGI+LpIYAphqvK4mv9ndmxFmREpz2aWR888NLZhnRqPCNI2QJ6zlosJQ2AiHjbAlQ5awLGEawpRD33N1LaHjCNcdOi1saugcpYQKzkT2pttJ74tZvu+Jlx/qdil+w5PCkoaBxLA4NPSKDh17aHO9KsbBlPPq9sGNTx18+dU+5WRVtv/mG9993dXLyA2B44otaj4o1E5ySdcTm3/7v2//6pwZ0296y4Lzp5tuOu3XGAbLDI38xkvx5DumwNCLCDOB0DAjxA83/+4XT/8+lU093fXIhPQ8KX2xjTEKJDQEAODkIDEEMIV4c5B/urs3a0Wi9QnrzKWh5ZdGps+Q0YiwLDMSCYfDkWg0GglHclGhDIeMcFiGwiJsDG2WF/k5SriOsG1vc4Y2Lyh08i1M9FqEXhqo/F4mypWi8Eflag2Fq5QhpR8a6ipEoVc/lNLLI4eOazv50HDoB9df9VCHhrrsUDlDR3vm+SNPP3t41+7+gb5B1+6/5aarr7+6cfGieTz7wEimTlCojT0HPPECQ//m937rkW//YP3iGXWfuXhByEmZgTJDU7eTyq9maIxrNUMqE0+Em5+JfEyBod/2RKmeo4OtP3l0/6GjX7v7bz70/v82SXdjLC1Q/NcwoSEAAJONxBDAVPH1B7vufWiTEaqrT1grlsXOPdOcPUsaQlohKxyORCORaF7EjIRlOCTDYREyRFjmgkLbFllHZL2g0Bk6dW3bsZ18fJerDXQcJzfd2EsMvVJC3QRZBNb119/Dvfdf5fVXll63ZcNb/VDPXxZD55uFb85eOaGrw0HHDvRKcfRs5kJuOJByXn1tYMuzB3t6+np7+5xs3y0fe98N7/kTckMgaKoFhdrY48KxX3MsHyY3PfPCp+78v6FQ5NOXN5xdJ91MJjgx2SyamzwJ+R+RYhl+UOh6PU+y+axQb1khfvjUCz9+8nlTqi2//slkFBj6fyWbmprG+N2E0BAAgMlGYgig9n39e48+/OgLu3uzsbrwJRdE3/7W0PQZZsgUIW/WcTRIxqJGNCqiUoT8oNBLCbPZoS2TVbbtFKYJ614lhfTOcVwl3FxvY+V/kfa6HysphR8XBr+7Si8rNAw9Kc8onG8O/dGdmo/5cu41Tcn9Sh0d5u9APjd0XUcNppxXX+t/bNOBnt19/X19DXNj17/7ilv//IO8HjDFTc2gUDuukKW0wPAEPzSu/vK//eqx5y+cV3/jH8+27LQRaJqsSwv1OoanqgXKlI0UC1lhoMDQry5MC3E4nfnqTx9/sWfvTx68d8JXMDyRlyihIQAAk/vpiMQQQA3bsu21v/nqf/YcyBpWdOYM60+Xxy4635JShkJWNBKNxvyqQi8olNGIiBoiKoe+vWbdQlCYzahM1s5qdjbrLytYRLkq+KXan1+s/yWH/VrqzVKWRu6PV2soh65tCC9GzCn+Wusdw80XNfrdlv1KQ9t21NCOOHg4+9sXDm95tnfXrqOu3XdD8tJbPnnD4kXzeW1gqpnKQaE29nhFKbV27VohxOrVq8dy5DF+mNz87O8/teq+6Ym6my6Y98czrOzAwLBlhkaltkCpyUjRzWeFbj4uzPrVhV50mBViwwvbH+x6Jm3bmzf8aMnihZN9l8Y+N5nQEACAyf3wQ2IIoFZ9/cFf3/vQE4ZVF6sLX3lx5LKLrXjMDIeMaGzoTzwej8ai0XA0asSiMhoWUUNEvKJCr5ZQpDMim1XZrJMLCjPeHy04DVkFZiCf2PuxFx3K/EqHhjS9Wcn6j197
WFgK0avAkcJQQrdOdmzHW+JQR4jeWofe7tC927Mn9V+/3vvKK0f6+o42zIne8rFr3n/NO3mFYCogKNTWrFmzdu3aUXvR+h8LDcNY7Rn20lGNdM2vf+fn3/3xhlnR8KorF5uZQZXNHhMa5msMT6QFyknL9WojQLTzBYY6LrS9nieFGkMh9hwZ+MrPH991oPfH3/u/V15+yUn7Ozv2EJDQEACAyfq0Q2IIoPb0vHHwE3c+0HPQMczwWadHrr4qunC+YUgZiUai0Wg8Ho/HhraYEYvKWEhEvcUKlRLZjJcVpkU6befTwayXE2YKP3pdTHJLFE7uG7Qhg+WH+UpDfbb3ddXIleHoL65eWOjmFzf050nrhixKuerJLb0vvXz4ud/uU5mjt3zifcxQRg0jKCwajaampqK4sMwnQL12YWmB4agfGke9ws7d+9//51+Mx+P//c1z37Ewmu7rMwItUKzgaobevpFfyWFy32wn5/iVHyk6x8aFjhCZYxNDW4gfbn7hJ5t+c8Zpi374wD8taVh4cu7YGHt5B1+xhIYAAEz8hxkSQwA15pFfbb33oSd292Znzoy8952xpQ1mfcKwLCsWjcXisVxcaMXjRjws4qYIC+E6IusVFWbSKp2205n8n2yAX054Kt6qhSGNXAGiT5ck6qnMXnrozXtW+bgwGB26XoMW5brKzqr/7Ni59Tf7B44cXTwn8r1vrGWGMmoJQeHwbyFSrl69WucvI72JBc83TfNzn/ucTgyD54/ltqNe4ZFfbvr7f354fiL6kfPnLo04bjpdNDfZCNQYGhOXu1VI/+XKiRHdQFboSml7PU/0ZOSMUmnXzQrx1Cu7/3X9loF05sff+6crL7/4JL9oRy2JDSI0BABg4v9zTGIIoJZ87h9+/PCGl4xQbNGCyPvfm5g7WwanIQ/9E47FzXhcxA0Rk0I5IpPxigozKpNOp9NZLypMp3VimMlkvT7HqoLeKnWDFJFvDZArRJSBVb+kUMproey4Rbmh43VEGbRfebXvVxt27ew55KaP3PbJ62793x/mlYOqRlBYRrBcq/StrDQQ/PznPz/0Xvq5z5VvezLsu2L5t0r/0k/ded8fduy7aF78xjfPTPX1FbVACfY/OfG+ycd76zFe/xTmmBNFr12os0K3dD6yUkfSmX/ufOaZV3cuX3bJV1tXLWlYcDLv3jgSQEJDAAAmFokhgBrRs/fgJ+78t56DTjgcedtbYn/y9tiMGTJkWfF4PF4Xz5UWmvGYjIdF3BCmlxWmUiKVdtKZbCAn9NhZ2/Wywkp98/YjQz05WQam7uV+cpUqSgzdXGjoKkf17B38aceOl/5w0M0efX/zZXd/4VZeQqg6BIWjGnZ+8bBlg/5OKBT6G0+ZKw/7Y5kziy7a8vzLLXd9O2KFPvGWeX+UEPbAgJmPC3Vu6BcYnsiChsebx4165ZMQJp60ANERQuWzQh0aBuPCtOvaQvz0mZceevx5yzSeevQ/TnJcqB3v3GRCQwAAJvhLJ4khgBrw8H89c99DG3sO2OFI+CPXJc48w7IsGY2E4/G6IYm6eLgubsRiIm6IiBDZtEinRSrlptPpVMpLCv240LZt5aqqeW/MpYWFxDA3Wzn/1dN1lZcXqkCZ4dCOctSRPvvRrp4tW/f2Hzly+YVL7/78LafkOyFwvAgKx6g0LiyN/4oSwy984QtKKR0XloaJoyaGY88Q7//3zh+1bzpjWvhDb5qRcNK6BYo5woKGxngLDScq4yt/hRO5dDJuOCqdEvqhoW54klYq5bq55QuV2nN44B9/sWnngUP/1PY3H7i++VT9TW9qajre/xwTGgIAMFFIDAFUvXu/u/6+hzZJK37a4mhyRXzeHGPaNDMajebCwrq6OquuTsZD3jTkjMikRSqt0qlMKpVOaToudGxH9zSpwvfyXFpYqDXMxYbej0p4OaHOCpV/6jqu66qNT7yxvvv1wwePLJ4defBf/47QEBWLoPC46Nyks7NT/zhsPlh6ZjQavfPOO1etWjXSTXxjjAtHOv/pbX+4vfW7dbHIisXTrl4aTfX1Sccxj13QUIeGZRY0HEumNiHXKXOFCb/oBB/LqHRcmFvE0NvRcaFfXZhRatB1/6Xz6W2v77nkkou+ctftp/C/C+OL/wgNAQCYmG+ZJIYAqtq9D6y/79+fklbstMXR9783NneWZVqGriwcEq+rM+rqZJ0pQo7IpsRgSqRTWS8l9PJCfeoV3VVnVnjMO3o+JTx2jrJOFF3XDcaFfmiYyagXXzq4vmvn9tf2N8yJ3P2Fm6+49C28rlA5CArHYc2aNWvXrnVdtyjyKw0Kg6fr1q0TQtx5553D5omjdkE53r4o7Ru23vedXySi1vvOmP7WGTLV328qFeyCooPCMgsaTmz53khHG0cJ4fEeapIWWxzmucj3OdFxoRLCljKTzwr9AsPul3bc3/mMMI0nf/XgKf/fSE1NTeP4i09oCADABHy/JDEEUL1W3fODRza8LK34VY3xKy6J1teZoUjICwrj8bq6RLSuzozHRJ0UMiPSgyKVcnJ1helUajCVTqdS2WzWdZWopXfCQG6os0KRzw2FUsOGhq6jBgay//GTl57/zR43dfjTf3HDpz91E68unFoEhScydE1NTZ2dnStWrCjK/oKhYWksWFdX99nPfvaOO+4YqbRwfIlhmSt86euPPPXcy3Pj1gfPmT7fyGQGB3V1YSjYBcWbmOyHhsZE1NmNo/DwBJPB48oKJ3b5xSAnX1foSqknJmeE0HWF/vKFm//Q890ntvX2pf6x7bMfuDZZIW8F48j+CA0BADhB5nEtJwwAlWPVPT/04sLYO5fXXXlZLBE3I7FIoq4ukUgMncYSCbM+JmKuUINicEANDGQGhvTntsGBQb1kYQ0Ojcp9O9df0FVuPx8mDv2jvBBR+YmiZRlLlkxLpdTuNzIbn3xWOelll13EawynJB24ybN9+/bVq1fff//9N9544+mnn87IjH0AdU3WjTfeWBoU+lWHpf72b/92+fLld9xxhxqb0uMU3oFGvkmRZZec+7P1W/vS9qGMe3nDtKxtu46jL5KBTXlZYeENa7xOZFbvSI903L9l7Fnh2O92+WvqhQtd778COjHUBYYZIbJec2RbiIMD6e889pue3iPLr3zbJz/6/unTEqf8JX366ad/61vfeu211473fxusWLHitddeW7t27eke3hwAADjuz07UGAKoRh+//V+eeuGAYcXe+6eJi8+PxmNGNBaJ55YtTNSF43EjHhGRjMimvNLCwdRgKpUaHBhMeaWFuSULa/89PlBl6FcaCqFcMVKl4QPff+HFl/e5mSO3/fn1n6HSECcLFYUTRbeXXb169bCh3kjBnxBi+vTpt99+e0tLy3HVFY6ltLD8NZ/93etrv/rDeDT0gXNnnB1zRapfOI517IKGfsdkf0HDcSR/E1i4N2rB4FhqDEetYZzYdRt1ROgGdjJ+f2RvPnLGdfsy2e9t/M3jL+6QpvXELx9Y0jC/gv5rJuX69evH8c6gZ+iP77YAAExx1BgCqD5+XPjOxrrL3hrTcWF
dXV1CVxiGEwkjYQozJdIDqn8gM9A30O/VFQ4ODAzqVQun1v8skcdmh7lCQ/2jlELJQKXhwgWJrCP2vJHe+MTTixfMPP+8c3i9YfJQUTixho0LXdcdNTf84he/2NjYWBQXlhYGnmBiOKwFc6fPnFG3edv2Axk3ETYXxs1MNltYKSK/yELgDa3knIl6p5ygPipFdZ0ncpwyNzmubi06JVRSKsNQ3o4tpU4M9Zb17utz29/4+TMv20J+5a47rrj0wop6ba9YsWLt2rU33njjOG4ohLjppptWrFjBewsAAMeFxBBAlfnY7f+y+YVew4r+9/dNv/iCaDwmY7GYV1mYSNQlEuFEnRFXQg2K1IA7MJAaHBgYHBwY1LLprJ6UN+UEA0P/rKGv3So/iTl3aaIutHTx9N6D6X0HMj//5aOL5888/83n8qrDxPr/2XsT+EauOtv/3iqtXnrv9L4lhOwLZO+201YSYrMMhLANj5CFGd5j3pvhMW9INzB5WHqEATsMw/CHxzxgYJp1IBMIZIB2gLY7djpkXzrpLenVvW92txdZqrr3/j+6t1Su1mZZtqSSdL5RK+XSVrollXSPzu93IBQWg3A4vHLlyvb2dvVnRmUwm3r47ne/e82aNatXr07pcphyVxkfd+o/wFyw/LznXt17bHDk8Ij51kUNXiJM0zznaCUETbZbIGpZnk/iEDhlhbEw12H6GKbnyRTgQHT+OfFDa5qSC+3zcblQnhtCcEJePz74s6deG4jG1/3Pv/iLu+5w28u74NpkiIYAAABAwUAxBABUEh9b9+3ndp7S9MAdbTPecpkvENBsuTBx5qsP0iAnPErGosZoVEmFSbmQmazW+zBY8SdqEnnuWks2TCx7vdqqFbMCft+e/cO/+333musvX7ZkEV57YOpAKCwe6utcNrnQFgozKoadnZ2rV69et26dfVvnPU/lsJn/bS9/85ItL+yJGmzUEEtm+H0aSRUNk8KY1dCQWibpabcaTt39l+dd5S8g5tAHJzQecodQSKR0yCg1ktZCJRcyQoZj8Yf/tH3XsYGbV1//t//9IzMbG1z4Il+5cmXBqh9EQwAAAKAAoBgCACqGB/7xZ93P9Wve4G3Njdde7Q8E9GAgWFdfVy+p89UFNSkXiuhoXGqESa1wbGyMc05qvmur7dBx/mmVKCfWUuXioYT4vPr8eXVHjo4MDJr//vCjq6+/ctlSiIagQCAUFhuVCfv9738/+RY/p6zYKRfay3YKCiHkjjvuUAbDlJtnPZIU4aeXhrrA8oVz/vjU9gGDnBrjl53XoBHObNEwmfxuHaOkaGjJY8WoT87EZB9nshkm2QTEHDeZUFh0yoXqnFEakyqhpRhybgpxfGj0X/7wwt7jg8tXLP/OPz+wbPECd77O1UFjw4YNBdQmQzQEAAAACgCKIQCgMnhu654fPfbicFy/tblx9XWBujo9GAgoubCuri7or6vTgiZhUR6Nxiy5cHRUVSLHa7QSOX166fifsP+J8VQUQahdo+zzaYsWNhomOXI8tmXLn9puWzNzRiPGEOQPhMLSoIId9u7d61yZYjBM8Rg6646VwfD+++9PudvSO7IXzJ85e2bDs6/sG+bCJHTl7HoqGGOMOIUwh2hIVIZyCUXDrIfW6Q5Hdu6gfNTDjCZEZSokmibkiWiaqWkxlXbCuSEXGCEjceOHT766tf+E5gn87uGvu1YuVLS0tGzYsKHg7GOIhgAAAMCk0DAEAAD389zWN/7+q48ePh1fc139mmsDwSD1+/3BumAwGAwEg3WBuoAWiBNz1BwdHR0dGRkZHZHno6NGPF4Tmcj5z2zVGaXjaShyWdMo1ammE6rrVFeJpNqiRQ1vu+2CCy9cePBk9H0f/WT/wUMYQDAhPT09oVCIUhqJRFQEB1JKizraKgfWXpNeU5w7NPkrX/lKxkvL8nRuW33JX3zw5rE4eeZ4bOPBUX9Do+bzMULskynPhRBMnkTyvLwdJ7KFUE94tfSVGa9ma77OovJcd0gIkyEnnFKWlA4NSuOOYuQY5wbnA6NjX+96fuehUz5/8Mnf/avL5UJFe3t7KBQq+ObhcFjdQ09PDw4gAAAAQG7gMQQAVAD3rf/XIwP8mqsaWm6qa2jU/X5/MBisS1BfH6wL6AGTGNF4NBodk//GFGhcmJGk05COp6EkfTpqVuu8XsDvWbSgcc++ocPHBrZufeXP3/cuDCDICByFZWHVqlXt7e3pRZrO0uMUgcm58NBDD61evfrTn/60e57R+cvmz5pZ//xr/adi3O/1Lp/pZ8zM4BOXRy3lMRR22XKZvYYTHXsnk26c7frp3sPUq6kaZOUrTIYjm3YxMucxqRianMdN9h/P7np+7xGh+R7+fsdlF59fEa95dVTp6ekp+HcIOA0BAACAPIFiCABwO7ff/eWjA/zSixvedVvjzBm61+etCwbr6yz8eoBRcywekyJhLGaphTHOIBdmn5c6J5rUuUpV/J1TBNhY71uxfNaevUM7du3bv2/3O24PYQCBDYTCMhIKhe65556MX+SUxJYt7URdRwhx5513rpa46nmtWjpv5sz6l7b17xuON/i9Sxp9jLHEs1CbPX50skTD8fJk96uGKYfiqW1sBjMjpVxKhCKZc6LchaoMOcZ5jPO4fEkMG+bPn975wt4jTPN+7R8+fXvohgp65be0tEQikYJrkyEaAgAAAHkCxRAA4Gruu/9buw6O1jcGb2luXLLI4/F6lLtQNi8M+r0BTvlYfCw2do5iyBmHXDjBTNU5YaXjFh11JsS4f0c6Db3xON+97+zW115dc/1Vy5YuxgDWOBAKy04oFFq7dm2Ob3EpTrT0Q+JXvvIVtxkMbVYtnRcz+O79x3efjeuatrQxQARjSdHQefSy5UKhMpQrTTQkmZ5R7vXpV1PCIdc0LlVCoWlE11XaiZF0F47LhULEGP/5U9t7dvQzzfe1L376zne1VNyYrVy5MhKJFBaBooBoCAAAAEwI+hgCANzLc1t3P7v9WLA+cOfbZ5y/zOvxyLQT2bzQ5/f7vH5O2FjSVWgDuTDf2alz9ikDBWgyOlnTCdU0XaNUo5pGfX7tphuXNjed7w/O+pv7/8+BfjQ0rFHQo9AlhMPh3HLh+Ns8i/eOUqo6GLqWD73jmrdcdj7RPM+dMp8dYJ76Rt3vNwkx5Mmk1HS2NSTEamtICJeiWCXu1mydEEV2rEsdachc1iMz2cowTmmU8xhjY+rEOeP86NmRnz617aUDx33+OikXrs1sV3Q36rAzRd8DehoCAAAAE3yTxLwaAOBabr/noaOD/PprZtzR2qjpWiAYqLMJ1mm6Fo/FLZkwHouNjcXGYpALJz1HHZ+SJs45IUK12Gey3T6TC4xzU5w5M/ajf3/x9Z37brpq1aM/+y6GrnZQ8RqqcVh7ezskwvISDoc3b97sTDux38T2gl2JrOp57dwMe31nZ6cQwo5Idu1h85s/3vzStn0NXnrZLO3mhQEeHYrHYh4hdEI8lCbOCdHlD+C6CnGSYqhOrN8/aNV8X8+WsyzrjgWlRNetxoWJ568bUk6Nca
56FxqExOW+H44b/9b72ov7j/sCDQ+F/+bOd95MKa3Mkm7rB4yp/2gxXfcDAAAAVB/wGAIAXMp9n/7W4VOx2bN911wRIJR4fd6AX+Hz+X1Uo1IstFJOYlHIhYVORJMzRafNkFKqafJMo1QjmkY1nc6Y4V99wyrdW7/l+R1//b/+HkNX9cBR6EKyyYUpco8tA2nj7+TEuc1DDz1Ek9jXd6Fg9D8+svZtzVcMxen2IfLCIBfBRn8waGqaQYghhLIcKqchkz94KI8hSy5UTVJ+hghmKQ6qQGTh8aiEEy7rkeOExIQY43yM8yhjUc7HGGNCDERjP396166jA75g44+/FX7vO5pTStcrC/UDRiQSmfr9dHd3w2kIAAAAZPh6idk1AMCFfPMHv/u///5UfeOMj39k3qIFHo/XYxkL6+qDdUG/3x+Px2NjsVjiXzwmQTLylKajGWyGiXPOCOeMMyGYYIwzUzz7bP+v/vOlodNHfv2Tb66+6XoMXfUBR6FrCYfDkUgkh25rHwOdaSdSLBJOj+GXv/xlIcS6detSopPT78ctr8mnX//Bo0/PrNNWNdC25QEaHYmOjFDOPdJaqGyGym9oGwy1pOVQU8pplb0UpJ2Q6LpKQybJZGSuYpE5j0t3YYxzQy4IIQ4Njvz82dd3HhnwBWf88Jv/+/q3XOTUlB2Z+RXmywyFQtNymILTEAAAAMjwjQNDAABwIb/6w1bqCay5rmHBeR6qaT6fz+eX//w+r8cbj0uVMB6Lx424BHLhFKEpNkNNlbtRqiXNSqqnoU4vuGDem960UPfN6PjadzBu1QQche7fQbnlwnPe0Q6UJOT0GHZ0dDgvUitpFtzw3FtuuPCuO244NcL2j2oP7x49SYL1M2cKVXsrhJFsbmgosyEhPGk55PIKymxYJZ8QSijUdSHLkLmmCblMNI1pWoyQqGxZGJWnMcYMKRLvOjr4Lz1b950Y9jfM3vjvD11z5ZucJepOp2HFfZKqRoTT8BqD0xAAAABIA1nJAADXce/ffXNH/9AFq+pva26sC2p+vy8QDAYCgUAw4Pf7BRdSJDQssTAeNw0TcuE0zEOdocmEEpGYYFMqhFwjiJDnJBjwzJ/buGfvwPYdrxMzumb1DRi6igapx5XCqlWr2tvbCwiHTTk8fulLX2pqavrsZz+bbitzFimPHxncIR2uWjr3guXnvbTz6JBBjkeZpmkLZwQEESZjXB6l7CcpHNvKk8si+WTGWzFUHNJLSKQ4qERD6yR1Q5WJHJfuQuUxjEtX6YhhPrHz8GMv7xuI8uWrVv30W59bumh+xkicCm1oqA5WU8xNtu+qpaVF6Y/4sQQAAACAxxAA4Dq+ueG3z247XFcXuOaK+pkzPbrHY/kLfT6vx0sIUSoh5MJiQJNyoaxVS0oHGqGaNB3K3GRNo4sXNd7c/GbN29DxtW8fONCPcatE4CisLFTp5YS/8mZU+rRz+dKXvuT0FTodiLlthmU3Hl755kXrP34b1XzHYtqmo8YTJzgJzqhvbBTJoA+DkLgdpqxilJ2uw+RC5fkNNY14PETXidebOPd4rD+ldGho2pgQUcaiphk1TdW+MC5jkU8Mj/1gy87HXt43aGh3/tnt/6/zU4sXzOUOMoYvVxzqfTEt3kDlNIxEInBUAAAAAOhjCABwHbd/9MuHB4z3/dl5b70i6PHowWAwWJc4CwQDAb/fME27IDkeS5xxzgkOY9PHeFczQQQRnAkiBOcyLplZ58wUp06OdD2+7ek/vfqhP7v5G1/7EsatUkCPwkpEiRd5ShjOxGRnYobiC1/4AiHkc5/7nJ2nnJKqkbFA1fldMeP3xhJ/mfyXnz/18vZDPp0vDfDQEv88jzkyNGTG41oyOlnFKOuyy6GWDFCmco3yGGpJpyF1s+UwGYVsVSLbNkOVjEwIo9QQIi6EIfsVKoOhKkOOmezQ4MjPnt19cHDEF5zx8bvf/ZcfeluKiOxUkzMKxBV3ZMuYCFTYveWp0QMAAADVDTyGAAAXcc/ffv3Qyejs2YElC3xU07w+r9fn9SSRcqHUCqXH0DAMzgXkwmmeoianqeO1iWrGmphUkqTNkMyZU3f5ZUs0b/1PfvHbJ7f8CePm/uk0HIUVyqTkwvTK4hSR6Itf/GK669ApG6WfOxeyOQ1L7D38xAdvarnxwrNj5JDh/3W/sX3EE5gxK9jQwGWGsimdhnGH35DJmmXlN+QyHsTpN3Sb5dDKqtd16vGoE0k7cU2LUzqmApFlGrI6xaRcGGf8jzsPb3hq1+GheLBx/i++2/6xD9zqFI6ryWCoUEez6RL44DQEAAAArK8l8BgCAFzCoaOn7lv33UOn4retnXNLU6PX6wkEA8pdGAwEPV49FotbeqGUC1GPXDyE8gxxwYkQTM0w5STTtM2Gwozz32587Q+/f/7Gq89/7JEfYNBcCByFlU44HN68eXMBzql0p6Hq9UYIeeCBB9Sf6R7DlOjkjE7DbH7D0nsPj58e/s4jzx47Mdjooxc1kitmkdk0Pjw8bMbjmgxQ9sjmCnoyT1mTCzTpK8x2ImX6WFFpU+OmQnV+7oJIpiEbnJvJQGTlKzSTptHXT5z5/fbD2w6f1n3Bd7997X0fuG3RebOdCnK6wdC5pnJDk9U2T+NvIXAaAgAAAEg+AQC4hb/539/dceDMm1Y23to8o67O4/N5fX6f3y/zkb1eI24qlTBxbpqQC4s770oW6gmi8k4S01Uh5BlJriLE5/Xs2n1qx67XhTnWtOZGjJtLSAkzuffee8PhMMJMKo5wOByJRPbu3VvIW/jc2lJ1ftttt62V2Bel9yjM6FJMuX7GwtWMAlNRVaf6oO/ma1aNRM1Xdp86K/RXT8VHGbly8UxCqSkFNS5TUFSzBa6iUeQ5TdtK50kku7kWWzKjtp1QOgqtGBPVoFCezlmv60zX7XiTmAw2sXNOmMyDNjh/4vUjP39+39HhuL9u1sfves8H3rl64fxZ2ZyntoCYe79XFhs2bJh6BIpCBaHcd999CEIBAABQu7NCTLkBAG7guZd23X3/t+tmzLrr/YtWrfAqg6F0GAYCwQAhxIgn/4sbhhFnjKEeuagI5Q/iIjHr5lzwTDZDg7/4Yv9Pf/pE9MyhwaNvYNDKS4qjELPcSt+boVBoioYppz1QGQw///nPpxsJM3YzTLEZ5tPlcELL4YQXFcbW149+79EXBOc6MS6fpb2pQbypngyPjIxGo4QxLekxVD0NLb+h3d/Q7mnosB+q78ckaTmk1m8l1tZP6Wu3HS7lPElnn6VROq2FycaFJiGmFAQT50KY0lpoygVTGgsJIU/uPvbbV/cPjXGPv+Hat1yx/q/erbRC50M7S9Rtj6Gu684/K10xDIVCa9eunUY/BJyGAAAAahkohgAAV3DP33792R0nLrtk9vveObu+3uv3+wJBpRcGPF6PaZqGYUi5EPXIpcMWEFSxW+IfI5wxzoRggjHBTD50Nvbd7z2x5/U31v313Z+5/1MYtNIDobAq9+l0iRT2oVLTtM9//vOqi2UOKXCyimG2muWUQ3QJBMRf/GFb11OvN
WLJvvGjiKqikJBrChEVVVd1zVN02P/xVaqpqmqqiJVJQpBRMWKglQFEYJiD53OlCLKERO6fuZe5DjsJhAFoRhnCCkT5GDsRzCDTHKIDmloc4nyf8ooZdJg5GhzdN+B8Pad7Y2HOzs6o9wKXzp57H0//CroDXM0z87WuOl9PUXejKU8KDAsbaKwaxaYg+LIKfHrpc8ue3LlWVXl37pwpGpFFcZAZug1MKfaiWQMnXzkeJXkE4Zx75Mrmo6f+MG/fv3fvvNPA7a/AqTvOftJIBa2Xv+Amszkv503bt51y/d+66+ovOabNw0Ze0psku3XHIGhZcXzXy1kmoiDwLBwiMsMaWydmJhsIqOzbtuOl5c+1lS/6+W//u/MmTOgxUobwBgCAIABh9/9ZdULtdsbmw2k+jBWpl/iO+9MbWSV4vdzRVE0AV1A/C+2ia4RXcOailQVKUTYcGHhUMgRZSg2nLoXHpvqMI5cekLGhHgGi8As7mKIOGLIqZUid9h8YZxDTCYNheQwvrKocDxE4SjddyC87u1ju/e1c2pys/3bC6753DWQp5zNQCjraizv2zblR7njHV3JACEK83lyJdZv3PH/7vpl5aCKheePPLccc8NwM4YkXvmkz26GQDL2HycJDBEyhbTQIQ1NhJ7e+MHfNr0fMSLvrn6xsDVP0vMgkKHstZPSKwIRcpPz+WXJM2n45e/8fMOOI5d99qozpp3vEynJejwlGZEuuhAEhgVHvGiyrTEUickWMqXMMIqMaPRYy4t/+L8Pt2y8bMqEV156BlqstAGMIQAAGEAQOcgvN7aYqi82V5k4TvvYJb5Bg7Hfh1RV8em67vP5/X7d5gslUajpWNeRTmKLKig/xhGl9qzG6iIKJYlnlzCRFobc5UnIsSiCIv9xW2sY27BDZExQ3PTQrpqCCeI89nIWs4TckFqU8thDGrc9jGc0MxYb0WPj+6atre9sOXGosaOzPcys9tsXfuLaa2aOGVUFZ7+/42UO1FhFERrlWvHhBYHhgCIK3WF8HogV58zeu+SR19duO3No6LsXj7TC4WSZIZGLrD4PpGF+wcQi6yN3CQzjDoYG541t4V+8sq7uo+abb/jEQ//tXT4ObA2LoudxCESnjor7fEGOed7ORZ5Jwyu/+KPmTn7lVz438owJiqZrfp9MSVZ1xFhsSs0sW2DIGEJAURQUQmboYgyZFa9KIwwNo+EP1r/94tI/hY8fbD26D5qrxCMgYAwBAMAAwe8eX/3wk28RNRgM6pdO9Z0/SRsxPBaaqpqq6z6fT/f7AwF/DD7Fp2FdEIUaQTq2icLYNMYhCmMLsyizTiLv5H/SvpDbsbJ4FIMMaWVYi7Hd/XLEiQiTZeUU50kIIbnLCYOFnNCmJJ1KKVTkMMfWXbwhbzlhvf9B+46dJ5qaO4+1tFGz4/avfOr2r18P10CfkbsAxvsyw1wbGhZQYDgAicIctXkml8eGTTu/efdvVN33vUtHnxZCyDTdboZKQm5yDvg/oBTTwCEKJW/Y5WAYz0p+5u2dz23YRjB9980XvCkwzE+PDchFX5RAIB44cGDp0qWQm5yf9s8bafjs8vV33f/U0DFjr7ntJr1MCgw13Y9UH1KUrrk1FQvwEwUHF26GsvKJLJ1sM4aWWIwoCrf/btFPmhp233Hbl+/+wR3QYiUMYAwBAEDp47d/Wfniyp2NLWYgpF802X/ZhdqgwYqmIE1kHPt9/i4QsSA/dhGFTJRuk4vFTcuxF6RS9eeAsdiwKiobMyb5wPg6tiNOFyZGw5ItFJVTMMGka79i/0scxYVq0WEMLfdbcEqnUB6O0H37O9a+1Xyosb2jvX308MC1H7/8O9/4PFwPngo+i0JmmDuDsIIIDAcsUdj/s9nPM/XV7/7sg/1HL6wKfmnSUJUaJGXRZFEIpSAlUAYspcjidKElGUNXMrKkC49HjT+88c6mvXW//tm9N133yZK/zgGFgmODWFNT050CEZCLUTgPpOGVN93T0GRMrp526eeuJJqu+f26j2hCYMhR3MRQrBmF0+IBcEkauhlDatOFkjeMdKx/+bXap5+57Pzxr7z4NDRYCUOFJgAAACWMd7bt//eHXjzUbBHVP6TSf9WswAXnqRhjTVNtotBWFQqiEAd8yE+QHyMFIYsjw0CGzRVy06CGaZimN7NMOgAAgABJREFUaZ5sLOgmDCmnPCGotnWEacJSkaWMif1Pig3lfgULIpEo9h738zFWFAXLTD5CCCXEih0qHggRIqahADrnrIpTRvp37Gx9Z0tLfUPbQ4+81tDYfPvXrxszagRcG5lPpnOqVamurp4zZ45kr7zcCHPmzFkskPUXl7ZW+YlF3UThwKQSnOCwh0ihl8xghsf/862f/NYPH65r03a30fMG62Y4LN1g5aJwzkRKMhMPkXBsyGt81NOnKElKkcUZQxZ/SDmX6clWXHu4s+GjXQ1HFYJnXDq1iC71xYsX5622DyBbA6IcCuWg85nPfOb48ePOV88hEPtWiBnQ3TdF3hnNKWnYcLi54Ugr0kIjTx+H7MmtMKHAsSkypYhTxMSaMzgn3gC2h73YqMxjozQnBDlLbL867szTyocMXbP+XWitEr8WQGMIAABKFb97/M0XVn3Q2GwEQvqMqb5Lp6rBgKJpOBAI+P2BYDAQ8AfiXKFfRwGCfBgRhEwLRQVXaBjcNKlpWqYp2UJDUoamoyxklHEJxPtrueKkLEunQ4IVTLBCxDq23TWGY9exmHAuSyeLN2VnLlNqOQVSYu/u8OHI66uO7N3X2t7eNnqY7/5//+fLLj4PrpBMZtJ5UKkUixYm64aG+REYDnBFYUJT9BgWpj8dmZ+s7o78/aOvPvb8m1VB/d+nj6HhDmxZMiVZE2sl7mPoLH3sSvP1jSiBq8JyaQzlEj1ZYHi4tfN/3ti4q/HIr++/9/PXfaIYr3kgDYu3v3L3JN0pEIFAzM/o0B98+fb/3rDtUNWpE6/8ymeDQwbrPr/q0zQfUn0I43gyMkPMQgxKJHsHskijrTFkXKpALcMugWJEWg/Vv/LIY7u3vX3ntxbcfded0GClCmAMAQBACeLQ0WNf++Fjh45RouinTfBdc4X/lBEEY+wXhU2CwWBA/pCAH/s15BdmhYgjUxKFURQ1LMOWFAp00YWmyZir1nFOO2iC4/JDKTfErt2yMgqOk4cEIc5k4ZU4lenYKoptzhnf8E7Lh7tPvLfjI2603f7VT33nnyBDOR3ySeTlurpIjuK3/rdw7tLNgChMRkJ+fZpT2d2vejz7PR7w9pYPvn3v/wT8vhsmVX3sFL/R0UGSSqAoSSVQcs3N5ej1vU8p0iS60HRVO5Elkp/duPO5t7arCnp75TNjx5xSAlc+oDRGYYdAlDaIQCBmZYjP0VTkypvuOXTcmjp3xpSrLlcDAdXn13RF9YmaJ9zORKYUxebXkJLsJXTVP6GWSEyW9U+ikjekba1rnn9p7asvXw4Vk0sawBgCAIBSw4tvbH74yfWNLaama5+ZHzjrVK28jKiqKoWFgiwMBtVggAR9KCi4QkaRaQpdocGjEStqR
LEWoGxZtQ8MHH255+o+7enoTc3Sl6fT4GWXgJ1yWISujtMJgMPUXRgmF8bh8YsTjiWQykUjkUSic6NQxSi506obWXQcAlhYKpWhoVStzbgDEGN/dn3h4R99bB0+wxIm7P3PTXX91J56T3f+OxjLkojq15vgcXqC1yQ889PzTbTt7jibrzghdvfoGvUzXdDHnxD8858QHkHRqhRxicUsu9FoZ8knDx9jyZOk9NE0pFzKHaHj80NHNz7a888prZvzYR29c8b1vfBnf/shYsCoZQRAEQbJCOByORCLhcNhZBxSJRFpbWwvud2lqagqFQtm4ZgiFQhmplirCOuVQKNTY2DjqAPM8H/r0d1tf70omyKpqctepxtIS06fQkmBQKyuzypDLyqC0FMrLweeDRAKi0REFyIODfGgoGY1Gh4aGYjHbV+i2X5PbnRMdT0aQtiUSOzNM64nAuUJIdUBdXKEfifEDMfLya++8vGnL5ZecU1GOFcpupLW1dcmSJViG7O1gN5Y1a9bIe2a5qRcuxNrkBx56/oFHXxlI0AuuXbHsmkuCc0o0n+UuLKVQKiqRVWEttCuRo2bqrB+NQjxeLNbCsVi6oTOyUTqiyaF46gv4Tll2OgXStadr+7u7wYyvWH4hngSQUaBiiCAIgiBZYWy/uQLtYJi9jk72Qs2sm+G4GVHx6IbF1tCw+2DfHXf9qKef+XX/HfV8dUOyTmc+TQuWlKjl5cSWC8vKIBgExiAWG9GvMBrlg4MJoRXGYrGkYTDGXG494bLpoeURgXF1Q3v48ijdkABU+pSL5wc1VTkcYzv3HXr7nZ0oGroN2Ry2ra1t3bp12JnU88Fu3JiVy7aGGYy2OWDLtj0PPLJpIEkbzjrl0tvCvsqgnpYLbXehbFwYFVvqSdKSC5Nx4KyoD2ZiR5HhT9ERX0qHjLl183RN69rTtXHTloaaqmVnn4GnAsQJKoYIgiAIksVkYMOGDe3t7TITKESDoaz23bt3b1ZzmMwaH4pHN5TZZmtrq+c9lU8///rX7vtNd59ZHdC+eEpy5RyzSuVBv99fVqY4tUI53iQeF73uo/ZUEz40FB8clCNNDDH+uLB+fctpaD93fGF47HK6YBnsPzjXFXL2XP/8oO/to4n39vf+7vm2s5cuXlhfiydnl5xdccKJq4hEIuvWrcvxvpD3EXMjGspoa1+TuJkt297/ynd/1dOfqD198cU3XRWYU+oTcmGJo3chcciFUVGJHI1CdAhMA7DvGoySC8VNp1SsIGSUZqhq6twF844d6us7fOy537UsrJ2z7OyluHiIDfYxRBAEQZCsJyGrVq2yU8QCeuVSdBvbjTEb/1BGuhmOm5N7vr+h5xsaPv386w8+9kr30cT5FfT/hIzTg0ylNBAIqCUlNJVBiq20FHQdTFM0r0pv8TjEYvF4PB6LJU2Tu95UePILdykGpsVB+7n8kMk+howZYgSzIZ8DGADvH4v/cmfvix1HWKz/282fveOW6/HMnN9Tq5xwgl0L8USa+xcwbpNlt3Hdmu/09PO6paesuPODJdXluh+CJRAMWO7CoDgZDs85YVbXwkSieCuRxycd84RZ3ZqmnAqFpgFGkhtJbibBMMBIHD/c+8qzz7/90mYzdvSXP/nOiuUX4+Ih1hkDFUMEQRAEyXZyKM1uBRdzCSE509qyl8PI1+9h3TBnwm5esOXCFdX0riWJag00RSkRWiEJBqG0FIJBKCkBRbFGmqSFQojFzFgsKsabMFc2K5z5myWtG9q1yLZuyAGYEAotxdDehG5435bulj1HWOL43X9x6xc+uxpPznlBlr6iVui2neKGW3q5OTayd4suU1y3+p6efqg9Y8llt0Yq6+eqOpGn+RIFgsJdCEIujErR0IBYFOIxSCSLvRJ5fMYXDU1uGNxIghigDGbqSfz48ed/+uSuN7aa0SNbWh5b2LAAFw/BqmQEQRAEyTqhUKitrQ0A7rrrrgJ62ZFIZPXq1bm8TshSSz5n70hP1il7uKHhAw89/811bQNx5doa+PySZJUGPk0LlpYqpaVE+gpLS1N5JICzBhmGhlg0GhsaGhwaMkS/Qo/mgdxudAiibFl+WuqIw2Kig8WVgYEkbz9hvPTqW52dHddfsxLPz7lETjgJh8NYhuwqpBXdDT1DctPW0OW1yY1ffHDX/qHyutpVq/+0vGaOopFgUBgMFQgSKBEKWCzduDCWhLgwlCcTWIk8ASPuK9lxwdH81lIViaqptYsW9B3sPX70+LZtb9552424eAh6DBEEQRAkJ+FWmPXa2trcfFd/VAaVe8NFDkqlnL+Rx26ausQjk0EeeOj5Bx7fTNXgrQv4rXWszsd9uh4IBhWpEkrPid8PhjGiEjkWi4stWYD9CmeYDI5xGgJJXeGb0l3oNBtKp+Hmrj/sPcISJ26/4fLvff1LeH7OAViG7P4A7Z6Tp7SNZ/UludaZ/k/ffvSp1p2lc+aFGz88d8kCqtKAkAtLdChJy4UJoRXGAeJJ4SlHuXCKiCWynIac8VSQMLmRFAZDg5tJMA1Ixod6e39+738d3r/3ozde+f1vo7cMQY8hgiAIgmQZOSK5qampUDqO535epE22jXLSb2hn716aGRIOh5ubmwviAJsKabkwcG0N+fQic64OPl0PlJRYcqF0F+q6NeTEthYODUWHhqKxmGGaxXNTfFzRcPiTzuJlweKq4GASOo4nt7+7u3Nf+/UfvArP0tk+o+KEE/cHaPe8pFAoJG2P2Tufu9OZvmXb+/f85/NUKzk7cknogqVUV/0BCAYhqEOQQiDdu1BusYTlLjSSKBdOPVSknYacpIcnO52GHAgolBhJ88j+g2+8+dbC2jnLzjkTV67YDxz0GCIIgiBIdmMtGY627u84LhOVfF0eONcqN7/p2rVrPWPN80xDw6/82+NPt71H1cDnToXLKlmdj/ulXCi1QjnqhFK7X6HsXZiMpUgkEqz4Lm7t6SfyT/v3Z7KtYXoz007DIcZ/uePIo291mvFjf/uXt/3t5z6NJ+osvSXRWuhmXG7NlnfvsleaIJ2M7jkyr139rQP97KzwJedfe4VeGvAHiJx2IuVCzekuRLlwZtg9DRlAKiBwzhgw6TS02hqCkYBE7I0/bHzhiV/FTvS81vbEooX1uHLFDHoMEQRBECS7V/zOPnrhcDgSiYTDYXc6TVpbWxsbG1taWvL18tra2nLmkpO+Ejul90A+742Ghk89/9qDj2yiWsmtC+CjC4wqDfw+n+UutOuRASAeh2hUbjwaTURTJA2DFXH6KH2FfGRuSByPtqSoELK4wq9p6rtHYhs3bWmoqVp29hl4us74yb+xsXHdunVNTU1oLXRtvHND+8JJglR7e3v22u+GQqHm5maXxIvGv3tg1/6BksrK5bd+IFhdpvuoPwCBAAQU8APoQi50ugvjKBfOMEgMRwvgVsdbknrOUx+nNcWKqrK+g0f7Dh/d9tabd97+YVy5YgYVQwRBEATJIpFIxJmQuFzTWbJkydq1a/P42kKhUGNjYy4vTmzd0BtDUWSGWbjV1lu27fnmj34/kFSuryE
fW8CqVO73+wPBoCJ9hcGgJRfGYpZcGIuxaDQejQ7FYqa3BiLPBu54HNHi3oGu0MUVgf44bz+WeO53LQtr5yw7eykuXUawJ5y0traiVuhaZLxz+alS3nHM0iyUUCjkknixZdt79/9so7+s8vI7Pjh3UZ3qU6Vc6NfAT8AHkJTuQi7chXGUC2cBST9wIqIEkQ9WzOBWbbKm6YGAv2dP565d73d0vHfDddfgyhXvIYOXVgiCIAiSJSaqeIpEIqtWrXLbTTuXvKo8vgy7TrnQ76e68wA7KV0Hj376H9f39PM/qaF/FTLLVQj4fEEpF8o5J7ZcmN6MeDwWi8UTiWIYcjItLK9I+kKfi/JkUw4/cUxB6RlKPrKt+w+7u83Y0V/+5Dsrll+MSzcbsAy50KNzEZ7V3dAs5dpPfrOn37zs1g+cedVFVFP9ARIMQkDUI/tFdwWrfWEcEvHUI446mX2ESK0fE1XJkHrkzATDADOZrk2OQyKxf8eOZ3788InDnfd+/e8/fsetuGzFCcUlQBAEQZAs0dzcPO71fUtLS1tbm6tylaamJpdoTGvXrpUpd14WQQoshJCCFg3lGrq52m5cPvUPP+4+al5bS/98IStTxaiTQIAGhNUkGAS/H0wTBgdTm5hzkhhKEY/HUS4cRwVIFynbVcmUEEqIAqCIqmT5pDaoNV7YsDw0n/oqb/nzz3fu78alm80JRMo6Hugl6vk95bYQfFJkN0Pphc/4T85XzJWs+bv7u3vjdactqj8zRCjVNKLroPvAR0AXcmFCbglIJsQjugszEiHkH4SmHikhhIKiABWbooCigaovOGXJ4qVnUL3sW/f+J65Z0YJVyQiCIAiSrZzE2cFwFLL81iUNDV3ltgiFQm1tbSFBXl6AB+qUC7Gh4XWf/EZPPzu1TPvHM9hcHXRVDQYCajBIpLUwEADGZBmyHHiSEO7CIm9cOJWU0FmRTMdMVSaiPLmuPHhoKHlg0Ni27c07b7sRl266yI54HR0d69atK+guosWzs9atW1dwBeNZamsYCoU2bNiQrzn792947qmWd6gauPavby+bW61qij9AZD2yj6ROWZZcmHTIhXiHKCMBgsj+hUTUJxO7jwUBDlxswAmllXMqDrbv79rfvW/P7huu/yCuWxGCiiGCIAiCZIVRHQzHXqO7RNORbov169e7Z+nc0I690HXDwmpo2PjFB3buH6orCfzzOUIuVJRgIKAFg1RqhVIujMet4cjxeCIWi8XjKBeePCdMOw0dOeJw73v785V+9Zzayjd6Trz9/r6O9j03XIv+uOmdQnHCSQHR2Ni4evXqAhV2s9TWMI+3ML/ynV8MxOnic08/Y8X5RFX9Aer3g98HPgqa6JyQAEgYabkwAWgoz2yAsERDLmagiEfgMDw/C1hJadDv83Xuan/tjS0rl1+waGEDrluxgVXJCIIgCJKVHHLt2rUn/Z5Vq1Zlo8hoWq9Tdu5zW1IknSBu2I+FW6dcKJV3//StRza/e5govo828Lk6KJT6fD5d1iOncke/JReKxoU8lmII5cJpp4XSUJK69KeEyKpk1VGeXOFTwqfVlJdWPPb0Hx55/ClctKlg31HgnGMZcqGcFQuxx+uo+Mg5z+y5PRwO56UfyOov3Nd1NF532qIVd/wJEEXTia6DpoFCU2cnSy40IeGUC/Gsn+HYkAoJQCiRFcqEArXLkylQFVRt8TlnnLP8IqqV3vNvP8A1K0JQMUQQBEGQzDNRB8Ox2Usey4Flk353ttzKYzfDcXdTgeqG7m9ouGXbe1ve7qKK/69OJeFq5ifg13W/lAt9vtTGuVMujItiZAPlwhkkhtJRKKRD6tAK5ean5Nazam8/P+TzVXzuH/5lX2cXLtrkJ89IJOKNQUnFg7xD5o39lfG2hnJZchkstry5a/Pb+wlRz7z6Ir0koGpU14iqgaqCKub3WnKhmHOSTIJpYvvCLIQGOyzIZrcAVitDStM9DVVfWdmZl1yoB8o3bt7+jW99Dxet2EDFEEEQBEEyn5ZM3bUnm47nRdOR6a47rTHusRk6d6tM0gpIN8yXc2TqfPmbj3f3Jub41Svm8DIVfD6f3++nPh/oOvh8qaQlLReyWCwuDIaGaXJMHKebFo7aCBkWDdN+QxXgqkXVFy6qUfSKv/liMy7aJKcCnHBSiMg7ZF46DletWpXBeLR27dpcFj18f8PviOIvm1u5aNmphCqKLRdSUKRcyCCZEGN7jdSGZ/3sRQegQIZthgQoJYoq5p9QKRrWnhK64vprqFb60itbccWKDexjiCAIgiAZZvIOhmMJh8ORSCTHLYRkyy03Xwa4oZvh2JdkCwSF0t/QzQ0N/+mbP9uy41BtUP/iUlgUhKCmBgIBze+36pE1DRIJ2biQxS1QLpxVWkis/lSyy718tDcA0DUa9Pte7z7+XntnR8d7N1x3Da6bE5xwUrjIWWQe22vyrC7vCc3+DC+vQHITLLa8uev+h9v8pRWr/tcNpXOrVE3VfVT3iRHJFBhAkoGRhGTSUgyxfWFWYwNJhwVhMhTBgfPhTcxC8enKsYNH3tz6VseeHTfecB0uW/GAiiGCIAiCZD4tmdYFd+6noLhqOPIky7Jhw4Y8Dk2eJEkrLN0wHA43NzfnaxTmhBnjW+/991OvDiTIX56mrpjDfYriFw5DRcqFPl8qWRTTTnjCwkS5cLaJofyDwKgpKI55yvNLfafMK3+z59jmN7YtrJ1z7rKzcN3s0yZOOCncfef+kDfj07usts7IGV4GixyE3U9+4b4TCWXpFRcsFQNPdF3x+YnuA10FSsAASBpCKxRyoWlg+8JcxAYxAmXUFzgAE6IhKykt8en67m0733jjtU987KaKigpctmI5OvDCC0EQBEEyGVnJDGOrLHPLQUojB2IURHGW3WnRzYmoPTrGzemobLvmqiLKa//8X7r7+U0LfR+up4v9PODzBYNBLRAgwSD4/akUJZGAeBwSCekuTCaT2LswA3DOAORKMvHc5NwAMOSj2AYY//7G3S/uaq+vVl/f+ByumTwR2c3jkMJCnqU9n/NmKrLnIOw++dtNX/7OE0Qr+cT/+2u9pFT16/4A9QfAHwBdEQZDU/QulANPksDRYJiL0CDM5iYDzjhjnJncNIVkm7AeEwlz8NgzP3pox2ub7vjwVT/4/r/hmhUJ2McQQRAEQTJ5yT7jucMtLS05mGwrc6dCyXtd2M1w7HrKRFQ21HetaOi2hoZfvuehrqOJcp/254tgoZ9rqqr7fKqenpQp5UKRLCaFuxAnI2cMOTRZNrgXf1jDTxyDUAKU3LysITSvqvPgif9715eKebWcE05QLixQ7Js6nr/8yEhbQxl2sxrL7tvwW1B8Z191oR4MUIWqKlFVUFWgoh7ZYMO9C3HaSQ5Dg4wPoo+h3OTQZNnQkCqgqkogsOzyi4NlVQ8//gwuWPGAVckIgiAIkjGm28FwFKFQqLGxMXsNDWUTrpaWlgIqqXNhN8NxU6ympibbiOTOpoHuaWi45a1d//3U5oE4+dhC9ZIq0Cj1CxRZjKyqkBT9qxIJI5GIJ1Nw7G
KVwcTQUZUM6c5VMLI2uTygVZeVbu3q27L19ZXLL1i0sKEIF0qWIa9evXr9+vVYhly4O1Gen4vhl81UW8OsXoo88dtNTzz/lr+0fPktkUBFuaLrsoOhqoMiFUMjrRgmgZlYj5zL0GA3M0z3trVaGTLrkXGFkuNHeg/3HODJoZVXXoGLVgygxxBBEARBMpaZzNLIkG0jWCQSce1w5MlTIDfbDJ0HgNNv6M5XmAMf60n50j2PdfXGVszXPrIgdSWq6SkUVU5mVKRWaMmFiUQykWCMYc6Y8QRAqoPSZkhHbnJ08tK5JVefUU+10nv+7QfFtj7SWihrPNFdUdBB2Q1nvNyHoVn+1lm9FLlv/W+oop92ydlVC+ZTRdGEwZBSYTAkI+RCNBjmBWJ5DEWUINSKy9bQZKVi/ryzLrlQ85c9/OiTuFbFc8GAIAiCIEgGyIhOJAuL5DyNzCLlwkLMnVxVTjvFhE1edrtwteVi5lGBffJ3m7p6o2W6elMdKVNB1TRd0zS7LI0xKReaiURCuAtNxrDpdjaSQpo2G5L0h3ZVsnxeoatXnlrn95Vt3Lz9G9/6XvGsTVNTkzxbuqrvJzJd3N8GN3vI33o2FxIyeGU8Ujzx25e7Dg8CVS+49gogVFEUqqTOPlQBQsE00sXIcjgynvjzEBnkDSWSCg+EEEot9ZAoQtZVQNGWnLO0an5N58G+r9/zbVyxYgAVQwRBEATJTJKZqU5J2ZjqKIXIArVahMPhVkFhHQ/u1A3z3tDw/g3PE0U/t4IsqwBKqaYoqqpSKRcSIg2GTGiFhmHgcOSspoayoaG0GVpOw7TlUEqHS6tLb7v4VKqVvvTK1mJYEttayDlHrbDQKZL2hZMH/dkEoLVr12b85uUTz71CFO2cqy7Sg36iUJraQG4ms7RC0wTGUhue+/MTGcQtJALEqlK2BV1i2QwVX2D5B8NULdn48hZcrmIA+xgiCIIgSAaYZQfDUaxZsyYSiWSqi5CszFq/fn1Br/CGDRtc3s1wLHZ/Q5l3uUSDyGNDwyd/t+nJ32+vLfE3nqLV+UFXVb+u+3Sd6jr4fKlMMZHgRoqkYSSTSZQLs5sZCo8hH9nBUGKvu6ar3ScSW7bv6Niz48YbrvP2Obytra1AjdjI2KgXCoWKfFfOsq2hvPzIYB/hrgNHvv6Dp/1lFZffHA5UlCmqquqqqkNqU4GZqQhgmtYTnI+cv8CQjgEEgHNiDVF2NjQEYMnO3Xvfffvthpo55513Lq6Zt0GPIYIgCIJkIDnJuJehpaUlI7f3C2s48iS/RcHZDJ0v3m1+w3y197pvw/Og6Dc3qOdWgEKppqqqplkGQ9OUozFN0xSaoYHVyLnJBIisULZthiMbGp5aVXr92Yv1QNnDP//1vn2dXj2BE0JWrVqFZcie2aHF1r5w8ugz49XIbG3yl76xHhTttIvPrqqbD5RSRZH98RQKJgeTic1MPTLsYJhXpP2cpB+suclU2gwpKErF3Hn1pyyhWvBnj/4Cl6sYrhMQBEEQBJkV2Zh0IatHZykaeqmRU2F1M5woc3OPbpj7hoZfvmdD95HBU8qUD9QQAqCKemRVVYmmpZIQoRgyKRYK3RAFw5ykhpa9UDatIg6t0B6BctHC6gsXL1B85d/41nc99tvjhBOvRuRirkcey2zaGmYw8nYdPAFELZtbIapeiUKp7IwHNO0rFNZCZuIey3tcAKBSKHQqhun5J5T6ysouWLlc85e9+OqbL774Ii6Yt0HFEEEQBEFmRTYMhvZPnmVDw0IcjjwRspuhB44Wl+iGuW9o+OpbnaBoNzVopWoqV1RVVVNVRY7JZAwMgwut0BAeQ5QLc5kMkHQrw7FOQwUgQMk1Zy4sC5Y9/PgzHjt1RyIRtBZ6DC9Fvcwe7TNrayhXcvbR6onfvtx16HhFdeWiZacCJVRRZPtCQoFz0bjQBCNdj4ynfzdg2wzHmX9C1bolobqFC6kWfOHFTbhW3gb7GCIIgiDIbPOT7ClZa9asaW5ubm9vn0H+E4lEVq9e7ZlAHwqF2traZrYUbsMl/Q1z2dDwk3d9Z2dHf02p/+aFWrVOfKrq01MoqgqKIgdkGqaZlBZDxjBhzGVe6HwiV95+5Okv7Tpy4vDxAZ4cWnnlFYX+G7e2tjY2NnZ0dOzduxelJS+B7QsnP+HPrK1hRhorf/2+x7sOD9WdHjrlwrNVv0/TdE2nigZUA86H2xdiPbKLwgIh1s7g6YAgxV3Z0JCZYCR2b9vBk4N/9vGP4op5+WDAW7gIgiAIMpv8JONzjccmt5FIZLoumBy8sLzk+ZFIxEuXLnIHyRq6fO0sabPK9r9+zSf+uedo4i/ODN5cr2qU+v3+gK77fT6q66kvi8nIifSIZIZXpzmGcwbAOJebybkJYAAk01sMoG3vofuf3xTt33/s0N6C/l2lTI82NE+G4+bmZsxts3HOl60hZ9zhpOvAkU9+4b6uw7EPfurWxecvVTRd9/tVH9F8oOpgmMCM4UHJuANdExaAMw7M5KbJxSMYSTASkIynHhOxg+/t/sUPftS7f+ezv9ywcuVKXDGvglXJCIIgCDJzstHBcBQzqB71pFwol0K687yU4tpV7fmqU85BQ8Mnfrup6/DAkjL1A/MVCqCoqkqpQimh4kJU9C2U3QsZti/MC46qZKsKbWRhsgpw2eK5tZUVVC/9+j3fLty3G0448XY4xvaFU2EGbQ1neVHx6pvvdR08Vjm/urphPhBKFYWIKRqEWvXIpgmMo1botrCQjgzO+SdWeXJqK62snDN/HtUCf3zxZVwuLx8JeFmGIAiCIDPOP3MmzE39Dv8svQAux3s2w1GH09jnrjq6ZsY1H1/bc9T4/LLSP6kVBkNdl5smDIbMMAzpLRSqIRoM84VtMJRPjLTN0ABICJvh/+zqeuB/Nl1xwZJfPfmzgjtvyJsuaC3EcIzYKyYHo03xHTGb4PvJu77z6raeC69fedGHVhFN03x+1adqOqg+YBxMIy0a4tgTl8E5cJPxtM1Q9A9JWDbDZBziQztefvWJ//zx5eeHnn3m57hcXgX7GCIIgiDIDMlqB8NRhMPhDRs2nLSLn8wB1q1bN5t+Q25GdjMMCTz2q0kHpS1t5Ka9oP1PZ6+h4ZatuzY88VJNafDmhb5qnWiKoquq3IgYkSwHnZiMmSgXuiFFlH+kexpyAJb+PCPk3QP923bsaKiZc9555xaQMtLY2Lh69er169d79cRY5KBcOLPTPgA0NjZOsa2hfO80NzevWbNmuv/WfRt+e3zICH/8Br0kSBVNUVVFpYomxp4wq3chY6nniKsgdljgMhRwKSJaj4xVVZa+9D9t7Xt2fenv78Ll8ipYlYwgCIIgM0xRclwA1dLSMnn1qBSbPF9wl+MJv7k/rqSJQxa85ywHljbDbPxz967/NSjaB2rVJaWEEKKIemSrJJlJmTD1yIRciHphPpNDWY8s/xhvaHJtaeC0umqqBX/22C8K4jeSrqi2tjbOOcpJXkUGPty/Mw43Uz/zy2+b7o3SJ36zcX9PX9X8aqqqQAilqf+pKGzl0sUG1nxkP
P+7LyrAcEkySZckgyKeKEAVxe8/ZenpRAv86ze+iavlVVAxRBAEQZCZkJcUpaWlZZLeQ7KLk+dr7uQvmDN3Zx4TuRwfZllqaNh16AQhyrI5auq6k1KqKHIjqSyRmUwgPIYoGLohPYR0Q0OnaEiEYliuqx88a7HPX/Liq2+++OKLLv9dIpGIPCV6tUUDYu9obF84y+uKqbc1nMEdu81bdwOlFfOqFE0FQoVkaJ1rOAPG08Y1BigZujUqkPRIfdnNMK0eUgpUrT9lCaW+Fze+ikvlVVAxRBAEQZBpk3uDoUROQRn3yr6pqSkHE29dgrdths59KkW03AxFmcGMnZPyxG83dR06vqRMW1YpZp6kDYbyApQJxTC1GQZHvdAduSFxQEfaDCnAGfPKQ/PnUDX4woubXPtbtLa24oSToorFaDCc/TKuWrWKEHLSO0byDTWtBX/ljfeAkAWnL9SDASp9zJQAGa5Etsae4G5wbVwQtnPLfT4sFxKgClB10dJTdV/ghZc340J5FVQMEQRBEGTa5LEGatyGTcXWxakYbIbOnZsz3TDjuvN9634NhC6rUqgwGFpyoSh6ZWI4soRxFAxdkxqO9BiSMUOTrzkr5NODL77kRkeJLEOWzRlQRSqGc2OWeikU52LK+5EnXc+TNkhx0nXgyP4DRwNlpeXz5qROL7IfhXCqyWJkJuuRORoMXRsTrP9FJbKQjwgVQ5NFbbJCy+dU1SxcSBT/Qw89jMvlSVAxRBAEQZCZXFjn8QW0tLQ406TizJqKxGboPOpyoxtm8HDasnVX16Hjpbq2fL6PyJJkoRbK6iZ71IkFnllckxs4nYbUUZgsRcNQdVl5ackLm153W2FyU1NTJBJBa2GRINsXYj1yxqPMVM7/UjScys98detuDiRYVlIbagAqzijpPqk8XYyMcqHbGRYNabqzIbWaG1LqLykpLS8jitrZ1Y1L5dWrAgRBEARBpoEbmqzbLefkcOQibNRVVDbDURldtnXDTDU0vHfdM0CU08uVhiChYuYJFYM1KCGycWG6hyEKhq5KDocnn8B4fsM5fl/9nAqq+f/44ssuecn2hBO0FhZVIC6Gvr25ZyptDadem/zL5zYCodV181W/jwAFOiwXMtHEUJ77MQC4PizQdFwgo2ah+ILBObXzCNHc3KoCmQ2oGCIIgiDINMi7wdC+Xl+7du3dd99dnHKhpNhshs6DMKu6YaYaGnYd7OOgLJ+vz9HTIlRaijJNkzPGADjnDOVCtyWH6cexhckUYE5Qv/rMEFX8L258xQ2v1jnhBPWj4jkHFk/f3nwt7+RtDad4Y6mr5ygAWXjWKbLznYwA0lPImTXthKNkWAgRIe0uTE9PljoSIaAoyy69iCjKCy+9gmvlSVAxRBAEQZBp4AaDoX1Nv3Xr1lAoVLRJcnHaDJ0HQPZ0w9kn5KJ91TFCyMoaUZKcdhdKg6ElFEqDIWaLbssPHYzqbChFw0uXzCeq78VNr+X3deKEk6I99bknEHt4kSdvazjFG0v7D/QCgeoF8+RphQPhdkmy+IbUI8P1dnlISE9Mlh9Y3QzB0g2JUlpRQYhKqLZv3z5cLe+BiiGCIAiCTO8a2iUvJhKJrF27NhQKFXPiVLQ2Q+cxmSXdcJYNDV99czdQurRSL9XJKMXQbl3IpG6IHkO3ZoijhEL5XAHwU3reohqiBf71G9/Ky8vDCSfFTDE763McXEY1TR77DZPftNt/4AgAqaqp1oI+KTlZtyHEV/HcX1AhwSkXjqhKBkJ8JcH59XWEan984SVcKu+BiiGCIAiCTCNRcY/BUFrAMtVyrkApcpuh83iQWnZmdcPZHF1PPPcSAD29QiGcp4doiBZWUiu02xdiC0N3pofpR2dDQ2dt8tn18yjNT2EyTjgpZuStMtzvOYuwk7c1lD7Eif765q27QIw9UVSVDDe/E1XJwlsunYYYAwokKlhyIZG6oWP4CVBl8emnAFU79+PwEw+CiiGCIAiCTDVNdYnB0Gn+khf0kzcp9zarVq0qcpuhfVRkXDecTUPDrgPHNIUsrdItg6Eck2y1r2JSNGQ488S1ueHI2uRRfkMF4PSaap/ue+Hlzbl8VTjhBM9y2L4wX8s+bltDGSPG3SOtra2vvrETAMqqKjRN42CfS1JYI5LFE6RAIoIsEhhlMxROQ0pr6+sJUV7YiB5DD6LgORdBEARBpkIkEnGDl022cNq7d6/9mVAoJP2Pa9asKcL9Eg6HGxsbw+GwXIciJyywj1jbhjmbH9je3t7a2jqtn7Nl6671j7fUlfqvaSiZ76MqpaqipB4pJQCMMRPlQrcniFZib6fzst0kExsHSALsOHi099jxhXXV5513bm5OfRs2bJDyBL7ZixAZ+9BRnq/IAgCNjY1jY0o4HG5ubg4J5GdaW1sbGxvb2togWN91oK/utFDDmadSTSeqShWVKMTqY8itySdIYSBbTjJmyb2cAzPFxGsDmBkfOPHWps3te3Z/+R++gEvlMdBjiCAIgiBTylXcYDBsbW0dt4WTvP9XtHcBsZvh2OPBrliXz2f506bb0PCXv9nICWko0xuCCgGglNJ0ceuwtRC7WLkbMvJx1FbmUyuDAaKonV3dOTjvSWsSliEXM3IiNq5DHsPKRG0NZayRY1IiAinsdvX06kF/zeJaoqow3MDQuv+AZ/8CiwcjrIWQ7mNojU7Wg4GS8jJCFBx+4sGdj3d3EQRBEOTk8ZK4ImISQiaqAJKesqKt2CKEoJowUZpnZ9ozPjZkNejUV/gDH/vy/kMDd55R+YnTyzRCfKqqK4quKBqlnHOTMS5shsCxIs3FcG4CmJzLzeDcAEgId2ESIArw8JadP9+4ZcWFpz37zM+z9BLkPRIpSeC7u8jPY8V8V8xt+0J2BnB+csmSJe3t7aO+8/SVny6rnhf5s4/ULD2N6D5F06mqE40SIttTyNMMrmihBATghsFNk5sGZwYkDUjGIBlPbYlYb/ueXzz4o4Mdu379i59cddWVuFxeAj2GCIIgCHLy62M3WBtkx/dJUqZinoKCNsNJjl4pdku/4cwOj+k2NOw60AuELKv2yyaGtjFteNqJeEmYKrqadGHyWJuhTCGuOLWeUOWFl17J3qGLE06Q2c9tRzK+O0a1NWxsbBwrFw7LDZptMCTyvM+xfWHhBgViP5cxgkjjYWl5WcWcSiCko7MT18ljoGKIIAiCICfBDSOSp9LxfTZzKgqdcDiM/a0mP36kSCcLx7J0BEr2HzjCRQpxepVGxihN2L6wkNLDiauSCcC8iiAhKqFaxsvQcMIJ4jwYxu3FgeQ3oMgpyVLWX79+/bjfNtR/oKS8dO6CWtlVYOx5HyNBQQYFh2xo64aqrvv8AQK0sxPHJXsNVAwRBEEQ5ORXxnl/DVMsyJKyThGOTpYTP1BcOOnhIdW6mQ1TnqLTZ/PWXQBkaZVPpyPG7IqyJqt3Ibc+QAogPxyRGzomJvsoXTivglDtjy+8lNmjVPqp0VqI2OZ6XAcXRpPvfve7//7v/z7pvTruLy2hquIUB9FeXrixIG07H7YWWp8jRNHU2oY6
MREbl8proGKIIAiCIJORd4PhdAuyirbfExYmT/0ImbFuOJXK987uwwCkwkfTrdLTvdI5Z9ySCtFnWCgp4rBQSAiMVAwpwNkNNUDVzv2ZMZXYE04456gVIvYdO7wV5EJaW1vvvvvu/v7+yb8tUBoYKxDyEfIhUkAhQZ74He5z23AovkQIfWHjy7hOHgMVQwRBEAQ5SbqS3xcwgwGRxdnQEG2G0z20ZqAbTqXyXXgMob5EG1XQaouFDNvdF0p6KE2iYhtblUwBTquZQ4jywksvzV59iEQisvgU38KIfY7C9oXupFVw0iuT6LGeSb6KQaAwo8JweEj3urUHKBOODkMvgoohgiAIgkxIfg2Gdv+m6dptwuFwS0tLEdYmo81wBjn5dHXDiRoaymJSAOjqOQoEKn0KcC69acSas8iHJ5/g0hdUhkhGPrcVw6pggFDlhRdfmeVBiBNOkHHjL9Yju5NwONwsmOIJhDv7G3BId6dACi0SjHoc8YyI/9A26kFQMUQQBEGQCfPY/KYrMl+aWQotvWDFJhpKmyGOQJnBoT4t3XCU98c2iNn54LwSPVSmkpFiExcWQ4ZyYaEliWSCcckEIKir5SV+QtWZDT/BCSfIRMj2hagguzZkTOXboscPVNVUO5uhclSUvMuc+fMAYF8Hzkr2GqgYIgiCIMj45NdgOPXRtJNf0xdbHo42w9kcMFPXDe3Kd2kQkypta2vr/gNHug70BhUo16nd/C6dLIp6ZEfqiBQEo+RCcHyoK0qpPwiEdkw/S8QJJ0j2wh+SVdra2qb2jY4xJyPLVfHGkafgzj+xMNlroGKIIAiCIONnLHk0GGZK7GtpaSm2PlBSekCb4WyOvanohnKFndZCybqfPAIAuqpolA7LhY56ZEwWCwsy3mdsDbEiqFeXB4GQjs5pKIbSWogTTpCJTkF5HziGnPTSYppnETIsHmJJsgfiwXh7UNNUa9YZ4i1UXAIEQRAEGUtzc3O+qidlyec0r8gnRNYmy3LdItl30v6GSsQsD0IrQSBk3FmlHR0d4/7Fw0f6OYd5QXVeQEmnFlymEAyXtWAzRKf469AAwKepQU0jQDs7pzQuWfZmlW9SfIciEwVfbF/oDaLHDoz4GLVCT1NWWQEA+zqxKtlroGKIIAiCIKPJo8FQ2isyKFbaw22LJz+XTdlbW1tRkpjloWg/H6Ub2mXIY9m9c3sqcdAUnQJwbitNXExBwWLkwkPuwfTuG1WbrFG6cH7VS2+TqdhK7MnvaB9DJg++eIRkg6lY7yf/nikXI6fPD/7yOfOrwXHmJ8BHjkFBCiscOD/g44Z0gnvXc6BiiCAIgiCjyZfB0B6OnPEczKZI9iDaDDN48Ni5onze1tY2SVb5zrat+oKrKvzKhD+Rc9QNCzRJHNvHkIr8kBD6wsaX//FkZzZZzIjvSmTys40n41Ru1LrJf8JU3nqrVq2aPLBO6zcqnXuKQ1cafdZH5bDw4ONE83S9OQfReQQVQ++BiiGCIAiCjE5a8mUwzN50SDlkoHhEQ7QZZimTP+lUmX17d562YOW4mSBHubBgISMfyciv8UkdhvKwQa0QmRwpK2fjXl0O1LqT/hOZVesmIuO3G2fJnIXn9R3qXcA5MC6H5YstfR5Beang4I5nwxZDqz3x0cNHgPNFixbiOnkMVAwRBEEQZAT5MhhKuTB7il6xNTREm2HGkYrzlApQx1UFUS70HHaR8ri71bYWcuxeVqycVEqzv0GersdGwIJQ69wm1WV7n8rJRZNz7qUfGLI6F6YHXgm5UJYlE1s3RAoPp2/UnmZmeQxxn3oPVAwRBEEQZJh8GQybmppWrVqVVQNgOBxuaWmJRCJFkr2jzTBLB+pJvyd6rKehfPHI/II78wykEJkoDaybUwYA+zo6R2kKOOEkv7ihDHYqap2U6tra2sLh8LiyHap1Loyt8lpi8m+74aY7fv7rFxLRKJFCkuVG47KVISc4UrcwcQZzno7v6Y3gzSEvgoohgiAIggyTF4NhZocjT36hL52GRZJioc0wG2+Qk37P0LGeCbMMpGAhk6eQjvQfJ5wUllo3yfnzpK8hI6FEHico/BUQJz201q5dGxMnhoHjAzBsR5M2NHEywS6GBYptGk33LhxhMwRYtLABF8ljoGKIIAiCIMN5S+4NhhkfjnzSf654GhqGw2E51RdFw4wwlUo0AIgdH1YMncVLROaK6CwpNCYfcq2rCkl/lxsmnGDTusIiS/O+kByE161bt/b39090pfGP/++HwBkZFpXkWAxrcD42MCw8+Ehrof2ZtBbce/AwACxqQMXQa6BiiCAIgiAWuTcY5iVZkvVERaKjoc0ws4frVL6NGYlx5CVsZedR5gRLAGBfZ6e8+fHd7373ggsumOhoOekhhE3rijDsYul6gcbWcDg87h0g+Q5aUDMvFQiYUy4Uj+guLFz4mJknw4OSWSrK4871IqgYIgiCIAjky2CYveHIJ73Wl7XJns/T5KQOtBnOntbW1lAo1N7ePvm3UUU1jUR9uZ5OMNI6IVoLPU1soE9K808JJvq2yaU6VOuKMOxmu4EvkiXC4fBE8r21Q0nqrB+PRp197tJ+w3RdMgpMBQZ3RPa0cGjtVsttiPvTe6BiiCAIgiCQF4NhtocjT36tXzzmO7QZZuqYWbdu3UkLk5lp1J650lm0hNZCz2SKowZkOjYWKK1OxAZwlZCpk+OOHEg2dl9LS4vdi8AOuPJJw4J5ACR6YoBIRYkBZ5wwzimkPkMYAMVlLMhIANzZmDL9eR6LRgkhixcvxEXyGPhGRRAEQZA8GAzz7q0oHnNHU1NTqwCP81kyFdX1/JUfDZTXdJ1IOAUm5tSbOEeNoLCzxZH0HDsOHBYtwiwRmR7YvrBwiUQibW1tnPNwONzU1DRuaKivmUsIJGLx6HEhGjoa3jl74iEFdPLn1h50WAu5rEdmwPmxo33A+cKGelwrj4GKIYIgCIKkUpdcamfy38q7WidnNBfJCBRUDDO1kpN8taWlpXJuvdNZOJwSpqdkIoWYKDqf8FE5pGxehaVoyHTIV0cOZJa0trYSQlatWuVUe+0OJ86yifoFc0kKONF3zFGPLFobsuEZGhgUCisacOuun927kAFLPYkPDfX3HiUEZyV7EFQMEQRBkGInxwZDV+l0smLX82qa/DXxUJ9lojj5cTLcFpPzhMlHFCbbWiFqhgWaJo584ixJFv3KcLci0wuC2L6wQHec7IA8dt+NvYhqqJsHACf6jg/0HR9xzhiemMHwxFFQYWBk70LGnB7D2NBgdGiAACxahB5Dr4F9DBEEQZBiJ5etlPIyHHkSwuGwHJ3sbftXWNAkwAN+ukesfI/YaeG4uqEtF9YvmLv/wJFDgwmAklGHFPa491r+6BAA0FeCTBFsX1igyCa2Ew1Mk9cSo7/EOaEw0H9MTkmW7jSrraGcesIxMBTWGd/RZcRRjwzMjA0OHTvaLzyG2KHCa6BiiCAIghR79pIzg2Fra6sLS7HkFBRpHPDwjpa/IyqGUz9WRwmF9kE7tsTbmSgSR3JhGwyBEMtdSHA2ZkHmiaN
shs4mlT29JwBgUQMqhsiUaG5uznHXYGT24aC5ufmkttCxFza33Hj1U795uf/gkfTwE2FMo2LmCQNQrMJkDAkFEgasCTYOa2H6CWODJ06YyeSSRRgIPAgqhgiCIEixZy85MzvITMmFopUsNfK2BQ9thlPMDCcSCm1G2QxH+Uoa6uZauQUhTHY7IoRzzgjBVjiFnS2OnHw94jnm/MiUY407gyAyyS6ThREzuNO5oKYaODcSCZ5MAlWBMNtmyEnqPzkxGc8fBRIG0s0oR8w8sZ7seXcX57DiistwnbwHKoYIgiBIsWcvOfu33Ny5SdYmt7a2ergVPXYznAhbKJQHwOQ2WOeXxqaRC2rnEYAkY85ByQw7Z3sgW3Q8Gfk89YAJPzKVINjW1obzkQtxl83swmD5hWc/sOHpw509RiKh6H5uj0oCUZuMOmFhnf9toygfMyiZsRPHjxPgocWLcK28B16/IQiCIMVLzkYku2Q48uTIul0PT0GROQ8OTbaRw0wiAmmA5ZxPJTmU3zDhdxI4PJgYLl+VUxVtJy/2LyvkpHGsbjgQjRFCFi/G3lXISc42rurhi5x0f03euHAq1C+YCwBGMmkmkwDMITCJ8wfjHANCIZ3/0zIhH9nEUDzu2fk+IbDi8ktxobwHKoYIgiBIkZIzg6GrhiNPgjSXeduFhzbD2QiFzmWc6Psb6uYRIMdi5oDBRn2JAY7ULcAs0faSWEkjOFrfp570nxgEzhc24HxMZDJkD19ch0K5OopEIrOvimiomweMxQYG+3oOE6vhXVp0kucUeWJhuOSuh9n7Tpz+WVr5TYuG8aEhMfYEA4EHQcUQQRAEKVJyYzC0ewAVSp7g5tLp2VPMNsPZC4XOZZzob9XXziUAQ4bRH2ecEO4clGEnieg0LBzG7j7m+GTfUOzw8RMiUcSG98hkkQXbFxbQzpKVyBnZX7fceBUAdO/ZR5ylrMDEGUQqhagXFkQYcOw+Sy5MPzJj97Z3GDMIoGLoTbCPIYIgCFK8CUy2/5VCrMPy/BQUaTP0cLvGsQfhqB6FWT0g6xfMBQK9Q8ljcZMHlVFJBwNQpG+NEGxhVUjZYrofpVMuZABxwxyMJVKJ4iJMFJEJYwq2LyyUYCE9+BncWbXzqwlhBzu6WdIAqgqtMD0ugxDOeCokyEH6uAPcHAJGWkQtuVAqhqa5591dhPM7P3YzrpUnQY8hgiAIUozkxmAo67AKTpySmppXjXhFYjPMoKNwWjTUzQMx+aQ/YTplJubIPZACyxXHfGgrhkPxRO8x6THEPobIhNEW65ELImTISuTMarvLLzyLMW7EY4nBQWIP2xXVrNyenAQMjYbuDgOWyXBEM0rOwDSBm/HBwZ7OLgD4xB234FJ5ElQMEQRBkKIjNwZDKRcWolMvHA7L0clePQA83M0wX0Khk/ra6qGk2dEfG65K5o4iZKxHLqxUcQKDofzk4RNDhmksXoQlychkcbB4PN2Fe1EUiUQyVYk8IhzUzePcMGKJQ/t6iOx/x2yboQgHqBUWQBiQO05GckYYI8NVyUZsaGDgeD8lfBF2s/UoqBgiCIIgRUcODIaF3hBQlq96VTT0ns3QDUKhzWUXngWcDSYNcFYej5yYjKphoWaOI4ef7Og6xDmsuOIyXBnEe3GwSIhEIm1tbZzzbMSLhgXzLjn3jGN9fb3dBynhIxxqnNl3lEbcVULcdtK3Z55wyx8KzLQ20+zcvfdYb9/ihfXYxNCroGKIIAiCFF0Ok22DYaEMRz7pb2E/eg9v2AxdJRTaLKidSwjsODzonHzCUkkHd3a5xwyxAFJFx5TksXIhA+g9PkCAX7ViOa4VMjaC5Kb7BzKbCEIIyXgl8iguvegs4ObRnkPEYMM2Q8uzBtyaf4JmQ7dGAYB0MfLwvuPDO9Hsat8HwFdccSmulVdBxRBBEAQpLrKdwxTWcOTJaWlpaWtr82TLv4K2GbpTKLRZfuFZADCUNBkhqc1R0DqcgaBcWCC5opR65eaUC+W2a/8hQmAhDkpGxgu12L7QzWSvEnlsRDAN42jPQSMeo06PoX0uYRgOXAyT5/70zuJ82GDIDGDsvR07KYGP49gT74KzkhEEQZDiukTOag5TiMORJ0fWJrtHjcrsr1ZYQ5NzPPV45vnhxWdzzo7Fk70xNl8dMTpj2FeIo5ILIlUc07jQ+ZmBeGIwHhNjT7AYDRkn1KLB0LXIliO5MXovv/gc00hEBwYO7OlsOPdM0xYNGQPCQSHOVrcYGNyFLElmjLP0fGRmOlyibN977cd6+ynA4oYFuFpeBT2GCIIgSBGRbYOh97q8S2XKk3NCwuGwNOu5/HW63FE4LpddsJQQ2H00aiuD3DF1N12EhhRGwsjH0w1NgHd7ehkzCaBiiIzA2x0tCp0szUSenM9++nbOzPfffBcM0xqakR6BYvUwlC0rMCq47fwv3YXDc04cHQyFdHioq8sf0BcvWrAQo4B3QcUQQRAEKaI0JqsGw8IdjnzSdfNq93o3i6GFKBTaXHL+WQDw7pGBBJBRk3ZtEQqw2b27Safyw3uNOToYMoBXd+wlnN+JxWjIyBMXti90czTPy4XK8gvPNM1k/6He40d6iRyRLKuSORN3kGS/VI6KobvgkG5ZyK1WhiMUQyM+eOLt19+MR6P3/ts/42p5GKxKRhAEQYqF5ubm7CkU3h4KKdOMJoHHfi9CiCzydU++XRClx5NTX1dNOOuNGkMm+CihaeFJzsUkQm9SsDC5EBJGnm5hNWo7Hk8cPj4AAJ+44xZcKMRGClK4Du6Md21tbXm57bT84nOMRCI2NNjXfai8Zi7jJnAKjAPhQBlwRdyhUIQsRTEyuCkG2MXIqSeEmdwakWyAaXbv3XfkwAFC+OJ6LEn2MugxRBAEQYrlWjl7aUwxFGFJO573pqC4xGZY0I7CsdTXzuWcdR6L9gwk5cRkcNS0guNDxLUM64NimmlqPzrm2AzEk8cGo5TwRQ1YjIaMiLNoMHQbMrjIaWb5iil//elb44OD77yyFQxGOAdTuNW4aRUmW/NP0GboIjiTVlAOdiG5mZYLhcfw3TfeikejH7/9I1iS7G3QY4ggCIIUBdkzGNr37b29gOFwuKWlJRKJeKySVNoM85jIecBROJblF59jGIn+eHLficTZVTpnjImW9kzcrLYFRCIOJoKWkkJglMdwZ8+RI8dOLFlYj00MkaIKhYW4X+RdqPwqucsvOOvHP/3N4c6e/gMHK+prDZ4eo2FSINRhM+Q4GMsVcBADaoTHUN7sk8ORTYObJpjG8SO9e3e/L6YkfwRXy9ugxxBBEAQpiivmLBkMZc+mIinCkpKWtCp47PfKcSrlMUfhuHz2U7cBZ+8eGQJKmfCmOQuTsYmhy2HOQvKRZkM59uS9nl4AvuKKS3GtkGILhYV18SNl3LwbP5dffPaFy06hFHa88pZKadqwxuTcZM5Y2mbIho3oSB7haWshl/XIDJjBLYOhAcw80NUVj0YXL1ywqB5vGnkcVAwRBEEQ75OlRuxS9PHYcOSTph/eq7/OWWFyMQiFw/nhhWcyw3i/b5Aoii
xMnmhDXMhwMfIo0TC9be/oFu4SHHuCWEG2qEKh+5GxJl+NC8flI9ddyczk4PETLJ5QCVgeQ2aCaQp9SkhUHPBuUv5J7QohF1rTaTiXu8k0rCfM3Pbq1ng0+vGPfnjhQmxi6HFQMUQQBEE8TvYMhm6o9Mk9LS0tbW1tXmpoGBZkbz8WlVBos/zic5iZPB43OgdNQqntTTMdKiFOTHZrtgj22JOxM09MgNfbe44cGyAAixswV0Q8PvirEJERZ9WqVa4qEr/1xqvj8Xj/wcMHO7pUSqzWeNJmCIwzoRrKboZoM8xzDEgbDE25g9LWQtN6sm/3+x3v7aGE33kbliR7H+xjiCAIMlu6D/b1HO7vOthPgGze1t59qF807IKug33i6+L5oX7OWf28yvraStnmecH8SgBeXzMHwFwwv+qS806tr52Di5kNstTBsJhzJFmb7CXNS/5Gmd2bXu1ROHVu+dDKZ1vf+EN73yfOrFIIcb4N7YZV2LHKldmiJRTa7kLT8cgADh8fAmArr7gEG94jsk0e6v6u2iOushY6+T9rPrzhsT+88fuXF511asJMMm4Co6nNpKAQkIP0U4+cY2zIWwBIGwx5ejNF+0LDkCOSgbEXf9cajw5dffklaDAsBlAxRBAEmTbdB/te296xZfu+zds7CNG6jhwDoghlkIrNvsihVuwFTrUKDrznGO/p75NNvGBHnwjL74vWLQbnBvDkpcuW1M0vb6ipqq+pFBpiNa727C+ds2EwLIbhyJMg9a/m5mbPKIbSZiilvdn/NNnSq2iFQpubPnT1r36/eXfvwJBZVUYI49wUp0UGoDiMbOA4aSJuwNmykDushfLx0InoK7vaCfAv/u3/wbVCmpubcdqJe7BnIrvz5d18/cr/fOg3fQcPd767p+b0xXGTATGBKUCZuFPBQRHdDBVGUp/E/ZmXAOAYjsyYNRxZWAu5mQRmHOrq7u/tpQBfvPsvcbWKAYJ3hBAEQabCa9s7nv791u5DJ7qPnOg6MkCICkQjhAIhgQD16+D3k8pyWlluZb4BP1SWWRc7dTUaAI/F2NF+U5bhxeI89Rw4cOg5bMRiLBo1onEWHTJEB2iDgyl6DCcuW7bo0vNOvfm65agezjDOkcxHOpwIaa+Dl2RTKfPNZreOEgqxpRcAnLXyf82rqm68sP6iKhUMQxU3q+VGCaFiTLJCLHC53JEtchPA5NwAEPeyIAEQ59x+fGVvzw+ee4GZiTc2PosewyJHlr5iPbJLQphsrOzy3XH/+qd+8vPWuiWLw3f+qakqjCqgaqlN0UBRiaKkIgOlQFOPGBZyDQeeLj0GYSrkyTgkYjwR4/EoT8Qgmfj1Tx/f9urmq5Zf9P1vr0WPYVFkUqgYIgiCTMIPH2l7+g/bunqHCNGESqgAIapKFtQopyzS/D4yp1Lx6VBWSktLwO8jyvAd0TGnV279l/5a+jlALM77+41YDI4eM470God7k4eOJA/3JjjnnJmcJ7kZr59best1l9bPr7j0gtPra+firpkK2ZC0sALLw+nizObYoFA4CV/+l//4bdubS+aUfnVFPcTjilMxFKIhIUQqhpgaugSpFdqPSYA4QILzeFox/OEfNm/cvvsTH/vT+77zz7hcGGFRLnTJvpB3vFwbgOyrplde3/HZf7q/vKJyxU0fnHfq4gTnw4qhqgFVCBUxQRFPpmkzxDtPsw8AUivk0ldoJnkiBmm5kCfiB/d1PHz/fyUGB5569MErL78YF6wYwKpkBEGQcbCFQkp9QDTdX1FeSktL6ZKFynln63PnEI2KQjpxKUOtZFdmvdRhlSHiP+v2jE3qwonJYXCp/xkzgwoL+HXgPMQ167pKyIkdncntO4fa90WjQ4GevuT9D7/MeZyb8eXnhi47/9Sbr7u8vg6lw8nIuLQ3exuax5Dt/2RJrzd+namXWmPp8VS46UMrn23Z8n5f9IRBShWFmaYpapDlJsuTU4/ijYrJnisS+3QlMh9ZkmyKOuWDJ4Z2dh4kBD7+UWx4X9TgzTP3ICuR3bAvpvIall905g3XXPKHjdv2bN9Vv3SJmTRMZoJJgVDrkTFQKGccCANGpyUaTuUFYKCZ9OxvjUgmooMhtzoYJrl4BGZsf/WN+NDgVVdcjHJh8YAeQwRBkGGEULhdCIU6UF1V6YIa9ZTF6qI6pbSU1tVQRZFXG6BQhSopVFWhVNROCOFQPrVVP3FZQtLjQBkzGbee2oohN02Tmak8mnHGWOq7BKKfizhHR2Osv984fMTYsy+28/3Bvr5E6u+zODOif/PJ626+bjnqhhPlMxm3PxBCinA48uR4TEU9qc0QHYXT5eyrV5eUVHz83AUfbAiyeFwaDBUATZwcFbQZugkmKpHtquSkdBc6DIavtvf8+Lcvza+pfn3jr3G5ijqHxGjomvibe6f/LAWEzVt3fO4rD+r+wFW3f6j29MUxw7QMhooKqg5UsWqTxWMqSOT82C7Gg4mD6FpogGGmHpnBk0lIRHk8xsUjJOMHOzt//qP/Hujre/KRB65cjoph0ZztUTFEEATpPtT/w0de2PLugZ4jUaBaIKDMq1bOOk1d3KDOnUM0lfr9QKmiCJFQpUImVFMoUjSklChUyoUi4xXGQikLpvIvIi+vLDHQZEzMSrYxTdM0TIklFor/GGNG6vMGN8VfYGCYPJk093Yk3nx34PCRRHdPFFiSmdEF8wK3XXv5zR9a0VA3D/em85ovszEOGzZNhJcaO04kgKJQOGO+/+Nfrn/895c0zPnUubU+I0YYc3YztOVCRWRp6P7IL7JxoVQMTUcxstXHEODrT/5xZ2f304/9cOXll+ByFfM5H+uR3bAXsl2JPN2LqKl//3889NxDT7TWhBquuvV6pmsGECEXaqDqoKiEKqI8WbQynH5tcg4uLz14PJncMeTEFB0MY5ZcmIhBPAZG4lc//fm2za9ddcVFTz78AL4BiyifQsUQQZBi5rXt7V+795mu3jhVAkDUQEC5+FztjCXKnCplThWRXkJFVXRd1zVdqIRSKpRyobQXKgpRRItmUZgMBNJXEmLsG7O6FQIwLqaOpT2EwkY4QjE0hEDIHBgCy4Qo1Ubxl4BB/wljX2dsz77YK1uO8tQX4twY+vyaG26+fkUDWg6zkM9ggjQ5XpJTnTZDFApnz/7uwzf82Zeqqyo+d+miU0sIFzZD6TFURDdDWzGkOP8kvwljWig07ZkntrtQPNnWdeS+X78YTUTfePFX2PC+mMMrzv5yyV7IXlSaXCKYuoAw0Xdu3rrzc1/9YWlZ6XlXLz/j8vMH4wmuqJbTUPQ0FB5DKqTDmYxAyWUo8ULY4gD2wJPUExOMOJcDT0QTQ0gmDuzr/Nn96xKxwSd/9oMrl1+E78HiARVDBEGKlNe2d3zt3me6exNECVRVKTXztIvP9S1cQFUVSoJEUaiqqrom/hNyoabrmqapqqqCqqYuXsREN1ApUAUUCjQ9xYQwMBkw0f6JObfhyyewipKl2VAogeI/Q4iGzDRNkzMuNURbNDTN9DcKUdE0Un/XNOBQb3xvR/TtHQM9B4YGh+Lci
N527UX/91MfKXK/YWYNhpggTYWZjQ1xIa2trXfffXdlZSUKhZni/vVPrH+s5cz55f+wvD45NETHsxnao5NRNMxbwmgbDMXzhKMkOSE+/OrjLR0HD/3ZR2/8/rebccWKEzmQF/PH/O6CzFYiT7I3M/4lJ8/8btN3/uPpkoqy6z99O9e0BOdOxXCi2uSsBogs/XC3B7XR9cgmMZJpg6G1xU4M/PLHP+14771VKy7+5U/vx7dhUYGTTxAEKTp++LO2p1ve7ulNgOqjWumKS/VlZ+i18xW/nyuKoqmqJiRCXdfST3RN0XWqa0RTRdUEBQVASZsJTXP4llxqE4ohFxIhtx2DnHH7ioGB1cyQi6pl6auhlBKdqKpq6YLiD8OBaQirIZN9D01TZaZhUmrW1fjq5vkuPLdsb0f0pc197+1Rf/n7d37xm1f/Zs0Nt9xQpHXKTU1Na9euzeBPw/7uU0FOQXHzrMYpZmKtra2VlZU4zCSDXHLemT/66e+6BxJbD8eWVagskWDp2RpM2AxF3wXRwQH1wjzBRk47sZyGDuPhjp7ewWiUAtx5O848KV7knSFchzxe3jQ3N2ekg+REVzVT/Py43zYtZ+KNH7zsx48+PxCN7n1z55krLjRiMWaPQCHpESiUcGYCBcIUOWwwIxdjE8WZiX74LOPSuD/WRbFODGEExgjI9IVxM8mN4Q2YuXvbO/+fTLkAAIAASURBVIe6elSF/vs9X8G3YbGBHkMEQYoIUYP8q56jhurzlZVqSxZpV1/qq6gkfh+oovTY5/P5/X5doGm6TjWd6jrRNdAV0CmookMh48Kvb4jNFgqlN9CqHxYtDLlsXsi5yUzOuHWBILsayg6H4jKCpCem2Cdk+YQQCuLvMoMZlt3QZMxM/0vCdWhVNDPGTG4C4/yNt46/9tax7p7BoYEhZgx+vvHGW29Y2bBgfnHFtswZDKWfoqBVsBwnM4VoxhxbeoxDsTPOV+/5r+c3br+gtmL12dW6GVcm6mYoT5MoG+YWlgpsDoNh2lQoOxjGOU8CfP2pF3bs77lq+UVPPvpDXLGiPcNjd468R9iM2N4nl/9m8+G0dMPXtr3/9/+6vqSk5IIPrliw9JSo6RyBknp0eAyF35DO9uIwG9+ckYCVt6gn6pGJdD4wYYFIJuxiZJ6I8mSyt+fAYz/ccPxo3z/c9Zm/+5tP4Tux2EDFEEGQYuGHj7zwwGObqFoSDOqXXeQ7/yytZh5QQlRV1X0+n0/3WwR8ik8nmk50HXQyLBSaJiSN1GZphQYzhrU7W8MzTTHl2PIRivaF1vhjRx5MhKTFZKS2ZiynL4Xk3wQAKgeqpF+/aQhdUOqE6Ukp0niYejSsr4PJjx4z3t058M6OY0d6h/qOnjCTg5//1Ic//79vx5RmZtdwOA7Sqynl5D0KPVNknV/s68xX39jx2S//oKKs5POXLKjXTZJMOhVDKs6Ezq0wci2vIFVClm5faIwakcz59q4j9z330lA89tQjD155OY7ILNJzO3bnyG+0mn0l8iTq3tgnU3w+RfvhRF/93Ff/Y0/n4dolDVd/9EPHozGTEFBsxVCIhsPzT2TP8Fld0WXq2076PbOMSjkKaqIemTDhfOAGMU0wk/ZwZJaI8kQcDGNL28bnn3xW19RX/vDYwvpafD8WG6gYIgjifR78WeszrTt6jiYDJfrF5/ovv1CrqFQ0BTRN0326z+cPpMVCPxUb+AnoJC0UMkgakEhAMglJgxuGOVwnbIzUCk1mclNMNmZMnFytB9mzEOiwXCieEdn3MHUhJP4jI3s7E8X6b3R8F8NTbMVwxIuQfkMxICUaM/e2D258pbe758TgwGD9vMCtH7rirr+4w/uBLXMGQxyO7Ml1m+IwE7QZziYLHZdPf+E7O/ceXLmo4hNLq4xoVGFMmcBmmI0RKKgqToSsRx5tMEy7C6XB8KGNb/3P1p1XXX7Rk488iCtWpEkjIWi3z1fMknewMigXjvucpy9ax/38RKriLAuWX9/+/j9+46GA33fOykuWXHhmzGSMKKCKKSjCb0ioAuM1NMzG+X/yb8veV/MTuTgA44SZwITHUA5KTsZYPG0wjMe4mdy+6bXnn3rOiMef+On3r7jsAnw/FuPJHxVDBEE8zGvb27967zPdvQZV/VWV6p9cFbhgmfr/s/cn8HEcZf4/XtU994x8SvIhH3IOch/kjm1FEgRkCOcXEhYIOVj4wx4s7C44ZDdgaYH/rg3scm5Y2NfmICFA7pAQG0hkRY4TJ07ixHbi2I4t25IPWYd1zNFHVf1eU9Xdas2l0cxopmfmeUtu97RGo9H0qKqeT3+e58EYu10ur88mFAqtEPu8yCchP0YyQjrjjkJNaIVM04iqapqmaraygoRMkgwpIzRpHSbmZNuwyz+sG+IINkVDSbRc5muEePAsc5+hNGm5gCXeGwAhQzc0bIbcahjfUuE6pIQyioZH1Df3jL7y+lBv3xjTxv7h8x/65HVNSxYvqNQzXkCDG6Rf5RneOC22zKHrMUTIWYZ8WfLy62///R2/mB/033he/XlzZBKLCa1QThYNS5GZXLWSop5QuNC0FopPFaEDgyM/eer54bHxx35756orwWBYjcD1sxKuavLPRE7WB+3aX5aKYT5yYebp456HNj/89LY5tXMvW9Pkr50dUTRbYjIXDblciAynoZwuNzl/318+d0j3pYIriYWZqlj8lGAi5EKRj8z7I3O5kCqR+I6mhkdGHvzVr/v7jt321S/881dugb/H6gQUQwAAKpb/eeC5J7rePjakujzyVRf7r7naHfDLbjf2+/0+ny8QCPh9fp/X0Ao9yCchH0YSQpqOFBWpGpcLNV1TdU1TNa4Wxj803cgBFkULmdXDhOUdrZrWwwkNUY6vjITV0HZPvlwwmopKjFFR0FCYDU0Z0ap3GH92B3siT27qGx6Ojo+NN9R5v/+tL1912fmVOasVyGAIcmGeOMegl4NQmBCqVaHNcLp/RNnf/wvf+NHeA8fPqQ189bJFyvg4JmRSbrJISUZIKIaSwyS8ipQUiU0utAyGllyocA3xl395+fk9PdesvOTR39wJg1sVAhNiqWhtbUUI5TMHpfMS2kXAzNvkm9PKUM5ymvj6d3/d03fS5fO2fuY65HErhNoUQw9y2QsaShinFg0Lpb4VUBYs1PHCT0kMcblQx5QaoqGuMpVnIhseQyU2Pv7M4xt3vvyK1+N94c+/WdqwAP4kqxNQDAEAqECO9g9/4V/vPzpMJNlz+nLvB9/rW7RAwhh7vV6/z+cPBAIBf/xDCvixz418EvJghBlSuaNQVZGi6KqqGzKhQDX/UWrrdTyjA7SETelQ5vZDbDvMPyRsiocSQox7Cymx+q8Qq0cKpYQxwl56dWjf/pE33jzJ1NGv/vVHvvalT0FUAzrRjJ6OEr6M+QiFCSvyqrIZTqv+VA733P763q98+5chv//6s+avXOhVw2G7YiibNkNJ2AxL6L9w0g+aOax8ZGtHsymGovnJrr6Bnz/9QlRVXul6dOnSRTCyVeFI3tHRAQFj8aewwmYiJ0iBdtFw
WrphnkUM093htd09d/zw97OC3saLzz531SXjUUVHyKYYum2JybxcjykaFqFcoP0+01IYZ+7O+c5HQi5k1EpJxkRjikJVYTCMbxHRdr702jOPP62r6qO//vFVl18Ef5VVCyiGAABUGn945rU7f//isSHd7XF/tM1/1unumpDkcrl8Pl/AEAsDAVcgIPm9KCAhL0aUIk3lvkKVqYquqKqqKPGtTSjUqHDrlWTMxEjCUnw5IAl/ITb9iLzwIZYsxyFCzGigbCmGuqEeUsIoZbrKHnj04J69Q1os3FDr++0vOyopQ7kgBkNojlxAip/IViih0B4tV6p8nOGPpeBfsvM/9z3928e7l8z2ffndC4N6lE1ugSKEwuTc5BmV52bowZ0vKeoJRQwxVmwVDFXGRlX1J398aU/f8WtWXvrI/T+HMa0aY0Vo/1V0hEqb/zokWS601rGU0mTpMMsk5TwLF6ZjY9frv/rts6GawPnNVyw4fel4JEJll9E6WdgMJRe/ppQoGs70EJ2l5TDhbvkog3k6EKe4G0OYEsNayPiOrjFN+ApjQjRkunbgrb2P3/uQpig//f7tN3y0Df4qq3oWAMUQAIBK4hcPdP3i99skd2DRQu/114XqarHbjX0+v99vyIUBbyAgB/zIL6MARowIRyFSVKYqqqLEN6qqKjFFVRVV01Qr79gpvyHvCCAhM2fPlAtNO078P8bYZLMhtXYoYdGofuDg+DNdfb1HT1Fl9B+/+LGvffkzlbHCzt9gWKir+oB95VoE+bXgQmHCr1BJi6XppozlHxkmf3Xlx9aGQv5PnFV7zUKfEg5LlLoQcps2Qys3Obe+yfnHhznfv1SPmRuEV7LSuVBIbfnIqq3hySPb3/7Ljr0xXdu++eGlS8BgWHVAPnLxyT8TOWHsTZAL05HZYJhZMcxmIsjmbvc9tvWPna+G5tRccV2LHPJFFNUqZci3Liy5JhKTZQlhKQfRsICZv1NKe8l3mFIczEY9zL2bM0OYMcy1QkQoZgQTnakKU6PcWsi3uj4yOPjs4xv37nqraeXlP/reNyAfucqRYQ4AAKBi+OAXftz5Sp/H57/83YGPvC9YW4s9HlcwGKoJhWpqakKhUI23pkaq8aMaGXkZUmMoEkHhMA1HlEgkGgmbH+FwJBaJcl8hdZxSwMRqy6icKFZ5/DazXyw2tESev4xMOZHvMJdLqpvvra319x2LhWPSC9t39h450vaeqytgkb158+Y8H+TWW2+9+eabYWYsIC0tLa2trS0tLY2NjQV/8M2bN9/KOXTo0Lp16+6+++5bbrml4D+oq6urp6enMjyn08omS7hYkvJmgiElXSCa8NW6eTUvvPL2sQhZXOOb55WortsjGyHQGr3kzf18mKHKUAXR/kolIDJRvhBjijGzpSRrCKn8U0eo71T4d1t3jkZjP/3+t1Ze+W4YzaoNkAuLjJjRmpub77777kKN9na5kFJq3RT72QiIycbD5IVxPlfW7d944dlLX919eGhoZPjk0LKzVjDEdELMEQtNrGlF+z9mFP+eiUG0UC1QEubBmXhi03iqDGHKMBPWQiaJrGRdRarCNMXY6hpi5Lmn/rz7lZ0er++hu34AciEAHkMAACqBJ/7y2i9+t/XokObxem/8eOi0FS6XC3s8nmAwGArGCXgCfjkQQH4JeREiKoopSIkxJcY/FDX+T1XUmBrTNZ6+Wy5joyQMONisbWhbPmHRT5lxeyGjE+1QKG/rzEbH9M1bjr6y43h4dPTKC5b98DtfW9qwsJoDG4iOZu7sFDaxd0YdhSl/XGtra1mvl1IGeOl2cq5wPy2n4T98+1cHjpw8b77v02fNlmIR0QJFTlXQUPgN8wkKC5PJlXcYWcCnmj/EshZiLBqeTHQ7oVRlTEfowRd3P/Xq200rL/3J+n8t39kBqNpxr+wmyoJkIicMuXYR0K4SJiuG05oXppxicuaNPUc6fvro3FmB0y4+Z8Wl54yFoxpjwmBomQ15u2QJSxLiOzk4DWduQskhGTlzncTsE5+nTEaWGMOMCrmQ64YUmVULqRLlbU8UROneXbsf//XDEpYfvvsHV11+IfxtAuAxBACg7Lnz/md/8/TOo0PaggX+D70nsGypy++XA35/jYWnJiTX+FFIRrKGYlEUCbNIWA3bTIXRSDgSi8V0nTBe26V8lABkdGrmTkPbwo2Jf0bzFGtNYu4jhDxuvHRJCCNpYEh751D/7t17rrrs/NmzQuX4HsjfYAhy4czR0tLS09MjBL48I9giOAqTaWxsLGub4ZTyX4ZYMct0tgykc7LU187503M7ogzXeOTls72ariPbczPKsjJmOA1nJrTLMwLM/6szFNBOCUGIcrlQGAyJ2fBEfGrcYNi95/BTr7xNMX70vp+CXFiFrFixogjXYwBrDXPo0KG77rpr5uTChAE5YWTOUMcw5fSR5YyTAwtqZ82dU7P99YORU6Nut6uuYYGuabzqIhYzQXxolDAyOgDOoNOwgH23Mj9Uhtc8+67NU7RnEbULTblQQhQzijSFaSpTFaOIoa4hSvbtfvOZxzbpqv6j/7itAtKPgML8LcC1IwAAypo773/2Fw++jF2B5Ut8n/yQv26eS3ZJgUAgaCEFgzgoIw9BmoJiMRSL6bFYTFFiSsxAIbpOGUXlPhya/U/wZKeh+CKl1HAaGltK+aeqsr37hjuf6+05NLCk1vPD73z16nJriJa/2AfNkYsTEeXWBaXIjsJ0z6Ec7TbJ+mC6LLMM7pI8+2NmeNHueXjzoxu3NYS8nzizZpGLqNFoQt9ko//JVAUN8/f9FSoBrSBfKuC3pITwENvyGFKMNZu7UGFMY+zI8Ngvnnmld+DUzzbcccP/WwPDV7UBl9CKObl0dHQUtkVYci0IoQ8Ss4Mf5SR0QUk3a2Q5xRSWB/6w7emu1/1+73lNl9SfvmRsPKwhjGQ3drmsyobxyUHmHvQCNUKZiaE4ew0x3ZF0JRGz9B7G9xnClGJGeRoyNdyFmoo0haq8P7JoeEL0fbve+stjT4dHxtd+9a+/9uVPw58nIACPIQAAZcyd93dyudB/5aXBj7X55852ub2ukKCmJhSoCcqhEA5JSFZRNIIiYRKJxKKRSDgS4dbCSDgWU0QT5Ap5RZjtHzKdh8xYNJgKIjN1xPhGktD8+b7ly2aNjWv7e0Z+/8hTDYvmnX/OmeUlReVjMBSpQAcPHoQ/qBmlsbHx1ltvzb6gYakchemefFdXVyOnPEaCNF7CzOXtsylrVUAuOmf5k507RmL6wRH13LqAB8cDWmSaCrFpnM6/oGG60ooJdyhg6Fio4/nfOQFqyoWMy4UMY51XLdSEwZCbDcdU7f6tu/f29jetuvyLN19fpsZzIJ85saurqyCl9IApX+qZq56cMGgLiTDBY5jZ4JZyQikOF5y15PCx4d5jQ8NH+10ed+3iel3TGKPG5CDGQAljJsoZcg9dfCsVRzTMc3ye1hienDOe5VRiLPoZwohJiEmMYb6VGMXCWqhyoVBT4p+U7Nu155nHN4ZHxq9ZedVX/+Yzs2uC8BcKGO8l8Bg
CAFCm3PFfD/+haz92+a9tCq68zB/wS16fJ8ALF/oDgaAvGJSCfuSniAprYdTyFApjoaI4sbFJYdcs9n8if4MhSu1OQ0aNNsps+JTyp2d7Xnmljyqj//ilT/zz399SLgvufKwQwjtWhGa+gGWmyOzldIKjMOcn7xxS+gSTtb/M6mHKh0oXQ+bmRnltd8+3//PBYMDT3BBas8QbHh3FhIhqhu7JBQ2NLQ8Si1YuMPvyUlMeL8id84w8qS0ZWezoGAtfocKYyncIQpt2Hnj8pT0qpdueeQDykatwlIY5sWirl66urpmY5uxjuGUnpJQKj6Hdb5iypZVzXqKf/PrZHbsPejzuS9esCtbNGYtEKcJW3+T4VnZhycUnCTm+lWSEJSxjB67Hp3WfbKoZpvQeJhzEDBlaIaOYUZkxxAjSVGaVL+S1CxnVjx7qfeK+h8eGR04//fQHfvmdpYvr4S8UsACPIQAAZckd//XwE137JFfg2qaQKRd6g8GgYTD0BkNSyIt8OtKiwloYjUSEsZDvqqpKCa2y14yZywhe2dBwGjJLUvR65YZFNSf6Y8OjZOu21xCNrbyiDJpj5mkwFKWabrnlFvibKgKNjY09PT0dHR3JL7ijHIXpnvw999xTFjbDZLkwIftsWgUH03kMU7ZLnrJjcgIL6+bMnRPavvPAiIoWzfbW+mSVFzS0AiBRZ2GidXKJ5MIMvaFze/BswsIcHjPzPSnGzPykGBNJElULVV7EUOMJy28cOXn/c2/ECPnxhttXXn4xjFrVhrC8wZw4o4j5rrGx8e67757pCcXKOLZnIqe7dOQ0rrxoxSu7ek+Njg8cOV63uD5QE9Q0zeY2N1ewIvHWcBryfbPooQOZ7vyV7v7J527SPRmSGMLMNBUyihHDlCBNpWoMmQZDQy7s6f3LI08NnRzy+mueeuA/QS4EEgDFEACA8uPz3/xV5yt9ksu/+vLgNVdxudDvDQq9MBgKeUJBKehGbgXFwiwSUQ2tUGxi0SippDTkrJYbk52GhmholZCeJBouXhgaD+snB9Wt215bsmDO+ee+y8m/WXt7ewsnt29vbW2doWwgIB0tLS333HOP1UXE+UKhncbGxpRypzPJJo84IU8t5ZHMGmLyj8jwfFJy+rL6I8eHe44OHhpTz5jjD8pM07SE70xITGY2SXGG4rTs75wuAp/u4+d2JPtnTkyhkEmSkA5FGrLR7YQxnbFDQ2MPb3vr+Gi4aeXlX/jc/4N85Gqjvb29sbER5sQZRbg4i7P2mPJij32ycCatV5316pu9p0bD4wPDc2rnBmYJ0ZAaq1vMJta1jPdAEdv4jCFhXFZL9Tz6dCVIhxKXTzElQi6UuFwomXIh47ULqRpDPBl5ZHDo6d8+1n+s3+uf1f3kL0AuBFK86yArGQCA8uLz3/zV9j1Dkuy7/iOzzznD4/cjn98fDARDIa4YuoMBKYgQU5ASo7GYEouKbORofKOrGmW0Kgd7lFgBjDdZ5u1QKM9NnkhPHhtTH/3Dvp07+0hs+OG711/tYKchxrnPYlDZvbQn7uKLL96xY4fTUo+njl5aWx3+hJPTkO31qpKlwORgI8ORlD8rn+dp8cV/+T9NVc+e67n+tKA6PkpVVTZboPD69siemCxjLM1MN+GCt0DJpmh9NnfIQT2038GSC61PeyayQmk8BGfsJ5u2v9pz1OPxvbDp3qUNC2CkqipESV8IDGf6RZ6hTOR0s4DV7YRw0l0WcvKLdmJgrONnT1FdQZids+ri+tOWjEWiBCEku4wWKCJJWZJ5CxQJ8UZZSJIcmKGc50yUTfMTzJCMjWRko3YhYpjoSFMoL1/IRUNevpCRI/t6nnu683jvMY9/VtfjP1+6uA5jDH+nQOIbDyYGAADKiDt++OATz+2XXIG2ltDqK3wut+T3+wKmXBh0BwOSnyEUQ7GYFjOaIUeFWhijOqnqEc9aA7CJ3ih81cio2Tc5vpjk29Ex9fmtR57p3EdiQw/f+/2VV1zizJV3zpIfNEcuCVaNQiEXlmOpLOdXM0x2lCRbCFMqhulKFma2DRbqab+x58h3fvbYnJD34rnu65b5ImNjTNNkW+tkF1cJjabJfAdP30OSTyyU+XtzKFM4rSKJWbbFTHGOEGKSZMmFDGOCsYaQYmqFhlyIUPfbR+7t2oFcHpALq3SNgDGUL5zpuQMhVITpI0MdQ2vf3iu5LETD7/z3H3VN8QY85668ePbiuvFwRGMMyS6uGLqsyoYYywhjJMsYYTShHpb3H2Y2x0V3Q8lQDJlEGUJURkxGCOkq01XKM5GZxnc0hTF68uiJx+75/djI+GmnnXb/ne1LF9flP1ECFQlkJQMAUDbced+f73/qdewONF0eWn2V3+uV/D5fIMh7nQSCAU/AL/kpolEUiyqRSNQkElNiMUIoqu4LJGzS3kQ/ZSxSlOOvjpHIgRHyuOUF9aEDPSOjYfLbBx/91MfbZs+qcdpvlHMFQ2iOXPxIKSH1+Mtf/jJC6J577im7alkOr2aYLhM5oT9mSoNhOpUwy0gyz4BzQe2seXNqXt51cERnbpdrxRyfpuuMUsuhN5F6Zt0sRE3D7MOz/CO65FcsXVZgznJhYu1Fs3ahkAsRz0fWzExkKx+ZIrTpjYN/3LE3RvHDd//gvLNPh1Gr2hB5slC+cOYmQfEKF7MDtRgNUg7m9vwMuw/asVJRKOC99LzG7lcPaJo+cPjonNp5c2rn6ppOiC7Wsti4GG7VvmVGnrJRyYKX764IUp4j4xIaQ5ghF0YYUYkxGRGJUcT1QapEmchHVqJUVRmj7+zeu/kPfz41OHL6GWfe9/NvC7kwnSkeqHJAMQQAoDzYvvOdO3/7/Lgqr7o89J4mf8Av+fz+QJB/BAIBX8Av+QkiMRqNxqKRaDRqSoaqolqFn6sZbLkMMUuSDY0QnIkSYfyGxyMtXTJb1DR8euOf11y7ylGiYc4VDIV61dnZ6fz+FRUQI2WoUdjS0tLT0yN6IpfX7+XYaobJml2yXGj3lVjZytNtfDxDbpTTltapGnnzwPGjYTIn4F0ScqtCNLTFLnbRkM2YaJh9nDbdO2d4kJSuz2nlQScIi4ZcKElUkpgkIUkS7kLVphjqXC58fm/vQ9v2jKvkx//xzbb3XA1jV7UB5Qtn+uUVq46izRrJg4Y1sCSMEik78DpTOgwFPNc1n3/4xMixEyPjg8OI4QVLF+i6TnSNr1t5cVsm1rDmBXDGjFKHxgLXue1Q8jnLEv/9JNEWGREJMdlISSZME+1NYlSJESVKlSgjOmLs4N53Ov/wl5HBkdPPPPPen96+ZFFtwkU4UAyBSW82yEoGAMD59J0Y+vw37z52il52UfC694b8Acnr9QWDwUAgEAwGg/6gX/ZRRKMkZhA1/tc1HUa5xIXjpPXjxL6VmEx52gqvbEiHhmL3/mbHkcMnrrqo8ZH7fuqopVJuZxZjvG7dOgiNZg4r9TibGoWtra3Nzc1ldzqcWc0wIRPNkgjtQqHV9ymbsvclGTx/fO8zO3YfrAu4Lqt1XzFPCo+OYkpdtvRk2fRTyLZtyQPcnH
uSZM5HTnB8pJMIU3yL2eQE8Z7IVjKyytOQFW4tVBkjlG7Z1/fYy28NhdVPX/+RH/7bP8AIVm1A+cKZniyKk4mcbjqwY9U0TO6bnLKmrTPfFQ9teu1PW97ye9G7Lr9g9qL5VEIRRWGSjGQXdrmQZKYny6KyIca8oCHCGEl8uijzJOVJU4CZhiwhxtOweQ4y44ULdZWJPic8DZmqMaoriDJdVbd1bj341v5Tg6OrV13xndtuXbq4HtsA0RBIBjyGAACUAdf/3c+ODdNLLwpd2xQM1cgejycQ8AcCQWEw9Mt+HelRLRqNRmPRqGIULlSIDnJh6hWGLQC17SenbGDk87mWNsw+0DO6Z9+R57ds+atPfsghEU5uBkNojjxz5Nb1uLGx8dZbb21paSkvy6fzmybbI0Bhsk7ugpJD6nERuOqi046cOHXg6NCwSt9VVxPySLwzZuLTw6bHkHtLnBjf5KYhpvuuLL2HRh4ixpSbCq3myEyShLVQsfKR+VvhrWPDD2zdNRRRrll11be/8YXZNUEYyqoN53dzKt85ccWKFUXORM5yhEm2EyYMovbjCXJSyTn3jEURVd9zYGDk+MnRgeH5C+v9QZ+uaowRI4WGGeW6J9yF5tVxI4VZNFjG5b2Sl7lcyNuCMeErxFw3lJkueiILayHVYlSJUk1FhKnR2LZnnn/t+e2qhq7/2Jpv/O0NDQtrE94q9hMNoiFgvBMgnAYAwOG8/6Z/P34KrWgMfvpjc0I1stvtCga4vzAUDAWDftlPMI2pMUU0OlGMHVLlfU6mFBQmhaHGXjyCpGb/E8r9J7yNcm/f6P/e9dLIUP/1H1r10x98xwmheA4nF5ojz1BQlL2jMMMjlF0XGgeG2dPtj+lYCwlC6Ns/fuJo/9Biv/SBZf56WY2Oj0vcaSibNkOXzV0o27aOjW9yEBCnrGaYooMKr1TI+CeWZSpJovOJyludqKbH0JALjw498MKbvUNjHv+s7id/YZW9B6oHmBZn7oUV81oJ54h0ffPT7Sdcaspc0Lbkc8ez2/b/+vGXZgVll891xiXnzqqfp+iqqulIlnk7FNNmKLvjNyXeLkvCfAcjSeZts/jNctPEJMtaaGwNrZA7DQnSNKPJia5RTeHuQoUSjVHW+86hbZ1bTw0MUeT5m1s/8aXPrpEkSeiDEielOgyiIQCKIQAATufWr9/58tuDoVDw/31o9rlnelxul5GJzAsY+j1+hFlMVQypkEuGigJyYXaryUmLyvgORYhRyggjhEuHpmioxEjXcz1P/2m3Hj352K9/svLqy8ouwoHmyIUlf6Gw3M+OA59zcmQoUs+S+2OWRXPMv1n3G0rUBT58/Rk1AT0SDYclShMUQ+GwsORCI4gqqwgnyzKFmfORrUxkYSdkvJ8Vk2XGj0xYC63OyIztPzH86y27jwyPnX7a6b/55XdALqxCYFqcIUQmcskvKSUrhpl76GdQDDPnKZdqKnlj77F7H39JVVVZZvUrGs67+qLRsfFwLBofAE2tEAv1UHJhWUaijbKEjX7KvGgDQiJhuQzeV5ZKKIubmPGUZIYR3yE8DZl/MlWJb4VoSAmibOf217dvfjEWUbyBOY/+37cbFtZaWmGyYihJEuQmA5OWHBBUAwDgWLbvfOeWb94bqJl1bdOsSy7wBUMuv88fDPJ05EDA7/NjCSuqIrRCSzGkBOTCrBeUiUoDYpRRsXwkiDsNhYBIBwdjTzz11hs7Dlx5UeNjD9xZ2gB7uucX4qJCUVihMCHEKruChhjj0lpIMsSH2XgMHT5OnhgY6/jZU5QoNTK7ZqHnvFkoMj6OCHGZcqHYsURDydxKzjYbZn5HZfOl1Ptme5OJLca6WbtQZUxYCzWEKGOv9Zx47JV9fafCp51+2v13dixdXAdhYRUO5q2trbBYmokp0iFzWYLMx9KToBhmqFwxZS2LIr+jTgyN//L3Lx4/ecol0QWNC5eds8IzKzA+HlZ1YpkNseQypMP4p4yRhCRuM0TYphvy+oYTLQKdNC/wokFCK5SMNobMchfGx35KmaZQXaOiLTIvXMg0VWQMjZ0affGZrSd6j4XHY1dedskdX/3U4gXz+IwhJYuGCU5DUAwB400IUwUAAI7l/TdvOH6KXnHJ7I+11WCX5Pf7AgFRwNAf8AckWVZV1UpDjsUUJRbjvjgY1qazprRWeXabIeOdT6iRlUwJpTobGYnd99vX9r3dc8OHVv30B/9Wkmebg8FQxEWOUnbKNAqaCaEwQRMpr9PkNCU6ofOJJREmewzLxWb4xp6+Xz20RVeUkEzXLA+c7qfj4+NM0xJEw4leKKZWaKmHZbxAz1495EEvt9XIwmCIeX9kHWON+wpVcyvchW8fG7qre+dQWD3tjDPv+/m37O5CCA6rKAKEJmAzMCOUPBM587yQZX3blE7DDE1RSi4dPrhp58bn98wOuIJzA6dffFZofk1UUaPRGJUQdxdaiqGMrSRlLGOJq4QIC1O2qRhyAdEBA6ElFGJbGrLhLkS8twlimFGs60Tj7kJdoapKNa4V6ppYyff1HHnp2ReOHznm9oW+8LkPf7Dl0kX1c+2aYIJcaN0ExRCY9G6E0BoAAGdy69f/++U9Q/Nrg5+8bs6K5W6vzxvw8w+O2+PWFM3oiKwqSiymxFRwF+a4oLSWd8zcUOFOYhM2Q51RQl/Y1vvwI6/psaGv/+1n1v7T35YkyJnuKYa4KGeKIxTaf1zZabuOUjntsZylDNoVQ3uvzCzbJZecE4Pj//bzPxKizvWw1Qu858/G4dFRoqoiMdnF85Fl02wo2eRCWQRalRLupO5zItwxwlEocpDNmypjlrtQ5ZnIGhcItu4/+ocd7wyFtabVV/7HHV+2ctMy/BSgIrUtKF9Y8Je0q6vLsT30MyQpW731UyqGOYiGJWm+3z8U/t4vOxkjMtbrly1YdOYy2e/RdFUTlQ1dLoxdSJZ4M2VTQJRkm0qYuCPaaiGMijw1JquEfCKzK4bcYEh1pOtUV5mmUZ59zMsXcsWQlyQfGR7duum5/t5jqkYld+jBO9cu5FrhRLNDmzgoy7JdLgTFEEgAeiUDAOBEtu/c//MHtgSDwVs/NX9pg9vldvl9/COOz+1x64oWU3jRQkVR458ayIW5x6IT2/jSgPGNdQRZrypDDYtmzZ0TONBzqrOre/WVFy9d2lDkFfl0WyRDc+QcyK3rcf6IH3HPPfc4uQdxMs55wskreyvAS3Clpeyh4ZxWmHZCAc91zedtf/PowGhsUKV+t2vpHL+m6zohxqUOjO39g8UlEKOHsu1gxQQ99jRkIQ6KeoXGPncXKmYOcoxShRCVUkIpwnhn78DD2/cNRcg1TVd97/YvLFlUl6GJClDZ2lYJG/hW5IzZ2Nh49913O7Dpf8qap8l1URNa5drnhcydlJNnn5IMI0G/5wOr3xWOkj2HhlFMGTo+4PV7GxqXSBIiXFNDjGLEEGXxHUb5ltv0RE4yppj3UMaIYozML1l9lsV9ZmQeEV5CCSNZfFrVNrDt0+iJjCTMZGq2QlZiTFWoGmNqjIodTWWU6bq+a9uOl
ztf7D/az7DvQ23N///bblxYPzfdecwATAqA8VaBABsAAAfy/pvWHz2lX3nJ3I+umSXL2O/3+wOBYCDgD/j9Pj8hRJmEqmsajGZ5wuw2Q8RthpRRylOSibmjs4GByBNP7nr99f1XX3za47/73yIvfKd1lsFGMd2wp5iOwso4a06rBZayXpXlK7RnJadLPbM/jqN4aNOOPz//5iwPPneudE4Nm8MUJRqVGZMn1zR02UoZWgYNy3tYKYt3bFUqRLIsmpzwcBZrPBPZKFnId/T4+M0imv7bl/btPjIYo9INH1/zt7d+dMmiWrvTBETDKor9yq3+g/OnAOfnMaTsYZJsJMxQzXBK16FDLIc795246/FXuSCoz66bvezcFaH5NZquRSIRnTIkydhqhCLLWJKxJGod8r4oEjZ7oYgWWhL3GHIt0VRQLdchsy2ep/HXZ21tXkJs8xJaU1VSVjJDRGe6ijSd6BrTNaqrTNe4x1AVa3Rd0w/v79m57fVTg8OqShuWNv7o2zcvrJuT8OdvaYWWl9DuMRT7CZIxUOWAxxAAAMdxyz//bM+RsdNXhK5776yAX/J4vD6fz+8zYIypE2iqquqaDnJhAaII85+wFopXFMf/555DxoTz0O9z1dWG9u4b3PP2fqRHV628smhC0rQMhiAXZh/wlMRRmI6WlpaOjo6enp6yCGgbGxu7urqc9myTl/gJklBm50jC3RwSMJx7xsKIou8+MDCgsojOTpsX8rslVdcpH6KEo9BuKrTbD63jrNzNhtxXiGTZ2MoyE/uSRCRJw1hohYpwF/J9htDJsehf3ux9ft9R5vJ/5Yuf+uwnrm1YOD/dewNEw8pG+O7Ly8ftWNrb2++555677rrL+a9nOntgSqsgzo+Ex0k3m6T8xvxZMD/UtvLM8ai+650hpJP+g33xGIGyBQ0LEaOE9wPBlMbnB0IwowhRTLjlEHHXIZcEeYlAsSOmDDah3DFm9iwW7baMhiT2TyunWJgHrU/Z5iWUxFUu86ZLfMm0Fkr8SPwgYhLRsa4xVUGqQlSFqlGmxLi1MErVGI1HQAQxNj4yvuvlnTu2vjY8OCq5a/76sx/6u5vbEuTCdKc4oVFysskUqPYIEcJsAAAcxfY39t90212B0Kz3XTPn6suDbrfsDwQC/vjG5/e53S5V1VRFFdZCNf5PFfWbgYLAElqgiNbJ3GbIhNNQp7pGn+3a/9STr2qRE0NHdhZtvZv9hAXNkafEIY7CdM+tjAoaOspmmOAfsVe1T7mf7BxJV6bKIcbDzm37733ipVkBWdaV5oXes2YhEg3rvKyhVdDQKm5oVH0yXYeIuxGtwK/MMEsWIllGVmdkXriQSZKGkI6QSoioVyi0Qm4TZy8eOPHEjoOjiu721/zb2s9/5H1XJRS8Txm0Q5RYqQoXXEgr4AQaH5HKapmR0vqX0i2Y0oqY0pCYTYOUzJbDbGaWHGafE0Ph53cc/tPW/TUBF9WV0PxZy89dEZxXo/EYghCKZCk+V8gu7OI+dVnGsoywjGUJSzK3b0vYaISCTU+gZPWbMp8QRrZL7hNP2GYktN3P2BFfsryE2Nqf8BgyTAjTdSbqFRKdET1+k8SX4EznvU0IYYgRjQ6eOPn2G3t63zkcCcewy//B91590ydWL6ydk3Ihbd8mewzt/U9AMQQm3jmgGAIA4Cje/7l/PzqsX/f+2pWXBV0u2cf7I4sKhl6vl+i6oqiKqnLJUNVUlVCCYBgr4IJycgsUSnmCspWbzLdEZ4MD4ceeeP31HXv+6sPX/OxH/+GoOEf0K4TZLUOc40yhMPl5lksw1tra2tzc7JA4PF15+3TqYbpEswzSYc4hXEHYuffYPY+/rKmKm2mX17qvWewdGx2NhcOIMRey6j1h2eynnJCbbJWTR2UhHZqtkI0EZMtmKDKREdIxthqbqIyphOgIaSL/HKHn9x17etfhMRUtb2z873//O6vPib05JtSuqhJgZizg9FQWmchTThPJU0bm6SA5Zzn5EeyX8DNIipmnkinfpdm/jfuHwltfP/KXFw8EvJKuRetXNNQuqatbWhcNR6LRiE4IYwjLPElZko2t6KQsS1ikLcsiYZkbCiUhHXJlj4+UzJ6dY9MNTcUQ2wqFm1s8WTG0VELGMKWIEUSp0AcNodAmFzLCD8bvFv/JkfHoW6/ufmfXvpGRUdkdWLy44Yf/+plkX6E5meB0WcmSJFnJyKAYAinePDBzAADgHG7+xx9v3zM4r7bmc5+oXbjQ7fV6RKsToRgihHifE6EYqqqqEkIQDGIFX0qaoiGvEE3j/yEWf6UJIYRRwm2GOtu189hd93RrkRN/+N3/rFp59QxHzdnOVuXYb7doApzzhcKE+LZcjKKOtRmmFA3TKYY5O0SKrx6eGBr/5e9fPH5yxCWRM4Lo8nr3Qpc2Pj5OVFUyhUKzeLzhN0yQC5M/nbU6t0yF3EJo7dulQ4KxxsVB1axaGN9yOzhjbCisPPTKO2/0Dbk8gY984JovfHrNIt4oM8FdmFI3tIeXQOWEfBiXtcjlnIlJXM0q9zVG5rKDmaeDhCPJKmHmdvwZ5pf8J5qU9xkYjnzvf7sJYwE3UlWldknduy4/R3bLlOqRSERTFJ0xLBnWQowlscXxrWxUPOR+Qyxz16EsTdgP0SSboTV8mn90k3pvmaZC/iEUQ0owt4MjQsQWMcrEUjtBNNR17hqP/3bjI2MjQ6M9bx88eqgvGo5R7Hv3BWe2XXPRRWcvq58fSjd0pzMYWuULExolw9UjYOLNA4ohAAAOoe/44C3f+NXRIW3VlXOubZoVDLn9vHih3+838pEVjauFmsZLGEL5whlcShppybyfHEllMySMqPR3D7724otvXH3xij88fN+MLtCzNxhCUJSgZJWdUGjHUd69KZ+qo17hdHlkViZySsVwypL2jpIOH9y08+nn98wJuEOy9rHGQK1Li46PxaJRTCmvD4WtpihSkm6IzVqtkzREjEt7CcouFBpeQrup0FQMKe9wohOiCmshFwo1LhoybjPc2Tv05zd7e0eibl/NF2/88OdveE/KwlUJOWhW6GgPL4GK0bkgH7kg43zZZSJnM1Mk30x3rSjDJaXs1cOUP2vKKSb/DipvHhj41SOvaRrxeZHHLQVmB3w1vkUrGrw1Pk2JRaNRQrnlMD7Scqch3wqbIcbxcRjz6yzYOM4/zbRlw3UomipLeKKeru2iFBbldqnYxhfZQiKMv2i8w+DElhDLXRifpymfrSRpqH+o/2j/gd37+/v6GcaSO3jR+Wd84wtrFtbOTrkeTt6XJCmlxzBhH8raApPeSxBvAwDgEG7+2o9f3jNwxmlzPvHheXPnCoOhz8pH1nVd431O1Ph/mqZqUL5wBheR5vrOsBlyyyEhiBJChc2QUKKznoOD99639eSxI2u/8rlvfuOfZmqiytpgWEYC04ySIBSKdiLlukwpk7aeTkujTucKsSuGyUJhhv6Y2euGxVxY9g+Fv/fLToaIC+mLPeT9SzyzkToeDms2s6Fkb6Zsq2mY2m9oioa4iL+FqIqVKBQmpCHzHeEr
1Lk+qJuZyJpoiMybnAyNRx967eDOvmFJ9niCc373828sqp+bXO3eig8TRENQDCsSkAsLMsKXeyZyNvNFuhkkYTbJoC1mUAkzeBWz/7nZPPnMxweGI907Dr+066iiarJEMaIMs4tbL3P73ZqmYcxUJUYJIfG5EmOj1qEsBk4kRkguFBpiovDi2TqFGFtJDKBsUoFDoRXyFbUQDW0qIWWU2PfFGlwkQUcjsZNHTx7ee7DvQJ+q6vGj7mDbNZfc9LGr6+eHkofr5NHbLhRaR8TBhJq29pRkmAgA4x0FiiEAAE6g7/jgzd/41amIfuMnF65Y7nG5XX6fIRf6/D6EkKZpQjGM/69qkI884ytIsd6ijCLGiHAkTbIZUsJ0lT7b+faTT2676uLT/vDwvaUNdSAoqiShMCFOKwvR0Jk2wwyVDVOqhOkUw8zxYYYIrQjrzIf+tPtPL+6bHZCxFluz1HvBHGk8HEfXNMlshGLphiK7TDZ1QylJMcT8SSObCRGZRwq2+LYKFCZohaY+aNzB5jfUMdYY07mRUDdVQp1nIovIcjSmbn772Bu9g0Mxit2Bz3/6A2uaL7YKWtlTzOzxoVXwPqF8FQSKFSZ1QbiX5zqkMjKRs5kvMhzMbDxMOd0kHJzyAZOtANm4DnOzHz7457e2vtGrqbrXjRlR3T533fIF/tnBYMg/q3a2qsZURdFV0USKCQO4cBTyQoc2j6HpLDTGWCQZpQ7RJMUQGwtrymw2Q8YsrdC4Ns/HfElMRZGx8OipscP7Dh0/cmzoxDDPj/Zh2XvTx65+/6rzhVaYcrjO0Ag7wWNoKYbpWlfDRACAYggAgFPgBsOT55079xPXzQsEXV6vVxgMfT6fx+PWNSKshUIvhHzkoq0g7TbD+L9JNkNGdDo2ovzvXc8d2Ldv7d/f9M1v/GPhZ6nsDIbV3By5IoXCcjy5DuzWkrKHSbqWl9kohhmMh9mohzMnIO7cd+L/Hn8VUSojEkDqNYs8pwcpUaKRaJTadUPREUWUpBI7yR7DyTnLiDG7jIhS7kw1ik2EXkbbTWRXCbG9RqElHfIdhrGOkCERWlqhuSO6IWs6ffPY8GOv9wyEVZc3eNnFF972Nx9JKH6frtq9KHhvNxuCtaTSIj2o1JEfIhO5HMt65DlrZDOhTNkva8pLTdlcgso+VTkHz/vJoXD3jt6XdvVFo4osYUo1jKnkkhvObJi7YJ7H78GYYQnrmkaIzggxCnqI4dS6umONmpLZ3WSioiF/AszcMWuEi+mD8Q9Jdkm8QTNlTFXUscFTfT1HD7y1X1c0jKVoVEWyF0ne9606/8YPX44lqX5eMPnPPHk/WTe0XzqyFENomg9MPY9A1A0AQMnZvmPv577+P9hT87lPLjrnLL/bYxgMfT6f1+dFCFlaoXAaQj5ykRaOEzZD0QJF1FqZbDPU6I7Xjtz/m676WfiN7V0Fl4qysQ1WZwvIihcKE2K2ssg3d5rNMJuS9uluTlcxzBD4ZRmF5s9Df37zlTf7NFVDTF3io5fWus4IsvFwOBKNMl2XrJqGZolDozWKmaqcTjScpJ+JZyssJykX1kmR3KStqRjiBHFwsmLIeGMTUeheeAntBkPd6niN0KGBsZcPDe4+fuqUyly+OZ+/4X2f+/jKlLIRKIZVCFjv859kq7nUSfbGw8yzQPY72VyCymaWma7hnTH23Cu9Q2PRrTuOjIUVrwshTIimBecGQ3NCvqC3tqFO1zR/0OcN+AlvSCK2jBEu+iFDGBTpxvH5gdm7oFiiIhZObmQM+BhLmqYRTR86MTQ+Oj7UP3y8py86HtUJ405GD5I8164697MfuQIhtMBmKrQP7On2p1QM01kLYQoAEt9poBgCAFBybvraj7fvObl4Uc3f//VCSZa8Pq9QDL1er8st6xoRGclcNdQIoZCPXLz1IjKas4kQlXtaqOUxFDbDI4eHH3x424G9+9f+/eduv+3rhZyisjAYVltz5KoSChPEOOdHbg60GWbTpDJl6vG0FMMcOizn3PVySh555q1NW/fPCsiUaHOwckWd+/QgpUo0piiEENmsZujiWiHmO9guHZrGQyvasyuJKEkuxKkGLoTshe+No8iseIUmewyN45JEMSYIEV4HQkeIcK2QmKKh0AoJz1zTCOs7Nb71wMlXjwzpSHZ5a2791Pvef80FC2vnpBtLU7bIFIqhPSsZwsWKoZqt9wV59So+Ezm3SSTD8elaBTPfrYCK4bQExP6h8HOvHn7rwOA7vYN+ryxjihGlui65sKZpkgvPX1zvD/kWNzZ4fG4kYX41ncrxHYb4ilmWZbF0xgjphPAkZYnqRAyylNKxkbGxkfDRnt7wyFhkLEo0QinVNarrDCGJYfcFZzXU18797EcuSxYKM5QszOwuTKcY2m8mPwgAgGIIAEDp2b5j741f/0Vw1hxewdDndrsMd6HX4/V6GENcLjQMhkQnMGoVGcqEbIgo4Z2TKWJ2m6HOdI1ufu7txx7devXFK5589DcFXK9nY46okpSrqhUK7a9AWUjDDrQZZgioMkiEKY+n64OZfDxD8nK6aG3KsT37wb9/KLz19SN/euGdoFcmeuz0AL1wnnRmjRSJRiPRqK5p8aDOylPmO9h2k5e1x67JBkO7/dC4apUqorIHZ8iuG1pyofkpakoxhIRQyIQmyIVCwvcN6dAUDS1B99DA+PbDQ3tPjvRHiOwOvvvCs9d+6YMZtMKErWxU8cd2uRAUQxgwAfvyo6urq3oykXOeRzIcz6arSZ6KYc59lrMXEN88MPjmgf63DpwcGo0Oj0TdLiwjIQsSSjSdkODsIKNEdskuj5tRSihxu91ev1dcb0cMjY2NE12jhETDiuieEhkLE51Qbn7ASKJIIkiqnz/7/HctvuCshgvPWowQXjA57zh5NJ5SKMxSMcwgLwLAxHsMYm8AAErLTV/7r5ffOnnaabM/98n6QMDt9Xq8Pm8cn8cly7ouDIaarmm6rsfjUhi0irw6ROKiKS/dQo2ShkIrFLqhrrFTQ5H//MnGoROHf7b+jhtv/HRh5qcsDIYV3xwZhMLkKM7hlhln2gyzjK+mNAxmox4mt0lJGQdmH7Pl7EYcGI68uLN309YDAS/Wtdh8mbQu9c2TNT8mkWhUURTKU5Ulm2KILcVQeA9N9VA0BBEORLv9MGUwh+15x6ZoaJcLKVcJ40Enf+aED7NCKJykEnKbikhAxggNRdST48qrR4Ze7RshSJY9oYsveNc3vti2YP7sDDFecriY0Pkk4QgYTCqDip8cZ3QAh5cutyE33VczXx+a6f1CXbJCCO16Z2DXvuOShHe/c3JoJDpwKizzPiWIMQlxCVA0NkGi9DcyS4GLmuDx9TOhSGeofm6ofl5wQe3sunmhC9+1qH5+aMG8YOYxPN2R5BE7G90wQT3MkOAMAKAYAgBQet772e8cG1Rv/GTDuWf73W631+fhcqHX6/FQxjSOzgGDYQmXhsywGRLEWHxDKSGUEUZ0Lhoq9LnuvY8+8tzVF6148onfF0QbmtJgWMEVmkAozHDSnS8aOtlmmDmamlb1+um
qh+l+RA7ZZNPyG+4+cPLxzr29J0YDHoyQXucm81zkwrny0iAKR+Pous54exMrYdnop2xGVEJIk23qoWRKgcktUxILF5rlrMQLxGtcxfepaEHPc5CZJR3arIXU/AUpY/tPjh8aDL/ad2pI0SlyY0/w5o9d875rzl9YOztzeGntJ/THtJsKEw6Cx6QyxkkoX5jb69bR0QGNYvKcULK5Q/YaYvbZx/mXNczt6tSJwfH+UzExEZwYGEMY9Q+GRVOT+vlBsyY4WlAXqp8TSFmOMHnoznA8c+3CbBTDdDczPw2gmgHFEACAUvLophdv/8Ej82tnff1vlkou7PEIuTC+lV2ybkiFBmAwLNlykHd3i7/8ps2QTlQzjO8Qle7d1//AA1v6+w489dBdq1evyndymspgWJEVmkAozAbne2ccbjPMJl7KbD+cUknMbC3M4edOK0ZN+aWTw5Hu1w5v23VUVXW3RF1UnSPpl8yX57v1gEQkRhRV1QnhwxyzdENDTRPl/4SeKPyG3Coo2fyDeFJ3zEn5yHat0FAMGUsUDc2t5SgklB0bifaNKm/1jx0ejkYJ0rEbu4M3fXxV2+oL6s2YM3OkJ4TChB2xn+ArTFn5Hihf2QviuxxeN8hEnqGZZVqzT8ojOZgEZ0gxnKGO/xmG3NwUw+nuTPk0gGoGFEMAAErJez/97WND2mc/uey8swOyS/Z6vR4uF3p8bkSRXS4Eg2HJl4Ci84nRAoUgRgjhZhjC+5+Mjyi//s3WN3e+9akPN9/58x/luXDP7I+oMLkQhMIc1tYOr8/lfJth5sgn+/aX2SuJWTZWzrMW1ZTh3O83vfX8G0c0jfi8GBPNw7TFProsgBuCeJEfK5qmqKqqaTof7LCpFVpmwwnF0AyuEnRDSyhkpuvQsl9Sc1/kGgvRUNyUzUANY3xyXH2zf+zwqNJ3KtofIUh2Ydl30TkrLjpn+ftXnSdMK1MGkwnCX0qPYcqbEDfC8FhtiCkYIQRdYoozv+QwDWU4mHONwpyLYOR253R/rdO9Q4arRFN6BrP5XgBI8T6ECBwAgFKxfcfeG//5zsWL51z/0YZFC91uj4d3O/F6fR7ZJVNCJ/RCTacMDIYlXfkhxMVCZDoMUXyjm/1PCNM1+tqrh+/9dac6fnS0/2CeS6gMc1PFFHQHoTCfl87h74GyE7WzsexNV0bMUzHMWTrMZmV7cij83GtHtu3qi0Y1WWYSpS6kznPThR6yOCjPdzMJ6TUupOm6TojOc675FRIm2Y14tjBLSu6PbNunNt2Q18NHEu9AImNMGSIID40rx8Nq36jyxrHRGEFYkkc1hGTvRWcvv+Cs5e9bdXbKRLYsq1klVLu3q4cJ5QvBYFjuQPnC3GYTyER2zowzrVkpm+MFuXM+v0jm5e5075BZ6ZvWTRjtgWzfqKAYAgBQKm762n++tOv4qqvqrr1mfjDk8XjcXh9XDT1uhJHwFQp/ISVgMCw9lDFEGWFm/xMrMZnrhkRnukpu/9ZDkZHja79y0798c23OUktmg2G5N0cGobAgOF+SK2unTw4JX9nriZm7oEy5n3/qmf1Lbx4YfLtn8PkdR8bCis8rYaZTXZ/lJiGsh2R2Qa1H07VlIbdPZjqlhBAhIBJe037Sr2A2UGZmkxPLMzipdKBZ9zBM2LhCe4aiJ8Jqf1g7MhKLEqozCWEZYc8FZy+/8Oxl1648qz5VOfwsfYUJQqFdJcyQiQwBZFmPilC+MId5BDKRHTXX5HznHBTG0mYip1w2TOurKe+fvXkQRnsg23cmBOEAAJSK936mvW9AufH6peedFXS5XaJ8ocfjdnlcdoMhJRQMho5Y2CHG1UJECUWMiv4nRh1D0f9EpY89tmNz58tXXbT8j394OOcFU4aJqXwNFCAUFhyHvxkqIHc+S2vGdK2C2TsK81cMp1Wp6sE/vzU0Et13ZGh8PCbLTELEJyFd1zDVMSVLa6SARJfVeIIu5JUxpURCzBXfYQwxtyy5MGaifgPCUVUXo1lUp6JeoKLT4ah2KqodGI6NKSSsMY0yjWGNIo1gimSEXee9q+Gis5dcu/KsZEdhNqWspqUYJtsPIYYsa6B8YQ4zCGQil9fUk/935aw5FvBpT2uMzUHvA4kQKDigGAIAUBoe3fTi7Rt+h13+b/3TGYGg7PGInGS3x+PGkkQJscoXisJ58Io5AcNmmNz/hLtudI3u23vi3l9vHjp2YPTkwdxingwWiXI0UIBQONMhn5NFw0oqKDZd9TD5SJ7i4HT386lvtevA4FsHTgyeir51YGBoNOKRJVlCGIkm8TolpMYTH/1cGHklxC+bUJ8LyzzhWGiiUY2qhKo6jRIq+qRENKrxqSz+iTBlMkHSBe9qqJs/68KzFl941qIF80JTRnfZd8bMrBhmbqAJlGVEV+bu+yLPy5CJXGEzUcG/q7RxR5ZDcTZ3g1EdKMAbEuJwAABKwnv/6o6+AeXSd9d+4kMLXS7J4/V4vB63x+32uBhjvHiUyP4iYDB00FptwmZI7P1PRGKyrrOB/vHf/Pb5t3fvvu0rt/zL7bflsEhKNyuVl1wIQmExAz/HqnKV52GZlkEj+96XM7Gfv+XQ4o13BgYGxk+eGh8cie3e3y9L+OSpMG+HwhBiGFHEGBZiIFcMhc+QtzfhGiNllKLaucH6eaH6+bMYQhecufDCdy1iCC1IlXScLtLLsmph8s2EbQaFEYaUMgXykaf1Wole9jApV8OsVJxHKIZkM83xGcZzoJBvP1AMAQAoPn3HB97z6XZ/oOaTH1167tlBt9vF3YUet8ftcsnEglIKLZIdtTLjSytRxJBHwogR7rDRGdcNaSxKnt644y9/3nbVhSuefvKRQsU85ZLgCUJhScI/x743hKBZqSPYlL/XdAW77B2C+fgKcxMNU97nxOA4Qqj/VExC6MTAGDIvbfFxEi2sC4nb9XMCDKGFqVqXZJ9ZNmW/ywyKYTaOQggvYQysBiATuapXsJU1F8OgDRTvzQahOAAAxefRTS98c/1vV6yYe9MNSwMBF7cWcsXQ65YQ1olOubmQmwwnCswDDllyseTEZF1sma7T4YFwx3d+p44fHRs4PN3VT8opyfnlmUAohIA5Q4BaJa1Lc9PdptViZSbuX9rK93kaDDNbBbPZgciz3KnsyxIFn6ahkTRQnLG9mJMFAMz42w/mGAAAis9NX/3htl19be9d3Lyy1uWS3B6Px+t2e9xut4vnulKdcOsa9Dxx4NLKlpg80TFZZ0Q4DXWqK/SHP3ry8Dv7bvvq57NPTE5nMHRy2ikIhc7BscJcdcbzeVagz19YnNZDFbZfZ0FivywrGGb+UjZeQohCyz6Qg/KF2S0wIBMZAAAgx4kGFEMAAIrPe//qW70nI/+/m85Y0eh3uV3cYOh2e9yyS+JF5IVaGP+AMcqBUHtiMuFtk3Wj/wnRma6Sp57a8adNL1z97tOzT0xOZzB0YDgEQqFjI2dnBoTVYzOckjzdfNM6XpA75/mLZHivTvcOmZW+ad0EobBigPKFWb5KXV1d69atg5kaAAAgB1zwEgAAUGT6jg/2Hh
tGLt+SBp+owy4Z/8yC8VSkvPKWkoDzwAhJCBFkCn2iFShi/GQyLOHTTqv3eANbXngp+wW9kN4ScFQ3wwShEMIPp9HZ2elMO+q6des6OjrgBNmFqsyKW4KeZd05pc7F+AiU8kHSXYRIfgLpFLR0z7OwilvmR8tS78teFgS5sDKA8oVZztrNzc3wKgEAAOSMBC8BAABF5qXX9yGMlzcEXS4sSUiSMJYkjHjXScooI1w05C0nAYfG/fFPjCWErRBUHDEE4FmzAvPm12DZf99992fzeB0dHcmyYHt7uxOcWSKrFGPc0dGxbt26Tk4LB94IjkIouQ7U5sRbZfPmzXCOJoaQVEzrztkgmWT4UsqvprxbOqb7ZLJ5hAxfzf4npnsN4e1XAQgtLOWVNsBaQoC/GwAAIH/AYwgAQLF59I/PIyw1NPh4GCPxTyS0J9NcSHm3E6hg6Nx4n4uGjCuEjEuHhIeijOuGeO7cYP2Cucd6e4/09mWzrE8Oe0runhDxmNgHR2F5hYjtHEc9MSFlwltoimElvZiVwZOYzXdNy8Q33YzjYvoNp/UTQRysYIRcCENKhrlALCHgJQIAAMgTUAwBACg2vceHZJe0rCFoGh+QcK0hozIeA3uh0wN7vpUQIlgEpYzbC82sZIwCAfe8uSEsubu3vHh7FpFPQohewubICUIh1CgsO0RustNEw5aWFiulHc5RLmNOkviVzfiQWTIrTsZxwX/xwt4fKEccYsB3JpCJDAAAUFhAMQQAoKg8uvH53mNDdXWz5s5xG9ZCLAxriNctFLULQTMsiyDeilERw8ZpFDcxxhdduKJr845DR49OGfkkGAzFcr/Ia30QCiuJdevWtba2Oi1zHGyGBR5+spbGykIZnIlfHKhISnhFzfmIKiLQPBoAAKCAgGIIAECx4x2EcV19oK7Wa9VUwsiQCrlQKPqdQEqy08NWxBjGEsNk4swKsyFiGKN5cwMYS0eOnMj8MMmRj1juF0dYAaGwIrEKGjrqbILNsHRj1fS6mpTFkweqFihfmA7IRAYAAJiRpQhcpAIAoJh87h++v+31nve0LLm2eZHLLbncssvtcrldsiwjxCiljDGzlCG8Wo6GiixyQuInjVBKGNEp1RklVNeZHiPf/feHTx47/N//2X7jZz+Tbn1vbQXFqVMOQmFVRY/OeUol8c8CAFBJw1rCpAlYiwdRlQJeCgAAgMICvZIBACgyGCG8eEEAJw4/TJgL4TJGuZ1OYROVDDsMNjonIwnV18/GWD7Seyzd9ya0SJ7p2kwiX6m1tVV4NETjY+h6DKF10YCmyQAAVNKY5gQ2b96MMYbChQAAADMEZCUDAFBU+o4PIYRqQm5kqE1GZjKvYUgZFbnJYDAsA0QVSmZIhrYt/8QSbmiY/+YuV/fzW29H/5wy+LGnVs2cIwwchVVLZ2cnxthRojBUMwQAIAegfGGGlwUykQEAAGYOUAwBACgevccHeo8PIMkzd47HtKTxJslINDsRqiG8TmWDWWLLqGCIJ9olx6mrm40l+VBv6uYn9uBnJuRCEAoBq3Wyc8JsqGYIAEAOQPnCZEQmMqioAAAAMwoohgAAFI+Xd7yNGAoFXKGgiyewYqPhLkMMWw2SKbxQ5YHV/AQR0S6Za8DMaJ6MUSDgDQR8hw+nUAztBsPCFncDoRCwI7qgtLa2OidhDWyGAABMCzFjQj5ywkRfhKrHAAAAAHQ+AQCgeDy68fm137v3vHPrb7zhdMmFXW5ZdskulyzLErcZMhofkfigBCNTOTDR/IQwSnnbE50SEt/qOt2759j9v3l28FjfU4/c09S0etLcgydmH4xx/rEQCIVABorTUWcaay+MIY0OAIBscGATp5K/IJCJDAAAUDSg8wkwU2zYsAGbbNmyBV4QACH00mtvI2zpRJO/xuD6RVmCjXKUxlYYR0WNynlzaubNq0EYHTrcm7DctwyGra2t+ciF9mYmQiuEZiZAMp2dnV1dXc5RDIXNEM4LAABTznGQj2yntbVV6KcwywMAABQHyEoGAKB49B49iRFatCBg6UocRnlKKxgLyw486Zxh65iQg31+t9/nw1hGkxtjWxUMc26ObHcUCv0FggcgMyI32SFqcnt7O8YYqhkCAJAZIRfCQAGZyAAAAKUCPIYA4BQ2bNiwhoNtbNiwoZIcmr3HBhHDc+d47QdFIvIksRCUw/ICT97HwmKI/H53Xd0shnBv74TH0DIYikX/tJb+KR2F4DUAskEUNHSOsw9shgAAZCbni2oV+VI4rbgEAABAlQAeQwAoMVu2bPnud7+7adOmlF+97bbbxE53d/fq1asr4Pf1+uRQMOXIw6B+YTli5SAjzOzWUSEbBgI+hPChQ0es+wuD4bQKM4GjEChUzCnCTidUBAObIQAAmYcIy48PLwVkIgMAAJQK8BgCQC5YVRrXrFmT5+M0NTWlkwvtNDU15fmzHIO1/LXnJYtVsWh/Aqph+YBtuchocq656JyMMZKkw6bH0Gr4mE1zZHAUAjMReU7X2TpztLS0bN68GU4KAADJZDNLVjxiGSBq0cLUDwAAUBLAYwgAufDss8/OxMO2tbW95z3vsW5aBkPBpk2btmzZUtZOw95jJ31+X/Jxy10IV9PLDV61EGHGK1Ey20Fk04TZ5BAos8kLHIXAjNLZ2YkxdkJBQ1FaEZLsAABIQPQEq/K5T8iF+fRGAwAAAPIHFEMAKD1CKFy7dm3C8bVr1ybkLDc1NZV5igpzuSS/X7YdYIZWiCYJTkBlUBPyW/vCYJguEAKhECgaQrYu+VgqVEuRKw0nBQAAa66Egn2QiQwAAOAQQDEEgBKzfv36ZK3QYvXq1XfccUc2acvOp/f4AGLI5cIuV2I9BDNyF/+BYFhu4LSnzO93I4QOH+oVBkPRfcIeCIFQCBQf8T50QkFDsBkCAGAHyhcKi6W4tAPvBwAAgJIDdQwBIBcKJeGt5WS+z+rVq9va2irhVTNshCzFYahfWLGIAoe4vb29paXF8k1AjUKg5GG5EwoaWjZDOCMAAIhLa+vWravaX3/z5s0Y4+bmZpALAQAAHAIohkAJsNqGYNOatGXLljVr1mAbGzZs2LJly7S+3f4IGb495SNkvo/1UMnftWnTpoSnPROvmCVQVoh0aLMRTsiFYC6sRGrn1YgTLpTBlpYWEAoBh9DZ2dnV1VVytW7dunVdXV1wOgAAsJqDVe2vL6zfcBEFAADAOUBWMlB61qxZk2zZE00/uru7p2z0sWXLlqamppTf3tbWtnHjxgp4fax9e1+UsqPv2IDQB30emWuFzHAd8q4ZploIqmE5wVuc4CTfKEu4z55dL4t9SygEiRBwAiIpuLRdUFpaWjo6OjZv3gx/FABQzViV+6rz1xeZyAz63wEAADgM8BgCJWbDhg0ZMnybmpoyWAXTyYUWmzZtsstt5cWWLVuEz9FuMJwyhdnRMIQZ8/ncbrfEjAMIIchFLn/YFF+klC5cuBAchYDTEAUN7ZU0S4ITngMAACVElPStTrkQMpEBAACcDCiGQIkRZkAhh61fv767u3v9+vX2O2zdujXDt
[… remainder of the base64-encoded PNG image data embedded in this markdown cell truncated …])", "_____no_output_____" ], [ "*#Building the Model*", "_____no_output_____" ] ], [ [ "clf_neural = MLPClassifier(solver='lbfgs', alpha=1e-5, hidden_layer_sizes=(25,), random_state=1)", "_____no_output_____" ], [ "clf_neural.fit(train, Y)", "_____no_output_____" ] ], [ [ "*#Predicting from the fitted model on test data*", "_____no_output_____" ] ], [ [ "print('Predicting...\\nIn Test Data')", "Predicting...\nIn Test Data\n" ], [ "predicted = clf_neural.predict(xtest)", "_____no_output_____" ] ], [ [ "*#Printing the confusion matrix*", "_____no_output_____" ] ], [ [ "confusion_matrix(ytest, predicted)", "_____no_output_____" ] ], [ [ "*#Printing the ROC AUC and accuracy scores*", "_____no_output_____" ] ], [ [ "roc_auc_score(ytest, predicted)", "_____no_output_____" ], [ "accuracy_score(ytest, predicted)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ] ]
d0a676b1af7d5b7bc4d3d075ac0d5923f5df4daf
10,956
ipynb
Jupyter Notebook
Thermodynamics_an Eng Approach_5Ed_Cengel_IDN/.ipynb_checkpoints/Chapter_1_1-checkpoint.ipynb
msuherma/termodinamika
e23001f6947a2599523ac7117b354a6c2f788bfe
[ "MIT" ]
null
null
null
Thermodynamics_an Eng Approach_5Ed_Cengel_IDN/.ipynb_checkpoints/Chapter_1_1-checkpoint.ipynb
msuherma/termodinamika
e23001f6947a2599523ac7117b354a6c2f788bfe
[ "MIT" ]
null
null
null
Thermodynamics_an Eng Approach_5Ed_Cengel_IDN/.ipynb_checkpoints/Chapter_1_1-checkpoint.ipynb
msuherma/termodinamika
e23001f6947a2599523ac7117b354a6c2f788bfe
[ "MIT" ]
null
null
null
10,956
10,956
0.67908
[ [ [ "## **Thermodynamics: an Engineering Approach, 7th Ed**\nCengel & Boles", "_____no_output_____" ], [ "# Chapter 1: Introduction and Basic Concepts", "_____no_output_____" ], [ "##Example 1-1, Page.8\n", "_____no_output_____" ] ], [ [ "#Diketahui:\nEl_USD = 0.09 # Harga Listrik adalah 0.09 $/kWh\nP_wt = 30 # Wind Turbine power rate, kW\nt_wt = 2200 # Durasi kerja Wind Turbine dalam satu tahun, hours\n\n#Dicari: pengiritan per tahun (?)\n#Jawab:\nE_tot = P_wt * t_wt\nprint ('Total energi adalah %f kWh' %round(E_tot,0))\n\n#Uang yang diirit:\nSaved_USD = E_tot * El_USD\nprint ('Total pengiritan adalah %f USD' %round(Saved_USD,0))", "Total energi adalah 66000.000000 kWh\nTotal pengiritan adalah 5940.000000 USD\n" ] ], [ [ "## Example 1-2, Page No.9\n", "_____no_output_____" ] ], [ [ "#Diketahui:\np=850;# densitas [kg/m^3]\nV=2; # volume tangki [m^3]\n\n#Dicari: Massa (?)\nm=p*V;# rumus massa\n\n#Hasil:\nprint ('Massa minyak di dalam tangki adalah %i kg' %round(m,0))\n", "Massa minyak di dalam tangki adalah 1700 kg\n" ] ], [ [ "## Example 1-3, Page No.10\n", "_____no_output_____" ] ], [ [ "\n#Diketahui\nm=1; # massa seberat 1 lbm \n#Konstanta yang dipakai\ng=32.174;# konstanta gravitasi, ft/s^2\n\n#Dicari: Berat (w)\nlbf = 32.174 # Konversi 1 lbf = 32.174 lbm.ft/s^2\nw=(m*g)*(1/lbf); # berat = massa * gravitasi\n # konversi lbf ke lbm diperlukan\n\n\n#Result\nprint ('Berat benda tersebut di bumi adalah %i lbf' %w)\n", "Berat benda tersebut di bumi adalah 1 lbf\n" ] ], [ [ "## Example 1-4, Page No.21", "_____no_output_____" ] ], [ [ "\n\n# Diketahui\nTc=10; #deltaT karena proses pemberian kalor, C\n\n# Calculations\nTk=Tc;\nTr=1.8*Tk;#Konversi dari R ke K\nTf=Tr;\n# Dihitung dengan rumus di atas\n\n#Hasil\nprint ('perubahan suhu tersebut adalah %i K' %Tk)\nprint ('perubahan suhu tersebut adalah %i R' %Tr)\nprint ('perubahan suhu tersebut adalah %i F' %Tf)\n", "perubahan suhu tersebut adalah 10 K\nperubahan suhu tersebut adalah 18 R\nperubahan suhu tersebut adalah 18 F\n" ] ], [ [ "## Example 1-5, Page No.23", "_____no_output_____" ] ], [ [ "#Diketahui\nPatm=14.5; #tekanan atmosfir, psi\nPvac=5.8; #pembacaan vacuum gage, psi\n\n#Proses perhitungan\nPabs=Patm-Pvac;#vacuum selalu dihitung sbg tekanan negatif\n\n#Hasil\nprint('P_absolut dari ruangan tersebut adalah %f psi'%round(Pabs,1))\n", "P_absolut dari ruangan tersebut adalah 8.700000 psi\n" ] ], [ [ "## Example 1-6, Page No.26", "_____no_output_____" ] ], [ [ "#Diketahui:\npw=1000; # Densitas air, kg/m^3;\ng=9.81; # Gravitasi, m/s^2;\nSG=0.85;# Specific Gravity/Dens. 
Relatif fluida di manometer\nmeter = 100 # 1 m = 100 cm, cm\nh=55/meter;# tinggi dalam satuan, cm\nPatm=96;# Tekanan Atmosfir, kPa\n\n# Jawab\n# Menghitung P menggunakan likuid pada ketinggian yang sama\np=SG*pw;\nPtank_abs=Patm+(p*g*h/1000); \n\n\n#Results\nprint ('absolute pressure in tank %f kPa' %round(Ptank_abs,1))\n", "absolute pressure in tank 100.600000 kPa\n" ] ], [ [ "## Example 1-7, Page No.28", "_____no_output_____" ] ], [ [ "#Constants used\ng=9.81;#acceleration due to gravity in m/s^2;\n\n#Given values\nh1=0.1;# distance b/w point 1 at air-water interface and point 2 at mercury-air interface in m\nh2=0.2;# distance b/w oil-water interface and mercury-oil interface in m\nh3=0.35;# distance b/w air-mercury interface and mercury-oil interface in m\npw=1000;# density of water in kg/m^3\npHg=13600;# density of mercury in kg/m^3\npoil=800;# density of oil in kg/m^3\nPatm=85.6;# atmospheric pressure in kPa\n\n#Calculation\nP1=Patm-(pw*g*h1+poil*g*h2-pHg*g*h3)/1000;#calculating pressure using liquid at same height have same pressure\n\n#Results\nprint ('the air pressure in tank %i kPa' %round(P1))\n", "the air pressure in tank 130 kPa\n" ] ], [ [ "## Example 1-8, Page No.31", "_____no_output_____" ] ], [ [ "#Constants used\ng=9.81;# acceleration due to gravity in m/s^2;\n\n#Given values\npHg=13570;# density of mercury at 10 C in kg/m^3\nh=0.74;# converting barometric reading into m from mm\n\n#Calculationa\nPatm=pHg*g*h/1000;# standard pressure formula\n\n#Results\nprint ('the atmospheric pressure %f kPa' %round(Patm,1))\n", "the atmospheric pressure 98.500000 kPa\n" ] ], [ [ "## Example 1-9, Page No.31", "_____no_output_____" ] ], [ [ "#constants used\ng=9.81;#acceleration due to gravity in m/s^2;\n\n#given values\nm=60;# mass of piston in kg\nPatm=0.97;# atmospheric pressure in kPa\nA=0.04;# cross-sectional area in m^2\n\n#calculation\nP=Patm+(m*g/A)/100000;# standard pressure formula\nprint ('The pressure inside the cylinder %f bar' %round(P,2))\n#The volume change will have no effect on the free-body diagram drawn in part (a), and therefore the pressure inside the cylinder will remain the same\nprint('If some heat is transferred to the gas and its volume is doubled, there is no change in pressure');\n", "The pressure inside the cylinder 1.120000 bar\nIf some heat is transferred to the gas and its volume is doubled, there is no change in pressure\n" ] ], [ [ "## Example 1-10, Page No.32", "_____no_output_____" ] ], [ [ "import math\nfrom scipy.integrate import quad \nfrom pylab import *\n\n#Constants used\ng=9.81;#acceleration due to gravity in m/s^2;\n\n#Given values\np=1040;# density on the water surface in kg/m^3\nh1=0.8;# thickness of surface zone\nH=4;# thickness of gradient zone\nx0=0.0;# lower limit of integration\nx1=4.0;# upper limit of integration\n\n\n#Calculations\nP1=p*g*h1/1000;#standard pressure determination formula\n#P2 = integration of the exp. p*g*(math.sqrt(1+(math.tan(math.pi*z/4/H)^2))) b/w 0-4\ndef intgrnd1(z): \n return (p*g*(math.sqrt(1+(math.tan(math.pi*(z)/4/H)**2))) )#integrant\nP2, err = quad(intgrnd1, x0, x1) \nP2=P2/1000;#converting into kPa\nP=P1+P2;\n\n#Results\nprint ('the gage pressure at the bottom of gradient zone %f kPa' %round(P,0))\n", "the gage pressure at the bottom of gradient zone 54.000000 kPa\n" ], [ "", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ] ]
d0a678bfb331a5037b1d83afdd77ef5e846afcaa
7,675
ipynb
Jupyter Notebook
hpc_lecture_notes/assignment_1.ipynb
skailasa/hpc_lecture_notes
bcabc86d97b7069df98e1efcc90f5408a7e5d4f4
[ "BSD-3-Clause" ]
null
null
null
hpc_lecture_notes/assignment_1.ipynb
skailasa/hpc_lecture_notes
bcabc86d97b7069df98e1efcc90f5408a7e5d4f4
[ "BSD-3-Clause" ]
null
null
null
hpc_lecture_notes/assignment_1.ipynb
skailasa/hpc_lecture_notes
bcabc86d97b7069df98e1efcc90f5408a7e5d4f4
[ "BSD-3-Clause" ]
null
null
null
48.88535
691
0.667883
[ [ [ "# Assignment 1", "_____no_output_____" ], [ "We are given a 2-dimensional grid with points $(i, j)$, $i, j = 0, \\dots, N+1$. In this assignment we want to simulate a discrete diffusion process on the grid. We are starting with a distribution $u_0(i, j)$ of function values on the grid points. The distribution process follows the following recurrence relation:\n\n$$\nu_{n+1}(i, j) = \\frac{1}{4}\\left[u_n(i+1, j) + u_n(i-1, j) + u_n(i, j+1) + u_n(i, j-1)\\right],~i,j=1,\\dots, N\n$$\n\nIn other words, we are simply taking the average of the neighbouring grid points. We still need to fix the boundary values. Here, we just use the condition that the boundary values should remain constant, that is\n\n$$\nu_n(0, j) = u_0(0, j),\\quad u_n(i, 0) = u_0(i, 0), \\quad u_n(N + 1, j) = u_0(N + 1, j), \\quad u_n(i, N + 1) = u_0(i, N + 1)\n$$", "_____no_output_____" ], [ "## Part 1 (basic Python)", "_____no_output_____" ], [ "We define the following skeleton of a Python function:", "_____no_output_____" ] ], [ [ "def diffusion_iteration(un):\n \"\"\"\n Perform one diffusion step for all given grid points.\n \n Parameters\n ----------\n un : numpy.ndarray\n Numpy array of type `float64` and dimension (N + 2, N + 2) that stores the\n function values at step n.\n \n This function returns a Numpy array of dimension (N + 2, N + 2) of type `float64`\n that contains the function values after performing one step of the above diffusion\n iteration.\n \"\"\"\n pass", "_____no_output_____" ] ], [ [ "Implement the function `diffusion_iteration` using pure Python without Numba acceleration. Benchmark the runtime of this function for growing dimensions N and plot the runtime against N. What overall complexity of the runtime with respect to the parameter N do you expect?", "_____no_output_____" ], [ "## Part 2 (Numba acceleration and parallelisation)", "_____no_output_____" ], [ "Now optimise the function `diffusion_iteration` using Numba. In the first step develop a serial Numba implementation that does not use parallelisation. Repeat the benchmarking from the first part and compare the Numba compiled function against the pure Python version. What speed-up do you achieve with Numba? Once you have done this parallelise the function using `numba.prange`. Explain your parallelisation strategy and benchmark the resulting function. The function should parallelise almost perfectly. The optimal speed-up is roughly given by the number of physical CPU cores that you have. What is the actual speed-up that you measure compared to the theoretical speed-up?", "_____no_output_____" ], [ "## Part 3 (Visualisation)", "_____no_output_____" ], [ "Assume we have some kind of material distribution $u_0$. Furthermore, we assume that all boundary values are $0$. We now want to visualize the diffusion process by generating a nice animation.\nWe assume the grid size parameter $N$ to be large enough such that by the discrete time $n$ when diffusion process arrives at the boundary, the function values $u_n$ are negligibly small.\nThink about a nice initial distribution $u_0$ of values. Create a nice animation of 5 to 10 seconds in length that plots the iterates $u_n$ one after another. In order to do this you can use the matplotlib function `imshow` to draw individual frames and the `FuncAnimation` class in Matplotlib to generate the animation. 
Some details about creating such a matplotlib animation are discussed in a [Stackoverflow thread](https://stackoverflow.com/questions/17212722/matplotlib-imshow-how-to-animate).\n\n\n", "_____no_output_____" ], [ "## Advanced Problem", "_____no_output_____" ], [ "We could make the diffusion process more complicated by defining an index set $S = \\{(i_0, j_0), (i_1, j_1), \\dots \\}$ of interior indices at which we are keeping the interior iteration values constant, that is we set $u_n(i, j) = u_0(i, j)$ for $(i, j)\\in S$. Implement a parallel, Numba-accelerated diffusion step that enforces this additional condition and again create a nice visualisation. You need to change the interface of your `diffusion_iteration` function to take an additional parameter `constant_indices` where you can pass the information about which indices should be kept constant. Explain the data structure you choose for this condition and how you implement it.", "_____no_output_____" ], [ "## Assessment of the coursework\n\n* The submission deadline for the coursework is **Monday 19 October, 10am**.\n* Up to 80% of the coursework can be achieved by a perfect solution for Parts 1 to 3. The Advanced Problem is worth an additional 20%.\n* The assignment does not require much code writing, but a strong emphasis is put on good explanations. Putting a few comment lines in your code is not sufficient as explanation. Use the Jupyter notebook capabilities and write good explanations about what you are doing and why you are doing it as markdown cells.\n* Your code must be executable without any errors from scratch in Jupyter by choosing \"Restart kernel and run all cells.\" If this produces any errors, any code and explanations after the error occurs will be ignored for the marking. It is not the task of the markers to debug your code.\n* The code should not run for much longer than 2 minutes on a typical laptop/desktop. This is a soft limit. If your notebook runs too long we reserve the right to reject it.\n* In addition to core Python packages you are allowed to use Numpy, Numba, and matplotlib. No other packages are allowed and any such request will be rejected.\n* Any matplotlib output must appear inside your notebook. For more information on how to do this see here: https://medium.com/@1522933668924/using-matplotlib-in-jupyter-notebooks-comparing-methods-and-some-tips-python-c38e85b40ba1. A tutorial on embedding animations is shown here:\nhttp://louistiao.me/posts/notebooks/embedding-matplotlib-animations-in-jupyter-as-interactive-javascript-widgets/\n* You must submit your solution as a single Jupyter Notebook file with ending `*.ipynb`. **Any other submission will lead to 0 marks automatically. Make sure you submit a correct Notebook file with the right ending.**", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ] ]
d0a68851506dde1ff8625cb6d33c1f4387cfbf77
45,622
ipynb
Jupyter Notebook
notebooks/04_Ejercicio_spectral_clust.ipynb
medinaalonso/NLP
c8a0a3ce372c4289b14d526eae0e55ca2549a892
[ "MIT" ]
3
2019-09-02T02:08:36.000Z
2020-08-30T06:53:34.000Z
notebooks/04_Ejercicio_spectral_clust.ipynb
medinaalonso/NLP
c8a0a3ce372c4289b14d526eae0e55ca2549a892
[ "MIT" ]
null
null
null
notebooks/04_Ejercicio_spectral_clust.ipynb
medinaalonso/NLP
c8a0a3ce372c4289b14d526eae0e55ca2549a892
[ "MIT" ]
1
2020-08-29T00:14:11.000Z
2020-08-29T00:14:11.000Z
129.607955
34,262
0.844066
[ [ [ "# Ejercicio: Spectral clustering para documentos\n\nEl clustering espectral es una técnica de agrupamiento basada en la topología de gráficas. Es especialmente útil cuando los datos no son convexos o cuando se trabaja, directamente, con estructuras de grafos.\n\n##Preparación d elos documentos\n\nTrabajaremos con documentos textuales. Estos se limpiarán y se convertirán en vectores. Posteriormente, podremos aplicar el método de spectral clustering.", "_____no_output_____" ] ], [ [ "#Se importan las librerías necesarias\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.decomposition import PCA\nfrom gensim.models.doc2vec import Doc2Vec, TaggedDocument", "_____no_output_____" ] ], [ [ "La librería de Natural Language Toolkit (nltk) proporciona algunos corpus con los que se puede trabajar. Por ejemplo, el cropus Gutenberg (https://web.eecs.umich.edu/~lahiri/gutenberg_dataset.html) del que usaremos algunos datos. Asimismo, obtendremos de esta librería herramientas de preprocesamiento: stemmer y lista de stopwords.", "_____no_output_____" ] ], [ [ "import nltk\n#Descarga del corpus\nnltk.download('gutenberg')\n#Descarga de la lista de stopwords\nnltk.download('stopwords')\n\nfrom nltk.corpus import gutenberg\nfrom nltk.corpus import stopwords\nfrom nltk.stem import PorterStemmer", "[nltk_data] Downloading package gutenberg to /root/nltk_data...\n[nltk_data] Unzipping corpora/gutenberg.zip.\n[nltk_data] Downloading package stopwords to /root/nltk_data...\n[nltk_data] Unzipping corpora/stopwords.zip.\n" ] ], [ [ "Definimos los nombres de los archivos (ids) y la lista de paro", "_____no_output_____" ] ], [ [ "#Obtiene ids de los archivos del corpus gutenberg\ndoc_labels = gutenberg.fileids()\n\n#Lista de stopwords para inglés\nlista_paro = stopwords.words('english')", "_____no_output_____" ] ], [ [ "Definiremos una función que se encargará de preprocesar los textos. Se eliminan símbolos, se quitan elementos de la lista de stopwords y se pasa todo a minúsculas.", "_____no_output_____" ] ], [ [ "def preprocess(document):\n #Lista que guarda archivos limpios\n text = []\n for word in document:\n #Minúsculas\n word = word.lower()\n #Elimina stopwords y símbolos\n if word not in lista_paro and word.isalpha() == True:\n #Se aplica stemming\n text.append(PorterStemmer().stem(word))\n return text", "_____no_output_____" ] ], [ [ "Por cada documento, obtenemos la lista de sus palabras (stems) aplicando un preprocesado. 
Cada documento, entonces, es de la forma $d_i = \\{w_1, w_2, ..., w_{N_i}\\}$, donde $w_k$ son los stems del documento.", "_____no_output_____" ] ], [ [ "docs = []\nfor doc in doc_labels:\n #Lista de palabras del documentos\n arx = gutenberg.words(doc)\n #Aplica la función de preprocesado\n arx_prep = preprocess(arx)\n docs.append(arx_prep)\n #Imprime el nombre del documento, su longitud original y su longitud con preproceso\n print(doc,len(arx), len(arx_prep))", "austen-emma.txt 192427 73149\nausten-persuasion.txt 98171 38337\nausten-sense.txt 141576 53986\nbible-kjv.txt 1010654 374945\nblake-poems.txt 8354 3805\nbryant-stories.txt 55563 21718\nburgess-busterbrown.txt 18963 7582\ncarroll-alice.txt 34110 12240\nchesterton-ball.txt 96996 39715\nchesterton-brown.txt 86063 35348\nchesterton-thursday.txt 69213 28328\nedgeworth-parents.txt 210663 78148\nmelville-moby_dick.txt 260819 110459\nmilton-paradise.txt 96825 45568\nshakespeare-caesar.txt 25833 11056\nshakespeare-hamlet.txt 37360 15898\nshakespeare-macbeth.txt 23140 10078\nwhitman-leaves.txt 154883 65080\n" ] ], [ [ "Posteriormente, convertiremos cada documento en un vector en $\\mathbb{R}^d$. Para esto, utilizaremos el algoritmo Doc2Vec.", "_____no_output_____" ] ], [ [ "#Dimensión de los vectores\ndim = 300\n#tamaño de la ventana de contexto\nwindows_siz = 15\n\n#Indexa los documentos con valores enteros\ndocuments = [TaggedDocument(doc_i, [i]) for i, doc_i in enumerate(docs)]\n#Aplica el modelo de Doc2Vec\nmodel = Doc2Vec(documents, vector_size=dim, window=windows_siz, min_count=1)\n\n#Matriz de datos\nX = np.zeros((len(doc_labels),dim))\nfor j in range(0,len(doc_labels)):\n #Crea la matriz con los vectores de Doc2Vec\n X[j] = model.docvecs[j]\n \nprint(X)", "[[-0.71790749 0.52590156 0.04665314 ... -1.11765587 0.27269235\n 0.17521498]\n [-0.99053538 0.81888688 0.08494744 ... -1.4580884 0.35552233\n 0.36190808]\n [-1.1343857 0.89900047 0.07679608 ... -1.67795575 0.4307873\n 0.40497407]\n ...\n [-1.57137489 1.01522303 0.13583013 ... -2.54420257 0.6712212\n 0.23293038]\n [-1.57902765 1.01099396 0.12562202 ... -2.54485178 0.67597187\n 0.24470329]\n [-0.93969488 0.65869391 0.03659648 ... -1.44744205 0.37233579\n 0.26923421]]\n" ] ], [ [ "###Visualización", "_____no_output_____" ] ], [ [ "#Función para plotear\ndef plot_words(Z,ids,color='blue'):\n #Reduce a dos dimensiones con PCA\n Z = PCA(n_components=2).fit_transform(Z)\n r=0\n #Plotea las dimensiones\n plt.scatter(Z[:,0],Z[:,1], marker='o', c=color)\n for label,x,y in zip(ids, Z[:,0], Z[:,1]):\n #Agrega las etiquetas\n plt.annotate(label, xy=(x,y), xytext=(-1,1), textcoords='offset points', ha='center', va='bottom')\n r+=1\n\nplot_words(X, doc_labels)\nplt.show()", "_____no_output_____" ] ], [ [ "##Aplicación de spectral clustering\n\nAhora se debe aplicar el algoritmo de spectral clustering a estos datos. Como hemos visto, se debe tomar en cuenta diferentes criterios:\n\n* La función graph kernel se va utilizar\n* El método de selección de vecinos (fully connected, k-nn)\n* El número de dimensiones que queremos obtener\n* El número de clusters en k-means\n\nPruebe con estos parámetros para obtener un buen agrupamiento de los documentos elegidos. ", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
d0a69ede235308e4ca4aeab3074b9d6858b772fa
9,556
ipynb
Jupyter Notebook
assignment_2/Task_2.ipynb
antranx/ee604_assignments
bdd4313722157aad9548200e8872b51759e0e020
[ "MIT" ]
1
2020-11-22T15:09:01.000Z
2020-11-22T15:09:01.000Z
assignment_2/Task_2.ipynb
antranx/ee604_assignments
bdd4313722157aad9548200e8872b51759e0e020
[ "MIT" ]
null
null
null
assignment_2/Task_2.ipynb
antranx/ee604_assignments
bdd4313722157aad9548200e8872b51759e0e020
[ "MIT" ]
null
null
null
30.240506
161
0.486605
[ [ [ "# Copyright 2020 IITK EE604A Image Processing. All Rights Reserved.\n# \n# Licensed under the MIT License. Use and/or modification of this code outside of EE604 must reference:\n#\n# © IITK EE604A Image Processing \n# https://github.com/ee604/ee604_assignments\n#\n# Author: Shashi Kant Gupta, Chiranjeev Prachand and Prof K. S. Venkatesh, Department of Electrical Engineering, IIT Kanpur", "_____no_output_____" ] ], [ [ "# Task 2: Image Enhancement II: Spatial Smoothing\n\nIn this task, we will implement average, gaussian, and median spatial filter.", "_____no_output_____" ] ], [ [ "%%bash\npip install git+https://github.com/ee604/ee604_plugins", "_____no_output_____" ], [ "# Importing required libraries\n\nimport cv2\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nfrom ee604_plugins import download_dataset, cv2_imshow\ndownload_dataset(assignment_no=2, task_no=2) # download data for this assignment", "_____no_output_____" ], [ "def avgFilter(img, kernel_size=7):\n '''\n Write a program to implement average filter. You have to assume square kernels.\n \n Inputs:\n + img - grayscaled image of size N x N\n - values between [0, 255] - 'uint8'\n + kernel_size - size of the kernel window which should be used for averaging.\n \n Ouputs:\n + out_img - smoothed grayscaled image of size N x N\n - values between [0, 255] - 'uint8'\n \n Allowed modules:\n + Basic numpy operations\n + cv2.filter2D() to perform 2D convolution\n \n Hint:\n + Not needed.\n \n '''\n \n #############################\n # Start your code from here #\n #############################\n \n # Replace with your code...\n \n #############################\n # End your code here ########\n ############################# \n \n return out_img", "_____no_output_____" ], [ "def gaussianFilter(img, kernel_size=7, sigma=3):\n '''\n Write a program to implement gaussian filter. You have to assume square kernels.\n \n Inputs:\n + img - grayscaled image of size N x N\n - values between [0, 255] - 'uint8'\n + kernel_size - size of the kernel window which should be used for smoothing. \n + sigma - sigma parameter for gaussian kernel\n \n Ouputs:\n + out_img - smoothed grayscaled image of size N x N\n - values between [0, 255] - 'uint8'\n \n Allowed modules:\n + Basic numpy operations\n + cv2.filter2D() to perform 2D convolution\n + cv2.getGaussianKernel(). Note that this will give you 1D gaussian.\n \n Hint:\n + Not needed.\n \n '''\n \n #############################\n # Start your code from here #\n #############################\n \n # Replace with your code...\n \n #############################\n # End your code here ########\n ############################# \n \n return out_img", "_____no_output_____" ], [ "def medianFilter(img, kernel_size=7):\n '''\n Write a program to implement median filter. 
You have to assume square kernels.\n \n Inputs:\n + img - grayscaled image of size N x N\n - values between [0, 255] - 'uint8'\n + kernel_size - size of the kernel window which should be used for smoothing.\n \n Ouputs:\n + out_img - smoothed grayscaled image of size N x N\n - values between [0, 255] - 'uint8'\n \n Allowed modules:\n + Basic numpy operations\n + np.median()\n \n Hint:\n + Not needed.\n \n '''\n \n #############################\n # Start your code from here #\n #############################\n \n # Replace with your code...\n \n #############################\n # End your code here ########\n #############################\n \n return out_img", "_____no_output_____" ] ], [ [ "### Test\n\n---\nYour observation should compare the different methods for different images. Must include a sentence on which method + kernel size worked best in each case.", "_____no_output_____" ] ], [ [ "# Do not change codes inside this cell\n# Add your observations in next to next cell\n# Your observation should compare the different methods for different images\n\nlena_orig = cv2.imread('data/lena_gray.jpg', 0)\nlena_noisy_1 = cv2.imread('data/lena_noisy_1.jpg', 0)\nlena_noisy_2 = cv2.imread('data/lena_noisy_2.jpg', 0)\nlena_noisy_3 = cv2.imread('data/lena_noisy_3.jpg', 0)\n\ndef plot_frame(gridx, gridy, subplot_id, img, name):\n plt.subplot(gridx, gridy, 1 + int(subplot_id))\n plt.imshow(np.uint8(img), cmap=\"gray\", vmin=0, vmax=255)\n plt.axis(\"off\")\n plt.title(name)", "_____no_output_____" ], [ "# Do not change codes inside this cell\n# Add your observations in next cell\n\nimg_arr = [lena_noisy_1, lena_noisy_2, lena_noisy_3]\nimg_caption = [\"Noisy 1\", \"Noisy 2\", \"Noisy 3\"]\n\nfor i in range(3):\n for kernel_size in [5, 7, 9]:\n print(\"\\n-------------------------------------\")\n print(\"# Lena\", img_caption[i], \"| kernel:\", kernel_size, \"x\", kernel_size)\n print(\"-------------------------------------\")\n\n plt.figure(figsize=(20, 13))\n plot_frame(1, 5, 0, lena_orig, \"Original\")\n plot_frame(1, 5, 1, img_arr[i], \"Noisy\")\n\n tmp_img = avgFilter(np.copy(img_arr[i]), kernel_size=kernel_size)\n plot_frame(1, 5, 2, tmp_img, \"Avg.\")\n\n tmp_img = gaussianFilter(np.copy(img_arr[i]), kernel_size=kernel_size, sigma=int(kernel_size/5))\n plot_frame(1, 5, 3, tmp_img, \"Gaussian.\")\n\n tmp_img = medianFilter(np.copy(img_arr[i]), kernel_size=kernel_size)\n plot_frame(1, 5, 4, tmp_img, \"Median.\")\n\n plt.show()", "_____no_output_____" ], [ "your_observation = \"\"\"\nReplace this with your observations.\n\"\"\"\n\nprint(your_observation)", "_____no_output_____" ], [ "# Submission >>>>>>>>>>>>>>>>>>>>>\n# Do not change codes inside this cell.\n\ngen_imgs = []\nimg_arr = [lena_noisy_1, lena_noisy_2, lena_noisy_3]\n\nfor i in range(3):\n for kernel_size in [5, 7, 9]:\n tmp_img = avgFilter(np.copy(img_arr[i]), kernel_size=kernel_size)\n gen_imgs.append(tmp_img)\n \n tmp_img = gaussianFilter(np.copy(img_arr[i]), kernel_size=kernel_size, sigma=int(kernel_size/5))\n gen_imgs.append(tmp_img)\n \n tmp_img = medianFilter(np.copy(img_arr[i]), kernel_size=kernel_size)\n gen_imgs.append(tmp_img)\n\ntask2_submission = np.array(gen_imgs)", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ] ]
d0a6a828c5eceadff693c4586e97adbe13f25871
52,935
ipynb
Jupyter Notebook
jupyter_notebooks/Section2_MLPipelineOverview/02.10_ML_Pipeline-WrapUp_for_Deployment.ipynb
mtpadilla/deploying-machine-learning-models
83f75c4cce7ae1cfda9f1d1377635087a8567c7c
[ "BSD-3-Clause" ]
null
null
null
jupyter_notebooks/Section2_MLPipelineOverview/02.10_ML_Pipeline-WrapUp_for_Deployment.ipynb
mtpadilla/deploying-machine-learning-models
83f75c4cce7ae1cfda9f1d1377635087a8567c7c
[ "BSD-3-Clause" ]
null
null
null
jupyter_notebooks/Section2_MLPipelineOverview/02.10_ML_Pipeline-WrapUp_for_Deployment.ipynb
mtpadilla/deploying-machine-learning-models
83f75c4cce7ae1cfda9f1d1377635087a8567c7c
[ "BSD-3-Clause" ]
null
null
null
32.736549
380
0.434476
[ [ [ "## Machine Learning Model Building Pipeline: Wrapping up for Deployment\n\n\nIn the previous lectures, we worked through the typical Machine Learning pipeline to build a regression model that allows us to predict house prices. Briefly, we transformed variables in the dataset to make them suitable for use in a Regression model, then we selected the most predictive variables and finally we built our model.\n\nNow, we want to deploy our model. We want to create an API, that we can call with new data, with new characteristics about houses, to get an estimate of the SalePrice. In order to do so, we need to write code in a very specific way. We will show you how to write production code in the coming lectures.\n\nHere, we will summarise, the key pieces of code, that we need to take forward, for this particular project, to put our model in production.\n\nLet's go ahead and get started.", "_____no_output_____" ], [ "### Setting the seed\n\nIt is important to note, that we are engineering variables and pre-processing data with the idea of deploying the model if we find business value in it. Therefore, from now on, for each step that includes some element of randomness, it is extremely important that we **set the seed**. This way, we can obtain reproducibility between our research and our development code.\n\nThis is perhaps one of the most important lessons that you need to take away from this course: **Always set the seeds**.\n\nLet's go ahead and load the dataset.", "_____no_output_____" ] ], [ [ "# to handle datasets\nimport pandas as pd\nimport numpy as np\n\n# to divide train and test set\nfrom sklearn.model_selection import train_test_split\n\n# feature scaling\nfrom sklearn.preprocessing import MinMaxScaler\n\n# to build the models\nfrom sklearn.linear_model import Lasso\n\n# to evaluate the models\nfrom sklearn.metrics import mean_squared_error\nfrom math import sqrt\n\n# to persist the model and the scaler\nfrom sklearn.externals import joblib\n\nimport os\n\n# to visualise al the columns in the dataframe\npd.pandas.set_option('display.max_columns', None)", "_____no_output_____" ] ], [ [ "## Load data\n\nWe need the training data to train our model in the production environment. ", "_____no_output_____" ] ], [ [ "data_folder = '/Users/michaelpadilla/projects/udemy_deployment/data/house-prices-advanced-regression-techniques/'", "_____no_output_____" ], [ "# load dataset\ndata = pd.read_csv(os.path.join(data_folder, 'houseprice.csv'))\nprint(data.shape)\ndata.head()", "(1460, 81)\n" ] ], [ [ "## Separate dataset into train and test\n\nBefore beginning to engineer our features, it is important to separate our data intro training and testing set. This is to avoid over-fitting. There is an element of randomness in dividing the dataset, so remember to set the seed.", "_____no_output_____" ] ], [ [ "# Let's separate into train and test set\n# Remember to seet the seed (random_state for this sklearn function)\n\nX_train, X_test, y_train, y_test = train_test_split(data, data.SalePrice,\n test_size=0.1,\n random_state=0) # we are setting the seed here\nX_train.shape, X_test.shape", "_____no_output_____" ] ], [ [ "## Selected features\n\nRemember that we will deploy our model utilising only a subset of features, the most predictive ones. This is to make simpler models, so that we build simpler code for deployment. 
We will tell you more about this in coming lectures.", "_____no_output_____" ] ], [ [ "# load selected features\nfeatures = pd.read_csv(os.path.join(data_folder, 'selected_features.csv'), header=None)\n\n# Remember that I added the extra feature, to show you how to put\n# an additional feature engineering step into production\nfeatures = [x for x in features[0]] + ['LotFrontage']\nprint('Number of features: ', len(features))", "Number of features: 23\n" ], [ "features", "_____no_output_____" ] ], [ [ "### Missing values\n\nFor categorical variables, we will fill missing information by adding an additional category: \"missing\"", "_____no_output_____" ] ], [ [ "# make a list of the categorical variables that contain missing values\nvars_with_na = [var for var in features if X_train[var].isnull().sum()>1 and X_train[var].dtypes=='O']\n\n# print the variable name and the percentage of missing values\nfor var in vars_with_na:\n print(var, np.round(X_train[var].isnull().mean()*100, 2), ' % missing values')", "MasVnrType 0.46 % missing values\nBsmtQual 2.44 % missing values\nBsmtExposure 2.51 % missing values\nFireplaceQu 47.26 % missing values\nGarageType 5.63 % missing values\nGarageFinish 5.63 % missing values\n" ], [ "vars_with_na", "_____no_output_____" ] ], [ [ "Note that we have much less categorical variables with missing values than in our original dataset. But we still use categorical variables with NA for the final model, so we need to include this piece of feature engineering logic in the deployment pipeline. ", "_____no_output_____" ] ], [ [ "# I bring forward the functions used in the feature engineering notebook:\n\n# function to replace NA in categorical variables\ndef fill_categorical_na(df, var_list):\n X = df.copy()\n X[var_list] = df[var_list].fillna('Missing')\n return X\n\n# replace missing values with new label: \"Missing\"\nX_train = fill_categorical_na(X_train, vars_with_na)\nX_test = fill_categorical_na(X_test, vars_with_na)\n\n# check that we have no missing information in the engineered variables\nX_train[vars_with_na].isnull().sum()", "_____no_output_____" ] ], [ [ "For numerical variables, we are going to add an additional variable capturing the missing information, and then replace the missing information in the original variable by the mode, or most frequent value:", "_____no_output_____" ] ], [ [ "# make a list of the numerical variables that contain missing values\nvars_with_na = [var for var in features if X_train[var].isnull().sum()>1 and X_train[var].dtypes!='O']\n\n# print the variable name and the percentage of missing values\nfor var in vars_with_na:\n print(var, np.round(X_train[var].isnull().mean()*100, 2), ' % missing values')", "LotFrontage 17.73 % missing values\n" ] ], [ [ "#### Important: persisting the mean value for NA imputation\n\nAs you will see in future sections, one of the key pieces of deploying the model is \"Model Validation\". Model validation refers to corroborating that the deployed model and the model built during research, are identical. 
The entire pipeline needs to produce identical results.\n\nTherefore, in order to check at the end of the process that the feature engineering pipelines are identical, we will save -we will persist-, the mean value of the variable, so that we can use it at the end, to corroborate our models.", "_____no_output_____" ] ], [ [ "X_train['LotFrontage'].mode()[0]", "_____no_output_____" ], [ "# replace the missing values\n\nmean_var_dict = {}\n\nfor var in vars_with_na:\n \n # calculate the mode\n mode_val = X_train[var].mode()[0]\n \n # we persist the mean in the dictionary\n mean_var_dict[var] = mode_val\n \n # train\n # note that the additional binary variable was not selected, so we don't need this step any more\n #X_train[var+'_na'] = np.where(X_train[var].isnull(), 1, 0)\n X_train[var].fillna(mode_val, inplace=True)\n \n # test\n # note that the additional binary variable was not selected, so we don't need this step any more\n #X_test[var+'_na'] = np.where(X_test[var].isnull(), 1, 0)\n X_test[var].fillna(mode_val, inplace=True)\n\n# we save the dictionary for later\nnp.save(os.path.join(data_folder, 'mean_var_dict.npy'), mean_var_dict)\n\n# check that we have no more missing values in the engineered variables\nX_train[vars_with_na].isnull().sum()", "_____no_output_____" ], [ "X_test[vars_with_na].isnull().sum()", "_____no_output_____" ] ], [ [ "### Temporal variables\n\nOne of our temporal variables was selected to be used in the final model: 'YearRemodAdd'\n\nSo we need to deploy the bit of code that creates it.", "_____no_output_____" ] ], [ [ "# create the temporal var \"elapsed years\"\ndef elapsed_years(df, var):\n # capture difference between year variable and year the house was sold\n df[var] = df['YrSold'] - df[var]\n return df", "_____no_output_____" ], [ "X_train = elapsed_years(X_train, 'YearRemodAdd')\nX_test = elapsed_years(X_test, 'YearRemodAdd')", "_____no_output_____" ], [ "for var in ['LotFrontage', '1stFlrSF', 'GrLivArea', 'SalePrice']:\n X_train[var] = np.log(X_train[var])\n X_test[var]= np.log(X_test[var])", "_____no_output_____" ] ], [ [ "### Categorical variables\n\nWe do have categorical variables in our final model. First, we will remove those categories within variables that are present in less than 1% of the observations:", "_____no_output_____" ] ], [ [ "# let's capture the categorical variables first\ncat_vars = [var for var in features if X_train[var].dtype == 'O']\ncat_vars", "_____no_output_____" ] ], [ [ "#### Important: persisting the frequent labels\n\nAs you will see in future sections, one of the key pieces of deploying the model is \"Model Validation\". Model validation refers to corroborating that the deployed model and the model built during research, are identical. 
The entire pipeline needs to produce identical results.\n\nTherefore, in order to check at the end of the process, that the feature engineering pipelines are identical, we will save -we will persist-, the list of frequent labels per variable, so that we can use it at the end, to corroborate our models.", "_____no_output_____" ] ], [ [ "tmp = X_train.groupby(cat_vars[0])['SalePrice'].count() / len(X_train)", "_____no_output_____" ], [ "tmp > 0.01", "_____no_output_____" ], [ "def find_frequent_labels(df, var, rare_perc):\n # finds the labels that are shared by more than a certain % of the houses in the dataset\n df = df.copy()\n tmp = df.groupby(var)['SalePrice'].count() / len(df)\n return tmp[tmp>rare_perc].index\n\nfrequent_labels_dict = {}\n\nfor var in cat_vars:\n frequent_ls = find_frequent_labels(X_train, var, 0.01)\n \n # we save the list in a dictionary\n frequent_labels_dict[var] = frequent_ls\n \n X_train[var] = np.where(X_train[var].isin(frequent_ls), X_train[var], 'Rare')\n X_test[var] = np.where(X_test[var].isin(frequent_ls), X_test[var], 'Rare')\n \n# now we save the dictionary\nnp.save(os.path.join(data_folder, 'FrequentLabels.npy'), frequent_labels_dict)", "_____no_output_____" ], [ "frequent_labels_dict", "_____no_output_____" ] ], [ [ "Next, we need to transform the strings of these variables into numbers. We will do it so that we capture the monotonic relationship between the label and the target:", "_____no_output_____" ] ], [ [ "# this function will assign discrete values to the strings of the variables, \n# so that the smaller value corresponds to the smaller mean of target\n\ndef replace_categories(train, test, var, target):\n train = train.copy()\n test = test.copy()\n \n ordered_labels = train.groupby([var])[target].mean().sort_values().index\n ordinal_label = {k:i for i, k in enumerate(ordered_labels, 0)} \n \n train[var] = train[var].map(ordinal_label)\n test[var] = test[var].map(ordinal_label)\n \n return ordinal_label, train, test", "_____no_output_____" ], [ "X_train.groupby([cat_vars[0]])['SalePrice'].mean().sort_values().index", "_____no_output_____" ], [ "ordinal_label_dict = {}\nfor var in cat_vars:\n ordinal_label, X_train, X_test = replace_categories(X_train, X_test, var, 'SalePrice')\n ordinal_label_dict[var] = ordinal_label\n \n# now we save the dictionary\nnp.save(os.path.join(data_folder, 'OrdinalLabels.npy'), ordinal_label_dict)", "_____no_output_____" ], [ "ordinal_label_dict", "_____no_output_____" ], [ "# check absence of na\n[var for var in features if X_train[var].isnull().sum()>0]", "_____no_output_____" ], [ "# check absence of na\n[var for var in features if X_test[var].isnull().sum()>0]", "_____no_output_____" ] ], [ [ "### Feature Scaling\n\nFor use in linear models, features need to be either scaled or normalised. 
In the next section, I will scale features between the min and max values:", "_____no_output_____" ] ], [ [ "# capture the target\ny_train = X_train['SalePrice']\ny_test = X_test['SalePrice']", "_____no_output_____" ], [ "# fit scaler\nscaler = MinMaxScaler() # create an instance\nscaler.fit(X_train[features]) # fit the scaler to the train set for later use\n\n# we persist the model for future use\njoblib.dump(scaler, os.path.join(data_folder, 'scaler.pkl'))", "_____no_output_____" ], [ "# transform the train and test set, and add on the Id and SalePrice variables\nX_train = pd.DataFrame(scaler.transform(X_train[features]), columns=features)\nX_test = pd.DataFrame(scaler.transform(X_test[features]), columns=features)", "_____no_output_____" ], [ "# train the model\nlin_model = Lasso(alpha=0.005, random_state=0) # remember to set the random_state / seed\nlin_model.fit(X_train, y_train)\n\n# we persist the model for future use\njoblib.dump(lin_model, os.path.join(data_folder, 'lasso_regression.pkl'))", "_____no_output_____" ], [ "# evaluate the model:\n# remember that we log transformed the output (SalePrice) in our feature engineering notebook / lecture.\n\n# In order to get the true performance of the Lasso\n# we need to transform both the target and the predictions\n# back to the original house prices values.\n\n# We will evaluate performance using the mean squared error and the\n# root of the mean squared error\n\npred = lin_model.predict(X_train)\nprint('linear train mse: {}'.format(mean_squared_error(np.exp(y_train), np.exp(pred))))\nprint('linear train rmse: {}'.format(sqrt(mean_squared_error(np.exp(y_train), np.exp(pred)))))\nprint()\npred = lin_model.predict(X_test)\nprint('linear test mse: {}'.format(mean_squared_error(np.exp(y_test), np.exp(pred))))\nprint('linear test rmse: {}'.format(sqrt(mean_squared_error(np.exp(y_test), np.exp(pred)))))\nprint()\nprint('Average house price: ', np.exp(y_train).median())", "linear train mse: 1087435415.441452\nlinear train rmse: 32976.28565259362\n\nlinear test mse: 1405259552.2596023\nlinear test rmse: 37486.79170400692\n\nAverage house price: 163000.00000000012\n" ] ], [ [ "That is all for this notebook. And that is all for this section too.\n\n**In the next section, we will show you how to productionise this code for model deployment**.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ] ]
d0a6b4639c04eb57b8b4f4bb786183ffd24b24ae
22,861
ipynb
Jupyter Notebook
Notebooks/SuperComms.ipynb
Invalid-Entry/ProjectScribble
22fee8657a21ace69736861c884ab3e224650a32
[ "MIT" ]
3
2021-03-02T13:17:18.000Z
2021-07-25T21:57:33.000Z
Notebooks/SuperComms.ipynb
Invalid-Entry/ProjectScribble
22fee8657a21ace69736861c884ab3e224650a32
[ "MIT" ]
null
null
null
Notebooks/SuperComms.ipynb
Invalid-Entry/ProjectScribble
22fee8657a21ace69736861c884ab3e224650a32
[ "MIT" ]
null
null
null
26.800703
1,364
0.446218
[ [ [ "# super comms script\nimport serial\nfrom time import sleep\nimport math\n\nfrom tqdm import *\nimport json", "_____no_output_____" ], [ "def set_target(motor, location, ser, output=True):\n if ser.is_open:\n if motor =='A':\n ser.write(b'A')\n else:\n ser.write(b'B')\n \n target_bytes = location.to_bytes(4, byteorder='big')\n #print(target_bytes)\n ser.write(target_bytes)\n sleep(0.02)\n while(ser.in_waiting > 0):\n b = ser.read()\n if output:\n print(b.decode('ascii'), end='')\n else:\n raise Exception(\"Serial is not open!\")", "_____no_output_____" ], [ "def get_debug(ser):\n if ser.is_open:\n ser.write(b'D')\n sleep(0.02)\n while(ser.in_waiting > 0):\n b = ser.read()\n print(b.decode('ascii'), end='')\n \n print(\"---\")\n else:\n raise Exception(\"Serial is not open!\")", "_____no_output_____" ], [ "def gogogo(ser, wait=False, output=True):\n if ser.is_open:\n ser.write(b'G')\n sleep(0.02)\n \n if output:\n print(\"--- Making a move ---\")\n \n if wait:\n end_found = False\n while not end_found:\n sleep(0.002)\n\n while(ser.in_waiting > 0):\n b = ser.readline().decode('ascii')\n if output:\n print(b)\n if \"move-end\" in b:\n end_found = True\n \n else:\n while(ser.in_waiting > 0):\n b = ser.read()\n print(b.decode('ascii'), end='')\n \n \n else:\n raise Exception(\"Serial is not open!\")", "_____no_output_____" ], [ "def stop(ser):\n if ser.is_open:\n ser.write(b'S')\n sleep(0.1)\n while(ser.in_waiting > 0):\n b = ser.read()\n print(b.decode('ascii'), end='')\n \n print(\"---\")\n else:\n raise Exception(\"Serial is not open!\")", "_____no_output_____" ], [ "def penup(ser):\n if ser.is_open:\n ser.write(b'C')\n sleep(0.1)\n while(ser.in_waiting > 0):\n b = ser.read()\n #print(b.decode('ascii'), end='')\n \n #print(\"---\")\n else:\n raise Exception(\"Serial is not open!\")\n\ndef pendown(ser):\n if ser.is_open:\n ser.write(b'X')\n sleep(0.1)\n while(ser.in_waiting > 0):\n b = ser.read()\n #print(b.decode('ascii'), end='')\n \n #print(\"---\")\n else:\n raise Exception(\"Serial is not open!\")", "_____no_output_____" ], [ "def reset(ser, output=True):\n if ser.is_open:\n ser.write(b'R')\n sleep(0.5)\n while(ser.in_waiting > 0):\n b = ser.read()\n if output:\n print(b.decode('ascii'), end='')\n\n else:\n raise Exception(\"Serial is not open!\")", "_____no_output_____" ], [ "ser = serial.Serial('/dev/cu.usbserial-141240', baudrate=115200) # open serial port\nprint(ser.name) # check which port was really used", "/dev/cu.usbserial-141240\n" ], [ "get_debug(ser)", "!-- I received: D\r\n!--- Debug ---\r\n!--- A length = 0\r\n!--- B length = 0\r\n!--- A target length = 5000\r\n!--- B target length = 0\r\n---\n" ], [ "\n# Start with thing at home position!\n#reset(ser)\n\ntarget_coord = (300,200)\nreset_point = (800,800)\n\ntarget_lengths = translate_xy_to_ab(target_coord)\ntravel_lengths = (reset_point[0] - target_lengths[0], reset_point[1] - target_lengths[1])\n\na_step_mm = 10000/125\nb_step_mm = 10000/125\n\ntravel_steps = (int(travel_lengths[0] * a_step_mm), int(travel_lengths[1] * b_step_mm))\n\nset_target(\"A\", travel_steps[0], ser, output=True)\nset_target(\"B\", travel_steps[1], ser, output=True)\ngogogo(ser, wait=True)", "!-- I received: A\n!--- A target length = 35155\n!-- I received: B\n!--- B target length = 8062\n--- Making a move ---\n!-- I received: G\n\n!--- Running ---\n\n!-- Reached move-end ---\n\n" ], [ "set_target(\"A\", 0, ser, output=True)\nset_target(\"B\", 0, ser, output=True)\n\n#set_target(\"A\", 132, ser, output=True)\n#set_target(\"B\", 9121, ser, 
output=True)\ngogogo(ser, wait=True)\n\n", "!-- I received: A\n!--- A target length = 0\n!-- I received: B\n!--- B target length = 0\n--- Making a move ---\n!-- I received: G\n\n!--- Running ---\n\n!-- Reached move-end ---\n\n" ], [ "gogogo(ser, wait=True)", "--- Making a move ---\n!--- B target length = 5000\n\n!-- I received: G\n\n!--- Running ---\n\n!-- Reached move-end ---\n\n" ], [ "abpath = [\n (5000,5000),\n (10000,10000),\n (0, 10000),\n (1000, 0)\n]\ncounter = 0\nfor coord in abpath:\n counter += 1\n print(\"Step %s of %s (%s)\" % (counter, len(abpath), 100*counter/len(abpath)))\n set_target('A', coord[0], ser, output=False)\n set_target('B', coord[1], ser, output=False)\n gogogo(ser, wait=True, output=False)", "_____no_output_____" ], [ "ser.close()", "_____no_output_____" ], [ "reset(ser)", "!--- B target length = 17723\r\n!-- I received: G\r\n!--- Running ---\r\n!-- Reached move-end ---\r\n!-- I received: R\r\n" ], [ "def translate_xy_to_ab(coord):\n x = coord[0]\n y = coord[1]\n a_len = math.sqrt(x**2 + y**2)\n b_len = math.sqrt((MAX_WIDTH-x)**2 + y**2)\n \n return [a_len, b_len]\n\ndef translate_ab_to_xy(lengths):\n a = lengths[0]\n b = lengths[1]\n \n # Cosine rule!\n #cos(left) = (a**2 + MAX_WIDTH**2 - b**2) / (2 * a * MAX_WIDTH)\n \n try:\n left_angle = math.acos((a**2 + MAX_WIDTH**2 - b**2) / (2 * a * MAX_WIDTH))\n except Exception as e:\n # This specifically happens if the values just arn't a triangle!\n # i.e. consider maxwidth = 100, left length = 10, right = 10... one of\n # the wires must have broken!\n print(\"Not a triangle!\")\n print((a**2 + MAX_WIDTH**2 - b**2) / (2 * a * MAX_WIDTH))\n raise e\n \n #print(left_angle) # in radians, remember.\n \n # sin(left) = opp / hyp\n # cos(right) = adj / hyp\n # hyp is 'a'\n # Lack of precision here - chop to mm. 
Rounding 'down'\n y = int(math.sin(left_angle) * a) \n x = int(math.cos(left_angle) * a)\n \n return [x,y]", "_____no_output_____" ], [ "# Math time\nMAX_WIDTH = 970\n\na_scale = 10000/130\nb_scale = 10000/125\n\n# 0,0 is furthest, then up is less (?)\n\nreal_start_mm = (800,800)\n\norig_length = (real_start_mm[0] * a_scale, real_start_mm[1] * b_scale)\nprint(orig_length)\nxy_path = [\n (500, 390),\n #(500,500),\n #(600,400),\n 'HOME'\n]\n\nab_path = []\n\nfor coord in xy_path:\n if coord=='HOME':\n movement = (0,0)\n else:\n short_ab_mm = translate_xy_to_ab(coord)\n #print(short_ab_mm)\n short_ab_steps = (short_ab_mm[0] * a_scale, short_ab_mm[1] * b_scale)\n #print(short_ab_steps)\n movement = (int(orig_length[0] - short_ab_steps[0]),int( orig_length[1] - short_ab_steps[1]))\n print(\"Going -> %s\" % (movement,))\n ab_path.append(movement)", "(61538.46153846154, 64000.0)\nGoing -> (12760, 15141)\nGoing -> (0, 0)\n" ], [ "ser = serial.Serial('/dev/cu.usbserial-141210', baudrate=115200) # open serial port\nprint(ser.name) \n", "/dev/cu.usbserial-141210\n" ], [ "reset(ser)", "!-- I received: R\r\n" ], [ "get_debug(ser)", "!-- I received: D\r\n!--- Debug ---\r\n!--- A length = 0\r\n!--- B length = 0\r\n!--- A target length = 0\r\n!--- B target length = 0\r\n---\n" ], [ "counter = 0\nfor coord in ab_path:\n counter += 1\n print(\"Step %s of %s (%s)\" % (counter, len(ab_path), 100*counter/len(ab_path)))\n set_target('A', coord[0], ser, output=False)\n set_target('B', coord[1], ser, output=False)\n gogogo(ser, wait=True, output=False)", "Step 1 of 2 (50.0)\nStep 2 of 2 (100.0)\n" ], [ "ser.close()", "_____no_output_____" ], [ "with open(\"spiro.json\") as fp:\n paths = json.load(fp)\n\nMAX_WIDTH = 970\noffset_x = 300\noffset_y = 50\n\nscale_x = 1.5\nscale_y = 2\npath_counter = 0\n\na_scale = 10000/130\nb_scale = 10000/130\n\npaths.append(('HOME',))\n\n# 0,0 is furthest, then up is less (?)\n\nreal_start_mm = (800,800)\n\norig_length = (real_start_mm[0] * a_scale, real_start_mm[1] * b_scale)\nreset(ser)\npenup(ser)\n\nfor xy_path in tqdm(paths):\n if len(xy_path) == 0:\n continue\n \n print(\"path %s (%s)\" % (path_counter, 100*path_counter/len(paths)))\n path_counter += 1\n \n ab_path = []\n\n for coord in tqdm(xy_path):\n if coord=='HOME':\n movement = (0,0)\n else:\n coord = (offset_x + coord[0]*scale_x, offset_y + coord[1]*scale_y)\n short_ab_mm = translate_xy_to_ab(coord)\n #print(short_ab_mm)\n short_ab_steps = (short_ab_mm[0] * a_scale, short_ab_mm[1] * b_scale)\n #print(short_ab_steps)\n movement = (int(orig_length[0] - short_ab_steps[0]),int( orig_length[1] - short_ab_steps[1]))\n if movement[0] < 0 or movement[1] < 0:\n print(\"%s -> %s\" % (coord, movement))\n raise Exception(\"out of bounds\")\n \n #print(\"Going -> %s\" % (movement,))\n ab_path.append(movement)\n \n \n #input(\"> PENUP !\\r\\n\")\n penup(ser)\n set_target('A', ab_path[0][0], ser, output=False)\n set_target('B', ab_path[0][1], ser, output=False)\n gogogo(ser, wait=True, output=False)\n \n pendown(ser)\n counter = 0\n for coord in tqdm(ab_path[1:]):\n counter += 1\n #print(\"Step %s of %s (%s)\" % (counter, len(ab_path), 100*counter/len(ab_path)))\n set_target('A', coord[0], ser, output=False)\n set_target('B', coord[1], ser, output=False)\n gogogo(ser, wait=True, output=False)\n penup(ser)\n\n#print(len(ab_path))\n#print(int(offset_x + xy_path[0][0]*scale_x), int(offset_y + xy_path[0][1]*scale_y))", " 0%| | 0/2 [00:00<?, ?it/s]\n 0%| | 0/500 [00:00<?, ?it/s]\u001b[A\n 0%| | 0/2 [00:00<?, ?it/s]" ], [ 
"penup(ser)\nset_target('A',1000, ser, output=False)\nset_target('B',1000, ser, output=False)\ngogogo(ser)", "--- Making a move ---\n!--- B target length = 1000\r\n!-- I received: G\r\n!--- Running ---\r\n" ], [ "ser.close()", "_____no_output_____" ], [ "reset(ser)", "!-- I received: R\r\n" ], [ "def go_to_xy(target_coord,ser):\n target_lengths = translate_xy_to_ab(target_coord)\n travel_lengths = (reset_point[0] - target_lengths[0], reset_point[1] - target_lengths[1])\n\n a_step_mm = 10000/125\n b_step_mm = 10000/125\n\n travel_steps = (int(travel_lengths[0] * a_step_mm), int(travel_lengths[1] * b_step_mm))\n\n set_target(\"A\", travel_steps[0], ser, output=True)\n set_target(\"B\", travel_steps[1], ser, output=True)\n gogogo(ser, wait=True)", "_____no_output_____" ], [ "reset(ser)\npath = [\n (650, 400),\n (300, 400),\n (300, 150),\n (650, 150),\n (650, 400)\n]\n\nfor point in path:\n go_to_xy(point,ser)", "!-- I received: R\n!-- I received: A\n!--- A target length = 2942\n!-- I received: B\n!--- B target length = 23020\n--- Making a move ---\n!-- I received: G\n\n!--- Running ---\n\n!-- Reached move-end ---\n\n!-- I received: A\n!--- A target length = 24000\n!-- I received: B\n--- Making a move ---\n!--- B target length = 1574\n\n!-- I received: G\n\n!--- Running ---\n\n!-- Reached move-end ---\n\n!-- I received: A\n!--- A target length = 37167\n!-- I received: B\n!--- B target length = 9073\n--- Making a move ---\n!-- I received: G\n\n!--- Running ---\n\n!-- Reached move-end ---\n\n!-- I received: A\n!--- A target length = 10633\n!-- I received: B\n--- Making a move ---\n!--- B target length = 35727\n\n!-- I received: G\n\n!--- Running ---\n\n!-- Reached move-end ---\n\n!-- I received: A\n!--- A target length = 2942\n!-- I received: B\n!--- B target length = 23020\n--- Making a move ---\n!-- I received: G\n\n!--- Running ---\n\n!-- Reached move-end ---\n\n" ], [ "pendown(ser)\n", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a6cd14927c8e18cc1159743340137f5506bddd
19,939
ipynb
Jupyter Notebook
how-to-use-azureml/automated-machine-learning/classification-credit-card-fraud/auto-ml-classification-credit-card-fraud.ipynb
tomasro27/MachineLearningNotebooks
2544e85c5f77ce7f4c4bc843a39d5e978d3215b9
[ "MIT" ]
1
2021-03-13T08:29:49.000Z
2021-03-13T08:29:49.000Z
how-to-use-azureml/automated-machine-learning/classification-credit-card-fraud/auto-ml-classification-credit-card-fraud.ipynb
tomasro27/MachineLearningNotebooks
2544e85c5f77ce7f4c4bc843a39d5e978d3215b9
[ "MIT" ]
null
null
null
how-to-use-azureml/automated-machine-learning/classification-credit-card-fraud/auto-ml-classification-credit-card-fraud.ipynb
tomasro27/MachineLearningNotebooks
2544e85c5f77ce7f4c4bc843a39d5e978d3215b9
[ "MIT" ]
null
null
null
39.640159
365
0.587492
[ [ [ "Copyright (c) Microsoft Corporation. All rights reserved.\n\nLicensed under the MIT License.", "_____no_output_____" ], [ "![Impressions](https://PixelServer20190423114238.azurewebsites.net/api/impressions/MachineLearningNotebooks/how-to-use-azureml/automated-machine-learning/classification-credit-card-fraud/auto-ml-classification-credit-card-fraud.png)", "_____no_output_____" ], [ "# Automated Machine Learning\n_**Classification of credit card fraudulent transactions on remote compute **_\n\n## Contents\n1. [Introduction](#Introduction)\n1. [Setup](#Setup)\n1. [Train](#Train)\n1. [Results](#Results)\n1. [Test](#Test)\n1. [Acknowledgements](#Acknowledgements)", "_____no_output_____" ], [ "## Introduction\n\nIn this example we use the associated credit card dataset to showcase how you can use AutoML for a simple classification problem. The goal is to predict if a credit card transaction is considered a fraudulent charge.\n\nThis notebook is using remote compute to train the model.\n\nIf you are using an Azure Machine Learning Compute Instance, you are all set. Otherwise, go through the [configuration](../../../configuration.ipynb) notebook first if you haven't already to establish your connection to the AzureML Workspace. \n\nIn this notebook you will learn how to:\n1. Create an experiment using an existing workspace.\n2. Configure AutoML using `AutoMLConfig`.\n3. Train the model using remote compute.\n4. Explore the results.\n5. Test the fitted model.", "_____no_output_____" ], [ "## Setup\n\nAs part of the setup you have already created an Azure ML `Workspace` object. For Automated ML you will need to create an `Experiment` object, which is a named object in a `Workspace` used to run experiments.", "_____no_output_____" ] ], [ [ "import logging\n\nfrom matplotlib import pyplot as plt\nimport pandas as pd\nimport os\n\nimport azureml.core\nfrom azureml.core.experiment import Experiment\nfrom azureml.core.workspace import Workspace\nfrom azureml.core.dataset import Dataset\nfrom azureml.train.automl import AutoMLConfig", "_____no_output_____" ] ], [ [ "This sample notebook may use features that are not available in previous versions of the Azure ML SDK.", "_____no_output_____" ] ], [ [ "print(\"This notebook was created using version 1.22.0 of the Azure ML SDK\")\nprint(\"You are currently using version\", azureml.core.VERSION, \"of the Azure ML SDK\")", "_____no_output_____" ], [ "ws = Workspace.from_config()\n\n# choose a name for experiment\nexperiment_name = 'automl-classification-ccard-remote'\n\nexperiment=Experiment(ws, experiment_name)\n\noutput = {}\noutput['Subscription ID'] = ws.subscription_id\noutput['Workspace'] = ws.name\noutput['Resource Group'] = ws.resource_group\noutput['Location'] = ws.location\noutput['Experiment Name'] = experiment.name\npd.set_option('display.max_colwidth', -1)\noutputDf = pd.DataFrame(data = output, index = [''])\noutputDf.T", "_____no_output_____" ] ], [ [ "## Create or Attach existing AmlCompute\nA compute target is required to execute the Automated ML run. In this tutorial, you create AmlCompute as your training compute resource.\n#### Creation of AmlCompute takes approximately 5 minutes. \nIf the AmlCompute with that name is already in your workspace this code will skip the creation process.\nAs with other Azure services, there are limits on certain resources (e.g. AmlCompute) associated with the Azure Machine Learning service. 
Please read [this article](https://docs.microsoft.com/en-us/azure/machine-learning/service/how-to-manage-quotas) on the default limits and how to request more quota.", "_____no_output_____" ] ], [ [ "from azureml.core.compute import ComputeTarget, AmlCompute\nfrom azureml.core.compute_target import ComputeTargetException\n\n# Choose a name for your CPU cluster\ncpu_cluster_name = \"cpu-cluster-1\"\n\n# Verify that cluster does not exist already\ntry:\n compute_target = ComputeTarget(workspace=ws, name=cpu_cluster_name)\n print('Found existing cluster, use it.')\nexcept ComputeTargetException:\n compute_config = AmlCompute.provisioning_configuration(vm_size='STANDARD_DS12_V2',\n max_nodes=6)\n compute_target = ComputeTarget.create(ws, cpu_cluster_name, compute_config)\n\ncompute_target.wait_for_completion(show_output=True)", "_____no_output_____" ] ], [ [ "# Data", "_____no_output_____" ], [ "### Load Data\n\nLoad the credit card dataset from a csv file containing both training features and labels. The features are inputs to the model, while the training labels represent the expected output of the model. Next, we'll split the data using random_split and extract the training data for the model.", "_____no_output_____" ] ], [ [ "data = \"https://automlsamplenotebookdata.blob.core.windows.net/automl-sample-notebook-data/creditcard.csv\"\ndataset = Dataset.Tabular.from_delimited_files(data)\ntraining_data, validation_data = dataset.random_split(percentage=0.8, seed=223)\nlabel_column_name = 'Class'", "_____no_output_____" ] ], [ [ "## Train\n\nInstantiate a AutoMLConfig object. This defines the settings and data used to run the experiment.\n\n|Property|Description|\n|-|-|\n|**task**|classification or regression|\n|**primary_metric**|This is the metric that you want to optimize. Classification supports the following primary metrics: <br><i>accuracy</i><br><i>AUC_weighted</i><br><i>average_precision_score_weighted</i><br><i>norm_macro_recall</i><br><i>precision_score_weighted</i>|\n|**enable_early_stopping**|Stop the run if the metric score is not showing improvement.|\n|**n_cross_validations**|Number of cross validation splits.|\n|**training_data**|Input dataset, containing both features and label column.|\n|**label_column_name**|The name of the label column.|\n\n**_You can find more information about primary metrics_** [here](https://docs.microsoft.com/en-us/azure/machine-learning/service/how-to-configure-auto-train#primary-metric)", "_____no_output_____" ] ], [ [ "automl_settings = {\n \"n_cross_validations\": 3,\n \"primary_metric\": 'average_precision_score_weighted',\n \"enable_early_stopping\": True,\n \"max_concurrent_iterations\": 2, # This is a limit for testing purpose, please increase it as per cluster size\n \"experiment_timeout_hours\": 0.25, # This is a time limit for testing purposes, remove it for real use cases, this will drastically limit ablity to find the best model possible\n \"verbosity\": logging.INFO,\n}\n\nautoml_config = AutoMLConfig(task = 'classification',\n debug_log = 'automl_errors.log',\n compute_target = compute_target,\n training_data = training_data,\n label_column_name = label_column_name,\n **automl_settings\n )", "_____no_output_____" ] ], [ [ "Call the `submit` method on the experiment object and pass the run configuration. Depending on the data and the number of iterations this can run for a while. 
Validation errors and current status will be shown when setting `show_output=True` and the execution will be synchronous.", "_____no_output_____" ] ], [ [ "remote_run = experiment.submit(automl_config, show_output = False)", "_____no_output_____" ], [ "# If you need to retrieve a run that already started, use the following code\n#from azureml.train.automl.run import AutoMLRun\n#remote_run = AutoMLRun(experiment = experiment, run_id = '<replace with your run id>')", "_____no_output_____" ], [ "remote_run", "_____no_output_____" ] ], [ [ "## Results", "_____no_output_____" ], [ "#### Widget for Monitoring Runs\n\nThe widget will first report a \"loading\" status while running the first iteration. After completing the first iteration, an auto-updating graph and table will be shown. The widget will refresh once per minute, so you should see the graph update as child runs complete.\n\n**Note:** The widget displays a link at the bottom. Use this link to open a web interface to explore the individual run details", "_____no_output_____" ] ], [ [ "from azureml.widgets import RunDetails\nRunDetails(remote_run).show()", "_____no_output_____" ], [ "remote_run.wait_for_completion(show_output=False)", "_____no_output_____" ] ], [ [ "#### Explain model\n\nAutomated ML models can be explained and visualized using the SDK Explainability library. ", "_____no_output_____" ], [ "## Analyze results\n\n### Retrieve the Best Model\n\nBelow we select the best pipeline from our iterations. The `get_output` method returns the best run and the fitted model. Overloads on `get_output` allow you to retrieve the best run and fitted model for *any* logged metric or for a particular *iteration*.", "_____no_output_____" ] ], [ [ "best_run, fitted_model = remote_run.get_output()\nfitted_model", "_____no_output_____" ] ], [ [ "#### Print the properties of the model\nThe fitted_model is a python object and you can read the different properties of the object.\n", "_____no_output_____" ], [ "## Test the fitted model\n\nNow that the model is trained, split the data in the same way the data was split for training (The difference here is the data is being split locally) and then run the test data through the trained model to get the predicted values.", "_____no_output_____" ] ], [ [ "# convert the test data to dataframe\nX_test_df = validation_data.drop_columns(columns=[label_column_name]).to_pandas_dataframe()\ny_test_df = validation_data.keep_columns(columns=[label_column_name], validate=True).to_pandas_dataframe()", "_____no_output_____" ], [ "# call the predict functions on the model\ny_pred = fitted_model.predict(X_test_df)\ny_pred", "_____no_output_____" ] ], [ [ "### Calculate metrics for the prediction\n\nNow visualize the data on a scatter plot to show what our truth (actual) values are compared to the predicted values \nfrom the trained model that was returned.", "_____no_output_____" ] ], [ [ "from sklearn.metrics import confusion_matrix\nimport numpy as np\nimport itertools\n\ncf =confusion_matrix(y_test_df.values,y_pred)\nplt.imshow(cf,cmap=plt.cm.Blues,interpolation='nearest')\nplt.colorbar()\nplt.title('Confusion Matrix')\nplt.xlabel('Predicted')\nplt.ylabel('Actual')\nclass_labels = ['False','True']\ntick_marks = np.arange(len(class_labels))\nplt.xticks(tick_marks,class_labels)\nplt.yticks([-0.5,0,1,1.5],['','False','True',''])\n# plotting text value inside cells\nthresh = cf.max() / 2.\nfor i,j in itertools.product(range(cf.shape[0]),range(cf.shape[1])):\n 
plt.text(j,i,format(cf[i,j],'d'),horizontalalignment='center',color='white' if cf[i,j] >thresh else 'black')\nplt.show()", "_____no_output_____" ] ], [ [ "## Acknowledgements", "_____no_output_____" ], [ "This Credit Card fraud Detection dataset is made available under the Open Database License: http://opendatacommons.org/licenses/odbl/1.0/. Any rights in individual contents of the database are licensed under the Database Contents License: http://opendatacommons.org/licenses/dbcl/1.0/ and is available at: https://www.kaggle.com/mlg-ulb/creditcardfraud\n\nThe dataset has been collected and analysed during a research collaboration of Worldline and the Machine Learning Group (http://mlg.ulb.ac.be) of ULB (Université Libre de Bruxelles) on big data mining and fraud detection.\nMore details on current and past projects on related topics are available on https://www.researchgate.net/project/Fraud-detection-5 and the page of the DefeatFraud project\n\nPlease cite the following works:\n\nAndrea Dal Pozzolo, Olivier Caelen, Reid A. Johnson and Gianluca Bontempi. Calibrating Probability with Undersampling for Unbalanced Classification. In Symposium on Computational Intelligence and Data Mining (CIDM), IEEE, 2015\n\nDal Pozzolo, Andrea; Caelen, Olivier; Le Borgne, Yann-Ael; Waterschoot, Serge; Bontempi, Gianluca. Learned lessons in credit card fraud detection from a practitioner perspective, Expert systems with applications,41,10,4915-4928,2014, Pergamon\n\nDal Pozzolo, Andrea; Boracchi, Giacomo; Caelen, Olivier; Alippi, Cesare; Bontempi, Gianluca. Credit card fraud detection: a realistic modeling and a novel learning strategy, IEEE transactions on neural networks and learning systems,29,8,3784-3797,2018,IEEE\n\nDal Pozzolo, Andrea Adaptive Machine learning for credit card fraud detection ULB MLG PhD thesis (supervised by G. Bontempi)\n\nCarcillo, Fabrizio; Dal Pozzolo, Andrea; Le Borgne, Yann-Aël; Caelen, Olivier; Mazzer, Yannis; Bontempi, Gianluca. Scarff: a scalable framework for streaming credit card fraud detection with Spark, Information fusion,41, 182-194,2018,Elsevier\n\nCarcillo, Fabrizio; Le Borgne, Yann-Aël; Caelen, Olivier; Bontempi, Gianluca. Streaming active learning strategies for real-life credit card fraud detection: assessment and visualization, International Journal of Data Science and Analytics, 5,4,285-300,2018,Springer International Publishing\n\nBertrand Lebichot, Yann-Aël Le Borgne, Liyun He, Frederic Oblé, Gianluca Bontempi Deep-Learning Domain Adaptation Techniques for Credit Cards Fraud Detection, INNSBDDL 2019: Recent Advances in Big Data and Deep Learning, pp 78-88, 2019\n\nFabrizio Carcillo, Yann-Aël Le Borgne, Olivier Caelen, Frederic Oblé, Gianluca Bontempi Combining Unsupervised and Supervised Learning in Credit Card Fraud Detection Information Sciences, 2019", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ] ]
d0a6cdec7f993544334cdf087a7484679c27c0a0
5,812
ipynb
Jupyter Notebook
examples/2_WorkflowRegistryAuthorize.ipynb
ilveroluca/life_monitor
61752952cff6be8daea1d87b8f395ccb4dbe424c
[ "MIT" ]
null
null
null
examples/2_WorkflowRegistryAuthorize.ipynb
ilveroluca/life_monitor
61752952cff6be8daea1d87b8f395ccb4dbe424c
[ "MIT" ]
1
2021-04-16T09:08:26.000Z
2021-04-16T09:08:26.000Z
examples/2_WorkflowRegistryAuthorize.ipynb
ilveroluca/life_monitor
61752952cff6be8daea1d87b8f395ccb4dbe424c
[ "MIT" ]
null
null
null
27.032558
111
0.530282
[ [ [ "# Workflow registry authorization", "_____no_output_____" ], [ "\n", "_____no_output_____" ] ], [ [ "# set the lifemonitor root\nlifemonitor_root = \"/mnt/data/projects/@crs4/EOSC-Life/Repositories/life_monitor_rachk8s\"\n%cd {lifemonitor_root}", "/mnt/data/projects/@crs4/EOSC-Life/Repositories/life_monitor_rachk8s\n" ], [ "# import requirements \nimport requests\nimport pprint\npp = pprint.PrettyPrinter(indent=2).pprint", "_____no_output_____" ], [ "# list registries\n!docker-compose exec lm /bin/bash -c \"flask registry show seek\"", "INFO:lifemonitor.app:Logging is active. Log level: DEBUG\n\n\n****************************************************************************************************\nWorkflow Registry 'seek' (uuid: 1c40002b-8676-4f51-b529-9ff02ff78234, type: seek) registered!\n****************************************************************************************************\n\n\nOAuth2 settings to connect to LifeMonitor:\n----------------------------------------------------------------------------------------------------\nREGISTRY NAME: seek\nREGISTRY API URL: https://seek:3000\nREGISTRY CLIENT ID: bAVlsOwx2Z7sPgQJU7mdg1CE\nREGISTRY CLIENT SECRET: FQ8FE57DvYy9C5NKsAgXaBXAQ061gsNXz71X8I8radX6zHU2\nREGISTRY CLIENT ALLOWED SCOPES: read write\nREGISTRY CLIENT ALLOWED FLOWS: ['client_credentials', 'authorization_code', 'refresh_token']\nREGISTRY CLIENT REDIRECT URIs: ['https://seek:3000']\nREGISTRY CLIENT AUTH METHOD: client_secret_post\nAUTHORIZE URL: <LIFE_MONITOR_BASE_URL>/oauth2/authorize/seek\nACCESS TOKEN URL: <LIFE_MONITOR_BASE_URL>/oauth2/token\nCALLBACK URL: <LIFE_MONITOR_BASE_URL>/oauth2/authorized/seek[?next=<URL>]\n\n" ], [ "# Set Registry Credentials from LifeMonitor\nCLIENT_ID = \"bAVlsOwx2Z7sPgQJU7mdg1CE\"\nCLIENT_SECRET = \"FQ8FE57DvYy9C5NKsAgXaBXAQ061gsNXz71X8I8radX6zHU2\"", "_____no_output_____" ], [ "# HTTP settings to connect to LifeMonitor\n\ns = requests.session() # actually not mandatory, but just to share some settings among requests\n\n# if you use self-signed certificates,\n# you have to uncomment the line below to disable SSL verification\ns.verify = False\n\n# common header settings\ns.headers.update({})", "_____no_output_____" ], [ "# LifeMonitor URLs\nlm_base_url = \"https://lm:8443\"\nlm_token_url = f\"{lm_base_url}/oauth2/token\"", "_____no_output_____" ], [ "# Get an authorization token from LifeMonitor\ntoken_response = s.post(\n lm_token_url, \n data={\n \"client_id\": CLIENT_ID,\n \"client_secret\": CLIENT_SECRET,\n \"grant_type\": \"client_credentials\",\n \"scope\": \"read write\"\n }, allow_redirects=True, verify=False)\nassert token_response.status_code == 200, \"OAuth2 Error\"\ntoken = token_response.json()\npp(token)", "{ 'access_token': '1WHYfVkZBsiwf55DQ9XGKSiA1eUWxJ7HPtVlwAvm4d',\n 'expires_in': 864000,\n 'scope': 'read write',\n 'token_type': 'Bearer'}\n" ], [ "# Update headers with the OAuth2 token\ns.headers.update({'Authorization': f\"Bearer {token['access_token']}\"})", "_____no_output_____" ], [ "# Get registry info\nresponse = s.get(f\"{lm_base_url}/registries/current\")\nassert response.status_code == 200, f\"Unexpected error {response.status_code}: {response.content}\"\ncurrent_registry = response.json()\npp(current_registry)", "{ 'name': 'seek',\n 'type': 'seek',\n 'uri': 'https://seek:3000',\n 'uuid': '1c40002b-8676-4f51-b529-9ff02ff78234'}\n" ] ] ]
[ "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a6e87b6ccdf4d95d3aa284e4ec8c3a0edc9fee
8,198
ipynb
Jupyter Notebook
jnotebook/mdp_to_csv.ipynb
edervishaj/spotify-recsys-challenge
4077201ac7e4ed9da433bd10a92c183614182437
[ "Apache-2.0" ]
3
2018-10-12T20:19:57.000Z
2019-12-11T01:11:38.000Z
jnotebook/mdp_to_csv.ipynb
kiminh/spotify-recsys-challenge
5e7844a77ce3c26658400f161d2d74d682f30e69
[ "Apache-2.0" ]
null
null
null
jnotebook/mdp_to_csv.ipynb
kiminh/spotify-recsys-challenge
5e7844a77ce3c26658400f161d2d74d682f30e69
[ "Apache-2.0" ]
4
2018-10-27T20:30:18.000Z
2020-10-14T07:43:27.000Z
30.70412
118
0.499024
[ [ [ "import json\nimport os\nfrom pprint import *\nfrom tqdm import *\nfrom utils.definitions import ROOT_DIR\npath_load = \"mpd.v1/data/\" #json folder\npath_save = ROOT_DIR + \"/data/original/\" #where to save csv", "_____no_output_____" ], [ "playlist_fields = ['pid','name', 'collaborative', 'modified_at', 'num_albums', 'num_tracks', 'num_followers',\n'num_tracks', 'num_edits', 'duration_ms', 'num_artists','description']\n### care, the description field is optional\n\ntrack_fields = ['tid', 'arid' , 'alid', 'track_uri', 'track_name', 'duration_ms']\n\nalbum_fields = ['alid','album_uri','album_name']\n\nartist_fields = ['arid','artist_uri','artist_name']\n\ninteraction_fields = ['pid','tid','pos']\n\ninteractions = []\nplaylists = []\ntracks = []\nartists = []\nalbums = []\n\ncount_files = 0\ncount_playlists = 0\ncount_interactions = 0\ncount_tracks = 0\ncount_artists = 0\ncount_albums = 0\ndict_tracks = {}\ndict_artists = {}\ndict_albums = {}\n\n\ndef process_mpd(path):\n global count_playlists\n global count_files\n filenames = os.listdir(path)\n for filename in tqdm(sorted(filenames)):\n if filename.startswith(\"mpd.slice.\") and filename.endswith(\".json\"):\n fullpath = os.sep.join((path, filename))\n f = open(fullpath)\n js = f.read()\n f.close()\n mpd_slice = json.loads(js)\n process_info(mpd_slice['info'])\n for playlist in mpd_slice['playlists']:\n process_playlist(playlist)\n pid = playlist['pid']\n for track in playlist['tracks']:\n track['pid']=pid\n new = add_id_artist(track)\n if new: process_artist(track)\n new = add_id_album(track)\n if new: process_album(track)\n new = add_id_track(track)\n if new: process_track(track)\n process_interaction(track)\n count_playlists += 1\n count_files +=1\n\n show_summary()\n \ndef process_info(value):\n #print (json.dumps(value, indent=3, sort_keys=False))\n pass\n\ndef add_id_track(track):\n global count_tracks\n if track['track_uri'] not in dict_tracks:\n dict_tracks[track['track_uri']] = count_tracks\n track['tid'] = count_tracks\n count_tracks += 1\n return True\n else:\n track['tid'] = dict_tracks[track['track_uri']]\n return False\n\ndef add_id_artist(track):\n global count_artists\n if track['artist_uri'] not in dict_artists:\n dict_artists[track['artist_uri']] = count_artists\n track['arid'] = count_artists\n count_artists += 1\n return True\n else:\n track['arid'] = dict_artists[track['artist_uri']]\n return False\n\ndef add_id_album(track):\n global count_albums\n if track['album_uri'] not in dict_albums:\n dict_albums[track['album_uri']] = count_albums\n track['alid'] = count_albums\n count_albums += 1\n return True\n else:\n track['alid'] = dict_albums[track['album_uri']]\n return False\n\ndef process_track(track):\n global track_fields\n info = []\n for field in track_fields:\n info.append(track[field])\n tracks.append(info)\n\ndef process_album(track):\n global album_fields\n info = []\n for field in album_fields:\n info.append(track[field])\n albums.append(info)\n\ndef process_artist(track):\n global artist_fields\n info = []\n for field in artist_fields:\n info.append(track[field])\n artists.append(info)\n\ndef process_interaction(track):\n global interaction_fields\n global count_interactions\n info = []\n for field in interaction_fields:\n info.append(track[field])\n interactions.append(info)\n count_interactions +=1\n\ndef process_playlist(playlist):\n global playlist_fields\n if not 'description' in playlist:\n playlist['description'] = None\n info = []\n for field in playlist_fields:\n 
info.append(playlist[field])\n playlists.append(info)\n \n \n \ndef show_summary():\n print (count_files)\n print (count_playlists)\n print (count_tracks)\n print (count_artists)\n print (count_albums)\n print (count_interactions)", "_____no_output_____" ], [ "process_mpd(path_load)", "100%|██████████| 1000/1000 [11:35<00:00, 1.44it/s]" ], [ "import csv\n\nwith open(path_save+\"artists.csv\", \"w\") as f:\n writer = csv.writer(f,delimiter = \"\\t\",)\n writer.writerow(artist_fields)\n writer.writerows(artists)\nprint (\"artists.csv done\")\n\nwith open(path_save+\"albums.csv\", \"w\") as f:\n writer = csv.writer(f,delimiter = \"\\t\",)\n writer.writerow(album_fields)\n writer.writerows(albums)\nprint (\"albums.csv done\")\n \nwith open(path_save+\"interactions.csv\", \"w\") as f:\n writer = csv.writer(f,delimiter = \"\\t\",)\n writer.writerow(interaction_fields)\n writer.writerows(interactions)\nprint (\"interactions.csv done\")\n\nwith open(path_save+\"tracks.csv\", \"w\") as f:\n writer = csv.writer(f,delimiter = \"\\t\",)\n writer.writerow(track_fields)\n writer.writerows(tracks)\nprint (\"tracks.csv done\")\n\nwith open(path_save+\"playlists.csv\", \"w\") as f:\n writer = csv.writer(f,delimiter = \"\\t\",)\n writer.writerow(playlist_fields)\n writer.writerows(playlists)\nprint (\"playlists.csv done\")", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code" ] ]
d0a6f1422cab0f7febe14c850aac264fe8ca1403
594,768
ipynb
Jupyter Notebook
dmml2/Shailender_Ashish_DMML_Assn_2.ipynb
AshishSinha5/mlAlgos
003f691e92c7978626de0b2cb3533a164434159d
[ "Apache-2.0" ]
null
null
null
dmml2/Shailender_Ashish_DMML_Assn_2.ipynb
AshishSinha5/mlAlgos
003f691e92c7978626de0b2cb3533a164434159d
[ "Apache-2.0" ]
null
null
null
dmml2/Shailender_Ashish_DMML_Assn_2.ipynb
AshishSinha5/mlAlgos
003f691e92c7978626de0b2cb3533a164434159d
[ "Apache-2.0" ]
1
2020-01-25T17:41:15.000Z
2020-01-25T17:41:15.000Z
731.571956
107,126
0.943738
[ [ [ "Submitted by Shailender Joseph and Ashish Kumar Sinha", "_____no_output_____" ], [ "__Importing Libraries__", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport numpy as np\nfrom sklearn.preprocessing import LabelEncoder\nfrom sklearn.tree import DecisionTreeClassifier\nfrom sklearn.naive_bayes import GaussianNB\nfrom sklearn.svm import SVC\nfrom sklearn.metrics import accuracy_score, recall_score, roc_curve, auc\nfrom sklearn import model_selection\nfrom sklearn.base import TransformerMixin\nfrom sklearn.model_selection import KFold\nimport matplotlib.pylab as plt\nimport matplotlib.patches as patches\nfrom numpy import interp\nfrom joblib import dump\nimport os\nimport joblib\nimport timeit\nfrom datetime import datetime\n\ncwd = os.getcwd()\nprint(cwd)", "C:\\Users\\Bala\\Documents\\CMI\\Courses\\1-2\\DMML\\Assignments\\A2\n" ], [ "DT = 'DT'\nDT_long = 'Decision Tree'\nGNB = 'GNB'\nGNB_long = 'Naive Bayes'\nSVM = 'SVM'\nSVM_long = 'Support Vector Machine'", "_____no_output_____" ] ], [ [ "### Data Preprocessing", "_____no_output_____" ] ], [ [ "# Replacing the 'unknown' values wit the most frequent values\nclass DataFrameImputer(TransformerMixin):\n\n def __init__(self):\n \"\"\"Impute missing values.\n\n Columns of dtype object are imputed with the most frequent value \n in column.\n\n Columns of other types are imputed with mean of column.\n\n \"\"\"\n def fit(self, X, y=None):\n\n self.fill = pd.Series([X[c].value_counts().index[0]\n if X[c].dtype == np.dtype('O') else X[c].mean() for c in X],\n index=X.columns)\n\n return self\n\n def transform(self, X, y=None):\n return X.fillna(self.fill)\n\n# Function to separate the object and non-object data and Impute the 'unknown' values\ndef preprocess(df):\n df[['age', 'job', 'marital', 'education', 'default', 'housing', 'loan',\n 'contact', 'month', 'day_of_week', 'duration', 'campaign', 'pdays',\n 'previous', 'poutcome', 'emp.var.rate', 'cons.price.idx',\n 'cons.conf.idx', 'euribor3m', 'nr.employed', 'y']] = df[['age', 'job', 'marital', 'education', 'default', 'housing', 'loan',\n 'contact', 'month', 'day_of_week', 'duration', 'campaign', 'pdays',\n 'previous', 'poutcome', 'emp.var.rate', 'cons.price.idx',\n 'cons.conf.idx', 'euribor3m', 'nr.employed', 'y']].replace('unknown', np.NaN)\n bank_object_data = df.select_dtypes(include=\"object\")\n bank_non_object_data = df.select_dtypes(exclude=\"object\")\n bank_object_data = DataFrameImputer().fit_transform(bank_object_data)\n label = LabelEncoder()\n bank_object_data = bank_object_data.apply(label.fit_transform)\n bank_final = pd.concat([bank_object_data, bank_non_object_data], axis = 1)\n return(bank_final)", "_____no_output_____" ], [ "df = pd.read_csv('bank-data/bank-additional-full.csv', sep = ';')\nbank_final = preprocess(df)", "_____no_output_____" ] ], [ [ "## Function to output k-fold estimates of accuracy, recall, precision, f1, and roc_auc ", "_____no_output_____" ] ], [ [ "def kfold_output(model, X, Y): #function for kfold output\n start = timeit.default_timer()\n begin = datetime.now()\n scoring = ['accuracy', 'recall', 'precision', 'f1', 'roc_auc']\n\n kfold = KFold(n_splits=10, random_state=100, shuffle = True)\n results_kfold = model_selection.cross_validate(model, X, Y, scoring=scoring, cv=kfold)\n print(\"Recall: %0.2f (+/- %0.2f)\" % (results_kfold['test_recall'].mean(), results_kfold['test_recall'].std()))\n print(\"Precision: %0.2f (+/- %0.2f)\" % (results_kfold['test_precision'].mean(), results_kfold['test_precision'].std()))\n print(\"F1 Score: %0.2f 
(+/- %0.2f)\" % (results_kfold['test_f1'].mean(), results_kfold['test_f1'].std()))\n print(\"Accuracy: %0.2f (+/- %0.2f)\" % (results_kfold['test_accuracy'].mean(), results_kfold['test_accuracy'].std()))\n print(\"ROC_AUC: %0.2f (+/- %0.2f)\" % (results_kfold['test_roc_auc'].mean(), results_kfold['test_roc_auc'].std()))\n \n print(results_kfold)\n \n stop = timeit.default_timer()\n end = datetime.now()\n print('Start Time: ', begin,\n 'Stop Time: ', end,\n 'Time Taken: ', stop - start)", "_____no_output_____" ] ], [ [ "## Function to print roc curves for k-fold validation sets alongwith their AUC", "_____no_output_____" ] ], [ [ "def classifier_roc(classifier, X_train_res, y_train_res):\n start = timeit.default_timer()\n begin = datetime.now()\n cv = KFold(n_splits=10, random_state=100, shuffle = True)\n cv_split_filenames = []\n\n tprs = []\n aucs = []\n mean_fpr = np.linspace(0, 1, 100)\n plt.figure(figsize=(10,10))\n i = 1\n file_name = ''\n if type(classifier) == type(DecisionTreeClassifier()):\n file_name = DT\n plot_title = DT_long\n elif type(classifier) == type(GaussianNB()):\n file_name = GNB\n plot_title = GNB_long\n else:\n file_name = SVM\n plot_title = SVM_long\n \n if not os.path.exists(plot_title):\n os.mkdir(cwd+\"/\"+plot_title)\n \n for train, test in cv.split(X_train_res, y_train_res):\n probas_ = classifier.fit(X_train_res.iloc[train], y_train_res.iloc[train]).predict_proba(X_train_res.iloc[test])\n \n cv_split_filenames = cwd+\"/\"+plot_title + \"/\" + file_name + str(i)\n dump(probas_,cv_split_filenames)\n # Compute ROC curve and area the curve\n fpr, tpr, thresholds = roc_curve(y_train_res[test], probas_[:, 1])\n tprs.append(interp(mean_fpr, fpr, tpr))\n tprs[-1][0] = 0.0\n roc_auc = auc(fpr, tpr)\n aucs.append(roc_auc)\n plt.plot(fpr, tpr, lw=1, alpha=0.3,\n label='ROC fold %d (AUC = %0.2f)' % (i, roc_auc))\n\n i += 1\n \n plt.plot([0, 1], [0, 1], linestyle='--', lw=2, color='r', \n label='Chance', alpha=.8)\n\n mean_tpr = np.mean(tprs, axis=0)\n mean_tpr[-1] = 1.0\n mean_auc = auc(mean_fpr, mean_tpr)\n std_auc = np.std(aucs)\n plt.plot(mean_fpr, mean_tpr, color='b',\n label=r'Mean ROC (AUC = %0.2f $\\pm$ %0.2f)' % (mean_auc, std_auc),\n lw=2, alpha=.8)\n\n std_tpr = np.std(tprs, axis=0)\n tprs_upper = np.minimum(mean_tpr + std_tpr, 1)\n tprs_lower = np.maximum(mean_tpr - std_tpr, 0)\n plt.fill_between(mean_fpr, tprs_lower, tprs_upper, color='grey', alpha=.2,\n label=r'$\\pm$ 1 std. 
dev.')\n\n plt.xlim([-0.01, 1.01])\n plt.ylim([-0.01, 1.01])\n plt.xlabel('False Positive Rate',fontsize=18)\n plt.ylabel('True Positive Rate',fontsize=18)\n plt.title('Cross-Validation ROC of ' + plot_title,fontsize=14)\n plt.legend(loc=\"lower right\", prop={'size': 10})\n plt.show()\n stop = timeit.default_timer()\n end = datetime.now()\n print('Start Time: ', begin,\n 'Stop Time: ', end,\n 'Time Taken: ', stop - start)", "_____no_output_____" ] ], [ [ "## Function to plot roc curves using saved parameters", "_____no_output_____" ] ], [ [ "def plot_saved(directory,file_name, X_train_res,y_train_res):\n start = timeit.default_timer()\n begin = datetime.now()\n cv = KFold(n_splits=10, random_state=100, shuffle = True)\n tprs = []\n aucs = []\n mean_fpr = np.linspace(0, 1, 100)\n plt.figure(figsize=(10,10))\n i = 1\n for train, test in cv.split(X_train_res, y_train_res):\n probas_ = joblib.load(cwd +\"/\" + directory + \"/\" + file_name + str(i),mmap_mode = 'c')\n # Compute ROC curve and area the curve\n fpr, tpr, thresholds = roc_curve(y_train_res[test], probas_[:, 1])\n tprs.append(interp(mean_fpr, fpr, tpr))\n tprs[-1][0] = 0.0\n roc_auc = auc(fpr, tpr)\n aucs.append(roc_auc)\n plt.plot(fpr, tpr, lw=1, alpha=0.3,\n label='ROC fold %d (AUC = %0.2f)' % (i, roc_auc))\n i += 1\n plt.plot([0, 1], [0, 1], linestyle='--', lw=2, color='r',label='Chance', alpha=.8) \n mean_tpr = np.mean(tprs, axis=0)\n mean_tpr[-1] = 1.0\n mean_auc = auc(mean_fpr, mean_tpr)\n std_auc = np.std(aucs)\n plt.plot(mean_fpr, mean_tpr, color='b',\n label=r'Mean ROC (AUC = %0.2f $\\pm$ %0.2f)' % (mean_auc, std_auc),\n lw=2, alpha=.8)\n\n std_tpr = np.std(tprs, axis=0)\n tprs_upper = np.minimum(mean_tpr + std_tpr, 1)\n tprs_lower = np.maximum(mean_tpr - std_tpr, 0)\n plt.fill_between(mean_fpr, tprs_lower, tprs_upper, color='grey', alpha=.2,label=r'$\\pm$ 1 std. 
dev.')\n plt.xlim([-0.01, 1.01])\n plt.ylim([-0.01, 1.01])\n plt.xlabel('False Positive Rate',fontsize=18)\n plt.ylabel('True Positive Rate',fontsize=18)\n plt.title('Cross-Validation ROC of '+ directory,fontsize=14)\n plt.legend(loc=\"lower right\", prop={'size': 10})\n plt.show()\n stop = timeit.default_timer()\n end = datetime.now()\n print('Start Time: ', begin,\n 'Stop Time: ', end,\n 'Time Taken: ', stop - start)", "_____no_output_____" ] ], [ [ "## Building dataset for training", "_____no_output_____" ] ], [ [ "X_train_res = bank_final.drop(['y'], axis = 1)\ny_train_res = bank_final['y']", "_____no_output_____" ] ], [ [ "## K-fold ouputs and ROC curve for Decision Tree classifier ", "_____no_output_____" ] ], [ [ "dt_classifier = DecisionTreeClassifier(min_samples_split=60, min_samples_leaf=60, class_weight = 'balanced', random_state=20)\nkfold_output(dt_classifier,X_train_res,y_train_res)", "Recall: 0.92 (+/- 0.01)\nPrecision: 0.42 (+/- 0.01)\nF1 Score: 0.57 (+/- 0.01)\nAccuracy: 0.85 (+/- 0.00)\nROC_AUC: 0.94 (+/- 0.00)\n{'fit_time': array([0.13122725, 0.14262033, 0.11668587, 0.12962604, 0.11729074,\n 0.12234402, 0.12221599, 0.13319755, 0.11166501, 0.12965488]), 'score_time': array([0.01193357, 0.01296616, 0.00997257, 0.01296425, 0.01197124,\n 0.01097226, 0.00993609, 0.00997043, 0.01200151, 0.01492405]), 'test_accuracy': array([0.84729303, 0.85311969, 0.84340859, 0.84972081, 0.83636805,\n 0.84680748, 0.84777859, 0.84098082, 0.84944148, 0.84725595]), 'test_recall': array([0.91397849, 0.89497717, 0.93528184, 0.90356394, 0.93075356,\n 0.91823899, 0.92144374, 0.93023256, 0.89498807, 0.92222222]), 'test_precision': array([0.41913215, 0.41219769, 0.42184557, 0.42928287, 0.4165907 ,\n 0.42524272, 0.42382812, 0.41431262, 0.39432177, 0.41129832]), 'test_f1': array([0.57471264, 0.56443485, 0.58144062, 0.58203916, 0.57556675,\n 0.58128733, 0.58060201, 0.5732899 , 0.54744526, 0.5688828 ]), 'test_roc_auc': array([0.93684282, 0.93304101, 0.9396826 , 0.93653503, 0.93707827,\n 0.93653158, 0.944074 , 0.93467862, 0.93624446, 0.94237156])}\nStart Time: 2020-04-26 15:41:10.738495 Stop Time: 2020-04-26 15:41:12.133646 Time Taken: 1.3957977999999969\n" ], [ "classifier_roc(dt_classifier, X_train_res, y_train_res)", "_____no_output_____" ] ], [ [ "## ROC curve for Decision Tree classifier using saved parameters", "_____no_output_____" ] ], [ [ "plot_saved(DT_long, DT, X_train_res,y_train_res)", "_____no_output_____" ] ], [ [ "## K-fold ouputs and ROC curve for Naive bayes classifier ", "_____no_output_____" ] ], [ [ "nb_classifier = GaussianNB(priors = [0.11, 0.89])\nkfold_output(nb_classifier,X_train_res,y_train_res)", "Recall: 0.83 (+/- 0.02)\nPrecision: 0.26 (+/- 0.01)\nF1 Score: 0.40 (+/- 0.02)\nAccuracy: 0.72 (+/- 0.01)\nROC_AUC: 0.86 (+/- 0.01)\n{'fit_time': array([0.03595591, 0.02892351, 0.03590584, 0.02889013, 0.02988458,\n 0.03091645, 0.02692842, 0.02892184, 0.02692819, 0.02692866]), 'score_time': array([0.01494098, 0.01296425, 0.01798654, 0.01196647, 0.01396418,\n 0.01196861, 0.01296496, 0.01196814, 0.01097107, 0.01499963]), 'test_accuracy': array([0.70648216, 0.72371935, 0.72177713, 0.72881767, 0.71279437,\n 0.7157077 , 0.7038116 , 0.72226269, 0.71345313, 0.73288004]), 'test_recall': array([0.8 , 0.81050228, 0.84968685, 0.83438155, 0.82688391,\n 0.83438155, 0.80254777, 0.8435518 , 0.82338902, 0.84222222]), 'test_precision': array([0.25 , 0.25177305, 0.27481431, 0.27715877, 0.26994681,\n 0.26711409, 0.25116279, 0.27161334, 0.23776706, 0.26917614]), 'test_f1': array([0.38095238, 0.38419913, 
0.41530612, 0.41610037, 0.40701754,\n 0.40467717, 0.38259109, 0.41091658, 0.36898396, 0.40796555]), 'test_roc_auc': array([0.83657709, 0.84262702, 0.87481417, 0.85677376, 0.85380566,\n 0.86321791, 0.83729385, 0.86067358, 0.86018314, 0.86652399])}\nStart Time: 2020-04-26 18:06:17.196246 Stop Time: 2020-04-26 18:06:17.648038 Time Taken: 0.4516958000003797\n" ], [ "classifier_roc(nb_classifier, X_train_res, y_train_res)", "_____no_output_____" ] ], [ [ "## ROC curve for Naive Bayes classifier using saved parameters", "_____no_output_____" ] ], [ [ "plot_saved(GNB_long, GNB, X_train_res,y_train_res)", "_____no_output_____" ] ], [ [ "## K-fold ouputs and ROC curve for SVM with plynomial kernel", "_____no_output_____" ] ], [ [ "svm_classifier = SVC(kernel = 'poly', random_state = 0, class_weight = 'balanced')\nkfold_output(svm_classifier,X_train_res,y_train_res)", "Recall: 0.86 (+/- 0.02)\nPrecision: 0.40 (+/- 0.01)\nF1 Score: 0.54 (+/- 0.01)\nAccuracy: 0.84 (+/- 0.00)\nROC_AUC: 0.92 (+/- 0.00)\n{'fit_time': array([30.32745337, 30.33124638, 31.4992497 , 30.54082561, 30.9915328 ,\n 35.33771086, 34.59846258, 33.66181946, 31.86866832, 35.12172508]), 'score_time': array([2.820364 , 2.73974872, 2.90521097, 2.79356456, 3.08678079,\n 3.08637118, 3.34132648, 3.44902897, 2.48543358, 2.93081903]), 'test_accuracy': array([0.83466861, 0.8443797 , 0.84413693, 0.84170915, 0.83224084,\n 0.83563972, 0.83224084, 0.83661083, 0.83827101, 0.83802817]), 'test_recall': array([0.84946237, 0.85159817, 0.88517745, 0.83018868, 0.87169043,\n 0.83857442, 0.86836518, 0.86680761, 0.849642 , 0.87111111]), 'test_precision': array([0.39264414, 0.39304531, 0.41938675, 0.40951396, 0.40530303,\n 0.4 , 0.39402697, 0.40196078, 0.37122002, 0.39160839]), 'test_f1': array([0.53704963, 0.53785148, 0.56912752, 0.54847645, 0.55332902,\n 0.54163846, 0.54208085, 0.54922974, 0.51669086, 0.54031702]), 'test_roc_auc': array([0.91830959, 0.9222479 , 0.92921265, 0.91880253, 0.91980792,\n 0.92055762, 0.92006788, 0.92358042, 0.92027065, 0.92962741])}\nStart Time: 2020-04-26 15:41:35.994475 Stop Time: 2020-04-26 15:47:29.939690 Time Taken: 353.94505630000003\n" ], [ "roc_svm_classifier = SVC(kernel = 'poly', probability = True, random_state = 0, class_weight = 'balanced')\nclassifier_roc(roc_svm_classifier, X_train_res, y_train_res)", "_____no_output_____" ] ], [ [ "## ROC curve for SVM classifier using saved parameters", "_____no_output_____" ] ], [ [ "plot_saved(SVM_long, SVM, X_train_res,y_train_res)", "_____no_output_____" ] ], [ [ "__Among the models we observed that:__ \n__SVM with 'poly' kernal and 'balanced' class_weights gave:__ \n #Recall: 0.86 (+/- 0.02); Precision: 0.40 (+/- 0.01);\n #F1 Score: 0.54 (+/- 0.01); Accuracy: 0.84 (+/- 0.00); ROC_AUC: 0.92 (+/- 0.00)\n\n__Gaussian Naive Bayes with priors priors = [0.11, 0.89] gave:__\n #Recall: 0.83 (+/- 0.02); Precision: 0.26 (+/- 0.01)\n #F1 Score: 0.40 (+/- 0.02); Accuracy: 0.72 (+/- 0.01); ROC_AUC: 0.86 (+/- 0.01)\n\n__The decision tree classifier with min_samples_split=60, min_samples_leaf=60, \nclass_weight = 'balanced' gave:__\n #Recall: 0.92 (+/- 0.01); Precision: 0.42 (+/- 0.01); F1 Score: 0.57 (+/- 0.01)\n #Accuracy: 0.85 (+/- 0.00); ROC_AUC: 0.94 (+/- 0.00)\n\n__Among the models the decision tree gave the best results as it had te highest recall, F1-score and\nROC_AUC but SVM was a close second to decision tree.__", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
d0a6f84a58eb3ab98d1152bbaf836620ee1b9f4c
46,087
ipynb
Jupyter Notebook
columntransformer_example.ipynb
allisonhonold/column_transformer_ferry_wait_blog
0b8d327c6346a6c2aeee65426fcce227b0b9b4f3
[ "MIT" ]
3
2020-05-30T06:04:08.000Z
2021-01-13T04:32:47.000Z
columntransformer_example.ipynb
allisonhonold/column_transformer_ferry_wait_blog
0b8d327c6346a6c2aeee65426fcce227b0b9b4f3
[ "MIT" ]
null
null
null
columntransformer_example.ipynb
allisonhonold/column_transformer_ferry_wait_blog
0b8d327c6346a6c2aeee65426fcce227b0b9b4f3
[ "MIT" ]
4
2020-05-30T06:04:27.000Z
2020-08-17T07:29:18.000Z
34.113249
133
0.382624
[ [ [ "# This Jupyter Notebook contains the full code needed to write the ColumnTransformer blog", "_____no_output_____" ], [ "## Import Necessary Packages", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport numpy as np\nfrom sklearn.compose import ColumnTransformer\nfrom sklearn.preprocessing import StandardScaler, OneHotEncoder\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.impute import SimpleImputer\nfrom sklearn.linear_model import LinearRegression\nfrom sklearn.pipeline import Pipeline\n\nfrom pytz import timezone", "_____no_output_____" ] ], [ [ "## Import Data and some pre-transformation data prep", "_____no_output_____" ] ], [ [ "# read the csvs with waits and weather\ndf = pd.read_csv('./data/dec2019.csv')\nweather_df = pd.read_csv('./data/dec2019weather.csv')", "_____no_output_____" ], [ "# rename the columns\ndf.columns = ['date_hour', 'wait_hrs']\n\n# cut the date_hours to the hour (no minutes/seconds) and convert to string for merging\ndf['date_hour'] = pd.to_datetime(df['date_hour'], utc=True).values.astype('datetime64[h]')\ndf['date_hour'] = df['date_hour'].astype('str')", "_____no_output_____" ], [ "# create dataframe of all possible departure hours in the month (as string for merging)\n# note that I chose to include non-ferry service hours at this stage\ndts = pd.DataFrame(columns=['date_hour'])\ndts['date_hour'] = pd.date_range(start='2019-12-01 00:00', \n end='2019-12-31 23:30', \n freq='H',\n ).astype('str')", "_____no_output_____" ], [ "# merge/join the waits to the dataframe of all departures\ndf_expanded = dts.merge(df, how='left', on='date_hour')\n\n# cast as datetime with timezone UTC\ndf_expanded['date_hour'] = pd.to_datetime(df_expanded['date_hour'], utc=True)\n\n# adjust time to PST\ndf_expanded['date_hour'] = [dt.astimezone(timezone('US/Pacific')) for dt in df_expanded['date_hour']]\n\n# remove non-sailing times (1 to 4 am for Edmonds (1-3 for Kingston))\ndf_expanded = df_expanded.set_index('date_hour')\ndf_expanded = df_expanded.between_time('5:00', '00:59')\n\n# reset index for modeling\ndf_expanded = df_expanded.reset_index()", "_____no_output_____" ], [ "weather_df.columns = ['date', 'max_temp', 'avg_temp', 'min_temp']", "_____no_output_____" ], [ "weather_df['date'] = pd.to_datetime(weather_df['date'])", "_____no_output_____" ], [ "df_expanded['date'] = pd.to_datetime(df_expanded['date_hour']).values.astype('datetime64[D]')\ndf_expanded = df_expanded.merge(weather_df, how='left', on='date')\ndf_expanded.head()", "_____no_output_____" ] ], [ [ "## Simple Column Transformer Example", "_____no_output_____" ] ], [ [ "# a little cheating to extract the day of the week \n# and hour of the day w/out using a transformer \n# (see below for the \"real\" version)\ndf_simple = df_expanded.copy()\ndf_simple['weekday'] = [dt.weekday() for dt in df_simple['date_hour']]\ndf_simple['hour'] = [dt.hour for dt in df_simple['date_hour']]", "_____no_output_____" ], [ "df_simple.head()", "_____no_output_____" ], [ "X = df_simple.drop(columns='wait_hrs')\ny = df_simple['wait_hrs'].fillna(value=0)\n\nX_train, X_test, y_train, y_test = train_test_split(X, y, random_state=111)", "_____no_output_____" ], [ "# define column transformer and set n_jobs to have it run on all cores\ncol_transformer = ColumnTransformer(\n transformers=[\n ('ss', StandardScaler(), ['max_temp', 'avg_temp', 'min_temp']),\n ('ohe', OneHotEncoder(), ['weekday', 'hour'])],\n remainder='drop',\n n_jobs=-1\n )", "_____no_output_____" ], [ "X_train_transformed = 
col_transformer.fit_transform(X_train)", "_____no_output_____" ], [ "X_train_transformed", "_____no_output_____" ], [ "lr = LinearRegression()\n\npipe = Pipeline([\n (\"preprocessing\", col_transformer),\n (\"lr\", lr)\n ])", "_____no_output_____" ], [ "pipe.fit(X_train, y_train)", "_____no_output_____" ], [ "preds_train = pipe.predict(X_train)\npreds_test = pipe.predict(X_test)", "_____no_output_____" ], [ "preds_train[0:5]", "_____no_output_____" ], [ "preds_test[0:5]", "_____no_output_____" ], [ "col_transformer.get_feature_names", "_____no_output_____" ], [ "col_transformer.named_transformers_['ohe'].get_feature_names()", "_____no_output_____" ], [ "for transformer in col_transformer.named_transformers_.values():\n try:\n transformer.get_feature_names()\n except:\n print('SS col')\n else:\n print(transformer.get_feature_names())", "SS col\n['x0_0.0' 'x0_1.0' 'x0_2.0' 'x0_3.0' 'x0_4.0' 'x0_5.0' 'x0_6.0' 'x1_0.0'\n 'x1_5.0' 'x1_6.0' 'x1_7.0' 'x1_8.0' 'x1_9.0' 'x1_10.0' 'x1_11.0'\n 'x1_12.0' 'x1_13.0' 'x1_14.0' 'x1_15.0' 'x1_16.0' 'x1_17.0' 'x1_18.0'\n 'x1_19.0' 'x1_20.0' 'x1_21.0' 'x1_22.0' 'x1_23.0']\nSS col\n" ] ], [ [ "## More complex column transformer example: imputing THEN standard scale/ohe", "_____no_output_____" ] ], [ [ "# define transformers\nsi_0 = SimpleImputer(strategy='constant', fill_value=0)\nss = StandardScaler()\nohe = OneHotEncoder()\n\n# define column groups with same processing\ncat_vars = ['weekday', 'hour']\nnum_vars = ['max_temp', 'avg_temp', 'min_temp']\n\n# set up pipelines for each column group\ncategorical_pipe = Pipeline([\n ('si_0', si_0), \n ('ohe', ohe)\n ])\nnumeric_pipe = Pipeline([\n ('si_0', si_0), \n ('ss', ss)\n ])\n\n# set up columnTransformer\ncol_transformer = ColumnTransformer(\n transformers=[\n ('nums', numeric_pipe, num_vars),\n ('cats', categorical_pipe, cat_vars)\n ],\n remainder='drop',\n n_jobs=-1\n )", "_____no_output_____" ], [ "pipe = Pipeline([\n (\"preprocessing\", col_transformer),\n (\"lr\", lr)\n ])", "_____no_output_____" ], [ "pipe.fit(X_train, y_train)", "_____no_output_____" ], [ "preds_train = pipe.predict(X_train)\npreds_test = pipe.predict(X_test)", "_____no_output_____" ], [ "preds_train[0:10]", "_____no_output_____" ], [ "preds_test[0:10]", "_____no_output_____" ], [ "col_transformer.named_transformers_['cats'].named_steps['ohe'].get_feature_names()", "_____no_output_____" ] ], [ [ "## Create your own custom transformer", "_____no_output_____" ] ], [ [ "from sklearn.base import TransformerMixin, BaseEstimator\n\nclass DateTransformer(TransformerMixin, BaseEstimator):\n \"\"\"Extracts features from datetime column\n \n Returns:\n hour: hour\n day: Between 1 and the number of days in the given month of the given year.\n month: Between 1 and 12 inclusive.\n year: four-digit year\n weekday:day of the week as an integer, where Monday is 0 and Sunday is 6\n \"\"\"\n\n def fit(self, x, y=None):\n return self\n\n \n def transform(self, x, y=None):\n result = pd.DataFrame(x, columns=['date_hour'])\n result['hour'] = [dt.hour for dt in result['date_hour']]\n result['day'] = [dt.day for dt in result['date_hour']]\n result['month'] = [dt.month for dt in result['date_hour']]\n result['year'] = [dt.year for dt in result['date_hour']]\n result['weekday'] = [dt.weekday() for dt in result['date_hour']]\n return result[['hour', 'day', 'month', 'year', 'weekday']]\n \n \n def get_feature_names(self):\n return ['hour','day', 'month', 'year', 'weekday']", "_____no_output_____" ], [ "X = df_expanded.drop(columns='wait_hrs')\ny = 
df_simple['wait_hrs'].fillna(value=0)\n\nX_train, X_test, y_train, y_test = train_test_split(X, y, random_state=111)", "_____no_output_____" ], [ "X.head()", "_____no_output_____" ], [ "time_preprocessing = Pipeline([\n ('date', DateTransformer()),\n ('ohe', OneHotEncoder(categories='auto'))\n ])\n\nct = ColumnTransformer(\n transformers=[\n ('ss', StandardScaler(), ['max_temp', 'avg_temp', 'min_temp']),\n ('date_exp', time_preprocessing, ['date_hour'])],\n remainder='drop',\n )\n\npipe = Pipeline([('preprocessor', ct),\n ('lr', lr)])", "_____no_output_____" ], [ "pipe.fit(X_train, y_train)", "_____no_output_____" ], [ "preds_train = pipe.predict(X_train)\npreds_test = pipe.predict(X_test)", "_____no_output_____" ], [ "lr.coef_", "_____no_output_____" ], [ "ct.named_transformers_['date_exp'].named_steps['ohe'].get_feature_names()", "_____no_output_____" ], [ "ct.named_transformers_['date_exp'].named_steps['date'].get_feature_names()", "_____no_output_____" ] ], [ [ "## Rare features with ColumnTransformer", "_____no_output_____" ] ], [ [ "df = pd.DataFrame()\ndf['cat1'] = [0, 1, 1, 1, 1, 1, 1, 1, 1, 1]\ndf['cat2'] = [0, 0, 0, 0, 0, 2, 2, 2, 2, 2]\ndf['num1'] = [np.nan, 1, 1.1, .9, .8, np.nan, 2, 2.2, 1.5, np.nan]\ndf['num2'] = [1.1, 1.1, 1.1, 1.1, 1.1, 1.2, 1.2, 1.2, 1.2, 1.2]\n\ntarget = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\n\nX_train, X_test, y_train, y_test = train_test_split(df, target, random_state=111)", "_____no_output_____" ], [ "num_pipe = Pipeline([\n ('si', SimpleImputer(add_indicator=True)),\n ('ss', StandardScaler())\n ])\n\nct = ColumnTransformer(\n transformers=[('ohe', OneHotEncoder(categories=[[0,1], [0,2]]), ['cat1', 'cat2']),\n ('numeric', num_pipe, ['num1', 'num2'])])", "_____no_output_____" ], [ "pipe = Pipeline([\n ('preprocessor', ct),\n ('lr', lr)\n])", "_____no_output_____" ], [ "pipe.fit(X_train, y_train)", "_____no_output_____" ], [ "preds_train = pipe.predict(X_train)", "_____no_output_____" ], [ "preds_test = pipe.predict(X_test)", "_____no_output_____" ], [ "ct.fit_transform(X_train)", "_____no_output_____" ], [ "ct.fit_transform(X_test)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code" ] ]
d0a6fdbdbe5b24857b1f6ed2da85bbcfa3043bbb
12,395
ipynb
Jupyter Notebook
cases/.ipynb_checkpoints/beta_FigE_M_AL-checkpoint.ipynb
arkopaldutt/GML_Glauber_Dynamics
9c0afd353bdcb7b8bcc835daeb337d901686e774
[ "MIT" ]
null
null
null
cases/.ipynb_checkpoints/beta_FigE_M_AL-checkpoint.ipynb
arkopaldutt/GML_Glauber_Dynamics
9c0afd353bdcb7b8bcc835daeb337d901686e774
[ "MIT" ]
1
2021-01-13T06:21:38.000Z
2021-01-13T06:21:39.000Z
cases/.ipynb_checkpoints/beta_FigE_M_AL-checkpoint.ipynb
arkopaldutt/GML_Glauber_Dynamics
9c0afd353bdcb7b8bcc835daeb337d901686e774
[ "MIT" ]
null
null
null
37.560606
227
0.620734
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
d0a70e5bb8d066d72dd866a2acd1d3870df9e2b3
26,151
ipynb
Jupyter Notebook
cs231n/1.1 k-Nearest Neighbor.ipynb
shouya/thinking-dumps
a6fc111e02dc631f56302bb059d855446792bebc
[ "MIT" ]
24
2015-02-14T17:18:34.000Z
2022-01-09T01:02:15.000Z
cs231n/1.1 k-Nearest Neighbor.ipynb
shouya/thinking-dumps
a6fc111e02dc631f56302bb059d855446792bebc
[ "MIT" ]
1
2015-06-14T06:07:33.000Z
2015-08-04T22:05:11.000Z
cs231n/1.1 k-Nearest Neighbor.ipynb
shouya/thinking-dumps
a6fc111e02dc631f56302bb059d855446792bebc
[ "MIT" ]
2
2015-12-02T02:10:26.000Z
2017-06-03T06:32:26.000Z
103.363636
10,614
0.852969
[ [ [ "import numpy as np", "_____no_output_____" ], [ "class NearestNeighbor:\n def train(self, X, Y):\n \"\"\"X is NxD array, N: training examples, D: flattened img\"\"\"\n self.Ytrain = Y\n self.Xtrain = X\n\n def predict1(self, X, k=1):\n \"\"\"X is 1xD array, D: flattened img\"\"\"\n\n # L1 distance: d = sum |a - b|\n distances = np.sum(np.abs(self.Xtrain - X), axis=1)\n\n # L2 distance: d = sqrt(sum (a - b)^2)\n #distances = np.sqrt(np.sum((self.Xtrain - X) ** 2))\n\n min_indices = distances.argsort()\n Ypred = self.Ytrain[min_indices[:k]]\n _, counts = np.unique(Ypred, return_counts=True)\n return Ypred[counts.argmax()]", "_____no_output_____" ], [ "from common import *", "_____no_output_____" ], [ "cifar = cifar10()\ntrainX = cifar[b'data'][:8000, :]\ntrainY = cifar[b'labels'][:8000]\ntestX = cifar[b'data'][8000:, :]\ntestY = cifar[b'labels'][8000:]\nmeta = cifar10('meta')\nmeta", "_____no_output_____" ], [ "%matplotlib inline", "_____no_output_____" ], [ "i = 3\nimshow(trainX[i, :].reshape(32, 32, 3))\nmeta[b'label_names'][trainY[i]]", "_____no_output_____" ], [ "nn = NearestNeighbor()\nnn.train(trainX, trainY)", "_____no_output_____" ], [ "i = 1\npred = nn.predict1(testX[i], 5)\nimshow(testX[i, :].reshape(32, 32, 3))\n\"got %s, should be %s\" % (meta[b'label_names'][pred], meta[b'label_names'][testY[i]])", "_____no_output_____" ], [ "C = 50\ntestX_ = testX[:C, :]\ntestY_ = testY[:C]\nfor k in [1, 3, 9, 20, 50]:\n pred = [nn.predict1(testX_[i], k) for i in range(C)]\n wrong_cnt = np.count_nonzero(testY_ - pred)\n print(\"Precision for k=%d: %.2f\" % (k, 1 - (wrong_cnt / C)))", "Precision for k=1: 0.30\nPrecision for k=3: 0.30\nPrecision for k=9: 0.42\nPrecision for k=20: 0.28\nPrecision for k=50: 0.26\n" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]