repo_name (string, 7–94 chars) | repo_path (string, 4–237 chars) | repo_head_hexsha (string, 40 chars) | content (string, 10–680k chars) | apis (string, 2–840k chars)
---|---|---|---|---|
hanrui1sensetime/mmdeploy | mmdeploy/backend/tensorrt/init_plugins.py | f2594c624b67910e55e24418832bd96685425b2f | # Copyright (c) OpenMMLab. All rights reserved.
import ctypes
import glob
import logging
import os


def get_ops_path() -> str:
    """Get path of the TensorRT plugin library.

    Returns:
        str: A path of the TensorRT plugin library.
    """
    wildcard = os.path.abspath(
        os.path.join(
            os.path.dirname(__file__),
            '../../../build/lib/libmmdeploy_tensorrt_ops.so'))
    paths = glob.glob(wildcard)
    lib_path = paths[0] if len(paths) > 0 else ''
    return lib_path


def load_tensorrt_plugin() -> bool:
    """Load TensorRT plugins library.

    Returns:
        bool: True if the TensorRT plugin library is successfully loaded.
    """
    lib_path = get_ops_path()
    success = False
    if os.path.exists(lib_path):
        ctypes.CDLL(lib_path)
        logging.info(f'Successfully loaded tensorrt plugins from {lib_path}')
        success = True
    else:
        logging.warning('Could not load the library of tensorrt plugins. '
                        f'Because the file does not exist: {lib_path}')
    return success
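
# A minimal usage sketch (an assumption for illustration, not part of the
# module): load the custom ops before building or deserializing a TensorRT
# engine so that the plugin creators are registered with TensorRT.
#
#     from mmdeploy.backend.tensorrt.init_plugins import load_tensorrt_plugin
#     assert load_tensorrt_plugin(), 'mmdeploy TensorRT custom ops not found'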
| [apis: machine-extracted call-site metadata for the file above; omitted] |
dmitryvinn/ReAgent | reagent/test/world_model/test_seq2reward.py | f98825b9d021ec353a1f9087840a05fea259bf42 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import logging
import os
import random
import unittest
from typing import Optional
import numpy as np
import pytorch_lightning as pl
import torch
import torch.nn as nn
from parameterized import parameterized
from reagent.core import types as rlt
from reagent.core.parameters import (
NormalizationData,
NormalizationParameters,
ProblemDomain,
Seq2RewardTrainerParameters,
)
from reagent.gym.envs import Gym
from reagent.gym.utils import create_df_from_replay_buffer
from reagent.models.seq2reward_model import Seq2RewardNetwork
from reagent.net_builder.value.fully_connected import FullyConnected
from reagent.prediction.predictor_wrapper import (
Seq2RewardWithPreprocessor,
Seq2RewardPlanShortSeqWithPreprocessor,
FAKE_STATE_ID_LIST_FEATURES,
FAKE_STATE_ID_SCORE_LIST_FEATURES,
)
from reagent.preprocessing.identify_types import DO_NOT_PREPROCESS
from reagent.preprocessing.preprocessor import Preprocessor
from reagent.training.utils import gen_permutations
from reagent.training.world_model.compress_model_trainer import CompressModelTrainer
from reagent.training.world_model.seq2reward_trainer import get_Q, Seq2RewardTrainer
from torch.utils.data import DataLoader
logger = logging.getLogger(__name__)
SEED = 0
STRING_GAME_TESTS = [(False,), (True,)]


class FakeStepPredictionNetwork(nn.Module):
    def __init__(self, look_ahead_steps):
        super().__init__()
        self.look_ahead_steps = look_ahead_steps

    def forward(self, state: torch.Tensor):
        """
        Given the current state, predict the probability of
        experiencing the next n steps (1 <= n <= look_ahead_steps).

        For testing purposes, it outputs fixed fake numbers.
        """
        batch_size, _ = state.shape
        return torch.ones(batch_size, self.look_ahead_steps).float()
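
# The constant ones returned above act as equal per-step scores; normalized
# downstream (e.g. a softmax over equal values is uniform), they yield the
# [0.33, 0.33, 0.33] step probabilities referenced in the preprocessor test
# below.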


class FakeSeq2RewardNetwork(nn.Module):
    def forward(
        self,
        state: rlt.FeatureData,
        action: rlt.FeatureData,
        valid_reward_len: Optional[torch.Tensor] = None,
    ):
        """
        Mimic I/O of Seq2RewardNetwork but return fake reward.
        Reward is the concatenation of action indices, independent
        of state.

        For example, when seq_len = 3, batch_size = 1, action_num = 2,
        acc_reward = tensor(
            [[  0.],
             [  1.],
             [ 10.],
             [ 11.],
             [100.],
             [101.],
             [110.],
             [111.]]
        )

        Input action shape: seq_len, batch_size, num_action
        Output acc_reward shape: batch_size, 1
        """
        # pyre-fixme[9]: action has type `FeatureData`; used as `Tensor`.
        action = action.float_features.transpose(0, 1)
        action_indices = torch.argmax(action, dim=2).tolist()
        acc_reward = torch.tensor(
            list(map(lambda x: float("".join(map(str, x))), action_indices))
        ).reshape(-1, 1)
        logger.info(f"acc_reward: {acc_reward}")
        return rlt.Seq2RewardOutput(acc_reward=acc_reward)
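
# For intuition: each step's argmax action index above becomes one decimal
# digit of the fake reward, e.g. the action sequence [1, 1, 1] gives
# float("".join(["1", "1", "1"])) == 111.0, matching the table in the
# docstring.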


def create_string_game_data(
    dataset_size=10000, training_data_ratio=0.9, filter_short_sequence=False
):
    SEQ_LEN = 6
    NUM_ACTION = 2
    NUM_MDP_PER_BATCH = 5

    env = Gym(env_name="StringGame-v0", set_max_steps=SEQ_LEN)
    df = create_df_from_replay_buffer(
        env=env,
        problem_domain=ProblemDomain.DISCRETE_ACTION,
        desired_size=dataset_size,
        multi_steps=None,
        ds="2020-10-10",
    )

    if filter_short_sequence:
        batch_size = NUM_MDP_PER_BATCH
        time_diff = torch.ones(SEQ_LEN, batch_size)
        valid_step = SEQ_LEN * torch.ones(batch_size, dtype=torch.int64)[:, None]
        not_terminal = torch.Tensor(
            [0 if i == SEQ_LEN - 1 else 1 for i in range(SEQ_LEN)]
        )
        not_terminal = torch.transpose(not_terminal.tile(NUM_MDP_PER_BATCH, 1), 0, 1)
    else:
        batch_size = NUM_MDP_PER_BATCH * SEQ_LEN
        time_diff = torch.ones(SEQ_LEN, batch_size)
        valid_step = torch.arange(SEQ_LEN, 0, -1).tile(NUM_MDP_PER_BATCH)[:, None]
        not_terminal = torch.transpose(
            torch.tril(torch.ones(SEQ_LEN, SEQ_LEN), diagonal=-1).tile(
                NUM_MDP_PER_BATCH, 1
            ),
            0,
            1,
        )

    num_batches = int(dataset_size / SEQ_LEN / NUM_MDP_PER_BATCH)
    batches = [None for _ in range(num_batches)]
    batch_count, batch_seq_count = 0, 0
    batch_reward = torch.zeros(SEQ_LEN, batch_size)
    batch_action = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION)
    batch_state = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION)
    for mdp_id in sorted(set(df.mdp_id)):
        mdp = df[df["mdp_id"] == mdp_id].sort_values("sequence_number", ascending=True)
        if len(mdp) != SEQ_LEN:
            continue

        all_step_reward = torch.Tensor(list(mdp["reward"]))
        all_step_state = torch.Tensor([list(s.values()) for s in mdp["state_features"]])
        all_step_action = torch.zeros_like(all_step_state)
        all_step_action[torch.arange(SEQ_LEN), [int(a) for a in mdp["action"]]] = 1.0

        for j in range(SEQ_LEN):
            if filter_short_sequence and j > 0:
                break

            reward = torch.zeros_like(all_step_reward)
            reward[: SEQ_LEN - j] = all_step_reward[-(SEQ_LEN - j) :]
            batch_reward[:, batch_seq_count] = reward

            state = torch.zeros_like(all_step_state)
            state[: SEQ_LEN - j] = all_step_state[-(SEQ_LEN - j) :]
            batch_state[:, batch_seq_count] = state

            action = torch.zeros_like(all_step_action)
            action[: SEQ_LEN - j] = all_step_action[-(SEQ_LEN - j) :]
            batch_action[:, batch_seq_count] = action

            batch_seq_count += 1
            if batch_seq_count == batch_size:
                batches[batch_count] = rlt.MemoryNetworkInput(
                    reward=batch_reward,
                    action=rlt.FeatureData(float_features=batch_action),
                    state=rlt.FeatureData(float_features=batch_state),
                    next_state=rlt.FeatureData(
                        float_features=torch.zeros_like(batch_state)
                    ),  # fake, not used anyway
                    not_terminal=not_terminal,
                    time_diff=time_diff,
                    valid_step=valid_step,
                    step=None,
                )
                batch_count += 1
                batch_seq_count = 0
                batch_reward = torch.zeros_like(batch_reward)
                batch_action = torch.zeros_like(batch_action)
                batch_state = torch.zeros_like(batch_state)

    assert batch_count == num_batches

    num_training_batches = int(training_data_ratio * num_batches)
    training_data = DataLoader(
        batches[:num_training_batches], collate_fn=lambda x: x[0]
    )
    eval_data = DataLoader(batches[num_training_batches:], collate_fn=lambda x: x[0])
    return training_data, eval_data
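
# Note: each dataset element above is already a fully assembled
# MemoryNetworkInput batch, so ``collate_fn=lambda x: x[0]`` simply unwraps the
# single-element list that the default DataLoader collation would otherwise
# wrap it in.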


def train_seq2reward_model(training_data, learning_rate=0.01, num_epochs=5):
    SEQ_LEN, batch_size, NUM_ACTION = next(
        iter(training_data)
    ).action.float_features.shape
    assert SEQ_LEN == 6 and NUM_ACTION == 2

    seq2reward_network = Seq2RewardNetwork(
        state_dim=NUM_ACTION,
        action_dim=NUM_ACTION,
        num_hiddens=64,
        num_hidden_layers=2,
    )

    trainer_param = Seq2RewardTrainerParameters(
        learning_rate=learning_rate,
        multi_steps=SEQ_LEN,
        action_names=["0", "1"],
        gamma=1.0,
        view_q_value=True,
    )

    trainer = Seq2RewardTrainer(
        seq2reward_network=seq2reward_network, params=trainer_param
    )

    pl.seed_everything(SEED)
    pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True)
    pl_trainer.fit(trainer, training_data)
    return trainer


def eval_seq2reward_model(eval_data, seq2reward_trainer):
    SEQ_LEN, batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape

    initial_state = torch.Tensor([[0, 0]])
    initial_state_q_values = torch.squeeze(
        get_Q(
            seq2reward_trainer.seq2reward_network,
            initial_state,
            seq2reward_trainer.all_permut,
        )
    )

    total_mse_loss = 0
    total_q_values = torch.zeros(NUM_ACTION)
    total_action_distribution = torch.zeros(NUM_ACTION)
    for idx, batch in enumerate(eval_data):
        (
            mse_loss,
            _,
            q_values,
            action_distribution,
        ) = seq2reward_trainer.validation_step(batch, idx)
        total_mse_loss += mse_loss
        total_q_values += torch.tensor(q_values)
        total_action_distribution += torch.tensor(action_distribution)

    N_eval = len(eval_data)
    eval_mse_loss = total_mse_loss / N_eval
    eval_q_values = total_q_values / N_eval
    eval_action_distribution = total_action_distribution / N_eval
    return (
        initial_state_q_values,
        eval_mse_loss,
        eval_q_values,
        eval_action_distribution,
    )


def train_seq2reward_compress_model(
    training_data, seq2reward_network, learning_rate=0.1, num_epochs=5
):
    SEQ_LEN, batch_size, NUM_ACTION = next(
        iter(training_data)
    ).action.float_features.shape
    assert SEQ_LEN == 6 and NUM_ACTION == 2

    compress_net_builder = FullyConnected(sizes=[8, 8])
    state_normalization_data = NormalizationData(
        dense_normalization_parameters={
            0: NormalizationParameters(feature_type=DO_NOT_PREPROCESS),
            1: NormalizationParameters(feature_type=DO_NOT_PREPROCESS),
        }
    )
    compress_model_network = compress_net_builder.build_value_network(
        state_normalization_data,
        output_dim=NUM_ACTION,
    )

    trainer_param = Seq2RewardTrainerParameters(
        learning_rate=0.0,
        multi_steps=SEQ_LEN,
        action_names=["0", "1"],
        compress_model_learning_rate=learning_rate,
        gamma=1.0,
        view_q_value=True,
    )

    trainer = CompressModelTrainer(
        compress_model_network=compress_model_network,
        seq2reward_network=seq2reward_network,
        params=trainer_param,
    )

    pl.seed_everything(SEED)
    pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True)
    pl_trainer.fit(trainer, training_data)
    return trainer


def eval_seq2reward_compress_model(eval_data, compress_model_trainer):
    SEQ_LEN, batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape
    total_mse_loss = 0
    total_q_values = torch.zeros(NUM_ACTION)
    total_action_distribution = torch.zeros(NUM_ACTION)
    for idx, batch in enumerate(eval_data):
        (
            mse_loss,
            q_values,
            action_distribution,
            _,
        ) = compress_model_trainer.validation_step(batch, idx)
        total_mse_loss += mse_loss
        total_q_values += torch.tensor(q_values)
        total_action_distribution += torch.tensor(action_distribution)

    N_eval = len(eval_data)
    eval_mse_loss = total_mse_loss / N_eval
    eval_q_values = total_q_values / N_eval
    eval_action_distribution = total_action_distribution / N_eval
    return eval_mse_loss, eval_q_values, eval_action_distribution


class TestSeq2Reward(unittest.TestCase):
    def test_seq2reward_with_preprocessor_plan_short_sequence(self):
        self._test_seq2reward_with_preprocessor(plan_short_sequence=True)

    def test_seq2reward_with_preprocessor_plan_full_sequence(self):
        self._test_seq2reward_with_preprocessor(plan_short_sequence=False)

    def _test_seq2reward_with_preprocessor(self, plan_short_sequence):
        state_dim = 4
        action_dim = 2
        seq_len = 3
        model = FakeSeq2RewardNetwork()
        state_normalization_parameters = {
            i: NormalizationParameters(
                feature_type=DO_NOT_PREPROCESS, mean=0.0, stddev=1.0
            )
            for i in range(1, state_dim)
        }
        state_preprocessor = Preprocessor(state_normalization_parameters, False)

        if plan_short_sequence:
            step_prediction_model = FakeStepPredictionNetwork(seq_len)
            model_with_preprocessor = Seq2RewardPlanShortSeqWithPreprocessor(
                model,
                step_prediction_model,
                state_preprocessor,
                seq_len,
                action_dim,
            )
        else:
            model_with_preprocessor = Seq2RewardWithPreprocessor(
                model,
                state_preprocessor,
                seq_len,
                action_dim,
            )
        input_prototype = rlt.ServingFeatureData(
            float_features_with_presence=state_preprocessor.input_prototype(),
            id_list_features=FAKE_STATE_ID_LIST_FEATURES,
            id_score_list_features=FAKE_STATE_ID_SCORE_LIST_FEATURES,
        )
        q_values = model_with_preprocessor(input_prototype)
        if plan_short_sequence:
            # When planning for 1, 2, and 3 steps ahead, the expected
            # q values are respectively [0, 1], [1, 11], and [11, 111].
            # Weighting them by the predicted step probabilities
            # [0.33, 0.33, 0.33] gives
            # [(0 + 1 + 11) / 3, (1 + 11 + 111) / 3] = [4, 41].
            expected_q_values = torch.tensor([[4.0, 41.0]])
        else:
            expected_q_values = torch.tensor([[11.0, 111.0]])
        assert torch.all(expected_q_values == q_values)

    def test_get_Q(self):
        NUM_ACTION = 2
        MULTI_STEPS = 3
        BATCH_SIZE = 2
        STATE_DIM = 4
        all_permut = gen_permutations(MULTI_STEPS, NUM_ACTION)
        seq2reward_network = FakeSeq2RewardNetwork()
        state = torch.zeros(BATCH_SIZE, STATE_DIM)
        q_values = get_Q(seq2reward_network, state, all_permut)
        expected_q_values = torch.tensor([[11.0, 111.0], [11.0, 111.0]])
        logger.info(f"q_values: {q_values}")
        assert torch.all(expected_q_values == q_values)

    def test_gen_permutations_seq_len_1_action_6(self):
        SEQ_LEN = 1
        NUM_ACTION = 6
        expected_outcome = torch.tensor([[0], [1], [2], [3], [4], [5]])
        self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome)

    def test_gen_permutations_seq_len_3_num_action_2(self):
        SEQ_LEN = 3
        NUM_ACTION = 2
        expected_outcome = torch.tensor(
            [
                [0, 0, 0],
                [0, 0, 1],
                [0, 1, 0],
                [0, 1, 1],
                [1, 0, 0],
                [1, 0, 1],
                [1, 1, 0],
                [1, 1, 1],
            ]
        )
        self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome)

    def _test_gen_permutations(self, SEQ_LEN, NUM_ACTION, expected_outcome):
        # expected shape: SEQ_LEN, PERM_NUM, ACTION_DIM
        # (a reference sketch of the enumeration these assertions imply
        # follows after this class)
        result = gen_permutations(SEQ_LEN, NUM_ACTION)
        assert result.shape == (SEQ_LEN, NUM_ACTION ** SEQ_LEN, NUM_ACTION)
        outcome = torch.argmax(result.transpose(0, 1), dim=-1)
        assert torch.all(outcome == expected_outcome)

    @parameterized.expand(STRING_GAME_TESTS)
    @unittest.skipIf("SANDCASTLE" in os.environ, "Skipping long test on sandcastle.")
    def test_seq2reward_on_string_game_v0(self, filter_short_sequence):
        np.random.seed(SEED)
        random.seed(SEED)
        torch.manual_seed(SEED)
        training_data, eval_data = create_string_game_data(
            filter_short_sequence=filter_short_sequence
        )
        seq2reward_trainer = train_seq2reward_model(training_data)
        (
            initial_state_q_values,
            eval_mse_loss,
            eval_q_values,
            eval_action_distribution,
        ) = eval_seq2reward_model(eval_data, seq2reward_trainer)

        assert abs(initial_state_q_values[0].item() - 10) < 1.0
        assert abs(initial_state_q_values[1].item() - 5) < 1.0

        if filter_short_sequence:
            assert eval_mse_loss < 0.1
        else:
            # The same short sequences may have different total rewards due to
            # the missing states and actions in previous steps, so the trained
            # network is not able to reduce the MSE loss to values close to zero.
            assert eval_mse_loss < 10

        compress_model_trainer = train_seq2reward_compress_model(
            training_data, seq2reward_trainer.seq2reward_network
        )
        (
            compress_eval_mse_loss,
            compress_eval_q_values,
            compress_eval_action_distribution,
        ) = eval_seq2reward_compress_model(eval_data, compress_model_trainer)
        assert compress_eval_mse_loss < 1e-5
        assert torch.all(eval_q_values - compress_eval_q_values < 1e-5)
        assert torch.all(
            eval_action_distribution - compress_eval_action_distribution < 1e-5
        )
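

# A minimal reference sketch of the enumeration the gen_permutations tests
# above assert (inferred from the assertions; ReAgent's actual implementation
# in reagent.training.utils may differ): every action sequence of length
# seq_len, in lexicographic order, one-hot encoded per step, with output shape
# (seq_len, num_action ** seq_len, num_action).
import itertools

import torch.nn.functional as F


def gen_permutations_sketch(seq_len: int, num_action: int) -> torch.Tensor:
    # (num_action ** seq_len, seq_len) integer grid of all action sequences
    perms = torch.tensor(list(itertools.product(range(num_action), repeat=seq_len)))
    # one-hot encode each step, then move the sequence axis to the front
    return F.one_hot(perms, num_action).transpose(0, 1).float()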
| [apis: machine-extracted call-site metadata for the file above; omitted] |
grossmann-group/pyomo-MINLP-benchmarking | models_SHOT_convex/syn30m03hfsg.py | 714f0a0dffd61675649a805683c0627af6b4929e | # MINLP written by GAMS Convert at 01/15/21 11:37:33
#
# Equation counts
#     Total        E        G        L        N        X        C        B
#      1486      571      111      804        0        0        0        0
#
# Variable counts
#                  x        b        i      s1s      s2s       sc       si
#     Total     cont   binary  integer     sos1     sos2    scont     sint
#       865      685      180        0        0        0        0        0
# FX      0        0        0        0        0        0        0        0
#
# Nonzero counts
#     Total    const       NL      DLL
#      3373     3193      180        0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
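
# A minimal usage sketch, assuming a compatible MINLP solver is installed (the
# repo benchmarks the SHOT solver; 'bonmin' below is an illustrative choice,
# not part of the generated model): once the variables, constraints, and
# objective below are declared, the instance is solved via SolverFactory.
#
#     from pyomo.environ import SolverFactory
#     results = SolverFactory('bonmin').solve(m, tee=True)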
m.x2 = Var(within=Reals,bounds=(0,40),initialize=0)
m.x3 = Var(within=Reals,bounds=(0,40),initialize=0)
m.x4 = Var(within=Reals,bounds=(0,40),initialize=0)
m.x5 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x6 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x7 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x8 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x9 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x10 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x11 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x12 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x13 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x14 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x15 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x16 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x17 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x18 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x19 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x20 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x21 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x22 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x23 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x24 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x25 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x26 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x27 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x28 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x29 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x30 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x31 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x32 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x33 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x34 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x35 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x36 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x37 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x38 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x39 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x40 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x41 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x42 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x43 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x44 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x45 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x46 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x47 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x48 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x49 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x50 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x51 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x52 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x53 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x54 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x55 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x56 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x57 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x58 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x59 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x60 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x61 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x62 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x63 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x64 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x65 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x66 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x67 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x68 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x69 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x70 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x71 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x72 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x73 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x74 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x75 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x76 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x77 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x78 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x79 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x80 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x81 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x82 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x83 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x84 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x85 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x86 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x87 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x88 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x89 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x90 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x91 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x92 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x93 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x94 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x95 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x96 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x97 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x98 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x99 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x100 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x101 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x102 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x103 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x104 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x105 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x106 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x107 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x108 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x109 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x110 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x111 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x112 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x113 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x114 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x115 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x116 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x117 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x118 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x119 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x120 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x121 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x122 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x123 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x124 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x125 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x126 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x127 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x128 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x129 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x130 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x131 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x132 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x133 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x134 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x135 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x136 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x137 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x138 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x139 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x140 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x141 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x142 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x143 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x144 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x145 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x146 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x147 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x148 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x149 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x150 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x151 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x152 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x153 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x154 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x155 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x156 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x157 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x158 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x159 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x160 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x161 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x162 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x163 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x164 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x165 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x166 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x167 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x168 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x169 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x170 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x171 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x172 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x173 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x174 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x175 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x176 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x177 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x178 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x179 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x180 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x181 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x182 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x183 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x184 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x185 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x186 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x187 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x188 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x189 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x190 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x191 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x192 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x193 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x194 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x195 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x196 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x197 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x198 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x199 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x200 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x201 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x202 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x203 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x204 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x205 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x206 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x207 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x208 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x209 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x210 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x211 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x212 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x213 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x214 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x215 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x216 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x217 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x218 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x219 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x220 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x221 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x222 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x223 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x224 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x225 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x226 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x227 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x228 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x229 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x230 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x231 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x232 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x233 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x234 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x235 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x236 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x237 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x238 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x239 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x240 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x241 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x242 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x243 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x244 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x245 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x246 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x247 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x248 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x249 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x250 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x251 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x252 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x253 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x254 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x255 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x256 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x257 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x258 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x259 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x260 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x261 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x262 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x263 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x264 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x265 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x266 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x267 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x268 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x269 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x270 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x271 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x272 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x273 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x274 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x275 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x276 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x277 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x278 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x279 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x280 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x281 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x282 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x283 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x284 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x285 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x286 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x287 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x288 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x289 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x290 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x291 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x292 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x293 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x294 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x295 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x296 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x297 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x298 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x299 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x300 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x301 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x302 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x303 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x304 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x305 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x306 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x307 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x308 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x309 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x310 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x311 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x312 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x313 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x314 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x315 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x316 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x317 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x318 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x319 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x320 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x321 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x322 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x323 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x324 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x325 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x326 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x327 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x328 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x329 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x330 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x331 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x332 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x333 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x334 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x335 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x336 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x337 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x338 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x339 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x340 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x341 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x342 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x343 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x344 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x345 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x346 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x347 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x348 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x349 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x350 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x351 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x352 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x353 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x354 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x355 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x356 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x357 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x358 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x359 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x360 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x361 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x362 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x363 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x364 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x365 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x366 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x367 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x368 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x369 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x370 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x371 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x372 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x373 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x374 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x375 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x376 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x377 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x378 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x379 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x380 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x381 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x382 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x383 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x384 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x385 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x386 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x387 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x388 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x389 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x390 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x391 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x392 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x393 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x394 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x395 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x396 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x397 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x398 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x399 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x400 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x401 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x402 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x403 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x404 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x405 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x406 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x407 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x408 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x409 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x410 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x411 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x412 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x413 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x414 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x415 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x416 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x417 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x418 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x419 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x420 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x421 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x422 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x423 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x424 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x425 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x426 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x427 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x428 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x429 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x430 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x431 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x432 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x433 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x434 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x435 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x436 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x437 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x438 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x439 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x440 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x441 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x442 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x443 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x444 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x445 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x446 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x447 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x448 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x449 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x450 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x451 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x452 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x453 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x454 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x455 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x456 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x457 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x458 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x459 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x460 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x461 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x462 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x463 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x464 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x465 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x466 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x467 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x468 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x469 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x470 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x471 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x472 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x473 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x474 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x475 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x476 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x477 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x478 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x479 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x480 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x481 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x482 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x483 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x484 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x485 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x486 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x487 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x488 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x489 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x490 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x491 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x492 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x493 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x494 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x495 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x496 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x497 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x498 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x499 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x500 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x501 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x502 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x503 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x504 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x505 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x506 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x507 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x508 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x509 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x510 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x511 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x512 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x513 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x514 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x515 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x516 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x517 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x518 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x519 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x520 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x521 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x522 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x523 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x524 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x525 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x526 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x527 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x528 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x529 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x530 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x531 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x532 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x533 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x534 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x535 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x536 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x537 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x538 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x539 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x540 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x541 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x542 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x543 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x544 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x545 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x546 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x547 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x548 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x549 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x550 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x551 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x552 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x553 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x554 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x555 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x556 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x557 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x558 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x559 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x560 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x561 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x562 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x563 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x564 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x565 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x566 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x567 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x568 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x569 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x570 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x571 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x572 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x573 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x574 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x575 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x576 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x577 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x578 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x579 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x580 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x581 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x582 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x583 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x584 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x585 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x586 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x587 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x588 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x589 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x590 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x591 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x592 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x593 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x594 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x595 = Var(within=Reals,bounds=(0,None),initialize=0)
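# b596-b775 below are binary variables; the generator also emits the
# redundant explicit (0,1) bounds alongside within=Binary.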
m.b596 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b597 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b598 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b599 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b600 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b601 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b602 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b603 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b604 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b605 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b606 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b607 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b608 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b609 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b610 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b611 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b612 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b613 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b614 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b615 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b616 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b617 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b618 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b619 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b620 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b621 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b622 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b623 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b624 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b625 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b626 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b627 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b628 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b629 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b630 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b631 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b632 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b633 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b634 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b635 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b636 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b637 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b638 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b639 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b640 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b641 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b642 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b643 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b644 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b645 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b646 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b647 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b648 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b649 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b650 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b651 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b652 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b653 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b654 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b655 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b656 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b657 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b658 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b659 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b660 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b661 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b662 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b663 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b664 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b665 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b666 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b667 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b668 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b669 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b670 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b671 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b672 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b673 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b674 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b675 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b676 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b677 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b678 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b679 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b680 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b681 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b682 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b683 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b684 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b685 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b686 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b687 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b688 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b689 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b690 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b691 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b692 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b693 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b694 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b695 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b696 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b697 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b698 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b699 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b700 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b701 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b702 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b703 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b704 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b705 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b706 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b707 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b708 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b709 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b710 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b711 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b712 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b713 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b714 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b715 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b716 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b717 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b718 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b719 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b720 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b721 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b722 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b723 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b724 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b725 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b726 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b727 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b728 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b729 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b730 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b731 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b732 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b733 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b734 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b735 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b736 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b737 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b738 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b739 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b740 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b741 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b742 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b743 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b744 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b745 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b746 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b747 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b748 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b749 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b750 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b751 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b752 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b753 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b754 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b755 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b756 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b757 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b758 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b759 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b760 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b761 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b762 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b763 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b764 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b765 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b766 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b767 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b768 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b769 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b770 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b771 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b772 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b773 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b774 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b775 = Var(within=Binary,bounds=(0,1),initialize=0)
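# x776-x865 below are free (unbounded) real variables, presumably used by
# constraints defined later in the file.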
m.x776 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x777 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x778 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x779 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x780 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x781 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x782 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x783 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x784 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x785 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x786 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x787 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x788 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x789 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x790 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x791 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x792 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x793 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x794 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x795 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x796 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x797 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x798 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x799 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x800 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x801 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x802 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x803 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x804 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x805 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x806 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x807 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x808 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x809 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x810 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x811 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x812 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x813 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x814 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x815 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x816 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x817 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x818 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x819 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x820 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x821 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x822 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x823 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x824 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x825 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x826 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x827 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x828 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x829 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x830 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x831 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x832 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x833 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x834 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x835 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x836 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x837 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x838 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x839 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x840 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x841 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x842 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x843 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x844 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x845 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x846 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x847 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x848 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x849 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x850 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x851 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x852 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x853 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x854 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x855 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x856 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x857 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x858 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x859 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x860 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x861 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x862 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x863 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x864 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x865 = Var(within=Reals,bounds=(None,None),initialize=0)
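# Objective: by the sign pattern this reads as profit maximization (positive
# coefficients on product flows, negative coefficients on feed/intermediate
# flows and on the selection binaries as fixed charges). The generator emits
# no labels, so this interpretation is inferred, not stated in the source.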
m.obj = Objective(expr= - m.x2 - m.x3 - m.x4 + 5*m.x20 + 10*m.x21 + 5*m.x22 - 2*m.x35 - m.x36 - 2*m.x37 - 10*m.x86
- 5*m.x87 - 5*m.x88 - 5*m.x89 - 5*m.x90 - 5*m.x91 + 40*m.x110 + 30*m.x111 + 15*m.x112
+ 15*m.x113 + 20*m.x114 + 25*m.x115 + 10*m.x116 + 30*m.x117 + 40*m.x118 + 30*m.x119 + 20*m.x120
+ 20*m.x121 + 35*m.x122 + 50*m.x123 + 20*m.x124 + 20*m.x125 + 30*m.x126 + 35*m.x127 + 25*m.x128
+ 50*m.x129 + 10*m.x130 + 15*m.x131 + 20*m.x132 + 20*m.x133 + 30*m.x155 + 40*m.x156 + 40*m.x157
- m.x170 - m.x171 - m.x172 + 80*m.x194 + 90*m.x195 + 120*m.x196 + 285*m.x197 + 390*m.x198
+ 350*m.x199 + 290*m.x200 + 405*m.x201 + 190*m.x202 + 280*m.x203 + 400*m.x204 + 430*m.x205
+ 290*m.x206 + 300*m.x207 + 240*m.x208 + 350*m.x209 + 250*m.x210 + 300*m.x211 - 5*m.b686
- 4*m.b687 - 6*m.b688 - 8*m.b689 - 7*m.b690 - 6*m.b691 - 6*m.b692 - 9*m.b693 - 4*m.b694
- 10*m.b695 - 9*m.b696 - 5*m.b697 - 6*m.b698 - 10*m.b699 - 6*m.b700 - 7*m.b701 - 7*m.b702
- 4*m.b703 - 4*m.b704 - 3*m.b705 - 2*m.b706 - 5*m.b707 - 6*m.b708 - 7*m.b709 - 2*m.b710
- 5*m.b711 - 2*m.b712 - 4*m.b713 - 7*m.b714 - 4*m.b715 - 3*m.b716 - 9*m.b717 - 3*m.b718
- 7*m.b719 - 2*m.b720 - 9*m.b721 - 3*m.b722 - m.b723 - 9*m.b724 - 2*m.b725 - 6*m.b726 - 3*m.b727
- 4*m.b728 - 8*m.b729 - m.b730 - 2*m.b731 - 5*m.b732 - 2*m.b733 - 3*m.b734 - 4*m.b735 - 3*m.b736
- 5*m.b737 - 7*m.b738 - 6*m.b739 - 2*m.b740 - 8*m.b741 - 4*m.b742 - m.b743 - 4*m.b744 - m.b745
- 2*m.b746 - 5*m.b747 - 2*m.b748 - 9*m.b749 - 2*m.b750 - 9*m.b751 - 5*m.b752 - 8*m.b753
- 4*m.b754 - 2*m.b755 - 3*m.b756 - 8*m.b757 - 10*m.b758 - 6*m.b759 - 3*m.b760 - 4*m.b761
- 8*m.b762 - 7*m.b763 - 7*m.b764 - 3*m.b765 - 9*m.b766 - 4*m.b767 - 8*m.b768 - 6*m.b769
- 2*m.b770 - m.b771 - 3*m.b772 - 8*m.b773 - 3*m.b774 - 4*m.b775, sense=maximize)
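# c2-c52 below are linear flow-balance equalities: each stream variable
# equals the sum of the streams it splits into (or is merged from). This
# reading is inferred from the +/- structure of the generated equations.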
m.c2 = Constraint(expr= m.x2 - m.x5 - m.x8 == 0)
m.c3 = Constraint(expr= m.x3 - m.x6 - m.x9 == 0)
m.c4 = Constraint(expr= m.x4 - m.x7 - m.x10 == 0)
m.c5 = Constraint(expr= - m.x11 - m.x14 + m.x17 == 0)
m.c6 = Constraint(expr= - m.x12 - m.x15 + m.x18 == 0)
m.c7 = Constraint(expr= - m.x13 - m.x16 + m.x19 == 0)
m.c8 = Constraint(expr= m.x17 - m.x20 - m.x23 == 0)
m.c9 = Constraint(expr= m.x18 - m.x21 - m.x24 == 0)
m.c10 = Constraint(expr= m.x19 - m.x22 - m.x25 == 0)
m.c11 = Constraint(expr= m.x23 - m.x26 - m.x29 - m.x32 == 0)
m.c12 = Constraint(expr= m.x24 - m.x27 - m.x30 - m.x33 == 0)
m.c13 = Constraint(expr= m.x25 - m.x28 - m.x31 - m.x34 == 0)
m.c14 = Constraint(expr= m.x38 - m.x47 - m.x50 == 0)
m.c15 = Constraint(expr= m.x39 - m.x48 - m.x51 == 0)
m.c16 = Constraint(expr= m.x40 - m.x49 - m.x52 == 0)
m.c17 = Constraint(expr= m.x44 - m.x53 - m.x56 - m.x59 == 0)
m.c18 = Constraint(expr= m.x45 - m.x54 - m.x57 - m.x60 == 0)
m.c19 = Constraint(expr= m.x46 - m.x55 - m.x58 - m.x61 == 0)
m.c20 = Constraint(expr= m.x68 - m.x80 - m.x83 == 0)
m.c21 = Constraint(expr= m.x69 - m.x81 - m.x84 == 0)
m.c22 = Constraint(expr= m.x70 - m.x82 - m.x85 == 0)
m.c23 = Constraint(expr= - m.x71 - m.x89 + m.x92 == 0)
m.c24 = Constraint(expr= - m.x72 - m.x90 + m.x93 == 0)
m.c25 = Constraint(expr= - m.x73 - m.x91 + m.x94 == 0)
m.c26 = Constraint(expr= m.x74 - m.x95 - m.x98 == 0)
m.c27 = Constraint(expr= m.x75 - m.x96 - m.x99 == 0)
m.c28 = Constraint(expr= m.x76 - m.x97 - m.x100 == 0)
m.c29 = Constraint(expr= m.x77 - m.x101 - m.x104 - m.x107 == 0)
m.c30 = Constraint(expr= m.x78 - m.x102 - m.x105 - m.x108 == 0)
m.c31 = Constraint(expr= m.x79 - m.x103 - m.x106 - m.x109 == 0)
m.c32 = Constraint(expr= m.x134 - m.x137 == 0)
m.c33 = Constraint(expr= m.x135 - m.x138 == 0)
m.c34 = Constraint(expr= m.x136 - m.x139 == 0)
m.c35 = Constraint(expr= m.x137 - m.x140 - m.x143 == 0)
m.c36 = Constraint(expr= m.x138 - m.x141 - m.x144 == 0)
m.c37 = Constraint(expr= m.x139 - m.x142 - m.x145 == 0)
m.c38 = Constraint(expr= - m.x146 - m.x149 + m.x152 == 0)
m.c39 = Constraint(expr= - m.x147 - m.x150 + m.x153 == 0)
m.c40 = Constraint(expr= - m.x148 - m.x151 + m.x154 == 0)
m.c41 = Constraint(expr= m.x152 - m.x155 - m.x158 == 0)
m.c42 = Constraint(expr= m.x153 - m.x156 - m.x159 == 0)
m.c43 = Constraint(expr= m.x154 - m.x157 - m.x160 == 0)
m.c44 = Constraint(expr= m.x158 - m.x161 - m.x164 - m.x167 == 0)
m.c45 = Constraint(expr= m.x159 - m.x162 - m.x165 - m.x168 == 0)
m.c46 = Constraint(expr= m.x160 - m.x163 - m.x166 - m.x169 == 0)
m.c47 = Constraint(expr= m.x173 - m.x182 - m.x185 == 0)
m.c48 = Constraint(expr= m.x174 - m.x183 - m.x186 == 0)
m.c49 = Constraint(expr= m.x175 - m.x184 - m.x187 == 0)
m.c50 = Constraint(expr= m.x179 - m.x188 - m.x191 - m.x194 == 0)
m.c51 = Constraint(expr= m.x180 - m.x189 - m.x192 - m.x195 == 0)
m.c52 = Constraint(expr= m.x181 - m.x190 - m.x193 - m.x196 == 0)
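# From c53 onward the model encodes unit disjunctions. The nonlinear
# constraints follow an eps-perturbed perspective (convex-hull) pattern,
# with eps = 0.001:
#   (y/(eps + (1-eps)*b) - k*log(1 + x/(eps + (1-eps)*b))) * (eps + (1-eps)*b) <= 0
# and each group is paired with bound constraints of the form x - U*b <= 0
# and x' + U*b <= U, which force the disaggregated copies belonging to the
# inactive disjunct to zero. Naming this the convex-hull reformulation is an
# assumption based on the algebraic form; the source carries no annotations.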
m.c53 = Constraint(expr=(m.x224/(0.001 + 0.999*m.b596) - log(1 + m.x212/(0.001 + 0.999*m.b596)))*(0.001 + 0.999*m.b596)
<= 0)
m.c54 = Constraint(expr=(m.x225/(0.001 + 0.999*m.b597) - log(1 + m.x213/(0.001 + 0.999*m.b597)))*(0.001 + 0.999*m.b597)
<= 0)
m.c55 = Constraint(expr=(m.x226/(0.001 + 0.999*m.b598) - log(1 + m.x214/(0.001 + 0.999*m.b598)))*(0.001 + 0.999*m.b598)
<= 0)
m.c56 = Constraint(expr= m.x215 == 0)
m.c57 = Constraint(expr= m.x216 == 0)
m.c58 = Constraint(expr= m.x217 == 0)
m.c59 = Constraint(expr= m.x227 == 0)
m.c60 = Constraint(expr= m.x228 == 0)
m.c61 = Constraint(expr= m.x229 == 0)
m.c62 = Constraint(expr= m.x5 - m.x212 - m.x215 == 0)
m.c63 = Constraint(expr= m.x6 - m.x213 - m.x216 == 0)
m.c64 = Constraint(expr= m.x7 - m.x214 - m.x217 == 0)
m.c65 = Constraint(expr= m.x11 - m.x224 - m.x227 == 0)
m.c66 = Constraint(expr= m.x12 - m.x225 - m.x228 == 0)
m.c67 = Constraint(expr= m.x13 - m.x226 - m.x229 == 0)
m.c68 = Constraint(expr= m.x212 - 40*m.b596 <= 0)
m.c69 = Constraint(expr= m.x213 - 40*m.b597 <= 0)
m.c70 = Constraint(expr= m.x214 - 40*m.b598 <= 0)
m.c71 = Constraint(expr= m.x215 + 40*m.b596 <= 40)
m.c72 = Constraint(expr= m.x216 + 40*m.b597 <= 40)
m.c73 = Constraint(expr= m.x217 + 40*m.b598 <= 40)
m.c74 = Constraint(expr= m.x224 - 3.71357206670431*m.b596 <= 0)
m.c75 = Constraint(expr= m.x225 - 3.71357206670431*m.b597 <= 0)
m.c76 = Constraint(expr= m.x226 - 3.71357206670431*m.b598 <= 0)
m.c77 = Constraint(expr= m.x227 + 3.71357206670431*m.b596 <= 3.71357206670431)
m.c78 = Constraint(expr= m.x228 + 3.71357206670431*m.b597 <= 3.71357206670431)
m.c79 = Constraint(expr= m.x229 + 3.71357206670431*m.b598 <= 3.71357206670431)
m.c80 = Constraint(expr=(m.x230/(0.001 + 0.999*m.b599) - 1.2*log(1 + m.x218/(0.001 + 0.999*m.b599)))*(0.001 + 0.999*
m.b599) <= 0)
m.c81 = Constraint(expr=(m.x231/(0.001 + 0.999*m.b600) - 1.2*log(1 + m.x219/(0.001 + 0.999*m.b600)))*(0.001 + 0.999*
m.b600) <= 0)
m.c82 = Constraint(expr=(m.x232/(0.001 + 0.999*m.b601) - 1.2*log(1 + m.x220/(0.001 + 0.999*m.b601)))*(0.001 + 0.999*
m.b601) <= 0)
m.c83 = Constraint(expr= m.x221 == 0)
m.c84 = Constraint(expr= m.x222 == 0)
m.c85 = Constraint(expr= m.x223 == 0)
m.c86 = Constraint(expr= m.x233 == 0)
m.c87 = Constraint(expr= m.x234 == 0)
m.c88 = Constraint(expr= m.x235 == 0)
m.c89 = Constraint(expr= m.x8 - m.x218 - m.x221 == 0)
m.c90 = Constraint(expr= m.x9 - m.x219 - m.x222 == 0)
m.c91 = Constraint(expr= m.x10 - m.x220 - m.x223 == 0)
m.c92 = Constraint(expr= m.x14 - m.x230 - m.x233 == 0)
m.c93 = Constraint(expr= m.x15 - m.x231 - m.x234 == 0)
m.c94 = Constraint(expr= m.x16 - m.x232 - m.x235 == 0)
m.c95 = Constraint(expr= m.x218 - 40*m.b599 <= 0)
m.c96 = Constraint(expr= m.x219 - 40*m.b600 <= 0)
m.c97 = Constraint(expr= m.x220 - 40*m.b601 <= 0)
m.c98 = Constraint(expr= m.x221 + 40*m.b599 <= 40)
m.c99 = Constraint(expr= m.x222 + 40*m.b600 <= 40)
m.c100 = Constraint(expr= m.x223 + 40*m.b601 <= 40)
m.c101 = Constraint(expr= m.x230 - 4.45628648004517*m.b599 <= 0)
m.c102 = Constraint(expr= m.x231 - 4.45628648004517*m.b600 <= 0)
m.c103 = Constraint(expr= m.x232 - 4.45628648004517*m.b601 <= 0)
m.c104 = Constraint(expr= m.x233 + 4.45628648004517*m.b599 <= 4.45628648004517)
m.c105 = Constraint(expr= m.x234 + 4.45628648004517*m.b600 <= 4.45628648004517)
m.c106 = Constraint(expr= m.x235 + 4.45628648004517*m.b601 <= 4.45628648004517)
m.c107 = Constraint(expr= - 0.75*m.x236 + m.x260 == 0)
m.c108 = Constraint(expr= - 0.75*m.x237 + m.x261 == 0)
m.c109 = Constraint(expr= - 0.75*m.x238 + m.x262 == 0)
m.c110 = Constraint(expr= m.x239 == 0)
m.c111 = Constraint(expr= m.x240 == 0)
m.c112 = Constraint(expr= m.x241 == 0)
m.c113 = Constraint(expr= m.x263 == 0)
m.c114 = Constraint(expr= m.x264 == 0)
m.c115 = Constraint(expr= m.x265 == 0)
m.c116 = Constraint(expr= m.x26 - m.x236 - m.x239 == 0)
m.c117 = Constraint(expr= m.x27 - m.x237 - m.x240 == 0)
m.c118 = Constraint(expr= m.x28 - m.x238 - m.x241 == 0)
m.c119 = Constraint(expr= m.x38 - m.x260 - m.x263 == 0)
m.c120 = Constraint(expr= m.x39 - m.x261 - m.x264 == 0)
m.c121 = Constraint(expr= m.x40 - m.x262 - m.x265 == 0)
m.c122 = Constraint(expr= m.x236 - 4.45628648004517*m.b602 <= 0)
m.c123 = Constraint(expr= m.x237 - 4.45628648004517*m.b603 <= 0)
m.c124 = Constraint(expr= m.x238 - 4.45628648004517*m.b604 <= 0)
m.c125 = Constraint(expr= m.x239 + 4.45628648004517*m.b602 <= 4.45628648004517)
m.c126 = Constraint(expr= m.x240 + 4.45628648004517*m.b603 <= 4.45628648004517)
m.c127 = Constraint(expr= m.x241 + 4.45628648004517*m.b604 <= 4.45628648004517)
m.c128 = Constraint(expr= m.x260 - 3.34221486003388*m.b602 <= 0)
m.c129 = Constraint(expr= m.x261 - 3.34221486003388*m.b603 <= 0)
m.c130 = Constraint(expr= m.x262 - 3.34221486003388*m.b604 <= 0)
m.c131 = Constraint(expr= m.x263 + 3.34221486003388*m.b602 <= 3.34221486003388)
m.c132 = Constraint(expr= m.x264 + 3.34221486003388*m.b603 <= 3.34221486003388)
m.c133 = Constraint(expr= m.x265 + 3.34221486003388*m.b604 <= 3.34221486003388)
m.c134 = Constraint(expr=(m.x266/(0.001 + 0.999*m.b605) - 1.5*log(1 + m.x242/(0.001 + 0.999*m.b605)))*(0.001 + 0.999*
m.b605) <= 0)
m.c135 = Constraint(expr=(m.x267/(0.001 + 0.999*m.b606) - 1.5*log(1 + m.x243/(0.001 + 0.999*m.b606)))*(0.001 + 0.999*
m.b606) <= 0)
m.c136 = Constraint(expr=(m.x268/(0.001 + 0.999*m.b607) - 1.5*log(1 + m.x244/(0.001 + 0.999*m.b607)))*(0.001 + 0.999*
m.b607) <= 0)
m.c137 = Constraint(expr= m.x245 == 0)
m.c138 = Constraint(expr= m.x246 == 0)
m.c139 = Constraint(expr= m.x247 == 0)
m.c140 = Constraint(expr= m.x272 == 0)
m.c141 = Constraint(expr= m.x273 == 0)
m.c142 = Constraint(expr= m.x274 == 0)
m.c143 = Constraint(expr= m.x29 - m.x242 - m.x245 == 0)
m.c144 = Constraint(expr= m.x30 - m.x243 - m.x246 == 0)
m.c145 = Constraint(expr= m.x31 - m.x244 - m.x247 == 0)
m.c146 = Constraint(expr= m.x41 - m.x266 - m.x272 == 0)
m.c147 = Constraint(expr= m.x42 - m.x267 - m.x273 == 0)
m.c148 = Constraint(expr= m.x43 - m.x268 - m.x274 == 0)
m.c149 = Constraint(expr= m.x242 - 4.45628648004517*m.b605 <= 0)
m.c150 = Constraint(expr= m.x243 - 4.45628648004517*m.b606 <= 0)
m.c151 = Constraint(expr= m.x244 - 4.45628648004517*m.b607 <= 0)
m.c152 = Constraint(expr= m.x245 + 4.45628648004517*m.b605 <= 4.45628648004517)
m.c153 = Constraint(expr= m.x246 + 4.45628648004517*m.b606 <= 4.45628648004517)
m.c154 = Constraint(expr= m.x247 + 4.45628648004517*m.b607 <= 4.45628648004517)
m.c155 = Constraint(expr= m.x266 - 2.54515263975353*m.b605 <= 0)
m.c156 = Constraint(expr= m.x267 - 2.54515263975353*m.b606 <= 0)
m.c157 = Constraint(expr= m.x268 - 2.54515263975353*m.b607 <= 0)
m.c158 = Constraint(expr= m.x272 + 2.54515263975353*m.b605 <= 2.54515263975353)
m.c159 = Constraint(expr= m.x273 + 2.54515263975353*m.b606 <= 2.54515263975353)
m.c160 = Constraint(expr= m.x274 + 2.54515263975353*m.b607 <= 2.54515263975353)
m.c161 = Constraint(expr= - m.x248 + m.x278 == 0)
m.c162 = Constraint(expr= - m.x249 + m.x279 == 0)
m.c163 = Constraint(expr= - m.x250 + m.x280 == 0)
m.c164 = Constraint(expr= - 0.5*m.x254 + m.x278 == 0)
m.c165 = Constraint(expr= - 0.5*m.x255 + m.x279 == 0)
m.c166 = Constraint(expr= - 0.5*m.x256 + m.x280 == 0)
m.c167 = Constraint(expr= m.x251 == 0)
m.c168 = Constraint(expr= m.x252 == 0)
m.c169 = Constraint(expr= m.x253 == 0)
m.c170 = Constraint(expr= m.x257 == 0)
m.c171 = Constraint(expr= m.x258 == 0)
m.c172 = Constraint(expr= m.x259 == 0)
m.c173 = Constraint(expr= m.x281 == 0)
m.c174 = Constraint(expr= m.x282 == 0)
m.c175 = Constraint(expr= m.x283 == 0)
m.c176 = Constraint(expr= m.x32 - m.x248 - m.x251 == 0)
m.c177 = Constraint(expr= m.x33 - m.x249 - m.x252 == 0)
m.c178 = Constraint(expr= m.x34 - m.x250 - m.x253 == 0)
m.c179 = Constraint(expr= m.x35 - m.x254 - m.x257 == 0)
m.c180 = Constraint(expr= m.x36 - m.x255 - m.x258 == 0)
m.c181 = Constraint(expr= m.x37 - m.x256 - m.x259 == 0)
m.c182 = Constraint(expr= m.x44 - m.x278 - m.x281 == 0)
m.c183 = Constraint(expr= m.x45 - m.x279 - m.x282 == 0)
m.c184 = Constraint(expr= m.x46 - m.x280 - m.x283 == 0)
m.c185 = Constraint(expr= m.x248 - 4.45628648004517*m.b608 <= 0)
m.c186 = Constraint(expr= m.x249 - 4.45628648004517*m.b609 <= 0)
m.c187 = Constraint(expr= m.x250 - 4.45628648004517*m.b610 <= 0)
m.c188 = Constraint(expr= m.x251 + 4.45628648004517*m.b608 <= 4.45628648004517)
m.c189 = Constraint(expr= m.x252 + 4.45628648004517*m.b609 <= 4.45628648004517)
m.c190 = Constraint(expr= m.x253 + 4.45628648004517*m.b610 <= 4.45628648004517)
m.c191 = Constraint(expr= m.x254 - 30*m.b608 <= 0)
m.c192 = Constraint(expr= m.x255 - 30*m.b609 <= 0)
m.c193 = Constraint(expr= m.x256 - 30*m.b610 <= 0)
m.c194 = Constraint(expr= m.x257 + 30*m.b608 <= 30)
m.c195 = Constraint(expr= m.x258 + 30*m.b609 <= 30)
m.c196 = Constraint(expr= m.x259 + 30*m.b610 <= 30)
m.c197 = Constraint(expr= m.x278 - 15*m.b608 <= 0)
m.c198 = Constraint(expr= m.x279 - 15*m.b609 <= 0)
m.c199 = Constraint(expr= m.x280 - 15*m.b610 <= 0)
m.c200 = Constraint(expr= m.x281 + 15*m.b608 <= 15)
m.c201 = Constraint(expr= m.x282 + 15*m.b609 <= 15)
m.c202 = Constraint(expr= m.x283 + 15*m.b610 <= 15)
m.c203 = Constraint(expr=(m.x314/(0.001 + 0.999*m.b611) - 1.25*log(1 + m.x284/(0.001 + 0.999*m.b611)))*(0.001 + 0.999*
m.b611) <= 0)
m.c204 = Constraint(expr=(m.x315/(0.001 + 0.999*m.b612) - 1.25*log(1 + m.x285/(0.001 + 0.999*m.b612)))*(0.001 + 0.999*
m.b612) <= 0)
m.c205 = Constraint(expr=(m.x316/(0.001 + 0.999*m.b613) - 1.25*log(1 + m.x286/(0.001 + 0.999*m.b613)))*(0.001 + 0.999*
m.b613) <= 0)
m.c206 = Constraint(expr= m.x287 == 0)
m.c207 = Constraint(expr= m.x288 == 0)
m.c208 = Constraint(expr= m.x289 == 0)
m.c209 = Constraint(expr= m.x320 == 0)
m.c210 = Constraint(expr= m.x321 == 0)
m.c211 = Constraint(expr= m.x322 == 0)
m.c212 = Constraint(expr= m.x47 - m.x284 - m.x287 == 0)
m.c213 = Constraint(expr= m.x48 - m.x285 - m.x288 == 0)
m.c214 = Constraint(expr= m.x49 - m.x286 - m.x289 == 0)
m.c215 = Constraint(expr= m.x62 - m.x314 - m.x320 == 0)
m.c216 = Constraint(expr= m.x63 - m.x315 - m.x321 == 0)
m.c217 = Constraint(expr= m.x64 - m.x316 - m.x322 == 0)
m.c218 = Constraint(expr= m.x284 - 3.34221486003388*m.b611 <= 0)
m.c219 = Constraint(expr= m.x285 - 3.34221486003388*m.b612 <= 0)
m.c220 = Constraint(expr= m.x286 - 3.34221486003388*m.b613 <= 0)
m.c221 = Constraint(expr= m.x287 + 3.34221486003388*m.b611 <= 3.34221486003388)
m.c222 = Constraint(expr= m.x288 + 3.34221486003388*m.b612 <= 3.34221486003388)
m.c223 = Constraint(expr= m.x289 + 3.34221486003388*m.b613 <= 3.34221486003388)
m.c224 = Constraint(expr= m.x314 - 1.83548069293539*m.b611 <= 0)
m.c225 = Constraint(expr= m.x315 - 1.83548069293539*m.b612 <= 0)
m.c226 = Constraint(expr= m.x316 - 1.83548069293539*m.b613 <= 0)
m.c227 = Constraint(expr= m.x320 + 1.83548069293539*m.b611 <= 1.83548069293539)
m.c228 = Constraint(expr= m.x321 + 1.83548069293539*m.b612 <= 1.83548069293539)
m.c229 = Constraint(expr= m.x322 + 1.83548069293539*m.b613 <= 1.83548069293539)
m.c230 = Constraint(expr=(m.x326/(0.001 + 0.999*m.b614) - 0.9*log(1 + m.x290/(0.001 + 0.999*m.b614)))*(0.001 + 0.999*
m.b614) <= 0)
m.c231 = Constraint(expr=(m.x327/(0.001 + 0.999*m.b615) - 0.9*log(1 + m.x291/(0.001 + 0.999*m.b615)))*(0.001 + 0.999*
m.b615) <= 0)
m.c232 = Constraint(expr=(m.x328/(0.001 + 0.999*m.b616) - 0.9*log(1 + m.x292/(0.001 + 0.999*m.b616)))*(0.001 + 0.999*
m.b616) <= 0)
m.c233 = Constraint(expr= m.x293 == 0)
m.c234 = Constraint(expr= m.x294 == 0)
m.c235 = Constraint(expr= m.x295 == 0)
m.c236 = Constraint(expr= m.x332 == 0)
m.c237 = Constraint(expr= m.x333 == 0)
m.c238 = Constraint(expr= m.x334 == 0)
m.c239 = Constraint(expr= m.x50 - m.x290 - m.x293 == 0)
m.c240 = Constraint(expr= m.x51 - m.x291 - m.x294 == 0)
m.c241 = Constraint(expr= m.x52 - m.x292 - m.x295 == 0)
m.c242 = Constraint(expr= m.x65 - m.x326 - m.x332 == 0)
m.c243 = Constraint(expr= m.x66 - m.x327 - m.x333 == 0)
m.c244 = Constraint(expr= m.x67 - m.x328 - m.x334 == 0)
m.c245 = Constraint(expr= m.x290 - 3.34221486003388*m.b614 <= 0)
m.c246 = Constraint(expr= m.x291 - 3.34221486003388*m.b615 <= 0)
m.c247 = Constraint(expr= m.x292 - 3.34221486003388*m.b616 <= 0)
m.c248 = Constraint(expr= m.x293 + 3.34221486003388*m.b614 <= 3.34221486003388)
m.c249 = Constraint(expr= m.x294 + 3.34221486003388*m.b615 <= 3.34221486003388)
m.c250 = Constraint(expr= m.x295 + 3.34221486003388*m.b616 <= 3.34221486003388)
m.c251 = Constraint(expr= m.x326 - 1.32154609891348*m.b614 <= 0)
m.c252 = Constraint(expr= m.x327 - 1.32154609891348*m.b615 <= 0)
m.c253 = Constraint(expr= m.x328 - 1.32154609891348*m.b616 <= 0)
m.c254 = Constraint(expr= m.x332 + 1.32154609891348*m.b614 <= 1.32154609891348)
m.c255 = Constraint(expr= m.x333 + 1.32154609891348*m.b615 <= 1.32154609891348)
m.c256 = Constraint(expr= m.x334 + 1.32154609891348*m.b616 <= 1.32154609891348)
m.c257 = Constraint(expr=(m.x338/(0.001 + 0.999*m.b617) - log(1 + m.x269/(0.001 + 0.999*m.b617)))*(0.001 + 0.999*m.b617)
<= 0)
m.c258 = Constraint(expr=(m.x339/(0.001 + 0.999*m.b618) - log(1 + m.x270/(0.001 + 0.999*m.b618)))*(0.001 + 0.999*m.b618)
<= 0)
m.c259 = Constraint(expr=(m.x340/(0.001 + 0.999*m.b619) - log(1 + m.x271/(0.001 + 0.999*m.b619)))*(0.001 + 0.999*m.b619)
<= 0)
m.c260 = Constraint(expr= m.x275 == 0)
m.c261 = Constraint(expr= m.x276 == 0)
m.c262 = Constraint(expr= m.x277 == 0)
m.c263 = Constraint(expr= m.x341 == 0)
m.c264 = Constraint(expr= m.x342 == 0)
m.c265 = Constraint(expr= m.x343 == 0)
m.c266 = Constraint(expr= m.x41 - m.x269 - m.x275 == 0)
m.c267 = Constraint(expr= m.x42 - m.x270 - m.x276 == 0)
m.c268 = Constraint(expr= m.x43 - m.x271 - m.x277 == 0)
m.c269 = Constraint(expr= m.x68 - m.x338 - m.x341 == 0)
m.c270 = Constraint(expr= m.x69 - m.x339 - m.x342 == 0)
m.c271 = Constraint(expr= m.x70 - m.x340 - m.x343 == 0)
m.c272 = Constraint(expr= m.x269 - 2.54515263975353*m.b617 <= 0)
m.c273 = Constraint(expr= m.x270 - 2.54515263975353*m.b618 <= 0)
m.c274 = Constraint(expr= m.x271 - 2.54515263975353*m.b619 <= 0)
m.c275 = Constraint(expr= m.x275 + 2.54515263975353*m.b617 <= 2.54515263975353)
m.c276 = Constraint(expr= m.x276 + 2.54515263975353*m.b618 <= 2.54515263975353)
m.c277 = Constraint(expr= m.x277 + 2.54515263975353*m.b619 <= 2.54515263975353)
m.c278 = Constraint(expr= m.x338 - 1.26558121681553*m.b617 <= 0)
m.c279 = Constraint(expr= m.x339 - 1.26558121681553*m.b618 <= 0)
m.c280 = Constraint(expr= m.x340 - 1.26558121681553*m.b619 <= 0)
m.c281 = Constraint(expr= m.x341 + 1.26558121681553*m.b617 <= 1.26558121681553)
m.c282 = Constraint(expr= m.x342 + 1.26558121681553*m.b618 <= 1.26558121681553)
m.c283 = Constraint(expr= m.x343 + 1.26558121681553*m.b619 <= 1.26558121681553)
m.c284 = Constraint(expr= - 0.9*m.x296 + m.x344 == 0)
m.c285 = Constraint(expr= - 0.9*m.x297 + m.x345 == 0)
m.c286 = Constraint(expr= - 0.9*m.x298 + m.x346 == 0)
m.c287 = Constraint(expr= m.x299 == 0)
m.c288 = Constraint(expr= m.x300 == 0)
m.c289 = Constraint(expr= m.x301 == 0)
m.c290 = Constraint(expr= m.x347 == 0)
m.c291 = Constraint(expr= m.x348 == 0)
m.c292 = Constraint(expr= m.x349 == 0)
m.c293 = Constraint(expr= m.x53 - m.x296 - m.x299 == 0)
m.c294 = Constraint(expr= m.x54 - m.x297 - m.x300 == 0)
m.c295 = Constraint(expr= m.x55 - m.x298 - m.x301 == 0)
m.c296 = Constraint(expr= m.x71 - m.x344 - m.x347 == 0)
m.c297 = Constraint(expr= m.x72 - m.x345 - m.x348 == 0)
m.c298 = Constraint(expr= m.x73 - m.x346 - m.x349 == 0)
m.c299 = Constraint(expr= m.x296 - 15*m.b620 <= 0)
m.c300 = Constraint(expr= m.x297 - 15*m.b621 <= 0)
m.c301 = Constraint(expr= m.x298 - 15*m.b622 <= 0)
m.c302 = Constraint(expr= m.x299 + 15*m.b620 <= 15)
m.c303 = Constraint(expr= m.x300 + 15*m.b621 <= 15)
m.c304 = Constraint(expr= m.x301 + 15*m.b622 <= 15)
m.c305 = Constraint(expr= m.x344 - 13.5*m.b620 <= 0)
m.c306 = Constraint(expr= m.x345 - 13.5*m.b621 <= 0)
m.c307 = Constraint(expr= m.x346 - 13.5*m.b622 <= 0)
m.c308 = Constraint(expr= m.x347 + 13.5*m.b620 <= 13.5)
m.c309 = Constraint(expr= m.x348 + 13.5*m.b621 <= 13.5)
m.c310 = Constraint(expr= m.x349 + 13.5*m.b622 <= 13.5)
m.c311 = Constraint(expr= - 0.6*m.x302 + m.x350 == 0)
m.c312 = Constraint(expr= - 0.6*m.x303 + m.x351 == 0)
m.c313 = Constraint(expr= - 0.6*m.x304 + m.x352 == 0)
m.c314 = Constraint(expr= m.x305 == 0)
m.c315 = Constraint(expr= m.x306 == 0)
m.c316 = Constraint(expr= m.x307 == 0)
m.c317 = Constraint(expr= m.x353 == 0)
m.c318 = Constraint(expr= m.x354 == 0)
m.c319 = Constraint(expr= m.x355 == 0)
m.c320 = Constraint(expr= m.x56 - m.x302 - m.x305 == 0)
m.c321 = Constraint(expr= m.x57 - m.x303 - m.x306 == 0)
m.c322 = Constraint(expr= m.x58 - m.x304 - m.x307 == 0)
m.c323 = Constraint(expr= m.x74 - m.x350 - m.x353 == 0)
m.c324 = Constraint(expr= m.x75 - m.x351 - m.x354 == 0)
m.c325 = Constraint(expr= m.x76 - m.x352 - m.x355 == 0)
m.c326 = Constraint(expr= m.x302 - 15*m.b623 <= 0)
m.c327 = Constraint(expr= m.x303 - 15*m.b624 <= 0)
m.c328 = Constraint(expr= m.x304 - 15*m.b625 <= 0)
m.c329 = Constraint(expr= m.x305 + 15*m.b623 <= 15)
m.c330 = Constraint(expr= m.x306 + 15*m.b624 <= 15)
m.c331 = Constraint(expr= m.x307 + 15*m.b625 <= 15)
m.c332 = Constraint(expr= m.x350 - 9*m.b623 <= 0)
m.c333 = Constraint(expr= m.x351 - 9*m.b624 <= 0)
m.c334 = Constraint(expr= m.x352 - 9*m.b625 <= 0)
m.c335 = Constraint(expr= m.x353 + 9*m.b623 <= 9)
m.c336 = Constraint(expr= m.x354 + 9*m.b624 <= 9)
m.c337 = Constraint(expr= m.x355 + 9*m.b625 <= 9)
m.c338 = Constraint(expr=(m.x356/(0.001 + 0.999*m.b626) - 1.1*log(1 + m.x308/(0.001 + 0.999*m.b626)))*(0.001 + 0.999*
m.b626) <= 0)
m.c339 = Constraint(expr=(m.x357/(0.001 + 0.999*m.b627) - 1.1*log(1 + m.x309/(0.001 + 0.999*m.b627)))*(0.001 + 0.999*
m.b627) <= 0)
m.c340 = Constraint(expr=(m.x358/(0.001 + 0.999*m.b628) - 1.1*log(1 + m.x310/(0.001 + 0.999*m.b628)))*(0.001 + 0.999*
m.b628) <= 0)
m.c341 = Constraint(expr= m.x311 == 0)
m.c342 = Constraint(expr= m.x312 == 0)
m.c343 = Constraint(expr= m.x313 == 0)
m.c344 = Constraint(expr= m.x359 == 0)
m.c345 = Constraint(expr= m.x360 == 0)
m.c346 = Constraint(expr= m.x361 == 0)
m.c347 = Constraint(expr= m.x59 - m.x308 - m.x311 == 0)
m.c348 = Constraint(expr= m.x60 - m.x309 - m.x312 == 0)
m.c349 = Constraint(expr= m.x61 - m.x310 - m.x313 == 0)
m.c350 = Constraint(expr= m.x77 - m.x356 - m.x359 == 0)
m.c351 = Constraint(expr= m.x78 - m.x357 - m.x360 == 0)
m.c352 = Constraint(expr= m.x79 - m.x358 - m.x361 == 0)
m.c353 = Constraint(expr= m.x308 - 15*m.b626 <= 0)
m.c354 = Constraint(expr= m.x309 - 15*m.b627 <= 0)
m.c355 = Constraint(expr= m.x310 - 15*m.b628 <= 0)
m.c356 = Constraint(expr= m.x311 + 15*m.b626 <= 15)
m.c357 = Constraint(expr= m.x312 + 15*m.b627 <= 15)
m.c358 = Constraint(expr= m.x313 + 15*m.b628 <= 15)
m.c359 = Constraint(expr= m.x356 - 3.04984759446376*m.b626 <= 0)
m.c360 = Constraint(expr= m.x357 - 3.04984759446376*m.b627 <= 0)
m.c361 = Constraint(expr= m.x358 - 3.04984759446376*m.b628 <= 0)
m.c362 = Constraint(expr= m.x359 + 3.04984759446376*m.b626 <= 3.04984759446376)
m.c363 = Constraint(expr= m.x360 + 3.04984759446376*m.b627 <= 3.04984759446376)
m.c364 = Constraint(expr= m.x361 + 3.04984759446376*m.b628 <= 3.04984759446376)
m.c365 = Constraint(expr= - 0.9*m.x317 + m.x416 == 0)
m.c366 = Constraint(expr= - 0.9*m.x318 + m.x417 == 0)
m.c367 = Constraint(expr= - 0.9*m.x319 + m.x418 == 0)
m.c368 = Constraint(expr= - m.x374 + m.x416 == 0)
m.c369 = Constraint(expr= - m.x375 + m.x417 == 0)
m.c370 = Constraint(expr= - m.x376 + m.x418 == 0)
m.c371 = Constraint(expr= m.x323 == 0)
m.c372 = Constraint(expr= m.x324 == 0)
m.c373 = Constraint(expr= m.x325 == 0)
m.c374 = Constraint(expr= m.x377 == 0)
m.c375 = Constraint(expr= m.x378 == 0)
m.c376 = Constraint(expr= m.x379 == 0)
m.c377 = Constraint(expr= m.x419 == 0)
m.c378 = Constraint(expr= m.x420 == 0)
m.c379 = Constraint(expr= m.x421 == 0)
m.c380 = Constraint(expr= m.x62 - m.x317 - m.x323 == 0)
m.c381 = Constraint(expr= m.x63 - m.x318 - m.x324 == 0)
m.c382 = Constraint(expr= m.x64 - m.x319 - m.x325 == 0)
m.c383 = Constraint(expr= m.x86 - m.x374 - m.x377 == 0)
m.c384 = Constraint(expr= m.x87 - m.x375 - m.x378 == 0)
m.c385 = Constraint(expr= m.x88 - m.x376 - m.x379 == 0)
m.c386 = Constraint(expr= m.x110 - m.x416 - m.x419 == 0)
m.c387 = Constraint(expr= m.x111 - m.x417 - m.x420 == 0)
m.c388 = Constraint(expr= m.x112 - m.x418 - m.x421 == 0)
m.c389 = Constraint(expr= m.x317 - 1.83548069293539*m.b629 <= 0)
m.c390 = Constraint(expr= m.x318 - 1.83548069293539*m.b630 <= 0)
m.c391 = Constraint(expr= m.x319 - 1.83548069293539*m.b631 <= 0)
m.c392 = Constraint(expr= m.x323 + 1.83548069293539*m.b629 <= 1.83548069293539)
m.c393 = Constraint(expr= m.x324 + 1.83548069293539*m.b630 <= 1.83548069293539)
m.c394 = Constraint(expr= m.x325 + 1.83548069293539*m.b631 <= 1.83548069293539)
m.c395 = Constraint(expr= m.x374 - 20*m.b629 <= 0)
m.c396 = Constraint(expr= m.x375 - 20*m.b630 <= 0)
m.c397 = Constraint(expr= m.x376 - 20*m.b631 <= 0)
m.c398 = Constraint(expr= m.x377 + 20*m.b629 <= 20)
m.c399 = Constraint(expr= m.x378 + 20*m.b630 <= 20)
m.c400 = Constraint(expr= m.x379 + 20*m.b631 <= 20)
m.c401 = Constraint(expr= m.x416 - 20*m.b629 <= 0)
m.c402 = Constraint(expr= m.x417 - 20*m.b630 <= 0)
m.c403 = Constraint(expr= m.x418 - 20*m.b631 <= 0)
m.c404 = Constraint(expr= m.x419 + 20*m.b629 <= 20)
m.c405 = Constraint(expr= m.x420 + 20*m.b630 <= 20)
m.c406 = Constraint(expr= m.x421 + 20*m.b631 <= 20)
m.c407 = Constraint(expr=(m.x422/(0.001 + 0.999*m.b632) - log(1 + m.x329/(0.001 + 0.999*m.b632)))*(0.001 + 0.999*m.b632)
<= 0)
m.c408 = Constraint(expr=(m.x423/(0.001 + 0.999*m.b633) - log(1 + m.x330/(0.001 + 0.999*m.b633)))*(0.001 + 0.999*m.b633)
<= 0)
m.c409 = Constraint(expr=(m.x424/(0.001 + 0.999*m.b634) - log(1 + m.x331/(0.001 + 0.999*m.b634)))*(0.001 + 0.999*m.b634)
<= 0)
m.c410 = Constraint(expr= m.x335 == 0)
m.c411 = Constraint(expr= m.x336 == 0)
m.c412 = Constraint(expr= m.x337 == 0)
m.c413 = Constraint(expr= m.x425 == 0)
m.c414 = Constraint(expr= m.x426 == 0)
m.c415 = Constraint(expr= m.x427 == 0)
m.c416 = Constraint(expr= m.x65 - m.x329 - m.x335 == 0)
m.c417 = Constraint(expr= m.x66 - m.x330 - m.x336 == 0)
m.c418 = Constraint(expr= m.x67 - m.x331 - m.x337 == 0)
m.c419 = Constraint(expr= m.x113 - m.x422 - m.x425 == 0)
m.c420 = Constraint(expr= m.x114 - m.x423 - m.x426 == 0)
m.c421 = Constraint(expr= m.x115 - m.x424 - m.x427 == 0)
m.c422 = Constraint(expr= m.x329 - 1.32154609891348*m.b632 <= 0)
m.c423 = Constraint(expr= m.x330 - 1.32154609891348*m.b633 <= 0)
m.c424 = Constraint(expr= m.x331 - 1.32154609891348*m.b634 <= 0)
m.c425 = Constraint(expr= m.x335 + 1.32154609891348*m.b632 <= 1.32154609891348)
m.c426 = Constraint(expr= m.x336 + 1.32154609891348*m.b633 <= 1.32154609891348)
m.c427 = Constraint(expr= m.x337 + 1.32154609891348*m.b634 <= 1.32154609891348)
m.c428 = Constraint(expr= m.x422 - 0.842233385663186*m.b632 <= 0)
m.c429 = Constraint(expr= m.x423 - 0.842233385663186*m.b633 <= 0)
m.c430 = Constraint(expr= m.x424 - 0.842233385663186*m.b634 <= 0)
m.c431 = Constraint(expr= m.x425 + 0.842233385663186*m.b632 <= 0.842233385663186)
m.c432 = Constraint(expr= m.x426 + 0.842233385663186*m.b633 <= 0.842233385663186)
m.c433 = Constraint(expr= m.x427 + 0.842233385663186*m.b634 <= 0.842233385663186)
m.c434 = Constraint(expr=(m.x428/(0.001 + 0.999*m.b635) - 0.7*log(1 + m.x362/(0.001 + 0.999*m.b635)))*(0.001 + 0.999*
m.b635) <= 0)
m.c435 = Constraint(expr=(m.x429/(0.001 + 0.999*m.b636) - 0.7*log(1 + m.x363/(0.001 + 0.999*m.b636)))*(0.001 + 0.999*
m.b636) <= 0)
m.c436 = Constraint(expr=(m.x430/(0.001 + 0.999*m.b637) - 0.7*log(1 + m.x364/(0.001 + 0.999*m.b637)))*(0.001 + 0.999*
m.b637) <= 0)
m.c437 = Constraint(expr= m.x365 == 0)
m.c438 = Constraint(expr= m.x366 == 0)
m.c439 = Constraint(expr= m.x367 == 0)
m.c440 = Constraint(expr= m.x431 == 0)
m.c441 = Constraint(expr= m.x432 == 0)
m.c442 = Constraint(expr= m.x433 == 0)
m.c443 = Constraint(expr= m.x80 - m.x362 - m.x365 == 0)
m.c444 = Constraint(expr= m.x81 - m.x363 - m.x366 == 0)
m.c445 = Constraint(expr= m.x82 - m.x364 - m.x367 == 0)
m.c446 = Constraint(expr= m.x116 - m.x428 - m.x431 == 0)
m.c447 = Constraint(expr= m.x117 - m.x429 - m.x432 == 0)
m.c448 = Constraint(expr= m.x118 - m.x430 - m.x433 == 0)
m.c449 = Constraint(expr= m.x362 - 1.26558121681553*m.b635 <= 0)
m.c450 = Constraint(expr= m.x363 - 1.26558121681553*m.b636 <= 0)
m.c451 = Constraint(expr= m.x364 - 1.26558121681553*m.b637 <= 0)
m.c452 = Constraint(expr= m.x365 + 1.26558121681553*m.b635 <= 1.26558121681553)
m.c453 = Constraint(expr= m.x366 + 1.26558121681553*m.b636 <= 1.26558121681553)
m.c454 = Constraint(expr= m.x367 + 1.26558121681553*m.b637 <= 1.26558121681553)
m.c455 = Constraint(expr= m.x428 - 0.572481933717686*m.b635 <= 0)
m.c456 = Constraint(expr= m.x429 - 0.572481933717686*m.b636 <= 0)
m.c457 = Constraint(expr= m.x430 - 0.572481933717686*m.b637 <= 0)
m.c458 = Constraint(expr= m.x431 + 0.572481933717686*m.b635 <= 0.572481933717686)
m.c459 = Constraint(expr= m.x432 + 0.572481933717686*m.b636 <= 0.572481933717686)
m.c460 = Constraint(expr= m.x433 + 0.572481933717686*m.b637 <= 0.572481933717686)
m.c461 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 + m.x368/(0.001 + 0.999*m.b638)))*(0.001 + 0.999*
m.b638) <= 0)
m.c462 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 + m.x369/(0.001 + 0.999*m.b639)))*(0.001 + 0.999*
m.b639) <= 0)
m.c463 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 + m.x370/(0.001 + 0.999*m.b640)))*(0.001 + 0.999*
m.b640) <= 0)
m.c464 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 + m.x380/(0.001 + 0.999*m.b638)))*(0.001 + 0.999*
m.b638) <= 0)
m.c465 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 + m.x381/(0.001 + 0.999*m.b639)))*(0.001 + 0.999*
m.b639) <= 0)
m.c466 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 + m.x382/(0.001 + 0.999*m.b640)))*(0.001 + 0.999*
m.b640) <= 0)
m.c467 = Constraint(expr= m.x371 == 0)
m.c468 = Constraint(expr= m.x372 == 0)
m.c469 = Constraint(expr= m.x373 == 0)
m.c470 = Constraint(expr= m.x383 == 0)
m.c471 = Constraint(expr= m.x384 == 0)
m.c472 = Constraint(expr= m.x385 == 0)
m.c473 = Constraint(expr= m.x437 == 0)
m.c474 = Constraint(expr= m.x438 == 0)
m.c475 = Constraint(expr= m.x439 == 0)
m.c476 = Constraint(expr= m.x83 - m.x368 - m.x371 == 0)
m.c477 = Constraint(expr= m.x84 - m.x369 - m.x372 == 0)
m.c478 = Constraint(expr= m.x85 - m.x370 - m.x373 == 0)
m.c479 = Constraint(expr= m.x92 - m.x380 - m.x383 == 0)
m.c480 = Constraint(expr= m.x93 - m.x381 - m.x384 == 0)
m.c481 = Constraint(expr= m.x94 - m.x382 - m.x385 == 0)
m.c482 = Constraint(expr= m.x119 - m.x434 - m.x437 == 0)
m.c483 = Constraint(expr= m.x120 - m.x435 - m.x438 == 0)
m.c484 = Constraint(expr= m.x121 - m.x436 - m.x439 == 0)
m.c485 = Constraint(expr= m.x368 - 1.26558121681553*m.b638 <= 0)
m.c486 = Constraint(expr= m.x369 - 1.26558121681553*m.b639 <= 0)
m.c487 = Constraint(expr= m.x370 - 1.26558121681553*m.b640 <= 0)
m.c488 = Constraint(expr= m.x371 + 1.26558121681553*m.b638 <= 1.26558121681553)
m.c489 = Constraint(expr= m.x372 + 1.26558121681553*m.b639 <= 1.26558121681553)
m.c490 = Constraint(expr= m.x373 + 1.26558121681553*m.b640 <= 1.26558121681553)
m.c491 = Constraint(expr= m.x380 - 33.5*m.b638 <= 0)
m.c492 = Constraint(expr= m.x381 - 33.5*m.b639 <= 0)
m.c493 = Constraint(expr= m.x382 - 33.5*m.b640 <= 0)
m.c494 = Constraint(expr= m.x383 + 33.5*m.b638 <= 33.5)
m.c495 = Constraint(expr= m.x384 + 33.5*m.b639 <= 33.5)
m.c496 = Constraint(expr= m.x385 + 33.5*m.b640 <= 33.5)
m.c497 = Constraint(expr= m.x434 - 2.30162356062425*m.b638 <= 0)
m.c498 = Constraint(expr= m.x435 - 2.30162356062425*m.b639 <= 0)
m.c499 = Constraint(expr= m.x436 - 2.30162356062425*m.b640 <= 0)
m.c500 = Constraint(expr= m.x437 + 2.30162356062425*m.b638 <= 2.30162356062425)
m.c501 = Constraint(expr= m.x438 + 2.30162356062425*m.b639 <= 2.30162356062425)
m.c502 = Constraint(expr= m.x439 + 2.30162356062425*m.b640 <= 2.30162356062425)
m.c503 = Constraint(expr= - m.x386 + m.x440 == 0)
m.c504 = Constraint(expr= - m.x387 + m.x441 == 0)
m.c505 = Constraint(expr= - m.x388 + m.x442 == 0)
m.c506 = Constraint(expr= m.x389 == 0)
m.c507 = Constraint(expr= m.x390 == 0)
m.c508 = Constraint(expr= m.x391 == 0)
m.c509 = Constraint(expr= m.x443 == 0)
m.c510 = Constraint(expr= m.x444 == 0)
m.c511 = Constraint(expr= m.x445 == 0)
m.c512 = Constraint(expr= m.x95 - m.x386 - m.x389 == 0)
m.c513 = Constraint(expr= m.x96 - m.x387 - m.x390 == 0)
m.c514 = Constraint(expr= m.x97 - m.x388 - m.x391 == 0)
m.c515 = Constraint(expr= m.x122 - m.x440 - m.x443 == 0)
m.c516 = Constraint(expr= m.x123 - m.x441 - m.x444 == 0)
m.c517 = Constraint(expr= m.x124 - m.x442 - m.x445 == 0)
m.c518 = Constraint(expr= m.x386 - 9*m.b641 <= 0)
m.c519 = Constraint(expr= m.x387 - 9*m.b642 <= 0)
m.c520 = Constraint(expr= m.x388 - 9*m.b643 <= 0)
m.c521 = Constraint(expr= m.x389 + 9*m.b641 <= 9)
m.c522 = Constraint(expr= m.x390 + 9*m.b642 <= 9)
m.c523 = Constraint(expr= m.x391 + 9*m.b643 <= 9)
m.c524 = Constraint(expr= m.x440 - 9*m.b641 <= 0)
m.c525 = Constraint(expr= m.x441 - 9*m.b642 <= 0)
m.c526 = Constraint(expr= m.x442 - 9*m.b643 <= 0)
m.c527 = Constraint(expr= m.x443 + 9*m.b641 <= 9)
m.c528 = Constraint(expr= m.x444 + 9*m.b642 <= 9)
m.c529 = Constraint(expr= m.x445 + 9*m.b643 <= 9)
m.c530 = Constraint(expr= - m.x392 + m.x446 == 0)
m.c531 = Constraint(expr= - m.x393 + m.x447 == 0)
m.c532 = Constraint(expr= - m.x394 + m.x448 == 0)
m.c533 = Constraint(expr= m.x395 == 0)
m.c534 = Constraint(expr= m.x396 == 0)
m.c535 = Constraint(expr= m.x397 == 0)
m.c536 = Constraint(expr= m.x449 == 0)
m.c537 = Constraint(expr= m.x450 == 0)
m.c538 = Constraint(expr= m.x451 == 0)
m.c539 = Constraint(expr= m.x98 - m.x392 - m.x395 == 0)
m.c540 = Constraint(expr= m.x99 - m.x393 - m.x396 == 0)
m.c541 = Constraint(expr= m.x100 - m.x394 - m.x397 == 0)
m.c542 = Constraint(expr= m.x125 - m.x446 - m.x449 == 0)
m.c543 = Constraint(expr= m.x126 - m.x447 - m.x450 == 0)
m.c544 = Constraint(expr= m.x127 - m.x448 - m.x451 == 0)
m.c545 = Constraint(expr= m.x392 - 9*m.b644 <= 0)
m.c546 = Constraint(expr= m.x393 - 9*m.b645 <= 0)
m.c547 = Constraint(expr= m.x394 - 9*m.b646 <= 0)
m.c548 = Constraint(expr= m.x395 + 9*m.b644 <= 9)
m.c549 = Constraint(expr= m.x396 + 9*m.b645 <= 9)
m.c550 = Constraint(expr= m.x397 + 9*m.b646 <= 9)
m.c551 = Constraint(expr= m.x446 - 9*m.b644 <= 0)
m.c552 = Constraint(expr= m.x447 - 9*m.b645 <= 0)
m.c553 = Constraint(expr= m.x448 - 9*m.b646 <= 0)
m.c554 = Constraint(expr= m.x449 + 9*m.b644 <= 9)
m.c555 = Constraint(expr= m.x450 + 9*m.b645 <= 9)
m.c556 = Constraint(expr= m.x451 + 9*m.b646 <= 9)
m.c557 = Constraint(expr=(m.x452/(0.001 + 0.999*m.b647) - 0.75*log(1 + m.x398/(0.001 + 0.999*m.b647)))*(0.001 + 0.999*
m.b647) <= 0)
m.c558 = Constraint(expr=(m.x453/(0.001 + 0.999*m.b648) - 0.75*log(1 + m.x399/(0.001 + 0.999*m.b648)))*(0.001 + 0.999*
m.b648) <= 0)
m.c559 = Constraint(expr=(m.x454/(0.001 + 0.999*m.b649) - 0.75*log(1 + m.x400/(0.001 + 0.999*m.b649)))*(0.001 + 0.999*
m.b649) <= 0)
m.c560 = Constraint(expr= m.x401 == 0)
m.c561 = Constraint(expr= m.x402 == 0)
m.c562 = Constraint(expr= m.x403 == 0)
m.c563 = Constraint(expr= m.x455 == 0)
m.c564 = Constraint(expr= m.x456 == 0)
m.c565 = Constraint(expr= m.x457 == 0)
m.c566 = Constraint(expr= m.x101 - m.x398 - m.x401 == 0)
m.c567 = Constraint(expr= m.x102 - m.x399 - m.x402 == 0)
m.c568 = Constraint(expr= m.x103 - m.x400 - m.x403 == 0)
m.c569 = Constraint(expr= m.x128 - m.x452 - m.x455 == 0)
m.c570 = Constraint(expr= m.x129 - m.x453 - m.x456 == 0)
m.c571 = Constraint(expr= m.x130 - m.x454 - m.x457 == 0)
m.c572 = Constraint(expr= m.x398 - 3.04984759446376*m.b647 <= 0)
m.c573 = Constraint(expr= m.x399 - 3.04984759446376*m.b648 <= 0)
m.c574 = Constraint(expr= m.x400 - 3.04984759446376*m.b649 <= 0)
m.c575 = Constraint(expr= m.x401 + 3.04984759446376*m.b647 <= 3.04984759446376)
m.c576 = Constraint(expr= m.x402 + 3.04984759446376*m.b648 <= 3.04984759446376)
m.c577 = Constraint(expr= m.x403 + 3.04984759446376*m.b649 <= 3.04984759446376)
m.c578 = Constraint(expr= m.x452 - 1.04900943706034*m.b647 <= 0)
m.c579 = Constraint(expr= m.x453 - 1.04900943706034*m.b648 <= 0)
m.c580 = Constraint(expr= m.x454 - 1.04900943706034*m.b649 <= 0)
m.c581 = Constraint(expr= m.x455 + 1.04900943706034*m.b647 <= 1.04900943706034)
m.c582 = Constraint(expr= m.x456 + 1.04900943706034*m.b648 <= 1.04900943706034)
m.c583 = Constraint(expr= m.x457 + 1.04900943706034*m.b649 <= 1.04900943706034)
m.c584 = Constraint(expr=(m.x458/(0.001 + 0.999*m.b650) - 0.8*log(1 + m.x404/(0.001 + 0.999*m.b650)))*(0.001 + 0.999*m.b650) <= 0)
m.c585 = Constraint(expr=(m.x459/(0.001 + 0.999*m.b651) - 0.8*log(1 + m.x405/(0.001 + 0.999*m.b651)))*(0.001 + 0.999*m.b651) <= 0)
m.c586 = Constraint(expr=(m.x460/(0.001 + 0.999*m.b652) - 0.8*log(1 + m.x406/(0.001 + 0.999*m.b652)))*(0.001 + 0.999*m.b652) <= 0)
m.c587 = Constraint(expr= m.x407 == 0)
m.c588 = Constraint(expr= m.x408 == 0)
m.c589 = Constraint(expr= m.x409 == 0)
m.c590 = Constraint(expr= m.x461 == 0)
m.c591 = Constraint(expr= m.x462 == 0)
m.c592 = Constraint(expr= m.x463 == 0)
m.c593 = Constraint(expr= m.x104 - m.x404 - m.x407 == 0)
m.c594 = Constraint(expr= m.x105 - m.x405 - m.x408 == 0)
m.c595 = Constraint(expr= m.x106 - m.x406 - m.x409 == 0)
m.c596 = Constraint(expr= m.x131 - m.x458 - m.x461 == 0)
m.c597 = Constraint(expr= m.x132 - m.x459 - m.x462 == 0)
m.c598 = Constraint(expr= m.x133 - m.x460 - m.x463 == 0)
m.c599 = Constraint(expr= m.x404 - 3.04984759446376*m.b650 <= 0)
m.c600 = Constraint(expr= m.x405 - 3.04984759446376*m.b651 <= 0)
m.c601 = Constraint(expr= m.x406 - 3.04984759446376*m.b652 <= 0)
m.c602 = Constraint(expr= m.x407 + 3.04984759446376*m.b650 <= 3.04984759446376)
m.c603 = Constraint(expr= m.x408 + 3.04984759446376*m.b651 <= 3.04984759446376)
m.c604 = Constraint(expr= m.x409 + 3.04984759446376*m.b652 <= 3.04984759446376)
m.c605 = Constraint(expr= m.x458 - 1.11894339953103*m.b650 <= 0)
m.c606 = Constraint(expr= m.x459 - 1.11894339953103*m.b651 <= 0)
m.c607 = Constraint(expr= m.x460 - 1.11894339953103*m.b652 <= 0)
m.c608 = Constraint(expr= m.x461 + 1.11894339953103*m.b650 <= 1.11894339953103)
m.c609 = Constraint(expr= m.x462 + 1.11894339953103*m.b651 <= 1.11894339953103)
m.c610 = Constraint(expr= m.x463 + 1.11894339953103*m.b652 <= 1.11894339953103)
m.c611 = Constraint(expr=(m.x464/(0.001 + 0.999*m.b653) - 0.85*log(1 + m.x410/(0.001 + 0.999*m.b653)))*(0.001 + 0.999*m.b653) <= 0)
m.c612 = Constraint(expr=(m.x465/(0.001 + 0.999*m.b654) - 0.85*log(1 + m.x411/(0.001 + 0.999*m.b654)))*(0.001 + 0.999*m.b654) <= 0)
m.c613 = Constraint(expr=(m.x466/(0.001 + 0.999*m.b655) - 0.85*log(1 + m.x412/(0.001 + 0.999*m.b655)))*(0.001 + 0.999*m.b655) <= 0)
m.c614 = Constraint(expr= m.x413 == 0)
m.c615 = Constraint(expr= m.x414 == 0)
m.c616 = Constraint(expr= m.x415 == 0)
m.c617 = Constraint(expr= m.x467 == 0)
m.c618 = Constraint(expr= m.x468 == 0)
m.c619 = Constraint(expr= m.x469 == 0)
m.c620 = Constraint(expr= m.x107 - m.x410 - m.x413 == 0)
m.c621 = Constraint(expr= m.x108 - m.x411 - m.x414 == 0)
m.c622 = Constraint(expr= m.x109 - m.x412 - m.x415 == 0)
m.c623 = Constraint(expr= m.x134 - m.x464 - m.x467 == 0)
m.c624 = Constraint(expr= m.x135 - m.x465 - m.x468 == 0)
m.c625 = Constraint(expr= m.x136 - m.x466 - m.x469 == 0)
m.c626 = Constraint(expr= m.x410 - 3.04984759446376*m.b653 <= 0)
m.c627 = Constraint(expr= m.x411 - 3.04984759446376*m.b654 <= 0)
m.c628 = Constraint(expr= m.x412 - 3.04984759446376*m.b655 <= 0)
m.c629 = Constraint(expr= m.x413 + 3.04984759446376*m.b653 <= 3.04984759446376)
m.c630 = Constraint(expr= m.x414 + 3.04984759446376*m.b654 <= 3.04984759446376)
m.c631 = Constraint(expr= m.x415 + 3.04984759446376*m.b655 <= 3.04984759446376)
m.c632 = Constraint(expr= m.x464 - 1.18887736200171*m.b653 <= 0)
m.c633 = Constraint(expr= m.x465 - 1.18887736200171*m.b654 <= 0)
m.c634 = Constraint(expr= m.x466 - 1.18887736200171*m.b655 <= 0)
m.c635 = Constraint(expr= m.x467 + 1.18887736200171*m.b653 <= 1.18887736200171)
m.c636 = Constraint(expr= m.x468 + 1.18887736200171*m.b654 <= 1.18887736200171)
m.c637 = Constraint(expr= m.x469 + 1.18887736200171*m.b655 <= 1.18887736200171)
m.c638 = Constraint(expr=(m.x482/(0.001 + 0.999*m.b656) - log(1 + m.x470/(0.001 + 0.999*m.b656)))*(0.001 + 0.999*m.b656) <= 0)
m.c639 = Constraint(expr=(m.x483/(0.001 + 0.999*m.b657) - log(1 + m.x471/(0.001 + 0.999*m.b657)))*(0.001 + 0.999*m.b657) <= 0)
m.c640 = Constraint(expr=(m.x484/(0.001 + 0.999*m.b658) - log(1 + m.x472/(0.001 + 0.999*m.b658)))*(0.001 + 0.999*m.b658) <= 0)
m.c641 = Constraint(expr= m.x473 == 0)
m.c642 = Constraint(expr= m.x474 == 0)
m.c643 = Constraint(expr= m.x475 == 0)
m.c644 = Constraint(expr= m.x485 == 0)
m.c645 = Constraint(expr= m.x486 == 0)
m.c646 = Constraint(expr= m.x487 == 0)
m.c647 = Constraint(expr= m.x140 - m.x470 - m.x473 == 0)
m.c648 = Constraint(expr= m.x141 - m.x471 - m.x474 == 0)
m.c649 = Constraint(expr= m.x142 - m.x472 - m.x475 == 0)
m.c650 = Constraint(expr= m.x146 - m.x482 - m.x485 == 0)
m.c651 = Constraint(expr= m.x147 - m.x483 - m.x486 == 0)
m.c652 = Constraint(expr= m.x148 - m.x484 - m.x487 == 0)
m.c653 = Constraint(expr= m.x470 - 1.18887736200171*m.b656 <= 0)
m.c654 = Constraint(expr= m.x471 - 1.18887736200171*m.b657 <= 0)
m.c655 = Constraint(expr= m.x472 - 1.18887736200171*m.b658 <= 0)
m.c656 = Constraint(expr= m.x473 + 1.18887736200171*m.b656 <= 1.18887736200171)
m.c657 = Constraint(expr= m.x474 + 1.18887736200171*m.b657 <= 1.18887736200171)
m.c658 = Constraint(expr= m.x475 + 1.18887736200171*m.b658 <= 1.18887736200171)
m.c659 = Constraint(expr= m.x482 - 0.78338879230327*m.b656 <= 0)
m.c660 = Constraint(expr= m.x483 - 0.78338879230327*m.b657 <= 0)
m.c661 = Constraint(expr= m.x484 - 0.78338879230327*m.b658 <= 0)
m.c662 = Constraint(expr= m.x485 + 0.78338879230327*m.b656 <= 0.78338879230327)
m.c663 = Constraint(expr= m.x486 + 0.78338879230327*m.b657 <= 0.78338879230327)
m.c664 = Constraint(expr= m.x487 + 0.78338879230327*m.b658 <= 0.78338879230327)
m.c665 = Constraint(expr=(m.x488/(0.001 + 0.999*m.b659) - 1.2*log(1 + m.x476/(0.001 + 0.999*m.b659)))*(0.001 + 0.999*m.b659) <= 0)
m.c666 = Constraint(expr=(m.x489/(0.001 + 0.999*m.b660) - 1.2*log(1 + m.x477/(0.001 + 0.999*m.b660)))*(0.001 + 0.999*m.b660) <= 0)
m.c667 = Constraint(expr=(m.x490/(0.001 + 0.999*m.b661) - 1.2*log(1 + m.x478/(0.001 + 0.999*m.b661)))*(0.001 + 0.999*m.b661) <= 0)
m.c668 = Constraint(expr= m.x479 == 0)
m.c669 = Constraint(expr= m.x480 == 0)
m.c670 = Constraint(expr= m.x481 == 0)
m.c671 = Constraint(expr= m.x491 == 0)
m.c672 = Constraint(expr= m.x492 == 0)
m.c673 = Constraint(expr= m.x493 == 0)
m.c674 = Constraint(expr= m.x143 - m.x476 - m.x479 == 0)
m.c675 = Constraint(expr= m.x144 - m.x477 - m.x480 == 0)
m.c676 = Constraint(expr= m.x145 - m.x478 - m.x481 == 0)
m.c677 = Constraint(expr= m.x149 - m.x488 - m.x491 == 0)
m.c678 = Constraint(expr= m.x150 - m.x489 - m.x492 == 0)
m.c679 = Constraint(expr= m.x151 - m.x490 - m.x493 == 0)
m.c680 = Constraint(expr= m.x476 - 1.18887736200171*m.b659 <= 0)
m.c681 = Constraint(expr= m.x477 - 1.18887736200171*m.b660 <= 0)
m.c682 = Constraint(expr= m.x478 - 1.18887736200171*m.b661 <= 0)
m.c683 = Constraint(expr= m.x479 + 1.18887736200171*m.b659 <= 1.18887736200171)
m.c684 = Constraint(expr= m.x480 + 1.18887736200171*m.b660 <= 1.18887736200171)
m.c685 = Constraint(expr= m.x481 + 1.18887736200171*m.b661 <= 1.18887736200171)
m.c686 = Constraint(expr= m.x488 - 0.940066550763924*m.b659 <= 0)
m.c687 = Constraint(expr= m.x489 - 0.940066550763924*m.b660 <= 0)
m.c688 = Constraint(expr= m.x490 - 0.940066550763924*m.b661 <= 0)
m.c689 = Constraint(expr= m.x491 + 0.940066550763924*m.b659 <= 0.940066550763924)
m.c690 = Constraint(expr= m.x492 + 0.940066550763924*m.b660 <= 0.940066550763924)
m.c691 = Constraint(expr= m.x493 + 0.940066550763924*m.b661 <= 0.940066550763924)
m.c692 = Constraint(expr= - 0.75*m.x494 + m.x518 == 0)
m.c693 = Constraint(expr= - 0.75*m.x495 + m.x519 == 0)
m.c694 = Constraint(expr= - 0.75*m.x496 + m.x520 == 0)
m.c695 = Constraint(expr= m.x497 == 0)
m.c696 = Constraint(expr= m.x498 == 0)
m.c697 = Constraint(expr= m.x499 == 0)
m.c698 = Constraint(expr= m.x521 == 0)
m.c699 = Constraint(expr= m.x522 == 0)
m.c700 = Constraint(expr= m.x523 == 0)
m.c701 = Constraint(expr= m.x161 - m.x494 - m.x497 == 0)
m.c702 = Constraint(expr= m.x162 - m.x495 - m.x498 == 0)
m.c703 = Constraint(expr= m.x163 - m.x496 - m.x499 == 0)
m.c704 = Constraint(expr= m.x173 - m.x518 - m.x521 == 0)
m.c705 = Constraint(expr= m.x174 - m.x519 - m.x522 == 0)
m.c706 = Constraint(expr= m.x175 - m.x520 - m.x523 == 0)
m.c707 = Constraint(expr= m.x494 - 0.940066550763924*m.b662 <= 0)
m.c708 = Constraint(expr= m.x495 - 0.940066550763924*m.b663 <= 0)
m.c709 = Constraint(expr= m.x496 - 0.940066550763924*m.b664 <= 0)
m.c710 = Constraint(expr= m.x497 + 0.940066550763924*m.b662 <= 0.940066550763924)
m.c711 = Constraint(expr= m.x498 + 0.940066550763924*m.b663 <= 0.940066550763924)
m.c712 = Constraint(expr= m.x499 + 0.940066550763924*m.b664 <= 0.940066550763924)
m.c713 = Constraint(expr= m.x518 - 0.705049913072943*m.b662 <= 0)
m.c714 = Constraint(expr= m.x519 - 0.705049913072943*m.b663 <= 0)
m.c715 = Constraint(expr= m.x520 - 0.705049913072943*m.b664 <= 0)
m.c716 = Constraint(expr= m.x521 + 0.705049913072943*m.b662 <= 0.705049913072943)
m.c717 = Constraint(expr= m.x522 + 0.705049913072943*m.b663 <= 0.705049913072943)
m.c718 = Constraint(expr= m.x523 + 0.705049913072943*m.b664 <= 0.705049913072943)
m.c719 = Constraint(expr=(m.x524/(0.001 + 0.999*m.b665) - 1.5*log(1 + m.x500/(0.001 + 0.999*m.b665)))*(0.001 + 0.999*m.b665) <= 0)
m.c720 = Constraint(expr=(m.x525/(0.001 + 0.999*m.b666) - 1.5*log(1 + m.x501/(0.001 + 0.999*m.b666)))*(0.001 + 0.999*m.b666) <= 0)
m.c721 = Constraint(expr=(m.x526/(0.001 + 0.999*m.b667) - 1.5*log(1 + m.x502/(0.001 + 0.999*m.b667)))*(0.001 + 0.999*m.b667) <= 0)
m.c722 = Constraint(expr= m.x503 == 0)
m.c723 = Constraint(expr= m.x504 == 0)
m.c724 = Constraint(expr= m.x505 == 0)
m.c725 = Constraint(expr= m.x530 == 0)
m.c726 = Constraint(expr= m.x531 == 0)
m.c727 = Constraint(expr= m.x532 == 0)
m.c728 = Constraint(expr= m.x164 - m.x500 - m.x503 == 0)
m.c729 = Constraint(expr= m.x165 - m.x501 - m.x504 == 0)
m.c730 = Constraint(expr= m.x166 - m.x502 - m.x505 == 0)
m.c731 = Constraint(expr= m.x176 - m.x524 - m.x530 == 0)
m.c732 = Constraint(expr= m.x177 - m.x525 - m.x531 == 0)
m.c733 = Constraint(expr= m.x178 - m.x526 - m.x532 == 0)
m.c734 = Constraint(expr= m.x500 - 0.940066550763924*m.b665 <= 0)
m.c735 = Constraint(expr= m.x501 - 0.940066550763924*m.b666 <= 0)
m.c736 = Constraint(expr= m.x502 - 0.940066550763924*m.b667 <= 0)
m.c737 = Constraint(expr= m.x503 + 0.940066550763924*m.b665 <= 0.940066550763924)
m.c738 = Constraint(expr= m.x504 + 0.940066550763924*m.b666 <= 0.940066550763924)
m.c739 = Constraint(expr= m.x505 + 0.940066550763924*m.b667 <= 0.940066550763924)
m.c740 = Constraint(expr= m.x524 - 0.994083415506506*m.b665 <= 0)
m.c741 = Constraint(expr= m.x525 - 0.994083415506506*m.b666 <= 0)
m.c742 = Constraint(expr= m.x526 - 0.994083415506506*m.b667 <= 0)
m.c743 = Constraint(expr= m.x530 + 0.994083415506506*m.b665 <= 0.994083415506506)
m.c744 = Constraint(expr= m.x531 + 0.994083415506506*m.b666 <= 0.994083415506506)
m.c745 = Constraint(expr= m.x532 + 0.994083415506506*m.b667 <= 0.994083415506506)
m.c746 = Constraint(expr= - m.x506 + m.x536 == 0)
m.c747 = Constraint(expr= - m.x507 + m.x537 == 0)
m.c748 = Constraint(expr= - m.x508 + m.x538 == 0)
m.c749 = Constraint(expr= - 0.5*m.x512 + m.x536 == 0)
m.c750 = Constraint(expr= - 0.5*m.x513 + m.x537 == 0)
m.c751 = Constraint(expr= - 0.5*m.x514 + m.x538 == 0)
m.c752 = Constraint(expr= m.x509 == 0)
m.c753 = Constraint(expr= m.x510 == 0)
m.c754 = Constraint(expr= m.x511 == 0)
m.c755 = Constraint(expr= m.x515 == 0)
m.c756 = Constraint(expr= m.x516 == 0)
m.c757 = Constraint(expr= m.x517 == 0)
m.c758 = Constraint(expr= m.x539 == 0)
m.c759 = Constraint(expr= m.x540 == 0)
m.c760 = Constraint(expr= m.x541 == 0)
m.c761 = Constraint(expr= m.x167 - m.x506 - m.x509 == 0)
m.c762 = Constraint(expr= m.x168 - m.x507 - m.x510 == 0)
m.c763 = Constraint(expr= m.x169 - m.x508 - m.x511 == 0)
m.c764 = Constraint(expr= m.x170 - m.x512 - m.x515 == 0)
m.c765 = Constraint(expr= m.x171 - m.x513 - m.x516 == 0)
m.c766 = Constraint(expr= m.x172 - m.x514 - m.x517 == 0)
m.c767 = Constraint(expr= m.x179 - m.x536 - m.x539 == 0)
m.c768 = Constraint(expr= m.x180 - m.x537 - m.x540 == 0)
m.c769 = Constraint(expr= m.x181 - m.x538 - m.x541 == 0)
m.c770 = Constraint(expr= m.x506 - 0.940066550763924*m.b668 <= 0)
m.c771 = Constraint(expr= m.x507 - 0.940066550763924*m.b669 <= 0)
m.c772 = Constraint(expr= m.x508 - 0.940066550763924*m.b670 <= 0)
m.c773 = Constraint(expr= m.x509 + 0.940066550763924*m.b668 <= 0.940066550763924)
m.c774 = Constraint(expr= m.x510 + 0.940066550763924*m.b669 <= 0.940066550763924)
m.c775 = Constraint(expr= m.x511 + 0.940066550763924*m.b670 <= 0.940066550763924)
m.c776 = Constraint(expr= m.x512 - 30*m.b668 <= 0)
m.c777 = Constraint(expr= m.x513 - 30*m.b669 <= 0)
m.c778 = Constraint(expr= m.x514 - 30*m.b670 <= 0)
m.c779 = Constraint(expr= m.x515 + 30*m.b668 <= 30)
m.c780 = Constraint(expr= m.x516 + 30*m.b669 <= 30)
m.c781 = Constraint(expr= m.x517 + 30*m.b670 <= 30)
m.c782 = Constraint(expr= m.x536 - 15*m.b668 <= 0)
m.c783 = Constraint(expr= m.x537 - 15*m.b669 <= 0)
m.c784 = Constraint(expr= m.x538 - 15*m.b670 <= 0)
m.c785 = Constraint(expr= m.x539 + 15*m.b668 <= 15)
m.c786 = Constraint(expr= m.x540 + 15*m.b669 <= 15)
m.c787 = Constraint(expr= m.x541 + 15*m.b670 <= 15)
m.c788 = Constraint(expr=(m.x566/(0.001 + 0.999*m.b671) - 1.25*log(1 + m.x542/(0.001 + 0.999*m.b671)))*(0.001 + 0.999*m.b671) <= 0)
m.c789 = Constraint(expr=(m.x567/(0.001 + 0.999*m.b672) - 1.25*log(1 + m.x543/(0.001 + 0.999*m.b672)))*(0.001 + 0.999*m.b672) <= 0)
m.c790 = Constraint(expr=(m.x568/(0.001 + 0.999*m.b673) - 1.25*log(1 + m.x544/(0.001 + 0.999*m.b673)))*(0.001 + 0.999*m.b673) <= 0)
m.c791 = Constraint(expr= m.x545 == 0)
m.c792 = Constraint(expr= m.x546 == 0)
m.c793 = Constraint(expr= m.x547 == 0)
m.c794 = Constraint(expr= m.x569 == 0)
m.c795 = Constraint(expr= m.x570 == 0)
m.c796 = Constraint(expr= m.x571 == 0)
m.c797 = Constraint(expr= m.x182 - m.x542 - m.x545 == 0)
m.c798 = Constraint(expr= m.x183 - m.x543 - m.x546 == 0)
m.c799 = Constraint(expr= m.x184 - m.x544 - m.x547 == 0)
m.c800 = Constraint(expr= m.x197 - m.x566 - m.x569 == 0)
m.c801 = Constraint(expr= m.x198 - m.x567 - m.x570 == 0)
m.c802 = Constraint(expr= m.x199 - m.x568 - m.x571 == 0)
m.c803 = Constraint(expr= m.x542 - 0.705049913072943*m.b671 <= 0)
m.c804 = Constraint(expr= m.x543 - 0.705049913072943*m.b672 <= 0)
m.c805 = Constraint(expr= m.x544 - 0.705049913072943*m.b673 <= 0)
m.c806 = Constraint(expr= m.x545 + 0.705049913072943*m.b671 <= 0.705049913072943)
m.c807 = Constraint(expr= m.x546 + 0.705049913072943*m.b672 <= 0.705049913072943)
m.c808 = Constraint(expr= m.x547 + 0.705049913072943*m.b673 <= 0.705049913072943)
m.c809 = Constraint(expr= m.x566 - 0.666992981045719*m.b671 <= 0)
m.c810 = Constraint(expr= m.x567 - 0.666992981045719*m.b672 <= 0)
m.c811 = Constraint(expr= m.x568 - 0.666992981045719*m.b673 <= 0)
m.c812 = Constraint(expr= m.x569 + 0.666992981045719*m.b671 <= 0.666992981045719)
m.c813 = Constraint(expr= m.x570 + 0.666992981045719*m.b672 <= 0.666992981045719)
m.c814 = Constraint(expr= m.x571 + 0.666992981045719*m.b673 <= 0.666992981045719)
m.c815 = Constraint(expr=(m.x572/(0.001 + 0.999*m.b674) - 0.9*log(1 + m.x548/(0.001 + 0.999*m.b674)))*(0.001 + 0.999*m.b674) <= 0)
m.c816 = Constraint(expr=(m.x573/(0.001 + 0.999*m.b675) - 0.9*log(1 + m.x549/(0.001 + 0.999*m.b675)))*(0.001 + 0.999*m.b675) <= 0)
m.c817 = Constraint(expr=(m.x574/(0.001 + 0.999*m.b676) - 0.9*log(1 + m.x550/(0.001 + 0.999*m.b676)))*(0.001 + 0.999*m.b676) <= 0)
m.c818 = Constraint(expr= m.x551 == 0)
m.c819 = Constraint(expr= m.x552 == 0)
m.c820 = Constraint(expr= m.x553 == 0)
m.c821 = Constraint(expr= m.x575 == 0)
m.c822 = Constraint(expr= m.x576 == 0)
m.c823 = Constraint(expr= m.x577 == 0)
m.c824 = Constraint(expr= m.x185 - m.x548 - m.x551 == 0)
m.c825 = Constraint(expr= m.x186 - m.x549 - m.x552 == 0)
m.c826 = Constraint(expr= m.x187 - m.x550 - m.x553 == 0)
m.c827 = Constraint(expr= m.x200 - m.x572 - m.x575 == 0)
m.c828 = Constraint(expr= m.x201 - m.x573 - m.x576 == 0)
m.c829 = Constraint(expr= m.x202 - m.x574 - m.x577 == 0)
m.c830 = Constraint(expr= m.x548 - 0.705049913072943*m.b674 <= 0)
m.c831 = Constraint(expr= m.x549 - 0.705049913072943*m.b675 <= 0)
m.c832 = Constraint(expr= m.x550 - 0.705049913072943*m.b676 <= 0)
m.c833 = Constraint(expr= m.x551 + 0.705049913072943*m.b674 <= 0.705049913072943)
m.c834 = Constraint(expr= m.x552 + 0.705049913072943*m.b675 <= 0.705049913072943)
m.c835 = Constraint(expr= m.x553 + 0.705049913072943*m.b676 <= 0.705049913072943)
m.c836 = Constraint(expr= m.x572 - 0.480234946352917*m.b674 <= 0)
m.c837 = Constraint(expr= m.x573 - 0.480234946352917*m.b675 <= 0)
m.c838 = Constraint(expr= m.x574 - 0.480234946352917*m.b676 <= 0)
m.c839 = Constraint(expr= m.x575 + 0.480234946352917*m.b674 <= 0.480234946352917)
m.c840 = Constraint(expr= m.x576 + 0.480234946352917*m.b675 <= 0.480234946352917)
m.c841 = Constraint(expr= m.x577 + 0.480234946352917*m.b676 <= 0.480234946352917)
m.c842 = Constraint(expr=(m.x578/(0.001 + 0.999*m.b677) - log(1 + m.x527/(0.001 + 0.999*m.b677)))*(0.001 + 0.999*m.b677) <= 0)
m.c843 = Constraint(expr=(m.x579/(0.001 + 0.999*m.b678) - log(1 + m.x528/(0.001 + 0.999*m.b678)))*(0.001 + 0.999*m.b678) <= 0)
m.c844 = Constraint(expr=(m.x580/(0.001 + 0.999*m.b679) - log(1 + m.x529/(0.001 + 0.999*m.b679)))*(0.001 + 0.999*m.b679) <= 0)
m.c845 = Constraint(expr= m.x533 == 0)
m.c846 = Constraint(expr= m.x534 == 0)
m.c847 = Constraint(expr= m.x535 == 0)
m.c848 = Constraint(expr= m.x581 == 0)
m.c849 = Constraint(expr= m.x582 == 0)
m.c850 = Constraint(expr= m.x583 == 0)
m.c851 = Constraint(expr= m.x176 - m.x527 - m.x533 == 0)
m.c852 = Constraint(expr= m.x177 - m.x528 - m.x534 == 0)
m.c853 = Constraint(expr= m.x178 - m.x529 - m.x535 == 0)
m.c854 = Constraint(expr= m.x203 - m.x578 - m.x581 == 0)
m.c855 = Constraint(expr= m.x204 - m.x579 - m.x582 == 0)
m.c856 = Constraint(expr= m.x205 - m.x580 - m.x583 == 0)
m.c857 = Constraint(expr= m.x527 - 0.994083415506506*m.b677 <= 0)
m.c858 = Constraint(expr= m.x528 - 0.994083415506506*m.b678 <= 0)
m.c859 = Constraint(expr= m.x529 - 0.994083415506506*m.b679 <= 0)
m.c860 = Constraint(expr= m.x533 + 0.994083415506506*m.b677 <= 0.994083415506506)
m.c861 = Constraint(expr= m.x534 + 0.994083415506506*m.b678 <= 0.994083415506506)
m.c862 = Constraint(expr= m.x535 + 0.994083415506506*m.b679 <= 0.994083415506506)
m.c863 = Constraint(expr= m.x578 - 0.690184503917672*m.b677 <= 0)
m.c864 = Constraint(expr= m.x579 - 0.690184503917672*m.b678 <= 0)
m.c865 = Constraint(expr= m.x580 - 0.690184503917672*m.b679 <= 0)
m.c866 = Constraint(expr= m.x581 + 0.690184503917672*m.b677 <= 0.690184503917672)
m.c867 = Constraint(expr= m.x582 + 0.690184503917672*m.b678 <= 0.690184503917672)
m.c868 = Constraint(expr= m.x583 + 0.690184503917672*m.b679 <= 0.690184503917672)
m.c869 = Constraint(expr= - 0.9*m.x554 + m.x584 == 0)
m.c870 = Constraint(expr= - 0.9*m.x555 + m.x585 == 0)
m.c871 = Constraint(expr= - 0.9*m.x556 + m.x586 == 0)
m.c872 = Constraint(expr= m.x557 == 0)
m.c873 = Constraint(expr= m.x558 == 0)
m.c874 = Constraint(expr= m.x559 == 0)
m.c875 = Constraint(expr= m.x587 == 0)
m.c876 = Constraint(expr= m.x588 == 0)
m.c877 = Constraint(expr= m.x589 == 0)
m.c878 = Constraint(expr= m.x188 - m.x554 - m.x557 == 0)
m.c879 = Constraint(expr= m.x189 - m.x555 - m.x558 == 0)
m.c880 = Constraint(expr= m.x190 - m.x556 - m.x559 == 0)
m.c881 = Constraint(expr= m.x206 - m.x584 - m.x587 == 0)
m.c882 = Constraint(expr= m.x207 - m.x585 - m.x588 == 0)
m.c883 = Constraint(expr= m.x208 - m.x586 - m.x589 == 0)
m.c884 = Constraint(expr= m.x554 - 15*m.b680 <= 0)
m.c885 = Constraint(expr= m.x555 - 15*m.b681 <= 0)
m.c886 = Constraint(expr= m.x556 - 15*m.b682 <= 0)
m.c887 = Constraint(expr= m.x557 + 15*m.b680 <= 15)
m.c888 = Constraint(expr= m.x558 + 15*m.b681 <= 15)
m.c889 = Constraint(expr= m.x559 + 15*m.b682 <= 15)
m.c890 = Constraint(expr= m.x584 - 13.5*m.b680 <= 0)
m.c891 = Constraint(expr= m.x585 - 13.5*m.b681 <= 0)
m.c892 = Constraint(expr= m.x586 - 13.5*m.b682 <= 0)
m.c893 = Constraint(expr= m.x587 + 13.5*m.b680 <= 13.5)
m.c894 = Constraint(expr= m.x588 + 13.5*m.b681 <= 13.5)
m.c895 = Constraint(expr= m.x589 + 13.5*m.b682 <= 13.5)
m.c896 = Constraint(expr= - 0.6*m.x560 + m.x590 == 0)
m.c897 = Constraint(expr= - 0.6*m.x561 + m.x591 == 0)
m.c898 = Constraint(expr= - 0.6*m.x562 + m.x592 == 0)
m.c899 = Constraint(expr= m.x563 == 0)
m.c900 = Constraint(expr= m.x564 == 0)
m.c901 = Constraint(expr= m.x565 == 0)
m.c902 = Constraint(expr= m.x593 == 0)
m.c903 = Constraint(expr= m.x594 == 0)
m.c904 = Constraint(expr= m.x595 == 0)
m.c905 = Constraint(expr= m.x191 - m.x560 - m.x563 == 0)
m.c906 = Constraint(expr= m.x192 - m.x561 - m.x564 == 0)
m.c907 = Constraint(expr= m.x193 - m.x562 - m.x565 == 0)
m.c908 = Constraint(expr= m.x209 - m.x590 - m.x593 == 0)
m.c909 = Constraint(expr= m.x210 - m.x591 - m.x594 == 0)
m.c910 = Constraint(expr= m.x211 - m.x592 - m.x595 == 0)
m.c911 = Constraint(expr= m.x560 - 15*m.b683 <= 0)
m.c912 = Constraint(expr= m.x561 - 15*m.b684 <= 0)
m.c913 = Constraint(expr= m.x562 - 15*m.b685 <= 0)
m.c914 = Constraint(expr= m.x563 + 15*m.b683 <= 15)
m.c915 = Constraint(expr= m.x564 + 15*m.b684 <= 15)
m.c916 = Constraint(expr= m.x565 + 15*m.b685 <= 15)
m.c917 = Constraint(expr= m.x590 - 9*m.b683 <= 0)
m.c918 = Constraint(expr= m.x591 - 9*m.b684 <= 0)
m.c919 = Constraint(expr= m.x592 - 9*m.b685 <= 0)
m.c920 = Constraint(expr= m.x593 + 9*m.b683 <= 9)
m.c921 = Constraint(expr= m.x594 + 9*m.b684 <= 9)
m.c922 = Constraint(expr= m.x595 + 9*m.b685 <= 9)
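
# c923-c1012 each fix an auxiliary variable x776..x865 to a negative multiple
# of a binary (x = -k*b), which looks like a fixed-charge term collected by
# the objective elsewhere in the model (interpretation only; the generated
# source carries no comments).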
m.c923 = Constraint(expr= 5*m.b686 + m.x776 == 0)
m.c924 = Constraint(expr= 4*m.b687 + m.x777 == 0)
m.c925 = Constraint(expr= 6*m.b688 + m.x778 == 0)
m.c926 = Constraint(expr= 8*m.b689 + m.x779 == 0)
m.c927 = Constraint(expr= 7*m.b690 + m.x780 == 0)
m.c928 = Constraint(expr= 6*m.b691 + m.x781 == 0)
m.c929 = Constraint(expr= 6*m.b692 + m.x782 == 0)
m.c930 = Constraint(expr= 9*m.b693 + m.x783 == 0)
m.c931 = Constraint(expr= 4*m.b694 + m.x784 == 0)
m.c932 = Constraint(expr= 10*m.b695 + m.x785 == 0)
m.c933 = Constraint(expr= 9*m.b696 + m.x786 == 0)
m.c934 = Constraint(expr= 5*m.b697 + m.x787 == 0)
m.c935 = Constraint(expr= 6*m.b698 + m.x788 == 0)
m.c936 = Constraint(expr= 10*m.b699 + m.x789 == 0)
m.c937 = Constraint(expr= 6*m.b700 + m.x790 == 0)
m.c938 = Constraint(expr= 7*m.b701 + m.x791 == 0)
m.c939 = Constraint(expr= 7*m.b702 + m.x792 == 0)
m.c940 = Constraint(expr= 4*m.b703 + m.x793 == 0)
m.c941 = Constraint(expr= 4*m.b704 + m.x794 == 0)
m.c942 = Constraint(expr= 3*m.b705 + m.x795 == 0)
m.c943 = Constraint(expr= 2*m.b706 + m.x796 == 0)
m.c944 = Constraint(expr= 5*m.b707 + m.x797 == 0)
m.c945 = Constraint(expr= 6*m.b708 + m.x798 == 0)
m.c946 = Constraint(expr= 7*m.b709 + m.x799 == 0)
m.c947 = Constraint(expr= 2*m.b710 + m.x800 == 0)
m.c948 = Constraint(expr= 5*m.b711 + m.x801 == 0)
m.c949 = Constraint(expr= 2*m.b712 + m.x802 == 0)
m.c950 = Constraint(expr= 4*m.b713 + m.x803 == 0)
m.c951 = Constraint(expr= 7*m.b714 + m.x804 == 0)
m.c952 = Constraint(expr= 4*m.b715 + m.x805 == 0)
m.c953 = Constraint(expr= 3*m.b716 + m.x806 == 0)
m.c954 = Constraint(expr= 9*m.b717 + m.x807 == 0)
m.c955 = Constraint(expr= 3*m.b718 + m.x808 == 0)
m.c956 = Constraint(expr= 7*m.b719 + m.x809 == 0)
m.c957 = Constraint(expr= 2*m.b720 + m.x810 == 0)
m.c958 = Constraint(expr= 9*m.b721 + m.x811 == 0)
m.c959 = Constraint(expr= 3*m.b722 + m.x812 == 0)
m.c960 = Constraint(expr= m.b723 + m.x813 == 0)
m.c961 = Constraint(expr= 9*m.b724 + m.x814 == 0)
m.c962 = Constraint(expr= 2*m.b725 + m.x815 == 0)
m.c963 = Constraint(expr= 6*m.b726 + m.x816 == 0)
m.c964 = Constraint(expr= 3*m.b727 + m.x817 == 0)
m.c965 = Constraint(expr= 4*m.b728 + m.x818 == 0)
m.c966 = Constraint(expr= 8*m.b729 + m.x819 == 0)
m.c967 = Constraint(expr= m.b730 + m.x820 == 0)
m.c968 = Constraint(expr= 2*m.b731 + m.x821 == 0)
m.c969 = Constraint(expr= 5*m.b732 + m.x822 == 0)
m.c970 = Constraint(expr= 2*m.b733 + m.x823 == 0)
m.c971 = Constraint(expr= 3*m.b734 + m.x824 == 0)
m.c972 = Constraint(expr= 4*m.b735 + m.x825 == 0)
m.c973 = Constraint(expr= 3*m.b736 + m.x826 == 0)
m.c974 = Constraint(expr= 5*m.b737 + m.x827 == 0)
m.c975 = Constraint(expr= 7*m.b738 + m.x828 == 0)
m.c976 = Constraint(expr= 6*m.b739 + m.x829 == 0)
m.c977 = Constraint(expr= 2*m.b740 + m.x830 == 0)
m.c978 = Constraint(expr= 8*m.b741 + m.x831 == 0)
m.c979 = Constraint(expr= 4*m.b742 + m.x832 == 0)
m.c980 = Constraint(expr= m.b743 + m.x833 == 0)
m.c981 = Constraint(expr= 4*m.b744 + m.x834 == 0)
m.c982 = Constraint(expr= m.b745 + m.x835 == 0)
m.c983 = Constraint(expr= 2*m.b746 + m.x836 == 0)
m.c984 = Constraint(expr= 5*m.b747 + m.x837 == 0)
m.c985 = Constraint(expr= 2*m.b748 + m.x838 == 0)
m.c986 = Constraint(expr= 9*m.b749 + m.x839 == 0)
m.c987 = Constraint(expr= 2*m.b750 + m.x840 == 0)
m.c988 = Constraint(expr= 9*m.b751 + m.x841 == 0)
m.c989 = Constraint(expr= 5*m.b752 + m.x842 == 0)
m.c990 = Constraint(expr= 8*m.b753 + m.x843 == 0)
m.c991 = Constraint(expr= 4*m.b754 + m.x844 == 0)
m.c992 = Constraint(expr= 2*m.b755 + m.x845 == 0)
m.c993 = Constraint(expr= 3*m.b756 + m.x846 == 0)
m.c994 = Constraint(expr= 8*m.b757 + m.x847 == 0)
m.c995 = Constraint(expr= 10*m.b758 + m.x848 == 0)
m.c996 = Constraint(expr= 6*m.b759 + m.x849 == 0)
m.c997 = Constraint(expr= 3*m.b760 + m.x850 == 0)
m.c998 = Constraint(expr= 4*m.b761 + m.x851 == 0)
m.c999 = Constraint(expr= 8*m.b762 + m.x852 == 0)
m.c1000 = Constraint(expr= 7*m.b763 + m.x853 == 0)
m.c1001 = Constraint(expr= 7*m.b764 + m.x854 == 0)
m.c1002 = Constraint(expr= 3*m.b765 + m.x855 == 0)
m.c1003 = Constraint(expr= 9*m.b766 + m.x856 == 0)
m.c1004 = Constraint(expr= 4*m.b767 + m.x857 == 0)
m.c1005 = Constraint(expr= 8*m.b768 + m.x858 == 0)
m.c1006 = Constraint(expr= 6*m.b769 + m.x859 == 0)
m.c1007 = Constraint(expr= 2*m.b770 + m.x860 == 0)
m.c1008 = Constraint(expr= m.b771 + m.x861 == 0)
m.c1009 = Constraint(expr= 3*m.b772 + m.x862 == 0)
m.c1010 = Constraint(expr= 8*m.b773 + m.x863 == 0)
m.c1011 = Constraint(expr= 3*m.b774 + m.x864 == 0)
m.c1012 = Constraint(expr= 4*m.b775 + m.x865 == 0)
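
# c1013-c1102 order each binary triple as b <= b' <= b'', a pattern that
# usually encodes either symmetry breaking or "once selected, stays selected"
# across consecutive periods (interpretation only).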
m.c1013 = Constraint(expr= m.b596 - m.b597 <= 0)
m.c1014 = Constraint(expr= m.b596 - m.b598 <= 0)
m.c1015 = Constraint(expr= m.b597 - m.b598 <= 0)
m.c1016 = Constraint(expr= m.b599 - m.b600 <= 0)
m.c1017 = Constraint(expr= m.b599 - m.b601 <= 0)
m.c1018 = Constraint(expr= m.b600 - m.b601 <= 0)
m.c1019 = Constraint(expr= m.b602 - m.b603 <= 0)
m.c1020 = Constraint(expr= m.b602 - m.b604 <= 0)
m.c1021 = Constraint(expr= m.b603 - m.b604 <= 0)
m.c1022 = Constraint(expr= m.b605 - m.b606 <= 0)
m.c1023 = Constraint(expr= m.b605 - m.b607 <= 0)
m.c1024 = Constraint(expr= m.b606 - m.b607 <= 0)
m.c1025 = Constraint(expr= m.b608 - m.b609 <= 0)
m.c1026 = Constraint(expr= m.b608 - m.b610 <= 0)
m.c1027 = Constraint(expr= m.b609 - m.b610 <= 0)
m.c1028 = Constraint(expr= m.b611 - m.b612 <= 0)
m.c1029 = Constraint(expr= m.b611 - m.b613 <= 0)
m.c1030 = Constraint(expr= m.b612 - m.b613 <= 0)
m.c1031 = Constraint(expr= m.b614 - m.b615 <= 0)
m.c1032 = Constraint(expr= m.b614 - m.b616 <= 0)
m.c1033 = Constraint(expr= m.b615 - m.b616 <= 0)
m.c1034 = Constraint(expr= m.b617 - m.b618 <= 0)
m.c1035 = Constraint(expr= m.b617 - m.b619 <= 0)
m.c1036 = Constraint(expr= m.b618 - m.b619 <= 0)
m.c1037 = Constraint(expr= m.b620 - m.b621 <= 0)
m.c1038 = Constraint(expr= m.b620 - m.b622 <= 0)
m.c1039 = Constraint(expr= m.b621 - m.b622 <= 0)
m.c1040 = Constraint(expr= m.b623 - m.b624 <= 0)
m.c1041 = Constraint(expr= m.b623 - m.b625 <= 0)
m.c1042 = Constraint(expr= m.b624 - m.b625 <= 0)
m.c1043 = Constraint(expr= m.b626 - m.b627 <= 0)
m.c1044 = Constraint(expr= m.b626 - m.b628 <= 0)
m.c1045 = Constraint(expr= m.b627 - m.b628 <= 0)
m.c1046 = Constraint(expr= m.b629 - m.b630 <= 0)
m.c1047 = Constraint(expr= m.b629 - m.b631 <= 0)
m.c1048 = Constraint(expr= m.b630 - m.b631 <= 0)
m.c1049 = Constraint(expr= m.b632 - m.b633 <= 0)
m.c1050 = Constraint(expr= m.b632 - m.b634 <= 0)
m.c1051 = Constraint(expr= m.b633 - m.b634 <= 0)
m.c1052 = Constraint(expr= m.b635 - m.b636 <= 0)
m.c1053 = Constraint(expr= m.b635 - m.b637 <= 0)
m.c1054 = Constraint(expr= m.b636 - m.b637 <= 0)
m.c1055 = Constraint(expr= m.b638 - m.b639 <= 0)
m.c1056 = Constraint(expr= m.b638 - m.b640 <= 0)
m.c1057 = Constraint(expr= m.b639 - m.b640 <= 0)
m.c1058 = Constraint(expr= m.b641 - m.b642 <= 0)
m.c1059 = Constraint(expr= m.b641 - m.b643 <= 0)
m.c1060 = Constraint(expr= m.b642 - m.b643 <= 0)
m.c1061 = Constraint(expr= m.b644 - m.b645 <= 0)
m.c1062 = Constraint(expr= m.b644 - m.b646 <= 0)
m.c1063 = Constraint(expr= m.b645 - m.b646 <= 0)
m.c1064 = Constraint(expr= m.b647 - m.b648 <= 0)
m.c1065 = Constraint(expr= m.b647 - m.b649 <= 0)
m.c1066 = Constraint(expr= m.b648 - m.b649 <= 0)
m.c1067 = Constraint(expr= m.b650 - m.b651 <= 0)
m.c1068 = Constraint(expr= m.b650 - m.b652 <= 0)
m.c1069 = Constraint(expr= m.b651 - m.b652 <= 0)
m.c1070 = Constraint(expr= m.b653 - m.b654 <= 0)
m.c1071 = Constraint(expr= m.b653 - m.b655 <= 0)
m.c1072 = Constraint(expr= m.b654 - m.b655 <= 0)
m.c1073 = Constraint(expr= m.b656 - m.b657 <= 0)
m.c1074 = Constraint(expr= m.b656 - m.b658 <= 0)
m.c1075 = Constraint(expr= m.b657 - m.b658 <= 0)
m.c1076 = Constraint(expr= m.b659 - m.b660 <= 0)
m.c1077 = Constraint(expr= m.b659 - m.b661 <= 0)
m.c1078 = Constraint(expr= m.b660 - m.b661 <= 0)
m.c1079 = Constraint(expr= m.b662 - m.b663 <= 0)
m.c1080 = Constraint(expr= m.b662 - m.b664 <= 0)
m.c1081 = Constraint(expr= m.b663 - m.b664 <= 0)
m.c1082 = Constraint(expr= m.b665 - m.b666 <= 0)
m.c1083 = Constraint(expr= m.b665 - m.b667 <= 0)
m.c1084 = Constraint(expr= m.b666 - m.b667 <= 0)
m.c1085 = Constraint(expr= m.b668 - m.b669 <= 0)
m.c1086 = Constraint(expr= m.b668 - m.b670 <= 0)
m.c1087 = Constraint(expr= m.b669 - m.b670 <= 0)
m.c1088 = Constraint(expr= m.b671 - m.b672 <= 0)
m.c1089 = Constraint(expr= m.b671 - m.b673 <= 0)
m.c1090 = Constraint(expr= m.b672 - m.b673 <= 0)
m.c1091 = Constraint(expr= m.b674 - m.b675 <= 0)
m.c1092 = Constraint(expr= m.b674 - m.b676 <= 0)
m.c1093 = Constraint(expr= m.b675 - m.b676 <= 0)
m.c1094 = Constraint(expr= m.b677 - m.b678 <= 0)
m.c1095 = Constraint(expr= m.b677 - m.b679 <= 0)
m.c1096 = Constraint(expr= m.b678 - m.b679 <= 0)
m.c1097 = Constraint(expr= m.b680 - m.b681 <= 0)
m.c1098 = Constraint(expr= m.b680 - m.b682 <= 0)
m.c1099 = Constraint(expr= m.b681 - m.b682 <= 0)
m.c1100 = Constraint(expr= m.b683 - m.b684 <= 0)
m.c1101 = Constraint(expr= m.b683 - m.b685 <= 0)
m.c1102 = Constraint(expr= m.b684 - m.b685 <= 0)
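
# c1103-c1282 impose at-most-one within each binary triple. Several pairwise
# constraints repeat verbatim (e.g. c1103 and c1105 carry the same
# expression); the duplicates are preserved exactly as the generator emitted
# them.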
m.c1103 = Constraint(expr= m.b686 + m.b687 <= 1)
m.c1104 = Constraint(expr= m.b686 + m.b688 <= 1)
m.c1105 = Constraint(expr= m.b686 + m.b687 <= 1)
m.c1106 = Constraint(expr= m.b687 + m.b688 <= 1)
m.c1107 = Constraint(expr= m.b686 + m.b688 <= 1)
m.c1108 = Constraint(expr= m.b687 + m.b688 <= 1)
m.c1109 = Constraint(expr= m.b689 + m.b690 <= 1)
m.c1110 = Constraint(expr= m.b689 + m.b691 <= 1)
m.c1111 = Constraint(expr= m.b689 + m.b690 <= 1)
m.c1112 = Constraint(expr= m.b690 + m.b691 <= 1)
m.c1113 = Constraint(expr= m.b689 + m.b691 <= 1)
m.c1114 = Constraint(expr= m.b690 + m.b691 <= 1)
m.c1115 = Constraint(expr= m.b692 + m.b693 <= 1)
m.c1116 = Constraint(expr= m.b692 + m.b694 <= 1)
m.c1117 = Constraint(expr= m.b692 + m.b693 <= 1)
m.c1118 = Constraint(expr= m.b693 + m.b694 <= 1)
m.c1119 = Constraint(expr= m.b692 + m.b694 <= 1)
m.c1120 = Constraint(expr= m.b693 + m.b694 <= 1)
m.c1121 = Constraint(expr= m.b695 + m.b696 <= 1)
m.c1122 = Constraint(expr= m.b695 + m.b697 <= 1)
m.c1123 = Constraint(expr= m.b695 + m.b696 <= 1)
m.c1124 = Constraint(expr= m.b696 + m.b697 <= 1)
m.c1125 = Constraint(expr= m.b695 + m.b697 <= 1)
m.c1126 = Constraint(expr= m.b696 + m.b697 <= 1)
m.c1127 = Constraint(expr= m.b698 + m.b699 <= 1)
m.c1128 = Constraint(expr= m.b698 + m.b700 <= 1)
m.c1129 = Constraint(expr= m.b698 + m.b699 <= 1)
m.c1130 = Constraint(expr= m.b699 + m.b700 <= 1)
m.c1131 = Constraint(expr= m.b698 + m.b700 <= 1)
m.c1132 = Constraint(expr= m.b699 + m.b700 <= 1)
m.c1133 = Constraint(expr= m.b701 + m.b702 <= 1)
m.c1134 = Constraint(expr= m.b701 + m.b703 <= 1)
m.c1135 = Constraint(expr= m.b701 + m.b702 <= 1)
m.c1136 = Constraint(expr= m.b702 + m.b703 <= 1)
m.c1137 = Constraint(expr= m.b701 + m.b703 <= 1)
m.c1138 = Constraint(expr= m.b702 + m.b703 <= 1)
m.c1139 = Constraint(expr= m.b704 + m.b705 <= 1)
m.c1140 = Constraint(expr= m.b704 + m.b706 <= 1)
m.c1141 = Constraint(expr= m.b704 + m.b705 <= 1)
m.c1142 = Constraint(expr= m.b705 + m.b706 <= 1)
m.c1143 = Constraint(expr= m.b704 + m.b706 <= 1)
m.c1144 = Constraint(expr= m.b705 + m.b706 <= 1)
m.c1145 = Constraint(expr= m.b707 + m.b708 <= 1)
m.c1146 = Constraint(expr= m.b707 + m.b709 <= 1)
m.c1147 = Constraint(expr= m.b707 + m.b708 <= 1)
m.c1148 = Constraint(expr= m.b708 + m.b709 <= 1)
m.c1149 = Constraint(expr= m.b707 + m.b709 <= 1)
m.c1150 = Constraint(expr= m.b708 + m.b709 <= 1)
m.c1151 = Constraint(expr= m.b710 + m.b711 <= 1)
m.c1152 = Constraint(expr= m.b710 + m.b712 <= 1)
m.c1153 = Constraint(expr= m.b710 + m.b711 <= 1)
m.c1154 = Constraint(expr= m.b711 + m.b712 <= 1)
m.c1155 = Constraint(expr= m.b710 + m.b712 <= 1)
m.c1156 = Constraint(expr= m.b711 + m.b712 <= 1)
m.c1157 = Constraint(expr= m.b713 + m.b714 <= 1)
m.c1158 = Constraint(expr= m.b713 + m.b715 <= 1)
m.c1159 = Constraint(expr= m.b713 + m.b714 <= 1)
m.c1160 = Constraint(expr= m.b714 + m.b715 <= 1)
m.c1161 = Constraint(expr= m.b713 + m.b715 <= 1)
m.c1162 = Constraint(expr= m.b714 + m.b715 <= 1)
m.c1163 = Constraint(expr= m.b716 + m.b717 <= 1)
m.c1164 = Constraint(expr= m.b716 + m.b718 <= 1)
m.c1165 = Constraint(expr= m.b716 + m.b717 <= 1)
m.c1166 = Constraint(expr= m.b717 + m.b718 <= 1)
m.c1167 = Constraint(expr= m.b716 + m.b718 <= 1)
m.c1168 = Constraint(expr= m.b717 + m.b718 <= 1)
m.c1169 = Constraint(expr= m.b719 + m.b720 <= 1)
m.c1170 = Constraint(expr= m.b719 + m.b721 <= 1)
m.c1171 = Constraint(expr= m.b719 + m.b720 <= 1)
m.c1172 = Constraint(expr= m.b720 + m.b721 <= 1)
m.c1173 = Constraint(expr= m.b719 + m.b721 <= 1)
m.c1174 = Constraint(expr= m.b720 + m.b721 <= 1)
m.c1175 = Constraint(expr= m.b722 + m.b723 <= 1)
m.c1176 = Constraint(expr= m.b722 + m.b724 <= 1)
m.c1177 = Constraint(expr= m.b722 + m.b723 <= 1)
m.c1178 = Constraint(expr= m.b723 + m.b724 <= 1)
m.c1179 = Constraint(expr= m.b722 + m.b724 <= 1)
m.c1180 = Constraint(expr= m.b723 + m.b724 <= 1)
m.c1181 = Constraint(expr= m.b725 + m.b726 <= 1)
m.c1182 = Constraint(expr= m.b725 + m.b727 <= 1)
m.c1183 = Constraint(expr= m.b725 + m.b726 <= 1)
m.c1184 = Constraint(expr= m.b726 + m.b727 <= 1)
m.c1185 = Constraint(expr= m.b725 + m.b727 <= 1)
m.c1186 = Constraint(expr= m.b726 + m.b727 <= 1)
m.c1187 = Constraint(expr= m.b728 + m.b729 <= 1)
m.c1188 = Constraint(expr= m.b728 + m.b730 <= 1)
m.c1189 = Constraint(expr= m.b728 + m.b729 <= 1)
m.c1190 = Constraint(expr= m.b729 + m.b730 <= 1)
m.c1191 = Constraint(expr= m.b728 + m.b730 <= 1)
m.c1192 = Constraint(expr= m.b729 + m.b730 <= 1)
m.c1193 = Constraint(expr= m.b731 + m.b732 <= 1)
m.c1194 = Constraint(expr= m.b731 + m.b733 <= 1)
m.c1195 = Constraint(expr= m.b731 + m.b732 <= 1)
m.c1196 = Constraint(expr= m.b732 + m.b733 <= 1)
m.c1197 = Constraint(expr= m.b731 + m.b733 <= 1)
m.c1198 = Constraint(expr= m.b732 + m.b733 <= 1)
m.c1199 = Constraint(expr= m.b734 + m.b735 <= 1)
m.c1200 = Constraint(expr= m.b734 + m.b736 <= 1)
m.c1201 = Constraint(expr= m.b734 + m.b735 <= 1)
m.c1202 = Constraint(expr= m.b735 + m.b736 <= 1)
m.c1203 = Constraint(expr= m.b734 + m.b736 <= 1)
m.c1204 = Constraint(expr= m.b735 + m.b736 <= 1)
m.c1205 = Constraint(expr= m.b737 + m.b738 <= 1)
m.c1206 = Constraint(expr= m.b737 + m.b739 <= 1)
m.c1207 = Constraint(expr= m.b737 + m.b738 <= 1)
m.c1208 = Constraint(expr= m.b738 + m.b739 <= 1)
m.c1209 = Constraint(expr= m.b737 + m.b739 <= 1)
m.c1210 = Constraint(expr= m.b738 + m.b739 <= 1)
m.c1211 = Constraint(expr= m.b740 + m.b741 <= 1)
m.c1212 = Constraint(expr= m.b740 + m.b742 <= 1)
m.c1213 = Constraint(expr= m.b740 + m.b741 <= 1)
m.c1214 = Constraint(expr= m.b741 + m.b742 <= 1)
m.c1215 = Constraint(expr= m.b740 + m.b742 <= 1)
m.c1216 = Constraint(expr= m.b741 + m.b742 <= 1)
m.c1217 = Constraint(expr= m.b743 + m.b744 <= 1)
m.c1218 = Constraint(expr= m.b743 + m.b745 <= 1)
m.c1219 = Constraint(expr= m.b743 + m.b744 <= 1)
m.c1220 = Constraint(expr= m.b744 + m.b745 <= 1)
m.c1221 = Constraint(expr= m.b743 + m.b745 <= 1)
m.c1222 = Constraint(expr= m.b744 + m.b745 <= 1)
m.c1223 = Constraint(expr= m.b746 + m.b747 <= 1)
m.c1224 = Constraint(expr= m.b746 + m.b748 <= 1)
m.c1225 = Constraint(expr= m.b746 + m.b747 <= 1)
m.c1226 = Constraint(expr= m.b747 + m.b748 <= 1)
m.c1227 = Constraint(expr= m.b746 + m.b748 <= 1)
m.c1228 = Constraint(expr= m.b747 + m.b748 <= 1)
m.c1229 = Constraint(expr= m.b749 + m.b750 <= 1)
m.c1230 = Constraint(expr= m.b749 + m.b751 <= 1)
m.c1231 = Constraint(expr= m.b749 + m.b750 <= 1)
m.c1232 = Constraint(expr= m.b750 + m.b751 <= 1)
m.c1233 = Constraint(expr= m.b749 + m.b751 <= 1)
m.c1234 = Constraint(expr= m.b750 + m.b751 <= 1)
m.c1235 = Constraint(expr= m.b752 + m.b753 <= 1)
m.c1236 = Constraint(expr= m.b752 + m.b754 <= 1)
m.c1237 = Constraint(expr= m.b752 + m.b753 <= 1)
m.c1238 = Constraint(expr= m.b753 + m.b754 <= 1)
m.c1239 = Constraint(expr= m.b752 + m.b754 <= 1)
m.c1240 = Constraint(expr= m.b753 + m.b754 <= 1)
m.c1241 = Constraint(expr= m.b755 + m.b756 <= 1)
m.c1242 = Constraint(expr= m.b755 + m.b757 <= 1)
m.c1243 = Constraint(expr= m.b755 + m.b756 <= 1)
m.c1244 = Constraint(expr= m.b756 + m.b757 <= 1)
m.c1245 = Constraint(expr= m.b755 + m.b757 <= 1)
m.c1246 = Constraint(expr= m.b756 + m.b757 <= 1)
m.c1247 = Constraint(expr= m.b758 + m.b759 <= 1)
m.c1248 = Constraint(expr= m.b758 + m.b760 <= 1)
m.c1249 = Constraint(expr= m.b758 + m.b759 <= 1)
m.c1250 = Constraint(expr= m.b759 + m.b760 <= 1)
m.c1251 = Constraint(expr= m.b758 + m.b760 <= 1)
m.c1252 = Constraint(expr= m.b759 + m.b760 <= 1)
m.c1253 = Constraint(expr= m.b761 + m.b762 <= 1)
m.c1254 = Constraint(expr= m.b761 + m.b763 <= 1)
m.c1255 = Constraint(expr= m.b761 + m.b762 <= 1)
m.c1256 = Constraint(expr= m.b762 + m.b763 <= 1)
m.c1257 = Constraint(expr= m.b761 + m.b763 <= 1)
m.c1258 = Constraint(expr= m.b762 + m.b763 <= 1)
m.c1259 = Constraint(expr= m.b764 + m.b765 <= 1)
m.c1260 = Constraint(expr= m.b764 + m.b766 <= 1)
m.c1261 = Constraint(expr= m.b764 + m.b765 <= 1)
m.c1262 = Constraint(expr= m.b765 + m.b766 <= 1)
m.c1263 = Constraint(expr= m.b764 + m.b766 <= 1)
m.c1264 = Constraint(expr= m.b765 + m.b766 <= 1)
m.c1265 = Constraint(expr= m.b767 + m.b768 <= 1)
m.c1266 = Constraint(expr= m.b767 + m.b769 <= 1)
m.c1267 = Constraint(expr= m.b767 + m.b768 <= 1)
m.c1268 = Constraint(expr= m.b768 + m.b769 <= 1)
m.c1269 = Constraint(expr= m.b767 + m.b769 <= 1)
m.c1270 = Constraint(expr= m.b768 + m.b769 <= 1)
m.c1271 = Constraint(expr= m.b770 + m.b771 <= 1)
m.c1272 = Constraint(expr= m.b770 + m.b772 <= 1)
m.c1273 = Constraint(expr= m.b770 + m.b771 <= 1)
m.c1274 = Constraint(expr= m.b771 + m.b772 <= 1)
m.c1275 = Constraint(expr= m.b770 + m.b772 <= 1)
m.c1276 = Constraint(expr= m.b771 + m.b772 <= 1)
m.c1277 = Constraint(expr= m.b773 + m.b774 <= 1)
m.c1278 = Constraint(expr= m.b773 + m.b775 <= 1)
m.c1279 = Constraint(expr= m.b773 + m.b774 <= 1)
m.c1280 = Constraint(expr= m.b774 + m.b775 <= 1)
m.c1281 = Constraint(expr= m.b773 + m.b775 <= 1)
m.c1282 = Constraint(expr= m.b774 + m.b775 <= 1)
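
# c1283-c1372 appear to tie each cost binary (b686 onward) to the period in
# which its selection binary first switches on; e.g. c1284 reads
# b687 >= b597 - b596, matching an incremental fixed-charge pattern
# (interpretation only).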
m.c1283 = Constraint(expr= m.b596 - m.b686 <= 0)
m.c1284 = Constraint(expr= - m.b596 + m.b597 - m.b687 <= 0)
m.c1285 = Constraint(expr= - m.b596 - m.b597 + m.b598 - m.b688 <= 0)
m.c1286 = Constraint(expr= m.b599 - m.b689 <= 0)
m.c1287 = Constraint(expr= - m.b599 + m.b600 - m.b690 <= 0)
m.c1288 = Constraint(expr= - m.b599 - m.b600 + m.b601 - m.b691 <= 0)
m.c1289 = Constraint(expr= m.b602 - m.b692 <= 0)
m.c1290 = Constraint(expr= - m.b602 + m.b603 - m.b693 <= 0)
m.c1291 = Constraint(expr= - m.b602 - m.b603 + m.b604 - m.b694 <= 0)
m.c1292 = Constraint(expr= m.b605 - m.b695 <= 0)
m.c1293 = Constraint(expr= - m.b605 + m.b606 - m.b696 <= 0)
m.c1294 = Constraint(expr= - m.b605 - m.b606 + m.b607 - m.b697 <= 0)
m.c1295 = Constraint(expr= m.b608 - m.b698 <= 0)
m.c1296 = Constraint(expr= - m.b608 + m.b609 - m.b699 <= 0)
m.c1297 = Constraint(expr= - m.b608 - m.b609 + m.b610 - m.b700 <= 0)
m.c1298 = Constraint(expr= m.b611 - m.b701 <= 0)
m.c1299 = Constraint(expr= - m.b611 + m.b612 - m.b702 <= 0)
m.c1300 = Constraint(expr= - m.b611 - m.b612 + m.b613 - m.b703 <= 0)
m.c1301 = Constraint(expr= m.b614 - m.b704 <= 0)
m.c1302 = Constraint(expr= - m.b614 + m.b615 - m.b705 <= 0)
m.c1303 = Constraint(expr= - m.b614 - m.b615 + m.b616 - m.b706 <= 0)
m.c1304 = Constraint(expr= m.b617 - m.b707 <= 0)
m.c1305 = Constraint(expr= - m.b617 + m.b618 - m.b708 <= 0)
m.c1306 = Constraint(expr= - m.b617 - m.b618 + m.b619 - m.b709 <= 0)
m.c1307 = Constraint(expr= m.b620 - m.b710 <= 0)
m.c1308 = Constraint(expr= - m.b620 + m.b621 - m.b711 <= 0)
m.c1309 = Constraint(expr= - m.b620 - m.b621 + m.b622 - m.b712 <= 0)
m.c1310 = Constraint(expr= m.b623 - m.b713 <= 0)
m.c1311 = Constraint(expr= - m.b623 + m.b624 - m.b714 <= 0)
m.c1312 = Constraint(expr= - m.b623 - m.b624 + m.b625 - m.b715 <= 0)
m.c1313 = Constraint(expr= m.b626 - m.b716 <= 0)
m.c1314 = Constraint(expr= - m.b626 + m.b627 - m.b717 <= 0)
m.c1315 = Constraint(expr= - m.b626 - m.b627 + m.b628 - m.b718 <= 0)
m.c1316 = Constraint(expr= m.b629 - m.b719 <= 0)
m.c1317 = Constraint(expr= - m.b629 + m.b630 - m.b720 <= 0)
m.c1318 = Constraint(expr= - m.b629 - m.b630 + m.b631 - m.b721 <= 0)
m.c1319 = Constraint(expr= m.b632 - m.b722 <= 0)
m.c1320 = Constraint(expr= - m.b632 + m.b633 - m.b723 <= 0)
m.c1321 = Constraint(expr= - m.b632 - m.b633 + m.b634 - m.b724 <= 0)
m.c1322 = Constraint(expr= m.b635 - m.b725 <= 0)
m.c1323 = Constraint(expr= - m.b635 + m.b636 - m.b726 <= 0)
m.c1324 = Constraint(expr= - m.b635 - m.b636 + m.b637 - m.b727 <= 0)
m.c1325 = Constraint(expr= m.b638 - m.b728 <= 0)
m.c1326 = Constraint(expr= - m.b638 + m.b639 - m.b729 <= 0)
m.c1327 = Constraint(expr= - m.b638 - m.b639 + m.b640 - m.b730 <= 0)
m.c1328 = Constraint(expr= m.b641 - m.b731 <= 0)
m.c1329 = Constraint(expr= - m.b641 + m.b642 - m.b732 <= 0)
m.c1330 = Constraint(expr= - m.b641 - m.b642 + m.b643 - m.b733 <= 0)
m.c1331 = Constraint(expr= m.b644 - m.b734 <= 0)
m.c1332 = Constraint(expr= - m.b644 + m.b645 - m.b735 <= 0)
m.c1333 = Constraint(expr= - m.b644 - m.b645 + m.b646 - m.b736 <= 0)
m.c1334 = Constraint(expr= m.b647 - m.b737 <= 0)
m.c1335 = Constraint(expr= - m.b647 + m.b648 - m.b738 <= 0)
m.c1336 = Constraint(expr= - m.b647 - m.b648 + m.b649 - m.b739 <= 0)
m.c1337 = Constraint(expr= m.b650 - m.b740 <= 0)
m.c1338 = Constraint(expr= - m.b650 + m.b651 - m.b741 <= 0)
m.c1339 = Constraint(expr= - m.b650 - m.b651 + m.b652 - m.b742 <= 0)
m.c1340 = Constraint(expr= m.b653 - m.b743 <= 0)
m.c1341 = Constraint(expr= - m.b653 + m.b654 - m.b744 <= 0)
m.c1342 = Constraint(expr= - m.b653 - m.b654 + m.b655 - m.b745 <= 0)
m.c1343 = Constraint(expr= m.b656 - m.b746 <= 0)
m.c1344 = Constraint(expr= - m.b656 + m.b657 - m.b747 <= 0)
m.c1345 = Constraint(expr= - m.b656 - m.b657 + m.b658 - m.b748 <= 0)
m.c1346 = Constraint(expr= m.b659 - m.b749 <= 0)
m.c1347 = Constraint(expr= - m.b659 + m.b660 - m.b750 <= 0)
m.c1348 = Constraint(expr= - m.b659 - m.b660 + m.b661 - m.b751 <= 0)
m.c1349 = Constraint(expr= m.b662 - m.b752 <= 0)
m.c1350 = Constraint(expr= - m.b662 + m.b663 - m.b753 <= 0)
m.c1351 = Constraint(expr= - m.b662 - m.b663 + m.b664 - m.b754 <= 0)
m.c1352 = Constraint(expr= m.b665 - m.b755 <= 0)
m.c1353 = Constraint(expr= - m.b665 + m.b666 - m.b756 <= 0)
m.c1354 = Constraint(expr= - m.b665 - m.b666 + m.b667 - m.b757 <= 0)
m.c1355 = Constraint(expr= m.b668 - m.b758 <= 0)
m.c1356 = Constraint(expr= - m.b668 + m.b669 - m.b759 <= 0)
m.c1357 = Constraint(expr= - m.b668 - m.b669 + m.b670 - m.b760 <= 0)
m.c1358 = Constraint(expr= m.b671 - m.b761 <= 0)
m.c1359 = Constraint(expr= - m.b671 + m.b672 - m.b762 <= 0)
m.c1360 = Constraint(expr= - m.b671 - m.b672 + m.b673 - m.b763 <= 0)
m.c1361 = Constraint(expr= m.b674 - m.b764 <= 0)
m.c1362 = Constraint(expr= - m.b674 + m.b675 - m.b765 <= 0)
m.c1363 = Constraint(expr= - m.b674 - m.b675 + m.b676 - m.b766 <= 0)
m.c1364 = Constraint(expr= m.b677 - m.b767 <= 0)
m.c1365 = Constraint(expr= - m.b677 + m.b678 - m.b768 <= 0)
m.c1366 = Constraint(expr= - m.b677 - m.b678 + m.b679 - m.b769 <= 0)
m.c1367 = Constraint(expr= m.b680 - m.b770 <= 0)
m.c1368 = Constraint(expr= - m.b680 + m.b681 - m.b771 <= 0)
m.c1369 = Constraint(expr= - m.b680 - m.b681 + m.b682 - m.b772 <= 0)
m.c1370 = Constraint(expr= m.b683 - m.b773 <= 0)
m.c1371 = Constraint(expr= - m.b683 + m.b684 - m.b774 <= 0)
m.c1372 = Constraint(expr= - m.b683 - m.b684 + m.b685 - m.b775 <= 0)
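
# The remaining constraints read like selection logic for the flowsheet:
# c1373-c1375 force exactly one of two alternatives per period, and the
# surrounding inequalities chain downstream unit choices to upstream ones
# (interpretation only; variable names are positional, not semantic).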
m.c1373 = Constraint(expr= m.b596 + m.b599 == 1)
m.c1374 = Constraint(expr= m.b597 + m.b600 == 1)
m.c1375 = Constraint(expr= m.b598 + m.b601 == 1)
m.c1376 = Constraint(expr= - m.b602 + m.b611 + m.b614 >= 0)
m.c1377 = Constraint(expr= - m.b603 + m.b612 + m.b615 >= 0)
m.c1378 = Constraint(expr= - m.b604 + m.b613 + m.b616 >= 0)
m.c1379 = Constraint(expr= - m.b611 + m.b629 >= 0)
m.c1380 = Constraint(expr= - m.b612 + m.b630 >= 0)
m.c1381 = Constraint(expr= - m.b613 + m.b631 >= 0)
m.c1382 = Constraint(expr= - m.b614 + m.b632 >= 0)
m.c1383 = Constraint(expr= - m.b615 + m.b633 >= 0)
m.c1384 = Constraint(expr= - m.b616 + m.b634 >= 0)
m.c1385 = Constraint(expr= - m.b605 + m.b617 >= 0)
m.c1386 = Constraint(expr= - m.b606 + m.b618 >= 0)
m.c1387 = Constraint(expr= - m.b607 + m.b619 >= 0)
m.c1388 = Constraint(expr= - m.b617 + m.b635 + m.b638 >= 0)
m.c1389 = Constraint(expr= - m.b618 + m.b636 + m.b639 >= 0)
m.c1390 = Constraint(expr= - m.b619 + m.b637 + m.b640 >= 0)
m.c1391 = Constraint(expr= - m.b608 + m.b620 + m.b623 + m.b626 >= 0)
m.c1392 = Constraint(expr= - m.b609 + m.b621 + m.b624 + m.b627 >= 0)
m.c1393 = Constraint(expr= - m.b610 + m.b622 + m.b625 + m.b628 >= 0)
m.c1394 = Constraint(expr= - m.b620 + m.b638 >= 0)
m.c1395 = Constraint(expr= - m.b621 + m.b639 >= 0)
m.c1396 = Constraint(expr= - m.b622 + m.b640 >= 0)
m.c1397 = Constraint(expr= - m.b623 + m.b641 + m.b644 >= 0)
m.c1398 = Constraint(expr= - m.b624 + m.b642 + m.b645 >= 0)
m.c1399 = Constraint(expr= - m.b625 + m.b643 + m.b646 >= 0)
m.c1400 = Constraint(expr= - m.b626 + m.b647 + m.b650 + m.b653 >= 0)
m.c1401 = Constraint(expr= - m.b627 + m.b648 + m.b651 + m.b654 >= 0)
m.c1402 = Constraint(expr= - m.b628 + m.b649 + m.b652 + m.b655 >= 0)
m.c1403 = Constraint(expr= m.b596 + m.b599 - m.b602 >= 0)
m.c1404 = Constraint(expr= m.b597 + m.b600 - m.b603 >= 0)
m.c1405 = Constraint(expr= m.b598 + m.b601 - m.b604 >= 0)
m.c1406 = Constraint(expr= m.b596 + m.b599 - m.b605 >= 0)
m.c1407 = Constraint(expr= m.b597 + m.b600 - m.b606 >= 0)
m.c1408 = Constraint(expr= m.b598 + m.b601 - m.b607 >= 0)
m.c1409 = Constraint(expr= m.b596 + m.b599 - m.b608 >= 0)
m.c1410 = Constraint(expr= m.b597 + m.b600 - m.b609 >= 0)
m.c1411 = Constraint(expr= m.b598 + m.b601 - m.b610 >= 0)
m.c1412 = Constraint(expr= m.b602 - m.b611 >= 0)
m.c1413 = Constraint(expr= m.b603 - m.b612 >= 0)
m.c1414 = Constraint(expr= m.b604 - m.b613 >= 0)
m.c1415 = Constraint(expr= m.b602 - m.b614 >= 0)
m.c1416 = Constraint(expr= m.b603 - m.b615 >= 0)
m.c1417 = Constraint(expr= m.b604 - m.b616 >= 0)
m.c1418 = Constraint(expr= m.b605 - m.b617 >= 0)
m.c1419 = Constraint(expr= m.b606 - m.b618 >= 0)
m.c1420 = Constraint(expr= m.b607 - m.b619 >= 0)
m.c1421 = Constraint(expr= m.b608 - m.b620 >= 0)
m.c1422 = Constraint(expr= m.b609 - m.b621 >= 0)
m.c1423 = Constraint(expr= m.b610 - m.b622 >= 0)
m.c1424 = Constraint(expr= m.b608 - m.b623 >= 0)
m.c1425 = Constraint(expr= m.b609 - m.b624 >= 0)
m.c1426 = Constraint(expr= m.b610 - m.b625 >= 0)
m.c1427 = Constraint(expr= m.b608 - m.b626 >= 0)
m.c1428 = Constraint(expr= m.b609 - m.b627 >= 0)
m.c1429 = Constraint(expr= m.b610 - m.b628 >= 0)
m.c1430 = Constraint(expr= m.b611 - m.b629 >= 0)
m.c1431 = Constraint(expr= m.b612 - m.b630 >= 0)
m.c1432 = Constraint(expr= m.b613 - m.b631 >= 0)
m.c1433 = Constraint(expr= m.b614 - m.b632 >= 0)
m.c1434 = Constraint(expr= m.b615 - m.b633 >= 0)
m.c1435 = Constraint(expr= m.b616 - m.b634 >= 0)
m.c1436 = Constraint(expr= m.b617 - m.b635 >= 0)
m.c1437 = Constraint(expr= m.b618 - m.b636 >= 0)
m.c1438 = Constraint(expr= m.b619 - m.b637 >= 0)
m.c1439 = Constraint(expr= m.b617 - m.b638 >= 0)
m.c1440 = Constraint(expr= m.b618 - m.b639 >= 0)
m.c1441 = Constraint(expr= m.b619 - m.b640 >= 0)
m.c1442 = Constraint(expr= m.b623 - m.b641 >= 0)
m.c1443 = Constraint(expr= m.b624 - m.b642 >= 0)
m.c1444 = Constraint(expr= m.b625 - m.b643 >= 0)
m.c1445 = Constraint(expr= m.b623 - m.b644 >= 0)
m.c1446 = Constraint(expr= m.b624 - m.b645 >= 0)
m.c1447 = Constraint(expr= m.b625 - m.b646 >= 0)
m.c1448 = Constraint(expr= m.b626 - m.b647 >= 0)
m.c1449 = Constraint(expr= m.b627 - m.b648 >= 0)
m.c1450 = Constraint(expr= m.b628 - m.b649 >= 0)
m.c1451 = Constraint(expr= m.b626 - m.b650 >= 0)
m.c1452 = Constraint(expr= m.b627 - m.b651 >= 0)
m.c1453 = Constraint(expr= m.b628 - m.b652 >= 0)
m.c1454 = Constraint(expr= m.b626 - m.b653 >= 0)
m.c1455 = Constraint(expr= m.b627 - m.b654 >= 0)
m.c1456 = Constraint(expr= m.b628 - m.b655 >= 0)
m.c1457 = Constraint(expr= - m.b653 + m.b656 + m.b659 >= 0)
m.c1458 = Constraint(expr= - m.b654 + m.b657 + m.b660 >= 0)
m.c1459 = Constraint(expr= - m.b655 + m.b658 + m.b661 >= 0)
m.c1460 = Constraint(expr= - m.b662 + m.b671 + m.b674 >= 0)
m.c1461 = Constraint(expr= - m.b663 + m.b672 + m.b675 >= 0)
m.c1462 = Constraint(expr= - m.b664 + m.b673 + m.b676 >= 0)
m.c1463 = Constraint(expr= - m.b665 + m.b677 >= 0)
m.c1464 = Constraint(expr= - m.b666 + m.b678 >= 0)
m.c1465 = Constraint(expr= - m.b667 + m.b679 >= 0)
m.c1466 = Constraint(expr= m.b653 - m.b656 >= 0)
m.c1467 = Constraint(expr= m.b654 - m.b657 >= 0)
m.c1468 = Constraint(expr= m.b655 - m.b658 >= 0)
m.c1469 = Constraint(expr= m.b653 - m.b659 >= 0)
m.c1470 = Constraint(expr= m.b654 - m.b660 >= 0)
m.c1471 = Constraint(expr= m.b655 - m.b661 >= 0)
m.c1472 = Constraint(expr= m.b662 - m.b671 >= 0)
m.c1473 = Constraint(expr= m.b663 - m.b672 >= 0)
m.c1474 = Constraint(expr= m.b664 - m.b673 >= 0)
m.c1475 = Constraint(expr= m.b662 - m.b674 >= 0)
m.c1476 = Constraint(expr= m.b663 - m.b675 >= 0)
m.c1477 = Constraint(expr= m.b664 - m.b676 >= 0)
m.c1478 = Constraint(expr= m.b665 - m.b677 >= 0)
m.c1479 = Constraint(expr= m.b666 - m.b678 >= 0)
m.c1480 = Constraint(expr= m.b667 - m.b679 >= 0)
m.c1481 = Constraint(expr= m.b668 - m.b680 >= 0)
m.c1482 = Constraint(expr= m.b669 - m.b681 >= 0)
m.c1483 = Constraint(expr= m.b670 - m.b682 >= 0)
m.c1484 = Constraint(expr= m.b668 - m.b683 >= 0)
m.c1485 = Constraint(expr= m.b669 - m.b684 >= 0)
m.c1486 = Constraint(expr= m.b670 - m.b685 >= 0)
| [] |
sartography/star-drive | backend/tests/test_resources.py | c0f33378d42913c3e677e07f74eb46d7b2b82a0a | import unittest

from flask import json

from tests.base_test import BaseTest
from app import db, elastic_index
from app.model.resource import Resource
from app.model.resource_category import ResourceCategory
from app.model.resource_change_log import ResourceChangeLog
from app.model.user import Role


class TestResources(BaseTest, unittest.TestCase):
    def test_resource_basics(self):
        self.construct_resource()
        r = db.session.query(Resource).first()
        self.assertIsNotNone(r)
        r_id = r.id
        rv = self.app.get('/api/resource/%i' % r_id,
                          follow_redirects=True,
                          content_type="application/json")
        self.assert_success(rv)
        response = json.loads(rv.get_data(as_text=True))
        self.assertEqual(response["id"], r_id)
        self.assertEqual(response["title"], 'A+ Resource')
        self.assertEqual(response["description"], 'A delightful Resource destined to create rejoicing')

    def test_modify_resource_basics(self):
        self.construct_resource()
        r = db.session.query(Resource).first()
        self.assertIsNotNone(r)
        r_id = r.id
        rv = self.app.get('/api/resource/%i' % r_id, content_type="application/json")
        response = json.loads(rv.get_data(as_text=True))
        response['title'] = 'Edwarardos Lemonade and Oil Change'
        response['description'] = 'Better fluids for you and your car.'
        response['website'] = 'http://sartography.com'
        orig_date = response['last_updated']
        rv = self.app.put('/api/resource/%i' % r_id, data=self.jsonify(response), content_type="application/json",
                          follow_redirects=True, headers=self.logged_in_headers())
        self.assert_success(rv)
        rv = self.app.get('/api/resource/%i' % r_id, content_type="application/json")
        self.assert_success(rv)
        response = json.loads(rv.get_data(as_text=True))
        self.assertEqual(response['title'], 'Edwarardos Lemonade and Oil Change')
        self.assertEqual(response['description'], 'Better fluids for you and your car.')
        self.assertEqual(response['website'], 'http://sartography.com')
        self.assertNotEqual(orig_date, response['last_updated'])

    def test_delete_resource(self):
        r = self.construct_resource()
        r_id = r.id
        rv = self.app.get('api/resource/%i' % r_id, content_type="application/json")
        self.assert_success(rv)
        rv = self.app.delete('api/resource/%i' % r_id, content_type="application/json",
                             headers=self.logged_in_headers())
        self.assert_success(rv)
        rv = self.app.get('api/resource/%i' % r_id, content_type="application/json")
        self.assertEqual(404, rv.status_code)

    def test_delete_resource_with_admin_note_and_no_elastic_record(self):
        r = self.construct_resource()
        r_id = r.id
        rv = self.app.get('api/resource/%i' % r_id, content_type="application/json")
        self.assert_success(rv)
        self.construct_admin_note(user=self.construct_user(), resource=r)
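        # Removing the Elasticsearch document up front exercises the case
        # where the search index and the database have drifted out of sync.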
        elastic_index.remove_document(r, 'Resource')
        rv = self.app.delete('api/resource/%i' % r_id, content_type="application/json",
                             headers=self.logged_in_headers())
        self.assert_success(rv)
        rv = self.app.get('api/resource/%i' % r_id, content_type="application/json")
        self.assertEqual(404, rv.status_code)

    def test_create_resource(self):
        resource = {'title': "Resource of Resources", 'description': "You need this resource in your life.",
                    'organization_name': "Resource Org"}
        rv = self.app.post('api/resource', data=self.jsonify(resource), content_type="application/json",
                           follow_redirects=True, headers=self.logged_in_headers())
        self.assert_success(rv)
        response = json.loads(rv.get_data(as_text=True))
        self.assertEqual(response['title'], 'Resource of Resources')
        self.assertEqual(response['description'], 'You need this resource in your life.')
        self.assertIsNotNone(response['id'])

    def test_get_resource_by_category(self):
        c = self.construct_category()
        r = self.construct_resource()
        cr = ResourceCategory(resource=r, category=c, type='resource')
        db.session.add(cr)
        db.session.commit()
        rv = self.app.get(
            '/api/category/%i/resource' % c.id,
            content_type="application/json",
            headers=self.logged_in_headers())
        self.assert_success(rv)
        response = json.loads(rv.get_data(as_text=True))
        self.assertEqual(1, len(response))
        self.assertEqual(r.id, response[0]["resource_id"])
        self.assertEqual(r.description, response[0]["resource"]["description"])

    def test_get_resource_by_category_includes_category_details(self):
        c = self.construct_category(name="c1")
        c2 = self.construct_category(name="c2")
        r = self.construct_resource()
        cr = ResourceCategory(resource=r, category=c, type='resource')
        cr2 = ResourceCategory(resource=r, category=c2, type='resource')
        db.session.add_all([cr, cr2])
        db.session.commit()
        rv = self.app.get(
            '/api/category/%i/resource' % c.id,
            content_type="application/json",
            headers=self.logged_in_headers())
        self.assert_success(rv)
        response = json.loads(rv.get_data(as_text=True))
        self.assertEqual(r.id, response[0]["resource_id"])
        self.assertEqual(2, len(response[0]["resource"]["resource_categories"]))
        self.assertEqual("c1", response[0]["resource"]["resource_categories"][0]["category"]["name"])

    def test_category_resource_count(self):
        c = self.construct_category()
        r = self.construct_resource()
        cr = ResourceCategory(resource=r, category=c, type='resource')
        db.session.add(cr)
        db.session.commit()
        rv = self.app.get(
            '/api/category/%i' % c.id, content_type="application/json")
        self.assert_success(rv)
        response = json.loads(rv.get_data(as_text=True))
        self.assertEqual(1, response["resource_count"])
def test_get_category_by_resource(self):
c = self.construct_category()
r = self.construct_resource()
cr = ResourceCategory(resource=r, category=c, type='resource')
db.session.add(cr)
db.session.commit()
rv = self.app.get(
'/api/resource/%i/category' % r.id,
content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(1, len(response))
self.assertEqual(c.id, response[0]["id"])
self.assertEqual(c.name, response[0]["category"]["name"])
def test_add_category_to_resource(self):
c = self.construct_category()
r = self.construct_resource()
rc_data = {"resource_id": r.id, "category_id": c.id}
rv = self.app.post(
'/api/resource_category',
data=self.jsonify(rc_data),
content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(c.id, response["category_id"])
self.assertEqual(r.id, response["resource_id"])
def test_set_all_categories_on_resource(self):
c1 = self.construct_category(name="c1")
c2 = self.construct_category(name="c2")
c3 = self.construct_category(name="c3")
r = self.construct_resource()
rc_data = [
{
"category_id": c1.id
},
{
"category_id": c2.id
},
{
"category_id": c3.id
},
]
rv = self.app.post(
'/api/resource/%i/category' % r.id,
data=self.jsonify(rc_data),
content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(3, len(response))
rc_data = [{"category_id": c1.id}]
rv = self.app.post(
'/api/resource/%i/category' % r.id,
data=self.jsonify(rc_data),
content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(1, len(response))
def test_remove_category_from_resource(self):
self.test_add_category_to_resource()
rv = self.app.delete('/api/resource_category/%i' % 1)
self.assert_success(rv)
rv = self.app.get(
'/api/resource/%i/category' % 1, content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(0, len(response))
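    # The create/edit/delete requests below should each append a ResourceChangeLog entry recording the acting user and a type.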
def test_resource_change_log_types(self):
u = self.construct_user(email="[email protected]", role=Role.admin)
r = {'id': 258, 'title': "A Resource that is Super and Great", 'description': "You need this resource in your life."}
rv = self.app.post('api/resource', data=self.jsonify(r), content_type="application/json",
follow_redirects=True, headers=self.logged_in_headers())
self.assert_success(rv)
logs = ResourceChangeLog.query.all()
self.assertIsNotNone(logs[-1].resource_id)
self.assertIsNotNone(logs[-1].user_id)
self.assertEqual(logs[-1].type, 'create')
rv = self.app.get('api/resource/%i' % r['id'], content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
response['title'] = 'Super Great Resource'
rv = self.app.put('/api/resource/%i' % r['id'], data=self.jsonify(response), content_type="application/json",
follow_redirects=True, headers=self.logged_in_headers(user=u))
self.assert_success(rv)
rv = self.app.get('/api/resource/%i' % r['id'], content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(response['title'], 'Super Great Resource')
logs = ResourceChangeLog.query.all()
self.assertIsNotNone(logs[-1].resource_id)
self.assertIsNotNone(logs[-1].user_id)
self.assertEqual(logs[-1].type, 'edit')
rv = self.app.delete('api/resource/%i' % r['id'], content_type="application/json",
headers=self.logged_in_headers())
self.assert_success(rv)
logs = ResourceChangeLog.query.all()
self.assertIsNotNone(logs[-1].resource_id)
self.assertIsNotNone(logs[-1].user_id)
self.assertEqual(logs[-1].type, 'delete')
def test_get_resource_change_log_by_resource(self):
r = self.construct_resource()
u = self.construct_user(email="[email protected]", role=Role.admin)
rv = self.app.get('api/resource/%i' % r.id, content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
response['title'] = 'Super Great Resource'
rv = self.app.put('/api/resource/%i' % r.id, data=self.jsonify(response), content_type="application/json",
follow_redirects=True, headers=self.logged_in_headers(user=u))
self.assert_success(rv)
rv = self.app.get('/api/resource/%i/change_log' % r.id, content_type="application/json", headers=self.logged_in_headers())
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(response[-1]['user_id'], u.id)
def test_get_resource_change_log_by_user(self):
r = self.construct_resource()
u = self.construct_user(email="[email protected]", role=Role.admin)
rv = self.app.get('api/resource/%i' % r.id, content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
response['title'] = 'Super Great Resource'
rv = self.app.put('/api/resource/%i' % r.id, data=self.jsonify(response), content_type="application/json",
follow_redirects=True, headers=self.logged_in_headers(user=u))
self.assert_success(rv)
rv = self.app.get('/api/user/%i/resource_change_log' % u.id, content_type="application/json", headers=self.logged_in_headers())
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(response[-1]['resource_id'], r.id)
def test_covid19_resource_lists(self):
self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Free_educational_resources'])
self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Free_educational_resources'])
self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Supports_with_Living'])
self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Visual_Aids'])
self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Health_and_Telehealth'])
rv = self.app.get('api/resource/covid19/COVID-19_for_Autism', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 5)
rv = self.app.get('api/resource/covid19/Edu-tainment', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 4)
rv = self.app.get('api/resource/covid19/Free_educational_resources', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 2)
rv = self.app.get('api/resource/covid19/Supports_with_Living', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 1)
rv = self.app.get('api/resource/covid19/Visual_Aids', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 1)
rv = self.app.get('api/resource/covid19/Health_and_Telehealth', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 1)
def test_is_uva_education_content(self):
self.construct_resource(is_draft=True, title='Autism at UVA', is_uva_education_content=True)
self.construct_resource(is_draft=False, title='Healthy Eating', is_uva_education_content=True)
self.construct_resource(is_draft=True, title='Autism and the Arts', is_uva_education_content=False)
self.construct_resource(is_draft=False, title='Autism One', is_uva_education_content=True)
self.construct_resource(is_draft=False, title='Two', is_uva_education_content=False)
rv = self.app.get('api/resource/education', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 2)
rv = self.app.get('api/resource', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 5)
| [((70, 8, 70, 52), 'app.elastic_index.remove_document', 'elastic_index.remove_document', ({(70, 38, 70, 39): 'r', (70, 41, 70, 51): '"""Resource"""'}, {}), "(r, 'Resource')", False, 'from app import db, elastic_index\n'), ((91, 13, 91, 70), 'app.model.resource_category.ResourceCategory', 'ResourceCategory', (), '', False, 'from app.model.resource_category import ResourceCategory\n'), ((92, 8, 92, 26), 'app.db.session.add', 'db.session.add', ({(92, 23, 92, 25): 'cr'}, {}), '(cr)', False, 'from app import db, elastic_index\n'), ((93, 8, 93, 27), 'app.db.session.commit', 'db.session.commit', ({}, {}), '()', False, 'from app import db, elastic_index\n'), ((108, 13, 108, 70), 'app.model.resource_category.ResourceCategory', 'ResourceCategory', (), '', False, 'from app.model.resource_category import ResourceCategory\n'), ((109, 14, 109, 72), 'app.model.resource_category.ResourceCategory', 'ResourceCategory', (), '', False, 'from app.model.resource_category import ResourceCategory\n'), ((110, 8, 110, 37), 'app.db.session.add_all', 'db.session.add_all', ({(110, 27, 110, 36): '[cr, cr2]'}, {}), '([cr, cr2])', False, 'from app import db, elastic_index\n'), ((111, 8, 111, 27), 'app.db.session.commit', 'db.session.commit', ({}, {}), '()', False, 'from app import db, elastic_index\n'), ((128, 13, 128, 70), 'app.model.resource_category.ResourceCategory', 'ResourceCategory', (), '', False, 'from app.model.resource_category import ResourceCategory\n'), ((129, 8, 129, 26), 'app.db.session.add', 'db.session.add', ({(129, 23, 129, 25): 'cr'}, {}), '(cr)', False, 'from app import db, elastic_index\n'), ((130, 8, 130, 27), 'app.db.session.commit', 'db.session.commit', ({}, {}), '()', False, 'from app import db, elastic_index\n'), ((140, 13, 140, 70), 'app.model.resource_category.ResourceCategory', 'ResourceCategory', (), '', False, 'from app.model.resource_category import ResourceCategory\n'), ((141, 8, 141, 26), 'app.db.session.add', 'db.session.add', ({(141, 23, 141, 25): 'cr'}, {}), '(cr)', False, 'from app import db, elastic_index\n'), ((142, 8, 142, 27), 'app.db.session.commit', 'db.session.commit', ({}, {}), '()', False, 'from app import db, elastic_index\n'), ((218, 15, 218, 44), 'app.model.resource_change_log.ResourceChangeLog.query.all', 'ResourceChangeLog.query.all', ({}, {}), '()', False, 'from app.model.resource_change_log import ResourceChangeLog\n'), ((236, 15, 236, 44), 'app.model.resource_change_log.ResourceChangeLog.query.all', 'ResourceChangeLog.query.all', ({}, {}), '()', False, 'from app.model.resource_change_log import ResourceChangeLog\n'), ((245, 15, 245, 44), 'app.model.resource_change_log.ResourceChangeLog.query.all', 'ResourceChangeLog.query.all', ({}, {}), '()', False, 'from app.model.resource_change_log import ResourceChangeLog\n'), ((17, 12, 17, 38), 'app.db.session.query', 'db.session.query', ({(17, 29, 17, 37): 'Resource'}, {}), '(Resource)', False, 'from app import db, elastic_index\n'), ((31, 12, 31, 38), 'app.db.session.query', 'db.session.query', ({(31, 29, 31, 37): 'Resource'}, {}), '(Resource)', False, 'from app import db, elastic_index\n')] |
reubenjacob/kolibri | kolibri/core/auth/management/commands/sync.py | 028bb2ad63e438c832ff657d37f7b05c3400f2da | import json
import logging
import math
import re
from contextlib import contextmanager
from django.core.management import call_command
from django.core.management.base import CommandError
from morango.models import Filter
from morango.models import InstanceIDModel
from morango.models import ScopeDefinition
from morango.sync.controller import MorangoProfileController
from ..utils import create_superuser_and_provision_device
from ..utils import get_baseurl
from ..utils import get_client_and_server_certs
from ..utils import get_dataset_id
from ..utils import get_single_user_sync_filter
from ..utils import provision_single_user_device
from kolibri.core.auth.constants.morango_sync import PROFILE_FACILITY_DATA
from kolibri.core.auth.constants.morango_sync import ScopeDefinitions
from kolibri.core.auth.constants.morango_sync import State
from kolibri.core.auth.management.utils import get_facility
from kolibri.core.auth.management.utils import run_once
from kolibri.core.auth.models import dataset_cache
from kolibri.core.logger.utils.data import bytes_for_humans
from kolibri.core.tasks.exceptions import UserCancelledError
from kolibri.core.tasks.management.commands.base import AsyncCommand
from kolibri.core.utils.lock import db_lock
from kolibri.utils import conf
DATA_PORTAL_SYNCING_BASE_URL = conf.OPTIONS["Urls"]["DATA_PORTAL_SYNCING_BASE_URL"]
TRANSFER_MESSAGE = "{records_transferred}/{records_total}, {transfer_total}"
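# Filled in by the transfer trackers below, e.g. "250/1000, 1.2 MB" (illustrative values; bytes formatted by bytes_for_humans)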
logger = logging.getLogger(__name__)
class Command(AsyncCommand):
help = "Allow the syncing of facility data with Kolibri Data Portal or another Kolibri device."
def add_arguments(self, parser):
parser.add_argument(
"--facility", action="store", type=str, help="ID of facility to sync"
)
parser.add_argument(
"--baseurl", type=str, default=DATA_PORTAL_SYNCING_BASE_URL, dest="baseurl"
)
parser.add_argument("--noninteractive", action="store_true")
parser.add_argument(
"--chunk-size",
type=int,
default=500,
help="Chunk size of records to send/retrieve per request",
)
parser.add_argument(
"--no-push", action="store_true", help="Do not push data to the server"
)
parser.add_argument(
"--no-pull", action="store_true", help="Do not pull data from the server"
)
parser.add_argument(
"--username",
type=str,
help="username of superuser or facility admin on server we are syncing with",
)
parser.add_argument(
"--password",
type=str,
help="password of superuser or facility admin on server we are syncing with",
)
parser.add_argument(
"--user",
type=str,
help="for single-user syncing, the user ID of the account to be synced",
)
parser.add_argument(
"--no-provision",
action="store_true",
help="do not create a facility and temporary superuser",
)
# parser.add_argument("--scope-id", type=str, default=FULL_FACILITY)
def handle_async(self, *args, **options): # noqa C901
(
baseurl,
facility_id,
chunk_size,
username,
password,
user_id,
no_push,
no_pull,
noninteractive,
no_provision,
) = (
options["baseurl"],
options["facility"],
options["chunk_size"],
options["username"],
options["password"],
options["user"],
options["no_push"],
options["no_pull"],
options["noninteractive"],
options["no_provision"],
)
PORTAL_SYNC = baseurl == DATA_PORTAL_SYNCING_BASE_URL
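        # True when syncing with Kolibri Data Portal; otherwise this is a peer-to-peer sync with another Kolibri device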
# validate url that is passed in
if not PORTAL_SYNC:
baseurl = get_baseurl(baseurl)
# call this in case user directly syncs without migrating database
if not ScopeDefinition.objects.filter():
call_command("loaddata", "scopedefinitions")
dataset_cache.clear()
dataset_cache.activate()
# try to connect to server
controller = MorangoProfileController(PROFILE_FACILITY_DATA)
network_connection = controller.create_network_connection(baseurl)
# if instance_ids are equal, this means device is trying to sync with itself, which we don't allow
if (
InstanceIDModel.get_or_create_current_instance()[0].id
== network_connection.server_info["instance_id"]
):
raise CommandError(
"Device can not sync with itself. Please recheck base URL and try again."
)
if user_id: # it's a single-user sync
if not facility_id:
raise CommandError(
"Facility ID must be specified in order to do single-user syncing"
)
if not re.match("[a-f0-9]{32}", user_id):
raise CommandError("User ID must be a 32-character UUID (no dashes)")
dataset_id = get_dataset_id(
baseurl, identifier=facility_id, noninteractive=True
)
client_cert, server_cert, username = get_client_and_server_certs(
username,
password,
dataset_id,
network_connection,
user_id=user_id,
noninteractive=noninteractive,
)
scopes = [client_cert.scope_definition_id, server_cert.scope_definition_id]
if len(set(scopes)) != 2:
raise CommandError(
"To do a single-user sync, one device must have a single-user certificate, and the other a full-facility certificate."
)
elif PORTAL_SYNC: # do portal sync setup
facility = get_facility(
facility_id=facility_id, noninteractive=noninteractive
)
# check for the certs we own for the specific facility
client_cert = (
facility.dataset.get_owned_certificates()
.filter(scope_definition_id=ScopeDefinitions.FULL_FACILITY)
.first()
)
if not client_cert:
raise CommandError(
"This device does not own a certificate for Facility: {}".format(
facility.name
)
)
# get primary partition
scope_params = json.loads(client_cert.scope_params)
dataset_id = scope_params["dataset_id"]
# check if the server already has a cert for this facility
server_certs = network_connection.get_remote_certificates(
dataset_id, scope_def_id=ScopeDefinitions.FULL_FACILITY
)
# if necessary, push a cert up to the server
server_cert = (
server_certs[0]
if server_certs
else network_connection.push_signed_client_certificate_chain(
local_parent_cert=client_cert,
scope_definition_id=ScopeDefinitions.FULL_FACILITY,
scope_params=scope_params,
)
)
else: # do P2P setup
dataset_id = get_dataset_id(
baseurl, identifier=facility_id, noninteractive=noninteractive
)
client_cert, server_cert, username = get_client_and_server_certs(
username,
password,
dataset_id,
network_connection,
noninteractive=noninteractive,
)
logger.info("Syncing has been initiated (this may take a while)...")
sync_session_client = network_connection.create_sync_session(
client_cert, server_cert, chunk_size=chunk_size
)
try:
# pull from server
if not no_pull:
self._handle_pull(
sync_session_client,
noninteractive,
dataset_id,
client_cert,
server_cert,
user_id=user_id,
)
# and push our own data to server
if not no_push:
self._handle_push(
sync_session_client,
noninteractive,
dataset_id,
client_cert,
server_cert,
user_id=user_id,
)
if not no_provision:
with self._lock():
if user_id:
provision_single_user_device(user_id)
else:
create_superuser_and_provision_device(
username, dataset_id, noninteractive=noninteractive
)
except UserCancelledError:
if self.job:
self.job.extra_metadata.update(sync_state=State.CANCELLED)
self.job.save_meta()
logger.info("Syncing has been cancelled.")
return
network_connection.close()
if self.job:
self.job.extra_metadata.update(sync_state=State.COMPLETED)
self.job.save_meta()
dataset_cache.deactivate()
logger.info("Syncing has been completed.")
@contextmanager
def _lock(self):
cancellable = False
# job can't be cancelled while locked
if self.job:
cancellable = self.job.cancellable
self.job.save_as_cancellable(cancellable=False)
with db_lock():
yield
if self.job:
self.job.save_as_cancellable(cancellable=cancellable)
def _raise_cancel(self, *args, **kwargs):
if self.is_cancelled() and (not self.job or self.job.cancellable):
raise UserCancelledError()
def _handle_pull(
self,
sync_session_client,
noninteractive,
dataset_id,
client_cert,
server_cert,
user_id,
):
"""
:type sync_session_client: morango.sync.syncsession.SyncSessionClient
:type noninteractive: bool
:type dataset_id: str
"""
sync_client = sync_session_client.get_pull_client()
sync_client.signals.queuing.connect(self._raise_cancel)
sync_client.signals.transferring.connect(self._raise_cancel)
self._queueing_tracker_adapter(
sync_client.signals.queuing,
"Remotely preparing data",
State.REMOTE_QUEUING,
noninteractive,
)
self._transfer_tracker_adapter(
sync_client.signals.transferring,
"Receiving data ({})".format(TRANSFER_MESSAGE),
State.PULLING,
noninteractive,
)
self._queueing_tracker_adapter(
sync_client.signals.dequeuing,
"Locally integrating received data",
State.LOCAL_DEQUEUING,
noninteractive,
)
self._session_tracker_adapter(
sync_client.signals.session,
"Creating pull transfer session",
"Completed pull transfer session",
)
if not user_id:
# full-facility sync
sync_client.initialize(Filter(dataset_id))
else:
# single-user sync
client_is_single_user = (
client_cert.scope_definition_id == ScopeDefinitions.SINGLE_USER
)
filt = get_single_user_sync_filter(
dataset_id, user_id, is_read=client_is_single_user
)
sync_client.initialize(Filter(filt))
sync_client.run()
with self._lock():
sync_client.finalize()
def _handle_push(
self,
sync_session_client,
noninteractive,
dataset_id,
client_cert,
server_cert,
user_id,
):
"""
:type sync_session_client: morango.sync.syncsession.SyncSessionClient
:type noninteractive: bool
:type dataset_id: str
"""
sync_client = sync_session_client.get_push_client()
sync_client.signals.transferring.connect(self._raise_cancel)
self._queueing_tracker_adapter(
sync_client.signals.queuing,
"Locally preparing data to send",
State.LOCAL_QUEUING,
noninteractive,
)
self._transfer_tracker_adapter(
sync_client.signals.transferring,
"Sending data ({})".format(TRANSFER_MESSAGE),
State.PUSHING,
noninteractive,
)
self._queueing_tracker_adapter(
sync_client.signals.dequeuing,
"Remotely integrating data",
State.REMOTE_DEQUEUING,
noninteractive,
)
self._session_tracker_adapter(
sync_client.signals.session,
"Creating push transfer session",
"Completed push transfer session",
)
with self._lock():
if not user_id:
# full-facility sync
sync_client.initialize(Filter(dataset_id))
else:
# single-user sync
client_is_single_user = (
client_cert.scope_definition_id == ScopeDefinitions.SINGLE_USER
)
filt = get_single_user_sync_filter(
dataset_id, user_id, is_read=not client_is_single_user
)
sync_client.initialize(Filter(filt))
sync_client.run()
# we can't cancel remotely integrating data
if self.job:
self.job.save_as_cancellable(cancellable=False)
# allow server timeout since remotely integrating data can take a while and the request
# could timeout. In that case, we'll assume everything is good.
sync_client.finalize(allow_server_timeout=True)
def _update_all_progress(self, progress_fraction, progress):
"""
Override parent progress update callback to report from the progress tracker we're sent
"""
if self.job:
self.job.update_progress(progress_fraction, 1.0)
self.job.extra_metadata.update(progress.extra_data)
self.job.save_meta()
def _session_tracker_adapter(self, signal_group, started_msg, completed_msg):
"""
Attaches a signal handler to session creation signals
:type signal_group: morango.sync.syncsession.SyncSignalGroup
:type started_msg: str
:type completed_msg: str
"""
@run_once
def session_creation(transfer_session):
"""
A session is created individually for pushing and pulling
"""
logger.info(started_msg)
if self.job:
self.job.extra_metadata.update(sync_state=State.SESSION_CREATION)
@run_once
def session_destruction(transfer_session):
if transfer_session.records_total == 0:
logger.info("There are no records to transfer")
logger.info(completed_msg)
signal_group.started.connect(session_creation)
signal_group.completed.connect(session_destruction)
def _transfer_tracker_adapter(
self, signal_group, message, sync_state, noninteractive
):
"""
Attaches a signal handler to pushing/pulling signals
:type signal_group: morango.sync.syncsession.SyncSignalGroup
:type message: str
:type sync_state: str
:type noninteractive: bool
"""
tracker = self.start_progress(total=100)
def stats_msg(transfer_session):
transfer_total = (
transfer_session.bytes_sent + transfer_session.bytes_received
)
return message.format(
records_transferred=transfer_session.records_transferred,
records_total=transfer_session.records_total,
transfer_total=bytes_for_humans(transfer_total),
)
def stats(transfer_session):
logger.info(stats_msg(transfer_session))
def handler(transfer_session):
"""
:type transfer_session: morango.models.core.TransferSession
"""
progress = (
100
* transfer_session.records_transferred
/ float(transfer_session.records_total)
)
tracker.update_progress(
increment=math.ceil(progress - tracker.progress),
message=stats_msg(transfer_session),
extra_data=dict(
bytes_sent=transfer_session.bytes_sent,
bytes_received=transfer_session.bytes_received,
sync_state=sync_state,
),
)
if noninteractive or tracker.progressbar is None:
signal_group.started.connect(stats)
signal_group.in_progress.connect(stats)
signal_group.connect(handler)
# log one more time at end to capture in logging output
signal_group.completed.connect(stats)
def _queueing_tracker_adapter(
self, signal_group, message, sync_state, noninteractive
):
"""
Attaches a signal handler to queuing/dequeuing signals
:type signal_group: morango.sync.syncsession.SyncSignalGroup
:type message: str
:type sync_state: str
:type noninteractive: bool
"""
tracker = self.start_progress(total=2)
def started(transfer_session):
dataset_cache.clear()
if noninteractive or tracker.progressbar is None:
logger.info(message)
def handler(transfer_session):
tracker.update_progress(
message=message, extra_data=dict(sync_state=sync_state)
)
signal_group.started.connect(started)
signal_group.started.connect(handler)
signal_group.completed.connect(handler)
| [((36, 9, 36, 36), 'logging.getLogger', 'logging.getLogger', ({(36, 27, 36, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((120, 8, 120, 29), 'kolibri.core.auth.models.dataset_cache.clear', 'dataset_cache.clear', ({}, {}), '()', False, 'from kolibri.core.auth.models import dataset_cache\n'), ((121, 8, 121, 32), 'kolibri.core.auth.models.dataset_cache.activate', 'dataset_cache.activate', ({}, {}), '()', False, 'from kolibri.core.auth.models import dataset_cache\n'), ((124, 21, 124, 68), 'morango.sync.controller.MorangoProfileController', 'MorangoProfileController', ({(124, 46, 124, 67): 'PROFILE_FACILITY_DATA'}, {}), '(PROFILE_FACILITY_DATA)', False, 'from morango.sync.controller import MorangoProfileController\n'), ((265, 8, 265, 34), 'kolibri.core.auth.models.dataset_cache.deactivate', 'dataset_cache.deactivate', ({}, {}), '()', False, 'from kolibri.core.auth.models import dataset_cache\n'), ((117, 15, 117, 47), 'morango.models.ScopeDefinition.objects.filter', 'ScopeDefinition.objects.filter', ({}, {}), '()', False, 'from morango.models import ScopeDefinition\n'), ((118, 12, 118, 56), 'django.core.management.call_command', 'call_command', ({(118, 25, 118, 35): '"""loaddata"""', (118, 37, 118, 55): '"""scopedefinitions"""'}, {}), "('loaddata', 'scopedefinitions')", False, 'from django.core.management import call_command\n'), ((132, 18, 134, 13), 'django.core.management.base.CommandError', 'CommandError', ({(133, 16, 133, 89): '"""Device can not sync with itself. Please recheck base URL and try again."""'}, {}), "(\n 'Device can not sync with itself. Please recheck base URL and try again.')", False, 'from django.core.management.base import CommandError\n'), ((276, 13, 276, 22), 'kolibri.core.utils.lock.db_lock', 'db_lock', ({}, {}), '()', False, 'from kolibri.core.utils.lock import db_lock\n'), ((284, 18, 284, 38), 'kolibri.core.tasks.exceptions.UserCancelledError', 'UserCancelledError', ({}, {}), '()', False, 'from kolibri.core.tasks.exceptions import UserCancelledError\n'), ((516, 12, 516, 33), 'kolibri.core.auth.models.dataset_cache.clear', 'dataset_cache.clear', ({}, {}), '()', False, 'from kolibri.core.auth.models import dataset_cache\n'), ((139, 22, 141, 17), 'django.core.management.base.CommandError', 'CommandError', ({(140, 20, 140, 86): '"""Facility ID must be specified in order to do single-user syncing"""'}, {}), "('Facility ID must be specified in order to do single-user syncing'\n )", False, 'from django.core.management.base import CommandError\n'), ((142, 19, 142, 52), 're.match', 're.match', ({(142, 28, 142, 42): '"""[a-f0-9]{32}"""', (142, 44, 142, 51): 'user_id'}, {}), "('[a-f0-9]{32}', user_id)", False, 'import re\n'), ((143, 22, 143, 85), 'django.core.management.base.CommandError', 'CommandError', ({(143, 35, 143, 84): '"""User ID must be a 32-character UUID (no dashes)"""'}, {}), "('User ID must be a 32-character UUID (no dashes)')", False, 'from django.core.management.base import CommandError\n'), ((161, 22, 163, 17), 'django.core.management.base.CommandError', 'CommandError', ({(162, 20, 162, 138): '"""To do a single-user sync, one device must have a single-user certificate, and the other a full-facility certificate."""'}, {}), "(\n 'To do a single-user sync, one device must have a single-user certificate, and the other a full-facility certificate.'\n )", False, 'from django.core.management.base import CommandError\n'), ((166, 23, 168, 13), 'kolibri.core.auth.management.utils.get_facility', 'get_facility', (), '', False, 'from 
kolibri.core.auth.management.utils import get_facility\n'), ((184, 27, 184, 63), 'json.loads', 'json.loads', ({(184, 38, 184, 62): 'client_cert.scope_params'}, {}), '(client_cert.scope_params)', False, 'import json\n'), ((331, 35, 331, 53), 'morango.models.Filter', 'Filter', ({(331, 42, 331, 52): 'dataset_id'}, {}), '(dataset_id)', False, 'from morango.models import Filter\n'), ((340, 35, 340, 47), 'morango.models.Filter', 'Filter', ({(340, 42, 340, 46): 'filt'}, {}), '(filt)', False, 'from morango.models import Filter\n'), ((129, 12, 129, 60), 'morango.models.InstanceIDModel.get_or_create_current_instance', 'InstanceIDModel.get_or_create_current_instance', ({}, {}), '()', False, 'from morango.models import InstanceIDModel\n'), ((391, 39, 391, 57), 'morango.models.Filter', 'Filter', ({(391, 46, 391, 56): 'dataset_id'}, {}), '(dataset_id)', False, 'from morango.models import Filter\n'), ((400, 39, 400, 51), 'morango.models.Filter', 'Filter', ({(400, 46, 400, 50): 'filt'}, {}), '(filt)', False, 'from morango.models import Filter\n'), ((468, 31, 468, 63), 'kolibri.core.logger.utils.data.bytes_for_humans', 'bytes_for_humans', ({(468, 48, 468, 62): 'transfer_total'}, {}), '(transfer_total)', False, 'from kolibri.core.logger.utils.data import bytes_for_humans\n'), ((484, 26, 484, 64), 'math.ceil', 'math.ceil', ({(484, 36, 484, 63): '(progress - tracker.progress)'}, {}), '(progress - tracker.progress)', False, 'import math\n')] |
RezaFirouzii/fum-delta-vision | warp.py | 0a8ad1d434006a9aee0a12c1f021c0bca0bc87e2 | import math
import imageio
import cv2 as cv
import numpy as np
import transformer
def fix_rotation(img):
img_copy = img.copy()
img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
rows, cols = img.shape
img = cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9)
kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))
img = cv.morphologyEx(img, cv.MORPH_OPEN, kernel)
img = cv.medianBlur(img, 3)
contours, hierarchy = cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)
roi = max(contours, key=cv.contourArea)
x, y, w, h = cv.boundingRect(roi)
corners = [[x, y], [x + w, y], [x, y + h], [x + w, y + h]]
src = np.float32(corners)
# src = np.reshape(src, (len(src), 1, 2))
# perimeter = cv.arcLength(src, True)
# corners = cv.approxPolyDP(src, perimeter // 10, True)
# corners = np.vstack(corners)
dst = np.float32([[0, 0], [cols, 0], [0, rows], [cols, rows]])
matrix = cv.getPerspectiveTransform(src, dst)
rotated_img = cv.warpPerspective(img_copy, matrix, (cols, rows))
cv.imshow('', rotated_img)
D1 = 105
D2 = 175
D3 = 275
if __name__ == "__main__":
cap = cv.VideoCapture('samples/delta.mp4')
if not cap.isOpened():
raise IOError("Video was not opened!")
mse = 0
count = 0
reader = imageio.get_reader('samples/delta.mp4')
fps = reader.get_meta_data()['fps']
writer = imageio.get_writer('samples/result.mp4', fps=fps)
while True:
res, frame = cap.read()
if not res:
break
mean_error = 0
holes_count = 0
img = frame.copy()
cv.imshow('dfa', img)
frame = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
frame_copy = frame.copy()
# frame = cv.adaptiveThreshold(frame, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9)
# kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))
# frame = cv.morphologyEx(frame, cv.MORPH_OPEN, kernel)
# frame = cv.medianBlur(frame, 3)
# contours, hierarchy = cv.findContours(frame, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)
# roi = max(contours, key=cv.contourArea)
# x, y, w, h = cv.boundingRect(roi)
x, y, w, h = 115, 0, 445, 360
img = img[y: y+h, x: x+w]
img = transformer.rotate_along_axis(img, theta=40)
frame_copy = frame_copy[y: y+h, x: x+w]
frame_copy = transformer.rotate_along_axis(frame_copy, theta=40)
# cv.imshow('', frame_copy)
# cv.rectangle(frame_copy, (x, y), (x + w, y + h), (0, 255, 0), 2)
# cv.drawContours(frame_copy, roi, -1, (0, 0, 255), 2)
# res, mask = cv.threshold(frame_copy, 0, 255, cv.THRESH_BINARY)
# frame_copy = cv.bitwise_and(frame_copy, frame_copy, mask=mask)
# corners = cv.goodFeaturesToTrack(frame_copy, 1000, 0.0001, 1)
# corners = list(sorted(corners, key=lambda x: x[0][1]))
# print(corners[-1], corners[-2])
# print()
# corners = np.array([[38, 293], [407, 293]])
# for item in corners:
# # x, y = map(int, item.ravel())
# x, y = item
# cv.circle(img, (x, y), 5, (0, 0, 255), -1)
src = np.float32([[0, 0], [w, 0], [38, 293], [407, 293]])
dst = np.float32([[0, 0], [w, 0], [30, h], [w - 30, h]])
matrix = cv.getPerspectiveTransform(src, dst)
img = cv.warpPerspective(img, matrix, (w, h))
cv.imshow('', img)
img_copy = img.copy()
img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
img = cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9)
kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))
img = cv.morphologyEx(img, cv.MORPH_OPEN, kernel)
img = cv.medianBlur(img, 3)
origin = (w // 2 + 4, h // 2 + 2)
o1, o2 = origin
r = w // 2 + 1
ORIGIN = (0, 0)
R = 300 # mm
contours, hierarchy = cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)
contours = list(filter(lambda x: 50 < cv.contourArea(x) < 175, contours))
factor = 0.1
smooth_contours = []
for i in range(len(contours)):
epsilon = factor * cv.arcLength(contours[i], True)
approx = cv.approxPolyDP(contours[i], epsilon, True)
x, y, width, height = cv.boundingRect(approx)
area = width*height
if len(approx) == 4 and 75 < area < 200:
smooth_contours.append(contours[i])
center, radius = cv.minEnclosingCircle(approx)
radius = int(radius)
center = tuple(map(int, center))
x, y = center
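                # Convert pixel offsets from the board centre to millimetres: r pixels corresponds to R = 300 mm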
X = ((x - o1) * R) / r
Y = ((y - o2) * R) / r
X, Y = round(X, 2), round(Y, 2)
cv.circle(img_copy, center, radius, (0, 255, 0), 2)
cv.putText(img_copy, str((X, Y)), center, cv.FONT_HERSHEY_SIMPLEX, 0.3, (255, 0, 255, 255), 1, cv.LINE_AA)
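                # Error: distance of this hole from the nearest of the three reference radii D1/D2/D3 (mm)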
e1, e2, e3 = map(lambda d: abs(math.hypot(X, Y) - d), [D1, D2, D3])
error = min(e1, e2, e3)
if error < 10:
mean_error += error ** 2
holes_count += 1
cv.circle(img_copy, origin, 4, (0, 0, 255), -1)
# cv.line(img_copy, origin, (origin[0], origin[1]), (255, 0, 255), 2)
        if holes_count:
            mean_error /= holes_count
            mse += mean_error
            count += 1
cv.imshow("Final", img_copy)
writer.append_data(img_copy)
# cv.imshow("Chg", img)
if cv.waitKey(30) == 27:
break
print("E:", mse / count, "N:", count)
writer.close()
cap.release()
cv.destroyAllWindows() | [((9, 10, 9, 45), 'cv2.cvtColor', 'cv.cvtColor', ({(9, 22, 9, 25): 'img', (9, 27, 9, 44): 'cv.COLOR_BGR2GRAY'}, {}), '(img, cv.COLOR_BGR2GRAY)', True, 'import cv2 as cv\n'), ((11, 10, 11, 96), 'cv2.adaptiveThreshold', 'cv.adaptiveThreshold', ({(11, 31, 11, 34): 'img', (11, 36, 11, 39): '255', (11, 41, 11, 66): 'cv.ADAPTIVE_THRESH_MEAN_C', (11, 68, 11, 88): 'cv.THRESH_BINARY_INV', (11, 90, 11, 92): '15', (11, 94, 11, 95): '9'}, {}), '(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.\n THRESH_BINARY_INV, 15, 9)', True, 'import cv2 as cv\n'), ((13, 13, 13, 63), 'cv2.getStructuringElement', 'cv.getStructuringElement', ({(13, 38, 13, 54): 'cv.MORPH_ELLIPSE', (13, 56, 13, 62): '(3, 3)'}, {}), '(cv.MORPH_ELLIPSE, (3, 3))', True, 'import cv2 as cv\n'), ((14, 10, 14, 53), 'cv2.morphologyEx', 'cv.morphologyEx', ({(14, 26, 14, 29): 'img', (14, 31, 14, 44): 'cv.MORPH_OPEN', (14, 46, 14, 52): 'kernel'}, {}), '(img, cv.MORPH_OPEN, kernel)', True, 'import cv2 as cv\n'), ((16, 10, 16, 31), 'cv2.medianBlur', 'cv.medianBlur', ({(16, 24, 16, 27): 'img', (16, 29, 16, 30): '3'}, {}), '(img, 3)', True, 'import cv2 as cv\n'), ((18, 26, 18, 82), 'cv2.findContours', 'cv.findContours', ({(18, 42, 18, 45): 'img', (18, 47, 18, 59): 'cv.RETR_LIST', (18, 61, 18, 81): 'cv.CHAIN_APPROX_NONE'}, {}), '(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)', True, 'import cv2 as cv\n'), ((21, 17, 21, 37), 'cv2.boundingRect', 'cv.boundingRect', ({(21, 33, 21, 36): 'roi'}, {}), '(roi)', True, 'import cv2 as cv\n'), ((23, 10, 23, 29), 'numpy.float32', 'np.float32', ({(23, 21, 23, 28): 'corners'}, {}), '(corners)', True, 'import numpy as np\n'), ((29, 10, 29, 66), 'numpy.float32', 'np.float32', ({(29, 21, 29, 65): '[[0, 0], [cols, 0], [0, rows], [cols, rows]]'}, {}), '([[0, 0], [cols, 0], [0, rows], [cols, rows]])', True, 'import numpy as np\n'), ((31, 13, 31, 49), 'cv2.getPerspectiveTransform', 'cv.getPerspectiveTransform', ({(31, 40, 31, 43): 'src', (31, 45, 31, 48): 'dst'}, {}), '(src, dst)', True, 'import cv2 as cv\n'), ((32, 18, 32, 68), 'cv2.warpPerspective', 'cv.warpPerspective', ({(32, 37, 32, 45): 'img_copy', (32, 47, 32, 53): 'matrix', (32, 55, 32, 67): '(cols, rows)'}, {}), '(img_copy, matrix, (cols, rows))', True, 'import cv2 as cv\n'), ((33, 4, 33, 30), 'cv2.imshow', 'cv.imshow', ({(33, 14, 33, 16): '""""""', (33, 18, 33, 29): 'rotated_img'}, {}), "('', rotated_img)", True, 'import cv2 as cv\n'), ((41, 10, 41, 46), 'cv2.VideoCapture', 'cv.VideoCapture', ({(41, 26, 41, 45): '"""samples/delta.mp4"""'}, {}), "('samples/delta.mp4')", True, 'import cv2 as cv\n'), ((48, 13, 48, 52), 'imageio.get_reader', 'imageio.get_reader', ({(48, 32, 48, 51): '"""samples/delta.mp4"""'}, {}), "('samples/delta.mp4')", False, 'import imageio\n'), ((50, 13, 50, 62), 'imageio.get_writer', 'imageio.get_writer', (), '', False, 'import imageio\n'), ((164, 4, 164, 26), 'cv2.destroyAllWindows', 'cv.destroyAllWindows', ({}, {}), '()', True, 'import cv2 as cv\n'), ((61, 8, 61, 29), 'cv2.imshow', 'cv.imshow', ({(61, 18, 61, 23): '"""dfa"""', (61, 25, 61, 28): 'img'}, {}), "('dfa', img)", True, 'import cv2 as cv\n'), ((62, 16, 62, 53), 'cv2.cvtColor', 'cv.cvtColor', ({(62, 28, 62, 33): 'frame', (62, 35, 62, 52): 'cv.COLOR_BGR2GRAY'}, {}), '(frame, cv.COLOR_BGR2GRAY)', True, 'import cv2 as cv\n'), ((75, 14, 75, 58), 'transformer.rotate_along_axis', 'transformer.rotate_along_axis', (), '', False, 'import transformer\n'), ((77, 21, 77, 72), 'transformer.rotate_along_axis', 'transformer.rotate_along_axis', (), '', False, 'import transformer\n'), 
((95, 14, 95, 65), 'numpy.float32', 'np.float32', ({(95, 25, 95, 64): '[[0, 0], [w, 0], [38, 293], [407, 293]]'}, {}), '([[0, 0], [w, 0], [38, 293], [407, 293]])', True, 'import numpy as np\n'), ((96, 14, 96, 64), 'numpy.float32', 'np.float32', ({(96, 25, 96, 63): '[[0, 0], [w, 0], [30, h], [w - 30, h]]'}, {}), '([[0, 0], [w, 0], [30, h], [w - 30, h]])', True, 'import numpy as np\n'), ((97, 17, 97, 53), 'cv2.getPerspectiveTransform', 'cv.getPerspectiveTransform', ({(97, 44, 97, 47): 'src', (97, 49, 97, 52): 'dst'}, {}), '(src, dst)', True, 'import cv2 as cv\n'), ((98, 14, 98, 53), 'cv2.warpPerspective', 'cv.warpPerspective', ({(98, 33, 98, 36): 'img', (98, 38, 98, 44): 'matrix', (98, 46, 98, 52): '(w, h)'}, {}), '(img, matrix, (w, h))', True, 'import cv2 as cv\n'), ((99, 8, 99, 26), 'cv2.imshow', 'cv.imshow', ({(99, 18, 99, 20): '""""""', (99, 22, 99, 25): 'img'}, {}), "('', img)", True, 'import cv2 as cv\n'), ((102, 14, 102, 49), 'cv2.cvtColor', 'cv.cvtColor', ({(102, 26, 102, 29): 'img', (102, 31, 102, 48): 'cv.COLOR_BGR2GRAY'}, {}), '(img, cv.COLOR_BGR2GRAY)', True, 'import cv2 as cv\n'), ((103, 14, 103, 100), 'cv2.adaptiveThreshold', 'cv.adaptiveThreshold', ({(103, 35, 103, 38): 'img', (103, 40, 103, 43): '255', (103, 45, 103, 70): 'cv.ADAPTIVE_THRESH_MEAN_C', (103, 72, 103, 92): 'cv.THRESH_BINARY_INV', (103, 94, 103, 96): '15', (103, 98, 103, 99): '9'}, {}), '(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.\n THRESH_BINARY_INV, 15, 9)', True, 'import cv2 as cv\n'), ((105, 17, 105, 67), 'cv2.getStructuringElement', 'cv.getStructuringElement', ({(105, 42, 105, 58): 'cv.MORPH_ELLIPSE', (105, 60, 105, 66): '(3, 3)'}, {}), '(cv.MORPH_ELLIPSE, (3, 3))', True, 'import cv2 as cv\n'), ((106, 14, 106, 57), 'cv2.morphologyEx', 'cv.morphologyEx', ({(106, 30, 106, 33): 'img', (106, 35, 106, 48): 'cv.MORPH_OPEN', (106, 50, 106, 56): 'kernel'}, {}), '(img, cv.MORPH_OPEN, kernel)', True, 'import cv2 as cv\n'), ((107, 14, 107, 35), 'cv2.medianBlur', 'cv.medianBlur', ({(107, 28, 107, 31): 'img', (107, 33, 107, 34): '3'}, {}), '(img, 3)', True, 'import cv2 as cv\n'), ((116, 30, 116, 86), 'cv2.findContours', 'cv.findContours', ({(116, 46, 116, 49): 'img', (116, 51, 116, 63): 'cv.RETR_LIST', (116, 65, 116, 85): 'cv.CHAIN_APPROX_NONE'}, {}), '(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)', True, 'import cv2 as cv\n'), ((149, 8, 149, 55), 'cv2.circle', 'cv.circle', ({(149, 18, 149, 26): 'img_copy', (149, 28, 149, 34): 'origin', (149, 36, 149, 37): '(4)', (149, 39, 149, 50): '(0, 0, 255)', (149, 52, 149, 54): '(-1)'}, {}), '(img_copy, origin, 4, (0, 0, 255), -1)', True, 'import cv2 as cv\n'), ((155, 8, 155, 36), 'cv2.imshow', 'cv.imshow', ({(155, 18, 155, 25): '"""Final"""', (155, 27, 155, 35): 'img_copy'}, {}), "('Final', img_copy)", True, 'import cv2 as cv\n'), ((123, 21, 123, 64), 'cv2.approxPolyDP', 'cv.approxPolyDP', ({(123, 37, 123, 48): 'contours[i]', (123, 50, 123, 57): 'epsilon', (123, 59, 123, 63): 'True'}, {}), '(contours[i], epsilon, True)', True, 'import cv2 as cv\n'), ((125, 34, 125, 57), 'cv2.boundingRect', 'cv.boundingRect', ({(125, 50, 125, 56): 'approx'}, {}), '(approx)', True, 'import cv2 as cv\n'), ((158, 11, 158, 25), 'cv2.waitKey', 'cv.waitKey', ({(158, 22, 158, 24): '(30)'}, {}), '(30)', True, 'import cv2 as cv\n'), ((122, 31, 122, 62), 'cv2.arcLength', 'cv.arcLength', ({(122, 44, 122, 55): 'contours[i]', (122, 57, 122, 61): '(True)'}, {}), '(contours[i], True)', True, 'import cv2 as cv\n'), ((130, 33, 130, 62), 'cv2.minEnclosingCircle', 'cv.minEnclosingCircle', ({(130, 55, 130, 61): 'approx'}, 
{}), '(approx)', True, 'import cv2 as cv\n'), ((140, 16, 140, 67), 'cv2.circle', 'cv.circle', ({(140, 26, 140, 34): 'img_copy', (140, 36, 140, 42): 'center', (140, 44, 140, 50): 'radius', (140, 52, 140, 63): '(0, 255, 0)', (140, 65, 140, 66): '(2)'}, {}), '(img_copy, center, radius, (0, 255, 0), 2)', True, 'import cv2 as cv\n'), ((117, 46, 117, 63), 'cv2.contourArea', 'cv.contourArea', ({(117, 61, 117, 62): 'x'}, {}), '(x)', True, 'import cv2 as cv\n'), ((143, 47, 143, 63), 'math.hypot', 'math.hypot', ({(143, 58, 143, 59): 'X', (143, 61, 143, 62): 'Y'}, {}), '(X, Y)', False, 'import math\n')] |
sdss/ObserverTools | sdssobstools/boss_data.py | 7f9949341edc91a79dac69d79e24af09e8558ffa | #!/usr/bin/env python3
"""
A tool to grab a single BOSS image and pull a few items from its header. It is
used in bin/sloan_log.py, but it could be used directly as well.
"""
import argparse
from pathlib import Path
from astropy.time import Time
import fitsio
class BOSSRaw:
"""A class to parse raw data from APOGEE. The purpose of collecting this
raw data is to future-proof things that need these ouptuts in case
things like autoschedulers change, which many libraries depend on. This
will hopefully help SDSS-V logging"""
def __init__(self, fil):
self.fil = fil
header = fitsio.read_header(fil)
self.dither = header['MGDPOS']
if not self.dither: # This key started working instead during SDSS-V
self.dither = header['POINTING'][0]
self.exp_time = int(header['EXPTIME'])
self.isot = Time(header['DATE-OBS']) # UTC
self.plate_id = header['PLATEID']
self.cart_id = header['CARTID']
self.exp_id = int(str(fil).split('-')[-1].split('.')[0])
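        # Exposure ID comes from the file name, e.g. sdR-b1-00123456.fit.gz (naming pattern assumed)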
self.lead = header['PLATETYP']
if 'Closed' in header['HARTMANN']:
self.hartmann = 'Closed'
self.flavor = header['FLAVOR'].capitalize()
elif 'Out' in header['HARTMANN']:
self.hartmann = 'Open'
self.flavor = header['FLAVOR'].capitalize()
self.hart_resids = []
else:
self.hartmann = header['HARTMANN']
self.flavor = 'Hart'
# self.seeing = header['SEEING']
# self.img_type = header['IMAGETYP']
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--today', action='store_true')
    parser.add_argument('-m', '--mjd',
                        help='If not today (-t), the mjd to search')
    parser.add_argument('-v', '--verbose', action='count', default=1,
                        help='Show details, can be stacked')
    args = parser.parse_args()
if args.today:
mjd_today = int(Time.now().sjd)
data_dir = '/data/spectro/{}/'.format(mjd_today)
elif args.mjd:
data_dir = '/data/spectro/{}/'.format(args.mjd)
else:
raise Exception('No date specified')
for path in Path(data_dir).rglob('sdR*.fit.gz'):
print(path)
if __name__ == '__main__':
main()
| [((47, 13, 47, 38), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((20, 17, 20, 40), 'fitsio.read_header', 'fitsio.read_header', ({(20, 36, 20, 39): 'fil'}, {}), '(fil)', False, 'import fitsio\n'), ((26, 20, 26, 44), 'astropy.time.Time', 'Time', ({(26, 25, 26, 43): "header['DATE-OBS']"}, {}), "(header['DATE-OBS'])", False, 'from astropy.time import Time\n'), ((62, 16, 62, 30), 'pathlib.Path', 'Path', ({(62, 21, 62, 29): 'data_dir'}, {}), '(data_dir)', False, 'from pathlib import Path\n'), ((55, 24, 55, 34), 'astropy.time.Time.now', 'Time.now', ({}, {}), '()', False, 'from astropy.time import Time\n')] |
bryan-lima/exercicios-livro-introd-prog-python-3ed | capitulo-08/ex13b.py | b6bc26dced9728510865704a80cb0d97f81f756b | # Modify Program 8.20 so that the user has three chances to guess the number
# The program ends when the user guesses right or misses three times
# Program 8.20 from the book, page 184
# Program 8.20 - Guessing the number
#
# import random
#
# n = random.randint(1, 10)
# x = int(input('Pick a number between 1 and 10: '))
# if x == n:
#     print('You got it!')
# else:
#     print('You missed.')
import random
numberRandom = random.randint(1, 10)
counter = 0
while True:
    chosenNumber = int(input('\nPick a number between 1 and 10: '))
    counter += 1
    if chosenNumber == numberRandom:
        print(f'Congratulations! You got it on attempt {counter} of 3!')
        break
    else:
        print('You missed!')
        if counter < 3:
            print(f'{3 - counter} attempt(s) left.')
        else:
            print('You are out of attempts! Better luck next time.')
            print(f'The drawn number was {numberRandom}.')
            break
| [((18, 15, 18, 36), 'random.randint', 'random.randint', ({(18, 30, 18, 31): '1', (18, 33, 18, 35): '10'}, {}), '(1, 10)', False, 'import random\n')] |
mariusfrinken/slogviz | slogviz/config.py | 0557eda336c257245eefe75699eb2479eb672ca1 | # -*- coding: utf-8 -*-
"""This sub module provides a global variable to check for checking if the non-interactive argument was set
Exported variable:
interactive -- False, if the main the non-interactive argument was set, True, if it was not set
"""
interactive = True | []
shb84/ATM76 | setup.py | 433179bde8935abeaf2ace52fe17dedb7a313487 | import setuptools
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setuptools.setup(
name="atm76",
version="0.1.0",
author="Steven H. Berguin",
author_email="[email protected]",
description="Differentiable 1976 Atmosphere",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/shb84/ATM76.git",
packages=setuptools.find_packages(),
package_data={},
install_requires=["numpy>=1.16", "genn"],
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.7',
)
| [((7, 20, 7, 42), 'os.path.dirname', 'path.dirname', ({(7, 33, 7, 41): '__file__'}, {}), '(__file__)', False, 'from os import path\n'), ((9, 10, 9, 38), 'os.path.join', 'path.join', ({(9, 20, 9, 24): 'here', (9, 26, 9, 37): '"""README.md"""'}, {}), "(here, 'README.md')", False, 'from os import path\n'), ((21, 13, 21, 39), 'setuptools.find_packages', 'setuptools.find_packages', ({}, {}), '()', False, 'import setuptools\n')] |
indigos33k3r/god-eye | agent/check_plugins/download_speed.py | b2af5ca6dbbd1b302dd5cda1fd0f0c0eee009e76 | import logging
import asyncio
from agent.check_plugins import AbstractCheckPlugin
# Since I don't know how to use the asyncio library for this,
# I use the Python requests library instead
import requests
import sys
import time
from datetime import datetime
logger = logging.getLogger(__name__)
class Download(AbstractCheckPlugin):
@asyncio.coroutine
def __call__(self, client, dnode):
logger.info('Test download speed : running...')
start = time.clock()
r = requests.get('http://{}'.format(dnode), stream=True)
        total_length = r.headers.get('content-length')
        if total_length is None:
            logger.error("Empty file!")
        else:
            total_length = int(total_length)
            array_speed = []
start_chunk = time.clock()
            for chunk in r.iter_content(1024):  # chunk size in bytes: 1 kB = 1024, 1 MB = 1048576
end_chunk = time.clock()
delta = end_chunk - start_chunk
start_chunk = end_chunk
if delta <= 0:
break
else:
array_speed.append(1//delta) # kB / s
end = time.clock()
yield from self._queue.put(self.get_result(dnode, start, end, total_length, array_speed))
@asyncio.coroutine
def get_result(self, url, start, end, total_length, array_speed):
"""Download and processing data.
Args:
url (str): url file download.
start (float): It's time which started download.
end (float): It's time which finished download.
total_length (int): size of file download (Byte)
array_speed (list): list download speeds for each 1024 Byte (kB/s)
Returns:
list with item 0 : json format for influxdb
"""
download_speed = total_length // (time.clock() - start)
accelerationS = self.acceleration(array_speed)
mean_deviationS = self.mean_deviation(array_speed, download_speed)
logger.info("Test download speed done!")
        # TODO: drop the time field, to check whether the DB still writes the data without it
return [self.output([self._snode, url, datetime.now(), download_speed, mean_deviationS, accelerationS])]
def acceleration(self, array_speed):
"""Caculate acceleration.
By get the highest speed in the first cycle.
Args:
array_speed (list): list download times for each 1024 Byte
Returns:
acceleration (kB/s) : the deviation between highest speed and first byte speed
"""
if len(array_speed) == 0:
return 0
speed_before = array_speed[0]
for speed in array_speed:
if speed < speed_before:
break
else:
speed_before = speed
return speed_before - array_speed[0]
def mean_deviation(self, array_speed, download_speed):
"""The mean deviation each downloads with download_speed.
Args:
array_speed (list): list download speeds for each kB.
download_speed (kB/s): mean download speed.
Returns:
mean_deviation (kB/s)
"""
if len(array_speed) == 0:
return 0
sum = 0
for speed in array_speed:
sum += abs(speed - download_speed)
return sum//len(array_speed)
def output(self, my_array):
"""Reformat my_array for inserting into influxdb.
Args:
            my_array (list): [self._snode, url, datetime.now(), download_speed, mean_deviationS, accelerationS]
Returns:
json format for influxdb
"""
return {
"measurement": "download_speed",
"tags": {
"snode": "{}".format(my_array[0]),
"dnode": "{}".format(my_array[1])
},
# "time": "{}".format(my_array[2]),
"fields": {
"speed": my_array[3],
"mean_deviation": my_array[4],
"acceleration": my_array[5]
}
}
| [((12, 9, 12, 36), 'logging.getLogger', 'logging.getLogger', ({(12, 27, 12, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((20, 16, 20, 28), 'time.clock', 'time.clock', ({}, {}), '()', False, 'import time\n'), ((27, 26, 27, 38), 'time.clock', 'time.clock', ({}, {}), '()', False, 'import time\n'), ((37, 18, 37, 30), 'time.clock', 'time.clock', ({}, {}), '()', False, 'import time\n'), ((29, 28, 29, 40), 'time.clock', 'time.clock', ({}, {}), '()', False, 'import time\n'), ((55, 42, 55, 54), 'time.clock', 'time.clock', ({}, {}), '()', False, 'import time\n'), ((60, 47, 60, 61), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n')] |
AyemunHossain/Django | Setup Rich Text Editor/mysite/main/urls.py | 0b1ed21fd6bd2906a4a1a220c029a2193658320f | from django.urls import path
from . import views
app_name = "main"
urlpatterns = [
path("",views.homepage,name="homepage")
] | [((7, 4, 7, 43), 'django.urls.path', 'path', (), '', False, 'from django.urls import path\n')] |
jcordell/keras-optimization | GA/train.py | cbda84bcf3b31928d829af4afc82af1886877341 | """
Utility used by the Network class to actually train.
Based on:
https://github.com/fchollet/keras/blob/master/examples/mnist_mlp.py
"""
from keras.datasets import mnist, cifar10
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.utils.np_utils import to_categorical
from keras.callbacks import EarlyStopping
import data_parser
import numpy as np
from keras.optimizers import Adadelta, Adam, rmsprop
from sklearn.metrics import mean_squared_error
# Helper: Early stopping.
early_stopper = EarlyStopping(patience=5)
def get_cifar10():
"""Retrieve the CIFAR dataset and process the data."""
# Set defaults.
nb_classes = 10
batch_size = 64
input_shape = (3072,)
# Get the data.
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
x_train = x_train.reshape(50000, 3072)
x_test = x_test.reshape(10000, 3072)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
# convert class vectors to binary class matrices
y_train = to_categorical(y_train, nb_classes)
y_test = to_categorical(y_test, nb_classes)
return (nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test)
def get_mnist():
"""Retrieve the MNIST dataset and process the data."""
# Set defaults.
nb_classes = 10
batch_size = 128
input_shape = (784,)
# Get the data.
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(60000, 784)
x_test = x_test.reshape(10000, 784)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
# convert class vectors to binary class matrices
y_train = to_categorical(y_train, nb_classes)
y_test = to_categorical(y_test, nb_classes)
return (nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test)
def get_dbtt():
data = data_parser.parse("DBTT_Data22.csv")
data_lwr = data_parser.parse("CD_LWR_clean8.csv")
X = ["N_log(eff fl p =.05)", "N_log(eff fl p =.4)", "N_log(eff fl p =.5)", "N(Cu)", "N(Ni)", "N(Mn)", "N(P)",
"N(Si)", "N( C )", "N_log(eff fl p =.1)", "N_log(eff fl p =.2)", "N_log(eff fl p =.3)", "N(Temp)"]
Y = "CD delta sigma"
data.set_x_features(X)
data.set_y_feature(Y)
data_lwr.set_y_feature(Y)
data_lwr.set_x_features(X)
data.add_exclusive_filter("Alloy", '=', 29)
data.add_exclusive_filter("Alloy", '=', 8)
data.add_exclusive_filter("Alloy", '=', 1)
data.add_exclusive_filter("Alloy", '=', 2)
data.add_exclusive_filter("Alloy", '=', 14)
data_lwr.add_exclusive_filter("Alloy", '=', 29)
data_lwr.add_exclusive_filter("Alloy", '=', 14)
x_test = np.array(data_lwr.get_x_data())
y_test = np.array(data_lwr.get_y_data())
x_train = np.array(data.get_x_data())
y_train = np.array(data.get_y_data())
#print("Training with", np.shape(y_train)[0], "data points")
nb_classes = -1
batch_size = np.shape(y_train)[0]
input_shape = (13,)
# normalize y columns
y_train = y_train/758.92
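    # 758.92 is assumed to be the maximum "CD delta sigma" in the training data; predictions are rescaled by the same factor in train_and_score()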
return (nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test)
def compile_model(network, nb_classes, input_shape):
"""Compile a sequential model.
Args:
network (dict): the parameters of the network
Returns:
a compiled network.
"""
# Get our network parameters.
nb_layers = network['nb_layers']
nb_neurons = network['nb_neurons']
activation = network['activation']
optimizer = network['optimizer']
learning_rate = network['learning_rate']
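    # Note: the regression branch (nb_classes == -1) always uses Adam with the evolved learning_rate;
    # the evolved optimizer string is only used for the classification branch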
model = Sequential()
# Add each layer.
for i in range(nb_layers):
# Need input shape for first layer.
if i == 0:
print(nb_neurons)
model.add(Dense(units=nb_neurons, activation=activation, input_shape=input_shape))
else:
print(nb_neurons)
model.add(Dense(nb_neurons, activation=activation))
model.add(Dropout(0.2)) # hard-coded dropout
# Output layer.
if(nb_classes == -1):
model.add(Dense(1, activation='linear'))
ADAM = Adam(lr=learning_rate)
model.compile(loss='mean_squared_error', metrics=['accuracy'], optimizer=ADAM)
else:
model.add(Dense(nb_classes, activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer=optimizer,
metrics=['accuracy'])
return model
def train_and_score(network, dataset):
"""Train the model, return test loss.
Args:
network (dict): the parameters of the network
dataset (str): Dataset to use for training/evaluating
"""
if dataset == 'cifar10':
nb_classes, batch_size, input_shape, x_train, \
x_test, y_train, y_test = get_cifar10()
elif dataset == 'mnist':
nb_classes, batch_size, input_shape, x_train, \
x_test, y_train, y_test = get_mnist()
elif dataset == 'dbtt':
nb_classes, batch_size, input_shape, x_train, \
x_test, y_train, y_test = get_dbtt()
model = compile_model(network, nb_classes, input_shape)
if dataset == 'dbtt':
model.fit(x_train, y_train, epochs=10, batch_size=1406, verbose=0)
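        # batch_size=1406 appears to hard-code the full DBTT training-set size (cf. get_dbtt), i.e. full-batch training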
y_predict = model.predict(x_test) * 758.92 # todo way to not hardcode this?
rms = np.sqrt(mean_squared_error(y_test, y_predict))
print(rms)
return rms
else:
model.fit(x_train, y_train,
batch_size=batch_size,
epochs=10000, # using early stopping, so no real limit
verbose=0,
validation_data=(x_test, y_test),
callbacks=[early_stopper])
score = model.evaluate(x_test, y_test, verbose=0)
return score[1] # 1 is accuracy. 0 is loss.
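# Usage sketch (hedged): 'mnist'/'cifar10' return an accuracy in [0, 1], while
# 'dbtt' returns an RMS error and needs the two CSV files parsed in get_dbtt.
#
#   score = train_and_score(network, dataset='mnist')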
| [((19, 16, 19, 41), 'keras.callbacks.EarlyStopping', 'EarlyStopping', (), '', False, 'from keras.callbacks import EarlyStopping\n'), ((29, 43, 29, 62), 'keras.datasets.cifar10.load_data', 'cifar10.load_data', ({}, {}), '()', False, 'from keras.datasets import mnist, cifar10\n'), ((38, 14, 38, 49), 'keras.utils.np_utils.to_categorical', 'to_categorical', ({(38, 29, 38, 36): 'y_train', (38, 38, 38, 48): 'nb_classes'}, {}), '(y_train, nb_classes)', False, 'from keras.utils.np_utils import to_categorical\n'), ((39, 13, 39, 47), 'keras.utils.np_utils.to_categorical', 'to_categorical', ({(39, 28, 39, 34): 'y_test', (39, 36, 39, 46): 'nb_classes'}, {}), '(y_test, nb_classes)', False, 'from keras.utils.np_utils import to_categorical\n'), ((51, 43, 51, 60), 'keras.datasets.mnist.load_data', 'mnist.load_data', ({}, {}), '()', False, 'from keras.datasets import mnist, cifar10\n'), ((60, 14, 60, 49), 'keras.utils.np_utils.to_categorical', 'to_categorical', ({(60, 29, 60, 36): 'y_train', (60, 38, 60, 48): 'nb_classes'}, {}), '(y_train, nb_classes)', False, 'from keras.utils.np_utils import to_categorical\n'), ((61, 13, 61, 47), 'keras.utils.np_utils.to_categorical', 'to_categorical', ({(61, 28, 61, 34): 'y_test', (61, 36, 61, 46): 'nb_classes'}, {}), '(y_test, nb_classes)', False, 'from keras.utils.np_utils import to_categorical\n'), ((66, 11, 66, 47), 'data_parser.parse', 'data_parser.parse', ({(66, 29, 66, 46): '"""DBTT_Data22.csv"""'}, {}), "('DBTT_Data22.csv')", False, 'import data_parser\n'), ((67, 15, 67, 53), 'data_parser.parse', 'data_parser.parse', ({(67, 33, 67, 52): '"""CD_LWR_clean8.csv"""'}, {}), "('CD_LWR_clean8.csv')", False, 'import data_parser\n'), ((114, 12, 114, 24), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential\n'), ((90, 17, 90, 34), 'numpy.shape', 'np.shape', ({(90, 26, 90, 33): 'y_train'}, {}), '(y_train)', True, 'import numpy as np\n'), ((131, 15, 131, 37), 'keras.optimizers.Adam', 'Adam', (), '', False, 'from keras.optimizers import Adadelta, Adam, rmsprop\n'), ((125, 18, 125, 30), 'keras.layers.Dropout', 'Dropout', ({(125, 26, 125, 29): '(0.2)'}, {}), '(0.2)', False, 'from keras.layers import Dense, Dropout\n'), ((130, 18, 130, 47), 'keras.layers.Dense', 'Dense', (), '', False, 'from keras.layers import Dense, Dropout\n'), ((134, 18, 134, 57), 'keras.layers.Dense', 'Dense', (), '', False, 'from keras.layers import Dense, Dropout\n'), ((163, 22, 163, 59), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', ({(163, 41, 163, 47): 'y_test', (163, 49, 163, 58): 'y_predict'}, {}), '(y_test, y_predict)', False, 'from sklearn.metrics import mean_squared_error\n'), ((121, 22, 121, 93), 'keras.layers.Dense', 'Dense', (), '', False, 'from keras.layers import Dense, Dropout\n'), ((124, 22, 124, 62), 'keras.layers.Dense', 'Dense', (), '', False, 'from keras.layers import Dense, Dropout\n')] |
rolandgeider/OpenSlides | tests/integration/agenda/test_models.py | 331141c17cb23da26e377d4285efdb4a50753a59 | from openslides.agenda.models import Item
from openslides.core.models import CustomSlide
from openslides.utils.test import TestCase
class TestItemManager(TestCase):
def test_get_root_and_children_db_queries(self):
"""
Test that get_root_and_children needs only one db query.
"""
for i in range(10):
CustomSlide.objects.create(title='item{}'.format(i))
with self.assertNumQueries(1):
Item.objects.get_root_and_children()
| [((15, 12, 15, 48), 'openslides.agenda.models.Item.objects.get_root_and_children', 'Item.objects.get_root_and_children', ({}, {}), '()', False, 'from openslides.agenda.models import Item\n')] |
mbjahnoon/ssl_context_builder | ssl_context_builder/http_impl/requests_wrapper/secure_session.py | e73530f900b56710c705675e8e657f0bd17f7c07 | import weakref
import os
import requests
import ssl
from ssl import SSLContext
import logging
from ssl_context_builder.builder.builder import SslContextBuilder
from ssl_context_builder.http_impl.requests_wrapper.ssl_adapter import SslAdapter
class RequestsSecureSession:
def __init__(self, ssl_context: SSLContext):
"""
        This class creates a wrapper around the requests.Session object.
        It does the following:
        1. Disables consumption of the session's environment variables
        2. Loads the certificates provided with the ssl_context
        3. Expects the ssl_context to control the TLS communication
@param ssl_context: SSLContext
"""
self.cert_file_path = self._create_cert_file(ssl_context) # see note inside the function why not using tempfile
self._ssl_context = ssl_context
self.session = requests.Session()
self.session.trust_env = False
self.session.verify = self.cert_file_path
self.session.mount('https://', SslAdapter(ssl_context))
self._finalizer = weakref.finalize(
self, self._cleanup, self.cert_file_path, self.session,
warn_message="Implicitly cleaning up {!r}".format(self))
def __enter__(self):
return self
def __exit__(self, exc, value, tb):
self.cleanup()
    def cleanup(self):  # non-throwing: errors are logged, never raised
"""
Delete the cert file and close the session
@return:
"""
if self._finalizer.detach():
try:
os.remove(self.cert_file_path)
except:
logging.warning(f"Couldn't delete certs file {self.cert_file_path}")
try:
self.session.close()
except:
logging.warning("Couldn't close session")
@staticmethod
def _cleanup(name, session, warn_message):
try:
os.remove(name)
except:
logging.warning(f"Couldn't delete certs file {name}")
try:
session.close()
except:
logging.warning("Couldn't close session")
logging.warning(warn_message)
@classmethod
def _create_cert_file(cls, ssl_context: SSLContext):
"""
        This creates a CA bundle file extracted from the ssl_context.
        We create (and later delete) a real file rather than a tempfile because the file
        is opened again later in the requests flow, so it must be closed before it is used,
        and a tempfile is destroyed as soon as it is closed.
@param ssl_context: ssl_context
@return: path to the created ca_bundle file
"""
path = "certs.pem"
if os.path.exists(path):
path = cls._generate_cert_file_path("certs")
with open(path, mode="a+") as certs_file:
certs = ""
for der in ssl_context.get_ca_certs(True):
certs += f"{ssl.DER_cert_to_PEM_cert(der)}\n"
certs_file.write(certs)
return path
@classmethod
def _generate_cert_file_path(cls, file_name: str, num=1):
file_name_candidate = f"{file_name}({num}).pem"
if os.path.exists(file_name_candidate):
return cls._generate_cert_file_path(file_name, num + 1)
return file_name_candidate
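# Usage sketch (hedged; assumes an ssl.SSLContext built elsewhere, e.g. via
# SslContextBuilder or plain ssl.create_default_context -- adjust to the real
# builder API):
#
#   ctx = ssl.create_default_context()
#   with RequestsSecureSession(ctx) as secure:
#       resp = secure.session.get("https://example.com")
#   # the generated certs(.pem) bundle is deleted on exit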
| [((25, 23, 25, 41), 'requests.Session', 'requests.Session', ({}, {}), '()', False, 'import requests\n'), ((66, 8, 66, 37), 'logging.warning', 'logging.warning', ({(66, 24, 66, 36): 'warn_message'}, {}), '(warn_message)', False, 'import logging\n'), ((79, 11, 79, 31), 'os.path.exists', 'os.path.exists', ({(79, 26, 79, 30): 'path'}, {}), '(path)', False, 'import os\n'), ((91, 11, 91, 46), 'os.path.exists', 'os.path.exists', ({(91, 26, 91, 45): 'file_name_candidate'}, {}), '(file_name_candidate)', False, 'import os\n'), ((28, 39, 28, 62), 'ssl_context_builder.http_impl.requests_wrapper.ssl_adapter.SslAdapter', 'SslAdapter', ({(28, 50, 28, 61): 'ssl_context'}, {}), '(ssl_context)', False, 'from ssl_context_builder.http_impl.requests_wrapper.ssl_adapter import SslAdapter\n'), ((59, 12, 59, 27), 'os.remove', 'os.remove', ({(59, 22, 59, 26): 'name'}, {}), '(name)', False, 'import os\n'), ((47, 16, 47, 46), 'os.remove', 'os.remove', ({(47, 26, 47, 45): 'self.cert_file_path'}, {}), '(self.cert_file_path)', False, 'import os\n'), ((61, 12, 61, 65), 'logging.warning', 'logging.warning', ({(61, 28, 61, 64): 'f"""Couldn\'t delete certs file {name}"""'}, {}), '(f"Couldn\'t delete certs file {name}")', False, 'import logging\n'), ((65, 12, 65, 53), 'logging.warning', 'logging.warning', ({(65, 28, 65, 52): '"""Couldn\'t close session"""'}, {}), '("Couldn\'t close session")', False, 'import logging\n'), ((49, 16, 49, 84), 'logging.warning', 'logging.warning', ({(49, 32, 49, 83): 'f"""Couldn\'t delete certs file {self.cert_file_path}"""'}, {}), '(f"Couldn\'t delete certs file {self.cert_file_path}")', False, 'import logging\n'), ((54, 16, 54, 57), 'logging.warning', 'logging.warning', ({(54, 32, 54, 56): '"""Couldn\'t close session"""'}, {}), '("Couldn\'t close session")', False, 'import logging\n'), ((84, 28, 84, 57), 'ssl.DER_cert_to_PEM_cert', 'ssl.DER_cert_to_PEM_cert', ({(84, 53, 84, 56): 'der'}, {}), '(der)', False, 'import ssl\n')] |
jiaqiangwjq/python_workhouse | tiny_scripts/select_cifar_10.py | c0e739d8bc8ea3d318a0f916e9d79b1f4d4acad9 | '''
Select labels from the CIFAR-10 test batch and write them to a .csv file. The .csv file format:
class_index,data_index
3,0
8,1
8,2
...
'''
import pickle
import pandas as pd
file = r'E:\pycharm\LEARN\data\cifar-10\cifar-10-batches-py\test_batch'  # raw string: plain backslashes would form escape sequences
with open(file, 'rb') as f:
    batch = pickle.load(f, encoding='bytes')  # 'batch' avoids shadowing the built-in dict
batch_label = batch[b'batch_label']
labels = batch[b'labels']
data = batch[b'data']
filenames = batch[b'filenames']
length = len(labels)
data_index = [i for i in range(length)]
class_index = labels
csv_dict = {'class_index': class_index, 'data_index': data_index}
df = pd.DataFrame(csv_dict)
df.to_csv('selected_cifar10.csv', index=False) | [((32, 5, 32, 27), 'pandas.DataFrame', 'pd.DataFrame', ({(32, 18, 32, 26): 'csv_dict'}, {}), '(csv_dict)', True, 'import pandas as pd\n'), ((18, 11, 18, 43), 'pickle.load', 'pickle.load', (), '', False, 'import pickle\n')] |
disqus/codebox | codebox/scripts/fixture.py | 9f8e1a9c08c6a79bf3519782be483ff9763c4b4e | # Ghetto Fixtures
from codebox import app
from codebox.apps.auth.models import User
from codebox.apps.snippets.models import Snippet
from codebox.apps.organizations.models import Organization, OrganizationMember
from flask import g
client = app.test_client()
_ctx = app.test_request_context()
_ctx.push()
app.preprocess_request()
g.redis.flushdb()
User.objects.create(pk=1, name='zeeg')
Organization.objects.create(pk='disqus', name='DISQUS')
OrganizationMember.objects.create(org='disqus', user=1)
# Create sample snippets
# plaintext
Snippet.objects.create(org='disqus', user=1, lang='text', text = "Hello World!")
# python
Snippet.objects.create(org='disqus', user=1, lang='python', text = "print 'Disqus was here'")
# html
Snippet.objects.create(org='disqus', user=1, lang='html', text = '<h1>Look its HTML!</h1>')
# javascript
Snippet.objects.create(org='disqus', user=1, lang='javascript', text = "document.write('Di-squs')")
| [((9, 9, 9, 26), 'codebox.app.test_client', 'app.test_client', ({}, {}), '()', False, 'from codebox import app\n'), ((10, 7, 10, 33), 'codebox.app.test_request_context', 'app.test_request_context', ({}, {}), '()', False, 'from codebox import app\n'), ((12, 0, 12, 24), 'codebox.app.preprocess_request', 'app.preprocess_request', ({}, {}), '()', False, 'from codebox import app\n'), ((13, 0, 13, 17), 'flask.g.redis.flushdb', 'g.redis.flushdb', ({}, {}), '()', False, 'from flask import g\n'), ((15, 0, 15, 38), 'codebox.apps.auth.models.User.objects.create', 'User.objects.create', (), '', False, 'from codebox.apps.auth.models import User\n'), ((17, 0, 17, 55), 'codebox.apps.organizations.models.Organization.objects.create', 'Organization.objects.create', (), '', False, 'from codebox.apps.organizations.models import Organization, OrganizationMember\n'), ((19, 0, 19, 55), 'codebox.apps.organizations.models.OrganizationMember.objects.create', 'OrganizationMember.objects.create', (), '', False, 'from codebox.apps.organizations.models import Organization, OrganizationMember\n'), ((24, 0, 24, 80), 'codebox.apps.snippets.models.Snippet.objects.create', 'Snippet.objects.create', (), '', False, 'from codebox.apps.snippets.models import Snippet\n'), ((26, 0, 26, 93), 'codebox.apps.snippets.models.Snippet.objects.create', 'Snippet.objects.create', (), '', False, 'from codebox.apps.snippets.models import Snippet\n'), ((28, 0, 28, 91), 'codebox.apps.snippets.models.Snippet.objects.create', 'Snippet.objects.create', (), '', False, 'from codebox.apps.snippets.models import Snippet\n'), ((30, 0, 30, 99), 'codebox.apps.snippets.models.Snippet.objects.create', 'Snippet.objects.create', (), '', False, 'from codebox.apps.snippets.models import Snippet\n')] |
akashkj/commcare-hq | corehq/apps/linked_domain/tests/test_views.py | b00a62336ec26cea1477dfb8c048c548cc462831 | from unittest.mock import Mock, patch
from django.test import SimpleTestCase
from corehq.apps.domain.exceptions import DomainDoesNotExist
from corehq.apps.linked_domain.exceptions import (
DomainLinkAlreadyExists,
DomainLinkError,
DomainLinkNotAllowed,
)
from corehq.apps.linked_domain.views import link_domains
class LinkDomainsTests(SimpleTestCase):
@classmethod
def setUpClass(cls):
super(LinkDomainsTests, cls).setUpClass()
cls.upstream_domain = 'upstream'
cls.downstream_domain = 'downstream'
def test_exception_raised_if_domain_does_not_exist(self):
def mock_handler(domain):
return domain != self.downstream_domain
with patch('corehq.apps.linked_domain.views.domain_exists') as mock_domainexists,\
self.assertRaises(DomainDoesNotExist):
mock_domainexists.side_effect = mock_handler
link_domains(Mock(), self.upstream_domain, self.downstream_domain)
def test_exception_raised_if_domain_link_already_exists(self):
with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\
patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=Mock()),\
self.assertRaises(DomainLinkAlreadyExists):
link_domains(Mock(), self.upstream_domain, self.downstream_domain)
def test_exception_raised_if_domain_link_error_raised(self):
def mock_handler(downstream, upstream):
raise DomainLinkError
with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\
patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\
patch('corehq.apps.linked_domain.views.DomainLink.link_domains') as mock_linkdomains,\
self.assertRaises(DomainLinkError):
mock_linkdomains.side_effect = mock_handler
link_domains(Mock(), self.upstream_domain, self.downstream_domain)
def test_exception_raised_if_user_is_not_admin_in_both_domains(self):
with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\
patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\
patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=False),\
self.assertRaises(DomainLinkNotAllowed):
link_domains(Mock(), self.upstream_domain, self.downstream_domain)
def test_successful(self):
with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\
patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\
patch('corehq.apps.linked_domain.views.DomainLink.link_domains', return_value=True),\
patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=True):
domain_link = link_domains(Mock(), self.upstream_domain, self.downstream_domain)
self.assertIsNotNone(domain_link)
| [((26, 13, 26, 67), 'unittest.mock.patch', 'patch', ({(26, 19, 26, 66): '"""corehq.apps.linked_domain.views.domain_exists"""'}, {}), "('corehq.apps.linked_domain.views.domain_exists')", False, 'from unittest.mock import Mock, patch\n'), ((32, 13, 32, 86), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import Mock, patch\n'), ((41, 13, 41, 86), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import Mock, patch\n'), ((42, 13, 42, 95), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import Mock, patch\n'), ((43, 13, 43, 77), 'unittest.mock.patch', 'patch', ({(43, 19, 43, 76): '"""corehq.apps.linked_domain.views.DomainLink.link_domains"""'}, {}), "('corehq.apps.linked_domain.views.DomainLink.link_domains')", False, 'from unittest.mock import Mock, patch\n'), ((49, 13, 49, 86), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import Mock, patch\n'), ((50, 13, 50, 95), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import Mock, patch\n'), ((51, 13, 51, 110), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import Mock, patch\n'), ((56, 13, 56, 86), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import Mock, patch\n'), ((57, 13, 57, 95), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import Mock, patch\n'), ((58, 13, 58, 96), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import Mock, patch\n'), ((59, 13, 59, 109), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import Mock, patch\n'), ((29, 25, 29, 31), 'unittest.mock.Mock', 'Mock', ({}, {}), '()', False, 'from unittest.mock import Mock, patch\n'), ((35, 25, 35, 31), 'unittest.mock.Mock', 'Mock', ({}, {}), '()', False, 'from unittest.mock import Mock, patch\n'), ((46, 25, 46, 31), 'unittest.mock.Mock', 'Mock', ({}, {}), '()', False, 'from unittest.mock import Mock, patch\n'), ((53, 25, 53, 31), 'unittest.mock.Mock', 'Mock', ({}, {}), '()', False, 'from unittest.mock import Mock, patch\n'), ((60, 39, 60, 45), 'unittest.mock.Mock', 'Mock', ({}, {}), '()', False, 'from unittest.mock import Mock, patch\n'), ((33, 90, 33, 96), 'unittest.mock.Mock', 'Mock', ({}, {}), '()', False, 'from unittest.mock import Mock, patch\n')] |
Vamsi-TM/jubilant-train | LanguageBasics/functions/import_eg.py | a3ca0216e161ead4f59d923a36587098790beb5d | import function_exercise_01 as st
st.sandwich_toppings('meatballs', 'salad')
| [((3, 0, 3, 42), 'function_exercise_01.sandwich_toppings', 'st.sandwich_toppings', ({(3, 21, 3, 32): '"""meatballs"""', (3, 34, 3, 41): '"""salad"""'}, {}), "('meatballs', 'salad')", True, 'import function_exercise_01 as st\n')] |
golnazads/adsabs-pyingest | pyingest/parsers/zenodo.py | 37b37dd9e0d8a6e5cc34c59d30acd14e3381b48e | #!/usr/bin/python
#
#
from __future__ import absolute_import
import json
import re
import logging
from .datacite import DataCiteParser
class WrongPublisherException(Exception):
pass
class ZenodoParser(DataCiteParser):
def get_references(self, r):
# as of version 3.1 of datacite schema, "References" is not an
# allowed description type so Lars is shoving the references
# in a section labeled as "Other" as a json structure
references = []
for s in self._array(r.get('descriptions', {}).get('description', [])):
t = s.get('@descriptionType')
c = self._text(s)
if t == 'References':
# XXX not supported yet, but one can only hope...
references = c.split('\n')
elif t == 'Other':
try:
j = json.loads(c)
references = j.get('references', [])
except ValueError:
logging.warning(u'Ignoring unparsable "Other" description element: %s\n' % c)
return references
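    # Shape sketch (hedged) of the "Other" description payload the loop above
    # parses -- the key name comes from the j.get('references', []) call; the
    # citation strings are illustrative only:
    #
    #   {"references": ["first citation string", "second citation string"]}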
def get_abstract(self, r):
abs = super(ZenodoParser, self).get_abstract(r)
abs = re.sub(r'\s*<p>', '', abs)
abs = re.sub(r'</p>\s*$', '', abs)
return abs
def parse(self, fp, **kwargs):
"""Parses Zenodo's flavor of DataCite 3.1 schema, returns ADS tagged format"""
doc = super(self.__class__, self).parse(fp, **kwargs)
# r = self._resource
        return doc  # NOTE: early return -- the publisher check below is currently unreachable
# publisher
pub = doc.get('source')
if pub != 'Zenodo' and pub != 'ZENODO':
raise WrongPublisherException("Found publisher field of \"%s\" rather than Zenodo" % pub)
else:
doc['source'] = 'ZENODO'
return doc
#
# if __name__ == "__main__":
#
# # allows program to print utf-8 encoded output sensibly
# import codecs
# sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
# sys.stderr = codecs.getwriter('utf-8')(sys.stderr)
#
# parser = ZenodoParser()
# for file in sys.argv[1:]:
# d = None
# with open(file, 'r') as fp:
# d = parser.parse(fp)
# print json.dumps(d, indent=2)
| [((39, 14, 39, 40), 're.sub', 're.sub', ({(39, 21, 39, 30): '"""\\\\s*<p>"""', (39, 32, 39, 34): '""""""', (39, 36, 39, 39): 'abs'}, {}), "('\\\\s*<p>', '', abs)", False, 'import re\n'), ((40, 14, 40, 42), 're.sub', 're.sub', ({(40, 21, 40, 32): '"""</p>\\\\s*$"""', (40, 34, 40, 36): '""""""', (40, 38, 40, 41): 'abs'}, {}), "('</p>\\\\s*$', '', abs)", False, 'import re\n'), ((31, 24, 31, 37), 'json.loads', 'json.loads', ({(31, 35, 31, 36): 'c'}, {}), '(c)', False, 'import json\n'), ((34, 20, 34, 97), 'logging.warning', 'logging.warning', ({(34, 36, 34, 96): '(u\'Ignoring unparsable "Other" description element: %s\\n\' % c)'}, {}), '(u\'Ignoring unparsable "Other" description element: %s\\n\' % c)', False, 'import logging\n')] |
AmeyaDaddikar/vjtichain | src/fullnode.py | 2a9b68d475fe5cc2babdf3f5b463a685e8423f05 | import json
import time
from functools import lru_cache
from multiprocessing import Pool, Process
from threading import Thread, Timer
from typing import Any, Dict, List
from datetime import datetime
import hashlib
import inspect
import requests
import waitress
from bottle import BaseTemplate, Bottle, request, response, static_file, template, error
import utils.constants as consts
from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block
from authority import Authority
from utils.logger import logger, iplogger
from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db
from utils.utils import compress, decompress, dhash
from wallet import Wallet
app = Bottle()
BaseTemplate.defaults["get_url"] = app.get_url
LINE_PROFILING = False
BLOCKCHAIN = BlockChain()
PEER_LIST: List[Dict[str, Any]] = []
MY_WALLET = Wallet()
miner = Authority()
def mining_thread_task():
while True:
if not miner.is_mining() and not consts.NO_MINING:
miner.start_mining(BLOCKCHAIN.mempool, BLOCKCHAIN.active_chain, MY_WALLET)
time.sleep(consts.MINING_INTERVAL_THRESHOLD // 2)
def send_to_all_peers(url, data):
def request_task(peers, url, data):
for peer in peers:
try:
requests.post(get_peer_url(peer) + url, data=data, timeout=(5, 1))
except Exception as e:
logger.debug("Server: Requests: Error while sending data in process" + str(peer))
Process(target=request_task, args=(PEER_LIST, url, data), daemon=True).start()
def start_mining_thread():
time.sleep(5)
Thread(target=mining_thread_task, name="Miner", daemon=True).start()
def fetch_peer_list() -> List[Dict[str, Any]]:
try:
r = requests.post(consts.SEED_SERVER_URL, data={"port": consts.MINER_SERVER_PORT})
peer_list = json.loads(r.text)
return peer_list
except Exception as e:
logger.error("Could not connect to DNS Seed")
return []
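# Shape sketch (hedged) of the seed-server response: each peer dict must carry
# at least the "ip" and "port" keys read by get_peer_url below; the values are
# illustrative only.
#
#   [{"ip": "10.0.0.5", "port": 8000}, {"ip": "10.0.0.6", "port": 8001}]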
def get_peer_url(peer: Dict[str, Any]) -> str:
return "http://" + str(peer["ip"]) + ":" + str(peer["port"])
def greet_peer(peer: Dict[str, Any]) -> bool:
try:
url = get_peer_url(peer)
data = {"port": consts.MINER_SERVER_PORT, "version": consts.MINER_VERSION, "blockheight": BLOCKCHAIN.active_chain.length}
# Send a POST request to the peer
r = requests.post(url + "/greetpeer", data=data)
data = json.loads(r.text)
# Update the peer data in the peer list with the new data received from the peer.
if data.get("blockheight", None):
peer.update(data)
else:
logger.debug("Main: Peer data does not have Block Height")
return False
return True
except Exception as e:
logger.debug("Main: Could not greet peer" + str(e))
return False
def receive_block_from_peer(peer: Dict[str, Any], header_hash) -> Block:
r = requests.post(get_peer_url(peer) + "/getblock", data={"headerhash": header_hash})
return Block.from_json(decompress(r.text)).object()
def check_block_with_peer(peer, hhash):
r = requests.post(get_peer_url(peer) + "/checkblock", data={"headerhash": hhash})
result = json.loads(r.text)
if result:
return True
return False
def get_block_header_hash(height):
return dhash(BLOCKCHAIN.active_chain.header_list[height])
def sync(max_peer):
fork_height = BLOCKCHAIN.active_chain.length
r = requests.post(get_peer_url(max_peer) + "/getblockhashes", data={"myheight": fork_height})
hash_list = json.loads(decompress(r.text.encode()))
for hhash in hash_list:
block = receive_block_from_peer(max_peer, hhash)
if not BLOCKCHAIN.add_block(block):
logger.error("Sync: Block received is invalid, Cannot Sync")
break
return
# Periodically sync with all the peers
def sync_with_peers():
    global PEER_LIST  # without this declaration the module-level peer list was never updated
    try:
        PEER_LIST = fetch_peer_list()
new_peer_list = []
for peer in PEER_LIST:
if greet_peer(peer):
new_peer_list.append(peer)
PEER_LIST = new_peer_list
if PEER_LIST:
max_peer = max(PEER_LIST, key=lambda k: k["blockheight"])
logger.debug(f"Sync: Syncing with {get_peer_url(max_peer)}, he seems to have height {max_peer['blockheight']}")
sync(max_peer)
except Exception as e:
logger.error("Sync: Error: " + str(e))
Timer(consts.MINING_INTERVAL_THRESHOLD * 2, sync_with_peers).start()
def check_balance(pub_key: str) -> int:
current_balance = 0
for x, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items():
tx_out = utxo_list[0]
if tx_out.address == pub_key:
current_balance += int(tx_out.amount)
return int(current_balance)
def send_bounty(receiver_public_keys: List[str], amounts: List[int]):
current_balance = check_balance(MY_WALLET.public_key)
for key in receiver_public_keys:
if len(key) < consts.PUBLIC_KEY_LENGTH:
logger.debug("Invalid Public Key Length")
return False
total_amount = sum(amounts)
if current_balance < total_amount:
logger.debug("Insuficient balance")
elif MY_WALLET.public_key in receiver_public_keys:
logger.debug("Cannot send to myself")
else:
transaction = create_transaction(receiver_public_keys, amounts, MY_WALLET.public_key, message="Authority: Faucet Money")
transaction.sign(MY_WALLET)
logger.info("Wallet: Attempting to Send Transaction")
try:
r = requests.post(
"http://0.0.0.0:" + str(consts.MINER_SERVER_PORT) + "/newtransaction",
data=compress(transaction.to_json()),
timeout=(5, 1),
)
if r.status_code == 400:
logger.info("Wallet: Could not Send Transaction. Invalid Transaction")
else:
logger.info("Wallet: Transaction Sent, Wait for it to be Mined")
return True
except Exception as e:
logger.error("Wallet: Could not Send Transaction. Try Again." + str(e))
return False
def create_transaction(receiver_public_keys: List[str], amounts: List[int], sender_public_key, message="") -> Transaction:
vout = {}
vin = {}
current_amount = 0
total_amount = sum(amounts)
i = 0
for so, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items():
tx_out = utxo_list[0]
if current_amount >= total_amount:
break
if tx_out.address == sender_public_key:
current_amount += tx_out.amount
vin[i] = TxIn(payout=SingleOutput.from_json(so), pub_key=sender_public_key, sig="")
i += 1
for i, address in enumerate(receiver_public_keys):
vout[i] = TxOut(amount=amounts[i], address=address)
change = (current_amount - total_amount)
if change > 0:
vout[i + 1] = TxOut(amount=change, address=sender_public_key)
tx = Transaction(version=consts.MINER_VERSION, locktime=0, timestamp=int(time.time()), vin=vin, vout=vout, message=message)
return tx
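# Worked example (illustrative numbers): asking for amounts=[70] from a sender
# whose scanned UTXOs total 100 yields vout = {0: TxOut(amount=70, address=receiver),
# 1: TxOut(amount=30, address=sender)} -- index i + 1 always holds the change output.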
def get_ip(request):
return request.environ.get("HTTP_X_FORWARDED_FOR") or request.environ.get("REMOTE_ADDR")
def log_ip(request, fname):
client_ip = get_ip(request)
iplogger.info(f"{client_ip} : Called function {fname}")
@app.post("/checkBalance")
def checkingbalance():
log_ip(request, inspect.stack()[0][3])
data = request.json
public_key = data["public_key"]
logger.debug(public_key)
current_balance = check_balance(public_key)
return str(current_balance)
@app.post("/makeTransaction")
def make_transaction():
log_ip(request, inspect.stack()[0][3])
data = request.json
bounty = int(data["bounty"])
receiver_public_key = data["receiver_public_key"]
sender_public_key = data["sender_public_key"]
message = "No Message"
if "message" in data:
message = data["message"]
if len(receiver_public_key) < consts.PUBLIC_KEY_LENGTH:
logger.debug("Invalid Receiver Public Key")
response.status = 400
return "Invalid Receiver Public Key"
current_balance = check_balance(sender_public_key)
if current_balance < bounty:
logger.debug("Insufficient Balance to make Transaction")
response.status = 400
return "Insufficient Balance to make Transaction, need more " + str(bounty - current_balance)
elif sender_public_key == receiver_public_key:
logger.debug("Someone trying to send money to himself")
response.status = 400
return "Cannot send money to youself"
else:
transaction = create_transaction([receiver_public_key], [bounty], sender_public_key, message=message)
data = {}
data["send_this"] = transaction.to_json()
transaction.vin = {}
data["sign_this"] = transaction.to_json()
return json.dumps(data)
@app.post("/sendTransaction")
def send_transaction():
log_ip(request, inspect.stack()[0][3])
data = request.json
transaction = Transaction.from_json(data["transaction"]).object()
sig = data["signature"]
transaction.add_sign(sig)
logger.debug(transaction)
logger.info("Wallet: Attempting to Send Transaction")
try:
r = requests.post(
"http://0.0.0.0:" + str(consts.MINER_SERVER_PORT) + "/newtransaction",
data=compress(transaction.to_json()),
timeout=(5, 1),
)
if r.status_code == 400:
response.status = 400
logger.error("Wallet: Could not Send Transaction. Invalid transaction")
return "Try Again"
except Exception as e:
response.status = 400
logger.error("Wallet: Could not Send Transaction. Try Again." + str(e))
return "Try Again"
else:
logger.info("Wallet: Transaction Sent, Wait for it to be Mined")
return "Done"
@app.post("/transactionHistory")
def transaction_history():
log_ip(request, inspect.stack()[0][3])
data = request.json
public_key = data["public_key"]
tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(public_key)
return json.dumps(tx_hist)
@app.post("/greetpeer")
def greet_peer_f():
log_ip(request, inspect.stack()[0][3])
try:
peer = {}
peer["port"] = request.forms.get("port")
peer["ip"] = request.remote_addr
peer["time"] = time.time()
peer["version"] = request.forms.get("version")
peer["blockheight"] = request.forms.get("blockheight")
ADD_ENTRY = True
for entry in PEER_LIST:
ip = entry["ip"]
port = entry["port"]
if ip == peer["ip"] and port == peer["port"]:
ADD_ENTRY = False
if ADD_ENTRY:
PEER_LIST.append(peer)
logger.debug("Server: Greet, A new peer joined, Adding to List")
except Exception as e:
logger.debug("Server: Greet Error: " + str(e))
pass
data = {"version": consts.MINER_VERSION, "blockheight": BLOCKCHAIN.active_chain.length}
response.content_type = "application/json"
return json.dumps(data)
@lru_cache(maxsize=128)
def cached_get_block(headerhash: str) -> str:
if headerhash:
db_block = get_block_from_db(headerhash)
if db_block:
return compress(db_block)
else:
logger.error("ERROR CALLED GETBLOCK FOR NON EXISTENT BLOCK")
return "Invalid Hash"
@app.post("/getblock")
def getblock():
log_ip(request, inspect.stack()[0][3])
hhash = request.forms.get("headerhash")
return cached_get_block(hhash)
@app.post("/checkblock")
def checkblock():
log_ip(request, inspect.stack()[0][3])
headerhash = request.forms.get("headerhash")
if get_block_from_db(headerhash):
return json.dumps(True)
return json.dumps(False)
@app.post("/getblockhashes")
def send_block_hashes():
log_ip(request, inspect.stack()[0][3])
peer_height = int(request.forms.get("myheight"))
hash_list = []
for i in range(peer_height, BLOCKCHAIN.active_chain.length):
hash_list.append(dhash(BLOCKCHAIN.active_chain.header_list[i]))
return compress(json.dumps(hash_list)).decode()
@lru_cache(maxsize=16)
def process_new_block(request_data: bytes) -> str:
global BLOCKCHAIN
block_json = decompress(request_data)
if block_json:
try:
block = Block.from_json(block_json).object()
# Check if block already exists
if get_block_from_db(dhash(block.header)):
logger.info("Server: Received block exists, doing nothing")
return "Block already Received Before"
if BLOCKCHAIN.add_block(block):
logger.info("Server: Received a New Valid Block, Adding to Chain")
logger.debug("Server: Sending new block to peers")
# Broadcast block to other peers
send_to_all_peers("/newblock", request_data)
# TODO Make new chain/ orphan set for Block that is not added
except Exception as e:
logger.error("Server: New Block: invalid block received " + str(e))
return "Invalid Block Received"
# Kill Miner
t = Timer(1, miner.stop_mining)
t.start()
return "Block Received"
logger.error("Server: Invalid Block Received")
return "Invalid Block"
@app.post("/newblock")
def received_new_block():
log_ip(request, inspect.stack()[0][3])
return process_new_block(request.body.read())
@lru_cache(maxsize=16)
def process_new_transaction(request_data: bytes) -> str:
global BLOCKCHAIN
transaction_json = decompress(request_data)
if transaction_json:
try:
tx = Transaction.from_json(transaction_json).object()
# Add transaction to Mempool
if tx not in BLOCKCHAIN.mempool:
if BLOCKCHAIN.active_chain.is_transaction_valid(tx):
logger.debug("Valid Transaction received, Adding to Mempool")
BLOCKCHAIN.mempool.add(tx)
# Broadcast block to other peers
send_to_all_peers("/newtransaction", request_data)
else:
logger.debug("The transation is not valid, not added to Mempool")
return False, "Not Valid Transaction"
else:
return True, "Transaction Already received"
except Exception as e:
logger.error("Server: New Transaction: Invalid tx received: " + str(e))
return False, "Not Valid Transaction"
return True, "Done"
# Transactions for all active chains
@app.post("/newtransaction")
def received_new_transaction():
log_ip(request, inspect.stack()[0][3])
result, message = process_new_transaction(request.body.read())
if result:
response.status = 200
else:
response.status = 400
return message
question = '''What is greater than God,
more evil than the devil,
the poor have it,
the rich need it,
and if you eat it, you'll die?'''
actual_answer = "nothing"
@app.get("/")
def home():
log_ip(request, inspect.stack()[0][3])
message = ""
message_type = "info"
return template("index.html", message=message, message_type=message_type, question=question)
with open('uuids.json', 'r') as file:
uuid_json = file.read()
valid_ids = set(json.loads(uuid_json))
@app.post("/")
def puzzle():
log_ip(request, inspect.stack()[0][3])
message = ""
message_type = "info"
uuid = request.forms.get("uuid")
pubkey = request.forms.get("pubkey")
amounts = [300]
if uuid in valid_ids:
logger.debug("Valid Answer, Rewarding " + pubkey)
message = "Well Done!"
if check_balance(MY_WALLET.public_key) >= sum(amounts):
result = send_bounty([pubkey], amounts)
if result:
message = "Your reward is being sent, please wait for it to be mined!"
valid_ids.remove(uuid)
else:
message = "Some Error Occured, Contact Admin."
message_type = "warning"
else:
message = "Invalid Unique ID!"
message_type = "danger"
return template("index.html", message=message, message_type=message_type, question=question)
@app.get('/about')
def about():
return template("about.html")
# @app.get("/wallet")
# def wallet():
# log_ip(request, inspect.stack()[0][3])
# return template("wallet.html", message="", message_type="", pubkey=MY_WALLET.public_key)
# @app.post("/wallet")
# def wallet_post():
# log_ip(request, inspect.stack()[0][3])
# number = int(request.forms.get("number"))
# message = ""
# message_type = "info"
# try:
# receivers = []
# amounts = []
# total_amount = 0
# for i in range(0, number):
# receiver = str(request.forms.get("port" + str(i)))
# bounty = int(request.forms.get("amount" + str(i)))
# publickey = ""
# if len(receiver) < 10:
# wallet = get_wallet_from_db(receiver)
# if wallet is not None:
# publickey = wallet[1]
# else:
# message = "Error with the Receiver Port ID, try again."
# message_type = "danger"
# return template("wallet.html", message=message, message_type=message_type, pubkey=MY_WALLET.public_key)
# else:
# publickey = receiver
# total_amount += bounty
# receivers.append(publickey)
# amounts.append(bounty)
# if check_balance(MY_WALLET.public_key) >= total_amount:
# result = send_bounty(receivers, amounts)
# if result:
# message = "Your transaction is sent, please wait for it to be mined!"
# else:
# message = "Some Error Occured, Contact Admin."
# message_type = "warning"
# else:
# message = "You have Insufficient Balance!"
# message_type = "warning"
# return template("wallet.html", message=message, message_type=message_type, pubkey=MY_WALLET.public_key)
# except Exception as e:
# logger.error(e)
# message = "Some Error Occured. Please try again later."
# message_type = "danger"
# return template("wallet.html", message=message, message_type=message_type, pubkey=MY_WALLET.public_key)
@app.get("/checkmybalance")
def checkmybalance():
log_ip(request, inspect.stack()[0][3])
return str(check_balance(MY_WALLET.public_key))
@app.route("/static/<filename:path>", name="static")
def serve_static(filename):
log_ip(request, inspect.stack()[0][3])
return static_file(filename, root="static")
@app.get("/favicon.ico")
def get_favicon():
log_ip(request, inspect.stack()[0][3])
return static_file("favicon.ico", root="static")
@app.get("/info")
def sendinfo():
log_ip(request, inspect.stack()[0][3])
s = (
"No. of Blocks: "
+ str(BLOCKCHAIN.active_chain.length)
+ "<br>"
+ dhash(BLOCKCHAIN.active_chain.header_list[-1])
+ "<br>"
+ "Balance "
+ str(check_balance(MY_WALLET.public_key))
+ "<br>Public Key: <br>"
+ str(get_wallet_from_db(consts.MINER_SERVER_PORT)[1])
)
return s
def render_block_header(hdr):
html = "<table>"
html += "<tr><th>" + "Height" + "</th>"
html += "<td>" + str(hdr.height) + "</td></tr>"
html += "<tr><th>" + "Block Hash" + "</th>"
html += "<td>" + dhash(hdr) + "</td></tr>"
html += "<tr><th>" + "Prev Block Hash" + "</th>"
html += "<td>" + str(hdr.prev_block_hash) + "</td></tr>"
html += "<tr><th>" + "Merkle Root" + "</th>"
html += "<td>" + str(hdr.merkle_root) + "</td></tr>"
html += "<tr><th>" + "Timestamp" + "</th>"
html += (
"<td>"
+ str(datetime.fromtimestamp(hdr.timestamp).strftime("%d-%m-%Y %H:%M:%S"))
+ " ("
+ str(hdr.timestamp)
+ ")</td></tr>"
)
# get block
block = Block.from_json(get_block_from_db(dhash(hdr))).object()
html += "<tr><th>" + "Transactions" + "</th>"
html += "<td>" + str(len(block.transactions)) + "</td></tr>"
# for i, transaction in enumerate(block.transactions):
# s = "coinbase: " + str(transaction.is_coinbase) + ", fees: " + str(transaction.fees)
# html += "<tr><th>Transaction " + str(i) + "</th><td>" + str(s) + "</td></tr>"
html += "</table>"
return str(html)
@app.get("/chains")
def visualize_chain():
log_ip(request, inspect.stack()[0][3])
data = []
start = BLOCKCHAIN.active_chain.length - 10 if BLOCKCHAIN.active_chain.length > 10 else 0
headers = []
hdr_list = BLOCKCHAIN.active_chain.header_list
if len(hdr_list) > 200:
hdr_list = BLOCKCHAIN.active_chain.header_list[:100] + BLOCKCHAIN.active_chain.header_list[-100:]
for hdr in hdr_list:
d = {}
d["hash"] = dhash(hdr)[-5:]
d["time"] = hdr.timestamp
d["data"] = render_block_header(hdr)
headers.append(d)
data.append(headers)
return template("chains.html", data=data, start=start)
@app.get("/explorer")
def explorer():
log_ip(request, inspect.stack()[0][3])
prev = int(request.query.prev or 0)
if prev < 0:
prev = 0
hdr_list = list(reversed(BLOCKCHAIN.active_chain.header_list))
indexes = [i for i in range(prev * 8, (prev + 1) * 8) if i < len(hdr_list)]
blocks = [Block.from_json(get_block_from_db(dhash(hdr_list[i]))).object() for i in indexes]
transactions = list(BLOCKCHAIN.mempool)
return template("explorer.html", blocks=blocks, transactions=transactions, prev=prev)
@app.route("/block/<blockhash>", name="transaction")
def block(blockhash):
log_ip(request, inspect.stack()[0][3])
try:
block = Block.from_json(get_block_from_db(blockhash)).object()
except Exception as e:
logger.debug("BLOCK/blockhash: " + str(e))
return template("error.html")
return template("block.html", block=block)
@app.route("/transaction/<blockhash>/<txhash>", name="transaction")
def transaction(blockhash, txhash):
log_ip(request, inspect.stack()[0][3])
try:
block = Block.from_json(get_block_from_db(blockhash)).object()
tx = None
for t in block.transactions:
if t.hash() == txhash:
tx = t
except Exception as e:
logger.debug("Transaction/bhash/tx: " + str(e))
return template("error.html")
return template("transaction.html", tx=tx, block=block)
@app.route("/address/<pubkey:re:.+>", name="account")
def account(pubkey):
log_ip(request, inspect.stack()[0][3])
balance = check_balance(pubkey)
tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(pubkey)
return template("account.html", tx_hist=tx_hist, balance=balance, pubkey=pubkey)
@app.post("/mining")
def mining():
log_ip(request, inspect.stack()[0][3])
password = request.body.read().decode("utf-8")
hashed = b"\x11`\x1e\xdd\xd1\xb6\x80\x0f\xd4\xb0t\x90\x9b\xd3]\xa0\xcc\x1d\x04$\x8b\xb1\x19J\xaa!T5-\x9eJ\xfcI5\xc0\xbb\xf5\xb1\x9d\xba\xbef@\xa1)\xcf\x9b]c(R\x91\x0e\x9dMM\xb6\x94\xa9\xe2\x94il\x15"
dk = hashlib.pbkdf2_hmac("sha512", password.encode("utf-8"), b"forgeteverythingthatyouthinkyouknow", 200000)
if hashed == dk:
consts.NO_MINING = not consts.NO_MINING
logger.info("Mining: " + str(not consts.NO_MINING))
return "Mining Toggled, " + "NOT MINING" if consts.NO_MINING else "MINING"
else:
return "Password Mismatch," + "NOT MINING" if consts.NO_MINING else "MINING"
@app.route("/<url:re:.+>")
@error(403)
@error(404)
@error(505)
def error_handle(url="url", error="404"):
log_ip(request, inspect.stack()[0][3])
return template("error.html")
if __name__ == "__main__":
try:
if consts.NEW_BLOCKCHAIN:
logger.info("FullNode: Starting New Chain from Genesis")
BLOCKCHAIN.add_block(genesis_block)
else:
# Restore Blockchain
logger.info("FullNode: Restoring Existing Chain")
header_list = read_header_list_from_db()
BLOCKCHAIN.build_from_header_list(header_list)
# Sync with all my peers
sync_with_peers()
# Start mining Thread
Thread(target=start_mining_thread, daemon=True).start()
if consts.NO_MINING:
logger.info("FullNode: Not Mining")
# Start server
if LINE_PROFILING:
from wsgi_lineprof.middleware import LineProfilerMiddleware
with open("lineprof" + str(consts.MINER_SERVER_PORT) + ".log", "w") as f:
app = LineProfilerMiddleware(app, stream=f, async_stream=True)
waitress.serve(app, host="0.0.0.0", threads=16, port=consts.MINER_SERVER_PORT)
else:
waitress.serve(app, host="0.0.0.0", threads=16, port=consts.MINER_SERVER_PORT)
except KeyboardInterrupt:
miner.stop_mining()
| [((22, 6, 22, 14), 'bottle.Bottle', ({}, {}), '()', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((27, 13, 27, 25), 'core.BlockChain', 'BlockChain', ({}, {}), '()', False, 'from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block\n'), ((31, 12, 31, 20), 'wallet.Wallet', 'Wallet', ({}, {}), '()', False, 'from wallet import Wallet\n'), ((33, 8, 33, 19), 'authority.Authority', 'Authority', ({}, {}), '()', False, 'from authority import Authority\n'), ((327, 1, 327, 23), 'functools.lru_cache', 'lru_cache', (), '', False, 'from functools import lru_cache\n'), ((364, 1, 364, 22), 'functools.lru_cache', 'lru_cache', (), '', False, 'from functools import lru_cache\n'), ((401, 1, 401, 22), 'functools.lru_cache', 'lru_cache', (), '', False, 'from functools import lru_cache\n'), ((698, 1, 698, 11), 'bottle.error', 'error', ({(698, 7, 698, 10): '(403)'}, {}), '(403)', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((699, 1, 699, 11), 'bottle.error', 'error', ({(699, 7, 699, 10): '(404)'}, {}), '(404)', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((700, 1, 700, 11), 'bottle.error', 'error', ({(700, 7, 700, 10): '(505)'}, {}), '(505)', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((55, 4, 55, 17), 'time.sleep', ({(55, 15, 55, 16): '(5)'}, {}), '(5)', False, 'import time\n'), ((99, 13, 99, 31), 'json.loads', ({(99, 24, 99, 30): 'r.text'}, {}), '(r.text)', False, 'import json\n'), ((106, 11, 106, 61), 'utils.utils.dhash', ({(106, 17, 106, 60): 'BLOCKCHAIN.active_chain.header_list[height]'}, {}), '(BLOCKCHAIN.active_chain.header_list[height])', False, 'from utils.utils import compress, decompress, dhash\n'), ((211, 4, 211, 59), 'utils.logger.iplogger.info', ({(211, 18, 211, 58): 'f"{client_ip} : Called function {fname}"'}, {}), "(f'{client_ip} : Called function {fname}')", False, 'from utils.logger import logger, iplogger\n'), ((219, 4, 219, 28), 'utils.logger.logger.debug', ({(219, 17, 219, 27): 'public_key'}, {}), '(public_key)', False, 'from utils.logger import logger, iplogger\n'), ((268, 4, 268, 29), 'utils.logger.logger.debug', ({(268, 17, 268, 28): 'transaction'}, {}), '(transaction)', False, 'from utils.logger import logger, iplogger\n'), ((269, 4, 269, 57), 'utils.logger.logger.info', ({(269, 16, 269, 56): '"""Wallet: Attempting to Send Transaction"""'}, {}), "('Wallet: Attempting to Send Transaction')", False, 'from utils.logger import logger, iplogger\n'), ((295, 11, 295, 30), 'json.dumps', ({(295, 22, 295, 29): 'tx_hist'}, {}), '(tx_hist)', False, 'import json\n'), ((324, 11, 324, 27), 'json.dumps', ({(324, 22, 324, 26): 'data'}, {}), '(data)', False, 'import json\n'), ((341, 12, 341, 43), 'bottle.request.forms.get', ({(341, 30, 341, 42): '"""headerhash"""'}, {}), "('headerhash')", False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((348, 17, 348, 48), 'bottle.request.forms.get', ({(348, 35, 348, 47): '"""headerhash"""'}, {}), "('headerhash')", False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((349, 7, 349, 36), 'utils.storage.get_block_from_db', ({(349, 25, 349, 35): 'headerhash'}, {}), '(headerhash)', False, 'from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db\n'), ((351, 11, 351, 28), 'json.dumps', ({(351, 22, 351, 27): '(False)'}, {}), '(False)', False, 'import json\n'), ((367, 17, 367, 41), 'utils.utils.decompress', ({(367, 28, 367, 40): 'request_data'}, {}), '(request_data)', False, 'from utils.utils import compress, decompress, dhash\n'), ((391, 4, 391, 50), 'utils.logger.logger.error', ({(391, 17, 391, 49): '"""Server: Invalid Block Received"""'}, {}), "('Server: Invalid Block Received')", False, 'from utils.logger import logger, iplogger\n'), ((404, 23, 404, 47), 'utils.utils.decompress', ({(404, 34, 404, 46): 'request_data'}, {}), '(request_data)', False, 'from utils.utils import compress, decompress, dhash\n'), ((450, 11, 450, 96), 'bottle.template', 'template', (), '', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((455, 16, 455, 37), 'json.loads', ({(455, 27, 455, 36): 'uuid_json'}, {}), '(uuid_json)', False, 'import json\n'), ((463, 11, 463, 36), 'bottle.request.forms.get', ({(463, 29, 463, 35): '"""uuid"""'}, {}), "('uuid')", False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((464, 13, 464, 40), 'bottle.request.forms.get', ({(464, 31, 464, 39): '"""pubkey"""'}, {}), "('pubkey')", False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((482, 11, 482, 96), 'bottle.template', 'template', (), '', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((487, 11, 487, 33), 'bottle.template', ({(487, 20, 487, 32): '"""about.html"""'}, {}), "('about.html')", False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((553, 11, 553, 47), 'bottle.static_file', 'static_file', (), '', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((559, 11, 559, 52), 'bottle.static_file', 'static_file', (), '', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((633, 11, 633, 58), 'bottle.template', 'template', (), '', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((646, 11, 646, 89), 'bottle.template', 'template', (), '', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((657, 11, 657, 46), 'bottle.template', 'template', (), '', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((672, 11, 672, 59), 'bottle.template', 'template', (), '', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((680, 11, 680, 84), 'bottle.template', 'template', (), '', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((703, 11, 703, 33), 'bottle.template', ({(703, 20, 703, 32): '"""error.html"""'}, {}), "('error.html')", False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((40, 8, 40, 57), 'time.sleep', 'time.sleep', ({(40, 19, 40, 56): '(consts.MINING_INTERVAL_THRESHOLD // 2)'}, {}), '(consts.MINING_INTERVAL_THRESHOLD // 2)', False, 'import time\n'), ((61, 12, 61, 90), 'requests.post', (), '', False, 'import requests\n'), ((62, 20, 62, 38), 'json.loads', ({(62, 31, 62, 37): 'r.text'}, {}), '(r.text)', False, 'import json\n'), ((78, 12, 78, 56), 'requests.post', (), '', False, 'import requests\n'), ((79, 15, 79, 33), 'json.loads', ({(79, 26, 79, 32): 'r.text'}, {}), '(r.text)', False, 'import json\n'), ((157, 8, 157, 43), 'utils.logger.logger.debug', ({(157, 21, 157, 42): '"""Insuficient balance"""'}, {}), "('Insuficient balance')", False, 'from utils.logger import logger, iplogger\n'), ((196, 18, 196, 59), 'core.TxOut', (), '', False, 'from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block\n'), ((199, 22, 199, 69), 'core.TxOut', (), '', False, 'from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block\n'), ((206, 11, 206, 54), 'bottle.request.environ.get', ({(206, 31, 206, 53): '"""HTTP_X_FORWARDED_FOR"""'}, {}), "('HTTP_X_FORWARDED_FOR')", False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((206, 58, 206, 92), 'bottle.request.environ.get', ({(206, 78, 206, 91): '"""REMOTE_ADDR"""'}, {}), "('REMOTE_ADDR')", False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((237, 8, 237, 51), 'utils.logger.logger.debug', ({(237, 21, 237, 50): '"""Invalid Receiver Public Key"""'}, {}), "('Invalid Receiver Public Key')", False, 'from utils.logger import logger, iplogger\n'), ((244, 8, 244, 64), 'utils.logger.logger.debug', ({(244, 21, 244, 63): '"""Insufficient Balance to make Transaction"""'}, {}), "('Insufficient Balance to make Transaction')", False, 'from utils.logger import logger, iplogger\n'), ((285, 8, 285, 72), 'utils.logger.logger.info', ({(285, 20, 285, 71): '"""Wallet: Transaction Sent, Wait for it to be Mined"""'}, {}), "('Wallet: Transaction Sent, Wait for it to be Mined')", False, 'from utils.logger import logger, iplogger\n'), ((303, 23, 303, 48), 'bottle.request.forms.get', ({(303, 41, 303, 47): '"""port"""'}, {}), "('port')", False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((305, 23, 305, 34), 'time.time', ({}, {}), '()', False, 'import time\n'), ((306, 26, 306, 54), 'bottle.request.forms.get', ({(306, 44, 306, 53): '"""version"""'}, {}), "('version')", False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((307, 30, 307, 62), 'bottle.request.forms.get', ({(307, 48, 307, 61): '"""blockheight"""'}, {}), "('blockheight')", False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((330, 19, 330, 48), 'utils.storage.get_block_from_db', ({(330, 37, 330, 47): 'headerhash'}, {}), '(headerhash)', False, 'from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db\n'), ((350, 15, 350, 31), 'json.dumps', ({(350, 26, 350, 30): '(True)'}, {}), '(True)', False, 'import json\n'), ((357, 22, 357, 51), 'bottle.request.forms.get', ({(357, 40, 357, 50): '"""myheight"""'}, {}), "('myheight')", False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((388, 12, 388, 39), 'threading.Timer', ({(388, 18, 388, 19): '1', (388, 21, 388, 38): 'miner.stop_mining'}, {}), '(1, miner.stop_mining)', False, 'from threading import Thread, Timer\n'), ((398, 29, 398, 48), 'bottle.request.body.read', ({}, {}), '()', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((430, 46, 430, 65), 'bottle.request.body.read', ({}, {}), '()', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((468, 8, 468, 57), 'utils.logger.logger.debug', ({(468, 21, 468, 56): "('Valid Answer, Rewarding ' + pubkey)"}, {}), "('Valid Answer, Rewarding ' + pubkey)", False, 'from utils.logger import logger, iplogger\n'), ((51, 4, 51, 74), 'multiprocessing.Process', (), '', False, 'from multiprocessing import Pool, Process\n'), ((56, 4, 56, 64), 'threading.Thread', (), '', False, 'from threading import Thread, Timer\n'), ((65, 8, 65, 53), 'utils.logger.logger.error', ({(65, 21, 65, 52): '"""Could not connect to DNS Seed"""'}, {}), "('Could not connect to DNS Seed')", False, 'from utils.logger import logger, iplogger\n'), ((84, 12, 84, 70), 'utils.logger.logger.debug', ({(84, 25, 84, 69): '"""Main: Peer data does not have Block Height"""'}, {}), "('Main: Peer data does not have Block Height')", False, 'from utils.logger import logger, iplogger\n'), ((116, 12, 116, 72), 'utils.logger.logger.error', ({(116, 25, 116, 71): '"""Sync: Block received is invalid, Cannot Sync"""'}, {}), "('Sync: Block received is invalid, Cannot Sync')", False, 'from utils.logger import logger, iplogger\n'), ((137, 4, 137, 64), 'threading.Timer', ({(137, 10, 137, 46): '(consts.MINING_INTERVAL_THRESHOLD * 2)', (137, 48, 137, 63): 'sync_with_peers'}, {}), '(consts.MINING_INTERVAL_THRESHOLD * 2, sync_with_peers)', False, 'from threading import Thread, Timer\n'), ((153, 12, 153, 53), 'utils.logger.logger.debug', ({(153, 25, 153, 52): '"""Invalid Public Key Length"""'}, {}), "('Invalid Public Key Length')", False, 'from utils.logger import logger, iplogger\n'), ((159, 8, 159, 45), 'utils.logger.logger.debug', ({(159, 21, 159, 44): '"""Cannot send to myself"""'}, {}), "('Cannot send to myself')", False, 'from utils.logger import logger, iplogger\n'), ((163, 8, 163, 61), 'utils.logger.logger.info', ({(163, 20, 163, 60): '"""Wallet: Attempting to Send Transaction"""'}, {}), "('Wallet: Attempting to Send Transaction')", False, 'from utils.logger import logger, iplogger\n'), ((248, 8, 248, 63), 'utils.logger.logger.debug', ({(248, 21, 248, 62): '"""Someone trying to send money to himself"""'}, {}), "('Someone trying to send money to himself')", False, 'from utils.logger import logger, iplogger\n'), ((257, 15, 257, 31), 'json.dumps', ({(257, 26, 257, 30): 'data'}, {}), '(data)', False, 'import json\n'), ((264, 18, 264, 60), 'core.Transaction.from_json', ({(264, 40, 264, 59): "data['transaction']"}, {}), "(data['transaction'])", False, 'from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block\n'), ((278, 12, 278, 83), 'utils.logger.logger.error', ({(278, 25, 278, 82): '"""Wallet: Could not Send Transaction. Invalid transaction"""'}, {}), "('Wallet: Could not Send Transaction. Invalid transaction')", False, 'from utils.logger import logger, iplogger\n'), ((317, 12, 317, 76), 'utils.logger.logger.debug', ({(317, 25, 317, 75): '"""Server: Greet, A new peer joined, Adding to List"""'}, {}), "('Server: Greet, A new peer joined, Adding to List')", False, 'from utils.logger import logger, iplogger\n'), ((332, 19, 332, 37), 'utils.utils.compress', ({(332, 28, 332, 36): 'db_block'}, {}), '(db_block)', False, 'from utils.utils import compress, decompress, dhash\n'), ((334, 12, 334, 72), 'utils.logger.logger.error', ({(334, 25, 334, 71): '"""ERROR CALLED GETBLOCK FOR NON EXISTENT BLOCK"""'}, {}), "('ERROR CALLED GETBLOCK FOR NON EXISTENT BLOCK')", False, 'from utils.logger import logger, iplogger\n'), ((360, 25, 360, 70), 'utils.utils.dhash', ({(360, 31, 360, 69): 'BLOCKCHAIN.active_chain.header_list[i]'}, {}), '(BLOCKCHAIN.active_chain.header_list[i])', False, 'from utils.utils import compress, decompress, dhash\n'), ((586, 21, 586, 31), 'utils.utils.dhash', ({(586, 27, 586, 30): 'hdr'}, {}), '(hdr)', False, 'from utils.utils import compress, decompress, dhash\n'), ((628, 20, 628, 30), 'utils.utils.dhash', ({(628, 26, 628, 29): 'hdr'}, {}), '(hdr)', False, 'from utils.utils import compress, decompress, dhash\n'), ((656, 15, 656, 37), 'bottle.template', ({(656, 24, 656, 36): '"""error.html"""'}, {}), "('error.html')", False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((671, 15, 671, 37), 'bottle.template', ({(671, 24, 671, 36): '"""error.html"""'}, {}), "('error.html')", False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((686, 15, 686, 34), 'bottle.request.body.read', ({}, {}), '()', False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((709, 12, 709, 68), 'utils.logger.logger.info', ({(709, 24, 709, 67): '"""FullNode: Starting New Chain from Genesis"""'}, {}), "('FullNode: Starting New Chain from Genesis')", False, 'from utils.logger import logger, iplogger\n'), ((713, 12, 713, 61), 'utils.logger.logger.info', ({(713, 24, 713, 60): '"""FullNode: Restoring Existing Chain"""'}, {}), "('FullNode: Restoring Existing Chain')", False, 'from utils.logger import logger, iplogger\n'), ((714, 26, 714, 52), 'utils.storage.read_header_list_from_db', ({}, {}), '()', False, 'from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db\n'), ((723, 12, 723, 47), 'utils.logger.logger.info', ({(723, 24, 723, 46): '"""FullNode: Not Mining"""'}, {}), "('FullNode: Not Mining')", False, 'from utils.logger import logger, iplogger\n'), ((733, 12, 733, 90), 'waitress.serve', (), '', False, 'import waitress\n'), ((94, 27, 94, 45), 'utils.utils.decompress', ({(94, 38, 94, 44): 'r.text'}, {}), '(r.text)', False, 'from utils.utils import compress, decompress, dhash\n'), ((201, 77, 201, 88), 'time.time', ({}, {}), '()', False, 'import time\n'), ((216, 20, 216, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((226, 20, 226, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((262, 20, 262, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((291, 20, 291, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((300, 20, 300, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((340, 20, 340, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((347, 20, 347, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((356, 20, 356, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((361, 20, 361, 41), 'json.dumps', ({(361, 31, 361, 40): 'hash_list'}, {}), '(hash_list)', False, 'import json\n'), ((372, 33, 372, 52), 'utils.utils.dhash', ({(372, 39, 372, 51): 'block.header'}, {}), '(block.header)', False, 'from utils.utils import compress, decompress, dhash\n'), ((373, 16, 373, 75), 'utils.logger.logger.info', ({(373, 28, 373, 74): '"""Server: Received block exists, doing nothing"""'}, {}), "('Server: Received block exists, doing nothing')", False, 'from utils.logger import logger, iplogger\n'), ((376, 16, 376, 82), 'utils.logger.logger.info', ({(376, 28, 376, 81): '"""Server: Received a New Valid Block, Adding to Chain"""'}, {}), "('Server: Received a New Valid Block, Adding to Chain')", False, 'from utils.logger import logger, iplogger\n'), ((378, 16, 378, 66), 'utils.logger.logger.debug', ({(378, 29, 378, 65): '"""Server: Sending new block to peers"""'}, {}), "('Server: Sending new block to peers')", False, 'from utils.logger import logger, iplogger\n'), ((397, 20, 397, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((429, 20, 429, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((447, 20, 447, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((459, 20, 459, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((546, 20, 546, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((552, 20, 552, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((558, 20, 558, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((564, 20, 564, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((574, 14, 574, 58), 'utils.storage.get_wallet_from_db', ({(574, 33, 574, 57): 'consts.MINER_SERVER_PORT'}, {}), '(consts.MINER_SERVER_PORT)', False, 'from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db\n'), ((619, 20, 619, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((638, 20, 638, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((651, 20, 651, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((662, 20, 662, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((677, 20, 677, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((685, 20, 685, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((702, 20, 702, 35), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n'), ((721, 8, 721, 55), 'threading.Thread', (), '', False, 'from threading import Thread, Timer\n'), ((730, 22, 730, 78), 'wsgi_lineprof.middleware.LineProfilerMiddleware', (), '', False, 'from wsgi_lineprof.middleware import LineProfilerMiddleware\n'), ((731, 16, 731, 94), 'waitress.serve', (), '', False, 'import waitress\n'), ((171, 16, 171, 86), 'utils.logger.logger.info', ({(171, 28, 171, 85): '"""Wallet: Could not Send Transaction. Invalid Transaction"""'}, {}), "('Wallet: Could not Send Transaction. Invalid Transaction')", False, 'from utils.logger import logger, iplogger\n'), ((173, 16, 173, 80), 'utils.logger.logger.info', ({(173, 28, 173, 79): '"""Wallet: Transaction Sent, Wait for it to be Mined"""'}, {}), "('Wallet: Transaction Sent, Wait for it to be Mined')", False, 'from utils.logger import logger, iplogger\n'), ((192, 33, 192, 59), 'core.SingleOutput.from_json', ({(192, 56, 192, 58): 'so'}, {}), '(so)', False, 'from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block\n'), ((370, 20, 370, 47), 'core.Block.from_json', ({(370, 36, 370, 46): 'block_json'}, {}), '(block_json)', False, 'from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block\n'), ((407, 17, 407, 56), 'core.Transaction.from_json', ({(407, 39, 407, 55): 'transaction_json'}, {}), '(transaction_json)', False, 'from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block\n'), ((411, 20, 411, 81), 'utils.logger.logger.debug', ({(411, 33, 411, 80): '"""Valid Transaction received, Adding to Mempool"""'}, {}), "('Valid Transaction received, Adding to Mempool')", False, 'from utils.logger import logger, iplogger\n'), ((416, 20, 416, 85), 'utils.logger.logger.debug', ({(416, 33, 416, 84): '"""The transation is not valid, not added to Mempool"""'}, {}), "('The transation is not valid, not added to Mempool')", False, 'from utils.logger import logger, iplogger\n'), ((604, 46, 604, 56), 'utils.utils.dhash', ({(604, 52, 604, 55): 'hdr'}, {}), '(hdr)', False, 'from utils.utils import compress, decompress, dhash\n'), ((653, 32, 653, 60), 'utils.storage.get_block_from_db', ({(653, 50, 653, 59): 'blockhash'}, {}), '(blockhash)', False, 'from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db\n'), ((664, 32, 664, 60), 'utils.storage.get_block_from_db', ({(664, 50, 664, 59): 'blockhash'}, {}), '(blockhash)', False, 'from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db\n'), ((644, 48, 644, 66), 'utils.utils.dhash', ({(644, 54, 644, 65): 'hdr_list[i]'}, {}), '(hdr_list[i])', False, 'from utils.utils import compress, decompress, dhash\n'), ((569, 10, 569, 56), 'utils.utils.dhash', ({(569, 16, 569, 55): 'BLOCKCHAIN.active_chain.header_list[-1]'}, {}), '(BLOCKCHAIN.active_chain.header_list[-1])', False, 'from utils.utils import compress, decompress, dhash\n'), ((597, 14, 597, 51), 'datetime.datetime.fromtimestamp', ({(597, 37, 597, 50): 'hdr.timestamp'}, {}), '(hdr.timestamp)', False, 'from datetime import datetime\n')]
alexus37/MasterThesisCode | deepexplain/tf/v1_x/main.py | a7eada603686de75968acc8586fd307a91b0491b | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.framework import ops
from collections import OrderedDict
import warnings, logging
from deepexplain.tf.v1_x import constants
from deepexplain.tf.v1_x.baseClasses import GradientBasedMethod
from deepexplain.tf.v1_x.methods import DeepLIFTRescale, EpsilonLRP
from deepexplain.tf.v1_x.utils import original_grad
from deepexplain.tf.v1_x.methods import DummyZero, Saliency, GradientXInput, IntegratedGradients, EpsilonLRP, DeepLIFTRescale, Occlusion, ShapleySampling
attribution_methods = OrderedDict({
'zero': (DummyZero, 0),
'saliency': (Saliency, 1),
'grad*input': (GradientXInput, 2),
'intgrad': (IntegratedGradients, 3),
'elrp': (EpsilonLRP, 4),
'deeplift': (DeepLIFTRescale, 5),
'occlusion': (Occlusion, 6),
'shapley_sampling': (ShapleySampling, 7)
})
print(f'Using tf version = {tf.__version__}')
@ops.RegisterGradient("DeepExplainGrad")
def deepexplain_grad(op, grad):
# constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG
constants._GRAD_OVERRIDE_CHECKFLAG = 1
if constants._ENABLED_METHOD_CLASS is not None \
and issubclass(constants._ENABLED_METHOD_CLASS, GradientBasedMethod):
return constants._ENABLED_METHOD_CLASS.nonlinearity_grad_override(op, grad)
else:
return original_grad(op, grad)
class DeepExplain(object):
    def __init__(self, graph=None, session=None):
        # Resolve the session at call time; a default argument would be
        # evaluated only once, at import time, before any session exists.
        if session is None:
            session = tf.compat.v1.get_default_session()
self.method = None
self.batch_size = None
self.session = session
self.graph = session.graph if graph is None else graph
self.graph_context = self.graph.as_default()
self.override_context = self.graph.gradient_override_map(self.get_override_map())
self.keras_phase_placeholder = None
self.context_on = False
if self.session is None:
raise RuntimeError('DeepExplain: could not retrieve a session. Use DeepExplain(session=your_session).')
def __enter__(self):
# Override gradient of all ops created in context
self.graph_context.__enter__()
self.override_context.__enter__()
self.context_on = True
return self
def __exit__(self, type, value, traceback):
self.graph_context.__exit__(type, value, traceback)
self.override_context.__exit__(type, value, traceback)
self.context_on = False
def get_explainer(self, method, T, X, **kwargs):
if not self.context_on:
raise RuntimeError('Explain can be called only within a DeepExplain context.')
# global constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG
self.method = method
if self.method in attribution_methods:
method_class, method_flag = attribution_methods[self.method]
else:
raise RuntimeError('Method must be in %s' % list(attribution_methods.keys()))
if isinstance(X, list):
for x in X:
if 'tensor' not in str(type(x)).lower():
raise RuntimeError('If a list, X must contain only Tensorflow Tensor objects')
else:
if 'tensor' not in str(type(X)).lower():
raise RuntimeError('X must be a Tensorflow Tensor object or a list of them')
if 'tensor' not in str(type(T)).lower():
raise RuntimeError('T must be a Tensorflow Tensor object')
# logging.info('DeepExplain: running "%s" explanation method (%d)' % (self.method, method_flag))
self._check_ops()
constants._GRAD_OVERRIDE_CHECKFLAG = 0
constants._ENABLED_METHOD_CLASS = method_class
method = constants._ENABLED_METHOD_CLASS(T, X,
self.session,
keras_learning_phase=self.keras_phase_placeholder,
**kwargs)
if (issubclass(constants._ENABLED_METHOD_CLASS, DeepLIFTRescale) or issubclass(constants._ENABLED_METHOD_CLASS, EpsilonLRP)) \
and constants._GRAD_OVERRIDE_CHECKFLAG == 0:
warnings.warn('DeepExplain detected you are trying to use an attribution method that requires '
                          'gradient override but the original gradient was used instead. You might have forgotten to '
                          '(re)create your graph within the DeepExplain context. Results are not reliable!')
constants._ENABLED_METHOD_CLASS = None
constants._GRAD_OVERRIDE_CHECKFLAG = 0
self.keras_phase_placeholder = None
return method
def explain(self, method, T, X, xs, ys=None, batch_size=None, **kwargs):
explainer = self.get_explainer(method, T, X, **kwargs)
return explainer.run(xs, ys, batch_size)
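
    # A usage sketch (T, X are the caller's target and input tensors; xs is a
    # numpy batch -- illustrative names, not part of this module):
    #   with tf.compat.v1.Session() as sess:
    #       with DeepExplain(session=sess) as de:
    #           # (re)build the model here so its ops get the gradient override
    #           attributions = de.explain('elrp', T, X, xs)
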
@staticmethod
def get_override_map():
return dict((a, 'DeepExplainGrad') for a in constants.SUPPORTED_ACTIVATIONS)
def _check_ops(self):
"""
Heuristically check if any op is in the list of unsupported activation functions.
This does not cover all cases where explanation methods would fail, and must be improved in the future.
Also, check if the placeholder named 'keras_learning_phase' exists in the graph. This is used by Keras
and needs to be passed in feed_dict.
:return:
"""
g = tf.compat.v1.get_default_graph()
for op in g.get_operations():
if len(op.inputs) > 0 and not op.name.startswith('gradients'):
if op.type in constants.UNSUPPORTED_ACTIVATIONS:
warnings.warn('Detected unsupported activation (%s). '
'This might lead to unexpected or wrong results.' % op.type)
elif 'keras_learning_phase' in op.name:
self.keras_phase_placeholder = op.outputs[0] | [((16, 22, 25, 2), 'collections.OrderedDict', 'OrderedDict', ({(16, 34, 25, 1): "{'zero': (DummyZero, 0), 'saliency': (Saliency, 1), 'grad*input': (\n GradientXInput, 2), 'intgrad': (IntegratedGradients, 3), 'elrp': (\n EpsilonLRP, 4), 'deeplift': (DeepLIFTRescale, 5), 'occlusion': (\n Occlusion, 6), 'shapley_sampling': (ShapleySampling, 7)}"}, {}), "({'zero': (DummyZero, 0), 'saliency': (Saliency, 1),\n 'grad*input': (GradientXInput, 2), 'intgrad': (IntegratedGradients, 3),\n 'elrp': (EpsilonLRP, 4), 'deeplift': (DeepLIFTRescale, 5), 'occlusion':\n (Occlusion, 6), 'shapley_sampling': (ShapleySampling, 7)})", False, 'from collections import OrderedDict\n'), ((29, 1, 29, 40), 'tensorflow.python.framework.ops.RegisterGradient', 'ops.RegisterGradient', ({(29, 22, 29, 39): '"""DeepExplainGrad"""'}, {}), "('DeepExplainGrad')", False, 'from tensorflow.python.framework import ops\n'), ((35, 15, 35, 83), 'deepexplain.tf.v1_x.constants._ENABLED_METHOD_CLASS.nonlinearity_grad_override', 'constants._ENABLED_METHOD_CLASS.nonlinearity_grad_override', ({(35, 74, 35, 76): 'op', (35, 78, 35, 82): 'grad'}, {}), '(op, grad)', False, 'from deepexplain.tf.v1_x import constants\n'), ((37, 15, 37, 38), 'deepexplain.tf.v1_x.utils.original_grad', 'original_grad', ({(37, 29, 37, 31): 'op', (37, 33, 37, 37): 'grad'}, {}), '(op, grad)', False, 'from deepexplain.tf.v1_x.utils import original_grad\n'), ((41, 43, 41, 77), 'tensorflow.compat.v1.get_default_session', 'tf.compat.v1.get_default_session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((89, 17, 92, 48), 'deepexplain.tf.v1_x.constants._ENABLED_METHOD_CLASS', 'constants._ENABLED_METHOD_CLASS', (), '', False, 'from deepexplain.tf.v1_x import constants\n'), ((120, 12, 120, 44), 'tensorflow.compat.v1.get_default_graph', 'tf.compat.v1.get_default_graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((96, 12, 98, 109), 'warnings.warn', 'warnings.warn', ({(96, 26, 98, 108): '"""DeepExplain detected you are trying to use an attribution method that requires gradient override but the original gradient was used instead. You might have forgot to (re)create your graph within the DeepExlain context. Results are not reliable!"""'}, {}), "(\n 'DeepExplain detected you are trying to use an attribution method that requires gradient override but the original gradient was used instead. You might have forgot to (re)create your graph within the DeepExlain context. Results are not reliable!'\n )", False, 'import warnings, logging\n'), ((124, 20, 125, 96), 'warnings.warn', 'warnings.warn', ({(124, 34, 125, 95): "('Detected unsupported activation (%s). This might lead to unexpected or wrong results.'\n % op.type)"}, {}), "(\n 'Detected unsupported activation (%s). This might lead to unexpected or wrong results.'\n % op.type)", False, 'import warnings, logging\n')] |
robinupham/cnn_lensing | util/mem_usage.py | f5d4defc7e2c5b7a23744051da904526d04c27c8 | """
Get the memory usage of a Keras model.
From https://stackoverflow.com/a/46216013.
"""
def get_model_memory_usage(batch_size, model):
"""
Get the memory usage of a Keras model in GB.
From https://stackoverflow.com/a/46216013.
"""
import numpy as np
try:
from keras import backend as K
except ImportError:
from tensorflow.keras import backend as K
shapes_mem_count = 0
internal_model_mem_count = 0
    for layer in model.layers:
        layer_type = layer.__class__.__name__
        if layer_type == 'Model':  # nested models are measured recursively
            internal_model_mem_count += get_model_memory_usage(batch_size, layer)
        single_layer_mem = 1
        out_shape = layer.output_shape
if isinstance(out_shape, list):
out_shape = out_shape[0]
for s in out_shape:
if s is None:
continue
single_layer_mem *= s
shapes_mem_count += single_layer_mem
trainable_count = np.sum([K.count_params(p) for p in model.trainable_weights])
non_trainable_count = np.sum([K.count_params(p) for p in model.non_trainable_weights])
number_size = 4.0
if K.floatx() == 'float16':
number_size = 2.0
if K.floatx() == 'float64':
number_size = 8.0
total_memory = number_size * (batch_size * shapes_mem_count + trainable_count + non_trainable_count)
gbytes = np.round(total_memory / (1024.0 ** 3), 3) + internal_model_mem_count
return gbytes
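
if __name__ == "__main__":
    # Minimal usage sketch (assumes TensorFlow/Keras is installed): estimate
    # the footprint of a small dense model at batch size 32.
    from tensorflow import keras
    demo_model = keras.Sequential([keras.layers.Dense(10, input_shape=(100,))])
    print(get_model_memory_usage(32, demo_model), "GB")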
| [((40, 7, 40, 17), 'tensorflow.keras.backend.floatx', 'K.floatx', ({}, {}), '()', True, 'from tensorflow.keras import backend as K\n'), ((42, 7, 42, 17), 'tensorflow.keras.backend.floatx', 'K.floatx', ({}, {}), '()', True, 'from tensorflow.keras import backend as K\n'), ((46, 13, 46, 54), 'numpy.round', 'np.round', ({(46, 22, 46, 50): '(total_memory / 1024.0 ** 3)', (46, 52, 46, 53): '(3)'}, {}), '(total_memory / 1024.0 ** 3, 3)', True, 'import numpy as np\n'), ((36, 30, 36, 47), 'tensorflow.keras.backend.count_params', 'K.count_params', ({(36, 45, 36, 46): 'p'}, {}), '(p)', True, 'from tensorflow.keras import backend as K\n'), ((37, 34, 37, 51), 'tensorflow.keras.backend.count_params', 'K.count_params', ({(37, 49, 37, 50): 'p'}, {}), '(p)', True, 'from tensorflow.keras import backend as K\n')] |
glemaitre/hexrd | hexrd/distortion/distortionabc.py | b68b1ba72e0f480d29bdaae2adbd6c6e2380cc7c | import abc
class DistortionABC(metaclass=abc.ABCMeta):
maptype = None
@abc.abstractmethod
def apply(self, xy_in):
"""Apply distortion mapping"""
pass
@abc.abstractmethod
def apply_inverse(self, xy_in):
"""Apply inverse distortion mapping"""
pass
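
# A minimal sketch of a concrete subclass (illustrative only, not part of the
# package); the identity mapping trivially satisfies the interface above.
class NullDistortion(DistortionABC):
    """Distortion that maps coordinates to themselves."""

    maptype = "null"  # hypothetical map type identifier

    def apply(self, xy_in):
        return xy_in

    def apply_inverse(self, xy_in):
        return xy_in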
| [] |
statisticianinstilettos/recommender_metrics | setup.py | 82091ec53eb8b3527f95755006237658deb03c18 | import io
import os
from setuptools import setup
def read(file_name):
"""Read a text file and return the content as a string."""
with io.open(os.path.join(os.path.dirname(__file__), file_name),
encoding='utf-8') as f:
return f.read()
setup(
name='recmetrics',
url='https://github.com/statisticianinstilettos/recommender_metrics',
author='Claire Longo',
author_email='[email protected]',
packages=['recmetrics'],
install_requires=['funcsigs',
'numpy',
'pandas',
'plotly',
'scikit-learn',
'seaborn'],
license='MIT',
version='0.1.4',
description='Evaluation metrics for recommender systems',
long_description=read("README.md"),
long_description_content_type="text/markdown",
)
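
# Typical local install from a checkout of this repository (illustrative):
#   pip install -e .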
| [((9, 30, 9, 55), 'os.path.dirname', 'os.path.dirname', ({(9, 46, 9, 54): '__file__'}, {}), '(__file__)', False, 'import os\n')] |
wj-Mcat/model-getting-started | run_classifier.py | abe8c9df10b45841eeb38e859e680a37ec03fe8a | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""BERT finetuning runner."""
from __future__ import annotations, absolute_import
import os
from typing import Dict, List
from transformers import (
AutoTokenizer, BertTokenizer,
BertForSequenceClassification, BertConfig,
Trainer, TrainingArguments,
PreTrainedTokenizer
)
from transformers.configuration_utils import PretrainedConfig
from src.schema import (
InputExample, InputFeatures, Config
)
from src.data_process import (
AgNewsDataProcessor
)
from config import create_logger
logger = create_logger()
def convert_single_example(
example_index: int, example: InputExample, label2id: Dict[str, int], max_seq_length: int, tokenizer: BertTokenizer
) -> InputFeatures:
"""Converts a single `InputExample` into a single `InputFeatures`.
    example_index: used to log the first few examples for inspection
"""
    parameters = {
        "text": example.text_a,
        "add_special_tokens": True,
        # pad to the full max_length so every feature has a fixed width;
        # padding=True would only pad to the longest sequence in the batch
        "padding": "max_length",
        "truncation": True,
        "max_length": max_seq_length,
        "return_attention_mask": True,
        "return_token_type_ids": True,
        "return_length": True,
        "verbose": True
    }
if example.text_b:
parameters['text_pair'] = example.text_b
feature = tokenizer(**parameters)
input_feature = InputFeatures(
        input_ids=feature['input_ids'],  # the tokenizer returns 'input_ids', not 'token_ids'
attention_mask=feature['attention_mask'],
segment_ids=feature['token_type_ids'],
label_id=label2id[example.label],
is_real_example=True
)
if example_index < 5:
logger.info(f'*************************** Example {example_index} ***************************')
logger.info(example)
logger.info(input_feature)
logger.info('*************************** Example End ***************************')
return input_feature
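
# A minimal sketch of calling the helper above (the checkpoint name and the
# InputExample fields are assumptions for illustration, not necessarily this
# project's exact schema):
#   tok = BertTokenizer.from_pretrained('bert-base-uncased')
#   ex = InputExample(guid='demo-0', text_a='a short headline', label='World')
#   feat = convert_single_example(0, ex, {'World': 0}, 32, tok)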
def create_bert_for_sequence_classification_model(config: Config):
bert_config: BertConfig = BertConfig.from_pretrained(config.pretrained_model_name)
bert_config.num_labels = config.num_labels
    # Load pretrained weights for fine-tuning; constructing the model directly
    # from the config would start from randomly initialized weights.
    model = BertForSequenceClassification.from_pretrained(
        config.pretrained_model_name, config=bert_config)
return model
def create_model(config: Config):
"""Creates a classification model."""
models = {
"bert-for-sequence-classification": create_bert_for_sequence_classification_model,
}
return models[config.model_name](config)
def convert_examples_to_features(
examples, label_list: List[str],
max_seq_length: int, tokenizer: PreTrainedTokenizer
):
"""Convert a set of `InputExample`s to a list of `InputFeatures`."""
label2id = {label: index for index, label in enumerate(label_list)}
features = []
for (ex_index, example) in enumerate(examples):
if ex_index % 200 == 0:
logger.info("Writing example %d of %d" % (ex_index, len(examples)))
feature = convert_single_example(ex_index, example, label2id,
max_seq_length, tokenizer)
features.append(feature)
return features
class SequenceClassificationTrainer(Trainer):
def compute_loss(self, model, inputs, return_outputs=False):
labels = inputs.pop("labels")
outputs = model(**inputs)
return outputs.loss
def main():
# processors need to be updated
processors = {
'agnews-processor': AgNewsDataProcessor,
}
config: Config = Config.instance()
if not config.do_train and not config.do_eval and not config.do_predict:
raise ValueError(
"At least one of `do_train`, `do_eval` or `do_predict' must be True.")
bert_config = PretrainedConfig.from_pretrained(config.pretrained_model_name)
# 根据不同的任务,处理不同的数据集
task_name = config.task_name.lower()
if task_name not in processors:
raise ValueError("Task not found: %s" % (task_name))
processor = processors[task_name]()
label_list = processor.get_labels()
tokenizer = AutoTokenizer.from_pretrained(config.pretrained_model_name)
train_examples = None
num_train_steps = None
num_warmup_steps = None
if config.do_train:
train_examples: List[InputExample] = processor.get_train_examples(config.data_dir)
        # NOTE: assumed wiring (the original assignment was incomplete):
        # build the training features with the helper defined above.
        train_features = convert_examples_to_features(
            train_examples, label_list, config.max_seq_length, tokenizer)
num_train_steps = int(
len(train_examples) / config.train_batch_size * config.epochs
)
num_warmup_steps = int(num_train_steps * config.warmup_proportion)
model = create_model(config=config)
training_arguments = TrainingArguments(
output_dir=config.output_dir,
overwrite_output_dir=True,
)
trainer = SequenceClassificationTrainer(
        model=model,
        args=training_arguments,  # otherwise `training_arguments` above is never used
)
    # The blocks below are retained from the original TensorFlow Estimator BERT
    # runner; they expect `tf`, `estimator`, `PaddingInputExample`, and the
    # `file_based_*` helpers from that script to be available.
    # If TPU is not available, this will fall back to normal Estimator on CPU
    # or GPUs
if config.do_train:
train_file = os.path.join(config.output_dir, "train.tf_record")
file_based_convert_examples_to_features(
train_examples, label_list, config.max_seq_length, tokenizer, train_file)
tf.logging.info("***** Running training *****")
tf.logging.info(" Num examples = %d", len(train_examples))
tf.logging.info(" Batch size = %d", config.train_batch_size)
tf.logging.info(" Num steps = %d", num_train_steps)
train_input_fn = file_based_input_fn_builder(
input_file=train_file,
seq_length=config.max_seq_length,
is_training=True,
drop_remainder=True)
estimator.train(input_fn=train_input_fn, max_steps=num_train_steps)
if config.do_eval:
eval_examples = processor.get_dev_examples(config.data_dir)
num_actual_eval_examples = len(eval_examples)
if config.use_tpu:
# TPU requires a fixed batch size for all batches, therefore the number
# of examples must be a multiple of the batch size, or else examples
# will get dropped. So we pad with fake examples which are ignored
# later on. These do NOT count towards the metric (all tf.metrics
# support a per-instance weight, and these get a weight of 0.0).
while len(eval_examples) % config.eval_batch_size != 0:
eval_examples.append(PaddingInputExample())
eval_file = os.path.join(config.output_dir, "eval.tf_record")
file_based_convert_examples_to_features(
eval_examples, label_list, config.max_seq_length, tokenizer, eval_file)
tf.logging.info("***** Running evaluation *****")
tf.logging.info(" Num examples = %d (%d actual, %d padding)",
len(eval_examples), num_actual_eval_examples,
len(eval_examples) - num_actual_eval_examples)
tf.logging.info(" Batch size = %d", config.eval_batch_size)
# This tells the estimator to run through the entire set.
eval_steps = None
# However, if running eval on the TPU, you will need to specify the
# number of steps.
if config.use_tpu:
assert len(eval_examples) % config.eval_batch_size == 0
eval_steps = int(len(eval_examples) // config.eval_batch_size)
eval_drop_remainder = True if config.use_tpu else False
eval_input_fn = file_based_input_fn_builder(
input_file=eval_file,
seq_length=config.max_seq_length,
is_training=False,
drop_remainder=eval_drop_remainder)
result = estimator.evaluate(input_fn=eval_input_fn, steps=eval_steps)
output_eval_file = os.path.join(config.output_dir, "eval_results.txt")
with tf.gfile.GFile(output_eval_file, "w") as writer:
tf.logging.info("***** Eval results *****")
for key in sorted(result.keys()):
tf.logging.info(" %s = %s", key, str(result[key]))
writer.write("%s = %s\n" % (key, str(result[key])))
if config.do_predict:
predict_examples = processor.get_test_examples(config.data_dir)
num_actual_predict_examples = len(predict_examples)
if config.use_tpu:
# TPU requires a fixed batch size for all batches, therefore the number
# of examples must be a multiple of the batch size, or else examples
# will get dropped. So we pad with fake examples which are ignored
# later on.
while len(predict_examples) % config.predict_batch_size != 0:
predict_examples.append(PaddingInputExample())
predict_file = os.path.join(config.output_dir, "predict.tf_record")
file_based_convert_examples_to_features(predict_examples, label_list,
config.max_seq_length, tokenizer,
predict_file)
tf.logging.info("***** Running prediction*****")
tf.logging.info(" Num examples = %d (%d actual, %d padding)",
len(predict_examples), num_actual_predict_examples,
len(predict_examples) - num_actual_predict_examples)
tf.logging.info(" Batch size = %d", config.predict_batch_size)
predict_drop_remainder = True if config.use_tpu else False
predict_input_fn = file_based_input_fn_builder(
input_file=predict_file,
seq_length=config.max_seq_length,
is_training=False,
drop_remainder=predict_drop_remainder)
result = estimator.predict(input_fn=predict_input_fn)
output_predict_file = os.path.join(config.output_dir, "test_results.tsv")
with tf.gfile.GFile(output_predict_file, "w") as writer:
num_written_lines = 0
tf.logging.info("***** Predict results *****")
for (i, prediction) in enumerate(result):
probabilities = prediction["probabilities"]
if i >= num_actual_predict_examples:
break
output_line = "\t".join(
str(class_probability)
for class_probability in probabilities) + "\n"
writer.write(output_line)
num_written_lines += 1
assert num_written_lines == num_actual_predict_examples
if __name__ == "__main__":
main()
| [] |
TobyChen320/DS-Unit-3-Sprint-2-SQL-and-Databases | module2-sql-for-analysis/rpg_db.py | 306d2252b3756a501e2412fcb5eddbdebc16a362 | import sqlite3
import os
import psycopg2
from dotenv import load_dotenv
load_dotenv()
DB_NAME2 = os.getenv("DB_NAME3")
DB_USER2 = os.getenv("DB_USER3")
DB_PASS2 = os.getenv("DB_PASS3")
DB_HOST2 = os.getenv("DB_HOST3")
conn = psycopg2.connect(dbname=DB_NAME2,
user=DB_USER2,
password=DB_PASS2,
host=DB_HOST2)
cursor = conn.cursor()
sl_conn = sqlite3.connect("rpg_db.sqlite3")
sl_cursor = sl_conn.cursor()
characters = sl_cursor.execute('SELECT * FROM charactercreator_character LIMIT 10').fetchall()
print(characters)
create_character_table_query = '''
CREATE TABLE IF NOT EXISTS rpg_characters (
character_id SERIAL PRIMARY KEY,
name VARCHAR(30),
level INT,
exp INT,
hp INT,
strength INT,
intelligence INT,
dexterity INT,
wisdom INT
)
'''
cursor.execute(create_character_table_query)
conn.commit()
for character in characters:
insert_query = f''' INSERT INTO rpg_characters
(character_id, name, level, exp, hp, strength, intelligence, dexterity, wisdom) VALUES
{character}
'''
cursor.execute(insert_query)
conn.commit()
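
# A safer equivalent using a parameterized query (illustrative sketch; the
# driver handles quoting instead of relying on the tuple's repr):
#   insert_query = """INSERT INTO rpg_characters
#       (character_id, name, level, exp, hp, strength, intelligence, dexterity, wisdom)
#       VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)"""
#   cursor.executemany(insert_query, characters)
#   conn.commit()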
cursor.close()
conn.close()
| [((6, 0, 6, 13), 'dotenv.load_dotenv', 'load_dotenv', ({}, {}), '()', False, 'from dotenv import load_dotenv\n'), ((8, 11, 8, 32), 'os.getenv', 'os.getenv', ({(8, 21, 8, 31): '"""DB_NAME3"""'}, {}), "('DB_NAME3')", False, 'import os\n'), ((9, 11, 9, 32), 'os.getenv', 'os.getenv', ({(9, 21, 9, 31): '"""DB_USER3"""'}, {}), "('DB_USER3')", False, 'import os\n'), ((10, 11, 10, 32), 'os.getenv', 'os.getenv', ({(10, 21, 10, 31): '"""DB_PASS3"""'}, {}), "('DB_PASS3')", False, 'import os\n'), ((11, 11, 11, 32), 'os.getenv', 'os.getenv', ({(11, 21, 11, 31): '"""DB_HOST3"""'}, {}), "('DB_HOST3')", False, 'import os\n'), ((13, 7, 16, 38), 'psycopg2.connect', 'psycopg2.connect', (), '', False, 'import psycopg2\n'), ((20, 10, 20, 43), 'sqlite3.connect', 'sqlite3.connect', ({(20, 26, 20, 42): '"""rpg_db.sqlite3"""'}, {}), "('rpg_db.sqlite3')", False, 'import sqlite3\n')] |
moff-wildfire/sws-battlefy | sws_comp_wiki_gen.py | 04b12b54f91e450980c2c57eed57f0504abec1bb | import battlefy_data
import battlefy_wiki_linkings
from datetime import datetime
from operator import itemgetter
from pathlib import Path
import calcup_roster_tracking
def create_sidebar(data, wiki_name):
sidebar = '{{Infobox league' + '\n'
sidebar += '|liquipediatier=' + '\n'
sidebar += '|name=' + data['name'] + '\n'
sidebar += '|shortname=' + data['name'] + '\n'
sidebar += '|tickername=' + data['name'] + '\n'
sidebar += '|image=' + '\n'
sidebar += '|icon=' + '\n'
sidebar += '|series=' + '\n'
sidebar += '|organizer=' + data['organization']['name'] + '\n'
sidebar += '|organizer-link=' + '\n'
sidebar += '|sponsor=' + '\n'
sidebar += '|localcurrency=' + '\n'
sidebar += '|prizepool=' + data['prizes'] + '\n'
sidebar += '|type=Online' + '\n'
sidebar += '|platform=' + data['platform'] + '\n'
sidebar += '|country=' + '\n'
sidebar += '|format=' + '\n'
sidebar += '|patch=' + '\n'
sidebar += '|sdate=' + datetime.strptime(data['checkInStartTime'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime(
'%Y-%m-%d') + '\n'
try:
sidebar += '|edate=' + datetime.strptime(data['lastCompletedMatchAt'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime(
'%Y-%m-%d') + '\n'
except KeyError:
sidebar += '|edate=\n'
sidebar += '|web=' + '\n'
sidebar += '|bracket=https://battlefy.com/' + data['organization']['slug'] + '/' + data['slug'] + '/' \
+ data['_id'] + '/bracket-list' + '\n'
sidebar += '|rulebook=' + '\n'
sidebar += '|twitter=' + '\n'
sidebar += '|twitch=' + '\n'
sidebar += '|instagram=' + '\n'
sidebar += '|discord=' + '\n'
sidebar += '|map1=' + '\n'
sidebar += '|map2=' + '\n'
sidebar += '|map3=' + '\n'
sidebar += '|map4=' + '\n'
sidebar += '|map5=' + '\n'
sidebar += '|team_number=' + str(len(data['teams'])) + '\n'
sidebar += '|previous=' + '\n'
sidebar += '|next=' + '\n'
sidebar += '}}\n'
sidebar += '{{Upcoming matches tournament|' + wiki_name + '}}\n'
return sidebar
def create_event_format(data):
event_format = ''
for stage in data['stages']:
event_format += '* ' + stage['name'] + '\n'
if stage['bracket']['type'] == "swiss":
event_format += '** ' + str(stage['bracket']['roundsCount']) + '-round ' + stage['bracket']['type'] + '\n'
elif stage['bracket']['type'] == "elimination":
numGames = 0
rounds = 0
for match in stage['bracket']['series']:
if match['numGames'] != numGames:
if rounds:
event_format += '** ' + str(rounds) + '-round ' \
+ stage['bracket']['seriesStyle'] + str(numGames) + '\n'
rounds = 1
numGames = match['numGames']
else:
rounds += 1
if rounds:
event_format += '** ' + str(rounds) + '-round ' \
+ stage['bracket']['seriesStyle'] + str(numGames) + '\n'
return event_format
def rank_teams(data, bw_teams, sort_place=True, break_ties=False):
for stage in data['stages']:
for place, standing in enumerate(stage['standings']):
if 'place' in standing:
if 'place' not in data['teams'][standing['team']['_id']]:
data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place
else:
                    if break_ties:
                        # Add a fraction derived from the earlier stage's place
                        # so tied placements sort deterministically by prior results.
                        data['teams'][standing['team']['_id']]['place'] = \
                            standing['place'] + (1 - 1 / data['teams'][standing['team']['_id']]['place'])
else:
data['teams'][standing['team']['_id']]['place'] = standing['place']
else:
data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place
teams = list()
for team_id in data['teams']:
if 'place' in data['teams'][team_id]:
place = data['teams'][team_id]['place']
else:
place = 0
team_info = bw_teams.get_team_info(data['teams'][team_id]['persistentTeamID'], data['teams'][team_id]['name'])
teams.append((team_id,
data['teams'][team_id]['name'],
place,
data['teams'][team_id]['persistentTeamID'],
team_info['name']
))
if sort_place:
teams = sorted(teams, key=itemgetter(2, 4, 0))
else:
teams = sorted(teams, key=itemgetter(4, 0))
return teams
def create_participants(data, bw_players, bw_teams, dynamic=[], sort_place=True):
header = '{{TeamCardToggleButton}}\n'
teams_ordered = ''
# Use prior rounds as a tiebreaker for when multiple teams have the same place at the end
teams = rank_teams(data, bw_teams, sort_place)
dynamic_idx = 0
if dynamic:
header += '{{tabs dynamic\n'
header += '|name' + str(dynamic_idx+1) + '=' + dynamic[dynamic_idx]['tab_name'] + '\n'
header += '|This=1\n'
header += '|content' + str(dynamic_idx+1) + '=' + '\n'
header += '{{TeamCard columns start|cols=5|height=250}}\n'
for team_num, team in enumerate(teams):
if dynamic:
if team_num == dynamic[dynamic_idx]['count']:
teams_ordered += '{{TeamCard columns end}}\n'
dynamic_idx += 1
teams_ordered += '|name' + str(dynamic_idx + 1) + '=' + dynamic[dynamic_idx]['tab_name'] + '\n'
teams_ordered += '|content' + str(dynamic_idx+1) + '=' + '\n'
teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\n'
else:
if team_num == 0:
teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\n'
teams_table = '{{TeamCard\n'
team_info = bw_teams.get_team_info(team[3], team[1])
teams_table += '|team=' + team_info['name'] + '\n'
teams_table += '|image=' + team_info['image'] + '\n'
for idx, player in enumerate(data['teams'][team[0]]['players']):
player_tag = 'p' + str(idx + 1)
if player['_id'] in calcup_roster_tracking.eventid_to_missing_userid:
player['userID'] = calcup_roster_tracking.eventid_to_missing_userid[player['_id']]
player_info = bw_players.get_player_info(player['userID'], player['inGameName'])
teams_table += '|' + player_tag + '=' + player_info['name'] \
+ ' |' + player_tag + 'flag=' + player_info['flag']
if player_info['link']:
teams_table += ' |' + player_tag + 'link=' + player_info['link']
teams_table += '\n'
# teams_table += '|c= |cflag=\n'
# teams_table += '|qualifier=\n'
teams_table += '}}\n'
teams_ordered += teams_table
footer = '{{TeamCard columns end}}\n'
if dynamic:
footer += '}}\n'
return header + teams_ordered + footer
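
# Example `dynamic` argument for create_participants, mirroring the
# participant_tabs structure used in main() below:
#   [{'tab_name': 'Top 16', 'count': 16},
#    {'tab_name': 'Other Notable Participants', 'count': -1}]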
def create_swiss_table(stage, bw_teams):
dropped_style = 'drop'
swiss_table = '{{SwissTableLeague|rounds=' + str(stage['bracket']['roundsCount']) + '|diff=false\n'
for i in range(stage['bracket']['teamsCount']):
swiss_table += '|pbg' + str(i + 1) + '=down'
if (i + 1) % 8 == 0:
swiss_table += '\n'
if '\n' not in swiss_table[-1]:
swiss_table += '\n'
for rank, record in enumerate(stage['standings']):
if record['disqualified']:
swiss_table += '|bg' + str(rank + 1) + '=' + dropped_style + ''
else:
swiss_table += '|bg' + str(rank + 1) + '=down'
team_info = bw_teams.get_team_info(record['team']['persistentTeamID'], record['team']['name'])
swiss_table += '|team' + str(rank + 1) + '=' + team_info['teamteamplate']
swiss_table += '|temp_tie' + str(rank+1) + '=' + "{:7.3f}".format(record['opponentsMatchWinPercentage']) + '\n'
swiss_table += '}}\n'
return swiss_table
def create_swiss_matches(matches, teams, bw_teams):
swiss_match_table = ''
rounds = dict()
for match in matches:
match_line = create_match_maps(match, teams, bw_teams)
if not match_line:
continue
try:
rounds[str(match['roundNumber'])].append(match_line)
except KeyError:
rounds[str(match['roundNumber'])] = list()
rounds[str(match['roundNumber'])].append(match_line)
for i in range(1, len(rounds) + 1):
if i == 1:
swiss_match_table += '{{box|start|padding=2em}}\n'
else:
swiss_match_table += '{{box|break|padding=2em}}\n'
swiss_match_table += '====={{HiddenSort|Round ' + str(i) + '}}=====\n'
swiss_match_table += '{{MatchListStart|width=450px|title=Round ' + str(i) + ' Matches|matchsection=Round ' \
+ str(i) + '|hide=false}}\n'
for match in rounds[str(i)]:
swiss_match_table += match
swiss_match_table += '{{MatchListEnd}}\n'
swiss_match_table += '{{box|end}}\n'
return swiss_match_table
def create_elim_bracket(stage, teams, bw_teams):
if stage['bracket']['style'] == 'single':
bracket = '{{' + str(stage['bracket']['teamsCount']) + 'SETeamBracket\n'
elif stage['bracket']['style'] == 'double':
bracket = '{{' + str(stage['bracket']['teamsCount']) + 'DETeamBracket\n'
else:
print('Unknown stage style: ' + stage['bracket']['style'])
return
# todo handle double elimination brackets
# set up team number trackers
team_previous_round = dict()
# set up round-match count trackers
round_max_win_match_count = [1] * (len(stage['bracket']['series']) + 1)
round_max_win_match_count[0] = 0
round_max_loss_match_count = [1] * (len(stage['bracket']['series']) + 1)
round_max_loss_match_count[0] = 0
# matches = sorted(stage['matches'], key=itemgetter('matchNumber'))
matches = stage['matches']
for match in matches:
# TODO: this will need to get updated for non SE16 templates
# In DE brackets D means the team dropped down from the previous round
# In DE brackest W means the team won the previous round
# So there are rounds where D vs L happen such as R2D1 vs R2W5 and R2D2 vs R2W6
# Might want to key off match['inConsolationBracket']
# May also just need to keep track of match['next'] and build up the D and W that way instead
# Default first round to D and then future bracket type is defined by match['next']
# Not exactly sure how to address round_team_number, in a 8 team DE the third winners bracket round is
# called the 4th round and in a 16 team DE the 4th winners bracket round is called the 6th round
# https://liquipedia.net/rainbowsix/Template:4DETeamBracket/doc
# https://liquipedia.net/rainbowsix/Template:8DETeamBracket/doc
# https://liquipedia.net/rainbowsix/Template:16DETeamBracket/doc
# if match['matchType'] == 'winner':
# round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'],
# round_max_win_match_count[match['roundNumber']])
# elif match['matchType'] == 'loser':
# round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'],
# round_max_loss_match_count[match['roundNumber']])
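        # Worked example of the keys this loop emits: a team appearing for the
        # first time (or coming off a loss) fills a "D" slot such as
        # |R1D1team=..., while a team that won its previous series fills a "W"
        # slot such as |R2W1team=... / |R2W1score=...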
        if 'teamID' not in match['top']:
continue
if match['top']['teamID'] in team_previous_round:
if team_previous_round[match['top']['teamID']]:
bracket_type = 'W'
else:
bracket_type = 'D'
else:
bracket_type = 'D'
if match['matchType'] == 'winner':
round_match_offset = -2 * round_max_win_match_count[match['roundNumber'] - 1]
else:
round_match_offset = -2 * round_max_loss_match_count[match['roundNumber'] - 1] \
+ (round_max_win_match_count[match['roundNumber']]
- round_max_win_match_count[match['roundNumber'] - 1]) * 2
# Increment for next time
if match['matchType'] == 'winner':
round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'],
round_max_win_match_count[match['roundNumber']])
elif match['matchType'] == 'loser':
round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'],
round_max_loss_match_count[match['roundNumber']])
bracket_indicator = '|R' + str(match['roundNumber']) + bracket_type \
+ str(match['matchNumber'] * 2 - 1 + round_match_offset)
if 'teamID' in match['top']:
team_name = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'],
teams[match['top']['teamID']]['name'])['teamteamplate']
bracket += bracket_indicator + 'team=' + team_name + ' '
else:
bracket += bracket_indicator + 'literal=BYE '
if 'score' in match['top']:
bracket += bracket_indicator + 'score=' + str(match['top']['score']) + ' '
if 'winner' in match['top'] and match['top']['winner']:
bracket += bracket_indicator + 'win=1 '
team_previous_round[match['top']['teamID']] = True
else:
team_previous_round[match['top']['teamID']] = False
bracket += '\n'
if 'teamID' in match['bottom']:
if match['bottom']['teamID'] in team_previous_round:
if team_previous_round[match['bottom']['teamID']]:
bracket_type = 'W'
else:
bracket_type = 'D'
else:
bracket_type = 'D'
else:
bracket_type = 'D'
bracket_indicator = '|R' + str(match['roundNumber']) + bracket_type \
+ str(match['matchNumber'] * 2 + round_match_offset)
if 'teamID' in match['bottom']:
team_name = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'],
teams[match['bottom']['teamID']]['name'])['teamteamplate']
bracket += bracket_indicator + 'team=' + team_name + ' '
else:
bracket += bracket_indicator + 'literal=BYE '
if 'score' in match['bottom']:
bracket += bracket_indicator + 'score=' + str(match['bottom']['score']) + ' '
if 'winner' in match['bottom'] and match['bottom']['winner']:
bracket += bracket_indicator + 'win=2 '
team_previous_round[match['bottom']['teamID']] = True
elif 'teamID' in match['bottom']:
team_previous_round[match['bottom']['teamID']] = False
bracket += '\n'
bracket += '}}\n'
return bracket
def create_match_maps(match, teams, bw_teams):
match_line = ''
if not match['isComplete']:
return match_line
match_line = '{{MatchMaps\n'
match_line += '|date=\n'
if 'teamID' in match['top']:
team_top = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'],
teams[match['top']['teamID']]['name'])
elif match['isBye']:
team_top = bw_teams.get_team_info('0', 'BYE')
if 'teamID' in match['bottom']:
team_bot = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'],
teams[match['bottom']['teamID']]['name'])
elif match['isBye']:
team_bot = bw_teams.get_team_info('0', 'BYE')
match_line += '|team1=' + team_top['teamteamplate']
match_line += '|team2=' + team_bot['teamteamplate']
if 'isTie' in match and match['isTie']:
match_line += '|winner=0\n'
elif 'winner' in match['top'] and match['top']['winner']:
match_line += '|winner=1\n'
elif 'winner' in match['bottom'] and match['bottom']['winner']:
match_line += '|winner=2\n'
else:
match_line += '|winner=0\n'
if match['isBye']:
match_line += '|walkover=1'
match_line += '|games1='
if match['top']['winner']:
match_line += 'W'
else:
match_line += 'FF'
match_line += '|games2='
if 'winner' in match['bottom'] and match['bottom']['winner']:
match_line += 'W'
else:
match_line += 'FF'
else:
match_line += '|games1=' + str(match['top']['score'])
match_line += '|games2=' + str(match['bottom']['score']) + '\n'
match_line += '|details={{BracketMatchSummary\n'
match_line += '|date=|finished=true\n'
match_line += '|twitch= |youtube=\n'
match_line += '|vod=\n'
match_line += '}}\n'
match_line += '}}\n'
return match_line
def create_round_robin_tables(stage, teams, bw_teams, wiki_name, include_matches=True):
tables = ''
for idx, group in enumerate(stage['groups']):
        if idx == 0:  # the first group opens the box; later groups continue it
tables += '{{box|start|padding=2em}}\n'
else:
tables += '{{box|break|padding=2em}}\n'
tables += '===={{HiddenSort|Group ' + group['name'] + '}}====\n'
tables += '{{GroupTableLeague|title=Group ' + group['name'] + '|width=450px|show_p=false|date=|ties=true\n'
tables += '|tournament=' + wiki_name + '\n'
group_header = ''
group_table = ''
for pos, standing_id in enumerate(group['standingIDs']):
group_header += '|pbg' + str(pos + 1) + '=down'
for standing in stage['standings']:
if standing_id == standing['_id']:
# if standing['disqualified']:
# has_drop = True
team_info = bw_teams.get_team_info(teams[standing['team']['_id']]['persistentTeamID'],
teams[standing['team']['_id']]['name'])
group_table += '|bg' + str(pos + 1) + '=down|team' + str(pos + 1) + "=" \
+ team_info['teamteamplate'] + '\n'
group_header += '|tiebreaker1=series\n'
tables += group_header
tables += group_table
tables += "}}\n"
if include_matches:
match_table = '{{MatchListStart|title=Group ' + group['name'] + ' Matches|width=450px|hide=true}}\n'
for match in group['matches']:
match_line = create_match_maps(match, teams, bw_teams)
match_table += match_line
tables += match_table
tables += '{{MatchListEnd}}\n'
tables += '{{box|end}}\n'
return tables
def create_prize_pool(prize):
prize_pool = prize + '\n'
prize_pool += '{{prize pool start}}\n'
prize_pool += '{{prize pool slot |place=1 |usdprize=0 |tbd |lastvs1= |lastscore1= |lastvsscore1=}}\n'
prize_pool += '{{prize pool slot |place=2 |usdprize=0 |tbd |lastvs1= |lastscore1= |lastvsscore1=}}\n'
prize_pool += '{{prize pool slot |place=3-4 |usdprize=0\n'
prize_pool += '|tbd |lastvs1= |lastscore1= |lastvsscore1=\n'
prize_pool += '|tbd |lastvs2= |lastscore2= |lastvsscore2=\n'
prize_pool += '}}\n'
prize_pool += '{{prize pool slot |place=5-8 |usdprize=0\n'
prize_pool += '|tbd |lastvs1= |lastscore1= |lastvsscore1=\n'
prize_pool += '|tbd |lastvs2= |lastscore2= |lastvsscore2=\n'
prize_pool += '|tbd |lastvs3= |lastscore3= |lastvsscore3=\n'
prize_pool += '|tbd |lastvs4= |lastscore4= |lastvsscore4=\n'
prize_pool += '}}\n'
prize_pool += '{{Prize pool end}}\n'
return prize_pool
def main():
ccs_winter_minor_id = '5ff3354193edb53839d44d55'
ccs_winter_minor_wiki = 'Calrissian_Cup/Winter/Minor'
ccs_winter_major_id = '60019f8ebcc5ed46373408a1'
ccs_winter_major_wiki = 'Calrissian_Cup/Winter/Major'
ccs_spring_minor_id = '603c00fbfe4fb811b3168f5b'
ccs_spring_minor_wiki = 'Calrissian_Cup/Spring/Minor'
ccs_spring_major_id = '6061b764f68d8733c8455fcf'
ccs_spring_major_wiki = 'Calrissian_Cup/Spring/Major'
ccs_summer_minor_id = '60b41961d35b1411a7b31d64'
ccs_summer_minor_wiki = 'Calrissian_Cup/Summer/Minor'
ccs_summer_major_id = '60dd319012cb9c33c2f63868'
ccs_summer_major_wiki = 'Calrissian_Cup/Summer/Major'
ccs_fall_minor_id = '60fa26043ba15d73719669bd'
ccs_fall_minor_wiki = 'Calrissian_Cup/Fall/Minor'
ccs_fall_major_id = '61314505635fe17a14eafe03'
ccs_fall_major_wiki = 'Calrissian_Cup/Fall/Major'
ccs_championship_id = '6150dd2b0dd060282bebb0eb'
ccs_championship_wiki = 'Calrissian_Cup/Championship'
world_cup_id = '611dac6ecb6f6260d5f30b6e'
world_cup_wiki = 'World_Cup'
twin_suns_tourny_id = '60806876938bed74f6edea9e'
twin_suns_wiki = 'Twin_Suns_Tournament'
gsl_s1_id = '5ff4b388fd124e11b18e185d'
gsl_s1_wiki = 'Global_Squadrons_League/2021/Season_1'
tournament_id = world_cup_id
wiki_name = world_cup_wiki
participant_tabs = [
# {'tab_name': 'Top 16',
# 'count': 16},
# {'tab_name': 'Top 32',
# 'count': 32},
# {'tab_name': 'Other Notable Participants',
# 'count': -1},
]
bw_teams = battlefy_wiki_linkings.BattlefyWikiTeamLinkings()
bw_players = battlefy_wiki_linkings.BattlefyWikiPlayerLinkings()
event_data = battlefy_data.BattlefyData(tournament_id)
event_data.load_tournament_data()
# FORCE REDUCE TEAMS
event_data.reduce_teams()
event_path = event_data.get_tournament_data_path()
event_path.mkdir(parents=True, exist_ok=True)
filename = Path.joinpath(event_path, event_data.tournament_data['name'] + '.wiki')
with open(filename, 'w+', newline='\n', encoding='utf-8') as f:
display = '{{DISPLAYTITLE:' + event_data.tournament_data['name'] + '}}\n'
f.write(display)
sidebar = create_sidebar(event_data.tournament_data, wiki_name)
f.write(sidebar)
f.write('==About==\n')
f.write('===Format===\n')
event_format = create_event_format(event_data.tournament_data)
f.write(event_format)
f.write('===Broadcast Talent===\n')
f.write('===Prize Pool===\n')
prize_pool = create_prize_pool(event_data.tournament_data['prizes'])
f.write(prize_pool)
f.write('==Participants==\n')
teams = create_participants(event_data.tournament_data, bw_players, bw_teams,
dynamic=participant_tabs, sort_place=True)
f.write(teams)
f.write('==Results==\n')
for stage in event_data.tournament_data['stages']:
if stage['bracket']['type'] == 'swiss':
f.write('===Swiss Stage===\n')
f.write('====Swiss Standings====\n')
swiss_table = create_swiss_table(stage, bw_teams)
f.write(swiss_table)
f.write('====Swiss Match Results====\n')
swiss_matches = create_swiss_matches(stage['matches'], event_data.tournament_data['teams'], bw_teams)
f.write(swiss_matches)
elif stage['bracket']['type'] == 'elimination':
f.write('===Playoffs===\n')
bracket = create_elim_bracket(stage, event_data.tournament_data['teams'], bw_teams)
f.write(bracket)
elif stage['bracket']['type'] == 'roundrobin':
f.write('===' + stage['name'] + '===\n')
round_robin_tables = create_round_robin_tables(stage, event_data.tournament_data['teams'], bw_teams,
wiki_name, include_matches=True)
f.write(round_robin_tables)
else:
print('Unsupported bracket type of: ' + stage['bracket']['type'])
if __name__ == '__main__':
main()
| [((510, 15, 510, 64), 'battlefy_wiki_linkings.BattlefyWikiTeamLinkings', 'battlefy_wiki_linkings.BattlefyWikiTeamLinkings', ({}, {}), '()', False, 'import battlefy_wiki_linkings\n'), ((511, 17, 511, 68), 'battlefy_wiki_linkings.BattlefyWikiPlayerLinkings', 'battlefy_wiki_linkings.BattlefyWikiPlayerLinkings', ({}, {}), '()', False, 'import battlefy_wiki_linkings\n'), ((513, 17, 513, 58), 'battlefy_data.BattlefyData', 'battlefy_data.BattlefyData', ({(513, 44, 513, 57): 'tournament_id'}, {}), '(tournament_id)', False, 'import battlefy_data\n'), ((521, 15, 521, 86), 'pathlib.Path.joinpath', 'Path.joinpath', ({(521, 29, 521, 39): 'event_path', (521, 41, 521, 85): "event_data.tournament_data['name'] + '.wiki'"}, {}), "(event_path, event_data.tournament_data['name'] + '.wiki')", False, 'from pathlib import Path\n'), ((112, 34, 112, 53), 'operator.itemgetter', 'itemgetter', ({(112, 45, 112, 46): '2', (112, 48, 112, 49): '4', (112, 51, 112, 52): '0'}, {}), '(2, 4, 0)', False, 'from operator import itemgetter\n'), ((114, 34, 114, 50), 'operator.itemgetter', 'itemgetter', ({(114, 45, 114, 46): '4', (114, 48, 114, 49): '0'}, {}), '(4, 0)', False, 'from operator import itemgetter\n'), ((29, 27, 29, 95), 'datetime.datetime.strptime', 'datetime.strptime', ({(29, 45, 29, 69): "data['checkInStartTime']", (29, 71, 29, 94): '"""%Y-%m-%dT%H:%M:%S.%fZ"""'}, {}), "(data['checkInStartTime'], '%Y-%m-%dT%H:%M:%S.%fZ')", False, 'from datetime import datetime\n'), ((32, 31, 32, 103), 'datetime.datetime.strptime', 'datetime.strptime', ({(32, 49, 32, 77): "data['lastCompletedMatchAt']", (32, 79, 32, 102): '"""%Y-%m-%dT%H:%M:%S.%fZ"""'}, {}), "(data['lastCompletedMatchAt'], '%Y-%m-%dT%H:%M:%S.%fZ')", False, 'from datetime import datetime\n')] |
DeadZombie14/chillMagicCarPygame | utilidades/texto.py | 756bb6d27939bed3c2834222d03096e90f05a788 | import pygame
class Texto:
    def __init__(self, screen, text, x, y, text_size=20, fuente='Calibri', italic=False, bold=False, subrayado=False, color=(250, 240, 230), bg=()):
self.screen = screen
fg = color
self.coord = x, y
        # Load font, prepare values
font = pygame.font.Font(None, 80)
size = font.size(text)
# Font
a_sys_font = pygame.font.SysFont(fuente, text_size)
        # Italic
        if italic:
            a_sys_font.set_italic(1)
        # Bold
        if bold:
            a_sys_font.set_bold(1)
        # Underline
        if subrayado:
            a_sys_font.set_underline(1)
        # Build the text surface
        if len(bg) > 1:  # if a background color was given
            ren = a_sys_font.render(text, 1, fg, bg)
        else:  # otherwise render with a transparent background
            ren = a_sys_font.render(text, 1, fg)
# self.size = x+size[0], y
self.text_rect = ren.get_rect()
self.text_rect.center = (x,y)
self.image = ren, (x,y)
screen.blit(ren, (x, y))
        # Italic
        if italic:
            a_sys_font.set_italic(0)
        # Bold
        if bold:
            a_sys_font.set_bold(0)
        # Underline
        if subrayado:
            a_sys_font.set_underline(0)
# self.image.blit(ren, self.text_rect)
# self.text_rect = (x, y),ren.get_size()
# text = str(self.counter)
# label = self.myfont.render(text, 1, (255,0,0))
# text_rect = label.get_rect()
# text_rect.center = (50,50)
# self.image.blit(label, text_rect)
pass
def getProperties(self):
return self.text_rect
def redraw(self):
self.screen.blit(self.image[0], self.image[1])
pass
##################### EXAMPLE USAGE ##############################
# texto1 = Texto(screen, 'Hello', 10, 10)
class TextArea():
    def __init__(self, screen, text, x, y, fuente='Calibri', text_size=20, color=pygame.Color('black')):
self.coord = x, y
font = pygame.font.SysFont(fuente, text_size)
words = [word.split(' ') for word in text.splitlines()] # 2D array where each row is a list of words.
space = font.size(' ')[0] # The width of a space.
max_width, max_height = screen.get_size()
pos = x,y
for line in words:
for word in line:
word_surface = font.render(word, 0, color)
word_width, word_height = word_surface.get_size()
if x + word_width >= max_width:
x = pos[0] # Reset the x.
y += word_height # Start on new row.
screen.blit(word_surface, (x, y))
x += word_width + space
x = pos[0] # Reset the x.
y += word_height # Start on new row.
self.size = word_width, word_height
pass
def getProperties(self):
return self.size, self.coord
##################### EXAMPLE USAGE ##############################
# textarea1 = TextArea(screen, 'Hello world, how are you today') | [((13, 15, 13, 41), 'pygame.font.Font', 'pygame.font.Font', ({(13, 32, 13, 36): 'None', (13, 38, 13, 40): '80'}, {}), '(None, 80)', False, 'import pygame\n'), ((17, 21, 17, 59), 'pygame.font.SysFont', 'pygame.font.SysFont', ({(17, 41, 17, 47): 'fuente', (17, 49, 17, 58): 'text_size'}, {}), '(fuente, text_size)', False, 'import pygame\n'), ((76, 83, 76, 104), 'pygame.Color', 'pygame.Color', ({(76, 96, 76, 103): '"""black"""'}, {}), "('black')", False, 'import pygame\n'), ((78, 15, 78, 53), 'pygame.font.SysFont', 'pygame.font.SysFont', ({(78, 35, 78, 41): 'fuente', (78, 43, 78, 52): 'text_size'}, {}), '(fuente, text_size)', False, 'import pygame\n')]
MighTy-Weaver/Inefficient-AC-detection | training_xgboost_model.py | 8229f19accd1569ba7b48f77f71783173393d9ed | # This is the code to train the xgboost model with cross-validation for each unique room in the dataset.
# Models are dumped into ./models and results are dumped into two csv files in the current work directory.
import argparse
import json
import math
import os
import pickle
import warnings
from typing import Tuple
import numpy as np
import pandas as pd
import xgboost as xgb
from hyperopt import fmin, tpe, hp, STATUS_OK, Trials
from imblearn.over_sampling import SMOTE
from numpy.random import RandomState
from sklearn.metrics import r2_score, mean_squared_error
from sklearn.model_selection import train_test_split
from sklearn.utils import compute_sample_weight
from tqdm import tqdm
from xgboost import DMatrix, cv
# Set up an argument parser to decide the metric function
parser = argparse.ArgumentParser()
parser.add_argument("--metric", choices=['R2', 'RMSE'], type=str, required=False, default='R2',
help="The evaluation metric you want to use to train the XGBoost model")
parser.add_argument("--log", choices=[0, 1, 100], type=int, required=False, default=0,
help="Whether to print out the training progress")
parser.add_argument("--SMOTE", choices=[0, 1], type=int, required=False, default=1, help="Whether use the SMOTE or not")
parser.add_argument("--SMOGN", choices=[0, 1], type=int, required=False, default=0, help="Whether use the SMOGN or not")
parser.add_argument("--SampleWeight", choices=[0, 1], type=int, required=False, default=0,
help="Whether use the sample weight")
args = parser.parse_args()
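
# Example invocation (all flags optional; defaults are shown in the parser above):
#   python training_xgboost_model.py --metric R2 --log 1 --SMOTE 1 --SMOGN 0 --SampleWeight 0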
# Ignore all the warnings and set pandas to display every column and row everytime we print a dataframe
warnings.filterwarnings('ignore')
pd.set_option('display.max_columns', None)
pd.set_option('display.max_rows', None)
assert args.SMOTE != args.SMOGN, "Can't use SMOTE and SMOGN at the same time!"
# Load the data with a positive AC electricity consumption value, and drop the time data as we don't need them
data = pd.read_csv("summer_data_compiled.csv", index_col=0)
data = data[data.AC > 0].drop(['Time', 'Date', 'Hour'], axis=1).reset_index(drop=True)
# Create some directory to store the models and future analysis figures.
# log_folder_name = "Test_{}_{}".format(args.metric, datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
log_folder_name = "Test_R2_HYPEROPT"
log_folder_name = log_folder_name + "_SMOTE" if args.SMOTE else log_folder_name
log_folder_name = log_folder_name + "_SMOGN" if args.SMOGN else log_folder_name
log_folder_name = log_folder_name + "_SW" if args.SampleWeight else log_folder_name
previous_parameter_folder = "Test_R2_HYPEROPT"
assert log_folder_name != previous_parameter_folder, "Previous folder name exists"
if not os.path.exists('./{}/'.format(log_folder_name)):
os.mkdir('./{}'.format(log_folder_name))
os.mkdir('./{}/models/'.format(log_folder_name))
os.mkdir('./{}/trntst_models/'.format(log_folder_name))
# Define our evaluation functions
def RMSE(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str, float]:
truth_value = dtrain.get_label()
    root_squared_error = math.sqrt(mean_squared_error(truth_value, predt))
    return "RMSE", root_squared_error
def R2(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str, float]:
truth_value = dtrain.get_label()
r2_value = r2_score(truth_value, predt)
return "R2", r2_value
def fobjective(space):
param_dict_tunning = {'max_depth': int(space['max_depth']),
'learning_rate': space['learning_rate'],
'colsample_bytree': space['colsample_bytree'],
'min_child_weight': int(space['min_child_weight']),
'reg_alpha': int(space['reg_alpha']),
'reg_lambda': space['reg_lambda'],
'subsample': space['subsample'],
'min_split_loss': space['min_split_loss'],
'objective': 'reg:squarederror'}
xgb_cv_result = xgb.cv(dtrain=data_matrix, params=param_dict_tunning, nfold=5,
early_stopping_rounds=30, as_pandas=True, num_boost_round=200,
seed=seed, metrics='rmse', maximize=False, shuffle=True)
return {"loss": (xgb_cv_result["test-rmse-mean"]).tail(1).iloc[0], "status": STATUS_OK}
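# Hyperopt usage sketch (my annotation, mirroring the call made in the per-room
# loop below): the TPE optimiser minimises the cross-validated RMSE returned by
# fobjective, e.g.
#   best = fmin(fn=fobjective, space=space, algo=tpe.suggest, max_evals=400,
#               trials=Trials(), rstate=RandomState(seed))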
eval_dict = {'RMSE': RMSE, 'R2': R2}
print("Start Training The Models")
# Create two dataframes to store the result during the training and after the training.
error_csv = pd.DataFrame(
columns=['room', 'train-{}-mean'.format(args.metric), 'train-{}-std'.format(args.metric), 'train-rmse-mean',
'train-rmse-std', 'test-{}-mean'.format(args.metric), 'test-{}-std'.format(args.metric), 'test-rmse-mean',
'test-rmse-std'])
prediction_csv = pd.DataFrame(columns=['room', 'observation', 'prediction'])
room_list = data['Location'].unique()
# Iterate through all the rooms and do the training and cross-validation for each room.
for room in tqdm(room_list):
seed = 2030 + room
# Four rooms have low quality data and we delete them manually
if room == 309 or room == 312 or room == 826 or room == 917 or room == 1001:
continue
    # We extract the data of a particular room and run the SMOTE algorithm on it.
room_data = data[data.Location == room].drop(['Location'], axis=1).reset_index(drop=True)
if args.SMOTE:
        # Label the AC data using a 0.75 threshold: AC above 0.75 is marked as 1, otherwise 0. Split into X and y
room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int')
X = room_data.drop(['SMOTE_split'], axis=1)
y = room_data['SMOTE_split']
# Run the SMOTE algorithm and retrieve the result.
model_smote = SMOTE(random_state=621, k_neighbors=3)
room_data_smote, smote_split = model_smote.fit_resample(X, y)
# concat the result from SMOTE and split the result into X and y for training.
room_data_smote = pd.concat([room_data_smote, smote_split], axis=1)
y = room_data_smote['AC']
X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1)
elif args.SMOGN:
if len(room_data) < 500:
room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int')
X = room_data.drop(['SMOTE_split'], axis=1)
y = room_data['SMOTE_split']
# Run the SMOTE algorithm and retrieve the result.
model_smote = SMOTE(random_state=621, k_neighbors=3)
room_data_smote, smote_split = model_smote.fit_resample(X, y)
# concat the result from SMOTE and split the result into X and y for training.
room_data_smote = pd.concat([room_data_smote, smote_split], axis=1)
y = room_data_smote['AC']
X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1)
else:
room_data = pd.read_csv('./SMOGN_processed/{}.csv'.format(room), index_col=0)
y = room_data['AC']
X = room_data.drop(['AC'], axis=1)
else:
y = pd.DataFrame(room_data['AC'].fillna(method='pad'))
X = room_data.drop(['AC'], axis=1).fillna(method='pad')
if args.SampleWeight:
class_sample = pd.cut(y, bins=15)
weight = compute_sample_weight(class_weight="balanced", y=class_sample)
X = X.to_numpy()
# Build another full data matrix for the built-in cross validation function to work.
data_matrix = DMatrix(data=X, label=y, weight=weight) if args.SampleWeight else DMatrix(data=X, label=y)
# Cross_validation with hyper-parameter tuning
space = {'max_depth': hp.quniform("max_depth", 3, 10, 1),
'learning_rate': hp.uniform("learning_rate", 0.1, 3),
'colsample_bytree': hp.uniform("colsample_bytree", 0.5, 1),
'min_child_weight': hp.quniform("min_child_weight", 1, 20, 1),
'reg_alpha': hp.quniform("reg_alpha", 0, 100, 1),
'reg_lambda': hp.uniform("reg_lambda", 0, 2),
'subsample': hp.uniform("subsample", 0.5, 1),
'min_split_loss': hp.uniform("min_split_loss", 0, 9)}
if os.path.exists('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room)):
best_param_dict = np.load('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room),
allow_pickle=True).item()
np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict)
else:
trials = Trials()
best_hyperparams = fmin(fn=fobjective, space=space, algo=tpe.suggest, max_evals=400, trials=trials,
rstate=RandomState(seed))
# setup our training parameters and a model variable as model checkpoint
best_param_dict = {'objective': 'reg:squarederror', 'max_depth': int(best_hyperparams['max_depth']),
'reg_alpha': best_hyperparams['reg_alpha'], 'reg_lambda': best_hyperparams['reg_lambda'],
'min_child_weight': best_hyperparams['min_child_weight'],
'colsample_bytree': best_hyperparams['colsample_bytree'],
'learning_rate': best_hyperparams['learning_rate'],
'subsample': best_hyperparams['subsample'],
'min_split_loss': best_hyperparams['min_split_loss']}
np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict)
    # Use the built-in cv function to do the cross-validation with five folds; this will return the results.
xgb_cv_result = cv(dtrain=data_matrix, params=best_param_dict, nfold=5,
early_stopping_rounds=30, as_pandas=True, num_boost_round=200,
seed=seed, shuffle=True, feval=eval_dict[args.metric], maximize=True)
xgb_cv_result['room'] = room
error_csv.loc[len(error_csv)] = xgb_cv_result.loc[len(xgb_cv_result) - 1]
    # Use one train/test split for plotting, and save both the ground-truth and prediction values into the dataframe
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=seed)
d_train = DMatrix(X_train, label=y_train)
d_test = DMatrix(X_test, label=y_test)
watchlist = [(d_test, 'eval'), (d_train, 'train')]
xgb_model_train_test = xgb.train(params=best_param_dict, dtrain=d_train, num_boost_round=200, evals=watchlist,
verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True)
prediction = np.array(xgb_model_train_test.predict(d_test)).tolist()
real = np.array(y_test).tolist()
prediction_csv.loc[len(prediction_csv)] = {'room': room, 'observation': json.dumps(real),
'prediction': json.dumps(prediction)}
# Dump the error dataframes into csv files.
error_csv.to_csv('./{}/error.csv'.format(log_folder_name), index=False)
prediction_csv.to_csv('./{}/prediction.csv'.format(log_folder_name), index=False)
    # Develop a model using the whole original dataset, and save the model
xgb_model_full = xgb.train(params=best_param_dict, dtrain=data_matrix, num_boost_round=200, evals=watchlist,
verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True)
# Save all the models we trained for future use
pickle.dump(xgb_model_train_test, open('./{}/trntst_models/{}.pickle.bat'.format(log_folder_name, room), 'wb'))
pickle.dump(xgb_model_full, open('./{}/models/{}.pickle.bat'.format(log_folder_name, room), 'wb'))
print("Training finished!")
| [((25, 9, 25, 34), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((37, 0, 37, 33), 'warnings.filterwarnings', 'warnings.filterwarnings', ({(37, 24, 37, 32): '"""ignore"""'}, {}), "('ignore')", False, 'import warnings\n'), ((38, 0, 38, 42), 'pandas.set_option', 'pd.set_option', ({(38, 14, 38, 35): '"""display.max_columns"""', (38, 37, 38, 41): 'None'}, {}), "('display.max_columns', None)", True, 'import pandas as pd\n'), ((39, 0, 39, 39), 'pandas.set_option', 'pd.set_option', ({(39, 14, 39, 32): '"""display.max_rows"""', (39, 34, 39, 38): 'None'}, {}), "('display.max_rows', None)", True, 'import pandas as pd\n'), ((44, 7, 44, 59), 'pandas.read_csv', 'pd.read_csv', (), '', True, 'import pandas as pd\n'), ((103, 17, 103, 76), 'pandas.DataFrame', 'pd.DataFrame', (), '', True, 'import pandas as pd\n'), ((108, 12, 108, 27), 'tqdm.tqdm', 'tqdm', ({(108, 17, 108, 26): 'room_list'}, {}), '(room_list)', False, 'from tqdm import tqdm\n'), ((73, 15, 73, 43), 'sklearn.metrics.r2_score', 'r2_score', ({(73, 24, 73, 35): 'truth_value', (73, 37, 73, 42): 'predt'}, {}), '(truth_value, predt)', False, 'from sklearn.metrics import r2_score, mean_squared_error\n'), ((88, 20, 90, 83), 'xgboost.cv', 'xgb.cv', (), '', True, 'import xgboost as xgb\n'), ((192, 20, 194, 92), 'xgboost.cv', 'cv', (), '', False, 'from xgboost import DMatrix, cv\n'), ((200, 39, 200, 95), 'sklearn.model_selection.train_test_split', 'train_test_split', (), '', False, 'from sklearn.model_selection import train_test_split\n'), ((201, 14, 201, 45), 'xgboost.DMatrix', 'DMatrix', (), '', False, 'from xgboost import DMatrix, cv\n'), ((202, 13, 202, 42), 'xgboost.DMatrix', 'DMatrix', (), '', False, 'from xgboost import DMatrix, cv\n'), ((206, 27, 207, 120), 'xgboost.train', 'xgb.train', (), '', True, 'import xgboost as xgb\n'), ((220, 21, 221, 114), 'xgboost.train', 'xgb.train', (), '', True, 'import xgboost as xgb\n'), ((67, 34, 67, 72), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', ({(67, 53, 67, 64): 'truth_value', (67, 66, 67, 71): 'predt'}, {}), '(truth_value, predt)', False, 'from sklearn.metrics import r2_score, mean_squared_error\n'), ((124, 22, 124, 60), 'imblearn.over_sampling.SMOTE', 'SMOTE', (), '', False, 'from imblearn.over_sampling import SMOTE\n'), ((128, 26, 128, 75), 'pandas.concat', 'pd.concat', (), '', True, 'import pandas as pd\n'), ((154, 23, 154, 41), 'pandas.cut', 'pd.cut', (), '', True, 'import pandas as pd\n'), ((155, 17, 155, 79), 'sklearn.utils.compute_sample_weight', 'compute_sample_weight', (), '', False, 'from sklearn.utils import compute_sample_weight\n'), ((160, 18, 160, 57), 'xgboost.DMatrix', 'DMatrix', (), '', False, 'from xgboost import DMatrix, cv\n'), ((160, 84, 160, 108), 'xgboost.DMatrix', 'DMatrix', (), '', False, 'from xgboost import DMatrix, cv\n'), ((163, 26, 163, 60), 'hyperopt.hp.quniform', 'hp.quniform', ({(163, 38, 163, 49): '"""max_depth"""', (163, 51, 163, 52): '(3)', (163, 54, 163, 56): '(10)', (163, 58, 163, 59): '(1)'}, {}), "('max_depth', 3, 10, 1)", False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((164, 30, 164, 65), 'hyperopt.hp.uniform', 'hp.uniform', ({(164, 41, 164, 56): '"""learning_rate"""', (164, 58, 164, 61): '(0.1)', (164, 63, 164, 64): '(3)'}, {}), "('learning_rate', 0.1, 3)", False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((165, 33, 165, 71), 'hyperopt.hp.uniform', 'hp.uniform', ({(165, 44, 165, 62): '"""colsample_bytree"""', (165, 64, 165, 67): '(0.5)', (165, 69, 165, 70): '(1)'}, {}), "('colsample_bytree', 0.5, 1)", False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((166, 33, 166, 74), 'hyperopt.hp.quniform', 'hp.quniform', ({(166, 45, 166, 63): '"""min_child_weight"""', (166, 65, 166, 66): '(1)', (166, 68, 166, 70): '(20)', (166, 72, 166, 73): '(1)'}, {}), "('min_child_weight', 1, 20, 1)", False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((167, 26, 167, 61), 'hyperopt.hp.quniform', 'hp.quniform', ({(167, 38, 167, 49): '"""reg_alpha"""', (167, 51, 167, 52): '(0)', (167, 54, 167, 57): '(100)', (167, 59, 167, 60): '(1)'}, {}), "('reg_alpha', 0, 100, 1)", False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((168, 27, 168, 57), 'hyperopt.hp.uniform', 'hp.uniform', ({(168, 38, 168, 50): '"""reg_lambda"""', (168, 52, 168, 53): '(0)', (168, 55, 168, 56): '(2)'}, {}), "('reg_lambda', 0, 2)", False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((169, 26, 169, 57), 'hyperopt.hp.uniform', 'hp.uniform', ({(169, 37, 169, 48): '"""subsample"""', (169, 50, 169, 53): '(0.5)', (169, 55, 169, 56): '(1)'}, {}), "('subsample', 0.5, 1)", False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((170, 31, 170, 65), 'hyperopt.hp.uniform', 'hp.uniform', ({(170, 42, 170, 58): '"""min_split_loss"""', (170, 60, 170, 61): '(0)', (170, 63, 170, 64): '(9)'}, {}), "('min_split_loss', 0, 9)", False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((177, 17, 177, 25), 'hyperopt.Trials', 'Trials', ({}, {}), '()', False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((212, 76, 212, 92), 'json.dumps', 'json.dumps', ({(212, 87, 212, 91): 'real'}, {}), '(real)', False, 'import json\n'), ((213, 61, 213, 83), 'json.dumps', 'json.dumps', ({(213, 72, 213, 82): 'prediction'}, {}), '(prediction)', False, 'import json\n'), ((210, 11, 210, 27), 'numpy.array', 'np.array', ({(210, 20, 210, 26): 'y_test'}, {}), '(y_test)', True, 'import numpy as np\n'), ((138, 26, 138, 64), 'imblearn.over_sampling.SMOTE', 'SMOTE', (), '', False, 'from imblearn.over_sampling import SMOTE\n'), ((142, 30, 142, 79), 'pandas.concat', 'pd.concat', (), '', True, 'import pandas as pd\n'), ((179, 39, 179, 56), 'numpy.random.RandomState', 'RandomState', ({(179, 51, 179, 55): 'seed'}, {}), '(seed)', False, 'from numpy.random import RandomState\n')]
editorconfig/editorconfig-core-py | setup.py | f43312abcf6888b78ca80f1e95bfa627281746ad | import os
from setuptools import setup
# Read the version
g = {}
with open(os.path.join("editorconfig", "version.py"), "rt") as fp:
exec(fp.read(), g)
v = g['VERSION']
version = ".".join(str(x) for x in v[:3])
if v[3] != "final":
version += "-" + v[3]
setup(
name='EditorConfig',
version=version,
author='EditorConfig Team',
packages=['editorconfig'],
url='http://editorconfig.org/',
license='python',
description='EditorConfig File Locator and Interpreter for Python',
long_description=open('README.rst').read(),
entry_points = {
'console_scripts': [
'editorconfig = editorconfig.__main__:main',
]
},
classifiers=[
'License :: OSI Approved :: Python Software Foundation License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: Implementation :: PyPy',
],
)
| [((6, 10, 6, 52), 'os.path.join', 'os.path.join', ({(6, 23, 6, 37): '"""editorconfig"""', (6, 39, 6, 51): '"""version.py"""'}, {}), "('editorconfig', 'version.py')", False, 'import os\n')] |
wangzy0327/hadoop-cluster-docker | multi_group_memory_contrast.py | cf1de6bf458ade132ad5a688e4f8f9b9968a704a | import numpy as np
import matplotlib.pyplot as plt
t = np.arange(0,375,6.5)
# MEM_1 = [0.031, 0.034, 0.034, 0.034, 0.031, 0.034, 0.034, 0.034, 0.031, 0.033, 0.035, 0.034, 0.031, 0.033, 0.034, 0.034, 0.031, 0.033, 0.034, 0.034, 0.031, 0.033, 0.034, 0.034, 0.031, 0.033, 0.034, 0.034, 0.031, 0.031, 0.031, 0.031, 0.031, 0.031]
# MEM_2 = [0.031, 0.033, 0.045, 0.054, 0.057, 0.068, 0.068, 0.066, 0.071, 0.071, 0.077, 0.079, 0.089, 0.083, 0.079, 0.073, 0.07, 0.076, 0.076, 0.083, 0.086, 0.083, 0.078, 0.074, 0.071, 0.073, 0.073, 0.073, 0.071, 0.071, 0.071, 0.071, 0.071, 0.071]
# MEM_3 = [0.032, 0.034, 0.049, 0.073, 0.082, 0.099, 0.121, 0.132, 0.133, 0.123, 0.109, 0.111, 0.114, 0.114, 0.116, 0.132, 0.148, 0.139, 0.13, 0.116, 0.112, 0.113, 0.114, 0.114, 0.112, 0.112, 0.112, 0.112, 0.112, 0.112, 0.112, 0.112, 0.112, 0.112]
# MEM_4 = [0.032, 0.035, 0.05, 0.073, 0.105, 0.126, 0.149, 0.17, 0.176, 0.18, 0.171, 0.151, 0.145, 0.152, 0.153, 0.166, 0.177, 0.173, 0.166, 0.152, 0.152, 0.148, 0.148, 0.148, 0.148, 0.148, 0.148, 0.148, 0.148, 0.148, 0.148, 0.148, 0.148, 0.148]
# MEM_5 = [0.032, 0.034, 0.049, 0.068, 0.106, 0.141, 0.166, 0.194, 0.221, 0.238, 0.235, 0.213, 0.185, 0.185, 0.189, 0.193, 0.197, 0.2, 0.201, 0.201, 0.197, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.190, 0.190, 0.190]
# MEM_6 = [0.032, 0.034, 0.049, 0.069, 0.102, 0.133, 0.179, 0.193, 0.233, 0.264, 0.299, 0.297, 0.279, 0.237, 0.226, 0.226, 0.228, 0.231, 0.232, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23]
# MEM_7 = [0.03, 0.032, 0.047, 0.066, 0.098, 0.131, 0.169, 0.219, 0.234, 0.281, 0.314, 0.344, 0.337, 0.318, 0.271, 0.264, 0.263, 0.264, 0.265, 0.266, 0.267, 0.267, 0.267, 0.267, 0.267, 0.267, 0.267, 0.267, 0.267, 0.267, 0.267, 0.267, 0.267, 0.267]
MEM_1 = [0.038, 0.039, 0.04, 0.042, 0.047, 0.048, 0.05, 0.044, 0.038, 0.038, 0.039, 0.044, 0.048, 0.048, 0.048, 0.038, 0.041, 0.041, 0.047, 0.051, 0.049, 0.047, 0.038, 0.04, 0.04, 0.046, 0.052, 0.049, 0.045, 0.038, 0.038, 0.038, 0.043, 0.048, 0.048, 0.048, 0.04, 0.038, 0.04, 0.039, 0.046, 0.05, 0.049, 0.045, 0.039, 0.039, 0.042, 0.042, 0.048, 0.052, 0.05, 0.047, 0.041, 0.039, 0.039, 0.039, 0.039, 0.039]
MEM_2 = [0.041, 0.049, 0.056, 0.064, 0.084, 0.091, 0.096, 0.088, 0.081, 0.076, 0.076, 0.078, 0.088, 0.102, 0.103, 0.094, 0.085, 0.076, 0.077, 0.084, 0.093, 0.097, 0.092, 0.082, 0.076, 0.076, 0.079, 0.085, 0.092, 0.088, 0.085, 0.076, 0.076, 0.076, 0.077, 0.077, 0.077, 0.076, 0.077, 0.077, 0.077, 0.076, 0.077, 0.077, 0.077, 0.076, 0.077, 0.077, 0.077, 0.076, 0.077, 0.077, 0.077, 0.076, 0.077, 0.077, 0.077, 0.077]
MEM_3 = [0.077, 0.077, 0.086, 0.091, 0.108, 0.129, 0.137, 0.14, 0.126, 0.121, 0.117, 0.115, 0.125, 0.139, 0.142, 0.143, 0.126, 0.122, 0.115, 0.114, 0.118, 0.122, 0.122, 0.118, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113, 0.113]
MEM_4 = [0.117, 0.117, 0.128, 0.141, 0.162, 0.191, 0.19, 0.189, 0.166, 0.16, 0.155, 0.158, 0.169, 0.182, 0.178, 0.174, 0.159, 0.156, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153, 0.153]
MEM_5 = [0.154, 0.154, 0.166, 0.173, 0.195, 0.227, 0.232, 0.239, 0.207, 0.197, 0.195, 0.194, 0.205, 0.21, 0.209, 0.198, 0.191, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188, 0.188]
MEM_6 = [0.179, 0.179, 0.195, 0.203, 0.231, 0.267, 0.269, 0.266, 0.238, 0.222, 0.218, 0.214, 0.22, 0.227, 0.226, 0.223, 0.218, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214, 0.214]
MEM_7 = [0.204, 0.205, 0.226, 0.23, 0.251, 0.302, 0.327, 0.32, 0.305, 0.273, 0.257, 0.256, 0.256, 0.257, 0.257, 0.258, 0.256, 0.256, 0.257, 0.257, 0.258, 0.256, 0.256, 0.257, 0.257, 0.258, 0.256, 0.256, 0.257, 0.257, 0.258, 0.256, 0.256, 0.257, 0.257, 0.258, 0.256, 0.256, 0.257, 0.257, 0.258, 0.256, 0.256, 0.257, 0.257, 0.258, 0.256, 0.256, 0.257, 0.257, 0.258, 0.256, 0.256, 0.257, 0.257, 0.258, 0.257, 0.257]
font1 = {
'family' : 'Times New Roman',
'weight' : 'normal',
'size' : 28,
}
font2 = {
'family' : 'Times New Roman',
'weight' : 'normal',
'size' : 20,
}
plt.title('processing Memory% Analysis',font1)
l1, = plt.plot(t,MEM_1,color='green',marker="o",label='1 hadoop group')
l2, = plt.plot(t,MEM_2,color='darkorange',marker="o",label='2 hadoop group')
l3, = plt.plot(t,MEM_3,color='yellow',marker="o",label='3 hadoop group')
l4, = plt.plot(t,MEM_4,color='greenyellow',marker="o",label='4 hadoop group')
l5, = plt.plot(t,MEM_5,color='springgreen',marker="o",label='5 hadoop group')
l6, = plt.plot(t,MEM_6,color='darkslategrey',marker="o",label='6 hadoop group')
l7, = plt.plot(t,MEM_7,color='red',marker="o",label='7 hadoop group')
#l2, = plt.plot(x2,multi,color='red',label='multi hadoop group')
# color: darkorange lightcoral darkgoldenrod yellow greenyellow springgreen darkslategrey deepskyblue fuchsia blue
x_ticks = np.arange(0,380,30)
y_ticks = np.arange(0,0.6,0.1)
plt.legend(handles=[l1,l2,l3,l4,l5,l6,l7],labels=['1-hadoop-group-MEM','2-hadoop-group-MEM','3-hadoop-group-MEM','4-hadoop-group-MEM','5-hadoop-group-MEM','6-hadoop-group-MEM','7-hadoop-group-MEM'],loc="best")
plt.xlabel('time unit(seconds)',font2)
plt.ylabel('hadoop occupy MEM unit(% 62G)',font2)
plt.xticks(x_ticks)
plt.yticks(y_ticks)
#plt.savefig('.MEM%.png')
plt.show()
| [((4, 4, 4, 24), 'numpy.arange', 'np.arange', ({(4, 14, 4, 15): '0', (4, 16, 4, 19): '375', (4, 20, 4, 23): '6.5'}, {}), '(0, 375, 6.5)', True, 'import numpy as np\n'), ((33, 0, 33, 46), 'matplotlib.pyplot.title', 'plt.title', ({(33, 10, 33, 39): '"""processing Memory% Analysis"""', (33, 40, 33, 45): 'font1'}, {}), "('processing Memory% Analysis', font1)", True, 'import matplotlib.pyplot as plt\n'), ((34, 6, 34, 71), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((35, 6, 35, 76), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((36, 6, 36, 72), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((37, 6, 37, 77), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((38, 6, 38, 77), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((39, 6, 39, 79), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((40, 6, 40, 69), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((45, 10, 45, 29), 'numpy.arange', 'np.arange', ({(45, 20, 45, 21): '0', (45, 22, 45, 25): '380', (45, 26, 45, 28): '30'}, {}), '(0, 380, 30)', True, 'import numpy as np\n'), ((46, 10, 46, 30), 'numpy.arange', 'np.arange', ({(46, 20, 46, 21): '0', (46, 22, 46, 25): '0.6', (46, 26, 46, 29): '0.1'}, {}), '(0, 0.6, 0.1)', True, 'import numpy as np\n'), ((48, 0, 48, 209), 'matplotlib.pyplot.legend', 'plt.legend', (), '', True, 'import matplotlib.pyplot as plt\n'), ((49, 0, 49, 38), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(49, 11, 49, 31): '"""time unit(seconds)"""', (49, 32, 49, 37): 'font2'}, {}), "('time unit(seconds)', font2)", True, 'import matplotlib.pyplot as plt\n'), ((50, 0, 50, 49), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(50, 11, 50, 42): '"""hadoop occupy MEM unit(% 62G)"""', (50, 43, 50, 48): 'font2'}, {}), "('hadoop occupy MEM unit(% 62G)', font2)", True, 'import matplotlib.pyplot as plt\n'), ((52, 0, 52, 19), 'matplotlib.pyplot.xticks', 'plt.xticks', ({(52, 11, 52, 18): 'x_ticks'}, {}), '(x_ticks)', True, 'import matplotlib.pyplot as plt\n'), ((53, 0, 53, 19), 'matplotlib.pyplot.yticks', 'plt.yticks', ({(53, 11, 53, 18): 'y_ticks'}, {}), '(y_ticks)', True, 'import matplotlib.pyplot as plt\n'), ((57, 0, 57, 10), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n')] |
josephburnett/vaping | vaping/config.py | 16f9092f0b3c1692e6d1a040f746e1277e197353 | import re
import munge
def parse_interval(val):
"""
converts a string to float of seconds
.5 = 500ms
90 = 1m30s
**Arguments**
- val (`str`)
"""
re_intv = re.compile(r"([\d\.]+)([a-zA-Z]+)")
val = val.strip()
total = 0.0
for match in re_intv.findall(val):
unit = match[1]
count = float(match[0])
if unit == "s":
total += count
elif unit == "m":
total += count * 60
elif unit == "ms":
total += count / 1000
elif unit == "h":
total += count * 3600
elif unit == "d":
total += count * 86400
else:
raise ValueError("unknown unit from interval string '%s'" % val)
return total
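# Illustrative values (my annotation, derived from the regex above, not in the
# original file): parse_interval("1m30s") -> 90.0, parse_interval("500ms") -> 0.5,
# parse_interval("2h") -> 7200.0. A bare number without a unit suffix matches
# nothing and therefore returns 0.0.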
class Config(munge.Config):
"""
Vaping config manager
"""
defaults = {
"config": {
"vaping": {"home_dir": None, "pidfile": "vaping.pid", "plugin_path": [],},
},
"config_dir": "~/.vaping",
"codec": "yaml",
}
| [((16, 14, 16, 49), 're.compile', 're.compile', ({(16, 25, 16, 48): '"""([\\\\d\\\\.]+)([a-zA-Z]+)"""'}, {}), "('([\\\\d\\\\.]+)([a-zA-Z]+)')", False, 'import re\n')] |
Rubiel1/sktime | sktime/annotation/tests/test_all_annotators.py | 2fd2290fb438224f11ddf202148917eaf9b73a87 | # -*- coding: utf-8 -*-
"""Tests for sktime annotators."""
import pandas as pd
import pytest
from sktime.registry import all_estimators
from sktime.utils._testing.estimator_checks import _make_args
ALL_ANNOTATORS = all_estimators(estimator_types="series-annotator", return_names=False)
@pytest.mark.parametrize("Estimator", ALL_ANNOTATORS)
def test_output_type(Estimator):
"""Test annotator output type."""
estimator = Estimator.create_test_instance()
args = _make_args(estimator, "fit")
estimator.fit(*args)
args = _make_args(estimator, "predict")
y_pred = estimator.predict(*args)
assert isinstance(y_pred, pd.Series)
| [((10, 17, 10, 87), 'sktime.registry.all_estimators', 'all_estimators', (), '', False, 'from sktime.registry import all_estimators\n'), ((13, 1, 13, 53), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(13, 25, 13, 36): '"""Estimator"""', (13, 38, 13, 52): 'ALL_ANNOTATORS'}, {}), "('Estimator', ALL_ANNOTATORS)", False, 'import pytest\n'), ((18, 11, 18, 39), 'sktime.utils._testing.estimator_checks._make_args', '_make_args', ({(18, 22, 18, 31): 'estimator', (18, 33, 18, 38): '"""fit"""'}, {}), "(estimator, 'fit')", False, 'from sktime.utils._testing.estimator_checks import _make_args\n'), ((20, 11, 20, 43), 'sktime.utils._testing.estimator_checks._make_args', '_make_args', ({(20, 22, 20, 31): 'estimator', (20, 33, 20, 42): '"""predict"""'}, {}), "(estimator, 'predict')", False, 'from sktime.utils._testing.estimator_checks import _make_args\n')] |
AlexMassin/mlh-react-vr-website | raspberry-pi-camera/cam.py | dc08788ccdecc9923b8dbfd31fa452cb83d214ae | from picamera import PiCamera
from time import sleep
import boto3
import os.path
import subprocess
s3 = boto3.client('s3')
bucket = 'cambucket21'
camera = PiCamera()
#camera.resolution(1920,1080)
x = 0
camerafile = x
while True:
if (x == 6):
x = 1
else:
x = x + 1
camera.start_preview()
camera.start_recording('/home/pi/' + str(x) + '.h264')
sleep(2)
camera.stop_recording()
camera.stop_preview()
subprocess.Popen("MP4Box -add " + str(x) + ".h264 " + str(x) +".mp4", shell=True)
sleep(1)
s3.upload_file('/home/pi/' + str(x) + '.mp4',bucket,'/home/pi/' + str(x) + '.mp4')
| [] |
Mikma03/InfoShareacademy_Python_Courses | Part_3_advanced/m04_datetime_and_timedelta/datetime_formats/example_1.py | 3df1008c8c92831bebf1625f960f25b39d6987e6 | from datetime import datetime
def run_example():
moment_in_time = datetime.fromordinal(256)
print(moment_in_time)
print(moment_in_time.toordinal())
print(moment_in_time.weekday())
print(moment_in_time.isoweekday())
other_moment = datetime.fromtimestamp(16_000_000)
print(other_moment)
print(other_moment.timestamp())
print(other_moment.isocalendar())
if __name__ == "__main__":
run_example()
| [((5, 21, 5, 46), 'datetime.datetime.fromordinal', 'datetime.fromordinal', ({(5, 42, 5, 45): '256'}, {}), '(256)', False, 'from datetime import datetime\n'), ((11, 19, 11, 53), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', ({(11, 42, 11, 52): '16000000'}, {}), '(16000000)', False, 'from datetime import datetime\n')] |
mxmpl/pykaldi | examples/scripts/segmentation/nnet3-segmenter.py | 0570307138c5391cc47b019450d08bcb9686dd98 | #!/usr/bin/env python
from __future__ import print_function
from kaldi.segmentation import NnetSAD, SegmentationProcessor
from kaldi.nnet3 import NnetSimpleComputationOptions
from kaldi.util.table import SequentialMatrixReader
# Construct SAD
model = NnetSAD.read_model("final.raw")
post = NnetSAD.read_average_posteriors("post_output.vec")
transform = NnetSAD.make_sad_transform(post)
graph = NnetSAD.make_sad_graph()
decodable_opts = NnetSimpleComputationOptions()
decodable_opts.extra_left_context = 79
decodable_opts.extra_right_context = 21
decodable_opts.extra_left_context_initial = 0
decodable_opts.extra_right_context_final = 0
decodable_opts.frames_per_chunk = 150
decodable_opts.acoustic_scale = 0.3
sad = NnetSAD(model, transform, graph, decodable_opts=decodable_opts)
seg = SegmentationProcessor(target_labels=[2])
# Define feature pipeline as a Kaldi rspecifier
feats_rspec = "ark:compute-mfcc-feats --config=mfcc.conf scp:wav.scp ark:- |"
# Segment
with SequentialMatrixReader(feats_rspec) as f, open ("segments", "w") as s:
for key, feats in f:
out = sad.segment(feats)
segments, stats = seg.process(out["alignment"])
seg.write(key, segments, s)
print("segments:", segments, flush=True)
print("stats:", stats, flush=True)
print("global stats:", seg.stats, flush=True)
| [((10, 8, 10, 39), 'kaldi.segmentation.NnetSAD.read_model', 'NnetSAD.read_model', ({(10, 27, 10, 38): '"""final.raw"""'}, {}), "('final.raw')", False, 'from kaldi.segmentation import NnetSAD, SegmentationProcessor\n'), ((11, 7, 11, 57), 'kaldi.segmentation.NnetSAD.read_average_posteriors', 'NnetSAD.read_average_posteriors', ({(11, 39, 11, 56): '"""post_output.vec"""'}, {}), "('post_output.vec')", False, 'from kaldi.segmentation import NnetSAD, SegmentationProcessor\n'), ((12, 12, 12, 44), 'kaldi.segmentation.NnetSAD.make_sad_transform', 'NnetSAD.make_sad_transform', ({(12, 39, 12, 43): 'post'}, {}), '(post)', False, 'from kaldi.segmentation import NnetSAD, SegmentationProcessor\n'), ((13, 8, 13, 32), 'kaldi.segmentation.NnetSAD.make_sad_graph', 'NnetSAD.make_sad_graph', ({}, {}), '()', False, 'from kaldi.segmentation import NnetSAD, SegmentationProcessor\n'), ((14, 17, 14, 47), 'kaldi.nnet3.NnetSimpleComputationOptions', 'NnetSimpleComputationOptions', ({}, {}), '()', False, 'from kaldi.nnet3 import NnetSimpleComputationOptions\n'), ((21, 6, 21, 69), 'kaldi.segmentation.NnetSAD', 'NnetSAD', (), '', False, 'from kaldi.segmentation import NnetSAD, SegmentationProcessor\n'), ((22, 6, 22, 46), 'kaldi.segmentation.SegmentationProcessor', 'SegmentationProcessor', (), '', False, 'from kaldi.segmentation import NnetSAD, SegmentationProcessor\n'), ((28, 5, 28, 40), 'kaldi.util.table.SequentialMatrixReader', 'SequentialMatrixReader', ({(28, 28, 28, 39): 'feats_rspec'}, {}), '(feats_rspec)', False, 'from kaldi.util.table import SequentialMatrixReader\n')] |
HeegyuKim/CurseFilter | src/dataset.py | dc4a64aebd997706553c24e919a88e19a3c92dd3 | import pandas as pd
import numpy as np
from tokenizers import Tokenizer
import torch
from torch.utils.data import Dataset, DataLoader
from typing import Dict, Any, Tuple
from datasets import load_dataset
class DataFrameDataset(Dataset):
def __init__(self,
tokenizer: Tokenizer,
df: pd.DataFrame,
text_column: str,
label_column: str,
max_length: int = 256,
padding: str = "max_length") -> None:
super().__init__()
inputs = tokenizer(df[text_column].to_list(), padding=padding, max_length=max_length, truncation=True, return_tensors="pt")
self.input_ids = inputs["input_ids"]
self.attention_masks = inputs["attention_mask"]
dtype = np.int64 if len(df[label_column].unique()) > 2 else np.float32
self.labels = torch.from_numpy(df[label_column].values.astype(dtype))
def __len__(self):
return self.input_ids.shape[0]
def __getitem__(self, index: Any) -> Dict:
return self.input_ids[index], self.attention_masks[index], self.labels[index]
def dataloader(self, **kwargs) -> DataLoader:
return DataLoader(self, **kwargs)
class DataFrameStudentDataset(DataFrameDataset):
def __init__(self,
teacher_model: torch.nn.Module,
teacher_tokenizer: Tokenizer,
student_tokenizer: Tokenizer,
df: pd.DataFrame,
text_column: str,
label_column: str,
max_length: int = 256,
padding: str = "max_length",
device: str = 'cuda') -> None:
super().__init__(student_tokenizer, df, text_column, label_column, max_length, padding)
teacher_ds = DataFrameDataset(
teacher_tokenizer,
df,
text_column,
label_column,
max_length,
padding
)
teacher_model = teacher_model.to(device)
with torch.no_grad():
soft_labels = [self._get_soft_label(teacher_model, teacher_ds, i, device)
for i in range(len(self))]
self.soft_labels = torch.stack(soft_labels)
def __getitem__(self, index: Any) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]:
return *super().__getitem__(index), self.soft_labels[index]
def _get_soft_label(self, model, teacher_ds, index, device):
ids, mask, _ = teacher_ds[index]
ids = ids.unsqueeze(0).to(device)
mask = mask.unsqueeze(0).to(device)
return model(ids, mask).cpu().squeeze(0)
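# Hypothetical usage sketch (my annotation; names and data are assumed, not part
# of the original module). Each item pairs the hard label with the teacher's
# cached soft label for distillation:
#   ds = DataFrameStudentDataset(teacher, teacher_tok, student_tok,
#                                df, text_column="text", label_column="label")
#   input_ids, attention_mask, hard_label, soft_label = ds[0]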
class ApeachDataset(Dataset):
def __init__(self,
split: str,
tokenizer: Tokenizer,
max_length: int = 256,
padding: str = "max_length") -> None:
super().__init__()
dataset = load_dataset("jason9693/APEACH")
texts = dataset[split]['text']
inputs = tokenizer(texts, padding=padding, max_length=max_length, truncation=True, return_tensors="pt")
self.input_ids = inputs["input_ids"]
self.attention_masks = inputs["attention_mask"]
labels = dataset[split]['class']
self.labels = torch.tensor(labels, dtype=torch.float32)
def __len__(self):
return self.input_ids.shape[0]
def __getitem__(self, index: Any) -> Dict:
return self.input_ids[index], self.attention_masks[index], self.labels[index]
def dataloader(self, **kwargs) -> DataLoader:
return DataLoader(self, **kwargs)
class ApeachStudentDataset(ApeachDataset):
def __init__(self,
teacher_model: torch.nn.Module,
split: str,
teacher_tokenizer: Tokenizer,
student_tokenizer: Tokenizer,
max_length: int = 256,
padding: str = "max_length",
device: str="cuda") -> None:
super().__init__(split, student_tokenizer, max_length, padding)
teacher_ds = ApeachDataset(split, teacher_tokenizer, max_length, padding)
teacher_model = teacher_model.to(device)
with torch.no_grad():
soft_labels = [self._get_soft_label(teacher_model, teacher_ds, i, device)
for i in range(len(self))]
self.soft_labels = torch.stack(soft_labels)
def __getitem__(self, index: Any) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]:
return *super().__getitem__(index), self.soft_labels[index]
def _get_soft_label(self, model, teacher_ds, index, device):
ids, mask, _ = teacher_ds[index]
ids = ids.unsqueeze(0).to(device)
mask = mask.unsqueeze(0).to(device)
return model(ids, mask).cpu().squeeze(0) | [((34, 15, 34, 41), 'torch.utils.data.DataLoader', 'DataLoader', ({(34, 26, 34, 30): 'self'}, {}), '(self, **kwargs)', False, 'from torch.utils.data import Dataset, DataLoader\n'), ((84, 18, 84, 50), 'datasets.load_dataset', 'load_dataset', ({(84, 31, 84, 49): '"""jason9693/APEACH"""'}, {}), "('jason9693/APEACH')", False, 'from datasets import load_dataset\n'), ((92, 22, 92, 63), 'torch.tensor', 'torch.tensor', (), '', False, 'import torch\n'), ((100, 15, 100, 41), 'torch.utils.data.DataLoader', 'DataLoader', ({(100, 26, 100, 30): 'self'}, {}), '(self, **kwargs)', False, 'from torch.utils.data import Dataset, DataLoader\n'), ((62, 13, 62, 28), 'torch.no_grad', 'torch.no_grad', ({}, {}), '()', False, 'import torch\n'), ((65, 31, 65, 55), 'torch.stack', 'torch.stack', ({(65, 43, 65, 54): 'soft_labels'}, {}), '(soft_labels)', False, 'import torch\n'), ((117, 13, 117, 28), 'torch.no_grad', 'torch.no_grad', ({}, {}), '()', False, 'import torch\n'), ((120, 31, 120, 55), 'torch.stack', 'torch.stack', ({(120, 43, 120, 54): 'soft_labels'}, {}), '(soft_labels)', False, 'import torch\n')] |
stko/Schnipsl | helper_tools/raspi_OMX-Player_Howto_demo.py | 824572c657e48f18950f584b9529661ff5bb8069 | #!/usr/bin/python
# mp4museum.org by julius schmiedel 2019
import os
import sys
import glob
from subprocess import Popen, PIPE
import RPi.GPIO as GPIO
FNULL = open(os.devnull, "w")
# setup GPIO pin
GPIO.setmode(GPIO.BOARD)
GPIO.setup(11, GPIO.IN, pull_up_down = GPIO.PUD_DOWN)
GPIO.setup(13, GPIO.IN, pull_up_down = GPIO.PUD_DOWN)
# functions to be called by event listener
def buttonPause(channel):
player.stdin.write("p")
def buttonNext(channel):
player.stdin.write("q")
# add event listener
GPIO.add_event_detect(11, GPIO.FALLING, callback = buttonPause, bouncetime = 234)
GPIO.add_event_detect(13, GPIO.FALLING, callback = buttonNext, bouncetime = 1234)
# please do not remove my logo screen
player = Popen(['omxplayer', '--adev', 'both', '/home/pi/mp4museum.mp4'],stdin=PIPE,stdout=FNULL)
player.wait()
# the loop
while(1):
for files in sorted(glob.glob(r'/media/*/*.mp4')):
player = Popen(['omxplayer','--adev', 'both',files],stdin=PIPE,stdout=FNULL)
player.wait()
| [((14, 0, 14, 24), 'RPi.GPIO.setmode', 'GPIO.setmode', ({(14, 13, 14, 23): 'GPIO.BOARD'}, {}), '(GPIO.BOARD)', True, 'import RPi.GPIO as GPIO\n'), ((15, 0, 15, 53), 'RPi.GPIO.setup', 'GPIO.setup', (), '', True, 'import RPi.GPIO as GPIO\n'), ((16, 0, 16, 53), 'RPi.GPIO.setup', 'GPIO.setup', (), '', True, 'import RPi.GPIO as GPIO\n'), ((26, 0, 26, 81), 'RPi.GPIO.add_event_detect', 'GPIO.add_event_detect', (), '', True, 'import RPi.GPIO as GPIO\n'), ((27, 0, 27, 81), 'RPi.GPIO.add_event_detect', 'GPIO.add_event_detect', (), '', True, 'import RPi.GPIO as GPIO\n'), ((30, 9, 30, 97), 'subprocess.Popen', 'Popen', (), '', False, 'from subprocess import Popen, PIPE\n'), ((35, 21, 35, 49), 'glob.glob', 'glob.glob', ({(35, 31, 35, 48): '"""/media/*/*.mp4"""'}, {}), "('/media/*/*.mp4')", False, 'import glob\n'), ((36, 11, 36, 78), 'subprocess.Popen', 'Popen', (), '', False, 'from subprocess import Popen, PIPE\n')] |
zeyu2001/ICT1002-Python | dash_app/compare_alg.py | 76a2c8ad3e3c4a3c873a9259e2a11488c33f2bf7 | """
Comparison between the efficiency of the Boyer-Moore algorithm and the naive substring search algorithm.
The runtimes for both algorithms are plotted on the same axes.
"""
import matplotlib.pyplot as plt
import numpy as np
import string
import time
import random
from bm_alg import boyer_moore_match, naive_match
# number of test cases for each iteration
TEST_CASES = 100
# test cases generated based on this pattern (vary_n)
PATTERN = 'ICT1002 is a really great module!'
# test cases generated based on this text (vary_m)
TEXT = PATTERN * 50
def generate_test_cases(pattern, length, k):
"""
Generates <k> test cases with text of length <length> containing <pattern>
Args:
pattern (str): A pattern within the text.
        length (int): The length of the text to generate
k (int): The number of test cases
Returns:
A list of test cases, i.e. strings that contain <pattern>
"""
result = []
for _ in range(k):
text = pattern
while len(text) < length:
direction = random.choice((0, 1))
# 0 --> Left
if direction == 0:
text = random.choice(string.ascii_lowercase) + text
# 1 --> Right
else:
text = text + random.choice(string.ascii_lowercase)
result.append(text)
return result
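# Example of the invariant (my annotation, not in the original file): every
# returned string embeds the pattern and is padded with random lowercase
# letters, e.g. generate_test_cases("abc", 6, 1) could yield ["xyabcz"];
# len(t) >= 6 and "abc" in t hold for every t in the result.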
def vary_n(max_n):
x = [n for n in range(1, max_n + 1)]
y_bm = []
y_naive = []
for n in x:
print('n =', n)
bm_result = []
naive_result = []
if n >= len(PATTERN):
# generate test cases of length n, which contain PATTERN
test_cases = generate_test_cases(PATTERN, n, TEST_CASES)
else:
# generate test cases of length n, which do not (and can not possibly) contain PATTERN
test_cases = generate_test_cases('', n, TEST_CASES)
for test_case in test_cases:
start = time.time()
naive_match(test_case, PATTERN)
naive_result.append(time.time() - start)
start = time.time()
boyer_moore_match(test_case, PATTERN)
bm_result.append(time.time() - start)
# obtain median runtime (mean is affected by outliers)
y_naive.append(sorted(naive_result)[TEST_CASES // 2])
y_bm.append(sorted(bm_result)[TEST_CASES // 2])
plt.plot(x, y_naive, label="Naive Algorithm")
plt.plot(x, y_bm, label="Boyer-Moore Algorithm")
plt.xlabel("n")
plt.ylabel("Runtime")
plt.title("Substring Search Algorithm Efficiency")
plt.legend()
plt.show()
def vary_m(max_m):
x = [m for m in range(1, max_m + 1)]
y_bm = []
y_naive = []
for m in x:
print('m =', m)
bm_result = []
naive_result = []
# generate test cases of length n
test_cases = generate_test_cases('', m, TEST_CASES)
for test_case in test_cases:
start = time.time()
naive_match(TEXT, test_case)
naive_result.append(time.time() - start)
start = time.time()
boyer_moore_match(TEXT, test_case)
bm_result.append(time.time() - start)
# obtain median runtime (mean is affected by outliers)
y_naive.append(sorted(naive_result)[TEST_CASES // 2])
y_bm.append(sorted(bm_result)[TEST_CASES // 2])
plt.plot(x, y_naive, label="Naive Algorithm")
plt.plot(x, y_bm, label="Boyer-Moore Algorithm")
plt.xlabel("m")
plt.ylabel("Runtime")
plt.title("Substring Search Algorithm Efficiency")
plt.legend()
plt.show()
def main():
done = False
print("m = Length of pattern\nn = Length of text\n")
print("1. Constant m, vary n")
print("2. Constant n, vary m")
print("3. Quit\n")
while not done:
choice = input("Your choice: ")
if choice == '1':
max_n = input("Upper limit of n: ")
while not (max_n.isnumeric() and int(max_n) > 1):
print("That is not a valid number.")
max_n = input("Upper limit of n: ")
vary_n(int(max_n))
elif choice == '2':
max_m = input("Upper limit of m: ")
while not (max_m.isnumeric() and int(max_m) > 1):
print("That is not a valid number.")
max_m = input("Upper limit of m: ")
vary_m(int(max_m))
elif choice == '3':
done = True
else:
print("That is not a valid option.")
if __name__ == '__main__':
main()
| [((84, 4, 84, 49), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((85, 4, 85, 52), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((86, 4, 86, 19), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(86, 15, 86, 18): '"""n"""'}, {}), "('n')", True, 'import matplotlib.pyplot as plt\n'), ((87, 4, 87, 25), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(87, 15, 87, 24): '"""Runtime"""'}, {}), "('Runtime')", True, 'import matplotlib.pyplot as plt\n'), ((88, 4, 88, 54), 'matplotlib.pyplot.title', 'plt.title', ({(88, 14, 88, 53): '"""Substring Search Algorithm Efficiency"""'}, {}), "('Substring Search Algorithm Efficiency')", True, 'import matplotlib.pyplot as plt\n'), ((89, 4, 89, 16), 'matplotlib.pyplot.legend', 'plt.legend', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((90, 4, 90, 14), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((119, 4, 119, 49), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((120, 4, 120, 52), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((121, 4, 121, 19), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(121, 15, 121, 18): '"""m"""'}, {}), "('m')", True, 'import matplotlib.pyplot as plt\n'), ((122, 4, 122, 25), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(122, 15, 122, 24): '"""Runtime"""'}, {}), "('Runtime')", True, 'import matplotlib.pyplot as plt\n'), ((123, 4, 123, 54), 'matplotlib.pyplot.title', 'plt.title', ({(123, 14, 123, 53): '"""Substring Search Algorithm Efficiency"""'}, {}), "('Substring Search Algorithm Efficiency')", True, 'import matplotlib.pyplot as plt\n'), ((124, 4, 124, 16), 'matplotlib.pyplot.legend', 'plt.legend', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((125, 4, 125, 14), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((39, 24, 39, 45), 'random.choice', 'random.choice', ({(39, 38, 39, 44): '(0, 1)'}, {}), '((0, 1))', False, 'import random\n'), ((72, 20, 72, 31), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((73, 12, 73, 43), 'bm_alg.naive_match', 'naive_match', ({(73, 24, 73, 33): 'test_case', (73, 35, 73, 42): 'PATTERN'}, {}), '(test_case, PATTERN)', False, 'from bm_alg import boyer_moore_match, naive_match\n'), ((76, 20, 76, 31), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((77, 12, 77, 49), 'bm_alg.boyer_moore_match', 'boyer_moore_match', ({(77, 30, 77, 39): 'test_case', (77, 41, 77, 48): 'PATTERN'}, {}), '(test_case, PATTERN)', False, 'from bm_alg import boyer_moore_match, naive_match\n'), ((107, 20, 107, 31), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((108, 12, 108, 40), 'bm_alg.naive_match', 'naive_match', ({(108, 24, 108, 28): 'TEXT', (108, 30, 108, 39): 'test_case'}, {}), '(TEXT, test_case)', False, 'from bm_alg import boyer_moore_match, naive_match\n'), ((111, 20, 111, 31), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((112, 12, 112, 46), 'bm_alg.boyer_moore_match', 'boyer_moore_match', ({(112, 30, 112, 34): 'TEXT', (112, 36, 112, 45): 'test_case'}, {}), '(TEXT, test_case)', False, 'from bm_alg import boyer_moore_match, naive_match\n'), ((43, 23, 43, 60), 'random.choice', 'random.choice', ({(43, 37, 43, 59): 'string.ascii_lowercase'}, {}), '(string.ascii_lowercase)', False, 'import random\n'), ((47, 30, 47, 67), 'random.choice', 'random.choice', ({(47, 44, 47, 66): 'string.ascii_lowercase'}, {}), '(string.ascii_lowercase)', False, 'import random\n'), ((74, 32, 74, 43), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((78, 29, 78, 40), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((109, 32, 109, 43), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((113, 29, 113, 40), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n')]
GMKanat/PP2_spring | TSIS_3/3774.py | 423617d559c5690f689741aaa152b9fee5082baf | ans = dict()
pairs = dict()
def create_tree(p):
if p in ans:
return ans[p]
else:
try:
res = 0
if p in pairs:
for ch in pairs[p]:
res += create_tree(ch) + 1
ans[p] = res
return res
except:
pass
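# Worked example (my annotation): with child/parent pairs ("B","A") and
# ("C","A"), pairs == {"A": ["B", "C"]}; calling create_tree("A") fills ans ==
# {"B": 0, "C": 0, "A": 2}, i.e. each node maps to its descendant count.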
n = int(input())
for i in range(0, n-1):
child, parent = input().split()
if parent in pairs:
pairs[parent].append(child)
else:
pairs[parent] = [child]
if n > 0:
for k in pairs:
create_tree(k)
for key in sorted(ans.keys()):
print(key, ans[key]) | [] |
Dorijan-Cirkveni/Miniprojects | italicizer.py | 2109275c9c1b9f5e7a286604cbb1b7966dff9798 | def italicize(s):
b = False
res = ''
for e in s:
if e == '"':
if b:
res += '{\\i}' + e
else:
res += e + '{i}'
b=not b
else:
res += e
return res
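# Example (my annotation, not in the original file): quotes alternate between
# opening and closing italic tags, so
#   italicize('He said "hi"')  ->  'He said "{i}hi{\i}"'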
def main():
F=open('test_in.txt','r')
X=F.read()
F.close()
print(italicize(X))
return
if __name__ == "__main__":
main()
| [] |
WPRDC/neighborhood-simulacrum | maps/views.py | 46892dfdbc8bc3201e31fee4ee991c49b208753e | import json
from typing import Type, TYPE_CHECKING
from django.core.exceptions import ObjectDoesNotExist
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_page
from rest_framework import viewsets, filters
from rest_framework.exceptions import NotFound
from rest_framework.negotiation import BaseContentNegotiation
from rest_framework.permissions import IsAuthenticatedOrReadOnly, AllowAny
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from indicators.models import Variable, DataViz
from indicators.utils import get_geog_model
from indicators.views import GeoJSONRenderer
from maps.models import DataLayer
from maps.serializers import DataLayerSerializer, DataLayerDetailsSerializer
from profiles.settings import VIEW_CACHE_TTL
if TYPE_CHECKING:
from geo.models import AdminRegion
from indicators.models.viz import MiniMap
class DataLayerViewSet(viewsets.ModelViewSet):
queryset = DataLayer.objects.all()
serializer_class = DataLayerSerializer
permission_classes = [IsAuthenticatedOrReadOnly, ]
filter_backends = [filters.SearchFilter, ]
def get_serializer_class(self):
if self.action == 'list':
return DataLayerSerializer
return DataLayerDetailsSerializer
media_type = 'application/geo+json'
format = 'geojson'
def render(self, data, media_type=None, renderer_context=None):
return json.dumps(data)
class GeoJSONContentNegotiation(BaseContentNegotiation):
"""
Custom content negotiation scheme for GeoJSON files.
`GeoJSONRenderer` is used for downloading geojson files
`JSONRenderer` is used for ajax calls.
"""
def select_parser(self, request, parsers):
return super(GeoJSONContentNegotiation, self).select_parser(request, parsers)
def select_renderer(self, request: Request, renderers, format_suffix=None):
renderer = renderers[0]
if request.query_params.get('download', False):
renderer = GeoJSONRenderer()
return renderer, renderer.media_type
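# Behaviour sketch (my annotation): requests carrying ?download=true are
# rendered by GeoJSONRenderer and served as a geojson file download; all other
# requests fall back to the first configured renderer, e.g. plain JSON for
# ajax calls.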
class GeoJSONDataLayerView(APIView):
permission_classes = [AllowAny, ]
content_negotiation_class = GeoJSONContentNegotiation
@method_decorator(cache_page(VIEW_CACHE_TTL))
def get(self, request: Request, map_slug=None):
try:
data_layer: DataLayer = DataLayer.objects.get(slug=map_slug)
geojson = data_layer.as_geojson()
except KeyError as e:
# when the geog is wrong todo: make 400 malformed with info on available geo types
raise NotFound
except ObjectDoesNotExist as e:
raise NotFound
if request.query_params.get('download', False):
headers = {
'Content-Disposition': f'attachment; filename="{map_slug}.geojson"'
}
return Response(geojson, headers=headers, content_type='application/geo+json')
return Response(geojson)
| [((28, 15, 28, 38), 'maps.models.DataLayer.objects.all', 'DataLayer.objects.all', ({}, {}), '()', False, 'from maps.models import DataLayer\n'), ((42, 15, 42, 31), 'json.dumps', 'json.dumps', ({(42, 26, 42, 30): 'data'}, {}), '(data)', False, 'import json\n'), ((84, 15, 84, 32), 'rest_framework.response.Response', 'Response', ({(84, 24, 84, 31): 'geojson'}, {}), '(geojson)', False, 'from rest_framework.response import Response\n'), ((67, 22, 67, 48), 'django.views.decorators.cache.cache_page', 'cache_page', ({(67, 33, 67, 47): 'VIEW_CACHE_TTL'}, {}), '(VIEW_CACHE_TTL)', False, 'from django.views.decorators.cache import cache_page\n'), ((59, 23, 59, 40), 'indicators.views.GeoJSONRenderer', 'GeoJSONRenderer', ({}, {}), '()', False, 'from indicators.views import GeoJSONRenderer\n'), ((70, 36, 70, 72), 'maps.models.DataLayer.objects.get', 'DataLayer.objects.get', (), '', False, 'from maps.models import DataLayer\n'), ((82, 19, 82, 90), 'rest_framework.response.Response', 'Response', (), '', False, 'from rest_framework.response import Response\n')] |
Kuree/magma | magma/operators.py | be2439aa897768c5810be72e3a55a6f772ac83cf | from magma import _BitType, BitType, BitsType, UIntType, SIntType
class MantleImportError(RuntimeError):
pass
class UndefinedOperatorError(RuntimeError):
pass
def raise_mantle_import_error_unary(self):
raise MantleImportError(
"Operators are not defined until mantle has been imported")
def raise_mantle_import_error_binary(self, other):
raise MantleImportError(
"Operators are not defined until mantle has been imported")
def define_raise_undefined_operator_error(type_str, operator, type_):
if type_ == "unary":
def wrapped(self):
raise UndefinedOperatorError(
f"{operator} is undefined for {type_str}")
else:
assert type_ == "binary"
def wrapped(self, other):
raise UndefinedOperatorError(
f"{operator} is undefined for {type_str}")
return wrapped
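# Sketch of the effect (my annotation): the factory above builds the stubs that
# the loops below attach, so after this module is imported, e.g.
#   _BitType.__add__ raises UndefinedOperatorError("__add__ is undefined for _BitType")
#   BitType.__and__  raises MantleImportError until mantle has been imported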
for op in ("__eq__", "__ne__"):
setattr(_BitType, op, raise_mantle_import_error_binary)
for op in (
"__and__",
"__or__",
"__xor__",
"__invert__",
"__add__",
"__sub__",
"__mul__",
"__div__",
"__lt__",
# __le__ skipped because it's used for assignment on inputs
# "__le__",
"__gt__",
"__ge__"
):
if op == "__invert__":
setattr(_BitType, op,
define_raise_undefined_operator_error("_BitType", op, "unary"))
else:
setattr(
_BitType, op,
define_raise_undefined_operator_error("_BitType", op, "binary"))
for op in ("__and__",
"__or__",
"__xor__",
"__invert__"
):
if op == "__invert__":
setattr(BitType, op, raise_mantle_import_error_unary)
else:
setattr(BitType, op, raise_mantle_import_error_binary)
for op in ("__and__",
"__or__",
"__xor__",
"__invert__",
"__lshift__",
"__rshift__",
):
if op == "__invert__":
setattr(BitsType, op, raise_mantle_import_error_unary)
else:
setattr(BitsType, op, raise_mantle_import_error_binary)
for op in ("__add__",
"__sub__",
"__mul__",
"__div__",
"__lt__",
# __le__ skipped because it's used for assignment on inputs
# "__le__",
"__gt__",
"__ge__"
):
setattr(BitsType, op,
define_raise_undefined_operator_error("BitsType", op, "binary"))
for op in ("__add__",
"__sub__",
"__mul__",
"__div__",
"__lt__",
# __le__ skipped because it's used for assignment on inputs
# "__le__",
"__gt__",
"__ge__"
):
setattr(SIntType, op, raise_mantle_import_error_binary)
setattr(UIntType, op, raise_mantle_import_error_binary)
| [] |
bquantump/sultan | src/sultan/result.py | a46e8dc9b09385a7226f6151134ae2417166f25d | import subprocess
import sys
import time
import traceback
from queue import Queue
from sultan.core import Base
from sultan.echo import Echo
from threading import Thread
class Result(Base):
"""
    Class that encompasses the result of a Popen command.
"""
def __init__(self, process, commands, context, streaming=False, exception=None, halt_on_nonzero=False):
super(Result, self).__init__()
self._process = process
self._commands = commands
self._context = context
self._exception = exception
self.__echo = Echo()
self._streaming = streaming
self.rc = None
self._halt_on_nonzero=halt_on_nonzero
if process and streaming:
self.is_complete = False
self.__stdout = Queue()
self.__stderr = Queue()
self.__stdin = Queue()
self._stdout_t = Thread(target=self.read_output, args=(process.stdout, self.__stdout))
self._stderr_t = Thread(target=self.read_output, args=(process.stderr, self.__stderr))
self._stdin_t = Thread(target=self.write_input)
self._wait_t = Thread(target=self.wait_on_process)
for t in (self._stdout_t, self._stderr_t, self._stdin_t, self._wait_t):
t.daemon = True
t.start()
else:
self.is_complete = True
try:
stdout, stderr = process.communicate()
except:
stdout, stderr = None, None
try:
self.rc = process.returncode
except:
pass
self.__stdout = stdout.strip().splitlines() if stdout else []
self.__stderr = stderr.strip().splitlines() if stderr else []
if self._halt_on_nonzero and self.rc != 0:
print(self.stderr)
raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr)
# self.dump_exception()
def read_output(self, pipe, q):
for line in iter(pipe.readline, b''):
if line:
q.put(line.strip())
elif self.is_complete:
break
else:
time.sleep(0.1)
pipe.close()
def write_input(self):
for line in iter(self.__stdin.get, None):
if line.endswith("\n"):
self._process.stdin.write(line)
else:
self._process.stdin.write(line + "\n")
def wait_on_process(self):
self.rc = self._process.wait()
self.__stdin.put(None)
self.is_complete = True
for t in (self._stdout_t, self._stderr_t, self._stdin_t):
t.join()
if self._halt_on_nonzero and self.rc != 0:
self.dump_exception()
sys.exit()
def dump_exception(self):
if not self._exception:
try:
raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr)
except subprocess.CalledProcessError as e:
self._exception = e
self.__echo.critical("Unable to run '%s'" % self._commands)
# traceback
self.print_traceback()
# standard out
self.print_stdout()
# standard error
self.print_stderr()
# print debug information
self.__display_exception_debug_information()
if self._halt_on_nonzero:
raise self._exception
def __display_exception_debug_information(self):
def echo_debug_info(key):
if self._context and len(self._context) > 0:
self.__echo.warn("\t - %s: %s" % (key, self._context[0].get(key, 'N/A')))
self.__echo.warn("The following are additional information that can be used to debug this exception.")
self.__echo.warn("The following is the context used to run:")
echo_debug_info('cwd')
echo_debug_info('sudo')
echo_debug_info('user')
echo_debug_info('hostname')
echo_debug_info('env')
echo_debug_info('logging')
echo_debug_info('executable')
echo_debug_info('ssh_config')
echo_debug_info('src')
def __str__(self):
return '\n'.join(self.stdout)
def __format_line(self, msg):
return '| %s' % msg
def __format_lines_error(self, lines):
for line in lines:
self.__echo.critical(self.__format_line(line))
def __format_lines_info(self, lines):
for line in lines:
self.__echo.info(self.__format_line(line))
@property
def stdout(self):
"""
Converts stdout string to a list.
"""
if self._streaming:
stdout = []
while not self.__stdout.empty():
try:
line = self.__stdout.get_nowait()
stdout.append(line)
except:
pass
else:
stdout = self.__stdout
return stdout
@property
def stderr(self):
"""
Converts stderr string to a list.
"""
if self._streaming:
stderr = []
while not self.__stderr.empty():
try:
line = self.__stderr.get_nowait()
stderr.append(line)
except:
pass
else:
stderr = self.__stderr
return stderr
def stdin(self, line):
"""
Sends input to stdin.
"""
if self._streaming:
self.__stdin.put(line)
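    # Hypothetical streaming usage (my annotation; the process and command are
    # assumed, not part of the original module):
    #   result = Result(process, commands, context, streaming=True)
    #   result.stdin("yes")       # queued here, written by the writer thread
    #   lines = result.stdout     # drains whatever output has arrived so far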
@property
def traceback(self):
"""
Converts traceback string to a list.
"""
if self._exception:
return traceback.format_exc().split("\n")
else:
return []
@property
def is_success(self):
"""
Returns if the result of the command was a success.
True for success, False for failure.
"""
return self.is_complete and self.rc == 0
@property
def is_failure(self):
"""
Returns if the result of the command was a failure.
        True for failure, False for success.
"""
return self.is_complete and not self.rc == 0
@property
def has_exception(self):
'''
Returns True if self._exception is not empty.
'''
return bool(self._exception)
def print_stdout(self, always_print=False):
"""
Prints the stdout to console - if there is any stdout, otherwise does nothing.
:param always_print: print the stdout, even if there is nothing in the buffer (default: false)
"""
if self.__stdout or always_print:
self.__echo.info("---------------" + "-" * 100)
self.__format_lines_info(self.stdout)
self.__echo.info("---------------" + "-" * 100)
def print_stderr(self, always_print=False):
"""
        Prints the stderr to console - if there is any stderr, otherwise does nothing.
:param always_print: print the stderr, even if there is nothing in the buffer (default: false)
"""
if self.__stderr or always_print:
self.__echo.critical("--{ STDERR }---" + "-" * 100)
self.__format_lines_error(self.stderr)
self.__echo.critical("---------------" + "-" * 100)
def print_traceback(self, always_print=False):
"""
Prints the traceback to console - if there is any traceback, otherwise does nothing.
:param always_print: print the traceback, even if there is nothing in the buffer (default: false)
"""
if self._exception or always_print:
self.__echo.critical("--{ TRACEBACK }" + "-" * 100)
self.__format_lines_error(self.traceback)
self.__echo.critical("---------------" + "-" * 100)
| [((23, 22, 23, 28), 'sultan.echo.Echo', 'Echo', ({}, {}), '()', False, 'from sultan.echo import Echo\n'), ((91, 8, 91, 18), 'sys.exit', 'sys.exit', ({}, {}), '()', False, 'import sys\n'), ((30, 28, 30, 35), 'queue.Queue', 'Queue', ({}, {}), '()', False, 'from queue import Queue\n'), ((31, 28, 31, 35), 'queue.Queue', 'Queue', ({}, {}), '()', False, 'from queue import Queue\n'), ((32, 27, 32, 34), 'queue.Queue', 'Queue', ({}, {}), '()', False, 'from queue import Queue\n'), ((34, 29, 34, 98), 'threading.Thread', 'Thread', (), '', False, 'from threading import Thread\n'), ((35, 29, 35, 98), 'threading.Thread', 'Thread', (), '', False, 'from threading import Thread\n'), ((36, 28, 36, 59), 'threading.Thread', 'Thread', (), '', False, 'from threading import Thread\n'), ((37, 27, 37, 62), 'threading.Thread', 'Thread', (), '', False, 'from threading import Thread\n'), ((71, 16, 71, 31), 'time.sleep', 'time.sleep', ({(71, 27, 71, 30): '(0.1)'}, {}), '(0.1)', False, 'import time\n'), ((200, 19, 200, 41), 'traceback.format_exc', 'traceback.format_exc', ({}, {}), '()', False, 'import traceback\n')] |
orenovadia/great_expectations | great_expectations/cli/datasource.py | 76ef0c4e066227f8b589a1ee6ac885618f65906e | import os
import click
from .util import cli_message
from great_expectations.render import DefaultJinjaPageView
from great_expectations.version import __version__ as __version__
def add_datasource(context):
cli_message(
"""
========== Datasources ==========
See <blue>https://docs.greatexpectations.io/en/latest/core_concepts/datasource.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> for more information about datasources.
""".format(__version__.replace(".", "_"))
)
data_source_selection = click.prompt(
msg_prompt_choose_data_source,
type=click.Choice(["1", "2", "3", "4"]),
show_choices=False
)
cli_message(data_source_selection)
if data_source_selection == "1": # pandas
path = click.prompt(
msg_prompt_filesys_enter_base_path,
# default='/data/',
type=click.Path(
exists=False,
file_okay=False,
dir_okay=True,
readable=True
),
show_default=True
)
if path.startswith("./"):
path = path[2:]
if path.endswith("/"):
basenamepath = path[:-1]
else:
basenamepath = path
default_data_source_name = os.path.basename(basenamepath) + "__dir"
data_source_name = click.prompt(
msg_prompt_datasource_name,
default=default_data_source_name,
show_default=True
)
context.add_datasource(data_source_name, "pandas",
base_directory=os.path.join("..", path))
elif data_source_selection == "2": # sqlalchemy
data_source_name = click.prompt(
msg_prompt_datasource_name, default="mydb", show_default=True)
cli_message(msg_sqlalchemy_config_connection.format(
data_source_name))
drivername = click.prompt("What is the driver for the sqlalchemy connection?", default="postgres",
show_default=True)
host = click.prompt("What is the host for the sqlalchemy connection?", default="localhost",
show_default=True)
port = click.prompt("What is the port for the sqlalchemy connection?", default="5432",
show_default=True)
username = click.prompt("What is the username for the sqlalchemy connection?", default="postgres",
show_default=True)
password = click.prompt("What is the password for the sqlalchemy connection?", default="",
show_default=False, hide_input=True)
database = click.prompt("What is the database name for the sqlalchemy connection?", default="postgres",
show_default=True)
credentials = {
"drivername": drivername,
"host": host,
"port": port,
"username": username,
"password": password,
"database": database
}
context.add_profile_credentials(data_source_name, **credentials)
context.add_datasource(
data_source_name, "sqlalchemy", profile=data_source_name)
elif data_source_selection == "3": # Spark
path = click.prompt(
msg_prompt_filesys_enter_base_path,
default='/data/',
type=click.Path(
exists=True,
file_okay=False,
dir_okay=True,
readable=True
),
show_default=True
)
if path.startswith("./"):
path = path[2:]
if path.endswith("/"):
basenamepath = path[:-1]
default_data_source_name = os.path.basename(basenamepath)
data_source_name = click.prompt(
msg_prompt_datasource_name, default=default_data_source_name, show_default=True)
context.add_datasource(data_source_name, "spark", base_directory=path)
# if data_source_selection == "5": # dbt
# dbt_profile = click.prompt(msg_prompt_dbt_choose_profile)
# log_message(msg_dbt_go_to_notebook, color="blue")
# context.add_datasource("dbt", "dbt", profile=dbt_profile)
if data_source_selection == "4": # None of the above
cli_message(msg_unknown_data_source)
print("Skipping datasource configuration. You can add a datasource later by editing the great_expectations.yml file.")
return None
    if data_source_name is not None:
cli_message(
"""
========== Profiling ==========
Would you like to profile '{0:s}' to create candidate expectations and documentation?
Please note: As of v0.7.0, profiling is still a beta feature in Great Expectations.
This generation of profilers will evaluate the entire data source (without sampling) and may be very time consuming.
As a rule of thumb, we recommend starting with data smaller than 100MB.
To learn more about profiling, visit <blue>https://docs.greatexpectations.io/en/latest/guides/profiling.html?utm_source=cli&utm_medium=init&utm_campaign={1:s}</blue>.
""".format(data_source_name, __version__.replace(".", "_"))
)
if click.confirm("Proceed?",
default=True
):
profiling_results = context.profile_datasource(
data_source_name,
max_data_assets=20
)
print("\nDone.\n\nProfiling results are saved here:")
for profiling_result in profiling_results:
data_asset_name = profiling_result[1]['meta']['data_asset_name']
expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name']
run_id = profiling_result[1]['meta']['run_id']
print(" {0:s}".format(context.get_validation_location(
data_asset_name, expectation_suite_name, run_id)['filepath']))
cli_message(
"""
========== Data Documentation ==========
To generate documentation from the data you just profiled, the profiling results should be moved from
great_expectations/uncommitted (ignored by git) to great_expectations/fixtures.
Before committing, please make sure that this data does not contain sensitive information!
To learn more: <blue>https://docs.greatexpectations.io/en/latest/guides/data_documentation.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue>
""".format(__version__.replace(".", "_"))
)
if click.confirm("Move the profiled data and build HTML documentation?",
default=True
):
cli_message("\nMoving files...")
for profiling_result in profiling_results:
data_asset_name = profiling_result[1]['meta']['data_asset_name']
expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name']
run_id = profiling_result[1]['meta']['run_id']
context.move_validation_to_fixtures(
data_asset_name, expectation_suite_name, run_id)
cli_message("\nDone.")
cli_message("\nBuilding documentation...")
context.render_full_static_site()
cli_message(
"""
To view the generated data documentation, open this file in a web browser:
<green>great_expectations/uncommitted/documentation/index.html</green>
""")
else:
cli_message(
"Okay, skipping HTML documentation for now.`."
)
else:
cli_message(
"Okay, skipping profiling for now. You can always do this later by running `great_expectations profile`."
)
if data_source_selection == "1": # Pandas
cli_message(msg_filesys_go_to_notebook)
elif data_source_selection == "2": # SQL
cli_message(msg_sqlalchemy_go_to_notebook)
elif data_source_selection == "3": # Spark
cli_message(msg_spark_go_to_notebook)
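# Hedged usage sketch: `add_datasource` expects an initialized DataContext.
# The entry point below is an assumption about how a caller obtains one:
#
#   import great_expectations as ge
#   context = ge.data_context.DataContext()
#   add_datasource(context)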
msg_prompt_choose_data_source = """
Configure a datasource:
1. Pandas DataFrame
2. Relational database (SQL)
3. Spark DataFrame
4. Skip datasource configuration
"""
# msg_prompt_dbt_choose_profile = """
# Please specify the name of the dbt profile (from your ~/.dbt/profiles.yml file Great Expectations \
# should use to connect to the database
# """
# msg_dbt_go_to_notebook = """
# To create expectations for your dbt models start Jupyter and open notebook
# great_expectations/notebooks/using_great_expectations_with_dbt.ipynb -
# it will walk you through next steps.
# """
msg_prompt_filesys_enter_base_path = """
Enter the path of the root directory where the data files are stored.
(The path may be either absolute or relative to current directory.)
"""
msg_prompt_datasource_name = """
Give your new data source a short name.
"""
msg_sqlalchemy_config_connection = """
Great Expectations relies on sqlalchemy to connect to relational databases.
Please make sure that you have it installed.
Next, we will configure database credentials and store them in the "{0:s}" section
of this config file: great_expectations/uncommitted/credentials/profiles.yml:
"""
msg_unknown_data_source = """
We are looking for more types of data sources to support.
Please create a GitHub issue here:
https://github.com/great-expectations/great_expectations/issues/new
In the meantime you can see what Great Expectations can do on CSV files.
To create expectations for your CSV files start Jupyter and open notebook
great_expectations/notebooks/using_great_expectations_with_pandas.ipynb -
it will walk you through the next steps.
"""
msg_filesys_go_to_notebook = """
To create expectations for your data, start Jupyter and open a tutorial notebook:
To launch with jupyter notebooks:
<green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green>
To launch with jupyter lab:
<green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green>
"""
msg_sqlalchemy_go_to_notebook = """
To create expectations for your data start Jupyter and open the notebook
that will walk you through next steps.
To launch with jupyter notebooks:
<green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green>
To launch with jupyter lab:
<green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green>
"""
msg_spark_go_to_notebook = """
To create expectations for your data start Jupyter and open the notebook
that will walk you through next steps.
To launch with jupyter notebooks:
<green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green>
To launch with jupyter lab:
<green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green>
"""
| [((46, 27, 50, 9), 'click.prompt', 'click.prompt', (), '', False, 'import click\n'), ((135, 11, 137, 26), 'click.confirm', 'click.confirm', (), '', False, 'import click\n'), ((15, 11, 15, 40), 'great_expectations.version.__version__.replace', '__version__.replace', ({(15, 31, 15, 34): '"""."""', (15, 36, 15, 39): '"""_"""'}, {}), "('.', '_')", True, 'from great_expectations.version import __version__ as __version__\n'), ((19, 13, 19, 47), 'click.Choice', 'click.Choice', ({(19, 26, 19, 46): "['1', '2', '3', '4']"}, {}), "(['1', '2', '3', '4'])", False, 'import click\n'), ((45, 35, 45, 65), 'os.path.basename', 'os.path.basename', ({(45, 52, 45, 64): 'basenamepath'}, {}), '(basenamepath)', False, 'import os\n'), ((56, 27, 57, 74), 'click.prompt', 'click.prompt', (), '', False, 'import click\n'), ((62, 21, 63, 52), 'click.prompt', 'click.prompt', (), '', False, 'import click\n'), ((64, 15, 65, 46), 'click.prompt', 'click.prompt', (), '', False, 'import click\n'), ((66, 15, 67, 46), 'click.prompt', 'click.prompt', (), '', False, 'import click\n'), ((68, 19, 69, 50), 'click.prompt', 'click.prompt', (), '', False, 'import click\n'), ((70, 19, 71, 68), 'click.prompt', 'click.prompt', (), '', False, 'import click\n'), ((72, 19, 73, 50), 'click.prompt', 'click.prompt', (), '', False, 'import click\n'), ((164, 15, 166, 30), 'click.confirm', 'click.confirm', (), '', False, 'import click\n'), ((29, 17, 34, 13), 'click.Path', 'click.Path', (), '', False, 'import click\n'), ((53, 46, 53, 70), 'os.path.join', 'os.path.join', ({(53, 59, 53, 63): '""".."""', (53, 65, 53, 69): 'path'}, {}), "('..', path)", False, 'import os\n'), ((105, 35, 105, 65), 'os.path.basename', 'os.path.basename', ({(105, 52, 105, 64): 'basenamepath'}, {}), '(basenamepath)', False, 'import os\n'), ((106, 27, 107, 92), 'click.prompt', 'click.prompt', (), '', False, 'import click\n'), ((133, 41, 133, 70), 'great_expectations.version.__version__.replace', '__version__.replace', ({(133, 61, 133, 64): '"""."""', (133, 66, 133, 69): '"""_"""'}, {}), "('.', '_')", True, 'from great_expectations.version import __version__ as __version__\n'), ((162, 11, 162, 40), 'great_expectations.version.__version__.replace', '__version__.replace', ({(162, 31, 162, 34): '"""."""', (162, 36, 162, 39): '"""_"""'}, {}), "('.', '_')", True, 'from great_expectations.version import __version__ as __version__\n'), ((92, 17, 97, 13), 'click.Path', 'click.Path', (), '', False, 'import click\n')] |
rgb-24bit/code-library | python/crawler/downloader.py | 8da8336e241e1428b2b46c6939bd5e9eadcf3e68 | # -*- coding: utf-8 -*-
"""
Provide download function by request
"""
from datetime import datetime
import logging
import time
import urllib.parse
import requests
from bs4 import BeautifulSoup
class Throttle(object):
"""Throttle downloading by sleeping between requests to same domain."""
def __init__(self, delay):
# amount of delay between downloads for each domain
self.delay = delay
# timestamp of when a domain was last accessed
self.domains = {}
def wait(self, url):
domain = urllib.parse.urlparse(url).netloc
last_accessed = self.domains.get(domain)
if self.delay > 0 and last_accessed is not None:
            sleep_secs = self.delay - (datetime.now() - last_accessed).total_seconds()
if sleep_secs > 0:
time.sleep(sleep_secs)
self.domains[domain] = datetime.now()
class Downloader(object):
"""Convenient download of web pages or caller to call api.
Args:
delay: Interval between downloads (seconds)
num_retries: Number of retries when downloading errors
timeout: Download timeout
"""
def __init__(self, delay=5, user_agent='awsl', proxies=None, num_retries=1,
timeout=60, cache=None, auth=None):
self.session = requests.Session()
self.session.headers.update({'user-agent': user_agent})
self.session.proxies = proxies
self.session.auth = auth
self.throttle = Throttle(delay)
self.num_retries = num_retries
self.timeout = timeout
self.cache = cache
def get_from_cache(self, request):
"""Try to get the result of the request from the cache."""
result = None
if self.cache:
result = self.cache.get(request.url)
if result and self.num_retries > 0 and 500 <= result['code'] < 600:
result = None
return result
def prepare_request(self, url, params=None):
"""Build requests based on the provided url and parameters."""
request = requests.Request('GET', url, params=params)
return self.session.prepare_request(request)
def send_request(self, request, num_retries):
"""Send request and return response object."""
self.throttle.wait(request.url)
try:
logging.info('Downloading: %s' % request.url)
response = self.session.send(request, timeout=self.timeout)
response.raise_for_status()
except requests.exceptions.HTTPError as e:
            logging.warning('Download error: %s' % e)
if num_retries > 0 and 500 <= response.status_code < 600:
return self.send_request(request, num_retries - 1)
except requests.exceptions.RequestException:
logging.error('Download faild: %s' % request.url)
response = None
return response
def text(self, url, params=None, encoding=None):
"""Download web content in text format or html."""
request = self.prepare_request(url, params)
result = self.get_from_cache(request)
if result is None:
response = self.send_request(request, self.num_retries)
if response:
if encoding:
response.encoding = encoding
result = {'text': response.text, 'code': response.status_code}
if self.cache:
self.cache[request.url] = result
return result['text']
def json(self, url, params=None):
"""Access the api and return the json object."""
request = self.prepare_request(url, params)
result = self.get_from_cache(request)
if result is None:
response = self.send_request(request, self.num_retries)
if response:
result = {'json': response.json(), 'code': response.status_code}
if self.cache:
self.cache[request.url] = result
return result['json']
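# Minimal usage sketch; the URLs below are placeholders, and network access
# is assumed to be available when the commented calls are enabled:
if __name__ == '__main__':
    downloader = Downloader(delay=1)
    # html = downloader.text('https://example.com/')
    # data = downloader.json('https://httpbin.org/get', params={'q': 'test'})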
| [((32, 31, 32, 45), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n'), ((45, 23, 45, 41), 'requests.Session', 'requests.Session', ({}, {}), '()', False, 'import requests\n'), ((65, 18, 65, 61), 'requests.Request', 'requests.Request', (), '', False, 'import requests\n'), ((72, 12, 72, 57), 'logging.info', 'logging.info', ({(72, 25, 72, 56): "('Downloading: %s' % request.url)"}, {}), "('Downloading: %s' % request.url)", False, 'import logging\n'), ((31, 16, 31, 38), 'time.sleep', 'time.sleep', ({(31, 27, 31, 37): 'sleep_secs'}, {}), '(sleep_secs)', False, 'import time\n'), ((76, 12, 76, 50), 'logging.warn', 'logging.warn', ({(76, 25, 76, 49): "('Download error: %s' % e)"}, {}), "('Download error: %s' % e)", False, 'import logging\n'), ((80, 12, 80, 61), 'logging.error', 'logging.error', ({(80, 26, 80, 60): "('Download faild: %s' % request.url)"}, {}), "('Download faild: %s' % request.url)", False, 'import logging\n'), ((29, 39, 29, 53), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n')] |
pisskidney/leetcode | medium/151.py | 08c19cbf3d7afc897908ea05db4ad11a5487f523 | #!/usr/bin/python
class Solution(object):
def reverseWords(self, s):
if s == '':
return s
res = []
i = len(s) - 2
while i >= -1:
if s[i] == ' ' or i == -1:
word = ''
j = i + 1
while j < len(s) and s[j] != ' ':
word += s[j]
j += 1
if word:
res.append(word)
i -= 1
return ' '.join(res)
s = Solution()
print(s.reverseWords('a x'))
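# Expected output: "x a" (the words of "a x" in reverse order).
# Note: the character-by-character `word += s[j]` concatenation makes this
# O(n^2) in the worst case; collecting characters in a list would be linear.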
| [] |
ecederstrand/python-keycloak | src/keycloak/connection.py | 77686a2764a3fcba092d78e02f42a58c7214c30e | # -*- coding: utf-8 -*-
#
# The MIT License (MIT)
#
# Copyright (C) 2017 Marcos Pereira <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
try:
from urllib.parse import urljoin
except ImportError:
from urlparse import urljoin
import requests
from requests.adapters import HTTPAdapter
from .exceptions import KeycloakConnectionError
class ConnectionManager(object):
"""
Represents a simple server connection.
:param base_url: (str) The server URL.
:param headers: (dict) The header parameters of the requests to the server.
:param timeout: (int) Timeout to use for requests to the server.
:param verify: (bool) Verify server SSL.
    :param proxies: (dict) The proxy servers that requests are sent through.
"""
    def __init__(self, base_url, headers=None, timeout=60, verify=True, proxies=None):
        self._base_url = base_url
        self._headers = headers if headers is not None else {}
self._timeout = timeout
self._verify = verify
self._s = requests.Session()
self._s.auth = lambda x: x # don't let requests add auth headers
# retry once to reset connection with Keycloak after tomcat's ConnectionTimeout
# see https://github.com/marcospereirampj/python-keycloak/issues/36
for protocol in ("https://", "http://"):
adapter = HTTPAdapter(max_retries=1)
# adds POST to retry whitelist
allowed_methods = set(adapter.max_retries.allowed_methods)
allowed_methods.add("POST")
adapter.max_retries.allowed_methods = frozenset(allowed_methods)
self._s.mount(protocol, adapter)
if proxies:
self._s.proxies.update(proxies)
def __del__(self):
self._s.close()
@property
def base_url(self):
"""Return base url in use for requests to the server."""
return self._base_url
@base_url.setter
def base_url(self, value):
""" """
self._base_url = value
@property
def timeout(self):
"""Return timeout in use for request to the server."""
return self._timeout
@timeout.setter
def timeout(self, value):
""" """
self._timeout = value
@property
def verify(self):
"""Return verify in use for request to the server."""
return self._verify
@verify.setter
def verify(self, value):
""" """
self._verify = value
@property
def headers(self):
"""Return header request to the server."""
return self._headers
@headers.setter
def headers(self, value):
""" """
self._headers = value
def param_headers(self, key):
"""
Return a specific header parameter.
:param key: (str) Header parameters key.
:returns: If the header parameters exist, return its value.
"""
return self.headers.get(key)
def clean_headers(self):
"""Clear header parameters."""
self.headers = {}
def exist_param_headers(self, key):
"""Check if the parameter exists in the header.
:param key: (str) Header parameters key.
:returns: If the header parameters exist, return True.
"""
return self.param_headers(key) is not None
def add_param_headers(self, key, value):
"""Add a single parameter inside the header.
:param key: (str) Header parameters key.
:param value: (str) Value to be added.
"""
self.headers[key] = value
def del_param_headers(self, key):
"""Remove a specific parameter.
:param key: (str) Key of the header parameters.
"""
self.headers.pop(key, None)
def raw_get(self, path, **kwargs):
"""Submit get request to the path.
:param path: (str) Path for request.
:returns: Response the request.
:raises: HttpError Can't connect to server.
"""
try:
return self._s.get(
urljoin(self.base_url, path),
params=kwargs,
headers=self.headers,
timeout=self.timeout,
verify=self.verify,
)
except Exception as e:
raise KeycloakConnectionError("Can't connect to server (%s)" % e)
def raw_post(self, path, data, **kwargs):
"""Submit post request to the path.
:param path: (str) Path for request.
:param data: (dict) Payload for request.
:returns: Response the request.
:raises: HttpError Can't connect to server.
"""
try:
return self._s.post(
urljoin(self.base_url, path),
params=kwargs,
data=data,
headers=self.headers,
timeout=self.timeout,
verify=self.verify,
)
except Exception as e:
raise KeycloakConnectionError("Can't connect to server (%s)" % e)
def raw_put(self, path, data, **kwargs):
"""Submit put request to the path.
:param path: (str) Path for request.
:param data: (dict) Payload for request.
:returns: Response the request.
:raises: HttpError Can't connect to server.
"""
try:
return self._s.put(
urljoin(self.base_url, path),
params=kwargs,
data=data,
headers=self.headers,
timeout=self.timeout,
verify=self.verify,
)
except Exception as e:
raise KeycloakConnectionError("Can't connect to server (%s)" % e)
    def raw_delete(self, path, data=None, **kwargs):
"""Submit delete request to the path.
:param path: (str) Path for request.
:param data: (dict) Payload for request.
:returns: Response the request.
:raises: HttpError Can't connect to server.
"""
try:
return self._s.delete(
urljoin(self.base_url, path),
params=kwargs,
data=data,
headers=self.headers,
timeout=self.timeout,
verify=self.verify,
)
except Exception as e:
raise KeycloakConnectionError("Can't connect to server (%s)" % e)
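# --- Hedged usage sketch (server URL and realm path are assumptions) ---
#
#   conn = ConnectionManager(base_url="http://localhost:8080/auth/")
#   conn.add_param_headers("Authorization", "Bearer <access_token>")
#   realm_info = conn.raw_get("realms/master")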
| [((51, 18, 51, 36), 'requests.Session', 'requests.Session', ({}, {}), '()', False, 'import requests\n'), ((57, 22, 57, 48), 'requests.adapters.HTTPAdapter', 'HTTPAdapter', (), '', False, 'from requests.adapters import HTTPAdapter\n'), ((157, 16, 157, 44), 'urlparse.urljoin', 'urljoin', ({(157, 24, 157, 37): 'self.base_url', (157, 39, 157, 43): 'path'}, {}), '(self.base_url, path)', False, 'from urlparse import urljoin\n'), ((176, 16, 176, 44), 'urlparse.urljoin', 'urljoin', ({(176, 24, 176, 37): 'self.base_url', (176, 39, 176, 43): 'path'}, {}), '(self.base_url, path)', False, 'from urlparse import urljoin\n'), ((196, 16, 196, 44), 'urlparse.urljoin', 'urljoin', ({(196, 24, 196, 37): 'self.base_url', (196, 39, 196, 43): 'path'}, {}), '(self.base_url, path)', False, 'from urlparse import urljoin\n'), ((216, 16, 216, 44), 'urlparse.urljoin', 'urljoin', ({(216, 24, 216, 37): 'self.base_url', (216, 39, 216, 43): 'path'}, {}), '(self.base_url, path)', False, 'from urlparse import urljoin\n')] |
Valokoodari/advent-of-code | 2020/23.py | c664987f739e0b07ddad34bad87d56768556a5a5 | #!venv/bin/python3
cs = [int(c) for c in open("inputs/23.in", "r").readline().strip()]
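# Crab Cups (Advent of Code 2020, day 23): `f` plays `ts` moves of the cup
# game over the circle `cs` and returns both answers: the labels after cup 1
# (part 1) and the product of the two cups clockwise of cup 1 (part 2).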
def f(cs, ts):
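    # `p` is the circle as a successor map: p[label] -> label of the next cup
    # clockwise; `cc` starts at the last cup so the first move advances to cs[0].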
p,cc = {n: cs[(i+1)%len(cs)] for i,n in enumerate(cs)},cs[-1]
for _ in range(ts):
cc,dc = p[cc],p[cc]-1 if p[cc]-1 > 0 else max(p.keys())
hc,p[cc] = [p[cc], p[p[cc]], p[p[p[cc]]]],p[p[p[p[cc]]]]
while dc in hc:
dc -= 1
if dc < 1:
dc = max(p.keys())
p[dc],p[hc[-1]] = hc[0],p[dc]
a,n = [],1
for _ in range(8):
n = p[n]
a.append(str(n))
return "".join(a), p[1] * p[p[1]]
print("Part 1:", f(cs.copy(), 100)[0])
print("Part 2:", f(cs.copy() + [i for i in range(10, 1000001)], 10000000)[1]) | [] |
jakewright/home-automation-device-registry | run.py | b073966b1dc259a6997c47f8d369f51dee9cbbf3 | # Import the application
from device_registry import app
# Run the application in debug mode
app.run(host='0.0.0.0', port=int(app.config['PORT']), debug=True)
| [] |
Abrosimov-a-a/dvc | dvc/utils/stage.py | 93280c937b9160003afb0d2f3fd473c03d6d9673 | import yaml
from ruamel.yaml import YAML
from ruamel.yaml.error import YAMLError
try:
from yaml import CSafeLoader as SafeLoader
except ImportError:
from yaml import SafeLoader
from dvc.exceptions import StageFileCorruptedError
from dvc.utils.compat import open
def load_stage_file(path):
with open(path, "r", encoding="utf-8") as fd:
return parse_stage(fd.read(), path)
def parse_stage(text, path):
try:
return yaml.load(text, Loader=SafeLoader) or {}
except yaml.error.YAMLError as exc:
raise StageFileCorruptedError(path, cause=exc)
def parse_stage_for_update(text, path):
"""Parses text into Python structure.
Unlike `parse_stage()` this returns ordered dicts, values have special
attributes to store comments and line breaks. This allows us to preserve
all of those upon dump.
This one is, however, several times slower than simple `parse_stage()`.
"""
try:
        ruamel_yaml = YAML()  # avoid shadowing the module-level `yaml` import
        return ruamel_yaml.load(text) or {}
except YAMLError as exc:
raise StageFileCorruptedError(path, cause=exc)
def dump_stage_file(path, data):
with open(path, "w", encoding="utf-8") as fd:
        ruamel_yaml = YAML()  # avoid shadowing the module-level `yaml` import
        ruamel_yaml.default_flow_style = False
        ruamel_yaml.dump(data, fd)
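# Round-trip sketch (file name and keys below are illustrative):
#
#   data = parse_stage_for_update("cmd: python train.py\n", "Dvcfile")
#   data["cmd"] = "python train.py --epochs 10"
#   dump_stage_file("Dvcfile", data)  # comments and key order are preserved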
| [((15, 9, 15, 42), 'dvc.utils.compat.open', 'open', (), '', False, 'from dvc.utils.compat import open\n'), ((36, 15, 36, 21), 'ruamel.yaml.YAML', 'YAML', ({}, {}), '()', False, 'from ruamel.yaml import YAML\n'), ((43, 9, 43, 42), 'dvc.utils.compat.open', 'open', (), '', False, 'from dvc.utils.compat import open\n'), ((44, 15, 44, 21), 'ruamel.yaml.YAML', 'YAML', ({}, {}), '()', False, 'from ruamel.yaml import YAML\n'), ((23, 14, 23, 54), 'dvc.exceptions.StageFileCorruptedError', 'StageFileCorruptedError', (), '', False, 'from dvc.exceptions import StageFileCorruptedError\n'), ((39, 14, 39, 54), 'dvc.exceptions.StageFileCorruptedError', 'StageFileCorruptedError', (), '', False, 'from dvc.exceptions import StageFileCorruptedError\n')] |
Arguel/old-projects | CAMPODETIRO/test.py | 2e5f594a6303b2e137acf555569eca98aab08054 | entrada = input("palabra")
listaDeLetras = []
for i in entrada:
listaDeLetras.append(i)
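# Note: the loop above is equivalent to `listaDeLetras = list(entrada)`,
# since iterating over a string yields its characters one by one.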
| [] |
fire-breathing-rubber-lemons/cs207-FinalProject | demos/nn_classification_demo.py | 92d1d7d70637e2478effb01c9ce56199e0f873c9 | import numpy as np
from pyad.nn import NeuralNet
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
np.random.seed(0)
data = load_breast_cancer()
X_train, X_test, y_train, y_test = train_test_split(
data.data, data.target, train_size=0.8, random_state=0
)
nn = NeuralNet(loss_fn='cross_entropy')
nn.add_layer(X_train.shape[1], 100, activation='linear')
nn.add_layer(100, 100, activation='logistic')
nn.add_layer(100, 1 + np.max(y_train), activation='linear')
nn.train(
X_train, y_train, X_test, y_test,
batch_size=1, learning_rate=1e-3, epochs=20
)
print('Predictions:', nn.predict(X_test))
| [((6, 0, 6, 17), 'numpy.random.seed', 'np.random.seed', ({(6, 15, 6, 16): '(0)'}, {}), '(0)', True, 'import numpy as np\n'), ((7, 7, 7, 27), 'sklearn.datasets.load_breast_cancer', 'load_breast_cancer', ({}, {}), '()', False, 'from sklearn.datasets import load_breast_cancer\n'), ((9, 35, 11, 1), 'sklearn.model_selection.train_test_split', 'train_test_split', (), '', False, 'from sklearn.model_selection import train_test_split\n'), ((13, 5, 13, 39), 'pyad.nn.NeuralNet', 'NeuralNet', (), '', False, 'from pyad.nn import NeuralNet\n'), ((16, 22, 16, 37), 'numpy.max', 'np.max', ({(16, 29, 16, 36): 'y_train'}, {}), '(y_train)', True, 'import numpy as np\n')] |
zobclub/chapter8 | mgatemp.py | fbd9e8711747b7446f75b472bae1465fe0ab495c | from microbit import *
I2CADR = 0x0E
DIE_TEMP = 0x0F
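# Assumption: 0x0E is the sensor's I2C address and 0x0F its die-temperature
# register. The register returns one signed (two's complement) byte, which
# the loop below decodes and then offsets by +10 degrees C as a calibration.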
while True:
i2c.write(I2CADR, bytearray([DIE_TEMP]))
d = i2c.read(I2CADR, 1)
x = d[0]
    if x >= 128:
x -= 256
x += 10
print(x)
sleep(500) | [] |
splovyt/SFPython-Project-Night | utils/nlp.py | 50f20f581e074401d59d91457bac2a69631bef61 | import ssl
import nltk
from textblob import TextBlob
from nltk.corpus import stopwords
# set SSL
try:
_create_unverified_https_context = ssl._create_unverified_context
except AttributeError:
pass
else:
ssl._create_default_https_context = _create_unverified_https_context
# download noun data (if required)
nltk.download('brown')
nltk.download('punkt')
nltk.download('stopwords')
def extract_nouns(sentence):
"""Extract the nouns from a sentence using the 'textblob' library."""
blob = TextBlob(sentence)
return blob.noun_phrases
def remove_stopwords(sentence):
"""Remove stopwords from a sentence and return the list of words."""
    blob = TextBlob(sentence)
    stop_words = set(stopwords.words('english'))  # build the stopword set once, not per word
    return [word for word in blob.words if word not in stop_words and len(word) > 2]
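# Quick check (the sentence is illustrative):
#
#   extract_nouns('The quick brown fox jumps over the lazy dog')
#   # -> noun phrases such as ['quick brown fox', 'lazy dog']
#   remove_stopwords('The quick brown fox jumps over the lazy dog')
#   # -> content words longer than two characters, e.g. ['quick', 'brown', ...]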
| [((17, 0, 17, 22), 'nltk.download', 'nltk.download', ({(17, 14, 17, 21): '"""brown"""'}, {}), "('brown')", False, 'import nltk\n'), ((18, 0, 18, 22), 'nltk.download', 'nltk.download', ({(18, 14, 18, 21): '"""punkt"""'}, {}), "('punkt')", False, 'import nltk\n'), ((19, 0, 19, 26), 'nltk.download', 'nltk.download', ({(19, 14, 19, 25): '"""stopwords"""'}, {}), "('stopwords')", False, 'import nltk\n'), ((23, 11, 23, 29), 'textblob.TextBlob', 'TextBlob', ({(23, 20, 23, 28): 'sentence'}, {}), '(sentence)', False, 'from textblob import TextBlob\n'), ((28, 11, 28, 29), 'textblob.TextBlob', 'TextBlob', ({(28, 20, 28, 28): 'sentence'}, {}), '(sentence)', False, 'from textblob import TextBlob\n'), ((29, 55, 29, 81), 'nltk.corpus.stopwords.words', 'stopwords.words', ({(29, 71, 29, 80): '"""english"""'}, {}), "('english')", False, 'from nltk.corpus import stopwords\n')] |
akshedu/toolbox | toolbox/core/management/commands/celery_beat_resource_scraper.py | 7c647433b68f1098ee4c8623f836f74785dc970c |
from django_celery_beat.models import PeriodicTask, IntervalSchedule
from django.core.management.base import BaseCommand
from django.db import IntegrityError
class Command(BaseCommand):
def handle(self, *args, **options):
try:
schedule_channel, created = IntervalSchedule.objects.get_or_create(
every=4,
period=IntervalSchedule.HOURS,
)
except IntegrityError as e:
pass
try:
schedule_video, created = IntervalSchedule.objects.get_or_create(
every=6,
period=IntervalSchedule.HOURS,
)
except IntegrityError as e:
pass
try:
PeriodicTask.objects.create(
interval=schedule_channel,
name='Scrape Channels',
task='toolbox.scraper.tasks.scrape_youtube_channels',
)
except IntegrityError as e:
pass
try:
PeriodicTask.objects.create(
interval=schedule_video,
name='Scrape Videos',
task='toolbox.scraper.tasks.scrape_youtube_videos',
)
except IntegrityError as e:
pass
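# Registered as a Django management command by its location under
# management/commands/, so it is presumably invoked once as:
#
#   python manage.py celery_beat_resource_scraper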
| [((10, 40, 13, 33), 'django_celery_beat.models.IntervalSchedule.objects.get_or_create', 'IntervalSchedule.objects.get_or_create', (), '', False, 'from django_celery_beat.models import PeriodicTask, IntervalSchedule\n'), ((18, 38, 21, 33), 'django_celery_beat.models.IntervalSchedule.objects.get_or_create', 'IntervalSchedule.objects.get_or_create', (), '', False, 'from django_celery_beat.models import PeriodicTask, IntervalSchedule\n'), ((26, 12, 30, 13), 'django_celery_beat.models.PeriodicTask.objects.create', 'PeriodicTask.objects.create', (), '', False, 'from django_celery_beat.models import PeriodicTask, IntervalSchedule\n'), ((35, 12, 39, 13), 'django_celery_beat.models.PeriodicTask.objects.create', 'PeriodicTask.objects.create', (), '', False, 'from django_celery_beat.models import PeriodicTask, IntervalSchedule\n')] |
zhusonghe/PaddleClas-1 | ppcls/data/preprocess/__init__.py | e2e492f9c78ed5084cc50d7c45eef4cc41e1eeaf | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ppcls.data.preprocess.ops.autoaugment import ImageNetPolicy as RawImageNetPolicy
from ppcls.data.preprocess.ops.randaugment import RandAugment as RawRandAugment
from ppcls.data.preprocess.ops.timm_autoaugment import RawTimmAutoAugment
from ppcls.data.preprocess.ops.cutout import Cutout
from ppcls.data.preprocess.ops.hide_and_seek import HideAndSeek
from ppcls.data.preprocess.ops.random_erasing import RandomErasing
from ppcls.data.preprocess.ops.grid import GridMask
from ppcls.data.preprocess.ops.operators import DecodeImage
from ppcls.data.preprocess.ops.operators import ResizeImage
from ppcls.data.preprocess.ops.operators import CropImage
from ppcls.data.preprocess.ops.operators import RandCropImage
from ppcls.data.preprocess.ops.operators import RandFlipImage
from ppcls.data.preprocess.ops.operators import NormalizeImage
from ppcls.data.preprocess.ops.operators import ToCHWImage
from ppcls.data.preprocess.ops.operators import AugMix
from ppcls.data.preprocess.batch_ops.batch_operators import MixupOperator, CutmixOperator, OpSampler, FmixOperator
import numpy as np
from PIL import Image
def transform(data, ops=[]):
""" transform """
for op in ops:
data = op(data)
return data
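# Example pipeline sketch (op arguments shown are assumptions about
# reasonable defaults, not values mandated by this module):
#
#   ops = [
#       DecodeImage(),               # raw bytes -> ndarray
#       RandCropImage(size=224),     # random crop for training
#       RandFlipImage(),             # random horizontal flip
#       NormalizeImage(),            # scale/normalize pixel values
#       ToCHWImage(),                # HWC -> CHW layout
#   ]
#   img = transform(image_bytes, ops)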
class AutoAugment(RawImageNetPolicy):
""" ImageNetPolicy wrapper to auto fit different img types """
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __call__(self, img):
if not isinstance(img, Image.Image):
img = np.ascontiguousarray(img)
img = Image.fromarray(img)
img = super().__call__(img)
if isinstance(img, Image.Image):
img = np.asarray(img)
return img
class RandAugment(RawRandAugment):
""" RandAugment wrapper to auto fit different img types """
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __call__(self, img):
if not isinstance(img, Image.Image):
img = np.ascontiguousarray(img)
img = Image.fromarray(img)
img = super().__call__(img)
if isinstance(img, Image.Image):
img = np.asarray(img)
return img
class TimmAutoAugment(RawTimmAutoAugment):
""" TimmAutoAugment wrapper to auto fit different img tyeps. """
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __call__(self, img):
if not isinstance(img, Image.Image):
img = np.ascontiguousarray(img)
img = Image.fromarray(img)
img = super().__call__(img)
if isinstance(img, Image.Image):
img = np.asarray(img)
return img
| [((54, 18, 54, 43), 'numpy.ascontiguousarray', 'np.ascontiguousarray', ({(54, 39, 54, 42): 'img'}, {}), '(img)', True, 'import numpy as np\n'), ((55, 18, 55, 38), 'PIL.Image.fromarray', 'Image.fromarray', ({(55, 34, 55, 37): 'img'}, {}), '(img)', False, 'from PIL import Image\n'), ((60, 18, 60, 33), 'numpy.asarray', 'np.asarray', ({(60, 29, 60, 32): 'img'}, {}), '(img)', True, 'import numpy as np\n'), ((73, 18, 73, 43), 'numpy.ascontiguousarray', 'np.ascontiguousarray', ({(73, 39, 73, 42): 'img'}, {}), '(img)', True, 'import numpy as np\n'), ((74, 18, 74, 38), 'PIL.Image.fromarray', 'Image.fromarray', ({(74, 34, 74, 37): 'img'}, {}), '(img)', False, 'from PIL import Image\n'), ((79, 18, 79, 33), 'numpy.asarray', 'np.asarray', ({(79, 29, 79, 32): 'img'}, {}), '(img)', True, 'import numpy as np\n'), ((92, 18, 92, 43), 'numpy.ascontiguousarray', 'np.ascontiguousarray', ({(92, 39, 92, 42): 'img'}, {}), '(img)', True, 'import numpy as np\n'), ((93, 18, 93, 38), 'PIL.Image.fromarray', 'Image.fromarray', ({(93, 34, 93, 37): 'img'}, {}), '(img)', False, 'from PIL import Image\n'), ((98, 18, 98, 33), 'numpy.asarray', 'np.asarray', ({(98, 29, 98, 32): 'img'}, {}), '(img)', True, 'import numpy as np\n')] |
scheeloong/lindaedynamics_icml2018 | src/scalar_net/visualisations.py | d03b450e254d33b019161a3cd015e44aafe407cb | # required modules
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from matplotlib import cm
from matplotlib.colors import Normalize
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.animation import FuncAnimation
# two-dimesional version
def plot_mse_loss_surface_2d(fig, ax, x, y, v=0.0, l2=0.0, w1_range=(-2, 2), w2_range=(2, -2)):
# create weight space
n_w = 100
w1 = np.linspace(w1_range[0], w1_range[1], num=n_w) # weight 1
w2 = np.linspace(w2_range[0], w2_range[1], num=n_w) # weight 2
ws_x, ws_y = np.meshgrid(w1, w2)
cost_ws = np.zeros((n_w, n_w)) # initialize cost matrix
# Fill the cost matrix for each combination of weights
for i in range(n_w):
for j in range(n_w):
y_pred = ws_x[i, j] * ws_y[i, j] * x
y_true = y
cost_ws[i, j] = 0.5 * (y_true - y_pred)**2 + \
0.5 * l2 * (ws_x[i, j]**2 + ws_y[i, j]**2) + 0.5 * v * (ws_x[i, j]*ws_y[i, j])**2
# compute gradients
dy, dx = np.gradient(cost_ws)
# plot vector space
skip = (slice(None, None, 5), slice(None, None, 5))
# fig, ax = plt.subplots(figsize=(8, 8))
#ax.contour(ws_x, ws_y, cost_ws, 200)
im = ax.imshow(cost_ws, extent=[ws_x.min(), ws_x.max(
), ws_y.min(), ws_y.max()], cmap=cm.coolwarm)
ax.quiver(ws_x[skip], ws_y[skip], -dx[skip], dy[skip], cost_ws[skip])
cbar = fig.colorbar(im, ax=ax)
# ax.set(aspect=1, title='Loss Surface')
cbar.ax.set_ylabel('$Loss$', fontsize=15)
ax.set_xlabel('$w_1$', fontsize=15)
ax.set_ylabel('$w_2$', fontsize=15)
# ax.grid()
# add saddle point
ax.scatter(0, 0, label='Saddle point', c='red', marker='*')
# ax.scatter(0,0, c='black', marker=r'$\rightarrow$', label='Negative gradient')
settings = (x, y, v, l2, w1_range, w2_range)
return ax, settings
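# Usage sketch for the 2-D surface (x=1, y=1 matches the demos below):
#
#   fig, ax = plt.subplots(figsize=(8, 8))
#   ax, settings = plot_mse_loss_surface_2d(fig, ax, 1, 1)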
# three-dimensional version
def plot_mse_loss_surface_3d(ax, x, y, v=0.0, l2=0.0, w1_range=(-2, 2), w2_range=(2, -2), angle=30):
# create weight space
n_w = 100
w1 = np.linspace(w1_range[0], w1_range[1], num=n_w) # weight 1
w2 = np.linspace(w2_range[0], w2_range[1], num=n_w) # weight 2
ws_x, ws_y = np.meshgrid(w1, w2)
cost_ws = np.zeros((n_w, n_w)) # initialize cost matrix
# Fill the cost matrix for each combination of weights
for i in range(n_w):
for j in range(n_w):
y_pred = ws_x[i, j] * ws_y[i, j] * x
y_true = y
cost_ws[i, j] = 0.5 * (y_true - y_pred)**2 + \
0.5 * l2 * (ws_x[i, j]**2 + ws_y[i, j]**2) + 0.5 * v * (ws_x[i, j]*ws_y[i, j])**2
X = ws_x
Y = ws_y
Z = cost_ws
#fig, ax = plt.subplots(figsize=(8, 8))
#ax = fig.add_subplot(1,1,1, projection='3d')
# fourth dimention - colormap
# create colormap according to x-value (can use any 50x50 array)
color_dimension = Z # change to desired fourth dimension
minn, maxx = color_dimension.min(), color_dimension.max()
norm = Normalize(minn, maxx)
m = plt.cm.ScalarMappable(norm=norm, cmap='jet')
m.set_array([])
fcolors = m.to_rgba(color_dimension)
# plot
# fig = plt.figure(figsize=(8, 8))
# ax = fig.gca(projection='3d')
ax.set_zlim(0, 50)
ax.plot([0], [0], 'ro', c='red', marker='*', label='Saddle point')
ax.plot_surface(X, Y, Z, rstride=1, cstride=1, facecolors=fcolors,
vmin=minn, vmax=maxx, shade=False, alpha=1)
ax.set_xlabel('$w_1$', fontsize=20)
ax.set_ylabel('$w_2$', fontsize=20)
ax.set_zlabel('$Loss$', fontsize=20)
settings = (x, y, v, l2, w1_range, w2_range)
ax.view_init(angle, 10)
return ax, settings
def plot_global_minimum_manifold_2d(ax, settings):
# retieve cached settings
x, y, v, l2, w1_range, w2_range = settings
n_w = 1000
man_w1 = np.linspace(w1_range[0], w1_range[1], num=n_w)
man_w2 = np.linspace(w2_range[0], w2_range[1], num=n_w)
man_ws_x, man_ws_y = np.meshgrid(man_w1, man_w2)
    loss = 0.5 * y * (1 - man_ws_x * man_ws_y * x)**2 + \
0.5 * l2 * (man_ws_x**2 + man_ws_y**2) + 0.5 * v * (man_ws_x * man_ws_y)**2
min_loss = np.min(loss)
manifold_indices = loss < min_loss + 1e-5
manifold_x = man_ws_x[manifold_indices]
manifold_y = man_ws_y[manifold_indices]
# plot manifold of global minima
ax.scatter(manifold_y, manifold_x, s=0.1, c='cyan',
label='Manifold of global minima')
def plot_global_minimum_manifold_3d(ax, settings):
# retieve cached settings
x, y, v, l2, w1_range, w2_range = settings
n_w = 1000
man_w1 = np.linspace(w1_range[0], w1_range[1], num=n_w)
man_w2 = np.linspace(w2_range[0], w2_range[1], num=n_w)
man_ws_x, man_ws_y = np.meshgrid(man_w1, man_w2)
loss = 0.5 * y * (1 - man_ws_x * man_ws_y * x)**2 + \
0.5 * l2 * (man_ws_x**2 + man_ws_y**2) + 0.5 * v * (man_ws_x*man_ws_y)**2
min_loss = np.min(loss)
manifold_indices = loss < min_loss + 1e-5
manifold_x = man_ws_x[manifold_indices]
manifold_y = man_ws_y[manifold_indices]
pos = np.where(np.abs(np.diff(manifold_y)) >= 0.1)[0]+1
x = np.insert(manifold_x, pos, np.nan)
y = np.insert(manifold_y, pos, np.nan)
# plot manifold of global minima
#ax.scatter(manifold_y, manifold_x, 0, s=0.5, c='cyan',
# label='Manifold of global minima')
ax.plot(y, x, c='cyan',
label='Manifold of global minima')
def plot_optimiser_trajectory_2d(ax, weights, **kwargs):
w1_vals = weights['w1']
w2_vals = weights['w2']
ax.plot(w1_vals, w2_vals, **kwargs)
def plot_optimiser_trajectory_3d(ax, settings, weights, **kwargs):
x, y, v, l2, _, _ = settings
w1_vals = np.array(weights['w1'])
w2_vals = np.array(weights['w2'])
loss = 0.5 * y * (1 - w1_vals * w2_vals * x)**2 + \
0.5 * l2 * (w1_vals**2 + w2_vals**2) + 0.5 * v * (w1_vals*w2_vals)**2
ax.plot(w1_vals, w2_vals, loss, **kwargs)
def plot_optimiser_trajectory(x, y, weights, dim='2d', angle=45, manifold=False, **kwargs):
    if dim == '3d':
        fig = plt.figure(figsize=(8, 8))
        ax = fig.add_subplot(1, 1, 1, projection='3d')
        ax, settings = plot_mse_loss_surface_3d(ax, x, y, angle=angle)
        if manifold:
            plot_global_minimum_manifold_3d(ax, settings)
        plot_optimiser_trajectory_3d(ax, settings, weights, **kwargs)
    else:
        fig, ax = plt.subplots(figsize=(8, 8))
        ax, settings = plot_mse_loss_surface_2d(fig, ax, x, y)
        if manifold:
            plot_global_minimum_manifold_2d(ax, settings)
        plot_optimiser_trajectory_2d(ax, weights, **kwargs)
def plot_weight_norm(ax, weights, **kwargs):
w1_vals = np.array(weights['w1'])
w2_vals = np.array(weights['w2'])
epochs = np.arange(0, len(w1_vals), 1)
norms = np.sqrt(w1_vals**2 + w2_vals**2)
ax.set_xlabel('Epoch', fontsize=12)
ax.set_ylabel('Weight norm', fontsize=12)
ax.plot(epochs, norms, linewidth=2.0, **kwargs)
def animate_optimiser_trajectory_2d(i, ax, weights, **kwargs):
w1_vals = weights['w1']
w2_vals = weights['w2']
ax.plot(w1_vals[:i], w2_vals[:i], **kwargs)
return ax
def animate_optimiser_trajectory_3d(i, ax, settings, weights, **kwargs):
x, y, v, l2, _, _ = settings
w1_vals = np.array(weights['w1'])
w2_vals = np.array(weights['w2'])
loss = 0.5 * y * (1 - w1_vals * w2_vals * x)**2 + \
0.5 * l2 * (w1_vals**2 + w2_vals**2) + 0.5 * v * (w1_vals*w2_vals)**2
ax.plot(w1_vals[:i], w2_vals[:i], loss[:i], **kwargs)
return ax
def plot_optimiser_loss(x, y, v, l2, weights, **kwargs):
loss = []
epoch = np.arange(0, len(weights['w1']))
for w1, w2 in zip(weights['w1'], weights['w2']):
loss_val = 0.5 * y * (1 - w1 * w2 * x)**2 + 0.5 * l2 * (w1**2 + w2**2) + 0.5 * v * (w1 * w2)**2
loss.append(loss_val)
plt.plot(epoch, loss, **kwargs)
plt.xlabel('Epoch')
plt.ylabel('Loss')
def plot_interpolated_trajectory_2d(ax, w1_a, w2_a, w1_b, w2_b, start=0, end=1, **kwargs):
alpha = np.arange(start, end, 0.001)
w1_path = []
w2_path = []
for a in alpha:
ww1 = (1 - a) * w1_a + a * w1_b
ww2 = (1 - a) * w2_a + a * w2_b
w1_path.append(ww1)
w2_path.append(ww2)
ax.plot(w1_path, w2_path, **kwargs)
def plot_interpolated_trajectory_3d(ax, settings, w1_a, w2_a, w1_b, w2_b, start=0, end=1, **kwargs):
x, y, _, _ = settings
alpha = np.arange(start, end, 0.001)
w1_path = []
w2_path = []
loss = []
for a in alpha:
ww1 = (1 - a) * w1_a + a * w1_b
ww2 = (1 - a) * w2_a + a * w2_b
loss_val = 0.5 * (y - ww1 * ww2 * x)**2 + 0.5 * l2 * (ww1**2 + ww2**2)
loss.append(loss_val)
w1_path.append(ww1)
w2_path.append(ww2)
ax.plot(w1_path, w2_path, loss, **kwargs)
def plot_interpolated_loss(x, y, w1_a, w2_a, w1_b, w2_b, start=0, end=1, l2=0.0, **kwargs):
alpha = np.arange(start, end, 0.001)
interpolated_loss = []
for a in alpha:
ww1 = (1 - a) * w1_a + a * w1_b
ww2 = (1 - a) * w2_a + a * w2_b
loss_val = 0.5 * (y - ww1 * ww2 * x)**2 + 0.5 * l2 * (ww1**2 + ww2**2)
interpolated_loss.append(loss_val)
plt.plot(alpha, interpolated_loss, **kwargs)
plt.xlabel(r'$\alpha$')
plt.ylabel('Loss')
def plot_learning_dynamics(ax, weights, **kwargs):
epoch = np.arange(0, len(weights['w1']))
scores = []
for w1, w2 in zip(weights['w1'], weights['w2']):
scores.append(w1 * w2)
ax.plot(epoch, scores, **kwargs)
def animate_learning_dynamics(i, ax, weights, y, **kwargs):
n_epoch = len(weights['w1'])
epoch = np.arange(1, n_epoch)
scores = []
for w1, w2 in zip(weights['w1'], weights['w2']):
scores.append(w1 * w2)
ax.set_xlim((1, n_epoch))
ax.set_ylim((0, y))
ax.set_xlabel('Epoch', fontsize=15)
ax.set_ylabel('$w_2 \cdot w_1$', fontsize=15)
ax.plot(epoch[:i], scores[:i], **kwargs)
return ax
def animate_learning(weights, save=False, name='anim'):
gs = gridspec.GridSpec(2, 4)
gs.update(wspace=0.5)
fig = plt.figure(figsize=(12, 8))
ax1 = fig.add_subplot(gs[0, :2], )
ax2 = fig.add_subplot(gs[0, 2:], projection='3d')
ax3 = fig.add_subplot(gs[1, 1:3])
# ax1 = fig.add_subplot(2, 2, 1)
# ax2 = fig.add_subplot(2, 2, 2, projection = '3d')
# ax3 = fig.add_subplot(2, 2, 3)
# ax4 = fig.add_subplot(2, 2, 4)
ax1, settings = plot_mse_loss_surface_2d(ax1, 1, 1)
ax2, settings = plot_mse_loss_surface_3d(ax2, 1, 1, angle=60)
plot_global_minimum_manifold_2d(ax1, settings)
plot_global_minimum_manifold_3d(ax2, settings)
def update(i):
        animate_optimiser_trajectory_2d(
            i, ax1, weights, label='Gradient descent')
        animate_optimiser_trajectory_3d(
            i, ax2, settings, weights, label='Gradient descent')
animate_learning_dynamics(i, ax3, weights, 1)
# animate_weight_norm(i, ax4, scalarNet.history)
# suncAnimation will call the 'update' function for each frame
anim = FuncAnimation(fig, update, frames=100, interval=5, save_count=50)
# HTML(anim.to_html5_video())
if save:
anim.save(name + '.gif', dpi=80, writer='imagemagick')
plt.show()
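# Example call (assumes `weights` is a history dict with lists under
# 'w1' and 'w2', e.g. collected during training):
#
#   animate_learning(weights, save=True, name='training')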
| [((15, 9, 15, 55), 'numpy.linspace', 'np.linspace', (), '', True, 'import numpy as np\n'), ((16, 9, 16, 55), 'numpy.linspace', 'np.linspace', (), '', True, 'import numpy as np\n'), ((17, 17, 17, 36), 'numpy.meshgrid', 'np.meshgrid', ({(17, 29, 17, 31): 'w1', (17, 33, 17, 35): 'w2'}, {}), '(w1, w2)', True, 'import numpy as np\n'), ((18, 14, 18, 34), 'numpy.zeros', 'np.zeros', ({(18, 23, 18, 33): '(n_w, n_w)'}, {}), '((n_w, n_w))', True, 'import numpy as np\n'), ((29, 13, 29, 33), 'numpy.gradient', 'np.gradient', ({(29, 25, 29, 32): 'cost_ws'}, {}), '(cost_ws)', True, 'import numpy as np\n'), ((59, 9, 59, 55), 'numpy.linspace', 'np.linspace', (), '', True, 'import numpy as np\n'), ((60, 9, 60, 55), 'numpy.linspace', 'np.linspace', (), '', True, 'import numpy as np\n'), ((61, 17, 61, 36), 'numpy.meshgrid', 'np.meshgrid', ({(61, 29, 61, 31): 'w1', (61, 33, 61, 35): 'w2'}, {}), '(w1, w2)', True, 'import numpy as np\n'), ((62, 14, 62, 34), 'numpy.zeros', 'np.zeros', ({(62, 23, 62, 33): '(n_w, n_w)'}, {}), '((n_w, n_w))', True, 'import numpy as np\n'), ((83, 11, 83, 32), 'matplotlib.colors.Normalize', 'Normalize', ({(83, 21, 83, 25): 'minn', (83, 27, 83, 31): 'maxx'}, {}), '(minn, maxx)', False, 'from matplotlib.colors import Normalize\n'), ((84, 8, 84, 52), 'matplotlib.pyplot.cm.ScalarMappable', 'plt.cm.ScalarMappable', (), '', True, 'import matplotlib.pyplot as plt\n'), ((111, 13, 111, 59), 'numpy.linspace', 'np.linspace', (), '', True, 'import numpy as np\n'), ((112, 13, 112, 59), 'numpy.linspace', 'np.linspace', (), '', True, 'import numpy as np\n'), ((113, 25, 113, 52), 'numpy.meshgrid', 'np.meshgrid', ({(113, 37, 113, 43): 'man_w1', (113, 45, 113, 51): 'man_w2'}, {}), '(man_w1, man_w2)', True, 'import numpy as np\n'), ((116, 15, 116, 27), 'numpy.min', 'np.min', ({(116, 22, 116, 26): 'loss'}, {}), '(loss)', True, 'import numpy as np\n'), ((131, 13, 131, 59), 'numpy.linspace', 'np.linspace', (), '', True, 'import numpy as np\n'), ((132, 13, 132, 59), 'numpy.linspace', 'np.linspace', (), '', True, 'import numpy as np\n'), ((133, 25, 133, 52), 'numpy.meshgrid', 'np.meshgrid', ({(133, 37, 133, 43): 'man_w1', (133, 45, 133, 51): 'man_w2'}, {}), '(man_w1, man_w2)', True, 'import numpy as np\n'), ((136, 15, 136, 27), 'numpy.min', 'np.min', ({(136, 22, 136, 26): 'loss'}, {}), '(loss)', True, 'import numpy as np\n'), ((142, 8, 142, 42), 'numpy.insert', 'np.insert', ({(142, 18, 142, 28): 'manifold_x', (142, 30, 142, 33): 'pos', (142, 35, 142, 41): 'np.nan'}, {}), '(manifold_x, pos, np.nan)', True, 'import numpy as np\n'), ((143, 8, 143, 42), 'numpy.insert', 'np.insert', ({(143, 18, 143, 28): 'manifold_y', (143, 30, 143, 33): 'pos', (143, 35, 143, 41): 'np.nan'}, {}), '(manifold_y, pos, np.nan)', True, 'import numpy as np\n'), ((160, 14, 160, 37), 'numpy.array', 'np.array', ({(160, 23, 160, 36): "weights['w1']"}, {}), "(weights['w1'])", True, 'import numpy as np\n'), ((161, 14, 161, 37), 'numpy.array', 'np.array', ({(161, 23, 161, 36): "weights['w2']"}, {}), "(weights['w2'])", True, 'import numpy as np\n'), ((181, 14, 181, 37), 'numpy.array', 'np.array', ({(181, 23, 181, 36): "weights['w1']"}, {}), "(weights['w1'])", True, 'import numpy as np\n'), ((182, 14, 182, 37), 'numpy.array', 'np.array', ({(182, 23, 182, 36): "weights['w2']"}, {}), "(weights['w2'])", True, 'import numpy as np\n'), ((184, 12, 184, 44), 'numpy.sqrt', 'np.sqrt', ({(184, 20, 184, 43): 'w1_vals ** 2 + w2_vals ** 2'}, {}), '(w1_vals ** 2 + w2_vals ** 2)', True, 'import numpy as np\n'), ((199, 14, 199, 37), 'numpy.array', 'np.array', 
({(199, 23, 199, 36): "weights['w1']"}, {}), "(weights['w1'])", True, 'import numpy as np\n'), ((200, 14, 200, 37), 'numpy.array', 'np.array', ({(200, 23, 200, 36): "weights['w2']"}, {}), "(weights['w2'])", True, 'import numpy as np\n'), ((213, 4, 213, 35), 'matplotlib.pyplot.plot', 'plt.plot', ({(213, 13, 213, 18): 'epoch', (213, 20, 213, 24): 'loss'}, {}), '(epoch, loss, **kwargs)', True, 'import matplotlib.pyplot as plt\n'), ((214, 4, 214, 23), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(214, 15, 214, 22): '"""Epoch"""'}, {}), "('Epoch')", True, 'import matplotlib.pyplot as plt\n'), ((215, 4, 215, 22), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(215, 15, 215, 21): '"""Loss"""'}, {}), "('Loss')", True, 'import matplotlib.pyplot as plt\n'), ((219, 12, 219, 40), 'numpy.arange', 'np.arange', ({(219, 22, 219, 27): 'start', (219, 29, 219, 32): 'end', (219, 34, 219, 39): '0.001'}, {}), '(start, end, 0.001)', True, 'import numpy as np\n'), ((232, 12, 232, 40), 'numpy.arange', 'np.arange', ({(232, 22, 232, 27): 'start', (232, 29, 232, 32): 'end', (232, 34, 232, 39): '0.001'}, {}), '(start, end, 0.001)', True, 'import numpy as np\n'), ((247, 12, 247, 40), 'numpy.arange', 'np.arange', ({(247, 22, 247, 27): 'start', (247, 29, 247, 32): 'end', (247, 34, 247, 39): '0.001'}, {}), '(start, end, 0.001)', True, 'import numpy as np\n'), ((254, 4, 254, 48), 'matplotlib.pyplot.plot', 'plt.plot', ({(254, 13, 254, 18): 'alpha', (254, 20, 254, 37): 'interpolated_loss'}, {}), '(alpha, interpolated_loss, **kwargs)', True, 'import matplotlib.pyplot as plt\n'), ((255, 4, 255, 27), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(255, 15, 255, 26): '"""$\\\\alpha$"""'}, {}), "('$\\\\alpha$')", True, 'import matplotlib.pyplot as plt\n'), ((256, 4, 256, 22), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(256, 15, 256, 21): '"""Loss"""'}, {}), "('Loss')", True, 'import matplotlib.pyplot as plt\n'), ((269, 12, 269, 33), 'numpy.arange', 'np.arange', ({(269, 22, 269, 23): '1', (269, 25, 269, 32): 'n_epoch'}, {}), '(1, n_epoch)', True, 'import numpy as np\n'), ((282, 9, 282, 32), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', ({(282, 27, 282, 28): '2', (282, 30, 282, 31): '4'}, {}), '(2, 4)', True, 'import matplotlib.gridspec as gridspec\n'), ((285, 10, 285, 37), 'matplotlib.pyplot.figure', 'plt.figure', (), '', True, 'import matplotlib.pyplot as plt\n'), ((310, 11, 310, 76), 'matplotlib.animation.FuncAnimation', 'FuncAnimation', (), '', False, 'from matplotlib.animation import FuncAnimation\n'), ((315, 4, 315, 14), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((141, 26, 141, 45), 'numpy.diff', 'np.diff', ({(141, 34, 141, 44): 'manifold_y'}, {}), '(manifold_y)', True, 'import numpy as np\n')] |
kshithijiyer/qkeras | tests/qconvolutional_test.py | 78ac608c6dcd84151792a986d03fe7afb17929cf | # Copyright 2019 Google LLC
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test layers from qconvolutional.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from numpy.testing import assert_allclose
import pytest
import tempfile
from tensorflow.keras import backend as K
from tensorflow.keras.layers import Activation
from tensorflow.keras.layers import Flatten
from tensorflow.keras.layers import Input
from tensorflow.keras.models import Model
from tensorflow.keras.backend import clear_session
from qkeras import binary
from qkeras import ternary
from qkeras import QActivation
from qkeras import QDense
from qkeras import QConv1D
from qkeras import QConv2D
from qkeras import QSeparableConv2D
from qkeras import quantized_bits
from qkeras import quantized_relu
from qkeras.utils import model_save_quantized_weights
from qkeras.utils import quantized_model_from_json
from qkeras.utils import load_qmodel
from qkeras import print_qstats
from qkeras import extract_model_operations
# TODO(hzhuang):
# qoctave_conv test
# qbatchnorm test
def test_qnetwork():
x = x_in = Input((28, 28, 1), name='input')
x = QSeparableConv2D(
32, (2, 2),
strides=(2, 2),
depthwise_quantizer=binary(alpha=1.0),
pointwise_quantizer=quantized_bits(4, 0, 1, alpha=1.0),
depthwise_activation=quantized_bits(6, 2, 1, alpha=1.0),
bias_quantizer=quantized_bits(4, 0, 1),
name='conv2d_0_m')(
x)
x = QActivation('quantized_relu(6,2,1)', name='act0_m')(x)
x = QConv2D(
64, (3, 3),
strides=(2, 2),
kernel_quantizer=ternary(alpha=1.0),
bias_quantizer=quantized_bits(4, 0, 1),
name='conv2d_1_m',
activation=quantized_relu(6, 3, 1))(
x)
x = QConv2D(
64, (2, 2),
strides=(2, 2),
kernel_quantizer=quantized_bits(6, 2, 1, alpha=1.0),
bias_quantizer=quantized_bits(4, 0, 1),
name='conv2d_2_m')(
x)
x = QActivation('quantized_relu(6,4,1)', name='act2_m')(x)
x = Flatten(name='flatten')(x)
x = QDense(
10,
kernel_quantizer=quantized_bits(6, 2, 1, alpha=1.0),
bias_quantizer=quantized_bits(4, 0, 1),
name='dense')(
x)
x = Activation('softmax', name='softmax')(x)
model = Model(inputs=[x_in], outputs=[x])
# reload the model to ensure saving/loading works
json_string = model.to_json()
clear_session()
model = quantized_model_from_json(json_string)
# generate same output for weights
np.random.seed(42)
for layer in model.layers:
all_weights = []
for i, weights in enumerate(layer.get_weights()):
input_size = np.prod(layer.input.shape.as_list()[1:])
if input_size is None:
input_size = 576 * 10 # to avoid learning sizes
shape = weights.shape
assert input_size > 0, 'input size for {} {}'.format(layer.name, i)
# he normal initialization with a scale factor of 2.0
all_weights.append(
10.0 * np.random.normal(0.0, np.sqrt(2.0 / input_size), shape))
if all_weights:
layer.set_weights(all_weights)
# apply quantizer to weights
model_save_quantized_weights(model)
all_weights = []
for layer in model.layers:
for i, weights in enumerate(layer.get_weights()):
w = np.sum(weights)
all_weights.append(w)
all_weights = np.array(all_weights)
# test_qnetwork_weight_quantization
all_weights_signature = np.array(
[2., -6.75, -0.625, -2., -0.25, -56., 1.125, -1.625, -1.125])
assert all_weights.size == all_weights_signature.size
assert np.all(all_weights == all_weights_signature)
# test_qnetwork_forward:
expected_output = np.array(
[[0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00,
0.e+00, 1.e+00, 0.e+00, 0.e+00, 0.e+00],
[0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00,
0.e+00, 1.e+00, 0.e+00, 0.e+00, 0.e+00],
[0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00,
0.e+00, 0.e+00, 0.e+00, 6.e-08, 1.e+00],
[0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00,
0.e+00, 1.e+00, 0.e+00, 0.e+00, 0.e+00],
       [0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00,
        0.e+00, 1.e+00, 0.e+00, 0.e+00, 0.e+00],
       [0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00,
        0.e+00, 0.e+00, 0.e+00, 5.e-07, 1.e+00],
       [0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00,
        0.e+00, 1.e+00, 0.e+00, 0.e+00, 0.e+00],
       [0.e+00, 1.e+00, 0.e+00, 0.e+00, 0.e+00,
        0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00],
[0.e+00, 0.e+00, 0.e+00, 0.e+00, 1.e+00,
0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00],
[0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00,
1.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00]]).astype(np.float16)
inputs = 2 * np.random.rand(10, 28, 28, 1)
actual_output = model.predict(inputs).astype(np.float16)
assert_allclose(actual_output, expected_output, rtol=1e-4)
def test_qconv1d():
np.random.seed(33)
x = Input((4, 4,))
y = QConv1D(
2, 1,
kernel_quantizer=quantized_bits(6, 2, 1, alpha=1.0),
bias_quantizer=quantized_bits(4, 0, 1),
name='qconv1d')(
x)
model = Model(inputs=x, outputs=y)
# Extract model operations
model_ops = extract_model_operations(model)
# Assertion about the number of operations for this Conv1D layer
assert model_ops['qconv1d']['number_of_operations'] == 32
# Print qstats to make sure it works with Conv1D layer
print_qstats(model)
# reload the model to ensure saving/loading works
# json_string = model.to_json()
# clear_session()
# model = quantized_model_from_json(json_string)
for layer in model.layers:
all_weights = []
for i, weights in enumerate(layer.get_weights()):
input_size = np.prod(layer.input.shape.as_list()[1:])
if input_size is None:
input_size = 10 * 10
shape = weights.shape
assert input_size > 0, 'input size for {} {}'.format(layer.name, i)
all_weights.append(
10.0 * np.random.normal(0.0, np.sqrt(2.0 / input_size), shape))
if all_weights:
layer.set_weights(all_weights)
# Save the model as an h5 file using Keras's model.save()
fd, fname = tempfile.mkstemp('.h5')
model.save(fname)
del model # Delete the existing model
# Return a compiled model identical to the previous one
model = load_qmodel(fname)
# Clean the created h5 file after loading the model
os.close(fd)
os.remove(fname)
# apply quantizer to weights
model_save_quantized_weights(model)
inputs = np.random.rand(2, 4, 4)
p = model.predict(inputs).astype(np.float16)
y = np.array([[[-2.441, 3.816], [-3.807, -1.426], [-2.684, -1.317],
[-1.659, 0.9834]],
[[-4.99, 1.139], [-2.559, -1.216], [-2.285, 1.905],
[-2.652, -0.467]]]).astype(np.float16)
assert np.all(p == y)
if __name__ == '__main__':
pytest.main([__file__])
 | [((54, 13, 54, 45), 'tensorflow.keras.layers.Input', (), '', False, 'from tensorflow.keras.layers import Input\n'), ((90, 10, 90, 43), 'tensorflow.keras.models.Model', 'Model', (), '', False, 'from tensorflow.keras.models import Model\n'), ((94, 2, 94, 17), 'tensorflow.keras.backend.clear_session', 'clear_session', ({}, {}), '()', False, 'from tensorflow.keras.backend import clear_session\n'), ((95, 10, 95, 48), 'qkeras.utils.quantized_model_from_json', 'quantized_model_from_json', ({(95, 36, 95, 47): 'json_string'}, {}), '(json_string)', False, 'from qkeras.utils import quantized_model_from_json\n'), ((98, 2, 98, 20), 'numpy.random.seed', 'np.random.seed', ({(98, 17, 98, 19): '(42)'}, {}), '(42)', True, 'import numpy as np\n'), ((114, 2, 114, 37), 'qkeras.utils.model_save_quantized_weights', 'model_save_quantized_weights', ({(114, 31, 114, 36): 'model'}, {}), '(model)', False, 'from qkeras.utils import model_save_quantized_weights\n'), ((123, 16, 123, 37), 'numpy.array', 'np.array', ({(123, 25, 123, 36): 'all_weights'}, {}), '(all_weights)', True, 'import numpy as np\n'), ((126, 26, 127, 67), 'numpy.array', 'np.array', ({(127, 6, 127, 66): '[2.0, -6.75, -0.625, -2.0, -0.25, -56.0, 1.125, -1.625, -1.125]'}, {}), '([2.0, -6.75, -0.625, -2.0, -0.25, -56.0, 1.125, -1.625, -1.125])', True, 'import numpy as np\n'), ((130, 9, 130, 53), 'numpy.all', 'np.all', ({(130, 16, 130, 52): '(all_weights == all_weights_signature)'}, {}), '(all_weights == all_weights_signature)', True, 'import numpy as np\n'), ((156, 2, 156, 60), 'numpy.testing.assert_allclose', 'assert_allclose', (), '', False, 'from numpy.testing import assert_allclose\n'), ((160, 2, 160, 20), 'numpy.random.seed', 'np.random.seed', ({(160, 17, 160, 19): '(33)'}, {}), '(33)', True, 'import numpy as np\n'), ((161, 6, 161, 20), 'tensorflow.keras.layers.Input', 'Input', ({(161, 12, 161, 19): '(4, 4)'}, {}), '((4, 4))', False, 'from tensorflow.keras.layers import Input\n'), ((168, 10, 168, 36), 'tensorflow.keras.models.Model', 'Model', (), '', False, 'from tensorflow.keras.models import Model\n'), ((171, 14, 171, 45), 'qkeras.extract_model_operations', 'extract_model_operations', ({(171, 39, 171, 44): 'model'}, {}), '(model)', False, 'from qkeras import extract_model_operations\n'), ((177, 2, 177, 21), 'qkeras.print_qstats', 'print_qstats', ({(177, 15, 177, 20): 'model'}, {}), '(model)', False, 'from qkeras import print_qstats\n'), ((197, 14, 197, 37), 'tempfile.mkstemp', 'tempfile.mkstemp', ({(197, 31, 197, 36): '".h5"'}, {}), "('.h5')", False, 'import tempfile\n'), ((202, 10, 202, 28), 'qkeras.utils.load_qmodel', 'load_qmodel', ({(202, 22, 202, 27): 'fname'}, {}), '(fname)', False, 'from qkeras.utils import load_qmodel\n'), ((205, 2, 205, 14), 'os.close', 'os.close', ({(205, 11, 205, 13): 'fd'}, {}), '(fd)', False, 'import os\n'), ((206, 2, 206, 18), 'os.remove', 'os.remove', ({(206, 12, 206, 17): 'fname'}, {}), '(fname)', False, 'import os\n'), ((209, 2, 209, 37), 'qkeras.utils.model_save_quantized_weights', 'model_save_quantized_weights', ({(209, 31, 209, 36): 'model'}, {}), '(model)', False, 'from qkeras.utils import model_save_quantized_weights\n'), ((211, 11, 211, 34), 'numpy.random.rand', 'np.random.rand', ({(211, 26, 211, 27): '2', (211, 29, 211, 30): '4', (211, 32, 211, 33): '4'}, {}), '(2, 4, 4)', True, 'import numpy as np\n'), ((217, 9, 217, 23), 'numpy.all', 'np.all', ({(217, 16, 217, 22): '(p == y)'}, {}), '(p == y)', True, 'import numpy as np\n'), ((221, 2, 221, 25), 'pytest.main', 'pytest.main', ({(221, 14, 221, 24): '[__file__]'}, {}), '([__file__])', False, 'import pytest\n'), ((64, 6, 64, 57), 'qkeras.QActivation', 'QActivation', (), '', False, 'from qkeras import QActivation\n'), ((80, 6, 80, 57), 'qkeras.QActivation', 'QActivation', (), '', False, 'from qkeras import QActivation\n'), ((81, 6, 81, 29), 'tensorflow.keras.layers.Flatten', 'Flatten', (), '', False, 'from tensorflow.keras.layers import Flatten\n'), ((88, 6, 88, 43), 'tensorflow.keras.layers.Activation', 'Activation', (), '', False, 'from tensorflow.keras.layers import Activation\n'), ((154, 15, 154, 44), 'numpy.random.rand', 'np.random.rand', ({(154, 30, 154, 32): '(10)', (154, 34, 154, 36): '(28)', (154, 38, 154, 40): '(28)', (154, 42, 154, 43): '(1)'}, {}), '(10, 28, 28, 1)', True, 'import numpy as np\n'), ((120, 10, 120, 25), 'numpy.sum', 'np.sum', ({(120, 17, 120, 24): 'weights'}, {}), '(weights)', True, 'import numpy as np\n'), ((133, 20, 153, 48), 'numpy.array', 'np.array', ({(134, 6, 153, 47): '[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, \n 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,\n 6e-08, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, \n 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0,\n 0.0, 0.0, 0.0, 5e-07, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, \n 0.0, 0.0], [0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, \n 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0,\n 1.0, 0.0, 0.0, 0.0, 0.0]]'}, {}), '([[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, 0.0, \n 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0,\n 0.0, 0.0, 6e-08, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, \n 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, 0.0, \n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5e-07, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, \n 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,\n 0.0], [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, \n 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0]])', True, 'import numpy as np\n'), ((213, 6, 216, 36), 'numpy.array', 'np.array', ({(213, 15, 216, 35): '[[[-2.441, 3.816], [-3.807, -1.426], [-2.684, -1.317], [-1.659, 0.9834]], [\n [-4.99, 1.139], [-2.559, -1.216], [-2.285, 1.905], [-2.652, -0.467]]]'}, {}), '([[[-2.441, 3.816], [-3.807, -1.426], [-2.684, -1.317], [-1.659, \n 0.9834]], [[-4.99, 1.139], [-2.559, -1.216], [-2.285, 1.905], [-2.652, \n -0.467]]])', True, 'import numpy as np\n'), ((58, 26, 58, 43), 'qkeras.binary', 'binary', (), '', False, 'from qkeras import binary\n'), ((59, 26, 59, 60), 'qkeras.quantized_bits', 'quantized_bits', (), '', False, 'from qkeras import quantized_bits\n'), ((60, 27, 60, 61), 'qkeras.quantized_bits', 'quantized_bits', (), '', False, 'from qkeras import quantized_bits\n'), ((61, 21, 61, 44), 'qkeras.quantized_bits', 'quantized_bits', ({(61, 36, 61, 37): '4', (61, 39, 61, 40): '0', (61, 42, 61, 43): '1'}, {}), '(4, 0, 1)', False, 'from qkeras import quantized_bits\n'), ((68, 23, 68, 41), 'qkeras.ternary', 'ternary', (), '', False, 'from qkeras import ternary\n'), ((69, 21, 69, 44), 'qkeras.quantized_bits', 'quantized_bits', ({(69, 36, 69, 37): '4', (69, 39, 69, 40): '0', (69, 42, 69, 43): '1'}, {}), '(4, 0, 1)', False, 'from qkeras import quantized_bits\n'), ((71, 17, 71, 40), 'qkeras.quantized_relu', 'quantized_relu', ({(71, 32, 71, 33): '6', (71, 35, 71, 36): '3', (71, 38, 71, 39): '1'}, {}), '(6, 3, 1)', False, 'from qkeras import quantized_relu\n'), ((76, 23, 76, 57), 'qkeras.quantized_bits', 'quantized_bits', (), '', False, 'from qkeras import quantized_bits\n'), ((77, 21, 77, 44), 'qkeras.quantized_bits', 'quantized_bits', ({(77, 36, 77, 37): '4', (77, 39, 77, 40): '0', (77, 42, 77, 43): '1'}, {}), '(4, 0, 1)', False, 'from qkeras import quantized_bits\n'), ((84, 23, 84, 57), 'qkeras.quantized_bits', 'quantized_bits', (), '', False, 'from qkeras import quantized_bits\n'), ((85, 21, 85, 44), 'qkeras.quantized_bits', 'quantized_bits', ({(85, 36, 85, 37): '4', (85, 39, 85, 40): '0', (85, 42, 85, 43): '1'}, {}), '(4, 0, 1)', False, 'from qkeras import quantized_bits\n'), ((164, 23, 164, 57), 'qkeras.quantized_bits', 'quantized_bits', (), '', False, 'from qkeras import quantized_bits\n'), ((165, 21, 165, 44), 'qkeras.quantized_bits', 'quantized_bits', ({(165, 36, 165, 37): '4', (165, 39, 165, 40): '0', (165, 42, 165, 43): '1'}, {}), '(4, 0, 1)', False, 'from qkeras import quantized_bits\n'), ((109, 39, 109, 64), 'numpy.sqrt', 'np.sqrt', ({(109, 47, 109, 63): '(2.0 / input_size)'}, {}), '(2.0 / input_size)', True, 'import numpy as np\n'), ((193, 39, 193, 64), 'numpy.sqrt', 'np.sqrt', ({(193, 47, 193, 63): '(2.0 / input_size)'}, {}), '(2.0 / input_size)', True, 'import numpy as np\n')]
Lapis256/discord-ext-ui | discord/ext/ui/select.py | 593de0a1107d2a0c26023587a2937f00ecec3ed1 | from typing import Optional, List, TypeVar, Generic, Callable
import discord.ui
from .item import Item
from .select_option import SelectOption
from .custom import CustomSelect
def _default_check(_: discord.Interaction) -> bool:
return True
C = TypeVar("C", bound=discord.ui.Select)
class Select(Item, Generic[C]):
def __init__(
self,
placeholder: Optional[str] = None,
min_values: int = 1,
max_values: int = 1,
options: Optional[list] = None,
cls: C = CustomSelect,
custom_id: Optional[str] = None,
) -> None:
self._placeholder: Optional[str] = placeholder
self._min_values: int = min_values
self._max_values: int = max_values
self._options: list = [] if options is None else options
self._row: Optional[int] = None
self.cls: C = cls
self._custom_id: Optional[str] = custom_id
self.func: Optional[Callable] = None
self.check_func: Callable[[discord.Interaction], bool] = _default_check
def placeholder(self, placeholder: str) -> 'Select':
self._placeholder = placeholder
return self
def min_values(self, min_values: int) -> 'Select':
self._min_values = min_values
return self
def max_values(self, max_values: int) -> 'Select':
self._max_values = max_values
return self
def options(self, options: List[SelectOption]) -> 'Select':
self._options = options
return self
def row(self, row: int) -> 'Select':
self._row = row
return self
def on_select(self, func: Callable) -> 'Select':
self.func = func
return self
def custom_id(self, custom_id: str) -> 'Select':
self._custom_id = custom_id
return self
def check(self, func: Callable[[discord.Interaction], bool]) -> 'Select':
self.check_func = func
return self
def to_discord(self) -> C:
return self.cls(
placeholder=self._placeholder,
min_values=self._min_values,
max_values=self._max_values,
options=[o.to_discord_select_option() for o in self._options],
row=self._row,
custom_id=self._custom_id,
check_func=self.check_func,
callback=self.func
)
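# A hedged usage sketch (not part of the original file): each setter above
# returns self, so a Select can be configured fluently and then converted
# into a discord.ui item; `my_options` and `on_pick` are hypothetical names.
#
# select = (
#     Select(placeholder="Pick one")
#     .options(my_options)   # a list of SelectOption instances
#     .max_values(1)
#     .on_select(on_pick)
# )
# component = select.to_discord()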
| [((14, 4, 14, 41), 'typing.TypeVar', 'TypeVar', (), '', False, 'from typing import Optional, List, TypeVar, Generic, Callable\n')] |
parag-may4/ucscsdk | ucscsdk/mometa/storage/StorageScsiLunRef.py | 2ea762fa070330e3a4e2c21b46b157469555405b | """This module contains the general information for StorageScsiLunRef ManagedObject."""
from ...ucscmo import ManagedObject
from ...ucsccoremeta import UcscVersion, MoPropertyMeta, MoMeta
from ...ucscmeta import VersionMeta
class StorageScsiLunRefConsts():
pass
class StorageScsiLunRef(ManagedObject):
"""This is StorageScsiLunRef class."""
consts = StorageScsiLunRefConsts()
naming_props = set([u'id'])
mo_meta = MoMeta("StorageScsiLunRef", "storageScsiLunRef", "scsi-lun-ref-[id]", VersionMeta.Version131a, "InputOutput", 0x1f, [], ["read-only"], [u'storageLunReplica', u'storageLunSnapshot', u'storageScsiLun', u'storageVirtualDrive'], [], ["Get"])
prop_meta = {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version131a, MoPropertyMeta.INTERNAL, None, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, 0x2, 0, 256, None, [], []),
"id": MoPropertyMeta("id", "id", "uint", VersionMeta.Version131a, MoPropertyMeta.NAMING, 0x4, None, None, None, [], []),
"ls_dn": MoPropertyMeta("ls_dn", "lsDn", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []),
"lun_name": MoPropertyMeta("lun_name", "lunName", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, None, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
"pn_dn": MoPropertyMeta("pn_dn", "pnDn", "string", VersionMeta.Version141a, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []),
"profile_dn": MoPropertyMeta("profile_dn", "profileDn", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, 0x8, 0, 256, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version131a, MoPropertyMeta.READ_WRITE, 0x10, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
}
prop_map = {
"childAction": "child_action",
"dn": "dn",
"id": "id",
"lsDn": "ls_dn",
"lunName": "lun_name",
"pnDn": "pn_dn",
"profileDn": "profile_dn",
"rn": "rn",
"status": "status",
}
def __init__(self, parent_mo_or_dn, id, **kwargs):
self._dirty_mask = 0
self.id = id
self.child_action = None
self.ls_dn = None
self.lun_name = None
self.pn_dn = None
self.profile_dn = None
self.status = None
ManagedObject.__init__(self, "StorageScsiLunRef", parent_mo_or_dn, **kwargs)
| [] |
latrocinia/saxstools | saxstools/fullsaxs.py | 8e88474f62466b745791c0ccbb07c80a959880f3 | from __future__ import print_function, absolute_import, division
from sys import stdout as _stdout
from time import time as _time
import numpy as np
try:
import pyfftw
pyfftw.interfaces.cache.enable()
pyfftw.interfaces.cache.set_keepalive_time(10)
rfftn = pyfftw.interfaces.numpy_fft.rfftn
irfftn = pyfftw.interfaces.numpy_fft.irfftn
except ImportError:
from numpy.fft import rfftn, irfftn
from disvis import volume
from disvis.points import dilate_points
from disvis.libdisvis import (rotate_image3d, dilate_points_add, longest_distance)
from powerfit.solutions import Solutions
from saxstools.saxs_curve import scattering_curve, create_fifj_lookup_table
from saxstools.helpers import coarse_grain
from saxstools.libsaxstools import calc_chi2
from saxstools.kernels import Kernels as saxs_Kernels
try:
import pyopencl as cl
import pyopencl.array as cl_array
import disvis.pyclfft
from disvis.kernels import Kernels
from disvis import pyclfft
except ImportError:
pass
class FullSAXS(object):
def __init__(self):
# parameters to be defined
self._receptor = None
self._ligand = None
# parameters with standard values
self.rotations = [[[1, 0, 0], [0, 1, 0], [0, 0, 1]]]
self.weights = None
self.voxelspacing = 1.0
self.interaction_radius = 2.5
self.max_clash = 100
self.min_interaction = 300
self.coarse_grain = True
self.beads_per_residue = 2
# CPU or GPU
self._queue = None
# unchangeable
self._data = {}
self._q = None
self._Iq = None
self._sq = None
@property
def receptor(self):
return self._receptor
@receptor.setter
def receptor(self, receptor):
self._receptor = receptor.duplicate()
@property
def ligand(self):
return self._ligand
@ligand.setter
def ligand(self, ligand):
self._ligand = ligand.duplicate()
@property
def rotations(self):
return self._rotations
@rotations.setter
def rotations(self, rotations):
rotmat = np.asarray(rotations, dtype=np.float64)
if rotmat.ndim != 3:
raise ValueError("Input should be a list of rotation matrices.")
self._rotations = rotmat
@property
def weights(self):
return self._weights
@weights.setter
def weights(self, weights):
self._weights = weights
@property
def interaction_radius(self):
return self._interaction_radius
@interaction_radius.setter
def interaction_radius(self, radius):
if radius <= 0:
raise ValueError("Interaction radius should be bigger than zero")
self._interaction_radius = radius
@property
def voxelspacing(self):
return self._voxelspacing
@voxelspacing.setter
def voxelspacing(self, voxelspacing):
self._voxelspacing = voxelspacing
@property
def max_clash(self):
return self._max_clash
@max_clash.setter
def max_clash(self, max_clash):
if max_clash < 0:
raise ValueError("Maximum allowed clashing volume cannot be negative")
self._max_clash = max_clash + 0.9
@property
def min_interaction(self):
return self._min_interaction
@min_interaction.setter
def min_interaction(self, min_interaction):
if min_interaction < 1:
raise ValueError("Minimum required interaction volume cannot be smaller than 1")
self._min_interaction = min_interaction + 0.9
@property
def queue(self):
return self._queue
@queue.setter
def queue(self, queue):
self._queue = queue
@property
def data(self):
return self._data
@property
def saxsdata(self):
return self._q, self._Iq, self._sq
@saxsdata.setter
def saxsdata(self, saxsdata):
self._q, self._Iq, self._sq = saxsdata
def _initialize(self):
# check if requirements are set
if any(x is None for x in (self.receptor, self.ligand)):
raise ValueError("Not all requirements are met for a search")
if self.weights is None:
self.weights = np.ones(self.rotations.shape[0], dtype=np.float64)
if len(self.weights) != len(self.rotations):
raise ValueError("")
d = self.data
# determine size for grid
shape = grid_shape(self.receptor.coor, self.ligand.coor, self.voxelspacing)
# calculate the interaction surface and core of the receptor
vdw_radii = self.receptor.vdw_radius
radii = vdw_radii + self.interaction_radius
d['rsurf'] = rsurface(self.receptor.coor, radii,
shape, self.voxelspacing)
d['rcore'] = rsurface(self.receptor.coor, vdw_radii,
shape, self.voxelspacing)
# keep track of some data for later calculations
d['origin'] = np.asarray(d['rcore'].origin, dtype=np.float64)
d['shape'] = d['rcore'].shape
d['start'] = d['rcore'].start
d['nrot'] = self.rotations.shape[0]
# set ligand center to the origin of the receptor map
# and make a grid of the ligand
radii = self.ligand.vdw_radius
d['lsurf'] = dilate_points((self.ligand.coor - self.ligand.center \
+ self.receptor.center), radii, volume.zeros_like(d['rcore']))
d['im_center'] = np.asarray((self.receptor.center - d['rcore'].origin)/self.voxelspacing, dtype=np.float64)
d['max_clash'] = self.max_clash/self.voxelspacing**3
d['min_interaction'] = self.min_interaction/self.voxelspacing**3
# SAXS data
d['q'] = self._q
d['targetIq'] = self._Iq
d['sq'] = self._sq
if self.coarse_grain:
e1, xyz1 = coarse_grain(self.receptor, bpr=self.beads_per_residue)
e2, xyz2 = coarse_grain(self.ligand, bpr=self.beads_per_residue)
else:
e1, xyz1 = self.receptor.elements, self.receptor.coor
e2, xyz2 = self.ligand.elements, self.ligand.coor
d['base_Iq'] = scattering_curve(self._q, e1, xyz1, bpr=self.beads_per_residue)
d['base_Iq'] += scattering_curve(self._q, e2, xyz2, bpr=self.beads_per_residue)
d['fifj'], d['rind'], d['lind'] = create_fifj_lookup_table(d['q'], e1, e2, bpr=self.beads_per_residue)
d['rxyz'] = xyz1
d['lxyz'] = xyz2 - self.ligand.center
d['chi2'] = np.zeros(d['rcore'].shape, dtype=np.float64)
d['best_chi2'] = np.zeros_like(d['chi2'])
def search(self):
self._initialize()
if self.queue is None:
self._cpu_init()
self._cpu_search()
else:
self._gpu_init()
self._gpu_search()
if _stdout.isatty():
print()
d = self.data
ind = d['best_chi2'] > 0
d['best_chi2'][ind] -= d['best_chi2'][ind].min()
best_chi2 = volume.Volume(d['best_chi2'], voxelspacing=self.voxelspacing, origin=d['origin'])
return Solutions(best_chi2, self.rotations, d['rot_ind'])
def _cpu_init(self):
self.cpu_data = {}
c = self.cpu_data
d = self.data
c['rcore'] = d['rcore'].array
c['rsurf'] = d['rsurf'].array
c['im_lsurf'] = d['lsurf'].array
c['lsurf'] = np.zeros_like(c['rcore'])
c['clashvol'] = np.zeros_like(c['rcore'])
c['intervol'] = np.zeros_like(c['rcore'])
c['interspace'] = np.zeros_like(c['rcore'], dtype=np.int64)
# complex arrays
c['ft_shape'] = list(d['shape'])
c['ft_shape'][-1] = d['shape'][-1]//2 + 1
c['ft_lsurf'] = np.zeros(c['ft_shape'], dtype=np.complex128)
c['ft_rcore'] = np.zeros(c['ft_shape'], dtype=np.complex128)
c['ft_rsurf'] = np.zeros(c['ft_shape'], dtype=np.complex128)
# initial calculations
c['ft_rcore'] = rfftn(c['rcore'])
c['ft_rsurf'] = rfftn(c['rsurf'])
c['rotmat'] = np.asarray(self.rotations, dtype=np.float64)
c['weights'] = np.asarray(self.weights, dtype=np.float64)
c['nrot'] = d['nrot']
c['shape'] = d['shape']
c['max_clash'] = d['max_clash']
c['min_interaction'] = d['min_interaction']
c['vlength'] = int(np.linalg.norm(self.ligand.coor - \
self.ligand.center, axis=1).max() + \
self.interaction_radius + 1.5)/self.voxelspacing
c['origin'] = d['origin']
# SAXS arrays
c['q'] = d['q']
c['targetIq'] = d['targetIq']
c['sq'] = d['sq']
c['base_Iq'] = d['base_Iq']
c['fifj'] = d['fifj']
c['rind'] = d['rind']
c['lind'] = d['lind']
c['rxyz'] = d['rxyz']
c['lxyz'] = d['lxyz']
c['chi2'] = d['chi2']
c['best_chi2'] = d['best_chi2']
c['rot_ind'] = np.zeros(d['shape'], dtype=np.int32)
c['Iq'] = np.zeros_like(c['targetIq'])
c['tmplxyz'] = np.zeros_like(c['lxyz'])
def _cpu_search(self):
d = self.data
c = self.cpu_data
time0 = _time()
        for n in range(c['rotmat'].shape[0]):
# rotate ligand image
rotate_image3d(c['im_lsurf'], c['vlength'],
np.linalg.inv(c['rotmat'][n]), d['im_center'], c['lsurf'])
c['ft_lsurf'] = rfftn(c['lsurf']).conj()
c['clashvol'] = irfftn(c['ft_lsurf'] * c['ft_rcore'], s=c['shape'])
c['intervol'] = irfftn(c['ft_lsurf'] * c['ft_rsurf'], s=c['shape'])
np.logical_and(c['clashvol'] < c['max_clash'],
c['intervol'] > c['min_interaction'],
c['interspace'])
print('Number of complexes to analyze: ', c['interspace'].sum())
c['chi2'].fill(0)
calc_chi2(c['interspace'], c['q'], c['base_Iq'],
c['rind'], c['rxyz'], c['lind'], (np.mat(c['rotmat'][n])*np.mat(c['lxyz']).T).T,
c['origin'], self.voxelspacing,
c['fifj'], c['targetIq'], c['sq'], c['chi2'])
ind = c['chi2'] > c['best_chi2']
c['best_chi2'][ind] = c['chi2'][ind]
c['rot_ind'][ind] = n
if _stdout.isatty():
self._print_progress(n, c['nrot'], time0)
d['best_chi2'] = c['best_chi2']
d['rot_ind'] = c['rot_ind']
def _print_progress(self, n, total, time0):
m = n + 1
pdone = m/total
t = _time() - time0
_stdout.write('\r{:d}/{:d} ({:.2%}, ETA: {:d}s) '\
.format(m, total, pdone,
int(t/pdone - t)))
_stdout.flush()
def _gpu_init(self):
self.gpu_data = {}
g = self.gpu_data
d = self.data
q = self.queue
g['rcore'] = cl_array.to_device(q, float32array(d['rcore'].array))
g['rsurf'] = cl_array.to_device(q, float32array(d['rsurf'].array))
g['im_lsurf'] = cl.image_from_array(q.context, float32array(d['lsurf'].array))
g['sampler'] = cl.Sampler(q.context, False, cl.addressing_mode.CLAMP,
cl.filter_mode.LINEAR)
g['lsurf'] = cl_array.zeros_like(g['rcore'])
g['clashvol'] = cl_array.zeros_like(g['rcore'])
g['intervol'] = cl_array.zeros_like(g['rcore'])
g['interspace'] = cl_array.zeros(q, d['shape'], dtype=np.int32)
# complex arrays
g['ft_shape'] = list(d['shape'])
g['ft_shape'][0] = d['shape'][0]//2 + 1
g['ft_rcore'] = cl_array.zeros(q, g['ft_shape'], dtype=np.complex64)
g['ft_rsurf'] = cl_array.zeros_like(g['ft_rcore'])
g['ft_lsurf'] = cl_array.zeros_like(g['ft_rcore'])
g['ft_clashvol'] = cl_array.zeros_like(g['ft_rcore'])
g['ft_intervol'] = cl_array.zeros_like(g['ft_rcore'])
# allocate SAXS arrays
g['q'] = cl_array.to_device(q, float32array(d['q']))
g['targetIq'] = cl_array.to_device(q, float32array(d['targetIq']))
g['sq'] = cl_array.to_device(q, float32array(d['sq']))
g['base_Iq'] = cl_array.to_device(q, float32array(d['base_Iq']))
g['fifj'] = cl_array.to_device(q, float32array(d['fifj']))
g['rind'] = cl_array.to_device(q, d['rind'].astype(np.int32))
g['lind'] = cl_array.to_device(q, d['lind'].astype(np.int32))
g_rxyz = np.zeros((d['rxyz'].shape[0], 4), dtype=np.float32)
g_rxyz[:, :3] = d['rxyz'][:]
g_lxyz = np.zeros((d['lxyz'].shape[0], 4), dtype=np.float32)
g_lxyz[:, :3] = d['lxyz'][:]
g['rxyz'] = cl_array.to_device(q, g_rxyz)
g['lxyz'] = cl_array.to_device(q, g_lxyz)
g['rot_lxyz'] = cl_array.zeros_like(g['lxyz'])
g['chi2'] = cl_array.to_device(q, d['chi2'].astype(np.float32))
g['best_chi2'] = cl_array.to_device(q, d['best_chi2'].astype(np.float32))
g['rot_ind'] = cl_array.zeros(q, d['shape'], dtype=np.int32)
g['origin'] = np.zeros(4, dtype=np.float32)
g['origin'][:3] = d['origin'].astype(np.float32)
g['voxelspacing'] = np.float32(self.voxelspacing)
# kernels
g['k'] = Kernels(q.context)
g['saxs_k'] = saxs_Kernels(q.context)
g['k'].rfftn = pyclfft.RFFTn(q.context, d['shape'])
g['k'].irfftn = pyclfft.iRFFTn(q.context, d['shape'])
g['k'].rfftn(q, g['rcore'], g['ft_rcore'])
g['k'].rfftn(q, g['rsurf'], g['ft_rsurf'])
g['nrot'] = d['nrot']
g['max_clash'] = d['max_clash']
g['min_interaction'] = d['min_interaction']
def _gpu_search(self):
d = self.data
g = self.gpu_data
q = self.queue
k = g['k']
time0 = _time()
        for n in range(g['nrot']):
k.rotate_image3d(q, g['sampler'], g['im_lsurf'],
self.rotations[n], g['lsurf'], d['im_center'])
k.rfftn(q, g['lsurf'], g['ft_lsurf'])
k.c_conj_multiply(q, g['ft_lsurf'], g['ft_rcore'], g['ft_clashvol'])
k.irfftn(q, g['ft_clashvol'], g['clashvol'])
k.c_conj_multiply(q, g['ft_lsurf'], g['ft_rsurf'], g['ft_intervol'])
k.irfftn(q, g['ft_intervol'], g['intervol'])
k.touch(q, g['clashvol'], g['max_clash'],
g['intervol'], g['min_interaction'],
g['interspace'])
g['saxs_k'].rotate_points(q, g['lxyz'], self.rotations[n], g['rot_lxyz'])
k.fill(q, g['chi2'], 0)
g['saxs_k'].calc_chi2(q, g['interspace'], g['q'], g['base_Iq'],
g['rind'], g['rxyz'], g['lind'], g['rot_lxyz'], g['origin'],
g['voxelspacing'], g['fifj'], g['targetIq'], g['sq'], g['chi2'])
g['saxs_k'].take_best(q, g['chi2'], g['best_chi2'], g['rot_ind'], n)
if _stdout.isatty():
self._print_progress(n, g['nrot'], time0)
self.queue.finish()
d['best_chi2'] = g['best_chi2'].get()
d['rot_ind'] = g['rot_ind'].get()
def rsurface(points, radius, shape, voxelspacing):
dimensions = [x*voxelspacing for x in shape]
origin = volume_origin(points, dimensions)
rsurf = volume.zeros(shape, voxelspacing, origin)
rsurf = dilate_points(points, radius, rsurf)
return rsurf
def volume_origin(points, dimensions):
center = points.mean(axis=0)
origin = [(c - d/2.0) for c, d in zip(center, dimensions)]
return origin
def grid_restraints(restraints, voxelspacing, origin, lcenter):
nrestraints = len(restraints)
g_restraints = np.zeros((nrestraints, 8), dtype=np.float64)
for n in range(nrestraints):
r_sel, l_sel, mindis, maxdis = restraints[n]
r_pos = (r_sel.center - origin)/voxelspacing
l_pos = (l_sel.center - lcenter)/voxelspacing
g_restraints[n, 0:3] = r_pos
g_restraints[n, 3:6] = l_pos
g_restraints[n, 6] = mindis/voxelspacing
g_restraints[n, 7] = maxdis/voxelspacing
return g_restraints
def grid_shape(points1, points2, voxelspacing):
shape = min_grid_shape(points1, points2, voxelspacing)
shape = [volume.radix235(x) for x in shape]
return shape
def min_grid_shape(points1, points2, voxelspacing):
# the minimal grid shape is the size of the fixed protein in
# each dimension and the longest diameter is the scanning chain
dimensions1 = points1.ptp(axis=0)
dimension2 = longest_distance(points2)
grid_shape = np.asarray(((dimensions1 + dimension2)/voxelspacing) + 10, dtype=np.int32)[::-1]
return grid_shape
def float32array(array_like):
return np.asarray(array_like, dtype=np.float32)
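# Illustrative driver sketch (not part of the original module). It assumes
# `receptor` and `ligand` are already-parsed structure objects exposing the
# attributes used above (.coor, .center, .vdw_radius, .duplicate()) and that
# a SAXS profile has been loaded into the arrays q, Iq and sq.
#
# fs = FullSAXS()
# fs.receptor = receptor
# fs.ligand = ligand
# fs.saxsdata = (q, Iq, sq)
# solutions = fs.search()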
| [] |
zehuilu/Learning-from-Sparse-Demonstrations | lib/generate_random_obs.py | 4d652635c24f847fe51bc050773762b549ce41c0 | #!/usr/bin/env python3
import os
import sys
import time
sys.path.append(os.getcwd()+'/lib')
import random
from dataclasses import dataclass, field
from ObsInfo import ObsInfo
def generate_random_obs(num_obs: int, size_list: list, config_data):
"""
config_file_name = "config.json"
json_file = open(config_file_name)
config_data = json.load(json_file)
size_list = [length, width, height]
"""
ObsList = []
if (num_obs > 0.5):
for i in range(0, num_obs):
# random center
center = [random.uniform(config_data["LAB_SPACE_LIMIT"]["LIMIT_X"][0], config_data["LAB_SPACE_LIMIT"]["LIMIT_X"][1]), \
random.uniform(config_data["LAB_SPACE_LIMIT"]["LIMIT_Y"][0], config_data["LAB_SPACE_LIMIT"]["LIMIT_Y"][1]), \
random.uniform(config_data["LAB_SPACE_LIMIT"]["LIMIT_Z"][0], config_data["LAB_SPACE_LIMIT"]["LIMIT_Z"][1])]
ObsList.append( ObsInfo(center, size_list) )
return ObsList | [((5, 16, 5, 27), 'os.getcwd', 'os.getcwd', ({}, {}), '()', False, 'import os\n'), ((26, 22, 26, 128), 'random.uniform', 'random.uniform', ({(26, 37, 26, 81): "config_data['LAB_SPACE_LIMIT']['LIMIT_X'][0]", (26, 83, 26, 127): "config_data['LAB_SPACE_LIMIT']['LIMIT_X'][1]"}, {}), "(config_data['LAB_SPACE_LIMIT']['LIMIT_X'][0], config_data[\n 'LAB_SPACE_LIMIT']['LIMIT_X'][1])", False, 'import random\n'), ((27, 16, 27, 122), 'random.uniform', 'random.uniform', ({(27, 31, 27, 75): "config_data['LAB_SPACE_LIMIT']['LIMIT_Y'][0]", (27, 77, 27, 121): "config_data['LAB_SPACE_LIMIT']['LIMIT_Y'][1]"}, {}), "(config_data['LAB_SPACE_LIMIT']['LIMIT_Y'][0], config_data[\n 'LAB_SPACE_LIMIT']['LIMIT_Y'][1])", False, 'import random\n'), ((28, 16, 28, 122), 'random.uniform', 'random.uniform', ({(28, 31, 28, 75): "config_data['LAB_SPACE_LIMIT']['LIMIT_Z'][0]", (28, 77, 28, 121): "config_data['LAB_SPACE_LIMIT']['LIMIT_Z'][1]"}, {}), "(config_data['LAB_SPACE_LIMIT']['LIMIT_Z'][0], config_data[\n 'LAB_SPACE_LIMIT']['LIMIT_Z'][1])", False, 'import random\n'), ((30, 28, 30, 54), 'ObsInfo.ObsInfo', 'ObsInfo', ({(30, 36, 30, 42): 'center', (30, 44, 30, 53): 'size_list'}, {}), '(center, size_list)', False, 'from ObsInfo import ObsInfo\n')] |
Abucuyy/Uciha | userbot/helper_funcs/misc.py | 726e9cd61eabf056064e40f7b322d8993161e52a | # TG-UserBot - A modular Telegram UserBot script for Python.
# Copyright (C) 2019 Kandarp <https://github.com/kandnub>
#
# TG-UserBot is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TG-UserBot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TG-UserBot. If not, see <https://www.gnu.org/licenses/>.
from typing import Tuple, Union
from telethon.tl import types
from ..utils.client import UserBotClient
from ..utils.helpers import get_chat_link
ChatBannedRights = {
'until_date': 'Banned until:',
'view_messages': 'Read messages:',
'send_messages': 'Send messages:',
'send_media': 'Send media:',
'send_stickers': 'Send stickers:',
'send_gifs': 'Send GIFs:',
'send_games': 'Send games:',
'send_inline': 'Send inline messages:',
'embed_links': 'Send embed links:',
'send_polls': 'Send polls:',
'change_info': 'Change info:',
'invite_users': 'Add users:',
'pin_messages': 'Pin messages:'
}
ChatAdminRights = {
'change_info': 'Change chat info:',
'post_messages': 'Post messages:',
'edit_messages': 'Edit messages:',
'delete_messages': 'Delete messages:',
'ban_users': 'Ban users:',
'invite_users': 'Invite users:',
'pin_messages': 'Pin messages:',
'add_admins': 'Add new admins:'
}
async def parse_admin_rights(AdminRights: types.ChatAdminRights) -> str:
text = []
for attr, string in ChatAdminRights.items():
right = getattr(AdminRights, attr, False)
if right:
text.append(f'{string} {right}')
return '\n'.join(text)
async def parse_banned_rights(BannedRights: types.ChatBannedRights) -> str:
text = []
for attr, string in ChatBannedRights.items():
right = getattr(BannedRights, attr, False)
if right:
if attr == "until_date":
text.append(f'{string} {right.ctime()} (UTC)')
else:
text.append(f'{string} {right}')
return '\n'.join(text)
async def get_entity_info(
arg: Union[types.ChatFull, types.ChannelFull]
) -> Tuple[int, int, int, int, int, int]:
creator, admins, bots, participants, kicked, banned = (None, None, None,
None, None, None)
full_chat = arg.full_chat
if isinstance(full_chat, types.ChannelFull):
if hasattr(full_chat, 'participants_count'):
participants = full_chat.participants_count
if hasattr(full_chat, 'admins_count'):
admins = full_chat.admins_count
if hasattr(full_chat, 'kicked_count'):
kicked = full_chat.kicked_count
if hasattr(full_chat, 'banned_count'):
banned = full_chat.banned_count
if hasattr(full_chat, 'bot_info'):
bots = len(full_chat.bot_info)
else:
if hasattr(full_chat, 'bot_info'):
bots = len(full_chat.bot_info)
if hasattr(full_chat, 'participants'):
admins, participants = 0, 0
for p in full_chat.participants.participants:
if isinstance(p, types.ChatParticipantCreator):
creator = p.user_id
if isinstance(p, types.ChatParticipant):
participants += 1
if isinstance(p, types.ChatParticipantAdmin):
admins += 1
return creator, admins, bots, participants, kicked, banned
async def unparse_info(client: UserBotClient, creator: int, admins: int,
bots: int, users: int, kicked: int, banned: int) -> str:
text = ''
if creator:
c = await client.get_entity(creator)
text += f"\n**Creator:** {await get_chat_link(c)}"
if users:
text += f"\n**Participants:** {users}"
if admins:
text += f"\n**Admins:** {admins}"
if bots:
text += f"\n**Bots:** {bots}"
if kicked:
text += f"\n**Kicked:** {kicked}"
if banned:
text += f"\n**Banned:** {banned}"
return text
async def unparse_rights(title: str, rights: str) -> str:
text = f"**{title}**"
for l in rights.split('\n'):
splat = l.split(':')
text += f"\n **{splat[0]}:** `{':'.join(splat[1:])}`"
return text
async def resolve_channel(client: UserBotClient,
channel: types.ChannelFull) -> str:
text = ''
default_banned_rights = None
banned_rights = None
admin_rights = None
channel_type = "Channel"
for c in channel.chats:
if c.id == channel.full_chat.id:
if c.megagroup:
channel_type = "Megagroup"
admin_rights = c.admin_rights
banned_rights = c.banned_rights
default_banned_rights = c.default_banned_rights
break
text += f"\n**{channel_type} ID:** `{channel.full_chat.id}`"
info = await get_entity_info(channel)
text += await unparse_info(client, *info)
if admin_rights:
parsed = await parse_admin_rights(admin_rights)
unparsed = await unparse_rights("Admin rights:", parsed)
text += f"\n{unparsed}"
if banned_rights:
parsed = await parse_banned_rights(banned_rights)
unparsed = await unparse_rights("Banned rights:", parsed)
text += f"\n{unparsed}"
if default_banned_rights:
parsed = await parse_banned_rights(default_banned_rights)
unparsed = await unparse_rights("Default banned rights:", parsed)
text += f"\n{unparsed}"
return text
async def resolve_chat(client: UserBotClient, chat: types.ChatFull) -> str:
text = f"\n**Chat ID:** `{chat.full_chat.id}``"
info = await get_entity_info(chat)
text += await unparse_info(client, *info)
admin_rights = None
default_banned_rights = None
for c in chat.chats:
if c.id == chat.full_chat.id:
admin_rights = c.admin_rights
default_banned_rights = c.default_banned_rights
break
if admin_rights:
parsed = await parse_admin_rights(admin_rights)
unparsed = await unparse_rights("Admin rights:", parsed)
text += f"\n{unparsed}"
if default_banned_rights:
parsed = await parse_banned_rights(default_banned_rights)
unparsed = await unparse_rights("Default banned rights:", parsed)
text += f"\n{unparsed}"
return text
| [] |
HarryTheBird/gym-multilayerthinfilm | gym-multilayerthinfilm/utils.py | 22eda96e71e95e9ea1b491fae633c4a32fadb023 | import numpy as np
def get_n_from_txt(filepath, points=None, lambda_min=400, lambda_max=700, complex_n=True):
ntxt = np.loadtxt(filepath)
if np.min(np.abs(ntxt[:, 0] - lambda_min)) > 25 or np.min(np.abs(ntxt[:, 0] - lambda_max)) > 25:
        print('No measurement data for refractive indices is available within 25 nm in \n' + filepath)
if points is None:
points = lambda_max - lambda_min + 1
idxmin = np.argmin(np.abs(ntxt[:, 0] - lambda_min))
idxmax = np.argmin(np.abs(ntxt[:, 0] - lambda_max))
if idxmax == idxmin:
if complex_n:
indicies = np.vectorize(complex)(np.array([ntxt[idxmin, 1]]), np.array([ntxt[idxmin, 2]]))
else:
indicies = np.array([ntxt[idxmin, 1]])
else:
xp = ntxt[idxmin:idxmax, 0]
fpn = ntxt[idxmin:idxmax, 1]
n = np.interp(np.linspace(lambda_min, lambda_max, points), xp, fpn)
if complex_n:
fpk = ntxt[idxmin:idxmax, 2].squeeze()
k = np.interp(np.linspace(lambda_min, lambda_max, points), xp, fpk)
indicies = np.vectorize(complex)(n, k)
else:
indicies = n
return indicies
def get_N(path_list, lambda_min, lambda_max, points=None, complex_n=False):
n = []
for path in path_list:
n.append(get_n_from_txt(path, points, lambda_min=lambda_min, lambda_max=lambda_max, complex_n=complex_n))
    return np.vstack(n)
| [((4, 11, 4, 31), 'numpy.loadtxt', 'np.loadtxt', ({(4, 22, 4, 30): 'filepath'}, {}), '(filepath)', True, 'import numpy as np\n'), ((32, 11, 32, 25), 'numpy.vstack', 'np.vstack', ({(32, 22, 32, 23): 'n'}, {}), '(n)', True, 'import numpy as np\n'), ((9, 23, 9, 54), 'numpy.abs', 'np.abs', ({(9, 30, 9, 53): 'ntxt[:, (0)] - lambda_min'}, {}), '(ntxt[:, (0)] - lambda_min)', True, 'import numpy as np\n'), ((10, 23, 10, 54), 'numpy.abs', 'np.abs', ({(10, 30, 10, 53): 'ntxt[:, (0)] - lambda_max'}, {}), '(ntxt[:, (0)] - lambda_max)', True, 'import numpy as np\n'), ((15, 23, 15, 50), 'numpy.array', 'np.array', ({(15, 32, 15, 49): '[ntxt[idxmin, 1]]'}, {}), '([ntxt[idxmin, 1]])', True, 'import numpy as np\n'), ((19, 22, 19, 65), 'numpy.linspace', 'np.linspace', ({(19, 34, 19, 44): 'lambda_min', (19, 46, 19, 56): 'lambda_max', (19, 58, 19, 64): 'points'}, {}), '(lambda_min, lambda_max, points)', True, 'import numpy as np\n'), ((5, 14, 5, 45), 'numpy.abs', 'np.abs', ({(5, 21, 5, 44): '(ntxt[:, (0)] - lambda_min)'}, {}), '(ntxt[:, (0)] - lambda_min)', True, 'import numpy as np\n'), ((5, 62, 5, 93), 'numpy.abs', 'np.abs', ({(5, 69, 5, 92): '(ntxt[:, (0)] - lambda_max)'}, {}), '(ntxt[:, (0)] - lambda_max)', True, 'import numpy as np\n'), ((13, 23, 13, 44), 'numpy.vectorize', 'np.vectorize', ({(13, 36, 13, 43): 'complex'}, {}), '(complex)', True, 'import numpy as np\n'), ((13, 45, 13, 72), 'numpy.array', 'np.array', ({(13, 54, 13, 71): '[ntxt[idxmin, 1]]'}, {}), '([ntxt[idxmin, 1]])', True, 'import numpy as np\n'), ((13, 74, 13, 101), 'numpy.array', 'np.array', ({(13, 83, 13, 100): '[ntxt[idxmin, 2]]'}, {}), '([ntxt[idxmin, 2]])', True, 'import numpy as np\n'), ((22, 26, 22, 69), 'numpy.linspace', 'np.linspace', ({(22, 38, 22, 48): 'lambda_min', (22, 50, 22, 60): 'lambda_max', (22, 62, 22, 68): 'points'}, {}), '(lambda_min, lambda_max, points)', True, 'import numpy as np\n'), ((23, 23, 23, 44), 'numpy.vectorize', 'np.vectorize', ({(23, 36, 23, 43): 'complex'}, {}), '(complex)', True, 'import numpy as np\n')] |
joaopalmeiro/pyrocco | pyrocco/__init__.py | 4144f56d654500c3ec49cb04c06b98296004eafe | __package_name__ = "pyrocco"
__version__ = "0.1.0"
__author__ = "João Palmeiro"
__author_email__ = "[email protected]"
__description__ = "A Python CLI to add the Party Parrot to a custom background image."
__url__ = "https://github.com/joaopalmeiro/pyrocco"
| [] |
ingjrs01/adventofcode | 2020/day08/machine.py | c5e4f0158dac0efc2dbfc10167f2700693b41fea | class Machine():
def __init__(self):
self.pointer = 0
self.accum = 0
self.visited = []
    def run(self, program):
        while True:
            if self.pointer in self.visited:
                return False
            if self.pointer >= len(program):
                return True
            self.visited.append(self.pointer)
            incremento = 1
            if program[self.pointer][0] == "acc":
                self.accum += program[self.pointer][1]
            if program[self.pointer][0] == "jmp":
                incremento = program[self.pointer][1]
            self.pointer += incremento
def getVisited(self):
return self.visited
def getAccum(self):
return self.accum
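# Minimal usage sketch (not part of the original file): run() expects the
# puzzle input parsed into (opcode, argument) tuples; it returns False when
# an instruction is about to execute a second time and True when the pointer
# runs past the end of the program.
#
# program = [("nop", 0), ("acc", 1), ("jmp", 4), ("acc", 3), ("jmp", -3)]
# m = Machine()
# print(m.run(program), m.getAccum())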
| [] |
BAOOOOOM/EduData | EduData/Task/__init__.py | affa465779cb94db00ed19291f8411229d342c0f | # coding: utf-8
# 2019/8/23 @ tongshiwei
| [] |
richardvecsey/python-basics | 010-round.py | b66abef77bce2ddd6f2f39b631e1dd97a9aa2fac | """
Round a number
--------------
Input (float) A floating point number
(int) Number of decimals
Default value is: 0
Output (float) Rounded number
       (int)   When using the default decimals value, the returned number
               will be the nearest integer
"""
number = 103.14159
# Rounding with 2 decimals
number_rounded = round(number, 2)
print('Rounding with 2 decimals')
print('original number: {}, rounded: {}, type of rounded: {}'
.format(number, number_rounded, type(number_rounded)))
# Rounding with -2 decimals
number_rounded = round(number, -2)
print('\nRounding with -2 decimals')
print('original number: {}, rounded: {}, type of rounded: {}'
.format(number, number_rounded, type(number_rounded)))
# Rounding with 0 decimals
number_rounded = round(number, 0)
print('\nRounding with 0 decimals')
print('original number: {}, rounded: {}, type of rounded: {}'
.format(number, number_rounded, type(number_rounded)))
# Rounding with default
# Result will be integer (!)
number_rounded = round(number)
print('\nRounding with default')
print('original number: {}, rounded: {}, type of rounded: {}'
.format(number, number_rounded, type(number_rounded)))
| [] |
Kleist/MusicPlayer | service.py | 95f634d1e4d47e7b430e32ad9224d94ad0453c82 | #!/usr/bin/env python3
import RPi.GPIO as GPIO
from mfrc522 import SimpleMFRC522
import play
import time
class TagPlayer(object):
def __init__(self):
self._current = None
self.reader = SimpleMFRC522()
self._failed = 0
def step(self):
id, text = self.reader.read_no_block()
print(id,text)
if id:
self._failed = 0
if text != self._current:
stripped_text = text.strip()
print("Read text: \"{}\"".format(stripped_text))
play.play(stripped_text)
self._current = text
elif self._current:
self._failed += 1
if self._failed > 2:
self._current = None
print("Stopping")
play.stop()
time.sleep(1)
def main():
try:
player = TagPlayer()
while 1:
player.step()
finally:
GPIO.cleanup()
if __name__ == "__main__":
main()
| [((13, 22, 13, 37), 'mfrc522.SimpleMFRC522', 'SimpleMFRC522', ({}, {}), '()', False, 'from mfrc522 import SimpleMFRC522\n'), ((32, 8, 32, 21), 'time.sleep', 'time.sleep', ({(32, 19, 32, 20): '(1)'}, {}), '(1)', False, 'import time\n'), ((40, 8, 40, 22), 'RPi.GPIO.cleanup', 'GPIO.cleanup', ({}, {}), '()', True, 'import RPi.GPIO as GPIO\n'), ((24, 16, 24, 40), 'play.play', 'play.play', ({(24, 26, 24, 39): 'stripped_text'}, {}), '(stripped_text)', False, 'import play\n'), ((31, 16, 31, 27), 'play.stop', 'play.stop', ({}, {}), '()', False, 'import play\n')] |
ckanesan/mypy | mypy/defaults.py | ffb3ce925e8bb3376e19f942c7d3a3806c9bba97 | import os
MYPY = False
if MYPY:
from typing_extensions import Final
PYTHON2_VERSION = (2, 7) # type: Final
PYTHON3_VERSION = (3, 6) # type: Final
PYTHON3_VERSION_MIN = (3, 4) # type: Final
CACHE_DIR = '.mypy_cache' # type: Final
CONFIG_FILE = 'mypy.ini' # type: Final
SHARED_CONFIG_FILES = ['setup.cfg', ] # type: Final
USER_CONFIG_FILES = ['~/.config/mypy/config', '~/.mypy.ini', ] # type: Final
if os.environ.get('XDG_CONFIG_HOME'):
USER_CONFIG_FILES.insert(0, os.path.join(os.environ['XDG_CONFIG_HOME'], 'mypy/config'))
CONFIG_FILES = [CONFIG_FILE, ] + SHARED_CONFIG_FILES + USER_CONFIG_FILES # type: Final
# This must include all reporters defined in mypy.report. This is defined here
# to make reporter names available without importing mypy.report -- this speeds
# up startup.
REPORTER_NAMES = ['linecount',
'any-exprs',
'linecoverage',
'memory-xml',
'cobertura-xml',
'xml',
'xslt-html',
'xslt-txt',
'html',
'txt'] # type: Final
| [((14, 3, 14, 36), 'os.environ.get', 'os.environ.get', ({(14, 18, 14, 35): '"""XDG_CONFIG_HOME"""'}, {}), "('XDG_CONFIG_HOME')", False, 'import os\n'), ((15, 32, 15, 90), 'os.path.join', 'os.path.join', ({(15, 45, 15, 74): "os.environ['XDG_CONFIG_HOME']", (15, 76, 15, 89): '"""mypy/config"""'}, {}), "(os.environ['XDG_CONFIG_HOME'], 'mypy/config')", False, 'import os\n')] |
elcolie/scikit-criteria | skcriteria/preprocessing/push_negatives.py | 216674d699b60d68fefa98d44afd619943f3bb00 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# License: BSD-3 (https://tldrlegal.com/license/bsd-3-clause-license-(revised))
# Copyright (c) 2016-2021, Cabral, Juan; Luczywo, Nadia
# All rights reserved.
# =============================================================================
# DOCS
# =============================================================================
"""Functionalities for remove negatives from criteria.
In addition to the main functionality, an MCDA agnostic function is offered
to push negatives values on an array along an arbitrary axis.
"""
# =============================================================================
# IMPORTS
# =============================================================================
import numpy as np
from ..core import SKCMatrixAndWeightTransformerABC
from ..utils import doc_inherit
# =============================================================================
# FUNCTIONS
# =============================================================================
def push_negatives(arr, axis):
r"""Increment the array until all the valuer are sean >= 0.
If an array has negative values this function increment the values
proportionally to made all the array positive along an axis.
.. math::
\overline{X}_{ij} =
\begin{cases}
X_{ij} + min_{X_{ij}} & \text{if } X_{ij} < 0\\
X_{ij} & \text{otherwise}
\end{cases}
Parameters
----------
arr: :py:class:`numpy.ndarray` like.
A array with values
axis : :py:class:`int` optional
Axis along which to operate. By default, flattened input is used.
Returns
-------
:py:class:`numpy.ndarray`
array with all values >= 0.
Examples
--------
.. code-block:: pycon
        >>> from skcriteria.preprocessing.push_negatives import push_negatives
>>> mtx = [[1, 2], [3, 4]]
>>> mtx_lt0 = [[-1, 2], [3, 4]] # has a negative value
        >>> push_negatives(mtx, axis=None)  # arrays without negatives are not affected
array([[1, 2],
[3, 4]])
        # the whole array is incremented by 1 to eliminate the negative value
        >>> push_negatives(mtx_lt0, axis=None)
array([[0, 3],
[4, 5]])
# by column only the first one (with the negative value) is affected
>>> push_negatives(mtx_lt0, axis=0)
array([[0, 2],
[4, 4]])
# by row only the first row (with the negative value) is affected
>>> push_negatives(mtx_lt0, axis=1)
array([[0, 3],
[3, 4]])
"""
arr = np.asarray(arr)
mins = np.min(arr, axis=axis, keepdims=True)
delta = (mins < 0) * mins
return arr - delta
class PushNegatives(SKCMatrixAndWeightTransformerABC):
r"""Increment the matrix/weights until all the valuer are sean >= 0.
If the matrix/weights has negative values this function increment the
values proportionally to made all the matrix/weights positive along an
axis.
.. math::
\overline{X}_{ij} =
\begin{cases}
X_{ij} + min_{X_{ij}} & \text{if } X_{ij} < 0\\
X_{ij} & \text{otherwise}
\end{cases}
"""
@doc_inherit(SKCMatrixAndWeightTransformerABC._transform_weights)
def _transform_weights(self, weights):
return push_negatives(weights, axis=None)
@doc_inherit(SKCMatrixAndWeightTransformerABC._transform_matrix)
def _transform_matrix(self, matrix):
return push_negatives(matrix, axis=0)
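# Minimal self-check sketch (not part of the module): reproduces the
# column-wise example from the function docstring above.
#
# import numpy as np
# out = push_negatives(np.array([[-1, 2], [3, 4]]), axis=0)
# assert (out == np.array([[0, 2], [4, 4]])).all()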
| [((85, 10, 85, 25), 'numpy.asarray', 'np.asarray', ({(85, 21, 85, 24): 'arr'}, {}), '(arr)', True, 'import numpy as np\n'), ((86, 11, 86, 48), 'numpy.min', 'np.min', (), '', True, 'import numpy as np\n')] |
Ingenico/direct-sdk-python3 | ingenico/direct/sdk/domain/customer_token.py | d2b30b8e8afb307153a1f19ac4c054d5344449ce | # -*- coding: utf-8 -*-
#
# This class was auto-generated from the API references found at
# https://support.direct.ingenico.com/documentation/api/reference/
#
from ingenico.direct.sdk.data_object import DataObject
from ingenico.direct.sdk.domain.address import Address
from ingenico.direct.sdk.domain.company_information import CompanyInformation
from ingenico.direct.sdk.domain.personal_information_token import PersonalInformationToken
class CustomerToken(DataObject):
__billing_address = None
__company_information = None
__personal_information = None
@property
def billing_address(self) -> Address:
"""
| Object containing billing address details
Type: :class:`ingenico.direct.sdk.domain.address.Address`
"""
return self.__billing_address
@billing_address.setter
def billing_address(self, value: Address):
self.__billing_address = value
@property
def company_information(self) -> CompanyInformation:
"""
| Object containing company information
Type: :class:`ingenico.direct.sdk.domain.company_information.CompanyInformation`
"""
return self.__company_information
@company_information.setter
def company_information(self, value: CompanyInformation):
self.__company_information = value
@property
def personal_information(self) -> PersonalInformationToken:
"""
Type: :class:`ingenico.direct.sdk.domain.personal_information_token.PersonalInformationToken`
"""
return self.__personal_information
@personal_information.setter
def personal_information(self, value: PersonalInformationToken):
self.__personal_information = value
def to_dictionary(self):
dictionary = super(CustomerToken, self).to_dictionary()
if self.billing_address is not None:
dictionary['billingAddress'] = self.billing_address.to_dictionary()
if self.company_information is not None:
dictionary['companyInformation'] = self.company_information.to_dictionary()
if self.personal_information is not None:
dictionary['personalInformation'] = self.personal_information.to_dictionary()
return dictionary
def from_dictionary(self, dictionary):
super(CustomerToken, self).from_dictionary(dictionary)
if 'billingAddress' in dictionary:
if not isinstance(dictionary['billingAddress'], dict):
raise TypeError('value \'{}\' is not a dictionary'.format(dictionary['billingAddress']))
value = Address()
self.billing_address = value.from_dictionary(dictionary['billingAddress'])
if 'companyInformation' in dictionary:
if not isinstance(dictionary['companyInformation'], dict):
raise TypeError('value \'{}\' is not a dictionary'.format(dictionary['companyInformation']))
value = CompanyInformation()
self.company_information = value.from_dictionary(dictionary['companyInformation'])
if 'personalInformation' in dictionary:
if not isinstance(dictionary['personalInformation'], dict):
raise TypeError('value \'{}\' is not a dictionary'.format(dictionary['personalInformation']))
value = PersonalInformationToken()
self.personal_information = value.from_dictionary(dictionary['personalInformation'])
return self
| [((70, 20, 70, 29), 'ingenico.direct.sdk.domain.address.Address', 'Address', ({}, {}), '()', False, 'from ingenico.direct.sdk.domain.address import Address\n'), ((75, 20, 75, 40), 'ingenico.direct.sdk.domain.company_information.CompanyInformation', 'CompanyInformation', ({}, {}), '()', False, 'from ingenico.direct.sdk.domain.company_information import CompanyInformation\n'), ((80, 20, 80, 46), 'ingenico.direct.sdk.domain.personal_information_token.PersonalInformationToken', 'PersonalInformationToken', ({}, {}), '()', False, 'from ingenico.direct.sdk.domain.personal_information_token import PersonalInformationToken\n')] |
pirate/macOS-global-autocomplete | inserter.py | 4ba8c3efdd34e7b4c0044c50f47d21a1bafd9aac | import time
import pykeyboard
# TODO: Replace following two lines with the code that activate the application.
print('Activate the application 3 seconds.')
time.sleep(3)
k = pykeyboard.PyKeyboard()
k.press_key(k.left_key)
time.sleep(1) # Hold down left key for 1 second.
k.release_key(k.left_key)
| [((8, 0, 8, 13), 'time.sleep', 'time.sleep', ({(8, 11, 8, 12): '(3)'}, {}), '(3)', False, 'import time\n'), ((10, 4, 10, 27), 'pykeyboard.PyKeyboard', 'pykeyboard.PyKeyboard', ({}, {}), '()', False, 'import pykeyboard\n'), ((12, 0, 12, 13), 'time.sleep', 'time.sleep', ({(12, 11, 12, 12): '(1)'}, {}), '(1)', False, 'import time\n')] |
EleutherAI/megatron-3d | tools/corpora.py | be3014d47a127f08871d0ba6d6389363f2484397 | import os
import tarfile
from abc import ABC, abstractmethod
from glob import glob
import shutil
import random
import zstandard
"""
This registry is for automatically downloading and extracting datasets.
To register a class you need to inherit the DataDownloader class, provide name, filetype and url attributes, and
(optionally) provide download / extract / exists / tokenize functions to check if the data exists, and, if it doesn't, download,
extract and tokenize the data into the correct directory.
When done, add it to the DATA_DOWNLOADERS dict. The function process_data runs the pre-processing for the selected
dataset.
"""
DATA_DIR = os.environ.get('DATA_DIR', './data')
GPT2_VOCAB_FP = f"{DATA_DIR}/gpt2-vocab.json"
GPT2_VOCAB_URL = "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-vocab.json"
GPT2_MERGE_FP = f"{DATA_DIR}/gpt2-merges.txt"
GPT2_MERGE_URL = "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-merges.txt"
class DataDownloader(ABC):
"""Dataset registry class to automatically download / extract datasets"""
@property
def base_dir(self):
"""base data directory"""
return DATA_DIR
@property
@abstractmethod
def name(self):
"""name of dataset"""
pass
@property
@abstractmethod
def filetype(self):
"""filetype of dataset"""
pass
@property
@abstractmethod
def url(self):
"""URL from which to download dataset"""
pass
def _extract_tar(self):
self.path = os.path.join(self.base_dir, self.name)
os.makedirs(self.path, exist_ok=True)
tarfile_path = os.path.join(self.base_dir, os.path.basename(self.url))
with tarfile.open(tarfile_path, "r:gz") as dataset_tar:
print(f'Extracting files from {tarfile_path}...')
dataset_tar.extractall(self.path)
def _extract_zstd(self, remove_zstd=True):
self.path = os.path.join(self.base_dir, self.name)
os.makedirs(self.path, exist_ok=True)
zstd_file_path = os.path.join(self.base_dir, os.path.basename(self.url))
with open(zstd_file_path, 'rb') as compressed:
decomp = zstandard.ZstdDecompressor()
output_path = zstd_file_path.replace(".zst", "")
with open(output_path, 'wb') as destination:
decomp.copy_stream(compressed, destination)
if remove_zstd:
os.remove(zstd_file_path)
return output_path
def extract(self):
"""extracts dataset and moves to the correct data dir if necessary"""
self._extract_tar()
def exists(self):
"""Checks if the dataset is present"""
return os.path.isdir(f"{self.base_dir}/{self.name}")
def download(self):
"""downloads dataset"""
os.makedirs(self.base_dir, exist_ok=True)
os.system(f"wget {self.url} -O {os.path.join(self.base_dir, os.path.basename(self.url))}")
def tokenize(self):
parent_folder = os.path.join(self.base_dir, self.name)
jsonl_filepath = os.path.join(parent_folder, os.path.basename(self.url)).replace(".zst", "")
assert jsonl_filepath.endswith(".jsonl")
os.system(f"python tools/preprocess_data.py \
--input {jsonl_filepath} \
--output-prefix {parent_folder}/{self.name} \
--vocab {GPT2_VOCAB_FP} \
--dataset-impl mmap \
--tokenizer-type GPT2BPETokenizer \
--merge-file {GPT2_MERGE_FP} \
--append-eod")
def prepare(self):
if not self.exists():
self.download()
self.extract()
self.tokenize()
class Enron(DataDownloader):
name = "enron"
filetype = "jsonl.zst"
url = "http://eaidata.bmk.sh/data/enron_emails.jsonl.zst"
seed = 1
def exists(self):
self.path = os.path.join(self.base_dir, self.name)
return os.path.isfile(os.path.join(self.path, os.path.basename(self.url).replace(".zst", "")))
def extract(self, remove_zstd=True):
self._extract_zstd(remove_zstd=remove_zstd)
shutil.move(os.path.join(self.base_dir, os.path.basename(self.url).replace(".zst", "")), os.path.join(self.base_dir, self.name))
def maybe_download_gpt2_tokenizer_data():
if not os.path.isfile(GPT2_VOCAB_FP):
os.system(f'wget {GPT2_VOCAB_URL} -O {GPT2_VOCAB_FP}')
if not os.path.isfile(GPT2_MERGE_FP):
os.system(f'wget {GPT2_MERGE_URL} -O {GPT2_MERGE_FP}')
DATA_DOWNLOADERS = {
"enron": Enron
}
def prepare_dataset(dataset_name):
os.makedirs(DATA_DIR, exist_ok=True)
maybe_download_gpt2_tokenizer_data()
DownloaderClass = DATA_DOWNLOADERS.get(dataset_name, None)
if DownloaderClass is None:
raise NotImplementedError
else:
d = DownloaderClass()
d.prepare()
| [((18, 11, 18, 47), 'os.environ.get', ({(18, 26, 18, 36): '"""DATA_DIR"""', (18, 38, 18, 46): '"""./data"""'}, {}), "('DATA_DIR', './data')", False, 'import os\n'), ((129, 4, 129, 40), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((52, 20, 52, 58), 'os.path.join', 'os.path.join', ({(52, 33, 52, 46): 'self.base_dir', (52, 48, 52, 57): 'self.name'}, {}), '(self.base_dir, self.name)', False, 'import os\n'), ((53, 8, 53, 45), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((60, 20, 60, 58), 'os.path.join', 'os.path.join', ({(60, 33, 60, 46): 'self.base_dir', (60, 48, 60, 57): 'self.name'}, {}), '(self.base_dir, self.name)', False, 'import os\n'), ((61, 8, 61, 45), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((78, 15, 78, 60), 'os.path.isdir', 'os.path.isdir', ({(78, 29, 78, 59): 'f"""{self.base_dir}/{self.name}"""'}, {}), "(f'{self.base_dir}/{self.name}')", False, 'import os\n'), ((82, 8, 82, 49), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((86, 24, 86, 62), 'os.path.join', 'os.path.join', ({(86, 37, 86, 50): 'self.base_dir', (86, 52, 86, 61): 'self.name'}, {}), '(self.base_dir, self.name)', False, 'import os\n'), ((89, 8, 96, 26), 'os.system', 'os.system', ({(89, 18, 96, 25): 'f"""python tools/preprocess_data.py --input {jsonl_filepath} --output-prefix {parent_folder}/{self.name} --vocab {GPT2_VOCAB_FP} --dataset-impl mmap --tokenizer-type GPT2BPETokenizer --merge-file {GPT2_MERGE_FP} --append-eod"""'}, {}), "(\n f'python tools/preprocess_data.py --input {jsonl_filepath} --output-prefix {parent_folder}/{self.name} --vocab {GPT2_VOCAB_FP} --dataset-impl mmap --tokenizer-type GPT2BPETokenizer --merge-file {GPT2_MERGE_FP} --append-eod'\n )", False, 'import os\n'), ((111, 20, 111, 58), 'os.path.join', 'os.path.join', ({(111, 33, 111, 46): 'self.base_dir', (111, 48, 111, 57): 'self.name'}, {}), '(self.base_dir, self.name)', False, 'import os\n'), ((119, 11, 119, 40), 'os.path.isfile', 'os.path.isfile', ({(119, 26, 119, 39): 'GPT2_VOCAB_FP'}, {}), '(GPT2_VOCAB_FP)', False, 'import os\n'), ((120, 8, 120, 62), 'os.system', 'os.system', ({(120, 18, 120, 61): 'f"""wget {GPT2_VOCAB_URL} -O {GPT2_VOCAB_FP}"""'}, {}), "(f'wget {GPT2_VOCAB_URL} -O {GPT2_VOCAB_FP}')", False, 'import os\n'), ((121, 11, 121, 40), 'os.path.isfile', 'os.path.isfile', ({(121, 26, 121, 39): 'GPT2_MERGE_FP'}, {}), '(GPT2_MERGE_FP)', False, 'import os\n'), ((122, 8, 122, 62), 'os.system', 'os.system', ({(122, 18, 122, 61): 'f"""wget {GPT2_MERGE_URL} -O {GPT2_MERGE_FP}"""'}, {}), "(f'wget {GPT2_MERGE_URL} -O {GPT2_MERGE_FP}')", False, 'import os\n'), ((54, 51, 54, 77), 'os.path.basename', 'os.path.basename', ({(54, 68, 54, 76): 'self.url'}, {}), '(self.url)', False, 'import os\n'), ((55, 13, 55, 47), 'tarfile.open', 'tarfile.open', ({(55, 26, 55, 38): 'tarfile_path', (55, 40, 55, 46): '"""r:gz"""'}, {}), "(tarfile_path, 'r:gz')", False, 'import tarfile\n'), ((62, 53, 62, 79), 'os.path.basename', 'os.path.basename', ({(62, 70, 62, 78): 'self.url'}, {}), '(self.url)', False, 'import os\n'), ((64, 21, 64, 49), 'zstandard.ZstdDecompressor', 'zstandard.ZstdDecompressor', ({}, {}), '()', False, 'import zstandard\n'), ((69, 12, 69, 37), 'os.remove', 'os.remove', ({(69, 22, 69, 36): 'zstd_file_path'}, {}), '(zstd_file_path)', False, 'import os\n'), ((116, 97, 116, 135), 'os.path.join', 'os.path.join', ({(116, 110, 116, 123): 'self.base_dir', (116, 125, 116, 134): 'self.name'}, {}), '(self.base_dir, self.name)', False, 'import os\n'), ((87, 53, 87, 79), 'os.path.basename', 'os.path.basename', ({(87, 70, 87, 78): 'self.url'}, {}), '(self.url)', False, 'import os\n'), ((83, 68, 83, 94), 'os.path.basename', 'os.path.basename', ({(83, 85, 83, 93): 'self.url'}, {}), '(self.url)', False, 'import os\n'), ((112, 54, 112, 80), 'os.path.basename', 'os.path.basename', ({(112, 71, 112, 79): 'self.url'}, {}), '(self.url)', False, 'import os\n'), ((116, 48, 116, 74), 'os.path.basename', 'os.path.basename', ({(116, 65, 116, 73): 'self.url'}, {}), '(self.url)', False, 'import os\n')] |
aka256/othello-rl | othello_rl/qlearning/qlearning.py | ef5e78c6cf6b276e16b50086b53138ab968d728c | from logging import getLogger
logger = getLogger(__name__)
class QLearning:
"""
  Class for Q-Learning

  Attributes
  ----------
  alpha : float
    Learning rate α
  gamma : float
    Discount factor γ
  data : dict
    Dictionary storing the Q-values learned by Q-Learning
  init_value : float
    Initial value for entries in data
"""
def __init__(self, alpha: float, gamma: float, data: dict = {}, init_value: float = 0) -> None:
self.alpha = alpha
self.gamma = gamma
self.data = data
self.init_value = init_value
def get(self, s: int, a: int) -> float:
"""
    Get a value from data

    Parameters
    ----------
    s : int
      State
    a : int
      Action

    Returns
    -------
    value : float
      Q-value, Q(s, a)
"""
return self.data.get((s, a), self.init_value)
def __set(self, s: int, a: int, value: float) -> None:
"""
    Assign a value to data

    Parameters
    ----------
    s : int
      State
    a : int
      Action
    value : float
      Q-value to assign, Q(s, a)
"""
self.data[(s, a)] = value
def update(self, s: int, a: int, r: float, q: float, *q_old: float) -> float:
"""
    Update the Q-value

    Parameters
    ----------
    s : int
      State
    a : int
      Action
    r : float
      Reward
    q : float
      Q(s_t+1, a)
    q_old : float
      Q(s, a)

    Returns
    -------
    q_new : float
      The updated Q-value
"""
if len(q_old) == 0:
q_old = self.get(s, a)
else:
q_old = q_old[0]
#print('alpha:{}, q_old:{}, r:{}, gamma:{}, q:{}'.format(self.alpha, q_old, r, self.gamma, q))
q_new = (1-self.alpha)*q_old+self.alpha*(r + self.gamma*q)
self.__set(s, a, q_new)
return q_new
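# A minimal usage sketch (the states/actions below are hypothetical integers).
# update() implements the temporal-difference rule
#   Q(s, a) <- (1 - alpha) * Q(s, a) + alpha * (r + gamma * max_a' Q(s', a')):
#
#   ql = QLearning(alpha=0.1, gamma=0.9)
#   best_next = max(ql.get(1, a) for a in (0, 1))  # max_a' Q(s'=1, a')
#   ql.update(0, 1, 1.0, best_next)                # returns 0.1 on fresh data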
| [((3, 9, 3, 28), 'logging.getLogger', 'getLogger', ({(3, 19, 3, 27): '__name__'}, {}), '(__name__)', False, 'from logging import getLogger\n')] |
loftwah/appscale | SearchService/test/unit/test_solr_interface.py | 586fc1347ebc743d7a632de698f4dbfb09ae38d6 | #!/usr/bin/env python
import os
import json
import sys
import unittest
import urllib2
from flexmock import flexmock
sys.path.append(os.path.join(os.path.dirname(__file__), "../../"))
import solr_interface
import search_exceptions
class FakeSolrDoc():
def __init__(self):
self.fields = []
class FakeDocument():
INDEX_NAME = "indexname"
INDEX_LOCALE = "indexlocale"
def __init__(self):
self.fields = []
self.id = "id"
self.language = "lang"
class FakeSchema():
def __init__(self):
self.fields = []
class FakeIndex():
def __init__(self):
self.name = "name"
self.schema = FakeSchema()
class FakeIndexSpec():
def __init__(self):
pass
def namespace(self):
return 'ns'
def name(self):
return self.name
class FakeUpdate():
def __init__(self, name, field_type):
self.name = name
self.field_type = field_type
class FakeConnection():
def __init__(self, is_good_code):
self.code = 200
if not is_good_code:
self.code = 500
def getcode(self):
return self.code
class TestSolrInterface(unittest.TestCase):
"""
A set of test cases for the solr interface module.
"""
def test_get_index_adapter(self):
appscale_info = flexmock()
appscale_info.should_receive("get_search_location").\
and_return("somelocation")
solr = solr_interface.Solr()
solr = flexmock(solr)
flexmock(solr_interface)
solr_interface.should_receive("get_index_name").and_return("index_ns_name")
flexmock(urllib2)
urllib2.should_receive("urlopen").and_return(FakeConnection(False))
self.assertRaises(search_exceptions.InternalError,
solr._get_index_adapter, "app_id", "ns", "name")
# Test the case of ValueError on a json.load.
urllib2.should_receive("urlopen").and_return(FakeConnection(True))
flexmock(json)
json.should_receive("load").and_raise(ValueError)
self.assertRaises(search_exceptions.InternalError,
solr._get_index_adapter, "app_id", "ns", "name")
# Test a bad status from SOLR.
dictionary = {'responseHeader':{'status': 1}}
json.should_receive("load").and_return(dictionary)
self.assertRaises(search_exceptions.InternalError,
solr._get_index_adapter, "app_id", "ns", "name")
fields = [{'name':"index_ns_name_"}]
dictionary = {'responseHeader':{'status': 0}, "fields": fields}
json.should_receive("load").and_return(dictionary)
index = solr._get_index_adapter("app_id", "ns", "name")
self.assertEquals(index.schema[0]['name'], "index_ns_name_")
def test_update_schema(self):
appscale_info = flexmock()
appscale_info.should_receive("get_search_location").\
and_return("somelocation")
solr = solr_interface.Solr()
flexmock(urllib2)
urllib2.should_receive("urlopen").and_return(FakeConnection(False))
updates = []
self.assertRaises(search_exceptions.InternalError,
solr.update_schema, updates)
updates = [{'name': 'name1', 'type':'type1'}]
flexmock(json)
json.should_receive("load").and_raise(ValueError)
urllib2.should_receive("urlopen").and_return(FakeConnection(True))
self.assertRaises(search_exceptions.InternalError,
solr.update_schema, updates)
dictionary = {"responseHeader":{"status":1}}
json.should_receive("load").and_return(dictionary)
self.assertRaises(search_exceptions.InternalError,
solr.update_schema, updates)
dictionary = {"responseHeader":{"status":0}}
json.should_receive("load").and_return(dictionary)
solr.update_schema(updates)
def test_to_solr_hash_map(self):
appscale_info = flexmock()
appscale_info.should_receive("get_search_location").\
and_return("somelocation")
solr = solr_interface.Solr()
self.assertNotEqual(solr.to_solr_hash_map(FakeIndex(), FakeDocument()), {})
def test_commit_update(self):
appscale_info = flexmock()
appscale_info.should_receive("get_search_location").\
and_return("somelocation")
solr = solr_interface.Solr()
flexmock(json)
json.should_receive("loads").and_return({})
flexmock(urllib2)
urllib2.should_receive("urlopen").and_return(FakeConnection(False))
self.assertRaises(search_exceptions.InternalError, solr.commit_update, {})
json.should_receive("load").and_raise(ValueError)
urllib2.should_receive("urlopen").and_return(FakeConnection(True))
self.assertRaises(search_exceptions.InternalError, solr.commit_update, {})
dictionary = {'responseHeader':{'status': 1}}
json.should_receive("load").and_return(dictionary).once()
self.assertRaises(search_exceptions.InternalError, solr.commit_update, {})
dictionary = {'responseHeader':{'status': 0}}
json.should_receive("load").and_return(dictionary).once()
solr.commit_update({})
def test_update_document(self):
appscale_info = flexmock()
appscale_info.should_receive("get_search_location").\
and_return("somelocation")
solr = solr_interface.Solr()
solr = flexmock(solr)
solr.should_receive("to_solr_doc").and_return(FakeSolrDoc())
solr.should_receive("_get_index_adapter").and_return(FakeIndex())
solr.should_receive("compute_updates").and_return([])
solr.should_receive("to_solr_hash_map").and_return(None)
solr.should_receive("commit_update").and_return(None)
solr.update_document("app_id", None, FakeIndexSpec())
solr.should_receive("compute_updates").and_return([1,2])
solr.should_receive("update_schema").twice()
solr.update_document("app_id", None, FakeIndexSpec())
solr.should_receive("to_solr_hash_map").and_return(None).once()
solr.update_document("app_id", None, FakeIndexSpec())
def test_json_loads_byteified(self):
json_with_unicode = (
'{"key2": [{"\\u2611": 28, "\\u2616": ["\\u263a"]}, "second", "third"], '
'"key1": "value", '
'"\\u2604": {"\\u2708": "\\u2708"}}'
)
parsed_obj = solr_interface.json_loads_byteified(json_with_unicode)
def walk_and_check_type(obj):
if isinstance(obj, dict):
for key, value in obj.iteritems():
self.assertIsInstance(key, str)
walk_and_check_type(value)
elif isinstance(obj, list):
for value in obj:
walk_and_check_type(value)
else:
self.assertIsInstance(obj, (str, int))
walk_and_check_type(parsed_obj)
self.assertEqual(parsed_obj, {
'key1': 'value',
'key2': [
{'\xe2\x98\x91': 28, '\xe2\x98\x96': ['\xe2\x98\xba']},
'second',
'third'
],
'\xe2\x98\x84': {'\xe2\x9c\x88': '\xe2\x9c\x88'}
})
| [((11, 29, 11, 54), 'os.path.dirname', ({(11, 45, 11, 53): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((62, 20, 62, 30), 'flexmock.flexmock', 'flexmock', ({}, {}), '()', False, 'from flexmock import flexmock\n'), ((65, 11, 65, 32), 'solr_interface.Solr', 'solr_interface.Solr', ({}, {}), '()', False, 'import solr_interface\n'), ((66, 11, 66, 25), 'flexmock.flexmock', 'flexmock', ({(66, 20, 66, 24): 'solr'}, {}), '(solr)', False, 'from flexmock import flexmock\n'), ((67, 4, 67, 28), 'flexmock.flexmock', 'flexmock', ({(67, 13, 67, 27): 'solr_interface'}, {}), '(solr_interface)', False, 'from flexmock import flexmock\n'), ((69, 4, 69, 21), 'flexmock.flexmock', 'flexmock', ({(69, 13, 69, 20): 'urllib2'}, {}), '(urllib2)', False, 'from flexmock import flexmock\n'), ((76, 4, 76, 18), 'flexmock.flexmock', 'flexmock', ({(76, 13, 76, 17): 'json'}, {}), '(json)', False, 'from flexmock import flexmock\n'), ((94, 20, 94, 30), 'flexmock.flexmock', 'flexmock', ({}, {}), '()', False, 'from flexmock import flexmock\n'), ((97, 11, 97, 32), 'solr_interface.Solr', 'solr_interface.Solr', ({}, {}), '()', False, 'import solr_interface\n'), ((99, 4, 99, 21), 'flexmock.flexmock', 'flexmock', ({(99, 13, 99, 20): 'urllib2'}, {}), '(urllib2)', False, 'from flexmock import flexmock\n'), ((106, 4, 106, 18), 'flexmock.flexmock', 'flexmock', ({(106, 13, 106, 17): 'json'}, {}), '(json)', False, 'from flexmock import flexmock\n'), ((122, 20, 122, 30), 'flexmock.flexmock', 'flexmock', ({}, {}), '()', False, 'from flexmock import flexmock\n'), ((125, 11, 125, 32), 'solr_interface.Solr', 'solr_interface.Solr', ({}, {}), '()', False, 'import solr_interface\n'), ((129, 20, 129, 30), 'flexmock.flexmock', 'flexmock', ({}, {}), '()', False, 'from flexmock import flexmock\n'), ((132, 11, 132, 32), 'solr_interface.Solr', 'solr_interface.Solr', ({}, {}), '()', False, 'import solr_interface\n'), ((134, 4, 134, 18), 'flexmock.flexmock', 'flexmock', ({(134, 13, 134, 17): 'json'}, {}), '(json)', False, 'from flexmock import flexmock\n'), ((137, 4, 137, 21), 'flexmock.flexmock', 'flexmock', ({(137, 13, 137, 20): 'urllib2'}, {}), '(urllib2)', False, 'from flexmock import flexmock\n'), ((154, 20, 154, 30), 'flexmock.flexmock', 'flexmock', ({}, {}), '()', False, 'from flexmock import flexmock\n'), ((157, 11, 157, 32), 'solr_interface.Solr', 'solr_interface.Solr', ({}, {}), '()', False, 'import solr_interface\n'), ((158, 11, 158, 25), 'flexmock.flexmock', 'flexmock', ({(158, 20, 158, 24): 'solr'}, {}), '(solr)', False, 'from flexmock import flexmock\n'), ((179, 17, 179, 71), 'solr_interface.json_loads_byteified', 'solr_interface.json_loads_byteified', ({(179, 53, 179, 70): 'json_with_unicode'}, {}), '(json_with_unicode)', False, 'import solr_interface\n'), ((68, 4, 68, 51), 'solr_interface.should_receive', 'solr_interface.should_receive', ({(68, 34, 68, 50): '"""get_index_name"""'}, {}), "('get_index_name')", False, 'import solr_interface\n'), ((70, 4, 70, 37), 'urllib2.should_receive', 'urllib2.should_receive', ({(70, 27, 70, 36): '"""urlopen"""'}, {}), "('urlopen')", False, 'import urllib2\n'), ((75, 4, 75, 37), 'urllib2.should_receive', 'urllib2.should_receive', ({(75, 27, 75, 36): '"""urlopen"""'}, {}), "('urlopen')", False, 'import urllib2\n'), ((77, 4, 77, 31), 'json.should_receive', 'json.should_receive', ({(77, 24, 77, 30): '"""load"""'}, {}), "('load')", False, 'import json\n'), ((83, 4, 83, 31), 'json.should_receive', 'json.should_receive', ({(83, 24, 83, 30): '"""load"""'}, {}), "('load')", False, 'import json\n'), ((89, 4, 89, 31), 'json.should_receive', 'json.should_receive', ({(89, 24, 89, 30): '"""load"""'}, {}), "('load')", False, 'import json\n'), ((100, 4, 100, 37), 'urllib2.should_receive', 'urllib2.should_receive', ({(100, 27, 100, 36): '"""urlopen"""'}, {}), "('urlopen')", False, 'import urllib2\n'), ((107, 4, 107, 31), 'json.should_receive', 'json.should_receive', ({(107, 24, 107, 30): '"""load"""'}, {}), "('load')", False, 'import json\n'), ((108, 4, 108, 37), 'urllib2.should_receive', 'urllib2.should_receive', ({(108, 27, 108, 36): '"""urlopen"""'}, {}), "('urlopen')", False, 'import urllib2\n'), ((113, 4, 113, 31), 'json.should_receive', 'json.should_receive', ({(113, 24, 113, 30): '"""load"""'}, {}), "('load')", False, 'import json\n'), ((118, 4, 118, 31), 'json.should_receive', 'json.should_receive', ({(118, 24, 118, 30): '"""load"""'}, {}), "('load')", False, 'import json\n'), ((135, 4, 135, 32), 'json.should_receive', 'json.should_receive', ({(135, 24, 135, 31): '"""loads"""'}, {}), "('loads')", False, 'import json\n'), ((138, 4, 138, 37), 'urllib2.should_receive', 'urllib2.should_receive', ({(138, 27, 138, 36): '"""urlopen"""'}, {}), "('urlopen')", False, 'import urllib2\n'), ((141, 4, 141, 31), 'json.should_receive', 'json.should_receive', ({(141, 24, 141, 30): '"""load"""'}, {}), "('load')", False, 'import json\n'), ((142, 4, 142, 37), 'urllib2.should_receive', 'urllib2.should_receive', ({(142, 27, 142, 36): '"""urlopen"""'}, {}), "('urlopen')", False, 'import urllib2\n'), ((146, 4, 146, 31), 'json.should_receive', 'json.should_receive', ({(146, 24, 146, 30): '"""load"""'}, {}), "('load')", False, 'import json\n'), ((150, 4, 150, 31), 'json.should_receive', 'json.should_receive', ({(150, 24, 150, 30): '"""load"""'}, {}), "('load')", False, 'import json\n')] |
ppm-avinder/payabbhi-python | payabbhi/error.py | 0f84f01349e365753f4b83eee584618e1a855567 | class PayabbhiError(Exception):
def __init__(self, description=None, http_status=None,
field=None):
self.description = description
self.http_status = http_status
self.field = field
self._message = self.error_message()
super(PayabbhiError, self).__init__(self._message)
def error_message(self):
msg = "message: " + self.description
msg = (msg + ", http_code: " + str(self.http_status)) if self.http_status else msg
msg = (msg + ", field: " + self.field) if self.field else msg
return msg + "\n"
class APIError(PayabbhiError):
pass
class APIConnectionError(PayabbhiError):
pass
class AuthenticationError(PayabbhiError):
pass
class InvalidRequestError(PayabbhiError):
pass
class GatewayError(PayabbhiError):
pass
class SignatureVerificationError(PayabbhiError):
pass
| [] |
TsinghuaAI/CPM-2-Pretrain | src/mpu/__init__.py | 33003865239e7ba13a12aabf9ec2735cef66bf3b | # coding=utf-8
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model parallel utility interface."""
from .cross_entropy import vocab_parallel_cross_entropy
from .data import broadcast_data
from .grads import clip_grad_norm
from .initialize import destroy_model_parallel
from .initialize import get_data_parallel_group
from .initialize import get_data_parallel_rank
from .initialize import get_data_parallel_world_size
from .initialize import get_model_parallel_group
from .initialize import get_model_parallel_rank
from .initialize import get_model_parallel_src_rank
from .initialize import get_model_parallel_world_size
from .initialize import initialize_model_parallel
from .initialize import model_parallel_is_initialized
from .layers import ColumnParallelLinear
from .layers import ParallelEmbedding
from .layers import RowParallelLinear
from .layers import VocabParallelEmbedding
from .mappings import copy_to_model_parallel_region
from .mappings import gather_from_model_parallel_region
from .mappings import reduce_from_model_parallel_region
from .mappings import scatter_to_model_parallel_region
from .random import checkpoint
from .random import partition_activations_in_checkpoint
from .random import get_cuda_rng_tracker
from .random import model_parallel_cuda_manual_seed
from .transformer_enc_dec import ParallelTransformer, LayerNorm
| [] |
MateusMolina/lunoERP | djangosige/apps/cadastro/models/empresa.py | 0880adb93b3a2d3169c6780efa60a229272f927a | # -*- coding: utf-8 -*-
import os
from django.db import models
from django.db.models.signals import post_delete
from django.dispatch import receiver
from .base import Pessoa
from djangosige.apps.login.models import Usuario
from djangosige.configs.settings import MEDIA_ROOT
def logo_directory_path(instance, filename):
extension = os.path.splitext(filename)[1]
return 'imagens/empresas/logo_{0}_{1}{2}'.format(instance.nome_razao_social, instance.id, extension)
class Empresa(Pessoa):
logo_file = models.ImageField(
upload_to=logo_directory_path, default='imagens/logo.png', blank=True, null=True)
cnae = models.CharField(max_length=10, blank=True, null=True)
iest = models.CharField(max_length=32, null=True, blank=True)
class Meta:
verbose_name = "Empresa"
@property
def caminho_completo_logo(self):
if self.logo_file.name != 'imagens/logo.png':
return os.path.join(MEDIA_ROOT, self.logo_file.name)
else:
return ''
def save(self, *args, **kwargs):
        # Delete the existing logo if one is already set
try:
obj = Empresa.objects.get(id=self.id)
if obj.logo_file != self.logo_file and obj.logo_file != 'imagens/logo.png':
obj.logo_file.delete(save=False)
except:
pass
super(Empresa, self).save(*args, **kwargs)
def __unicode__(self):
return u'%s' % self.nome_razao_social
def __str__(self):
return u'%s' % self.nome_razao_social
# Delete the logo when the company is deleted
@receiver(post_delete, sender=Empresa)
def logo_post_delete_handler(sender, instance, **kwargs):
    # Do not delete the default image 'logo.png'
if instance.logo_file != 'imagens/logo.png':
instance.logo_file.delete(False)
class MinhaEmpresa(models.Model):
m_empresa = models.ForeignKey(
Empresa, on_delete=models.CASCADE, related_name='minha_empresa', blank=True, null=True)
m_usuario = models.ForeignKey(
Usuario, on_delete=models.CASCADE, related_name='empresa_usuario')
| [((54, 1, 54, 38), 'django.dispatch.receiver', 'receiver', (), '', False, 'from django.dispatch import receiver\n'), ((20, 16, 21, 89), 'django.db.models.ImageField', 'models.ImageField', (), '', False, 'from django.db import models\n'), ((22, 11, 22, 65), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((23, 11, 23, 65), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((62, 16, 63, 95), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((64, 16, 65, 74), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((15, 16, 15, 42), 'os.path.splitext', 'os.path.splitext', ({(15, 33, 15, 41): 'filename'}, {}), '(filename)', False, 'import os\n'), ((31, 19, 31, 64), 'os.path.join', 'os.path.join', ({(31, 32, 31, 42): 'MEDIA_ROOT', (31, 44, 31, 63): 'self.logo_file.name'}, {}), '(MEDIA_ROOT, self.logo_file.name)', False, 'import os\n')] |
silverriver/Stylized_Dialog | WDJN/eval/eval.py | 559dd97c4ec9c91e94deb048f789684ef3f1f9fa | import os
from nltk.translate.bleu_score import corpus_bleu
from nltk.translate.bleu_score import SmoothingFunction
import json
from tqdm import tqdm, trange
from random import sample
import numpy as np
import pickle
import argparse
import bert_eval_acc
import svm_eval_acc
smooth = SmoothingFunction()
def eval_bleu(ref, pred):
"""
:param ref: list(list(list(any))), a list of reference sentences, each element of the list is a list of references
:param pred: list(list(any)), a list of predictions
:return: corpus bleu score
"""
return corpus_bleu(ref, pred, smoothing_function=smooth.method1)
def eval_bleu_detail(ref, pred):
"""
:param ref: list(list(list(any))), a list of reference sentences, each element of the list is a list of references
:param pred: list(list(any)), a list of predictions
:return: corpus bleu score
"""
return corpus_bleu(ref, pred, weights=[1, 0, 0, 0], smoothing_function=smooth.method1),\
corpus_bleu(ref, pred, weights=[0, 1, 0, 0], smoothing_function=smooth.method1), \
corpus_bleu(ref, pred, weights=[0, 0, 1, 0], smoothing_function=smooth.method1), \
corpus_bleu(ref, pred, weights=[0, 0, 0, 1], smoothing_function=smooth.method1)
def count_ngram(hyps_resp, n):
"""
Count the number of unique n-grams
:param hyps_resp: list, a list of responses
:param n: int, n-gram
:return: the number of unique n-grams in hyps_resp
"""
if len(hyps_resp) == 0:
print("ERROR, eval_distinct get empty input")
return
if type(hyps_resp[0]) != list:
print("ERROR, eval_distinct takes in a list of <class 'list'>, get a list of {} instead".format(
type(hyps_resp[0])))
return
ngram = set()
for resp in hyps_resp:
if len(resp) < n:
continue
for i in range(len(resp) - n + 1):
ngram.add(' '.join(resp[i: i + n]))
return len(ngram)
def eval_distinct_detail(hyps_resp):
"""
compute distinct score for the hyps_resp
:param hyps_resp: list, a list of hyps responses
:return: average distinct score for 1, 2-gram
"""
if len(hyps_resp) == 0:
print("ERROR, eval_distinct get empty input")
return
if type(hyps_resp[0]) != list:
print("ERROR, eval_distinct takes in a list of <class 'list'>, get a list of {} instead".format(
type(hyps_resp[0])))
return
hyps_resp = [[str(x) for x in l] for l in hyps_resp]
hyps_resp = [(' '.join(i)).split() for i in hyps_resp]
num_tokens = sum([len(i) for i in hyps_resp])
dist1 = count_ngram(hyps_resp, 1) / float(num_tokens)
dist2 = count_ngram(hyps_resp, 2) / float(num_tokens)
return dist1, dist2
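# Hand-checkable example: two identical responses ["a", "b"] give 4 tokens,
# 2 unique unigrams and 1 unique bigram, so
#   eval_distinct_detail([["a", "b"], ["a", "b"]])  # -> (0.5, 0.25)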
def eval_f1(ref, pred):
"""
:param ref: list(list(list(any))), a list of reference sentences, each element of the list is a list of references
:param pred: list(list(any)), a list of predictions
:return: f1 score
"""
assert len(ref) == len(pred) > 0
precisions = []
recalls = []
for i, s in enumerate(pred):
ref_set = set()
for rs in ref[i]:
for w in rs:
ref_set.add(w)
pred_set = set()
for w in s:
pred_set.add(w)
p = 0
for w in s:
if w in ref_set:
p += 1
if len(s) > 0:
p /= len(s)
r = 0
for rs in ref[i]:
for w in rs:
if w in pred_set:
r += 1
tot_l = sum([len(rs) for rs in ref[i]])
if tot_l > 0:
r /= tot_l
precisions.append(p)
recalls.append(r)
precision = sum(precisions) / len(precisions)
recall = sum(recalls) / len(recalls)
return 0.0 if precision == recall == 0 else 2 * precision * recall / (precision + recall)
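# Hand-checkable example: reference ["a", "b"] vs. prediction ["a", "c"] gives
# precision = recall = 1/2, hence
#   eval_f1([[["a", "b"]]], [["a", "c"]])  # -> 0.5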
def calc_metrics_value(task, fn, n_sample=None):
with open(fn) as f:
res = [json.loads(i) for i in f.readlines()]
s0_pred, s0_ref = [], []
s1_pred, s1_ref = [], []
for d in res:
if d['style'] == 0:
s0_ref.append([list(d['resp'])])
s0_pred.append(list(d['pred_style0'][0]))
else:
s1_ref.append([list(d['resp'])])
s1_pred.append(list(d['pred_style1'][0]))
if n_sample:
assert len(s0_ref) >= n_sample
assert len(s1_ref) >= n_sample
sampled_idxs = sample(range(len(s0_ref)), n_sample)
s0_ref = [x for i, x in enumerate(s0_ref) if i in sampled_idxs]
s0_pred = [x for i, x in enumerate(s0_pred) if i in sampled_idxs]
sampled_idxs = sample(range(len(s1_ref)), n_sample)
s1_ref = [x for i, x in enumerate(s1_ref) if i in sampled_idxs]
s1_pred = [x for i, x in enumerate(s1_pred) if i in sampled_idxs]
bleu_s0 = eval_bleu_detail(s0_ref, s0_pred)
bleu_s1 = eval_bleu_detail(s1_ref, s1_pred)
dist_s0 = eval_distinct_detail(s0_pred)
dist_s1 = eval_distinct_detail(s1_pred)
f1_s0 = eval_f1(s0_ref, s0_pred)
f1_s1 = eval_f1(s1_ref, s1_pred)
for k in range(1, 4):
print('%d-gram BLEU:' % k,
's0', bleu_s0[k - 1] * 100,
's1', bleu_s1[k - 1] * 100,
'mean', (bleu_s0[k - 1] + bleu_s1[k - 1]) / 2 * 100)
print('F1:',
's0', f1_s0 * 100, 's1', f1_s1 * 100,
'mean', (f1_s0 + f1_s1) / 2 * 100)
print('Dist:',
's0', dist_s0[1] * 100, 's1', dist_s1[1] * 100,
'mean', (dist_s0[1] + dist_s1[1]) / 2 * 100)
parser = argparse.ArgumentParser()
parser.add_argument('--eval_file_path', help='path of the eval file', required=True)
args = parser.parse_args()
file_path = args.eval_file_path
calc_metrics_value(None, file_path)
print("Evaluating acc results:")
bert_eval_acc.main(file_path)
svm_eval_acc.main(file_path)
| [((14, 9, 14, 28), 'nltk.translate.bleu_score.SmoothingFunction', 'SmoothingFunction', ({}, {}), '()', False, 'from nltk.translate.bleu_score import SmoothingFunction\n'), ((170, 9, 170, 34), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((177, 0, 177, 29), 'bert_eval_acc.main', 'bert_eval_acc.main', ({(177, 19, 177, 28): 'file_path'}, {}), '(file_path)', False, 'import bert_eval_acc\n'), ((178, 0, 178, 28), 'svm_eval_acc.main', 'svm_eval_acc.main', ({(178, 18, 178, 27): 'file_path'}, {}), '(file_path)', False, 'import svm_eval_acc\n'), ((23, 11, 23, 68), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (), '', False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((31, 11, 31, 90), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (), '', False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((32, 11, 32, 90), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (), '', False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((33, 11, 33, 90), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (), '', False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((34, 11, 34, 90), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (), '', False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((128, 15, 128, 28), 'json.loads', 'json.loads', ({(128, 26, 128, 27): 'i'}, {}), '(i)', False, 'import json\n')] |
olbjan/home-assistant-1 | homeassistant/components/unifi/const.py | 1adb45f74e96fc5eff137a3727647a7e428e123c | """Constants for the UniFi component."""
import logging
LOGGER = logging.getLogger(__package__)
DOMAIN = "unifi"
CONTROLLER_ID = "{host}-{site}"
CONF_CONTROLLER = "controller"
CONF_SITE_ID = "site"
UNIFI_WIRELESS_CLIENTS = "unifi_wireless_clients"
CONF_ALLOW_BANDWIDTH_SENSORS = "allow_bandwidth_sensors"
CONF_BLOCK_CLIENT = "block_client"
CONF_DETECTION_TIME = "detection_time"
CONF_POE_CLIENTS = "poe_clients"
CONF_TRACK_CLIENTS = "track_clients"
CONF_TRACK_DEVICES = "track_devices"
CONF_TRACK_WIRED_CLIENTS = "track_wired_clients"
CONF_SSID_FILTER = "ssid_filter"
DEFAULT_ALLOW_BANDWIDTH_SENSORS = False
DEFAULT_POE_CLIENTS = True
DEFAULT_TRACK_CLIENTS = True
DEFAULT_TRACK_DEVICES = True
DEFAULT_TRACK_WIRED_CLIENTS = True
DEFAULT_DETECTION_TIME = 300
ATTR_MANUFACTURER = "Ubiquiti Networks"
| [((4, 9, 4, 39), 'logging.getLogger', 'logging.getLogger', ({(4, 27, 4, 38): '__package__'}, {}), '(__package__)', False, 'import logging\n')] |
Jahidul007/Python-Bootcamp | coding_intereview/1656. Design an Ordered Stream.py | 3c870587465ff66c2c1871c8d3c4eea72463abda | from typing import List
class OrderedStream:
def __init__(self, n: int):
self.data = [None]*n
self.ptr = 0
def insert(self, id: int, value: str) -> List[str]:
id -= 1
self.data[id] = value
if id > self.ptr: return []
while self.ptr < len(self.data) and self.data[self.ptr]: self.ptr += 1
return self.data[id:self.ptr]
# Your OrderedStream object will be instantiated and called as such:
# obj = OrderedStream(n)
# param_1 = obj.insert(id,value)
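# Hand-checkable walk-through for n = 5:
#   obj = OrderedStream(5)
#   obj.insert(3, "c")  # -> []          (pointer still waits on id 1)
#   obj.insert(1, "a")  # -> ["a"]       (pointer advances past id 1)
#   obj.insert(2, "b")  # -> ["b", "c"]  (pointer advances past ids 2 and 3)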
| [] |
EQt/treelas | python/test/test_tree_dp.py | 24a5cebf101180822198806c0a4131b0efb7a36d | import numpy as np
from treelas import post_order, TreeInstance
def test_demo_3x7_postord():
parent = np.array([0, 4, 5, 0, 3, 4, 7, 8, 5, 6, 7, 8,
9, 14, 17, 12, 15, 16, 19, 16, 17])
po = post_order(parent, include_root=True)
expect = np.array([12, 11, 19, 20, 21, 14, 15, 18, 17, 16, 13,
10, 7, 8, 9, 3, 6, 2, 5, 4, 1], dtype='i4') - 1
assert (po == expect).all()
def test_demo_3x7():
y = np.fromstring("0.62 0.73 0.71 1.5 1.17 0.43 1.08 0.62 " +
"1.73 0.95 1.46 1.6 1.16 0.38 0.9 0.32 " +
"-0.48 0.95 1.08 0.02 0.4", sep=" ")
parent = np.array([0, 4, 5, 0, 3, 4, 7, 8, 5, 6, 7, 8,
9, 14, 17, 12, 15, 16, 19, 16, 17])
lam = 1.0
prob = TreeInstance(y, parent, lam=lam)
assert prob.root == 0
assert prob.parent.dtype == np.int32
prob.solve()
assert abs(prob.x.mean() - prob.y.mean()) < 1e-15
assert len(np.unique(prob.x)) == 2
assert max(np.abs(prob.dual[2:]) - lam) < 1e-12
assert max(np.abs(prob.gamma)) < 1e-15
| [((6, 13, 7, 58), 'numpy.array', 'np.array', ({(6, 22, 7, 57): '[0, 4, 5, 0, 3, 4, 7, 8, 5, 6, 7, 8, 9, 14, 17, 12, 15, 16, 19, 16, 17]'}, {}), '([0, 4, 5, 0, 3, 4, 7, 8, 5, 6, 7, 8, 9, 14, 17, 12, 15, 16, 19, 16,\n 17])', True, 'import numpy as np\n'), ((8, 9, 8, 46), 'treelas.post_order', 'post_order', (), '', False, 'from treelas import post_order, TreeInstance\n'), ((15, 8, 17, 58), 'numpy.fromstring', 'np.fromstring', (), '', True, 'import numpy as np\n'), ((18, 13, 19, 58), 'numpy.array', 'np.array', ({(18, 22, 19, 57): '[0, 4, 5, 0, 3, 4, 7, 8, 5, 6, 7, 8, 9, 14, 17, 12, 15, 16, 19, 16, 17]'}, {}), '([0, 4, 5, 0, 3, 4, 7, 8, 5, 6, 7, 8, 9, 14, 17, 12, 15, 16, 19, 16,\n 17])', True, 'import numpy as np\n'), ((21, 11, 21, 43), 'treelas.TreeInstance', 'TreeInstance', (), '', False, 'from treelas import post_order, TreeInstance\n'), ((9, 13, 10, 66), 'numpy.array', 'np.array', (), '', True, 'import numpy as np\n'), ((26, 15, 26, 32), 'numpy.unique', 'np.unique', ({(26, 25, 26, 31): 'prob.x'}, {}), '(prob.x)', True, 'import numpy as np\n'), ((28, 15, 28, 33), 'numpy.abs', 'np.abs', ({(28, 22, 28, 32): 'prob.gamma'}, {}), '(prob.gamma)', True, 'import numpy as np\n'), ((27, 15, 27, 36), 'numpy.abs', 'np.abs', ({(27, 22, 27, 35): 'prob.dual[2:]'}, {}), '(prob.dual[2:])', True, 'import numpy as np\n')] |
SD-CC-UFG/leonardo.fleury | lista01/rpc/ex01_cl.py | 0a8dfc5752c739f5ff98890477355df8960ad730 | import xmlrpc.client
def main():
s = xmlrpc.client.ServerProxy('http://localhost:9991')
    nome = input("Name: ")
    cargo = input("Position (programador, operador): ")
    salario = float(input("Salary: "))
print("\n\n{}".format(s.atualiza_salario(nome, cargo, salario)))
if __name__ == '__main__':
main()
| [] |
openshift-eng/art-dashboard-server | autocomplete/migrations/0001_initial.py | af4e78b3d2213c30038cf69de646f25fd57c9e3c | # Generated by Django 3.0.7 on 2020-07-27 19:23
import build.models
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='AutoCompleteRecord',
fields=[
('updated_at', build.models.UnixTimestampField(auto_created=True, null=True)),
('created_at', build.models.UnixTimestampField(auto_created=True, null=True)),
('log_autocomplete_record_id', models.AutoField(primary_key=True, serialize=False)),
('type', models.CharField(max_length=50)),
('value', models.CharField(max_length=300)),
],
options={
'db_table': 'log_autocomplete_record',
},
),
]
| [((20, 47, 20, 98), 'django.db.models.AutoField', 'models.AutoField', (), '', False, 'from django.db import migrations, models\n'), ((21, 25, 21, 56), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((22, 26, 22, 58), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n')] |
fcollman/pytorch-3dunet | unet3d/config.py | 303336bfdc0234f075c70e0c59759d09bc4081b8 | import argparse
import os
import torch
import yaml
DEFAULT_DEVICE = 'cuda:0'
def load_config():
parser = argparse.ArgumentParser(description='UNet3D training')
parser.add_argument('--config', type=str, help='Path to the YAML config file', required=True)
args = parser.parse_args()
config = _load_config_yaml(args.config)
# Get a device to train on
device = config.get('device', DEFAULT_DEVICE)
config['device'] = torch.device(device if torch.cuda.is_available() else "cpu")
return config
def _load_config_yaml(config_file):
return yaml.load(open(config_file, 'r'), Loader=yaml.FullLoader)
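# Usage sketch (the script name and config path below are hypothetical):
#   python train.py --config resources/train_config.yaml
# where the YAML may set e.g. "device: cuda:1"; otherwise DEFAULT_DEVICE is
# used whenever CUDA is available, with a CPU fallback.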
| [((11, 13, 11, 67), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((18, 46, 18, 71), 'torch.cuda.is_available', 'torch.cuda.is_available', ({}, {}), '()', False, 'import torch\n')] |
gunpowder78/webdnn | src/graph_transpiler/webdnn/backend/webgl/optimize_rules/simplify_channel_mode_conversion/simplify_channel_mode_conversion.py | c659ea49007f91d178ce422a1eebe289516a71ee | from webdnn.backend.webgl.optimize_rules.simplify_channel_mode_conversion.simplify_nonsense_channel_mode_conversion import \
SimplifyNonsenseChannelModeConversion
from webdnn.backend.webgl.optimize_rules.simplify_channel_mode_conversion.simplify_redundant_channel_mode_conversion import \
SimplifyRedundantChannelModeConversion
from webdnn.graph.optimize_rule import OptimizeRuleGroup
class SimplifyChannelModeConversion(OptimizeRuleGroup):
def __init__(self):
super(SimplifyChannelModeConversion, self).__init__([
SimplifyRedundantChannelModeConversion(),
SimplifyNonsenseChannelModeConversion()
])
| [((11, 12, 11, 52), 'webdnn.backend.webgl.optimize_rules.simplify_channel_mode_conversion.simplify_redundant_channel_mode_conversion.SimplifyRedundantChannelModeConversion', 'SimplifyRedundantChannelModeConversion', ({}, {}), '()', False, 'from webdnn.backend.webgl.optimize_rules.simplify_channel_mode_conversion.simplify_redundant_channel_mode_conversion import SimplifyRedundantChannelModeConversion\n'), ((12, 12, 12, 51), 'webdnn.backend.webgl.optimize_rules.simplify_channel_mode_conversion.simplify_nonsense_channel_mode_conversion.SimplifyNonsenseChannelModeConversion', 'SimplifyNonsenseChannelModeConversion', ({}, {}), '()', False, 'from webdnn.backend.webgl.optimize_rules.simplify_channel_mode_conversion.simplify_nonsense_channel_mode_conversion import SimplifyNonsenseChannelModeConversion\n')] |
akuala/REPO.KUALA | script.video.F4mProxy/lib/flvlib/constants.py | ea9a157025530d2ce8fa0d88431c46c5352e89d4 | """
The constants used in FLV files and their meanings.
"""
# Tag type
(TAG_TYPE_AUDIO, TAG_TYPE_VIDEO, TAG_TYPE_SCRIPT) = (8, 9, 18)
# Sound format
(SOUND_FORMAT_PCM_PLATFORM_ENDIAN,
SOUND_FORMAT_ADPCM,
SOUND_FORMAT_MP3,
SOUND_FORMAT_PCM_LITTLE_ENDIAN,
SOUND_FORMAT_NELLYMOSER_16KHZ,
SOUND_FORMAT_NELLYMOSER_8KHZ,
SOUND_FORMAT_NELLYMOSER,
SOUND_FORMAT_G711_A_LAW,
SOUND_FORMAT_G711_MU_LAW) = range(9)
(SOUND_FORMAT_AAC,
SOUND_FORMAT_SPEEX) = range(10, 12)
(SOUND_FORMAT_MP3_8KHZ,
SOUND_FORMAT_DEVICE_SPECIFIC) = range(14, 16)
sound_format_to_string = {
SOUND_FORMAT_PCM_PLATFORM_ENDIAN: "Linear PCM, platform endian",
SOUND_FORMAT_ADPCM: "ADPCM",
SOUND_FORMAT_MP3: "MP3",
SOUND_FORMAT_PCM_LITTLE_ENDIAN: "Linear PCM, little endian",
SOUND_FORMAT_NELLYMOSER_16KHZ: "Nellymoser 16-kHz mono",
SOUND_FORMAT_NELLYMOSER_8KHZ: "Nellymoser 8-kHz mono",
SOUND_FORMAT_NELLYMOSER: "Nellymoser",
SOUND_FORMAT_G711_A_LAW: "G.711 A-law logarithmic PCM",
SOUND_FORMAT_G711_MU_LAW: "G.711 mu-law logarithmic PCM",
SOUND_FORMAT_AAC: "AAC",
SOUND_FORMAT_SPEEX: "Speex",
SOUND_FORMAT_MP3_8KHZ: "MP3 8-kHz",
SOUND_FORMAT_DEVICE_SPECIFIC: "Device-specific sound"
}
# Sound rate
(SOUND_RATE_5_5_KHZ,
SOUND_RATE_11_KHZ,
SOUND_RATE_22_KHZ,
SOUND_RATE_44_KHZ) = range(4)
sound_rate_to_string = {
SOUND_RATE_5_5_KHZ: "5.5-kHz",
SOUND_RATE_11_KHZ: "11-kHz",
SOUND_RATE_22_KHZ: "22-kHz",
SOUND_RATE_44_KHZ: "44-kHz"
}
# Sound size
(SOUND_SIZE_8_BIT, SOUND_SIZE_16_BIT) = range(2)
sound_size_to_string = {
SOUND_SIZE_8_BIT: "snd8Bit",
SOUND_SIZE_16_BIT: "snd16Bit"
}
# Sound type
(SOUND_TYPE_MONO, SOUND_TYPE_STEREO) = range(2)
sound_type_to_string = {
SOUND_TYPE_MONO: "sndMono",
SOUND_TYPE_STEREO: "sndStereo"
}
# AAC packet type
(AAC_PACKET_TYPE_SEQUENCE_HEADER,
AAC_PACKET_TYPE_RAW) = range(2)
aac_packet_type_to_string = {
AAC_PACKET_TYPE_SEQUENCE_HEADER: "sequence header",
AAC_PACKET_TYPE_RAW: "raw"
}
# Codec ID
(CODEC_ID_JPEG,
CODEC_ID_H263,
CODEC_ID_SCREEN_VIDEO,
CODEC_ID_VP6,
CODEC_ID_VP6_WITH_ALPHA,
CODEC_ID_SCREEN_VIDEO_V2,
CODEC_ID_H264) = range(1, 8)
codec_id_to_string = {
CODEC_ID_JPEG: "JPEG",
CODEC_ID_H263: "Sorenson H.263",
CODEC_ID_SCREEN_VIDEO: "Screen video",
CODEC_ID_VP6: "On2 VP6",
CODEC_ID_VP6_WITH_ALPHA: "On2 VP6 with alpha channel",
CODEC_ID_SCREEN_VIDEO_V2: "Screen video version 2",
CODEC_ID_H264: "H.264"
}
# Frame type
(FRAME_TYPE_KEYFRAME,
FRAME_TYPE_INTERFRAME,
FRAME_TYPE_DISPOSABLE_INTERFRAME,
FRAME_TYPE_GENERATED_KEYFRAME,
FRAME_TYPE_INFO_FRAME) = range(1, 6)
frame_type_to_string = {
FRAME_TYPE_KEYFRAME: "keyframe",
FRAME_TYPE_INTERFRAME: "interframe",
FRAME_TYPE_DISPOSABLE_INTERFRAME: "disposable interframe",
FRAME_TYPE_GENERATED_KEYFRAME: "generated keyframe",
FRAME_TYPE_INFO_FRAME: "video info/command frame"
}
# H.264 packet type
(H264_PACKET_TYPE_SEQUENCE_HEADER,
H264_PACKET_TYPE_NALU,
H264_PACKET_TYPE_END_OF_SEQUENCE) = range(3)
h264_packet_type_to_string = {
H264_PACKET_TYPE_SEQUENCE_HEADER: "sequence header",
H264_PACKET_TYPE_NALU: "NAL unit",
H264_PACKET_TYPE_END_OF_SEQUENCE: "sequence end"
}
# Value type
(VALUE_TYPE_NUMBER,
VALUE_TYPE_BOOLEAN,
VALUE_TYPE_STRING,
VALUE_TYPE_OBJECT,
VALUE_TYPE_MOVIECLIP,
VALUE_TYPE_NULL,
VALUE_TYPE_UNDEFINED,
VALUE_TYPE_REFERENCE,
VALUE_TYPE_ECMA_ARRAY) = range(9)
(VALUE_TYPE_STRICT_ARRAY,
VALUE_TYPE_DATE,
VALUE_TYPE_LONGSTRING) = range(10, 13)
value_type_to_string = {
VALUE_TYPE_NUMBER: 'Number',
VALUE_TYPE_BOOLEAN: 'Boolean',
VALUE_TYPE_STRING: 'String',
VALUE_TYPE_OBJECT: 'Object',
VALUE_TYPE_MOVIECLIP: 'MovieClip',
VALUE_TYPE_NULL: 'Null',
VALUE_TYPE_UNDEFINED: 'Undefined',
VALUE_TYPE_REFERENCE: 'Reference',
VALUE_TYPE_ECMA_ARRAY: 'ECMA Array',
VALUE_TYPE_STRICT_ARRAY: 'Strict Array',
VALUE_TYPE_DATE: 'Date',
VALUE_TYPE_LONGSTRING: 'Longstring'
}
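# Example lookups against the tables above, assuming tag fields parsed elsewhere:
#   sound_format_to_string[SOUND_FORMAT_AAC]     # -> "AAC"
#   frame_type_to_string[FRAME_TYPE_KEYFRAME]    # -> "keyframe"
#   value_type_to_string[VALUE_TYPE_ECMA_ARRAY]  # -> "ECMA Array"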
| [] |
Rogerwlk/Natural-Language-Processing | A2/semcor_chunk.py | e1c0499180cec49ac0060aad7f0da00b61cfac94 | from nltk.corpus import semcor, wordnet as wn
class semcor_chunk:
def __init__(self, chunk):
self.chunk = chunk
#returns the synset if applicable, otherwise returns None
def get_syn_set(self):
try:
synset = self.chunk.label().synset()
return synset
except AttributeError:
try:
synset = wn.synset(self.chunk.label())
return synset
except:
return None
#returns a list of the words in the chunk
def get_words(self):
try:
return self.chunk.leaves()
except AttributeError:
return self.chunk
# if __name__ == "__main__":
# s = semcor.tagged_sents(tag='sem')[0]
# for chunk in s:
# a = semcor_chunk(chunk)
# print a.get_syn_set()
# for chunk in s:
# a = semcor_chunk(chunk)
# print a.get_words() | [] |
thoang3/graph_neural_network_benchmark | gnn_model.py | 72dc031ed23c6684c43d6f2ace03425f9b69cee6 | import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from load_cora import load_cora
from baseline_model import create_ffn
from utils import run_experiment
from utils import display_learning_curves
# Graph convolution layer
class GraphConvLayer(layers.Layer):
def __init__(
self,
hidden_units,
dropout_rate=0.2,
aggregation_type="mean",
combination_type="concat",
normalize=False,
*args,
**kwargs
):
super(GraphConvLayer, self).__init__(*args, **kwargs)
self._aggregation_type = aggregation_type
self._combination_type = combination_type
self._normalize = normalize
self._ffn_prepare = create_ffn(hidden_units, dropout_rate)
if self._combination_type == "gated":
self._update_fn = layers.GRU(
units=hidden_units,
activation="tanh",
recurrent_activation="sigmoid",
dropout=dropout_rate,
return_state=True,
recurrent_dropout=dropout_rate
)
else:
self._update_fn = create_ffn(hidden_units, dropout_rate)
def _prepare(self, node_representations, weights=None):
# node_representations shape is [num_edges, embedding_dim]
messages = self._ffn_prepare(node_representations)
if weights is not None:
messages = messages * tf.expand_dims(weights, -1)
return messages
def _aggregate(self, node_indices, neighbour_messages):
# node_indices shape is [num_edges]
# neighbour_messages shape: [num_edges, representation_dim]
num_nodes = tf.math.reduce_max(node_indices) + 1
if self._aggregation_type == "sum":
aggregated_message = tf.math.unsorted_segment_sum(
neighbour_messages,
node_indices,
num_segments=num_nodes
)
elif self._aggregation_type == "mean":
aggregated_message = tf.math.unsorted_segment_mean(
neighbour_messages,
node_indices,
num_segments=num_nodes
)
elif self._aggregation_type == "max":
aggregated_message = tf.math.unsorted_segment_max(
neighbour_messages,
node_indices,
num_segments=num_nodes
)
else:
raise ValueError(f"Invalid aggregation type: {self._aggregation_type}.")
return aggregated_message
def _update(self, node_representations, aggregated_messages):
# node_representations shape is [num_nodes, representation_dim]
# aggregated_messages shape is [num_nodes, representation_dim]
    if self._combination_type == "gated":
# Create a sequence of two elements for the GRU layer
      h = tf.stack([node_representations, aggregated_messages], axis=1)
elif self._combination_type == "concat":
# Concatenate the node_representations and aggregated_messages
h = tf.concat([node_representations, aggregated_messages], axis=1)
elif self._combination_type == "add":
# Add node_representations and aggregated_messages
h = node_representations + aggregated_messages
else:
      raise ValueError(f"Invalid combination type: {self._combination_type}.")
# Apply the processing function
node_embeddings = self._update_fn(h)
    if self._combination_type == "gated":
node_embeddings = tf.unstack(node_embeddings, axis=1)[-1]
if self._normalize:
node_embeddings = tf.nn.l2_normalize(node_embeddings, axis=-1)
return node_embeddings
def call(self, inputs):
"""Process the inputs to produce the node_embeddings.
Args:
Inputs:
A tuple of three elements: node_representations, edges, edge_weights.
Returns:
node_embeddings of shape [num_nodes, representation_dim].
"""
node_representations, edges, edge_weights = inputs
# Get node_indices (source) and neighbour_indices (target) from edges
node_indices, neighbour_indices = edges[0], edges[1]
# neighbour_representations shape is [num_edges, representation_dim]
neighbour_representations = tf.gather(node_representations, neighbour_indices)
# Prepare the messages of the neighbours
neighbour_messages = self._prepare(neighbour_representations, edge_weights)
# Aggregate the neighbour messages
aggregated_messages = self._aggregate(node_indices, neighbour_messages)
# Update the node embedding with the neighbour messages
return self._update(node_representations, aggregated_messages)
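# One round of message passing over the sparse edge list, as implemented above:
#   prepare:   gather neighbour representations and transform them into messages,
#   aggregate: segment-reduce the messages per target node (sum / mean / max),
#   update:    combine each node's representation with its aggregated messages.
# With num_nodes nodes and edges of shape [2, num_edges], the result is a
# [num_nodes, embedding_dim] tensor of updated node embeddings.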
class GNNNodeClassifier(tf.keras.Model):
def __init__(
self,
graph_info,
num_classes,
hidden_units,
aggregation_type="sum",
combination_type="concat",
dropout_rate=0.2,
normalize=True,
*args,
**kwargs
):
super(GNNNodeClassifier, self).__init__(*args, **kwargs)
# Unpack graph_info
node_features, edges, edge_weights = graph_info
self._node_features = node_features
self._edges = edges
self._edge_weights = edge_weights
# Set edge_weights to ones if not provided
if self._edge_weights is None:
self._edge_weights = tf.ones(shape=edges.shape[1])
# Scale edge_weights to sum to 1
self._edge_weights = self._edge_weights / tf.math.reduce_sum(self._edge_weights)
# Create a process layer
self._preprocess = create_ffn(hidden_units, dropout_rate, name="preprocess")
# Create the 1st GraphConv layer
self._conv1 = GraphConvLayer(
hidden_units,
dropout_rate,
aggregation_type,
combination_type,
normalize,
name="graph_conv1"
)
# Create the 2nd GraphConv layer
self._conv2 = GraphConvLayer(
hidden_units,
dropout_rate,
aggregation_type,
combination_type,
normalize,
name="graph_conv2"
)
# Create a postprocess layer
self._postprocess = create_ffn(hidden_units, dropout_rate, name="postprocess")
# Create a compute logits layer
self._compute_logits = layers.Dense(units=num_classes, name="logits")
def call(self, input_node_indices):
# Preprocess the node_features to produce node representations
x = self._preprocess(self._node_features)
# Apply the 1st graph conv layer
x1 = self._conv1((x, self._edges, self._edge_weights))
# Skip connection
x = x1 + x
# Apply the 2nd graph conv layer
x2 = self._conv2((x, self._edges, self._edge_weights))
# Skip connection
x = x2 + x
# Postprocess node embedding
x = self._postprocess(x)
# Fetch node embeddings for the input node_indices
node_embeddings = tf.gather(x, input_node_indices)
# Compute logits
return self._compute_logits(node_embeddings)
if __name__ == '__main__':
papers, train_data, test_data, paper_idx, class_idx, citations, feature_names = load_cora(verbose=1)
num_features = len(feature_names)
num_classes = len(class_idx)
hidden_units = [32, 32]
learning_rate = 0.01
dropout_rate = 0.5
epochs = 300
batch_size = 256
# Create an edges array (sparse adjacency matrix) of shape [2, num_edges]
edges = citations[["source", "target"]].to_numpy().T
#print(edges)
# Create an edge weights array of ones (default weights)
edge_weights = tf.ones(shape=edges.shape[1])
# Create a node features array of shape [num_nodes, num_features]
node_features = tf.cast(
papers.sort_values("paper_id")[feature_names].to_numpy(), dtype=tf.float32)
# Create graph info tuple with node_features, edges, and edge_weights
graph_info = (node_features, edges, edge_weights)
print("Edges shape: ", edges.shape)
print("Nodes shape: ", node_features.shape)
gnn_model = GNNNodeClassifier(
graph_info=graph_info,
num_classes=num_classes,
hidden_units=hidden_units,
dropout_rate=dropout_rate,
name="gnn_model"
)
print("GNN output shape: ", gnn_model([1, 10, 100]))
gnn_model.summary()
# Train the GNN model
X_train = train_data.paper_id.to_numpy()
y_train = train_data.subject
history = run_experiment(gnn_model, X_train, y_train, batch_size, epochs, learning_rate)
# Plot the learning curves
display_learning_curves(history, figure_name="gnn.png")
# Evaluate on test data
X_test = test_data.paper_id.to_numpy()
y_test = test_data.subject
_, test_accuracy = gnn_model.evaluate(x=X_test, y=y_test, verbose=1)
print(f"Test accuracy: {round(test_accuracy * 100, 2)}%")
| [((199, 83, 199, 103), 'load_cora.load_cora', 'load_cora', (), '', False, 'from load_cora import load_cora\n'), ((214, 17, 214, 46), 'tensorflow.ones', 'tf.ones', (), '', True, 'import tensorflow as tf\n'), ((240, 12, 240, 90), 'utils.run_experiment', 'run_experiment', ({(240, 27, 240, 36): 'gnn_model', (240, 38, 240, 45): 'X_train', (240, 47, 240, 54): 'y_train', (240, 56, 240, 66): 'batch_size', (240, 68, 240, 74): 'epochs', (240, 76, 240, 89): 'learning_rate'}, {}), '(gnn_model, X_train, y_train, batch_size, epochs, learning_rate)', False, 'from utils import run_experiment\n'), ((243, 2, 243, 57), 'utils.display_learning_curves', 'display_learning_curves', (), '', False, 'from utils import display_learning_curves\n'), ((28, 24, 28, 62), 'baseline_model.create_ffn', 'create_ffn', ({(28, 35, 28, 47): 'hidden_units', (28, 49, 28, 61): 'dropout_rate'}, {}), '(hidden_units, dropout_rate)', False, 'from baseline_model import create_ffn\n'), ((115, 32, 115, 82), 'tensorflow.gather', 'tf.gather', ({(115, 42, 115, 62): 'node_representations', (115, 64, 115, 81): 'neighbour_indices'}, {}), '(node_representations, neighbour_indices)', True, 'import tensorflow as tf\n'), ((153, 23, 153, 80), 'baseline_model.create_ffn', 'create_ffn', (), '', False, 'from baseline_model import create_ffn\n'), ((173, 24, 173, 82), 'baseline_model.create_ffn', 'create_ffn', (), '', False, 'from baseline_model import create_ffn\n'), ((175, 27, 175, 73), 'tensorflow.keras.layers.Dense', 'layers.Dense', (), '', False, 'from tensorflow.keras import layers\n'), ((191, 22, 191, 54), 'tensorflow.gather', 'tf.gather', ({(191, 32, 191, 33): 'x', (191, 35, 191, 53): 'input_node_indices'}, {}), '(x, input_node_indices)', True, 'import tensorflow as tf\n'), ((30, 24, 37, 7), 'tensorflow.keras.layers.GRU', 'layers.GRU', (), '', False, 'from tensorflow.keras import layers\n'), ((39, 24, 39, 62), 'baseline_model.create_ffn', 'create_ffn', ({(39, 35, 39, 47): 'hidden_units', (39, 49, 39, 61): 'dropout_rate'}, {}), '(hidden_units, dropout_rate)', False, 'from baseline_model import create_ffn\n'), ((52, 16, 52, 48), 'tensorflow.math.reduce_max', 'tf.math.reduce_max', ({(52, 35, 52, 47): 'node_indices'}, {}), '(node_indices)', True, 'import tensorflow as tf\n'), ((54, 27, 58, 7), 'tensorflow.math.unsorted_segment_sum', 'tf.math.unsorted_segment_sum', (), '', True, 'import tensorflow as tf\n'), ((81, 10, 81, 72), 'tensorflow.stack', 'tf.stack', (), '', True, 'import tensorflow as tf\n'), ((97, 24, 97, 68), 'tensorflow.nn.l2_normalize', 'tf.nn.l2_normalize', (), '', True, 'import tensorflow as tf\n'), ((148, 27, 148, 56), 'tensorflow.ones', 'tf.ones', (), '', True, 'import tensorflow as tf\n'), ((150, 46, 150, 84), 'tensorflow.math.reduce_sum', 'tf.math.reduce_sum', ({(150, 65, 150, 83): 'self._edge_weights'}, {}), '(self._edge_weights)', True, 'import tensorflow as tf\n'), ((45, 28, 45, 55), 'tensorflow.expand_dims', 'tf.expand_dims', ({(45, 43, 45, 50): 'weights', (45, 52, 45, 54): '(-1)'}, {}), '(weights, -1)', True, 'import tensorflow as tf\n'), ((60, 27, 64, 7), 'tensorflow.math.unsorted_segment_mean', 'tf.math.unsorted_segment_mean', (), '', True, 'import tensorflow as tf\n'), ((84, 10, 84, 72), 'tensorflow.concat', 'tf.concat', (), '', True, 'import tensorflow as tf\n'), ((94, 24, 94, 59), 'tensorflow.unstack', 'tf.unstack', (), '', True, 'import tensorflow as tf\n'), ((66, 27, 70, 7), 'tensorflow.math.unsorted_segment_max', 'tf.math.unsorted_segment_max', (), '', True, 'import tensorflow as tf\n')] |
jfarmer08/hassio | deps/lib/python3.5/site-packages/netdisco/discoverables/samsung_tv.py | 792a6071a97bb33857c14c9937946233c620035c | """Discover Samsung Smart TV services."""
from . import SSDPDiscoverable
from ..const import ATTR_NAME
# For some models, Samsung forces a [TV] prefix to the user-specified name.
FORCED_NAME_PREFIX = '[TV]'
class Discoverable(SSDPDiscoverable):
"""Add support for discovering Samsung Smart TV services."""
def get_entries(self):
"""Get all the Samsung RemoteControlReceiver entries."""
return self.find_by_st(
"urn:samsung.com:device:RemoteControlReceiver:1")
def info_from_entry(self, entry):
"""Get most important info, by default the description location."""
info = super().info_from_entry(entry)
# Strip the forced prefix, if present
if info[ATTR_NAME].startswith(FORCED_NAME_PREFIX):
info[ATTR_NAME] = info[ATTR_NAME][len(FORCED_NAME_PREFIX):].strip()
return info
| [] |
scrambler-crypto/pyecsca | pyecsca/sca/re/__init__.py | 491abfb548455669abd470382a48dcd07b2eda87 | """Package for reverse-engineering."""
from .rpa import *
| [] |
Juhanostby/django-apotek-sapmi | sapmi/employees/migrations/0002_remove_employee_phone_alt.py | 972a05ca9d54eed62b640572fcf582cc8751d15a | # Generated by Django 3.2.5 on 2021-12-21 19:42
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('employees', '0001_initial'),
]
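    # Drops the employee.phone_alt column; on rollback Django re-creates it
    # from the field definition recorded in 0001_initial.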
operations = [
migrations.RemoveField(
model_name='employee',
name='phone_alt',
),
]
| [((13, 8, 16, 9), 'django.db.migrations.RemoveField', 'migrations.RemoveField', (), '', False, 'from django.db import migrations\n')] |
konrad2508/kokomi-discord-bot | src/model/exception/emote_fetch_error.py | 5a9d459e92d552fa24ba3ada5188db19d93f0aaa | class EmoteFetchError(Exception):
'''Exception stating that there was a problem while fetching emotes from a source.'''
| [] |
andremtsilva/dissertacao | src/sim/basicExample/main.py | 7c039ffe871468be0215c482adb42830fff586aa | """
This is the simplest scenario: a basic topology, some users, and a set of apps with only one service.
@author: Isaac Lera
"""
import os
import time
import json
import random
import logging.config
import networkx as nx
import numpy as np
from pathlib import Path
from yafs.core import Sim
from yafs.application import create_applications_from_json
from yafs.topology import Topology
from yafs.placement import JSONPlacement
from yafs.path_routing import DeviceSpeedAwareRouting
from yafs.distribution import deterministic_distribution
from yafs.stats import Stats
RANDOM_SEED = 1
def main(stop_time, it):
folder_results = Path("results/")
folder_results.mkdir(parents=True, exist_ok=True)
folder_results = str(folder_results)+"/"
"""
TOPOLOGY
"""
    # Seed the RNGs so node attributes and layout positions are reproducible
random.seed(RANDOM_SEED)
np.random.seed(RANDOM_SEED)
t = Topology()
    # You can also create a topology from JSON files; see the examples folder
size = 3
    t.G = nx.generators.binomial_tree(size)  # NetworkX ships many other graph generators
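    # A binomial tree of order 3 has 2**3 == 8 nodes (ids 0..7).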
# Definition of mandatory attributes of a Topology
## Attr. on edges
    # PR (link propagation delay) and BW (bandwidth) are set to 1 unit
attPR_BW = {x: 1 for x in t.G.edges()}
nx.set_edge_attributes(t.G, name="PR", values=attPR_BW)
nx.set_edge_attributes(t.G, name="BW", values=attPR_BW)
## Attr. on nodes
# IPT
attIPT = {x: random.randrange(100, 900, 100) for x in t.G.nodes()}
nx.set_node_attributes(t.G, name="IPT", values=attIPT)
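    # e.g. attIPT == {0: 400, 1: 100, ...}: every node gets a random IPT
    # (processing speed, instructions per time unit) from {100, 200, ..., 800}.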
    # nx.write_gexf(t.G,folder_results+"graph_binomial_tree_%i"%size) # you can export the graph in multiple formats to view in tools such as Gephi
nx.write_graphml(t.G,folder_results+"graph_binomial_tree_%i.graphml"%size)
# Graph visualization
pos = nx.spring_layout(t.G)
nx.draw(t.G, pos, with_labels=True, edge_color='black', width=1,
alpha=0.7)
    print(t.G.nodes())  # node ids can be str or int
print()
print(nx.get_node_attributes(t.G, "IPT"))
print()
"""
APPLICATION or SERVICES
"""
dataApp = json.load(open('data/appDefinition.json'))
apps = create_applications_from_json(dataApp)
# print(apps)
"""
SERVICE PLACEMENT
"""
placementJson = json.load(open('data/allocDefinition.json'))
placement = JSONPlacement(name="Placement", json=placementJson)
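    # JSONPlacement pins each application module to the node id listed for it
    # in allocDefinition.json.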
"""
    Define the ROUTING algorithm that decides how messages are routed among modules in the topology
"""
selectorPath = DeviceSpeedAwareRouting()
"""
SIMULATION ENGINE
"""
s = Sim(t, default_results_path=folder_results+"sim_trace")
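    # The simulator writes its event trace under this path prefix (presumably
    # results/sim_trace.csv), which Stats reads back at the bottom of the file.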
"""
Deploy services == APP's modules
"""
for aName in apps.keys():
s.deploy_app(apps[aName], placement, selectorPath) # Note: each app can have a different routing algorithm
"""
Deploy users
"""
userJSON = json.load(open('data/usersDefinition.json'))
for user in userJSON["sources"]:
app_name = user["app"]
app = s.apps[app_name]
msg = app.get_message(user["message"])
node = user["id_resource"]
dist = deterministic_distribution(100, name="Deterministic")
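        # Each source emits its message at a fixed interval of 100 time units.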
idDES = s.deploy_source(app_name, id_node=node, msg=msg, distribution=dist)
"""
RUNNING - last step
"""
logging.info(" Performing simulation: %i " % it)
    s.run(stop_time)  # To test deployments, set test_initial_deploy to True
s.print_debug_assignaments()
if __name__ == '__main__':
logging.config.fileConfig(os.getcwd() + '/logging.ini')
nIterations = 1 # iteration for each experiment
simulationDuration = 1000
    # Run each experiment iteration with a different random seed
for iteration in range(nIterations):
random.seed(iteration)
logging.info("Running experiment it: - %i" % iteration)
start_time = time.time()
main(stop_time=simulationDuration,
it=iteration)
print("\n--- %s seconds ---" % (time.time() - start_time))
print("Simulation Done!")
m = Stats(defaultPath="results/sim_trace")
# print ("\tNetwork bytes transmitted:")
# print (f"\t\t{m.bytes_transmitted():.1f}")
# m.df_link.head(15) # from Stats class
time_loops = [["M.USER.APP.0", "M.USER.APP.1", "M.USER.APP.2",
"M.USER.APP.3"]]
m.showResults2(10000, time_loops=time_loops)
m.compute_times_df()
print ("\t- Network saturation -")
print()
print ("\t\tAverage waiting messages : "
f"{m.average_messages_not_transmitted()}")
print()
print ("\t\tPeak of waiting messages :"
f"{m.peak_messages_not_transmitted()}")
print()
print(f"\t\tShow Loops: {m.showLoops(time_loops)}")
print()
print (f"\t\tTOTAL messages not transmitted:"
f" {m.messages_not_transmitted()}")
print()
#print(m.df.head())
#print(m.df['time_latency'])
#print(m.df_link.head())
print(m.get_df_modules()) | [((30, 21, 30, 37), 'pathlib.Path', 'Path', ({(30, 26, 30, 36): '"""results/"""'}, {}), "('results/')", False, 'from pathlib import Path\n'), ((38, 4, 38, 28), 'random.seed', 'random.seed', ({(38, 16, 38, 27): 'RANDOM_SEED'}, {}), '(RANDOM_SEED)', False, 'import random\n'), ((39, 4, 39, 31), 'numpy.random.seed', 'np.random.seed', ({(39, 19, 39, 30): 'RANDOM_SEED'}, {}), '(RANDOM_SEED)', True, 'import numpy as np\n'), ((42, 8, 42, 18), 'yafs.topology.Topology', 'Topology', ({}, {}), '()', False, 'from yafs.topology import Topology\n'), ((46, 10, 46, 43), 'networkx.generators.binomial_tree', 'nx.generators.binomial_tree', ({(46, 38, 46, 42): 'size'}, {}), '(size)', True, 'import networkx as nx\n'), ((52, 4, 52, 59), 'networkx.set_edge_attributes', 'nx.set_edge_attributes', (), '', True, 'import networkx as nx\n'), ((53, 4, 53, 59), 'networkx.set_edge_attributes', 'nx.set_edge_attributes', (), '', True, 'import networkx as nx\n'), ((57, 4, 57, 58), 'networkx.set_node_attributes', 'nx.set_node_attributes', (), '', True, 'import networkx as nx\n'), ((60, 4, 60, 78), 'networkx.write_graphml', 'nx.write_graphml', ({(60, 21, 60, 24): 't.G', (60, 25, 60, 77): "(folder_results + 'graph_binomial_tree_%i.graphml' % size)"}, {}), "(t.G, folder_results + 'graph_binomial_tree_%i.graphml' % size)", True, 'import networkx as nx\n'), ((63, 10, 63, 31), 'networkx.spring_layout', 'nx.spring_layout', ({(63, 27, 63, 30): 't.G'}, {}), '(t.G)', True, 'import networkx as nx\n'), ((64, 4, 65, 22), 'networkx.draw', 'nx.draw', (), '', True, 'import networkx as nx\n'), ((76, 11, 76, 49), 'yafs.application.create_applications_from_json', 'create_applications_from_json', ({(76, 41, 76, 48): 'dataApp'}, {}), '(dataApp)', False, 'from yafs.application import create_applications_from_json\n'), ((84, 16, 84, 67), 'yafs.placement.JSONPlacement', 'JSONPlacement', (), '', False, 'from yafs.placement import JSONPlacement\n'), ((89, 19, 89, 44), 'yafs.path_routing.DeviceSpeedAwareRouting', 'DeviceSpeedAwareRouting', ({}, {}), '()', False, 'from yafs.path_routing import DeviceSpeedAwareRouting\n'), ((94, 8, 94, 63), 'yafs.core.Sim', 'Sim', (), '', False, 'from yafs.core import Sim\n'), ((146, 8, 146, 46), 'yafs.stats.Stats', 'Stats', (), '', False, 'from yafs.stats import Stats\n'), ((56, 17, 56, 48), 'random.randrange', 'random.randrange', ({(56, 34, 56, 37): '(100)', (56, 39, 56, 42): '(900)', (56, 44, 56, 47): '(100)'}, {}), '(100, 900, 100)', False, 'import random\n'), ((69, 10, 69, 44), 'networkx.get_node_attributes', 'nx.get_node_attributes', ({(69, 33, 69, 36): 't.G', (69, 38, 69, 43): '"""IPT"""'}, {}), "(t.G, 'IPT')", True, 'import networkx as nx\n'), ((111, 15, 111, 68), 'yafs.distribution.deterministic_distribution', 'deterministic_distribution', (), '', False, 'from yafs.distribution import deterministic_distribution\n'), ((135, 8, 135, 30), 'random.seed', 'random.seed', ({(135, 20, 135, 29): 'iteration'}, {}), '(iteration)', False, 'import random\n'), ((138, 21, 138, 32), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((128, 30, 128, 41), 'os.getcwd', 'os.getcwd', ({}, {}), '()', False, 'import os\n'), ((142, 40, 142, 51), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n')] |
dbvis-ukon/coronavis | Backend/models/risklayerPrognosis.py | f00374ac655c9d68541183d28ede6fe5536581dc | from db import db
class RisklayerPrognosis(db.Model):
__tablename__ = 'risklayer_prognosis'
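    # 'datenbestand' (German, roughly "data as of") is the pull timestamp and
    # serves as the primary key.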
datenbestand = db.Column(db.TIMESTAMP, primary_key=True, nullable=False)
prognosis = db.Column(db.Float, nullable=False)
# class RisklayerPrognosisSchema(SQLAlchemyAutoSchema):
# class Meta:
# strict = True
# model = RisklayerPrognosis
#
# timestamp = fields.Timestamp(data_key="datenbestand")
# prognosis = fields.Number(data_key="prognosis")
| [((7, 19, 7, 76), 'db.db.Column', 'db.Column', (), '', False, 'from db import db\n'), ((8, 16, 8, 51), 'db.db.Column', 'db.Column', (), '', False, 'from db import db\n')] |
smartfile/django-secureform | tests.py | 3b7a8b90550327f370ea02c6886220b2db0517b5 | import os
import unittest
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
import django
if django.VERSION >= (1, 7):
django.setup()
from django import forms
from django.db import models
from django.forms.forms import NON_FIELD_ERRORS
from django_secureform.forms import SecureForm
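# In a secured form, each real field is renamed to a random alias; the map
# form._secure_field_map goes alias -> real name, and an alias mapped to None
# is a honeypot with no real counterpart. The helpers below invert that map.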
def get_form_sname(form, name):
for sname, v in form._secure_field_map.items():
if v and v == name:
return sname
raise KeyError(name)
def get_form_honeypot(form):
for sname, v in form._secure_field_map.items():
if v is None:
return sname
raise Exception('No honeypots found.')
def get_form_secure_data(form):
# We must copy over the security data.
return form._meta.secure_field_name, form[form._meta.secure_field_name].value()
class BasicForm(SecureForm):
name = forms.CharField(required=True, max_length=16)
class FormTestCase(unittest.TestCase):
klass = BasicForm
def setUp(self):
self.form = self.klass()
self.form.secure_data()
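    # Fallback assertIn, presumably kept for Python versions whose unittest
    # lacks the built-in assertion.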
def assertIn(self, value, iterable):
self.assertTrue(value in iterable, '%s did not occur in %s' % (value,
iterable))
def getForm(self, **kwargs):
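        # get_form_secure_data returns a (field_name, value) pair, so dict()
        # over the 1-tuple yields {secure_field_name: secure_payload}.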
data = dict((get_form_secure_data(self.form), ))
for n, v in kwargs.items():
data[get_form_sname(self.form, n)] = v
return self.klass(data=data)
class BasicTestCase(FormTestCase):
def test_valid(self):
post = self.getForm(name='foobar')
self.assertTrue(post.is_valid())
def test_missing(self):
post = self.getForm()
self.assertFalse(post.is_valid())
self.assertIn('name', post._errors)
def test_replay(self):
post = self.getForm(name='foobar')
post.is_valid()
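        # The first validation consumes the one-time token in the secure data,
        # so replaying the identical payload below must be rejected.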
post = self.getForm(name='foobar')
self.assertFalse(post.is_valid())
self.assertIn(NON_FIELD_ERRORS, post._errors)
self.assertIn('This form has already been submitted.', post._errors[NON_FIELD_ERRORS])
def test_honeypot(self):
honeypot = get_form_honeypot(self.form)
data = dict((get_form_secure_data(self.form), ))
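        # Any non-empty value in the hidden honeypot field flags the
        # submission as automated.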
data[honeypot] = 'mmm, hunny!'
data[get_form_sname(self.form, 'name')] = 'foobar'
post = self.klass(data=data)
self.assertFalse(post.is_valid())
self.assertIn(NON_FIELD_ERRORS, post._errors)
self.assertIn('Unexpected value in form field.', post._errors[NON_FIELD_ERRORS])
if __name__ == '__main__':
unittest.main()
| [((7, 4, 7, 18), 'django.setup', 'django.setup', ({}, {}), '()', False, 'import django\n'), ((35, 11, 35, 56), 'django.forms.CharField', 'forms.CharField', (), '', False, 'from django import forms\n'), ((86, 4, 86, 19), 'unittest.main', 'unittest.main', ({}, {}), '()', False, 'import unittest\n')] |
hackerman-101/Hacktoberfest-2022 | opencv/resizing.py | 839f28293930987da55f8a2414efaa1cf9676cc9 | import cv2 as cv
import numpy as np
cap = cv.VideoCapture(1)
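# Index 1 opens the second attached camera; 0 is usually the default device.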
print(cap.get(cv.CAP_PROP_FRAME_WIDTH))
print(cap.get(cv.CAP_PROP_FRAME_HEIGHT))
cap.set(cv.CAP_PROP_FRAME_WIDTH, 3000)   # property id 3
cap.set(cv.CAP_PROP_FRAME_HEIGHT, 3000)  # property id 4
print(cap.get(cv.CAP_PROP_FRAME_WIDTH))
print(cap.get(cv.CAP_PROP_FRAME_HEIGHT))
while cap.isOpened():
    ret, frame = cap.read()
    if ret:
cv.imshow("camVid", frame)
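        # waitKey(25) shows the frame for ~25 ms; masking with 0xFF keeps the
        # low byte of the key code before comparing against 'q'.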
if cv.waitKey(25) & 0xFF == ord('q'):
break
else:
break
cap.release()
cv.destroyAllWindows()
| [((4, 6, 4, 24), 'cv2.VideoCapture', 'cv.VideoCapture', ({(4, 22, 4, 23): '1'}, {}), '(1)', True, 'import cv2 as cv\n'), ((26, 0, 26, 22), 'cv2.destroyAllWindows', 'cv.destroyAllWindows', ({}, {}), '()', True, 'import cv2 as cv\n'), ((19, 8, 19, 34), 'cv2.imshow', 'cv.imshow', ({(19, 18, 19, 26): '"""camVid"""', (19, 28, 19, 33): 'frame'}, {}), "('camVid', frame)", True, 'import cv2 as cv\n'), ((20, 11, 20, 25), 'cv2.waitKey', 'cv.waitKey', ({(20, 22, 20, 24): '(25)'}, {}), '(25)', True, 'import cv2 as cv\n')] |