Dataset schema: five string columns per record, with observed length ranges.

  repo_name         stringlengths 7 .. 94
  repo_path         stringlengths 4 .. 237
  repo_head_hexsha  stringlengths 40 .. 40
  content           stringlengths 10 .. 680k
  apis              stringlengths 2 .. 840k
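A minimal sketch of reading one record under this schema, assuming the dump is a Hugging Face `datasets` table; the dataset identifier below is hypothetical, and only the five column names come from this dump:

# Minimal sketch, assuming the dump loads as a Hugging Face dataset with the
# five columns above. The identifier "user/python-api-usage" is hypothetical.
from datasets import load_dataset

ds = load_dataset("user/python-api-usage", split="train")
record = ds[0]
print(record["repo_name"])         # e.g. "zichuan-scott-xu/automl-workflow"
print(record["repo_path"])         # file path inside the repository
print(record["repo_head_hexsha"])  # 40-character commit hash the file was taken at
print(len(record["content"]))      # raw file text, 10 chars to ~680k
print(record["apis"][:80])         # serialized list of extracted API call sites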
zichuan-scott-xu/automl-workflow
examples/DeepWisdom/Auto_NLP/deepWisdom/transformers_/__init__.py
d108e55da943775953b9f1801311a86ac07e58a0
__version__ = "2.1.1" # Work around to update TensorFlow's absl.logging threshold which alters the # default Python logging output behavior when present. # see: https://github.com/abseil/abseil-py/issues/99 # and: https://github.com/tensorflow/tensorflow/issues/26691#issuecomment-500369493 try: import absl.logging absl.logging.set_verbosity('info') absl.logging.set_stderrthreshold('info') absl.logging._warn_preinit_stderr = False except: pass import logging logger = logging.getLogger(__name__) # pylint: disable=invalid-name # Files and general utilities from .file_utils import (TRANSFORMERS_CACHE, PYTORCH_TRANSFORMERS_CACHE, PYTORCH_PRETRAINED_BERT_CACHE, cached_path, add_start_docstrings, add_end_docstrings, WEIGHTS_NAME, TF2_WEIGHTS_NAME, TF_WEIGHTS_NAME, CONFIG_NAME, is_tf_available, is_torch_available) # Tokenizers from .tokenization_utils import (PreTrainedTokenizer) from .tokenization_auto import AutoTokenizer from .tokenization_bert import BertTokenizer, BasicTokenizer, WordpieceTokenizer from .tokenization_openai import OpenAIGPTTokenizer from .tokenization_transfo_xl import (TransfoXLTokenizer, TransfoXLCorpus) from .tokenization_gpt2 import GPT2Tokenizer from .tokenization_ctrl import CTRLTokenizer from .tokenization_xlnet import XLNetTokenizer, SPIECE_UNDERLINE from .tokenization_xlm import XLMTokenizer from .tokenization_roberta import RobertaTokenizer from .tokenization_distilbert import DistilBertTokenizer # Configurations from .configuration_utils import PretrainedConfig from .configuration_auto import AutoConfig from .configuration_bert import BertConfig, BERT_PRETRAINED_CONFIG_ARCHIVE_MAP from .configuration_openai import OpenAIGPTConfig, OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP from .configuration_transfo_xl import TransfoXLConfig, TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP from .configuration_gpt2 import GPT2Config, GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP from .configuration_ctrl import CTRLConfig, CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP from .configuration_xlnet import XLNetConfig, XLNET_PRETRAINED_CONFIG_ARCHIVE_MAP from .configuration_ctrl import CTRLConfig, CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP from .configuration_xlm import XLMConfig, XLM_PRETRAINED_CONFIG_ARCHIVE_MAP from .configuration_roberta import RobertaConfig, ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP from .configuration_distilbert import DistilBertConfig, DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP # Modeling if is_torch_available(): from .modeling_utils import (PreTrainedModel, prune_layer, Conv1D) from .modeling_auto import (AutoModel, AutoModelForSequenceClassification, AutoModelForQuestionAnswering, AutoModelWithLMHead) from .modeling_bert import (BertPreTrainedModel, BertModel, BertForPreTraining, BertForMaskedLM, BertForNextSentencePrediction, BertForSequenceClassification, BertForMultipleChoice, BertForTokenClassification, BertForQuestionAnswering, load_tf_weights_in_bert, BERT_PRETRAINED_MODEL_ARCHIVE_MAP) from .modeling_openai import (OpenAIGPTPreTrainedModel, OpenAIGPTModel, OpenAIGPTLMHeadModel, OpenAIGPTDoubleHeadsModel, load_tf_weights_in_openai_gpt, OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_MAP) from .modeling_transfo_xl import (TransfoXLPreTrainedModel, TransfoXLModel, TransfoXLLMHeadModel, load_tf_weights_in_transfo_xl, TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_MAP) from .modeling_gpt2 import (GPT2PreTrainedModel, GPT2Model, GPT2LMHeadModel, GPT2DoubleHeadsModel, load_tf_weights_in_gpt2, GPT2_PRETRAINED_MODEL_ARCHIVE_MAP) from .modeling_ctrl import (CTRLPreTrainedModel, CTRLModel, CTRLLMHeadModel, 
CTRL_PRETRAINED_MODEL_ARCHIVE_MAP) from .modeling_xlnet import (XLNetPreTrainedModel, XLNetModel, XLNetLMHeadModel, XLNetForSequenceClassification, XLNetForMultipleChoice, XLNetForQuestionAnsweringSimple, XLNetForQuestionAnswering, load_tf_weights_in_xlnet, XLNET_PRETRAINED_MODEL_ARCHIVE_MAP) from .modeling_xlm import (XLMPreTrainedModel , XLMModel, XLMWithLMHeadModel, XLMForSequenceClassification, XLMForQuestionAnswering, XLMForQuestionAnsweringSimple, XLM_PRETRAINED_MODEL_ARCHIVE_MAP) from .modeling_roberta import (RobertaForMaskedLM, RobertaModel, RobertaForSequenceClassification, RobertaForMultipleChoice, ROBERTA_PRETRAINED_MODEL_ARCHIVE_MAP) from .modeling_distilbert import (DistilBertForMaskedLM, DistilBertModel, DistilBertForSequenceClassification, DistilBertForQuestionAnswering, DISTILBERT_PRETRAINED_MODEL_ARCHIVE_MAP) from .modeling_albert import AlbertForSequenceClassification # Optimization from .optimization import (AdamW, ConstantLRSchedule, WarmupConstantSchedule, WarmupCosineSchedule, WarmupCosineWithHardRestartsSchedule, WarmupLinearSchedule) if not is_tf_available() and not is_torch_available(): logger.warning("Neither PyTorch nor TensorFlow >= 2.0 have been found." "Models won't be available and only tokenizers, configuration" "and file/data utilities can be used.")
[((17, 9, 17, 36), 'logging.getLogger', 'logging.getLogger', ({(17, 27, 17, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n')]
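A minimal sketch of decoding one `apis` entry from the record above; the field layout (source span, dotted API name, parsed arguments, call text, a boolean flag, binding import) is inferred from this dump, not from a published spec:

# Hedged sketch: the tuple semantics below are inferred from this single entry.
import ast

raw = ("[((17, 9, 17, 36), 'logging.getLogger', "
       "({(17, 27, 17, 35): '__name__'}, {}), "
       "'(__name__)', False, 'import logging\\n')]")
span, name, args, call_text, _flag, import_stmt = ast.literal_eval(raw)[0]
print(span)         # (17, 9, 17, 36): start line/col and end line/col of the call
print(name)         # 'logging.getLogger'
print(import_stmt)  # 'import logging\n': the statement that binds the name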
TiankunZhou/dials
test/model/data/all_foreground_valid_data.py
bd5c95b73c442cceb1c61b1690fd4562acf4e337
from __future__ import absolute_import, division, print_function

data = r"""cdials_array_family_flex_ext shoebox p1 (tRp2 (cscitbx_array_family_flex_ext grid p3 ((I0 t(I8 tI01 tRp4 (I8 tbS'\x02\x01\x02\x08..."""
# [content truncated: the remainder of this field is several hundred kilobytes of
# protocol-0 pickle byte escapes for a dials flex shoebox, cut off mid-escape in
# this extract; only the opening of the blob is reproduced above]
x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x04\x9a\x1a|\x02\x06\x00\x03e\x02\x03t\x02\x03m\x06\x03}\x06\x00\x02\x01\x02\x03\x02\x01\x02\x10\x02\x0f\x02\xdc\x02\x06\x02\xc0\x02\x02\x02\xd8\x02\x05\x02\xac\x02\x06\x02\xc0\x02\x02\x82\x80\x02\x01\x02\xa0\x02\x04\x02\xdc\x02\x06\x02\xf8\x02\x06\x02\xe0\x02\x03\x02\xcc\x02\x06\x02\xe8\x02\x06\x02\xe0\x02\x04\x02\xa6\x02\x07\x02\x90\x02\x06\x02\xb0\x02\x05\x02\x84\x02\x07\x02\x86\x02\x08\x02\xd8\x02\x05\x02\xa0\x02\x06\x02\xe0\x02\x05\x02\x93\x02\x08\x02\xb8\x02\x05\x02\xd4\x02\x06\x02\xc0\x02\x04\x02\x94\x02\x07\x02\xa6\x02\x07\x02\xe4\x02\x06\x02\xd4\x02\x06\x02\xe8\x02\x05\x02\xe8\x02\x05\x02\x80\x02\x03\x02\xd8\x02\x05\x02\xd4\x02\x06\x02\x84\x02\x06\x02\xd6\x02\x07\x02\xf8\x02\x05\x02\xfc\x02\x06\x02\xb4\x02\x06\x02\xb0\x02\x05\x02\xe8\x02\x05\x02\xf0\x02\x06\x02\xe8\x02\x05\x02\x84\x02\x06\x02\xdc\x02\x06\x02\x8c\x02\x06\x02\xa6\x02\x07\x02\xb6\x02\x07\x02\xec\x02\x06\x02\xc4\x02\x07\x02\x8a\x02\x07\x02\x86\x02\x07\x02\xe0\x02\x03\x02\xa0\x02\x03\x02\xa8\x02\x07\x02\xd0\x02\x07\x02\x84\x02\x06\x02\xbc\x02\x06\x02\xb8\x02\x05\x02\x80\x02\x02\x02\xa0\x02\x05\x02\x98\x02\x05\x02\xe0\x02\x05\x02\xf0\x02\x06\x02\xe8\x02\x05\x02\xcc\x02\x06\x02\xc0\x02\x05\x02\xd4\x02\x06\x02\xf8\x02\x06\x02\x8a\x02\x07\x02\xca\x02\x07\x02\x81\x02\x08\x82\x80\x02\x02\x02\x84\x02\x06\x02\xe0\x02\x05\x02\xe0\x02\x06\x02\xf8\x02\x05\x02\x92\x02\x07\x02\x84\x02\x08\x02\x80\x02\x04\x02\x84\x02\x06\x02\x82\x02\x07\x02\xb8\x02\x05\x02\xb8\x02\x05\x02\xa8\x02\x06\x02\xbe\x02\x07\x02\xd8\x02\x05\x02\x9c\x02\x06\x02\xd4\x02\x06\x02\xe0\x02\x05\x02\x90\x02\x04\x02\xb4\x02\x06\x02\x8e\x02\x07\x02\xb8\x02\x05\x02\x88\x02\x06\x02\xd4\x02\x06\x02\xe0\x02\x06\x02\x81\x02\x08\x02\x84\x02\x08\x02\x80\x02\x07\x02\xa0\x02\x06\x02\x86\x02\x07\x02\x86\x02\x07\x02\xb8\x02\x06\x02\x80\x02\x01\x02\x80\x02\x02\x02\xb8\x02\x06\x02\x80\x02\x03\x02\xa4\x02\x06\x02\x80\x02\x03\x02\xa4\x02\x06\x02\xb7\x02\x08\x02\xef\x02\t\x02\xbe\x02\x07\x02\x9a\x02\x07\x02\xb6\x02\x07\x82\x80\x02\x01\x02\xb4\x02\x06\x02\xe8\x02\x05\x02\x80\x02\x01\x02\xb8\x02\x07\x02\x8a\x02\x07\x02\x8c\x02\x07\x02\xb2\x02\x07\x02\x80\x02\x07\x02\x8c\x02\x06\x02\xb1\x02\t\x03\x9b@\x02\n\x02\xbc\x02\x06\x02\xe0\x02\x04\x02\x88\x02\x05\x02\x90\x02\x05\x02\xb2\x02\x07\x02\x9c\x02\x06\x02\xf0\x02\x04\x02\xf0\x02\x06\x02\xe4\x02\x06\x02\x84\x02\x06\x02\x80\x02\x08\x02\x80\x02\x07\x02\xe0\x02\x05\x02\xf0\x02\x07\x02\x84\x02\x07\x02\xf8\x02\x06\x02\xa4\x02\x06\x02\xb0\x02\x04\x02\x90\x02\x06\x02\x9c\x02\x06\x02\xc8\x02\x05\x02\xc0\x02\x06\x02\x94\x02\x06\x02\xe0\x02\x04\x02\xf0\x02\x07\x02\xd6\x02\x07\x02\xa0\x02\x03\x02\xac\x02\x07\x02\xd8\x02\x06\x02\x88\x02\x05\x02\xf0\x02\x05\x02\x9c\x02\x07\x02\xa0\x02\x03\x02\x88\x02\x06\x02\xe8\x02\x06\x02\xa0\x02\x03\x02\xd4\x02\x06\x02\xd8\x02\x06\x02\xda\x02\x07\x02\x9c\x02\x07\x02\xa8\x02\x06\x02\xa0\x02\x04\x02\xf8\x02\x06\x02\x80\x
02\x06\x02\xb8\x02\x06\x02\x9c\x02\x06\x02\xd0\x02\x05\x02\xec\x02\x06\x02\x80\x02\x07\x02\xdc\x02\x06\x02\xa8\x02\x06\x02\xca\x02\x07\x02\xc0\x02\x05\x02\xa0\x02\x06\x02\x92\x02\x07\x00\x02\x94\x02\x07\x02\xc0\x02\x05\x02\xa0\x02\x03\x02\xc0\x02\x05\x02\xc8\x02\x05\x02\xae\x02\x07\x02\x9c\x02\x06\x02\x8c\x02\x07\x02\xe4\x02\x06\x02\x86\x02\x07\x02\xd8\x02\x06\x02\xa0\x02\x04\x02\xf4\x02\x06\x02\xf0\x02\x06\x02\xd8\x02\x05\x02\xa8\x02\x05\x02\xc0\x02\x05\x02\x80\x02\x01\x02\xe0\x02\x05\x02\x9e\x02\x07\x02\xc4\x02\x06\x02\xf0\x02\x05\x02\xf4\x02\x06\x02\xc4\x02\x06\x02\x84\x02\x06\x02\xf0\x02\x05\x02\xe0\x02\x05\x02\xa6\x02\x07\x02\xb8\x02\x06\x02\xa4\x02\x06\x02\x86\x02\x08\x02\x80\x02\x07\x02\xd8\x02\x06\x02\x80\x02\x01\x02\x9c\x02\x07\x02\xa0\x02\x04\x02\x80\x02\x03\x02\xc0\x02\x03\x02\xc4\x02\x07\x02\xaa\x02\x07\x82\x80\x02\x01\x02\xf0\x02\x05\x02\xdc\x02\x06\x02\x90\x02\x08\x02\xf8\x02\x05\x02\xd0\x02\x05\x02\xc0\x02\x05\x02\xa4\x02\x07\x02\x80\x02\x05\x02\xb8\x02\x06\x02\x84\x02\x06\x02\x84\x02\x06\x02\x94\x02\x07\x02\xa8\x02\x06\x02\xb0\x02\x04\x02\x80\x02\x03\x02\x03\x02\x01\x02\x10\x02\x0f\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x03\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x03\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x03\x02\x01\x02\x10\x02\x0f\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\x
b3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xaf
J\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x04\xb3\xafJ\x02\x06\x00\x034\x01\x03E\x01\x03\xd4\x05\x03\xe5\x05\x00\x02\x01\x02\x03\x02\x01\x02\x11\x02\x11\x00\x02\xf0\x02\x06\x02\xc0\x02\x05\x82\x80\x02\x02\x02\x8c\x02\x06\x02\x88\x02\x06\x02\x80\x02\x02\x02\xa4\x02\x06\x02\x8c\x02\x06\x02\xf0\x02\x06\x02\xf2\x02\x07\x02\x80\x02\x07\x02\x90\x02\x06\x02\x80\x02\x02\x02\xe8\x02\x05\x02\xc0\x02\x05\x02\x84\x02\x06\x02\xd0\x02\x04\x02\xa8\x02\x05\x02\xaa\x02\x07\x02\x84\x02\x07\x02\xc4\x02\x06\x02\xa4\x02\x07\x02\xf0\x02\x05\x02\xb4\x02\x06\x02\xd0\x02\x05\x02\xb4\x02\x06\x02\x8c\x02\x06\x02\xa4\x02\x07\x02\xd0\x02\x05\x02\xa8\x02\x05\x02\xbc\x02\x06\x02\x88\x02\x05\x02\x8e\x02\x07\x02\xa8\x02\x05\x02\x84\x02\x06\x02\x84\x02\x06\x02\x82\x02\x07\x02\x82\x02\x07\x02\x80\x02\x03\x02\xc8\x02\x05\x02\x80\x02\x01\x02\x82\x02\x07\x02\x92\x02\x07\x02\xd0\x02\x05\x02\x8c\x02\x06\x02\x80\x02\x04\x02\x90\x02\x04\x02\xf4\x02\x06\x02\xc4\x02\x06\x02\xe0\x02\x05\x02\xb0\x02\x04\x02\xf0\x02\x05\x02\x82\x02\x07\x02\x98\x02\x06\x02\xcc\x02\x06\x02\xd0\x02\x05\x02\x80\x02\x03\x02\x98\x02\x06\x02\xb8\x02\x05\x02\xf0\x02\x04\x02\xa0\x02\x03\x02\xc0\x02\x05\x02\x8c\x02\x06\x02\xc0\x02\x02\x02\x8c\x02\x07\x02\xa8\x02\x07\x02\xc0\x02\x02\x02\xd8\x02\x06\x02\xf8\x02\x05\x02\xf8\x02\x06\x02\xe4\x02\x06\x02\x80\x02\x03\x02\xc0\x02\x02\x02\x88\x02\x05\x02\x80\x02\x04\x02\xe0\x02\x03\x02\x80\x02\x03\x02\xa8\x02\x05\x02\xc8\x02\x05\x00\x02\x88\x02\x06\x02\xd0\x02\x07\x02\xb0\x02\x05\x02\xd8\x02\x05\x02\xb4\x02\x06\x02\x98\x02\x05\x02\x80\x02\x03\x82\x80\x02\x01\x02\xb0\x02\x06\x02\xa8\x02\x05\x02\xe0\x02\x03\x02\xe0\x02\x05\x02\xe8\x02\x05\x02\xe0\x02\x06\x02\xc0\x02\x03\x02\x80\x02\x05\x02\x9c\x02\x06\x02\xf0\x02\x04\x02\x92\x02\x08\x02\xe0\x02\x07\x02\xf8\x02\x06\x02\xe4\x02\x06\x02\x8c\x02\x06\x02\x88\x02\x06\x02\xd0\x02\x06\x02\xc0\x02\x05\x02\x84\x02\x06\x02\x80\x02\x02\x02\xc0\x02\x06\x02\x80\x02\x03\x02\xc0\x02\x02\x02\x9e\x02\x07\x02\xb0\x02\x04\x02\xc0\x02\x05\x02\xc0\x02\x06\x02\xa0\x02\x06\x02\x80\x02\x05\x02\xc8\x02\x05\x02\x90\x02\x07\x82\x80\x02\x02\x82\xc0\x02\x02\x82\xc0\x02\x03\x02\xd0\x02\x05\x02\xf0\x02\x06\x02\xd0\x02\x07\x02\xac\x02\x07\x02\xb0\x02\x06\x02\x86\x02\x07\x02\xa4\x02\x07\x02\x8e\x02\x07\x02\xd4\x02\x06\x02\xba\x02\x07\x02\xa0\x02\x05\x02\x88\x02\x06\x02\xb0\x02\x05\x02\xb6\x02\x07\x02\x90\x02\x06\x02\xb8\x02\x05\x02\xf0\x02\x04\x02\xa0\x02\x04\x02\xe4\x02\x06\x02\xc0\x02\x03\x02\x90\x02\x04\x02\xdc\x02\x06\x02\xac\x02\x07\x02\xdc\x02\x06\x02\xc0\x02\x04\x02\xf0\x02\x06\x02\xe0\x02\x05\x02\xdc\x02\x06\x02\xc8\x02\x05\x02\xc0\x02\x04\x02\xb8\x02\x06\x02\xc0\x02\x06\x02\x80\x02\
x02\x02\x80\x02\x02\x02\x9c\x02\x06\x02\xf0\x02\x04\x02\xc0\x02\x03\x02\xf0\x02\x05\x02\x80\x02\x04\x02\x80\x02\x02\x02\xb0\x02\x04\x02\xe8\x02\x05\x02\xf8\x02\x05\x02\x90\x02\x05\x02\xf8\x02\x05\x02\xaa\x02\x07\x02\x88\x02\x05\x02\xa8\x02\x06\x02\xd8\x02\x05\x02\xe0\x02\x03\x02\xa4\x02\x06\x02\x80\x02\x06\x02\xf4\x02\x06\x02\xdc\x02\x07\x02\xb6\x02\x07\x02\xe8\x02\x05\x00\x02\x88\x02\x05\x02\x85\x02\x08\x02\x88\x02\x06\x02\xf8\x02\x06\x02\x94\x02\x06\x02\xa0\x02\x05\x02\xa0\x02\x04\x02\xf0\x02\x05\x02\x9a\x02\x07\x02\xc6\x02\x07\x02\xf0\x02\x05\x02\xc0\x02\x06\x02\xcc\x02\x06\x02\xb0\x02\x06\x02\x92\x02\x07\x02\x90\x02\x05\x02\xc0\x02\x04\x02\x9e\x02\x07\x02\x86\x02\x07\x02\xb0\x02\x04\x02\x98\x02\x05\x02\xf8\x02\x05\x02\xd0\x02\x05\x02\xc0\x02\x05\x02\xc0\x02\x02\x02\x9e\x02\x07\x02\x90\x02\x04\x02\xc4\x02\x06\x02\x8c\x02\x06\x02\xc8\x02\x06\x02\xb9\x02\x08\x02\x98\x02\x05\x02\xf0\x02\x06\x02\x84\x02\x06\x02\x80\x02\x06\x02\x94\x02\x06\x02\xbc\x02\x06\x02\x8c\x02\x06\x02\xe0\x02\x05\x02\xd8\x02\x05\x02\xc0\x02\x06\x02\x84\x02\x06\x02\x90\x02\x06\x02\xb0\x02\x04\x02\x84\x02\x06\x02\x84\x02\x07\x02\x98\x02\x07\x02\xee\x02\x07\x02\xc4\x02\x07\x02\xf8\x02\x05\x02\x84\x02\x06\x02\x98\x02\x05\x02\xbc\x02\x06\x02\x98\x02\x06\x02\x80\x02\x04\x02\xf0\x02\x05\x02\xa4\x02\x06\x02\xd8\x02\x05\x02\xa8\x02\x05\x02\x80\x02\x06\x02\xac\x02\x06\x02\xd0\x02\x06\x02\x9d\x02\x08\x02\xdc\x02\x06\x02\x94\x02\x06\x02\x90\x02\x06\x02\xa0\x02\x04\x02\xd8\x02\x05\x02\xb2\x02\x07\x02\x80\x02\x06\x02\x84\x02\x07\x02\x80\x02\x03\x02\xc0\x02\x02\x02\xe0\x02\x07\x02\xdc\x02\x06\x02\xb0\x02\x06\x02\x88\x02\x06\x02\xd8\x02\x07\x02\x84\x02\x06\x02\xc0\x02\x05\x02\xc0\x02\x04\x02\xea\x02\x07\x02\xa0\x02\x03\x82\x80\x02\x01\x02\xe4\x02\x06\x02\xc0\x02\x07\x02\x80\x02\x05\x02\xd8\x02\x06\x02\xa8\x02\x05\x82\x80\x02\x03\x02\xf0\x02\x06\x02\xd0\x02\x06\x02\x8c\x02\x06\x02\xda\x02\x07\x02\xd4\x02\x06\x02\xd4\x02\x06\x02\x84\x02\x06\x02\x88\x02\x06\x02\xe8\x02\x05\x02\x88\x02\x06\x02\xd8\x02\x05\x02\x84\x02\x06\x02\xe0\x02\x05\x02\x84\x02\x08\x02\x84\x02\x06\x02\x80\x02\x02\x02\x90\x02\x05\x02\xa0\x02\x05\x02\xe0\x02\x05\x02\x03\x02\x01\x02\x11\x02\x11\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x03\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02
\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x03\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x03\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x03\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x03\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x03\x02\x01\x02\x11\x02\x11\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x0
6\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x
04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x04\x91\xf3\x83\x02\x06\x00\x03\xa4\x02\x03\xb1\x02\x03h\x06\x03w\x06\x00\x02\x01\x02\x03\x02\x01\x02\x0f\x02\r\x02\x88\x02\x06\x02\x90\x02\x05\x02\xa0\x02\x03\x02\xf0\x02\x06\x02\xaa\x02\x07\x02\xe0\x02\x06\x02\xb0\x02\x07\x02\xe0\x02\x04\x02\xac\x02\x06\x02\xb4\x02\x06\x02\xd0\x02\x06\x02\xa8\x02\x06\x02\x8a\x02\x07\x02\xba\x02\x07\x02\xb8\x02\x05\x02\x80\x02\x02\x02\xf8\x02\x05\x02\xf0\x02\x06\x02\xf0\x02\x05\x02\xf8\x02\x06\x02\xa0\x02\x07\x02\x8c\x02\x06\x02\x80\x02\x06\x02\x80\x02\x07\x82\xa0\x02\x03\x02\x80\x02\x01\x02\x80\x02\x05\x02\x98\x02\x07\x02\xf0\x02\x05\x02\xc0\x02\x07\x02\xd4\x02\x06\x02\xe0\x02\x07\x02\x90\x02\x04\x02\x98\x02\x06\x02\xd8\x02\x06\x02\xf4\x02\x06\x02\xe8\x02\x06\x02\xd0\x02\x05\x02\xb0\x02\x06\x02\xa8\x02\x05\x02\x88\x02\x06\x02\xac\x02\x06\x02\xf4\x02\x06\x02\x84\x02\x06\x02\x88\x02\x07\x02\x98\x02\x06\x02\x96\x02\x08\x02\xae\x02\x07\x02\xfc\x02\x06\x02\xa8\x02\x06\x02\xc8\x02\x06\x02\xd8\x02\x06\x02\x84\x02\x07\x02\x96\x02\x07\x02\x80\x02\x06\x02\xd8\x02\x05\x02\xf0\x02\x06\x02\x80\x02\x01\x02\xf8\x02\x05\x02\x80\x02\x03\x02\xe2\x02\x07\x02\xf8\x02\x07\x02\x9c\x02\x06\x02\x9c\x02\x06\x02\x80\x02\x01\x02\x94\x02\x07\x02\xc0\x02\x04\x02\xa0\x02\x07\x02\xdc\x02\x06\x02\xec\x02\x06\x02\x94\x02\x06\x02\x94\x02\x07\x02\x94\x02\x06\x02\xc8\x02\x05\x02\x80\x02\x01\x02\xc0\x02\x03\x02\xa6\x02\x07\x02\x8c\x02\x06\x02\xf0\x02\x05\x02\x88\x02\x07\x02\xa0\x02\x05\x02\xf0\x02\x06\x02\xd8\x02\x06\x02\x89\x02\x08\x03\xba\xc0\x02\n\x02\xd0\x02\x06\x02\x90\x02\x06\x02\xa0\x02\x05\x02\xac\x02\x07\x02\xa0\x02\x03\x00\x02\x84\x02\x07\x02\xe0\x02\x05\x02\xe0\x02\x03\x02\xe8\x02\x05\x02\xe0\x02\x05\x03\xab\x80\x02\t\x02\xe1\x02\n\x02\xec\x02\x06\x02\x80\x02\x02\x02\xe4\x02\x07\x02\x8c\x02\x06\x02\xd4\x02\x06\x00\x02\x90\x02\x07\x02\xd4\x02\x06\x02\x98\x02\x05\x02\x98\x02\x05\x02\x90\x02\x07\x02\xa2\x02\x07\x02\xcf\x02\x08\x02\xf0\x02\x05\x02\x80\x02\x01\x02\xec\x02\x06\x02\x80\x02\x07\x02\x84\x02\x06\x02\xe4\x02\x06\x02\xe0\x02\x03\x02\x88\x02\x07\x02\xa0\x02\x06\x02\xae\x02\x07\x02\xe8\x02\x05\x02\x84\x02\x07\x02\x94\x02\x06\x02\xb0\x02\x06\x02\x80\x02\x03\x02\xf0\x02\x05\x02\xa0\x02\x07\x02\xd4\x02\x07\x02\xe0\x02\x05\x02\xd0\x02\x04\x02\xe0\x02\x06\x02\xb4\x02\x06\x02\xc0\x02\x03\x02\x80\x02\x02\x02\xdc\x02\x06\x02\xa8\x02\x05\x02\xd8\x02\x05\x02\xe8\x02\x05\x02\xb6\x02\x07\x02\x98\x02\x05\x02\xc0\x02\x05\x02\xc8\x02\x06\x02\xc6\x02\x07\x02\xdc\x02\x06\x02\xd0\x02\x06\x02\xd0\x02\x06\x02\x84\x02\x06\x02\xf8\x02\x05\x02\x80\x02\x06\x02\xf0\x02\x05\x02\xe0\x02\x04\x02\x80\x02\x06\x02\xb0\x02\x05\x02\xdc\x02\x06\x02\x84\x02\x07\x02\xc0\x02\x02\x02\xa8\x02\x06\x02\x90\x02\x05\x02\xf0\x02\x05\x02\xac\x02\x06\x02\xbc\x02\x07\x02\xac\x02\x07\x02\xd0\x02\x05\x02\xd4\x02\x07\x02\xaa\x02\x07\x02\xac\x02\x06\x02\x84\x02\x06\x02\xf8\x02\x05\x02\xb0\x02\x04\x0
2\x88\x02\x06\x02\x84\x02\x06\x02\x88\x02\x07\x02\x9c\x02\x06\x02\xa0\x02\x03\x02\xb4\x02\x07\x02\xd0\x02\x06\x02\xa0\x02\x06\x02\x94\x02\x06\x02\x90\x02\x05\x02\xb0\x02\x06\x02\xf0\x02\x06\x02\xc0\x02\x04\x82\x80\x02\x02\x02\xe0\x02\x03\x02\xc0\x02\x02\x02\xb4\x02\x07\x02\xdc\x02\x06\x02\xc0\x02\x05\x02\xfc\x02\x06\x02\xd4\x02\x06\x02\xb0\x02\x06\x00\x82\x80\x02\x01\x02\x80\x02\x05\x02\x03\x02\x01\x02\x0f\x02\r\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x03\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x05\x02\x05\x02\x05\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x13\x02\x03\x02\x01\x02\x0f\x02\r\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02
\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06\x04\xaf\x03>\x02\x06' tb."""
[]
sofieditmer/self-assigned
src/use-model.py
3033b64d2848fcf73c44dd79ad4e7f07f8387c65
#!/usr/bin/env python
"""
Info: This script loads the model trained in the cnn-asl.py script and enables the user to use it for classifying unseen ASL letters. It also visualizes the feature map of the last convolutional layer of the network, to give the user an insight into exactly which parts of the original image the model is paying attention to when classifying the image.

Parameters:
    (optional) model_name: str <name-of-the-model-to-load>, default = "saved_model.json"
    (optional) train_data: str <name-of-training-data>, default = "asl_alphabet_train_subset"
    (optional) unseen_image: str <name-of-unseen-image>, default = "unseen_img_test1.png"

Usage:
    $ python use-model.py

Output:
    - unseen_image_superimposed_heatmap.png: superimposed heatmap on unseen image.
    - unseen_image_prediction.txt: model prediction of unseen image.
"""

### DEPENDENCIES ###

# Core libraries
import os
import sys
sys.path.append(os.path.join(".."))

# Matplotlib, numpy, OpenCV
import matplotlib.pyplot as plt
import numpy as np
import cv2

# TensorFlow
import tensorflow as tf
from tensorflow.keras.preprocessing.image import (load_img, img_to_array)
from tensorflow.keras.applications.resnet import preprocess_input
from tensorflow.keras.models import model_from_json
from tensorflow.keras import backend as K

# argparse
import argparse

### MAIN FUNCTION ###
def main():

    ### ARGPARSE ###

    # Initialize ArgumentParser class
    ap = argparse.ArgumentParser()

    # Argument 1: Model name
    ap.add_argument("-m", "--model_name",
                    type = str,
                    required = False, # the argument is not required
                    help = "Name of the model",
                    default = "saved_model.json") # default name

    # Argument 2: Training data
    ap.add_argument("-t", "--train_data",
                    type = str,
                    required = False, # the argument is not required
                    help = "Name of training data folder",
                    default = "asl_alphabet_train_subset") # default is a subset of the training dataset

    # Argument 3: Input image
    ap.add_argument("-u", "--unseen_image",
                    type = str,
                    required = False, # the argument is not required
                    help = "Name of the image the model should classify",
                    default = "unseen_img_test1.png") # default unseen image provided in the unseen_images folder

    # Parse arguments
    args = vars(ap.parse_args())

    # Save input parameters
    model_name = args["model_name"]
    train_data = os.path.join("..", "data", "subset_asl_sign_language", args["train_data"])
    unseen_image = args["unseen_image"]

    # Create output directory if it does not already exist
    if not os.path.exists(os.path.join("..", "output")):
        os.mkdir(os.path.join("..", "output"))

    # Start message
    print("\n[INFO] Initializing...")

    # Instantiate the class
    classifier = Loaded_model_classifier(train_data, unseen_image)

    # Create list of label names from the directory names in the training data folder
    labels = classifier.list_labels()

    # Load the model
    print(f"\n[INFO] Loading the CNN model, {model_name}, from 'output' directory...")
    model = classifier.load_model(model_name)

    # Classify input image
    print(f"\n[INFO] Using the model to predict the class of {unseen_image}...")
    label = classifier.classify_unseen_image(labels, model)

    # Visualize feature map of network for input image
    print(f"\n[INFO] Visualizing the feature map of the last convolutional layer of the network...")
    classifier.visualize_feature_map(model)

    # User message
    print(f"\n[INFO] Done! The {unseen_image} has been classified as {label}, and the feature map of the last convolutional layer of the network has been visualized and saved as {unseen_image}_superimposed_heatmap.png in 'output' directory\n")


# Creating classifier class
class Loaded_model_classifier:

    def __init__(self, train_data, unseen_image):

        # Receive inputs: train data and input image
        self.train_data = train_data
        self.unseen_image = unseen_image

    def list_labels(self):
        """
        This method defines the label names by listing the names of the folders within the training directory, without listing hidden files. It sorts the names alphabetically.
        """
        # Create empty list
        labels = []

        # For every name in training directory
        for name in os.listdir(self.train_data):

            # If it does not start with . (which hidden files do)
            if not name.startswith('.'):
                labels.append(name)

        # Sort labels alphabetically
        labels = sorted(labels)

        return labels

    def load_model(self, model_name):
        """
        This method loads the model and the model weights that are saved in the output directory.
        """
        # Load JSON-file and create model
        model_path = os.path.join("..", "output", model_name)
        json_model = open(model_path, "r")

        # Read file
        loaded_file = json_model.read()
        json_model.close() # close the file handle after reading

        # Create model
        loaded_model = model_from_json(loaded_file)

        # Load weights into new model
        loaded_model.load_weights(os.path.join("..", "output", "model_weights.h5"))

        # Compile model (the loss is only needed for compilation; the model is used for inference here)
        loaded_model.compile(loss='binary_crossentropy',
                             optimizer='adam',
                             metrics=['accuracy'])

        return loaded_model

    def classify_unseen_image(self, labels, model):
        """
        This method takes an unseen image, performs some preprocessing to prepare it for the model, and predicts the class of the image using the model.
        """
        # Define path
        img_path = os.path.join("..", "data", "unseen_images", self.unseen_image)

        # Load unseen image
        image = load_img(img_path, target_size=(224, 224)) # using the same size as the images the model has been trained on

        # Convert the image to a numpy array
        image = img_to_array(image)

        # Reshape the image, because the model expects a tensor of rank 4. The image goes from being 3-dimensional to 4-dimensional: (1, 224, 224, 3)
        image = image.reshape((1, image.shape[0], image.shape[1], image.shape[2]))

        # Prepare the image for the ResNet50 model
        image = preprocess_input(image)

        # Predict the class of the image
        prediction = np.argmax(model.predict(image))

        # Convert labels to be a dictionary, which is needed to extract the label that corresponds to the prediction
        labels = dict(zip(labels, range(len(labels))))

        # Define function that finds the key (letter) that corresponds to the predicted value
        # (fixed to return the letter itself rather than a one-element set)
        def find_key(dictionary, value):
            return next(k for k, v in dictionary.items() if v == value)

        # Extract letter that corresponds to the predicted value from the label dictionary
        label = find_key(labels, prediction)

        # Print the predicted class to the terminal
        print(f"\nThe model predicts {self.unseen_image} to be the letter {label}")

        # Save prediction as txt-file to output directory
        with open(os.path.join("..", "output", f"{self.unseen_image}_prediction.txt"), "w") as f:
            f.write(f"The predicted class of the {self.unseen_image} made by the model is {label}")

        return label

    def visualize_feature_map(self, model):
        """
        This method visualizes the feature map of the last convolutional layer of the network.
        """
        # Define path
        img_path = os.path.join("..", "data", "unseen_images", self.unseen_image)

        # Load image with dimensions corresponding to training images
        img = load_img(img_path, target_size=(224, 224))

        # Convert image to array
        x = img_to_array(img)

        # Convert to rank 4 tensor
        x = np.expand_dims(x, axis=0)

        # Preprocess to be in line with ResNet50 data
        x = preprocess_input(x)

        # Create activation heatmap for the final layer. This is done by taking advantage of how the model learns through gradient descent: we use the gradients that have been learned through training and go the opposite way (rather than minimizing, we are maximizing). Essentially, we make use of the gradients in the final layer to highlight which regions are particularly informative when predicting a given class.
        with tf.GradientTape() as tape:

            # Take the last convolutional layer in the network
            last_conv_layer = model.get_layer('conv5_block3_out')

            # Create a model that maps the input image to the activations of the last convolutional layer as well as the output predictions
            iterate = tf.keras.models.Model([model.inputs],
                                            [model.output, last_conv_layer.output])

            # Compute the gradient of the top predicted class for the input image with respect to the activations of the last conv layer
            # Take the gradients from the last layer
            model_out, last_conv_layer = iterate(x)

            # Find the class that has been predicted by the model
            class_out = model_out[:, np.argmax(model_out[0])]

            # Extract gradient of the output neuron of the last convolutional layer
            grads = tape.gradient(class_out, last_conv_layer)

            # Vector of mean intensity of the gradient over a specific feature map channel
            pooled_grads = K.mean(grads, axis=(0, 1, 2))

            # Multiply each channel in the feature map array by "how important this channel is" with regard to the top predicted class. Then sum all the channels to obtain the heatmap class activation
            heatmap = tf.reduce_mean(tf.multiply(pooled_grads, last_conv_layer), axis=-1)
            heatmap = np.maximum(heatmap, 0)
            heatmap /= np.max(heatmap)
            heatmap = heatmap.reshape((7,7))
            plt.matshow(heatmap)

            # Load unseen image with OpenCV
            img = cv2.imread(img_path)

            # Make heatmap semi-transparent
            intensity = 0.5

            # Resize the heatmap to be the original dimensions of the input
            heatmap = cv2.resize(heatmap, (img.shape[1], img.shape[0]))

            # Apply colormap
            heatmap = cv2.applyColorMap(np.uint8(255*heatmap), cv2.COLORMAP_JET)

            # Multiply heatmap by intensity and 'add' this on top of the original image
            superimposed = (heatmap * intensity) + img

            # Save the superimposed image to output directory
            cv2.imwrite(os.path.join("..", "output", f"{self.unseen_image}_superimposed_heatmap.png"), superimposed)

            # User message
            print(f"\n[INFO] The feature map has now been visualized and superimposed on {self.unseen_image}. Find image as {self.unseen_image}_superimposed_heatmap.png in 'output' directory...")


# Define behaviour when called from command line
if __name__=="__main__":
    main()
[((23, 16, 23, 34), 'os.path.join', 'os.path.join', ({(23, 29, 23, 33): '""".."""'}, {}), "('..')", False, 'import os\n'), ((48, 9, 48, 34), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((76, 17, 76, 91), 'os.path.join', 'os.path.join', ({(76, 30, 76, 34): '""".."""', (76, 36, 76, 42): '"""data"""', (76, 44, 76, 70): '"""subset_asl_sign_language"""', (76, 72, 76, 90): "args['train_data']"}, {}), "('..', 'data', 'subset_asl_sign_language', args['train_data'])", False, 'import os\n'), ((125, 20, 125, 47), 'os.listdir', 'os.listdir', ({(125, 31, 125, 46): 'self.train_data'}, {}), '(self.train_data)', False, 'import os\n'), ((141, 21, 141, 61), 'os.path.join', 'os.path.join', ({(141, 34, 141, 38): '""".."""', (141, 40, 141, 48): '"""output"""', (141, 50, 141, 60): 'model_name'}, {}), "('..', 'output', model_name)", False, 'import os\n'), ((148, 23, 148, 51), 'tensorflow.keras.models.model_from_json', 'model_from_json', ({(148, 39, 148, 50): 'loaded_file'}, {}), '(loaded_file)', False, 'from tensorflow.keras.models import model_from_json\n'), ((166, 19, 166, 81), 'os.path.join', 'os.path.join', ({(166, 32, 166, 36): '""".."""', (166, 38, 166, 44): '"""data"""', (166, 46, 166, 61): '"""unseen_images"""', (166, 63, 166, 80): 'self.unseen_image'}, {}), "('..', 'data', 'unseen_images', self.unseen_image)", False, 'import os\n'), ((169, 16, 169, 58), 'tensorflow.keras.preprocessing.image.load_img', 'load_img', (), '', False, 'from tensorflow.keras.preprocessing.image import load_img, img_to_array\n'), ((172, 16, 172, 35), 'tensorflow.keras.preprocessing.image.img_to_array', 'img_to_array', ({(172, 29, 172, 34): 'image'}, {}), '(image)', False, 'from tensorflow.keras.preprocessing.image import load_img, img_to_array\n'), ((178, 16, 178, 39), 'tensorflow.keras.applications.resnet.preprocess_input', 'preprocess_input', ({(178, 33, 178, 38): 'image'}, {}), '(image)', False, 'from tensorflow.keras.applications.resnet import preprocess_input\n'), ((208, 19, 208, 81), 'os.path.join', 'os.path.join', ({(208, 32, 208, 36): '""".."""', (208, 38, 208, 44): '"""data"""', (208, 46, 208, 61): '"""unseen_images"""', (208, 63, 208, 80): 'self.unseen_image'}, {}), "('..', 'data', 'unseen_images', self.unseen_image)", False, 'import os\n'), ((211, 14, 211, 56), 'tensorflow.keras.preprocessing.image.load_img', 'load_img', (), '', False, 'from tensorflow.keras.preprocessing.image import load_img, img_to_array\n'), ((214, 12, 214, 29), 'tensorflow.keras.preprocessing.image.img_to_array', 'img_to_array', ({(214, 25, 214, 28): 'img'}, {}), '(img)', False, 'from tensorflow.keras.preprocessing.image import load_img, img_to_array\n'), ((217, 12, 217, 37), 'numpy.expand_dims', 'np.expand_dims', (), '', True, 'import numpy as np\n'), ((220, 12, 220, 31), 'tensorflow.keras.applications.resnet.preprocess_input', 'preprocess_input', ({(220, 29, 220, 30): 'x'}, {}), '(x)', False, 'from tensorflow.keras.applications.resnet import preprocess_input\n'), ((80, 26, 80, 54), 'os.path.join', 'os.path.join', ({(80, 39, 80, 43): '""".."""', (80, 45, 80, 53): '"""output"""'}, {}), "('..', 'output')", False, 'import os\n'), ((81, 17, 81, 45), 'os.path.join', 'os.path.join', ({(81, 30, 81, 34): '""".."""', (81, 36, 81, 44): '"""output"""'}, {}), "('..', 'output')", False, 'import os\n'), ((151, 34, 151, 82), 'os.path.join', 'os.path.join', ({(151, 47, 151, 51): '""".."""', (151, 53, 151, 61): '"""output"""', (151, 63, 151, 81): '"""model_weights.h5"""'}, {}), "('..', 'output', 'model_weights.h5')", False, 'import os\n'), ((223, 13, 223, 30), 'tensorflow.GradientTape', 'tf.GradientTape', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((229, 22, 230, 83), 'tensorflow.keras.models.Model', 'tf.keras.models.Model', ({(229, 44, 229, 58): '[model.inputs]', (230, 44, 230, 82): '[model.output, last_conv_layer.output]'}, {}), '([model.inputs], [model.output, last_conv_layer.output])', True, 'import tensorflow as tf\n'), ((244, 27, 244, 56), 'tensorflow.keras.backend.mean', 'K.mean', (), '', True, 'from tensorflow.keras import backend as K\n'), ((248, 22, 248, 44), 'numpy.maximum', 'np.maximum', ({(248, 33, 248, 40): 'heatmap', (248, 42, 248, 43): '0'}, {}), '(heatmap, 0)', True, 'import numpy as np\n'), ((249, 23, 249, 38), 'numpy.max', 'np.max', ({(249, 30, 249, 37): 'heatmap'}, {}), '(heatmap)', True, 'import numpy as np\n'), ((251, 12, 251, 32), 'matplotlib.pyplot.matshow', 'plt.matshow', ({(251, 24, 251, 31): 'heatmap'}, {}), '(heatmap)', True, 'import matplotlib.pyplot as plt\n'), ((254, 18, 254, 38), 'cv2.imread', 'cv2.imread', ({(254, 29, 254, 37): 'img_path'}, {}), '(img_path)', False, 'import cv2\n'), ((260, 22, 260, 71), 'cv2.resize', 'cv2.resize', ({(260, 33, 260, 40): 'heatmap', (260, 42, 260, 70): '(img.shape[1], img.shape[0])'}, {}), '(heatmap, (img.shape[1], img.shape[0]))', False, 'import cv2\n'), ((197, 18, 197, 85), 'os.path.join', 'os.path.join', ({(197, 31, 197, 35): '""".."""', (197, 37, 197, 45): '"""output"""', (197, 47, 197, 84): 'f"""{self.unseen_image}_prediction.txt"""'}, {}), "('..', 'output', f'{self.unseen_image}_prediction.txt')", False, 'import os\n'), ((247, 37, 247, 79), 'tensorflow.multiply', 'tf.multiply', ({(247, 49, 247, 61): 'pooled_grads', (247, 63, 247, 78): 'last_conv_layer'}, {}), '(pooled_grads, last_conv_layer)', True, 'import tensorflow as tf\n'), ((263, 40, 263, 61), 'numpy.uint8', 'np.uint8', ({(263, 49, 263, 60): '255 * heatmap'}, {}), '(255 * heatmap)', True, 'import numpy as np\n'), ((269, 24, 269, 101), 'os.path.join', 'os.path.join', ({(269, 37, 269, 41): '""".."""', (269, 43, 269, 51): '"""output"""', (269, 53, 269, 100): 'f"""{self.unseen_image}_superimposed_heatmap.png"""'}, {}), "('..', 'output', f'{self.unseen_image}_superimposed_heatmap.png')", False, 'import os\n'), ((237, 37, 237, 60), 'numpy.argmax', 'np.argmax', ({(237, 47, 237, 59): 'model_out[0]'}, {}), '(model_out[0])', True, 'import numpy as np\n')]
algorithmiaio/algorithmia-adk-python
examples/hello_world/src/Algorithm.py
1e5c6b9de08fe34260f3b4c03eb4596cccb4d070
from Algorithmia import ADK


# API calls will begin at the apply() method, with the request body passed as 'input'
# For more details, see algorithmia.com/developers/algorithm-development/languages
def apply(input):
    # If your apply function uses state that's loaded into memory via load, you can pass that loaded state to your apply
    # function by defining an additional "globals" parameter in your apply function; but it's optional!
    return "hello {}".format(str(input))


# This turns your library code into an algorithm that can run on the platform.
# If you intend to use loading operations, remember to pass a `load` function as a second variable.
algorithm = ADK(apply)
# The 'init()' function actually starts the algorithm, you can follow along in the source code
# to see how everything works.
algorithm.init("Algorithmia")
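# A hypothetical sketch (not in the original example) of the optional `load`
# pattern described in the comments above: `load` runs once at startup, and
# its return value is handed to `apply` through the extra parameter.
#
#   def load():
#       return {"greeting": "hello"}
#
#   def apply(input, globals):
#       return "{} {}".format(globals["greeting"], str(input))
#
#   algorithm = ADK(apply, load)
#   algorithm.init("Algorithmia")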
[((15, 12, 15, 22), 'Algorithmia.ADK', 'ADK', ({(15, 16, 15, 21): 'apply'}, {}), '(apply)', False, 'from Algorithmia import ADK\n')]
Xiaoxiong-Liu/gluon-ts
src/gluonts/nursery/autogluon_tabular/estimator.py
097c492769258dd70b7f223f826b17b0051ceee9
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.

import logging
from typing import Callable, Optional, List, Tuple

import pandas as pd
from autogluon.tabular import TabularPredictor as AutogluonTabularPredictor

from gluonts.core.component import validated
from gluonts.dataset.common import Dataset
from gluonts.dataset.util import to_pandas
from gluonts.model.estimator import Estimator
from gluonts.time_feature import (
    TimeFeature,
    get_lags_for_frequency,
    time_features_from_frequency_str,
)

from .predictor import (
    TabularPredictor,
    mean_abs_scaling,
    get_features_dataframe,
)

logger = logging.getLogger(__name__)


class TabularEstimator(Estimator):
    """An estimator that trains an Autogluon Tabular model for time series
    forecasting.

    Additional keyword arguments to the constructor, other than the ones
    documented below, will be passed on to Autogluon Tabular's ``fit`` method
    used for training the model.

    Parameters
    ----------
    freq
        Frequency of the data to handle
    prediction_length
        Prediction length
    lag_indices
        List of indices of the lagged observations to use as features. If
        None, this will be set automatically based on the frequency.
    time_features
        List of time features to be used. If None, this will be set
        automatically based on the frequency.
    scaling
        Function to be used to scale time series. This should take a pd.Series
        object as input, and return a scaled pd.Series and the scale (float).
        By default, this divides a series by the mean of its absolute value.
    batch_size
        Batch size of the resulting predictor; this is just used at prediction
        time, and does not affect training in any way.
    disable_auto_regression
        Whether to forcefully disable auto-regression in the model. If
        ``True``, this will remove any lag index which is smaller than
        ``prediction_length``. This will make predictions more efficient, but
        may impact their accuracy.
    quantiles_to_predict
        If set to a list of quantile levels, the model is trained as a
        quantile predictor for those levels; if None, a regular point-forecast
        (regression) model is trained.
    last_k_for_val
        If given, the last ``last_k_for_val`` observations of each training
        series are held out as a validation set (only used when no explicit
        validation dataset is provided).
    eval_metric
        Evaluation metric passed on to Autogluon Tabular for regression
        models (defaults to "mean_absolute_error").
    """

    @validated()
    def __init__(
        self,
        freq: str,
        prediction_length: int,
        lag_indices: Optional[List[int]] = None,
        time_features: Optional[List[TimeFeature]] = None,
        scaling: Callable[
            [pd.Series], Tuple[pd.Series, float]
        ] = mean_abs_scaling,
        batch_size: Optional[int] = 32,
        disable_auto_regression: bool = False,
        last_k_for_val: Optional[int] = None,
        quantiles_to_predict: Optional[List[float]] = None,
        eval_metric: str = "mean_absolute_error",
        **kwargs,
    ) -> None:
        super().__init__()

        self.freq = freq
        self.prediction_length = prediction_length
        self.lag_indices = (
            lag_indices
            if lag_indices is not None
            else get_lags_for_frequency(self.freq)
        )
        self.time_features = (
            time_features
            if time_features is not None
            else time_features_from_frequency_str(self.freq)
        )
        self.batch_size = batch_size
        self.disable_auto_regression = disable_auto_regression
        self.scaling = scaling
        self.last_k_for_val = last_k_for_val
        self.eval_metric = eval_metric
        self.quantiles_to_predict = quantiles_to_predict

        if self.disable_auto_regression:
            self.lag_indices = [
                lag_idx
                for lag_idx in self.lag_indices
                if lag_idx >= self.prediction_length
            ]

        default_kwargs = {
            "time_limit": 60,
            # "excluded_model_types": ["KNN", "XT", "RF"],
            "presets": [
                "high_quality_fast_inference_only_refit",
                "optimize_for_deployment",
            ],
            "auto_stack": True,
        }
        self.kwargs = {**default_kwargs, **kwargs}

    def train(
        self,
        training_data: Dataset,
        validation_data: Optional[Dataset] = None,
    ) -> TabularPredictor:
        kwargs_override = {}

        dfs = [
            get_features_dataframe(
                series=self.scaling(to_pandas(entry))[0],
                time_features=self.time_features,
                lag_indices=self.lag_indices,
            )
            for entry in training_data
        ]

        if validation_data is not None or self.last_k_for_val is not None:
            kwargs_override["auto_stack"] = False
            logger.warning(
                "Auto Stacking is turned off "
                "as validation dataset is provided before input into Tabular Predictor."
            )

        if validation_data is not None:
            logger.log(20, "Validation dataset is directly provided.")
            validation_dfs = [
                get_features_dataframe(
                    series=self.scaling(to_pandas(entry))[0],
                    time_features=self.time_features,
                    lag_indices=self.lag_indices,
                )
                for entry in validation_data
            ]
            train_df = pd.concat(dfs)
            val_df = pd.concat(validation_dfs)
        elif self.last_k_for_val is not None:
            logger.log(
                20,
                f"last_k_for_val is provided, choosing last {self.last_k_for_val} of each time series as validation set.",
            )
            train_dfs = [
                tmp_df.iloc[: -self.last_k_for_val, :] for tmp_df in dfs
            ]
            validation_dfs = [
                tmp_df.iloc[-self.last_k_for_val :, :] for tmp_df in dfs
            ]
            train_df = pd.concat(train_dfs)
            val_df = pd.concat(validation_dfs)
        else:
            logger.log(
                20,
                "No validation dataset is provided, will let TabularPredictor do the splitting automatically. "
                "Note that this might break the time order of time series data.",
            )
            train_df = pd.concat(dfs)
            val_df = None

        if self.quantiles_to_predict is not None:
            ag_model = AutogluonTabularPredictor(
                label="target",
                problem_type="quantile",
                quantile_levels=self.quantiles_to_predict,
            ).fit(
                train_df,
                tuning_data=val_df,
                **{**self.kwargs, **kwargs_override},
            )
        else:
            ag_model = AutogluonTabularPredictor(
                label="target",
                problem_type="regression",
                eval_metric=self.eval_metric,
            ).fit(
                train_df,
                tuning_data=val_df,
                **{**self.kwargs, **kwargs_override},
            )

        return TabularPredictor(
            ag_model=ag_model,
            freq=self.freq,
            prediction_length=self.prediction_length,
            time_features=self.time_features,
            lag_indices=self.lag_indices,
            scaling=self.scaling,
            batch_size=self.batch_size,
            quantiles_to_predict=self.quantiles_to_predict,
        )
repo_name: andreas19/dcar
repo_path: src/dcar/errors.py
repo_head_hexsha: 31118ac5924b7cb01f8b7da5a84480824c046df2
"""Errors module.""" __all__ = [ 'Error', 'AddressError', 'AuthenticationError', 'TransportError', 'ValidationError', 'RegisterError', 'MessageError', 'DBusError', 'SignatureError', 'TooLongError', ] class Error(Exception): """Base class.""" class AddressError(Error): """Raised for errors in server addresses.""" class AuthenticationError(Error): """Raised when authentication failed.""" class TransportError(Error): """Raised for transport related errors.""" class ValidationError(Error): """Raised when validation failed.""" class RegisterError(Error): """Raised when a signal or method could not be registered.""" class MessageError(Error): """Raised for errors in messages.""" class DBusError(MessageError): """Raised for errors from ERROR messages.""" class SignatureError(MessageError): """Raised for errors in signatures.""" class TooLongError(MessageError): """Raised when a message, an array, a name etc. is too long."""
repo_name: SergeBakharev/content
repo_path: Packs/CortexXDR/Integrations/XDR_iocs/XDR_iocs_test.py
repo_head_hexsha: d66cc274f5bf6f9f0e9ed7e4df1af7b6f305aacf
from XDR_iocs import *
import pytest
from freezegun import freeze_time


Client.severity = 'INFO'
client = Client({'url': 'test'})


def d_sort(in_dict):
    return sorted(in_dict.items())


class TestGetHeaders:
    @freeze_time('2020-06-01T00:00:00Z')
    def test_sanity(self, mocker):
        """
        Given:
            - API key
            - API key ID
        Then:
            - Verify headers are created correctly.
        """
        params = {
            "apikey_id": "7",
            "apikey": "t3PkfrEhaRAD9a3r6Lq5cVPyqdMqtLd8cOJlSWUtbslkbERUgb2BTkSNRtDr3C6CWAgYqxvyzwDFJ83BLBgu1V2cxQY7rsoo2ks2u3W2aBL2BlteF8C8u75lCVUrNbv1"  # noqa: E501
        }
        headers = {
            'Authorization': 'da94963b561e3c95899d843b1284cecf410606e9e809be528ec1cf03880c6e9e',
            'x-iocs-source': 'xsoar',
            'x-xdr-auth-id': '7',
            'x-xdr-nonce': '1111111111111111111111111111111111111111111111111111111111111111',
            'x-xdr-timestamp': '1590969600000'
        }
        mocker.patch('secrets.choice', return_value='1')
        output = get_headers(params)
        assert output == headers, f'get_headers({params})\n\treturns: {d_sort(output)}\n\tinstead: {d_sort(headers)}'

    def test_empty_case(self):
        """
        Given:
            Empty params
        Then:
            get_headers will not raise an error
        """
        get_headers({})


class TestHttpRequest:
    class Res:
        content = 'error'.encode()

        def __init__(self, code):
            self.status_code = code

        @staticmethod
        def json():
            return {}

    XDR_SERVER_ERROR = 500
    INVALID_CREDS = 401
    LICENSE_ERROR = 402
    PERMISSION_ERROR = 403
    OK = 200
    data_test_http_request_error_codes = [
        (OK, {}),
        (XDR_SERVER_ERROR, 'XDR internal server error.\t(error)'),
        (INVALID_CREDS, 'Unauthorized access. An issue occurred during authentication. This can indicate an incorrect key, id, or other invalid authentication parameters.\t(error)'),  # noqa: E501
        (LICENSE_ERROR, 'Unauthorized access. User does not have the required license type to run this API.\t(error)'),
        (PERMISSION_ERROR, 'Unauthorized access. The provided API key does not have the required RBAC permissions to run this API.\t(error)')  # noqa: E501
    ]

    @pytest.mark.parametrize('res, expected_output', data_test_http_request_error_codes)
    def test_http_request_error_codes(self, res, expected_output, mocker):
        """
        Given:
            - Status code
        When:
            - http_request returns this status code.
        Then:
            - Verify error/success format.
        """
        mocker.patch('requests.post', return_value=self.Res(res))
        try:
            output = client.http_request('', {})
        except DemistoException as error:
            output = str(error)
        assert output == expected_output, f'status code {res}\n\treturns: {output}\n\tinstead: {expected_output}'


class TestGetRequestsKwargs:

    def test_with_file(self, mocker):
        """
        Given:
            - file to upload
        Then:
            - Verify output format.
        """
        def override_open(open_path, *_other):
            return open_path

        mocker.patch('builtins.open', side_effect=override_open)
        path = '/Users/some_user/some_dir/some_file.file'
        output = get_requests_kwargs(file_path=path)
        expected_output = {'files': [('file', ('iocs.json', path, 'application/json'))]}
        assert output == expected_output, f'get_requests_kwargs(file_path={path})\n\treturns: {output}\n\t instead: {expected_output}'  # noqa: E501

    def test_with_json(self):
        """
        Given:
            - simple json
        Then:
            - the json ready to send
        """
        _json = {'test': 'test'}
        output = get_requests_kwargs(_json=_json)
        expected_output = {'data': '{"request_data": {"test": "test"}}'}
        assert output == expected_output, f'get_requests_kwargs(_json={_json})\n\treturns: {output}\n\t instead: {expected_output}'  # noqa: E501


class TestPrepareCommands:

    def test_prepare_get_changes(self):
        """
        Given:
            - get changes command
        Then:
            - Verify url and json format.
        """
        ts = int(datetime.now(timezone.utc).timestamp() * 1000)
        url_suffix, _json = prepare_get_changes(ts)
        assert url_suffix == 'get_changes', f'prepare_get_changes\n\treturns url_suffix: {url_suffix}\n\tinstead url_suffix: get_changes'  # noqa: E501
        assert _json == {'last_update_ts': ts}

    def test_prepare_enable_iocs(self):
        """
        Given:
            - enable iocs command
        Then:
            - Verify url and json format.
        """
        url_suffix, iocs = prepare_enable_iocs('8.8.8.8,domain.com')
        assert url_suffix == 'enable_iocs', f'prepare_enable_iocs\n\treturns url_suffix: {url_suffix}\n\tinstead url_suffix: enable_iocs'  # noqa: E501
        assert iocs == ['8.8.8.8', 'domain.com']

    def test_prepare_disable_iocs(self):
        """
        Given:
            - disable iocs command
        Then:
            - Verify url and json format.
        """
        url_suffix, iocs = prepare_disable_iocs('8.8.8.8,domain.com')
        assert url_suffix == 'disable_iocs', f'prepare_disable_iocs\n\treturns url_suffix: {url_suffix}\n\tinstead url_suffix: disable_iocs'  # noqa: E501
        assert iocs == ['8.8.8.8', 'domain.com']


class TestCreateFile:
    path = 'test_data/sync_file_test.json'
    data_test_create_file_sync = [
        ('Domain_iocs', 'Domain_sync_file'),
        ('IP_iocs', 'IP_sync_file'),
        ('File_iocs', 'File_sync_file')
    ]
    data_test_create_file_iocs_to_keep = [
        ('Domain_iocs', 'Domain_iocs_to_keep_file'),
        ('IP_iocs', 'IP_iocs_to_keep_file'),
        ('File_iocs', 'File_iocs_to_keep_file')
    ]

    def setup(self):
        # creates the file
        with open(TestCreateFile.path, 'w') as _file:
            _file.write('')

    def teardown(self):
        # removes the file when done
        os.remove(TestCreateFile.path)

    @staticmethod
    def get_file(path):
        with open(path, 'r') as _file:
            return _file.read()

    @staticmethod
    def get_all_iocs(go_over, extension):
        iocs = []
        total = 0
        data = []
        for in_iocs, out_iocs in go_over:
            ioc = json.loads(TestCreateFile.get_file(f'test_data/{in_iocs}.json'))
            iocs.extend(ioc['iocs'])
            total += ioc['total']
            data.append(TestCreateFile.get_file(f'test_data/{out_iocs}.{extension}'))
        all_iocs = {'iocs': iocs, 'total': total}
        all_data = ''.join(data)
        return all_iocs, all_data

    def test_create_file_sync_without_iocs(self, mocker):
        """
        Given:
            - Sync command
        When:
            - there are no iocs
        Then:
            - Verify sync file data.
        """
        mocker.patch.object(demisto, 'searchIndicators', return_value={})
        create_file_sync(TestCreateFile.path)
        data = self.get_file(TestCreateFile.path)
        expected_data = ''
        assert data == expected_data, f'create_file_sync with no iocs\n\tcreates: {data}\n\tinstead: {expected_data}'

    @pytest.mark.parametrize('in_iocs, out_iocs', data_test_create_file_sync)
    def test_create_file_sync(self, in_iocs, out_iocs, mocker):
        """
        Given:
            - Sync command
        When:
            - iocs type is a specific type.
        Then:
            - Verify sync file data.
        """
        mocker.patch.object(demisto, 'searchIndicators', return_value=json.loads(self.get_file(f'test_data/{in_iocs}.json')))  # noqa: E501
        create_file_sync(TestCreateFile.path)
        data = self.get_file(TestCreateFile.path)
        expected_data = self.get_file(f'test_data/{out_iocs}.txt')
        assert data == expected_data, f'create_file_sync with {in_iocs} iocs\n\tcreates: {data}\n\tinstead: {expected_data}'

    def test_create_file_sync_all_types(self, mocker):
        """
        Given:
            - Sync command
        When:
            - iocs of all types
        Then:
            - Verify sync file data.
        """
        all_iocs, expected_data = self.get_all_iocs(self.data_test_create_file_sync, 'txt')
        mocker.patch.object(demisto, 'searchIndicators', return_value=all_iocs)
        create_file_sync(TestCreateFile.path)
        data = self.get_file(TestCreateFile.path)
        assert data == expected_data, f'create_file_sync with all iocs\n\tcreates: {data}\n\tinstead: {expected_data}'

    data_test_create_file_with_empty_indicators = [
        {},
        {'value': '11.11.11.11'},
        {'indicator_type': 'IP'}
    ]

    @pytest.mark.parametrize('defective_indicator', data_test_create_file_with_empty_indicators)
    def test_create_file_sync_with_empty_indicators(self, defective_indicator, mocker):
        """
        Given:
            - Sync command
        When:
            - some iocs don't have all the required data
        Then:
            - Verify sync file data.
        """
        all_iocs, expected_data = self.get_all_iocs(self.data_test_create_file_sync, 'txt')
        all_iocs['iocs'].append(defective_indicator)
        all_iocs['total'] += 1
        mocker.patch.object(demisto, 'searchIndicators', return_value=all_iocs)
        warnings = mocker.patch.object(demisto, 'debug')
        create_file_sync(TestCreateFile.path)
        data = self.get_file(TestCreateFile.path)
        assert data == expected_data, f'create_file_sync with all iocs\n\tcreates: {data}\n\tinstead: {expected_data}'
        error_msg = warnings.call_args.args[0]
        assert error_msg.startswith("unexpected IOC format in key: '"), f"create_file_sync empty message\n\tstarts: {error_msg}\n\tinstead: unexpected IOC format in key: '"  # noqa: E501
        assert error_msg.endswith(f"', {str(defective_indicator)}"), f"create_file_sync empty message\n\tends: {error_msg}\n\tinstead: ', {str(defective_indicator)}"  # noqa: E501

    def test_create_file_iocs_to_keep_without_iocs(self, mocker):
        """
        Given:
            - iocs to keep command
        When:
            - there are no iocs
        Then:
            - Verify iocs to keep file data.
        """
        mocker.patch.object(demisto, 'searchIndicators', return_value={})
        create_file_iocs_to_keep(TestCreateFile.path)
        data = self.get_file(TestCreateFile.path)
        expected_data = ''
        assert data == expected_data, f'create_file_iocs_to_keep with no iocs\n\tcreates: {data}\n\tinstead: {expected_data}'

    @pytest.mark.parametrize('in_iocs, out_iocs', data_test_create_file_iocs_to_keep)
    def test_create_file_iocs_to_keep(self, in_iocs, out_iocs, mocker):
        """
        Given:
            - iocs to keep command
        When:
            - iocs type is a specific type.
        Then:
            - Verify iocs to keep file data.
        """
        mocker.patch.object(demisto, 'searchIndicators', return_value=json.loads(
            self.get_file(f'test_data/{in_iocs}.json')))
        create_file_iocs_to_keep(TestCreateFile.path)
        data = self.get_file(TestCreateFile.path)
        expected_data = self.get_file(f'test_data/{out_iocs}.txt')
        assert data == expected_data, f'create_file_iocs_to_keep with {in_iocs} iocs\n\tcreates: {data}\n\tinstead: {expected_data}'  # noqa: E501

    def test_create_file_iocs_to_keep_all_types(self, mocker):
        """
        Given:
            - iocs to keep command
        When:
            - iocs of all types
        Then:
            - Verify iocs to keep file data.
        """
        all_iocs, expected_data = self.get_all_iocs(self.data_test_create_file_iocs_to_keep, 'txt')
        mocker.patch.object(demisto, 'searchIndicators', return_value=all_iocs)
        create_file_iocs_to_keep(TestCreateFile.path)
        data = self.get_file(TestCreateFile.path)
        assert data == expected_data, f'create_file_iocs_to_keep with all iocs\n\tcreates: {data}\n\tinstead: {expected_data}'


class TestDemistoIOCToXDR:

    data_test_demisto_expiration_to_xdr = [
        (None, -1),
        ('', -1),
        ('0001-01-01T00:00:00Z', -1),
        ('2020-06-03T00:00:00Z', 1591142400000)
    ]

    @pytest.mark.parametrize('demisto_expiration, xdr_expiration', data_test_demisto_expiration_to_xdr)
    def test_demisto_expiration_to_xdr(self, demisto_expiration, xdr_expiration):
        """
        Given:
            - demisto indicator expiration
        Then:
            - Verify XDR expiration.
        """
        output = demisto_expiration_to_xdr(demisto_expiration)
        assert xdr_expiration == output, f'demisto_expiration_to_xdr({demisto_expiration})\n\treturns: {output}\n\tinstead: {xdr_expiration}'  # noqa: E501

    data_test_demisto_reliability_to_xdr = [
        (None, 'F'),
        ('A - Completely reliable', 'A'),
        ('B - Usually reliable', 'B'),
        ('C - Fairly reliable', 'C'),
        ('D - Not usually reliable', 'D'),
        ('E - Unreliable', 'E'),
        ('F - Reliability cannot be judged', 'F')
    ]

    @pytest.mark.parametrize('demisto_reliability, xdr_reliability', data_test_demisto_reliability_to_xdr)
    def test_demisto_reliability_to_xdr(self, demisto_reliability, xdr_reliability):
        """
        Given:
            - demisto indicator reliability
        Then:
            - Verify XDR reliability.
        """
        output = demisto_reliability_to_xdr(demisto_reliability)
        assert output == xdr_reliability, f'demisto_reliability_to_xdr({demisto_reliability})\n\treturns: {output}\n\tinstead: {xdr_reliability}'  # noqa: E501

    data_test_demisto_types_to_xdr = [
        ('File', 'HASH'),
        ('IP', 'IP'),
        ('Domain', 'DOMAIN_NAME')
    ]

    @pytest.mark.parametrize('demisto_type, xdr_type', data_test_demisto_types_to_xdr)
    def test_demisto_types_to_xdr(self, demisto_type, xdr_type):
        """
        Given:
            - demisto indicator type
        Then:
            - Verify XDR type.
        """
        output = demisto_types_to_xdr(demisto_type)
        assert output == xdr_type, f'demisto_reliability_to_xdr({demisto_type})\n\treturns: {output}\n\tinstead: {xdr_type}'

    data_test_demisto_vendors_to_xdr = [
        (
            {'moduleID': {'sourceBrand': 'test', 'reliability': 'A - Completely reliable', 'score': 2}},
            {'vendor_name': 'test', 'reputation': 'SUSPICIOUS', 'reliability': 'A'}
        ),
        (
            {'moduleID': {'reliability': 'A - Completely reliable', 'score': 2}},
            {'vendor_name': 'moduleID', 'reputation': 'SUSPICIOUS', 'reliability': 'A'}
        ),
        (
            {'moduleID': {'sourceBrand': 'test', 'score': 2}},
            {'vendor_name': 'test', 'reputation': 'SUSPICIOUS', 'reliability': 'F'}
        ),
        (
            {'moduleID': {'reliability': 'A - Completely reliable', 'score': 0}},
            {'vendor_name': 'moduleID', 'reputation': 'UNKNOWN', 'reliability': 'A'}
        )
    ]

    @pytest.mark.parametrize('demisto_vendor, xdr_vendor', data_test_demisto_vendors_to_xdr)
    def test_demisto_vendors_to_xdr(self, demisto_vendor, xdr_vendor):
        """
        Given:
            - demisto indicator vendors reports.
        Then:
            - Verify XDR vendors format.
        """
        output = demisto_vendors_to_xdr(demisto_vendor)[0]
        assert output == xdr_vendor, f'demisto_vendors_to_xdr({demisto_vendor})\n\treturns: {d_sort(output)}\n\tinstead: {d_sort(xdr_vendor)}'  # noqa: E501

    data_test_demisto_ioc_to_xdr = [
        (
            {'value': '11.11.11.11', 'indicator_type': 'IP', 'score': 2},
            {'expiration_date': -1, 'indicator': '11.11.11.11', 'reputation': 'SUSPICIOUS', 'severity': 'INFO', 'type': 'IP'}
        ),
        (
            {'value': '11.11.11.11', 'indicator_type': 100, 'score': 2},
            {'expiration_date': -1, 'indicator': '11.11.11.11', 'reputation': 'SUSPICIOUS', 'severity': 'INFO', 'type': '100'}
        ),
        (
            {'value': '11.11.11.11', 'indicator_type': 'IP'},
            {'expiration_date': -1, 'indicator': '11.11.11.11', 'reputation': 'UNKNOWN', 'severity': 'INFO', 'type': 'IP'}
        ),
        (
            {'value': '11.11.11.11', 'indicator_type': 'IP', 'expiration': '2020-06-03T00:00:00Z'},
            {'expiration_date': 1591142400000, 'indicator': '11.11.11.11', 'reputation': 'UNKNOWN', 'severity': 'INFO', 'type': 'IP'}  # noqa: E501
        ),
        (
            {'value': '11.11.11.11', 'indicator_type': 'IP', 'comments': [{'type': 'IndicatorCommentTimeLine', 'content': 'test'}]},  # noqa: E501
            {'expiration_date': -1, 'indicator': '11.11.11.11', 'reputation': 'UNKNOWN', 'severity': 'INFO', 'type': 'IP'}
        ),
        (
            {'value': '11.11.11.11', 'indicator_type': 'IP', 'comments': [{'type': 'IndicatorCommentRegular', 'content': 'test'}]},  # noqa: E501
            {'expiration_date': -1, 'indicator': '11.11.11.11', 'reputation': 'UNKNOWN', 'severity': 'INFO', 'type': 'IP', 'comment': 'test'}  # noqa: E501
        ),
        (
            {'value': '11.11.11.11', 'indicator_type': 'IP', 'comments': [{'type': 'IndicatorCommentRegular', 'content': 'test'}, {'type': 'IndicatorCommentRegular', 'content': 'this is the comment'}]},  # noqa: E501
            {'expiration_date': -1, 'indicator': '11.11.11.11', 'reputation': 'UNKNOWN', 'severity': 'INFO', 'type': 'IP', 'comment': 'this is the comment'}  # noqa: E501
        ),
        (
            {'value': '11.11.11.11', 'indicator_type': 'IP', 'aggregatedReliability': 'A - Completely reliable'},
            {'expiration_date': -1, 'indicator': '11.11.11.11', 'reputation': 'UNKNOWN', 'severity': 'INFO', 'type': 'IP', 'reliability': 'A'}  # noqa: E501
        ),
        (
            {'value': '11.11.11.11', 'indicator_type': 'IP', 'CustomFields': {'threattypes': {'threatcategory': 'Malware'}}},  # noqa: E501
            {'expiration_date': -1, 'indicator': '11.11.11.11', 'reputation': 'UNKNOWN', 'severity': 'INFO', 'type': 'IP', 'class': 'Malware'}  # noqa: E501
        ),
        (
            {'value': '11.11.11.11', 'indicator_type': 'IP', 'moduleToFeedMap': {'module': {'sourceBrand': 'test', 'score': 2}}},  # noqa: E501
            {'expiration_date': -1, 'indicator': '11.11.11.11', 'reputation': 'UNKNOWN', 'severity': 'INFO', 'type': 'IP', 'vendors': [{'vendor_name': 'test', 'reputation': 'SUSPICIOUS', 'reliability': 'F'}]}  # noqa: E501
        )
    ]

    @pytest.mark.parametrize('demisto_ioc, xdr_ioc', data_test_demisto_ioc_to_xdr)
    def test_demisto_ioc_to_xdr(self, demisto_ioc, xdr_ioc):
        """
        Given:
            - demisto indicator.
        Then:
            - Verify XDR indicator format.
        """
        output = demisto_ioc_to_xdr(demisto_ioc)
        assert output == xdr_ioc, f'demisto_ioc_to_xdr({demisto_ioc})\n\treturns: {d_sort(output)}\n\tinstead: {d_sort(xdr_ioc)}'  # noqa: E501

    def test_empty_demisto_ioc_to_xdr(self, mocker):
        warnings = mocker.patch.object(demisto, 'debug')
        output = demisto_ioc_to_xdr({})
        assert output == {}, 'demisto_ioc_to_xdr({})\n\treturns: ' + str(d_sort(output)) + '\n\tinstead: {}'
        assert warnings.call_args.args[0] == "unexpected IOC format in key: 'value', {}"


class TestXDRIOCToDemisto:

    data_test_xdr_expiration_to_demisto = [
        (-1, 'Never'),
        (1591142400000, '2020-06-03T00:00:00Z'),
        (1592142400000, '2020-06-14T13:46:40Z')
    ]

    @pytest.mark.parametrize('xdr_expiration, demisto_expiration', data_test_xdr_expiration_to_demisto)
    def test_xdr_expiration_to_demisto(self, xdr_expiration, demisto_expiration):
        """
        Given:
            - expiration in XDR format.
        Then:
            - expiration in demisto format.
        """
        output = xdr_expiration_to_demisto(xdr_expiration)
        assert output == demisto_expiration, f'xdr_expiration_to_demisto({xdr_expiration})\n\treturns: {output}\n\tinstead: {demisto_expiration}'  # noqa: E501

    data_test_xdr_ioc_to_demisto = [
        (
            {
                'RULE_ID': 863, 'RULE_INSERT_TIME': 1591165763753, 'RULE_MODIFY_TIME': 1591166095668,
                'RULE_SEVERITY': 'SEV_010_INFO', 'NUMBER_OF_HITS': 0, 'RULE_SOURCE': 'XSOAR TIM', 'RULE_COMMENT': '',
                'RULE_STATUS': 'DISABLED', 'BS_STATUS': 'DONE', 'BS_TS': 1591165801230, 'BS_RETRIES': 1,
                'RULE_EXPIRATION_TIME': -1, 'IOC_TYPE': 'HASH',
                'RULE_INDICATOR': 'fa66f1e0e318b6d7b595b6cee580dc0d8e4ac38fbc8dbfcac6ad66dbe282832e',
                'REPUTATION': 'GOOD', 'RELIABILITY': None, 'VENDORS': None, 'KLASS': None,
                'IS_DEFAULT_TTL': False, 'RULE_TTL': -1, 'MARKED_DELETED': 0
            },
            {
                'value': 'fa66f1e0e318b6d7b595b6cee580dc0d8e4ac38fbc8dbfcac6ad66dbe282832e',
                'type': 'File',
                'score': 1,
                'fields': {
                    'expirationdate': 'Never',
                    'tags': 'Cortex XDR',
                    'xdrstatus': 'disabled'
                }
            }
        ),
        (
            {
                'RULE_ID': 861, 'RULE_INSERT_TIME': 1591165763753, 'RULE_MODIFY_TIME': 1591166095668,
                'RULE_SEVERITY': 'SEV_010_INFO', 'NUMBER_OF_HITS': 0, 'RULE_SOURCE': 'XSOAR TIM', 'RULE_COMMENT': '',
                'RULE_STATUS': 'DISABLED', 'BS_STATUS': 'DONE', 'BS_TS': 1591165801784, 'BS_RETRIES': 1,
                'RULE_EXPIRATION_TIME': -1, 'IOC_TYPE': 'DOMAIN_NAME', 'RULE_INDICATOR': 'test.com',
                'REPUTATION': 'GOOD', 'RELIABILITY': None, 'VENDORS': None, 'KLASS': None,
                'IS_DEFAULT_TTL': False, 'RULE_TTL': -1, 'MARKED_DELETED': 0
            },
            {
                'value': 'test.com',
                'type': 'Domain',
                'score': 1,
                'fields': {
                    'expirationdate': 'Never',
                    'tags': 'Cortex XDR',
                    'xdrstatus': 'disabled'
                }
            }
        ),
        (
            {
                'RULE_ID': 862, 'RULE_INSERT_TIME': 1591165763753, 'RULE_MODIFY_TIME': 1591166095668,
                'RULE_SEVERITY': 'SEV_010_INFO', 'NUMBER_OF_HITS': 0, 'RULE_SOURCE': 'XSOAR TIM', 'RULE_COMMENT': '',
                'RULE_STATUS': 'ENABLED', 'BS_STATUS': 'DONE', 'BS_TS': 1591165801784, 'BS_RETRIES': 1,
                'RULE_EXPIRATION_TIME': -1, 'IOC_TYPE': 'DOMAIN_NAME', 'RULE_INDICATOR': 'test.co.il',
                'REPUTATION': 'SUSPICIOUS', 'RELIABILITY': 'A',
                'VENDORS': [{'vendor_name': 'Cortex XDR - IOC', 'reputation': 'SUSPICIOUS', 'reliability': 'A'}],
                'KLASS': None, 'IS_DEFAULT_TTL': False, 'RULE_TTL': -1, 'MARKED_DELETED': 0
            },
            {
                'value': 'test.co.il',
                'type': 'Domain',
                'score': 2,
                'fields': {
                    'expirationdate': 'Never',
                    'tags': 'Cortex XDR',
                    'xdrstatus': 'enabled'
                }
            }
        )
    ]

    @pytest.mark.parametrize('xdr_ioc, demisto_ioc', data_test_xdr_ioc_to_demisto)
    def test_xdr_ioc_to_demisto(self, xdr_ioc, demisto_ioc, mocker):
        """
        Given:
            - IOC in XDR format.
        Then:
            - IOC in demisto format.
        """
        mocker.patch.object(demisto, 'searchIndicators', return_value={})
        output = xdr_ioc_to_demisto(xdr_ioc)
        del output['rawJSON']
        assert output == demisto_ioc, f'xdr_ioc_to_demisto({xdr_ioc})\n\treturns: {d_sort(output)}\n\tinstead: {d_sort(demisto_ioc)}'  # noqa: E501


class TestCommands:

    # test commands full flow
    class TestIOCSCommand:
        def test_iocs_command_with_enable(self, mocker):
            """
            Given:
                - enable command
            Then:
                - Verify enable command is called.
            """
            mocker.patch.object(demisto, 'command', return_value='xdr-iocs-enable')
            mocker.patch.object(demisto, 'args', return_value={'indicator': '11.11.11.11'})
            mocker.patch('XDR_iocs.Client.http_request', return_value={})
            outputs = mocker.patch('XDR_iocs.return_outputs')
            enable_ioc = mocker.patch('XDR_iocs.prepare_enable_iocs', side_effect=prepare_enable_iocs)
            iocs_command(client)
            output = outputs.call_args.args[0]
            assert output == 'indicators 11.11.11.11 enabled.', f'enable command\n\tprints: {output}\n\tinstead: indicators 11.11.11.11 enabled.'  # noqa: E501
            assert enable_ioc.call_count == 1, 'enable command not called'

        def test_iocs_command_with_disable(self, mocker):
            """
            Given:
                - disable command
            Then:
                - Verify disable command is called.
            """
            mocker.patch.object(demisto, 'command', return_value='xdr-iocs-disable')
            mocker.patch.object(demisto, 'args', return_value={'indicator': '11.11.11.11'})
            mocker.patch('XDR_iocs.Client.http_request', return_value={})
            outputs = mocker.patch('XDR_iocs.return_outputs')
            disable_ioc = mocker.patch('XDR_iocs.prepare_disable_iocs', side_effect=prepare_disable_iocs)
            iocs_command(client)
            output = outputs.call_args.args[0]
            assert output == 'indicators 11.11.11.11 disabled.', f'disable command\n\tprints: {output}\n\tinstead: indicators 11.11.11.11 disabled.'  # noqa: E501
            assert disable_ioc.call_count == 1, 'disable command not called'

    def test_sync(self, mocker):
        http_request = mocker.patch.object(Client, 'http_request')
        iocs, data = TestCreateFile.get_all_iocs(TestCreateFile.data_test_create_file_sync, 'txt')
        mocker.patch.object(demisto, 'searchIndicators', return_value=iocs)
        mocker.patch('XDR_iocs.return_outputs')
        sync(client)
        assert http_request.call_args.args[0] == 'sync_tim_iocs', 'sync command url changed'

    @freeze_time('2020-06-03T02:00:00Z')
    def test_iocs_to_keep(self, mocker):
        http_request = mocker.patch.object(Client, 'http_request')
        iocs, data = TestCreateFile.get_all_iocs(TestCreateFile.data_test_create_file_iocs_to_keep, 'txt')
        mocker.patch.object(demisto, 'searchIndicators', return_value=iocs)
        mocker.patch('XDR_iocs.return_outputs')
        iocs_to_keep(client)
        assert http_request.call_args.args[0] == 'iocs_to_keep', 'iocs_to_keep command url changed'

    def test_tim_insert_jsons(self, mocker):
        http_request = mocker.patch.object(Client, 'http_request')
        mocker.patch.object(demisto, 'getIntegrationContext', return_value={'time': '2020-06-03T00:00:00Z'})
        iocs, _ = TestCreateFile.get_all_iocs(TestCreateFile.data_test_create_file_sync, 'txt')
        mocker.patch.object(demisto, 'searchIndicators', return_value=iocs)
        mocker.patch('XDR_iocs.return_outputs')
        tim_insert_jsons(client)
        assert http_request.call_args.kwargs['url_suffix'] == 'tim_insert_jsons/', 'tim_insert_jsons command url changed'

    def test_get_changes(self, mocker):
        mocker.patch.object(demisto, 'getIntegrationContext', return_value={'ts': 1591142400000})
        mocker.patch.object(demisto, 'createIndicators')
        mocker.patch.object(demisto, 'searchIndicators', return_value={})
        xdr_res = {'reply': list(map(lambda xdr_ioc: xdr_ioc[0],
                                     TestXDRIOCToDemisto.data_test_xdr_ioc_to_demisto))}
        mocker.patch.object(Client, 'http_request', return_value=xdr_res)
        get_changes(client)
        xdr_ioc_to_timeline(list(map(lambda x: str(x[0].get('RULE_INDICATOR')), TestXDRIOCToDemisto.data_test_xdr_ioc_to_demisto)))  # noqa: E501


class TestParams:
    tags_test = [
        (
            {'value': '11.11.11.11', 'indicator_type': 'IP', 'score': 2},
            {'expiration_date': -1, 'indicator': '11.11.11.11', 'reputation': 'SUSPICIOUS', 'severity': 'INFO', 'type': 'IP'},
            {'tlp_color': ''},
            'Cortex XDR',
            None
        ),
        (
            {'value': '11.11.11.11', 'indicator_type': 'IP', 'score': 2},
            {'expiration_date': -1, 'indicator': '11.11.11.11', 'reputation': 'SUSPICIOUS', 'severity': 'INFO', 'type': 'IP'},
            {'tag': 'tag1'},
            'tag1',
            None
        ),
        (
            {'value': '11.11.11.11', 'indicator_type': 'IP', 'score': 2},
            {'expiration_date': -1, 'indicator': '11.11.11.11', 'reputation': 'SUSPICIOUS', 'severity': 'INFO', 'type': 'IP'},
            {'feedTags': 'tag2', 'tlp_color': 'AMBER'},
            'tag2',
            'AMBER'
        )
    ]

    @pytest.mark.parametrize('demisto_ioc, xdr_ioc, param_value, expected_tags, expected_tlp_color', tags_test)
    def test_feed_tags_and_tlp_color(self, demisto_ioc, xdr_ioc, param_value, expected_tags, expected_tlp_color, mocker):
        """
        Given:
            - IOC in XDR format.
        Then:
            - IOC in demisto format.
        """
        mocker.patch.object(demisto, 'searchIndicators', return_value={})
        mocker.patch.object(demisto, 'params', return_value=param_value)
        mocker.patch.object(demisto, 'getIntegrationContext', return_value={'ts': 1591142400000})
        mocker.patch.object(demisto, 'searchIndicators', return_value={})
        outputs = mocker.patch.object(demisto, 'createIndicators')
        Client.tag = demisto.params().get('feedTags', demisto.params().get('tag', Client.tag))
        Client.tlp_color = demisto.params().get('tlp_color')
        client = Client({'url': 'yana'})
        xdr_res = {'reply': list(map(lambda xdr_ioc: xdr_ioc[0], TestXDRIOCToDemisto.data_test_xdr_ioc_to_demisto))}
        mocker.patch.object(Client, 'http_request', return_value=xdr_res)
        get_changes(client)
        output = outputs.call_args.args[0]
        assert output[0]['fields']['tags'] == expected_tags
        assert output[0]['fields'].get('trafficlightprotocol') == expected_tlp_color
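One non-obvious constant in the suite above is the expected 'x-xdr-timestamp' value of 1590969600000 in TestGetHeaders: it is simply the frozen clock (2020-06-01T00:00:00Z) rendered as epoch milliseconds. A quick standalone check, illustrative and not part of the test file:

from datetime import datetime, timezone

# 2020-06-01T00:00:00Z as epoch milliseconds matches the frozen header value.
frozen_ms = int(datetime(2020, 6, 1, tzinfo=timezone.utc).timestamp() * 1000)
assert frozen_ms == 1590969600000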
repo_name: rchdlps/django-docker
repo_path: project/users/models.py
repo_head_hexsha: 2c12732264c1f17cd62e20927b5956db498c30b7
from django.contrib.auth.models import AbstractUser
from django.db.models import CharField
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from django.db import models
from PIL import Image


class User(AbstractUser):

    # First Name and Last Name do not cover name patterns
    # around the globe.
    name = CharField(_('Nome de usuário:'), blank=True, max_length=255)

    # Profile Models
    image = models.ImageField(verbose_name='Foto de Perfil:',
                              default='default.jpg', upload_to='profile_pics')
    birth_date = models.DateField(_('Data de Nascimento:'), null=True, blank=True)
    cpf = models.CharField(_('CPF:'), max_length=50, blank=True)
    cnpj = models.CharField(_('CNPJ:'), max_length=50, blank=True)
    bio = models.TextField(_('Descrição:'), blank=True, default='')
    cep = models.CharField(_('CEP:'), max_length=50, blank=True)
    street = models.CharField(_('Rua:'), max_length=100, blank=True)
    number_home = models.CharField(_('Número:'), max_length=10, blank=True)
    neighborhood = models.CharField(_('Bairro:'), max_length=100, blank=True)
    city = models.CharField(_('Cidade:'), max_length=50, blank=True)
    state = models.CharField(_('Estado:'), max_length=50, blank=True)
    phone = models.CharField(_('Telefone:'), max_length=50, blank=True)
    cel_phone = models.CharField(_('Celular:'), max_length=50, blank=True)

    def get_absolute_url(self):
        return reverse("users:detail", kwargs={"username": self.username})

    """def save(self):
        super().save()
        img = Image.open(self.image.path)

        if img.height > 300 or img.width > 300:
            output_size = (300, 300)
            img.thumbnail(output_size)
            img.save(self.image.path)"""
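Worth noting: a custom user model like this only takes effect if the project settings point Django at it. A minimal settings sketch follows; the app label 'users' is an assumption taken from the file path, not shown in this file.

# settings.py (sketch): register the custom user model for the whole project.
AUTH_USER_MODEL = 'users.User'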
repo_name: cloudify-incubator/cloudify-plugins-sdk
repo_path: cloudify_terminal_sdk/netconf_connection.py
repo_head_hexsha: 9805008e739d31e5f9fe3184411648f9be5e6214
# Copyright (c) 2015-2020 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from cloudify_common_sdk import exceptions

from cloudify_terminal_sdk import base_connection

# final of any package
NETCONF_1_0_END = "]]>]]>"
# base level of communication
NETCONF_1_0_CAPABILITY = 'urn:ietf:params:netconf:base:1.0'
# package based communication
NETCONF_1_1_CAPABILITY = 'urn:ietf:params:netconf:base:1.1'


class NetConfConnection(base_connection.SSHConnection):

    # ssh connection
    ssh = None
    chan = None

    # buffer for same packages, will save partial packages between calls
    buff = ""

    current_level = NETCONF_1_0_CAPABILITY

    def connect(
        self, ip, user, hello_string, password=None, key_content=None,
        port=830
    ):
        """open connection and send xml string by link"""
        self._ssh_connect(ip, user, password, key_content, port)
        self.conn = self.ssh.get_transport().open_session()
        self.conn.invoke_subsystem('netconf')
        self.buff = ""
        capabilities = self.send(hello_string)
        return capabilities

    def send(self, xml):
        """send xml string by connection"""
        if self.current_level == NETCONF_1_1_CAPABILITY:
            self._send_1_1(xml)
            return self._recv_1_1()
        else:
            self._send_1_0(xml)
            return self._recv_1_0()

    def _send_1_0(self, xml):
        """send xml string with NETCONF_1_0_END by connection"""
        if xml:
            message = xml + NETCONF_1_0_END
            self._conn_send(message)

    def _recv_1_0(self):
        """recv xml string with NETCONF_1_0_END by connection"""
        while self.buff.find(NETCONF_1_0_END) == -1:
            self.buff += self._conn_recv(8192)
            if self.conn.closed:
                break

        package_end = self.buff.find(NETCONF_1_0_END)
        # we have already closed connection
        if package_end == -1:
            package_end = len(self.buff)
        response = self.buff[:package_end]
        self.buff = self.buff[package_end + len(NETCONF_1_0_END):]
        return response

    def _send_1_1(self, xml):
        """send xml string as package by connection"""
        if xml:
            message = "\n#{0}\n".format(len(xml))
            message += xml
            message += "\n##\n"
            self._conn_send(message)

    def _recv_1_1(self):
        """recv xml string as package by connection"""
        get_everything = False
        response = ""
        while not get_everything:
            if len(self.buff) < 2:
                self.buff += self._conn_recv(2)
            # skip new line
            if self.buff[:2] != "\n#":
                # We have already closed connection,
                # caller should stop asking for new messages
                if not self.buff and self.conn.closed:
                    return ""
                raise exceptions.NonRecoverableError("no start")
            self.buff = self.buff[2:]
            # get package length
            while self.buff.find("\n") == -1:
                self.buff += self._conn_recv(20)
            if self.buff[:2] == "#\n":
                get_everything = True
                self.buff = self.buff[2:]
                break
            length = int(self.buff[:self.buff.find("\n")])
            self.buff = self.buff[self.buff.find("\n") + 1:]
            # load current package
            while length > len(self.buff):
                self.buff += self._conn_recv(length - len(self.buff))
            response += self.buff[:length]
            self.buff = self.buff[length:]
        return response

    def close(self, goodbye_string=None):
        """send xml string by link and close connection"""
        response = None
        if goodbye_string:
            # we have something to say
            response = self.send(goodbye_string)
        self._ssh_close()
        return response
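For orientation, here is a usage sketch for the class above. It is not taken from the repository: the hello message is the standard NETCONF 1.0 <hello> defined by RFC 6241, and the host, credentials, and RPC payloads are placeholder assumptions.

HELLO = (
    '<?xml version="1.0" encoding="UTF-8"?>'
    '<hello xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">'
    '<capabilities>'
    '<capability>urn:ietf:params:netconf:base:1.0</capability>'
    '</capabilities>'
    '</hello>'
)

conn = NetConfConnection()
# Placeholder host and credentials; 830 is the default NETCONF-over-SSH port.
server_capabilities = conn.connect('192.0.2.10', 'admin', HELLO, password='secret')
reply = conn.send('<rpc message-id="1" '
                  'xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">'
                  '<get-config><source><running/></source></get-config></rpc>')
conn.close('<rpc message-id="2" '
           'xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">'
           '<close-session/></rpc>')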
repo_name: dyt1990/Seis_DCEC
repo_path: Seismic_Conv1D_dec.py
repo_head_hexsha: 6cc56a7db10dd87b0ef39ece73578fca8b23c55f
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 19 17:48:13 2018

@author: Sediment
"""

'''
Keras implementation of deep embedder to improve clustering, inspired by:
"Unsupervised Deep Embedding for Clustering Analysis" (Xie et al, ICML 2016)

Definition can accept somewhat custom neural networks. Defaults are from paper.
'''

import sys
import numpy as np
import pandas as pd
import keras.backend as K

from keras.initializers import RandomNormal
from keras.engine.topology import Layer, InputSpec
from keras.models import Model, Sequential
from keras.layers import Dense, Dropout, Input, Conv1D, MaxPooling1D, BatchNormalization, Activation, Flatten, UpSampling1D, Reshape
from keras.optimizers import SGD, RMSprop, Adagrad, Adadelta, Adam, Nadam
from keras.regularizers import l2
from keras.callbacks import LearningRateScheduler
from sklearn.preprocessing import normalize
from sklearn.utils.linear_assignment_ import linear_assignment
from sklearn.metrics import normalized_mutual_info_score, adjusted_rand_score
from sklearn import manifold
from sklearn.cluster import KMeans
from sklearn.decomposition import PCA
from matplotlib import pyplot as plt

if sys.version_info[0] == 2:
    import cPickle as pickle
else:
    import pickle


class ClusteringLayer(Layer):
    '''
    Clustering layer which converts latent space Z of input layer
    into a probability vector for each cluster defined by its centre in
    Z-space. Use Kullback-Leibler divergence as loss, with a probability
    target distribution.

    # Arguments
        output_dim: int > 0. Should be same as number of clusters.
        input_dim: dimensionality of the input (integer).
            This argument (or alternatively, the keyword argument `input_shape`)
            is required when using this layer as the first layer in a model.
        weights: list of Numpy arrays to set as initial weights.
            The list should have 2 elements, of shape `(input_dim, output_dim)`
            and (output_dim,) for weights and biases respectively.
        alpha: parameter in Student's t-distribution. Default is 1.0.
    # Input shape
        2D tensor with shape: `(nb_samples, input_dim)`.
    # Output shape
        2D tensor with shape: `(nb_samples, output_dim)`.
    '''
    def __init__(self, output_dim, input_dim=None, weights=None, alpha=1.0, **kwargs):
        self.output_dim = output_dim
        self.input_dim = input_dim
        self.alpha = alpha
        # kmeans cluster centre locations
        self.initial_weights = weights
        self.input_spec = [InputSpec(ndim=2)]

        if self.input_dim:
            kwargs['input_shape'] = (self.input_dim,)
        super(ClusteringLayer, self).__init__(**kwargs)

    def build(self, input_shape):
        assert len(input_shape) == 2
        input_dim = input_shape[1]
        self.input_spec = [InputSpec(dtype=K.floatx(), shape=(None, input_dim))]

        self.W = K.variable(self.initial_weights)
        self.trainable_weights = [self.W]

    def call(self, x, mask=None):
        q = 1.0/(1.0 + K.sqrt(K.sum(K.square(K.expand_dims(x, 1) - self.W), axis=2))**2 / self.alpha)
        q = q**((self.alpha+1.0)/2.0)
        q = K.transpose(K.transpose(q)/K.sum(q, axis=1))
        return q

    def get_output_shape_for(self, input_shape):
        assert input_shape and len(input_shape) == 2
        return (input_shape[0], self.output_dim)

    def compute_output_shape(self, input_shape):
        assert input_shape and len(input_shape) == 2
        return (input_shape[0], self.output_dim)

    def get_config(self):
        config = {'output_dim': self.output_dim,
                  'input_dim': self.input_dim}
        base_config = super(ClusteringLayer, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))


class DeepEmbeddingClustering(object):
    def __init__(self,
                 n_clusters,
                 input_dim,
                 learning_rate=0.1,
                 encoded=None,
                 decoded=None,
                 alpha=1.0,
                 pretrained_weights=None,
                 cluster_centres=None,
                 batch_size=256,
                 conv_filters=[8, 16, 32],
                 kernel_size=12,
                 Maxpooling_size=2,
                 LatentSpace_Z=25,
                 finetune_epochs=5,
                 **kwargs):

        super(DeepEmbeddingClustering, self).__init__()

        self.n_clusters = n_clusters
        self.input_dim = input_dim
        self.encoded = encoded
        self.decoded = decoded
        self.alpha = alpha
        self.pretrained_weights = pretrained_weights
        self.cluster_centres = cluster_centres
        self.batch_size = batch_size
        self.learning_rate = learning_rate

        self.iters_lr_update = 6000
        self.lr_change_rate = 0.1
        self.finetune_epochs = finetune_epochs
        self.conv_filters = conv_filters
        self.kernel_size = kernel_size
        self.Maxpooling_size = Maxpooling_size
        self.LatentSpace_Z = LatentSpace_Z

        self.encoders = []
        self.decoders = []

        input_data = Input(shape=(self.input_dim, 1))
        x = Conv1D(self.conv_filters[0], (self.kernel_size), activation='relu', padding='same')(input_data)
        # x = BatchNormalization()(x)
        # x = Activation('relu')(x)
        x = MaxPooling1D((self.Maxpooling_size), padding='same')(x)
        x = Conv1D(self.conv_filters[1], (self.kernel_size), activation='relu', padding='same')(x)
        # x = BatchNormalization()(x)
        # x = Activation('relu')(x)
        x = MaxPooling1D((self.Maxpooling_size), padding='same')(x)
        x = Conv1D(self.conv_filters[2], (self.kernel_size), activation='relu', padding='same')(x)
        # x = BatchNormalization()(x)
        # x = Activation('relu')(x)
        x = MaxPooling1D((self.Maxpooling_size), padding='same')(x)
        # at this point the representation is (16 x conv_filters) i.e. 128-dimensional
        x = Flatten()(x)
        encoded = Dense(LatentSpace_Z, activation='relu')(x)

        # 256 = input_data / ((2^maxpool_num) * conv_filters * 4)
        x = Dense(self.input_dim // (2**3) * self.conv_filters[2],
                  kernel_initializer=RandomNormal(mean=0.0, stddev=0.01, seed=None),
                  bias_initializer='zeros', activation='relu')(encoded)
        # one factor of 2 per max-pooling layer, e.g. 16 * 2 * 2 * 2 = 128
        x = Reshape((self.input_dim // (2**3), self.conv_filters[2]))(x)
        x = Conv1D(self.conv_filters[2], (self.kernel_size), activation='relu', padding='same')(x)
        # x = BatchNormalization()(x)
        # x = Activation('relu')(x)
        x = UpSampling1D((self.Maxpooling_size))(x)
        x = Conv1D(self.conv_filters[1], (self.kernel_size), activation='relu', padding='same')(x)
        # x = BatchNormalization()(x)
        # x = Activation('relu')(x)
        x = UpSampling1D((self.Maxpooling_size))(x)
        x = Conv1D(self.conv_filters[0], (1), activation='relu')(x)
        # x = BatchNormalization()(x)
        # x = Activation('relu')(x)
        x = UpSampling1D((self.Maxpooling_size))(x)
        decoded = Conv1D(1, (self.kernel_size), activation='relu', padding='same')(x)

        self.autoencoder = Model(input_data, decoded)
        self.autoencoder.summary()

        self.encoder = Model(input_data, encoded)

        # build the end-to-end autoencoder for finetuning
        # Note that at this point dropout is discarded
        self.encoder.compile(loss='mse', optimizer=SGD(lr=self.learning_rate, decay=0, momentum=0.9))
        self.autoencoder.compile(loss='mse', optimizer=SGD(lr=self.learning_rate, decay=0, momentum=0.9))

        if cluster_centres is not None:
            assert cluster_centres.shape[0] == self.n_clusters
            assert cluster_centres.shape[1] == self.encoder.layers[-1].output_dim

        if self.pretrained_weights is not None:
            self.autoencoder.load_weights(self.pretrained_weights)

    def p_mat(self, q):
        weight = q**2 / q.sum(0)
        return (weight.T / weight.sum(1)).T

    def initialize(self, X, save_autoencoder=False, finetune_iters=5000):
        if self.pretrained_weights is None:
            iters_per_epoch = int(len(X) / self.batch_size)
            print('layerwise pretrain')
            lr_epoch_update = max(1, self.iters_lr_update / float(iters_per_epoch))

            def step_decay(epoch):
                initial_rate = self.learning_rate
                factor = int(epoch / lr_epoch_update)
                lr = initial_rate / (10 ** factor)
                return lr

            lr_schedule = LearningRateScheduler(step_decay)
            # update encoder and decoder weights:
            self.autoencoder.fit(X, X, batch_size=self.batch_size, epochs=self.finetune_epochs, callbacks=[lr_schedule])

            if save_autoencoder:
                self.autoencoder.save_weights('autoencoder.h5')
        else:
            print('Loading pretrained weights for autoencoder.')
            self.autoencoder.load_weights(self.pretrained_weights)

        # update encoder, decoder
        # TODO: is this needed? Might be redundant...
        for i in range(len(self.encoder.layers)):
            self.encoder.layers[i].set_weights(self.autoencoder.layers[i].get_weights())

        # initialize cluster centres using k-means
        print('Initializing cluster centres with k-means.')
        if self.cluster_centres is None:
            np.random.seed(42)  # random seed, used to initialize the cluster centres
            kmeans = KMeans(n_clusters=self.n_clusters, max_iter=100, n_init=6,
                            precompute_distances='auto', random_state=None, tol=1e-4)
            self.y_pred = kmeans.fit_predict(self.encoder.predict(X))
            self.cluster_centres = kmeans.cluster_centers_
            print('cluster_centres:\n ', self.cluster_centres)

        # prepare DCEC model
        self.DCEC = Sequential([self.encoder,
                                ClusteringLayer(self.n_clusters, weights=self.cluster_centres, name='clustering')])
        self.DCEC.compile(loss='kullback_leibler_divergence',
                          optimizer=SGD(lr=self.learning_rate, decay=0, momentum=0.9))
        # loss: 'mean_squared_error', 'categorical_crossentropy', 'hinge', 'squared_hinge'
        return

    def visualizeData(self, Z, labels, num_clusters, csv_filename, title):
        '''
        TSNE visualization of the points in latent space Z
        :param Z: Numpy array containing points in latent space in which clustering was performed
        :param labels: True labels - used for coloring points
        :param num_clusters: Total number of clusters
        :param title: filename where the plot should be saved
        :return: None - (side effect) saves clustering visualization plot in specified location
        '''
        print('Start visualizing Data')
        labels = labels.astype(int)
        tsne = manifold.TSNE(n_components=2, init='pca', random_state=0)
        Z_tsne = tsne.fit_transform(Z)
        fig = plt.figure()
        plt.scatter(Z_tsne[:, 0], Z_tsne[:, 1], s=2, c=labels, cmap=plt.cm.get_cmap("jet", num_clusters))
        plt.colorbar(ticks=range(num_clusters))
        # fig.savefig(title, dpi=fig.dpi)
        fig.savefig(title, dpi=600)

        # save t_sne results
        print('Save t_sne results')
        dataframe = pd.DataFrame({'Z_tsne_x': Z_tsne[:, 0], 'Z_tsne_y': Z_tsne[:, 1], 'labels': labels})
        dataframe.to_csv(csv_filename, index=False, sep=',')

    def cluster(self, X, y=None,
                tol=0.001, update_interval=None,
                iter_max=799,
                save_interval=None,
                **kwargs):

        if update_interval is None:
            # 1 epoch
            update_interval = X.shape[0]/self.batch_size
        print('Update interval', update_interval)

        if save_interval is None:
            # 50 epochs
            save_interval = X.shape[0]/self.batch_size*50
        print('Save interval', save_interval)

        assert save_interval >= update_interval

        train = True
        iteration, index = 0, 0
        self.accuracy = []

        while train:
            sys.stdout.write('\r')
            # cutoff iteration
            if iter_max < iteration:
                print('Reached maximum iteration limit. Stopping training.')
                return self.y_pred

            # update (or initialize) probability distributions and propagate weight changes
            # from DCEC model to encoder.
            if iteration % update_interval == 0:
                self.q = self.DCEC.predict(X, verbose=0)
                self.p = self.p_mat(self.q)

                y_pred = self.q.argmax(1)
                delta_label = ((y_pred == self.y_pred).sum().astype(np.float32) / y_pred.shape[0])
                if y is None:
                    print(str(np.round(delta_label*100, 5)) + '% change in label assignment')

                if iteration > 0 and delta_label < tol:
                    print('delta_label ', delta_label, '< tol ', tol)
                    print('Reached tolerance threshold. Stopping training.')
                    train = False
                    continue
                else:
                    self.y_pred = y_pred

                for i in range(len(self.encoder.layers)):
                    self.encoder.layers[i].set_weights(self.DCEC.layers[0].layers[i].get_weights())
                self.cluster_centres = self.DCEC.layers[-1].get_weights()[0]

            # train on batch
            sys.stdout.write('Iteration %d, ' % iteration)
            if (index+1)*self.batch_size >= X.shape[0]:
                loss = self.DCEC.train_on_batch(X[index*self.batch_size::], self.p[index*self.batch_size::])
                index = 0
                sys.stdout.write('Loss %f\n' % loss)
            else:
                loss = self.DCEC.train_on_batch(X[index*self.batch_size:(index+1) * self.batch_size],
                                                self.p[index*self.batch_size:(index+1) * self.batch_size])
                sys.stdout.write('Loss %f\n' % loss)
                index += 1

            # save intermediate
            if iteration % save_interval == 0:
                z = self.encoder.predict(X)
                pca = PCA(n_components=2).fit(z)
                z_2d = pca.transform(z)
                clust_2d = pca.transform(self.cluster_centres)
                # save states for visualization
                pickle.dump({'z_2d': z_2d, 'clust_2d': clust_2d, 'q': self.q, 'p': self.p},
                            open('c'+str(iteration)+'.pkl', 'wb'))
                # save DCEC model checkpoints
                self.DCEC.save('DCEC_model_'+str(iteration)+'.h5')

            iteration += 1
            sys.stdout.flush()
        return y_pred
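To tie the pieces together, a minimal driver sketch follows. It is illustrative only: the trace count, trace length, cluster count, and latent size are assumptions, not values from the repository. Note that p_mat implements the DEC target distribution p_ij = (q_ij^2 / f_j) / sum over j' of (q_ij'^2 / f_j'), with f_j = sum over i of q_ij, which sharpens the soft assignments q produced by ClusteringLayer.

import numpy as np

# Assume 1000 seismic traces of 256 samples each, shaped (N, input_dim, 1)
# to match the Conv1D input of the autoencoder; 256 is divisible by 2**3,
# as the three max-pooling stages require.
X = np.random.rand(1000, 256, 1).astype('float32')

dcec = DeepEmbeddingClustering(n_clusters=4, input_dim=256, LatentSpace_Z=25)
dcec.initialize(X, save_autoencoder=True)   # pretrain autoencoder + k-means init
labels = dcec.cluster(X, iter_max=799)      # refine clusters with the KL objective
z = dcec.encoder.predict(X)                 # latent features for visualization
dcec.visualizeData(z, labels, 4, 'tsne_results.csv', 'tsne_plot.png')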
xiguadong/ppq
ppq/utils/round.py
6c71adb3c2a8ca95967f101724b5e4b3e6f761ff
from decimal import ROUND_HALF_DOWN, ROUND_HALF_EVEN, ROUND_HALF_UP, Decimal
from math import ceil, floor, log2
from typing import Union

import torch
from ppq.core import RoundingPolicy


def ppq_numerical_round(value: float, policy: RoundingPolicy=RoundingPolicy.ROUND_HALF_EVEN) -> int:
    """
    reference: https://en.wikipedia.org/wiki/Rounding

    decimal definition:
        - decimal.ROUND_CEILING (towards Infinity)
        - decimal.ROUND_DOWN (towards zero)
        - decimal.ROUND_FLOOR (towards -Infinity)
        - decimal.ROUND_HALF_DOWN (to nearest with ties going towards zero)
        - decimal.ROUND_HALF_EVEN (to nearest with ties going to nearest even integer)
        - decimal.ROUND_HALF_UP (to nearest with ties going away from zero)
        - decimal.ROUND_UP (away from zero)
        - decimal.ROUND_05UP (away from zero if last digit after rounding towards zero
          would have been 0 or 5; otherwise towards zero)

    Args:
        value (float): [description]
        policy (RoundingPolicy, optional): [description]. Defaults to RoundingPolicy.ROUND_HALF_EVEN.

    Raises:
        ValueError: [description]

    Returns:
        int: [description]
    """
    assert isinstance(value, float), 'numerical round only takes effect on float number.'
    if policy == RoundingPolicy.ROUND_HALF_EVEN:
        return int(Decimal(value).quantize(exp=Decimal(1), rounding=ROUND_HALF_EVEN))
    elif policy == RoundingPolicy.ROUND_HALF_UP:
        if value > 0: return int(Decimal(value).quantize(exp=Decimal(1), rounding=ROUND_HALF_UP))
        else: return int(Decimal(value).quantize(exp=Decimal(1), rounding=ROUND_HALF_DOWN))
    elif policy == RoundingPolicy.ROUND_HALF_DOWN:
        if value > 0: return int(Decimal(value).quantize(exp=Decimal(1), rounding=ROUND_HALF_DOWN))
        else: return int(Decimal(value).quantize(exp=Decimal(1), rounding=ROUND_HALF_UP))
    elif policy == RoundingPolicy.ROUND_HALF_TOWARDS_ZERO:
        return ppq_numerical_round(value, RoundingPolicy.ROUND_HALF_DOWN)
    elif policy == RoundingPolicy.ROUND_HALF_FAR_FORM_ZERO:
        return ppq_numerical_round(value, RoundingPolicy.ROUND_HALF_UP)
    elif policy == RoundingPolicy.ROUND_TO_NEAR_INT:
        if value > 0: return floor(value + 0.5)
        else: return ceil(value - 0.5)
    elif policy == RoundingPolicy.ROUND_UP:
        return ceil(value)
    else:
        raise ValueError('Unexpected rounding policy found.')


def ppq_tensor_round(value: torch.Tensor, policy: RoundingPolicy=RoundingPolicy.ROUND_HALF_EVEN) -> torch.Tensor:
    """
    reference: https://en.wikipedia.org/wiki/Rounding

    Args:
        value (torch.Tensor): [description]
        policy (RoundingPolicy, optional): [description]. Defaults to RoundingPolicy.ROUND_HALF_EVEN.

    Raises:
        ValueError: [description]

    Returns:
        torch.Tensor: [description]
    """
    assert isinstance(value, torch.Tensor), 'tensor round only takes effect on torch tensor.'
    if policy == RoundingPolicy.ROUND_HALF_EVEN:
        # default rounding policy of torch is ROUND_TO_NEAR_EVEN
        # try this: print(torch.Tensor([1.5, 2.5, 3.5, 4.5]).round())
        # However it may generate unexpected results due to version difference.
        return value.round()
    elif policy == RoundingPolicy.ROUND_UP:
        return value.ceil()
    elif policy == RoundingPolicy.ROUND_HALF_TOWARDS_ZERO:
        return torch.sign(value) * torch.ceil(value.abs() - 0.5)
    elif policy == RoundingPolicy.ROUND_HALF_FAR_FORM_ZERO:
        return torch.sign(value) * torch.floor(value.abs() + 0.5)
    elif policy == RoundingPolicy.ROUND_HALF_DOWN:
        return torch.ceil(value - 0.5)
    elif policy == RoundingPolicy.ROUND_HALF_UP:
        return torch.floor(value + 0.5)
    elif policy == RoundingPolicy.ROUND_TO_NEAR_INT:
        raise NotImplementedError(f'Torch Tensor can not use this rounding policy({policy}) try ROUND_HALF_EVEN instead.')
    else:
        raise ValueError('Unexpected rounding policy found.')


def ppq_round_to_power_of_2(value: Union[float, int], policy: RoundingPolicy=RoundingPolicy.ROUND_UP) -> float:
    if value == 0: return 0
    sign = 1 if value >= 0 else -1
    assert isinstance(value, float) or isinstance(value, int), \
        'power-of-2 round only takes effect on float or int.'
    return sign * float(pow(2, ppq_numerical_round(log2(sign * value), policy=policy)))
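
# A short usage sketch of the helpers above (illustrative only; `_rounding_demo`
# is a hypothetical helper, not part of the ppq API). ROUND_HALF_EVEN sends
# ties to the nearest even integer; the power-of-2 helper rounds the exponent:
def _rounding_demo():
    assert ppq_numerical_round(2.5) == 2    # tie -> even neighbour
    assert ppq_numerical_round(3.5) == 4    # tie -> even neighbour
    assert ppq_numerical_round(2.5, RoundingPolicy.ROUND_HALF_UP) == 3
    # log2(6) ~= 2.585 is rounded up to 3, so 6.0 snaps to 2 ** 3 = 8.0
    assert ppq_round_to_power_of_2(6.0) == 8.0
    assert ppq_round_to_power_of_2(-6.0) == -8.0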
[((36, 19, 36, 33), 'decimal.Decimal', 'Decimal', ({(36, 27, 36, 32): 'value'}, {}), '(value)', False, 'from decimal import ROUND_HALF_DOWN, ROUND_HALF_EVEN, ROUND_HALF_UP, Decimal\n'), ((36, 47, 36, 57), 'decimal.Decimal', 'Decimal', ({(36, 55, 36, 56): '(1)'}, {}), '(1)', False, 'from decimal import ROUND_HALF_DOWN, ROUND_HALF_EVEN, ROUND_HALF_UP, Decimal\n'), ((79, 15, 79, 32), 'torch.sign', 'torch.sign', ({(79, 26, 79, 31): 'value'}, {}), '(value)', False, 'import torch\n'), ((97, 51, 97, 69), 'math.log2', 'log2', ({(97, 56, 97, 68): '(sign * value)'}, {}), '(sign * value)', False, 'from math import ceil, floor, log2\n'), ((81, 15, 81, 32), 'torch.sign', 'torch.sign', ({(81, 26, 81, 31): 'value'}, {}), '(value)', False, 'import torch\n'), ((83, 15, 83, 38), 'torch.ceil', 'torch.ceil', ({(83, 26, 83, 37): '(value - 0.5)'}, {}), '(value - 0.5)', False, 'import torch\n'), ((38, 33, 38, 47), 'decimal.Decimal', 'Decimal', ({(38, 41, 38, 46): 'value'}, {}), '(value)', False, 'from decimal import ROUND_HALF_DOWN, ROUND_HALF_EVEN, ROUND_HALF_UP, Decimal\n'), ((38, 61, 38, 71), 'decimal.Decimal', 'Decimal', ({(38, 69, 38, 70): '(1)'}, {}), '(1)', False, 'from decimal import ROUND_HALF_DOWN, ROUND_HALF_EVEN, ROUND_HALF_UP, Decimal\n'), ((39, 25, 39, 39), 'decimal.Decimal', 'Decimal', ({(39, 33, 39, 38): 'value'}, {}), '(value)', False, 'from decimal import ROUND_HALF_DOWN, ROUND_HALF_EVEN, ROUND_HALF_UP, Decimal\n'), ((39, 53, 39, 63), 'decimal.Decimal', 'Decimal', ({(39, 61, 39, 62): '(1)'}, {}), '(1)', False, 'from decimal import ROUND_HALF_DOWN, ROUND_HALF_EVEN, ROUND_HALF_UP, Decimal\n'), ((85, 15, 85, 39), 'torch.floor', 'torch.floor', ({(85, 27, 85, 38): '(value + 0.5)'}, {}), '(value + 0.5)', False, 'import torch\n'), ((41, 33, 41, 47), 'decimal.Decimal', 'Decimal', ({(41, 41, 41, 46): 'value'}, {}), '(value)', False, 'from decimal import ROUND_HALF_DOWN, ROUND_HALF_EVEN, ROUND_HALF_UP, Decimal\n'), ((41, 61, 41, 71), 'decimal.Decimal', 'Decimal', ({(41, 69, 41, 70): '(1)'}, {}), '(1)', False, 'from decimal import ROUND_HALF_DOWN, ROUND_HALF_EVEN, ROUND_HALF_UP, Decimal\n'), ((42, 25, 42, 39), 'decimal.Decimal', 'Decimal', ({(42, 33, 42, 38): 'value'}, {}), '(value)', False, 'from decimal import ROUND_HALF_DOWN, ROUND_HALF_EVEN, ROUND_HALF_UP, Decimal\n'), ((42, 53, 42, 63), 'decimal.Decimal', 'Decimal', ({(42, 61, 42, 62): '(1)'}, {}), '(1)', False, 'from decimal import ROUND_HALF_DOWN, ROUND_HALF_EVEN, ROUND_HALF_UP, Decimal\n'), ((48, 29, 48, 47), 'math.floor', 'floor', ({(48, 35, 48, 46): '(value + 0.5)'}, {}), '(value + 0.5)', False, 'from math import ceil, floor, log2\n'), ((49, 21, 49, 38), 'math.ceil', 'ceil', ({(49, 26, 49, 37): '(value - 0.5)'}, {}), '(value - 0.5)', False, 'from math import ceil, floor, log2\n'), ((51, 15, 51, 26), 'math.ceil', 'ceil', ({(51, 20, 51, 25): 'value'}, {}), '(value)', False, 'from math import ceil, floor, log2\n')]
maropu/scavenger
python/repair/train.py
03a935968f4aa507d4d98c8ca528195b770757d9
#!/usr/bin/env python3

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import copy
import time

import numpy as np  # type: ignore[import]
import pandas as pd  # type: ignore[import]

from collections import namedtuple
from typing import Any, Dict, List, Optional, Tuple

from repair.utils import elapsed_time, get_option_value, setup_logger


_logger = setup_logger()


# List of internal configurations
_option = namedtuple('_option', 'key default_value type_class validator err_msg')

_opt_boosting_type = \
    _option('model.lgb.boosting_type', 'gbdt', str,
            lambda v: v in ['gbdt', 'dart', 'goss', 'rf'],
            "`{}` should be in ['gbdt', 'dart', 'goss', 'rf']")
_opt_class_weight = \
    _option('model.lgb.class_weight', 'balanced', str, None, None)
_opt_learning_rate = \
    _option('model.lgb.learning_rate', 0.01, float,
            lambda v: v > 0.0, '`{}` should be positive')
_opt_max_depth = \
    _option('model.lgb.max_depth', 7, int, None, None)
_opt_max_bin = \
    _option('model.lgb.max_bin', 255, int, None, None)
_opt_reg_alpha = \
    _option('model.lgb.reg_alpha', 0.0, float,
            lambda v: v >= 0.0, '`{}` should be greater than or equal to 0.0')
_opt_min_split_gain = \
    _option('model.lgb.min_split_gain', 0.0, float,
            lambda v: v >= 0.0, '`{}` should be greater than or equal to 0.0')
_opt_n_estimators = \
    _option('model.lgb.n_estimators', 300, int,
            lambda v: v > 0, '`{}` should be positive')
_opt_importance_type = \
    _option('model.lgb.importance_type', 'gain', str,
            lambda v: v in ['split', 'gain'], "`{}` should be in ['split', 'gain']")
_opt_n_splits = \
    _option('model.cv.n_splits', 3, int,
            lambda v: v >= 3, '`{}` should be greater than 2')
_opt_timeout = \
    _option('model.hp.timeout', 0, int, None, None)
_opt_max_evals = \
    _option('model.hp.max_evals', 100000000, int,
            lambda v: v > 0, '`{}` should be positive')
_opt_no_progress_loss = \
    _option('model.hp.no_progress_loss', 50, int,
            lambda v: v > 0, '`{}` should be positive')

train_option_keys = [
    _opt_boosting_type.key, _opt_class_weight.key, _opt_learning_rate.key,
    _opt_max_depth.key, _opt_max_bin.key, _opt_reg_alpha.key,
    _opt_min_split_gain.key, _opt_n_estimators.key, _opt_importance_type.key,
    _opt_n_splits.key, _opt_timeout.key, _opt_max_evals.key,
    _opt_no_progress_loss.key
]


@elapsed_time  # type: ignore
def _build_lgb_model(X: pd.DataFrame, y: pd.Series, is_discrete: bool, num_class: int, n_jobs: int,
                     opts: Dict[str, str]) -> Tuple[Any, float]:
    import lightgbm as lgb  # type: ignore[import]

    def _get_option_value(*args) -> Any:  # type: ignore
        return get_option_value(opts, *args)

    if is_discrete:
        objective = "binary" if num_class <= 2 else "multiclass"
    else:
        objective = "regression"

    fixed_params = {
        "boosting_type": _get_option_value(*_opt_boosting_type),
        "objective": objective,
        "class_weight": _get_option_value(*_opt_class_weight),
        "learning_rate": _get_option_value(*_opt_learning_rate),
        "max_depth": _get_option_value(*_opt_max_depth),
        "max_bin": _get_option_value(*_opt_max_bin),
        "reg_alpha": _get_option_value(*_opt_reg_alpha),
        "min_split_gain": _get_option_value(*_opt_min_split_gain),
        "n_estimators": _get_option_value(*_opt_n_estimators),
        "importance_type": _get_option_value(*_opt_importance_type),
        "random_state": 42,
        "n_jobs": n_jobs
    }

    # Set `num_class` only in the `multiclass` mode
    if objective == "multiclass":
        fixed_params["num_class"] = num_class

    model_class = lgb.LGBMClassifier if is_discrete \
        else lgb.LGBMRegressor

    def _create_model(params: Dict[str, Any]) -> Any:
        # Some params must be int
        for k in ["num_leaves", "subsample_freq", "min_child_samples"]:
            if k in params:
                params[k] = int(params[k])
        p = copy.deepcopy(fixed_params)
        p.update(params)
        return model_class(**p)

    from hyperopt import hp, tpe, Trials  # type: ignore[import]
    from hyperopt.early_stop import no_progress_loss  # type: ignore[import]
    from hyperopt.fmin import fmin  # type: ignore[import]
    from sklearn.model_selection import (  # type: ignore[import]
        cross_val_score, KFold, StratifiedKFold
    )

    # TODO: Temporality supress `sklearn.model_selection` user's warning
    import warnings
    warnings.simplefilter("ignore", UserWarning)

    # Forcibly disable INFO-level logging in the `hyperopt` module
    from logging import getLogger, WARN
    getLogger("hyperopt").setLevel(WARN)

    param_space = {
        "num_leaves": hp.quniform("num_leaves", 2, 100, 1),
        "subsample": hp.uniform("subsample", 0.5, 1.0),
        "subsample_freq": hp.quniform("subsample_freq", 1, 20, 1),
        "colsample_bytree": hp.uniform("colsample_bytree", 0.01, 1.0),
        "min_child_samples": hp.quniform("min_child_samples", 1, 50, 1),
        "min_child_weight": hp.loguniform("min_child_weight", -3, 1),
        "reg_lambda": hp.loguniform("reg_lambda", -2, 3)
    }

    scorer = "f1_macro" if is_discrete else "neg_mean_squared_error"
    n_splits = int(_get_option_value(*_opt_n_splits))
    cv = StratifiedKFold(n_splits=n_splits, shuffle=True) if is_discrete \
        else KFold(n_splits=n_splits, shuffle=True)

    def _objective(params: Dict[str, Any]) -> float:
        model = _create_model(params)
        fit_params: Dict[str, str] = {
            # TODO: Raises an error if a single regressor is used
            # "categorical_feature": "auto",
        }
        try:
            # TODO: Replace with `lgb.cv` to remove the `sklearn` dependency
            scores = cross_val_score(
                model, X, y, scoring=scorer, cv=cv, fit_params=fit_params, n_jobs=n_jobs)
            return -scores.mean()

        # it might throw an exception because `y` contains
        # previously unseen labels.
        except Exception as e:
            _logger.warning(f"{e.__class__}: {e}")
            return 0.0

    def _early_stop_fn() -> Any:
        no_progress_loss_fn = no_progress_loss(int(_get_option_value(*_opt_no_progress_loss)))
        timeout = int(_get_option_value(*_opt_timeout))
        if timeout <= 0:
            return no_progress_loss_fn

        # Set base time for budget mechanism
        start_time = time.time()

        def timeout_fn(trials, best_loss=None, iteration_no_progress=0):  # type: ignore
            no_progress_loss, meta = no_progress_loss_fn(trials, best_loss, iteration_no_progress)
            to = time.time() - start_time > timeout
            return no_progress_loss or to, meta

        return timeout_fn

    try:
        trials = Trials()
        max_evals = int(_get_option_value(*_opt_max_evals))
        best_params = fmin(
            fn=_objective,
            space=param_space,
            algo=tpe.suggest,
            trials=trials,
            max_evals=max_evals,
            early_stop_fn=_early_stop_fn(),
            rstate=np.random.RandomState(42),
            show_progressbar=False,
            verbose=False)
        _logger.info("hyperopt: #eval={}/{}".format(len(trials.trials), max_evals))

        # Builds a model with `best_params`
        # TODO: Could we extract constraint rules (e.g., FD and CFD) from built statistical models?
        model = _create_model(best_params)
        model.fit(X, y)

        def _feature_importances() -> List[Any]:
            f = filter(lambda x: x[1] > 0.0, zip(model.feature_name_, model.feature_importances_))
            return list(sorted(f, key=lambda x: x[1], reverse=True))

        _logger.debug(f"lightgbm: feature_importances={_feature_importances()}")

        sorted_lst = sorted(trials.trials, key=lambda x: x['result']['loss'])
        min_loss = sorted_lst[0]['result']['loss']
        return model, -min_loss
    except Exception as e:
        _logger.warning(f"Failed to build a stat model because: {e}")
        return None, 0.0


def build_model(X: pd.DataFrame, y: pd.Series, is_discrete: bool, num_class: int, n_jobs: int,
                opts: Dict[str, str]) -> Tuple[Any, float]:
    return _build_lgb_model(X, y, is_discrete, num_class, n_jobs, opts)


def compute_class_nrow_stdv(y: pd.Series, is_discrete: bool) -> Optional[float]:
    from collections import Counter
    return float(np.std(list(map(lambda x: x[1], Counter(y).items())))) if is_discrete else None


def rebalance_training_data(X: pd.DataFrame, y: pd.Series, target: str) -> Tuple[pd.DataFrame, pd.Series]:
    # Uses median as the number of training rows for each class
    from collections import Counter
    prev_nrows = len(X)
    prev_stdv = compute_class_nrow_stdv(y, is_discrete=True)
    hist = dict(Counter(y).items())  # type: ignore
    median = int(np.median([count for key, count in hist.items()]))

    def _split_data(df: pd.DataFrame) -> Tuple[pd.DataFrame, pd.Series]:
        X = df[df.columns[df.columns != target]]  # type: ignore
        y = df[target]
        return X, y

    # Filters out rows having NaN values for over-sampling
    X[target] = y
    X_notna, y_notna = _split_data(X.dropna())
    X_na, y_na = _split_data(X[X.isnull().any(axis=1)])

    # Over-sampling for training data whose row number is smaller than the median value
    hist_na = dict(Counter(y_na).items())  # type: ignore
    smote_targets = []
    kn = 5  # `k_neighbors` default value in `SMOTEN`
    for key, count in hist.items():
        if count < median:
            nna = hist_na[key] if key in hist_na else 0
            if count - nna > kn:
                smote_targets.append((key, median - nna))
            else:
                _logger.warning(f"Over-sampling of '{key}' in y='{target}' failed because the number of the clean rows "
                                f"is too small: {count - nna}")

    if len(smote_targets) > 0:
        from imblearn.over_sampling import SMOTEN
        sampler = SMOTEN(random_state=42, sampling_strategy=dict(smote_targets), k_neighbors=kn)
        X_notna, y_notna = sampler.fit_resample(X_notna, y_notna)

    X = pd.concat([X_notna, X_na])
    y = pd.concat([y_notna, y_na])

    # Under-sampling for training data whose row number is greater than the median value
    rus_targets = list(map(lambda x: (x[0], median), filter(lambda x: x[1] > median, hist.items())))
    if len(rus_targets) > 0:
        # NOTE: The other smarter implementations can skew samples if there are many rows having NaN values,
        # so we just use `RandomUnderSampler` here.
        from imblearn.under_sampling import RandomUnderSampler
        sampler = RandomUnderSampler(random_state=42, sampling_strategy=dict(rus_targets))
        X, y = sampler.fit_resample(X, y)

    _logger.info("Rebalanced training data (y={}, median={}): #rows={}(stdv={}) -> #rows={}(stdv={})".format(
        target, median, prev_nrows, prev_stdv, len(X), compute_class_nrow_stdv(y, is_discrete=True)))
    _logger.debug("class hist: {} => {}".format(hist.items(), Counter(y).items()))
    return X, y
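
# A tiny sketch of how the rebalancing above picks its targets (illustrative
# only; `_rebalance_targets_demo` is hypothetical, not part of this module):
# classes below the median row count are over-sampled with SMOTEN, classes
# above it are under-sampled with RandomUnderSampler.
def _rebalance_targets_demo():
    from collections import Counter
    import numpy as np
    y = ['a'] * 2 + ['b'] * 10 + ['c'] * 40
    hist = dict(Counter(y).items())                        # {'a': 2, 'b': 10, 'c': 40}
    median = int(np.median([c for _, c in hist.items()]))  # 10
    over = [k for k, c in hist.items() if c < median]      # ['a'] -> SMOTEN
    under = [k for k, c in hist.items() if c > median]     # ['c'] -> RandomUnderSampler
    return median, over, under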
[((30, 10, 30, 24), 'repair.utils.setup_logger', 'setup_logger', ({}, {}), '()', False, 'from repair.utils import elapsed_time, get_option_value, setup_logger\n'), ((34, 10, 34, 81), 'collections.namedtuple', 'namedtuple', ({(34, 21, 34, 30): '"""_option"""', (34, 32, 34, 80): '"""key default_value type_class validator err_msg"""'}, {}), "('_option', 'key default_value type_class validator err_msg')", False, 'from collections import namedtuple\n'), ((142, 4, 142, 48), 'warnings.simplefilter', 'warnings.simplefilter', ({(142, 26, 142, 34): '"""ignore"""', (142, 36, 142, 47): 'UserWarning'}, {}), "('ignore', UserWarning)", False, 'import warnings\n'), ((278, 8, 278, 34), 'pandas.concat', 'pd.concat', ({(278, 18, 278, 33): '[X_notna, X_na]'}, {}), '([X_notna, X_na])', True, 'import pandas as pd\n'), ((279, 8, 279, 34), 'pandas.concat', 'pd.concat', ({(279, 18, 279, 33): '[y_notna, y_na]'}, {}), '([y_notna, y_na])', True, 'import pandas as pd\n'), ((95, 15, 95, 44), 'repair.utils.get_option_value', 'get_option_value', ({(95, 32, 95, 36): 'opts', (95, 38, 95, 43): '*args'}, {}), '(opts, *args)', False, 'from repair.utils import elapsed_time, get_option_value, setup_logger\n'), ((129, 12, 129, 39), 'copy.deepcopy', 'copy.deepcopy', ({(129, 26, 129, 38): 'fixed_params'}, {}), '(fixed_params)', False, 'import copy\n'), ((149, 22, 149, 58), 'hyperopt.hp.quniform', 'hp.quniform', ({(149, 34, 149, 46): '"""num_leaves"""', (149, 48, 149, 49): '(2)', (149, 51, 149, 54): '(100)', (149, 56, 149, 57): '(1)'}, {}), "('num_leaves', 2, 100, 1)", False, 'from hyperopt import hp, tpe, Trials\n'), ((150, 21, 150, 54), 'hyperopt.hp.uniform', 'hp.uniform', ({(150, 32, 150, 43): '"""subsample"""', (150, 45, 150, 48): '(0.5)', (150, 50, 150, 53): '(1.0)'}, {}), "('subsample', 0.5, 1.0)", False, 'from hyperopt import hp, tpe, Trials\n'), ((151, 26, 151, 65), 'hyperopt.hp.quniform', 'hp.quniform', ({(151, 38, 151, 54): '"""subsample_freq"""', (151, 56, 151, 57): '(1)', (151, 59, 151, 61): '(20)', (151, 63, 151, 64): '(1)'}, {}), "('subsample_freq', 1, 20, 1)", False, 'from hyperopt import hp, tpe, Trials\n'), ((152, 28, 152, 69), 'hyperopt.hp.uniform', 'hp.uniform', ({(152, 39, 152, 57): '"""colsample_bytree"""', (152, 59, 152, 63): '(0.01)', (152, 65, 152, 68): '(1.0)'}, {}), "('colsample_bytree', 0.01, 1.0)", False, 'from hyperopt import hp, tpe, Trials\n'), ((153, 29, 153, 71), 'hyperopt.hp.quniform', 'hp.quniform', ({(153, 41, 153, 60): '"""min_child_samples"""', (153, 62, 153, 63): '(1)', (153, 65, 153, 67): '(50)', (153, 69, 153, 70): '(1)'}, {}), "('min_child_samples', 1, 50, 1)", False, 'from hyperopt import hp, tpe, Trials\n'), ((154, 28, 154, 68), 'hyperopt.hp.loguniform', 'hp.loguniform', ({(154, 42, 154, 60): '"""min_child_weight"""', (154, 62, 154, 64): '(-3)', (154, 66, 154, 67): '(1)'}, {}), "('min_child_weight', -3, 1)", False, 'from hyperopt import hp, tpe, Trials\n'), ((155, 22, 155, 56), 'hyperopt.hp.loguniform', 'hp.loguniform', ({(155, 36, 155, 48): '"""reg_lambda"""', (155, 50, 155, 52): '(-2)', (155, 54, 155, 55): '(3)'}, {}), "('reg_lambda', -2, 3)", False, 'from hyperopt import hp, tpe, Trials\n'), ((160, 9, 160, 57), 'sklearn.model_selection.StratifiedKFold', 'StratifiedKFold', (), '', False, 'from sklearn.model_selection import cross_val_score, KFold, StratifiedKFold\n'), ((161, 13, 161, 51), 'sklearn.model_selection.KFold', 'KFold', (), '', False, 'from sklearn.model_selection import cross_val_score, KFold, StratifiedKFold\n'), ((188, 21, 188, 32), 'time.time', 'time.time', ({}, {}), '()', 
False, 'import time\n'), ((198, 17, 198, 25), 'hyperopt.Trials', 'Trials', ({}, {}), '()', False, 'from hyperopt import hp, tpe, Trials\n'), ((146, 4, 146, 25), 'logging.getLogger', 'getLogger', ({(146, 14, 146, 24): '"""hyperopt"""'}, {}), "('hyperopt')", False, 'from logging import getLogger, WARN\n'), ((171, 21, 172, 89), 'sklearn.model_selection.cross_val_score', 'cross_val_score', (), '', False, 'from sklearn.model_selection import cross_val_score, KFold, StratifiedKFold\n'), ((207, 19, 207, 44), 'numpy.random.RandomState', 'np.random.RandomState', ({(207, 41, 207, 43): '42'}, {}), '(42)', True, 'import numpy as np\n'), ((247, 16, 247, 26), 'collections.Counter', 'Counter', ({(247, 24, 247, 25): 'y'}, {}), '(y)', False, 'from collections import Counter\n'), ((261, 19, 261, 32), 'collections.Counter', 'Counter', ({(261, 27, 261, 31): 'y_na'}, {}), '(y_na)', False, 'from collections import Counter\n'), ((192, 17, 192, 28), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((292, 62, 292, 72), 'collections.Counter', 'Counter', ({(292, 70, 292, 71): 'y'}, {}), '(y)', False, 'from collections import Counter\n'), ((239, 49, 239, 59), 'collections.Counter', 'Counter', ({(239, 57, 239, 58): 'y'}, {}), '(y)', False, 'from collections import Counter\n')]
volzotan/django-howl
howl/roomsensor/urls.py
3b11c530da95d152844934da09592619b3d4497f
from django.conf.urls import patterns, url

from roomsensor import views

urlpatterns = patterns('',
    url(r'^$', views.index, name='roomsensor'),

    # ex: /roomsensor/name/
    url(r'^(?P<roomsensor_name>\w+)/$', views.display, name='roomsensor_display'),
    url(r'^(?P<roomsensor_name>\w+)/read/$', views.read, name='roomsensor_read'),

    # JSON data for graph creation
    url(r'^(?P<roomsensor_name>\w+)/rawdata/(?P<datapoints>\d+)/(?P<compression_factor>\d+)/$', views.rawdata, name='roomsensor_rawdata'),
)
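
# Example paths matched by the patterns above, assuming this URLconf is
# included under /roomsensor/ in the project urls.py (illustrative only):
#   /roomsensor/livingroom/               -> views.display(roomsensor_name='livingroom')
#   /roomsensor/livingroom/read/          -> views.read(roomsensor_name='livingroom')
#   /roomsensor/livingroom/rawdata/100/4/ -> views.rawdata(roomsensor_name='livingroom',
#                                                          datapoints='100', compression_factor='4')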
[((6, 4, 6, 46), 'django.conf.urls.url', 'url', (), '', False, 'from django.conf.urls import patterns, url\n'), ((9, 4, 9, 81), 'django.conf.urls.url', 'url', (), '', False, 'from django.conf.urls import patterns, url\n'), ((10, 4, 10, 80), 'django.conf.urls.url', 'url', (), '', False, 'from django.conf.urls import patterns, url\n'), ((13, 4, 13, 137), 'django.conf.urls.url', 'url', (), '', False, 'from django.conf.urls import patterns, url\n')]
vu-telab/DAKOTA-moga-post-processing-tool
main.py
2f41561bd8ca44c693e5994f7f68a1edc1a82361
# main.py
#
# currently just an example script I use to test my optimization_results module
#
# WARNING: design point numbers 0-indexed in pandas database, but
# eval_id column is the original 1-indexed value given by DAKOTA

import optimization_results as optr


def main():
    a4 = optr.MogaOptimizationResults()
    print(a4.gen_size_list)
    print(a4.pareto_front)
    assert a4.gen_size_list == [100, 94, 48, 45, 45, 46, 62, 85, 102, 108,
                                131, 130, 134, 119, 127, 128, 155, 124, 124, 130,
                                128, 123, 137, 135, 149, 165, 154, 164, 169, 177,
                                205, 196, 215, 185, 205, 190, 162, 158, 154, 159,
                                163, 183, 175, 183, 186, 188, 188, 186, 201, 213,
                                222]


### OLD MATLAB CODE I NEED TO REWORK ###
# # read force and atan accuracy objectives from
# # all_accuracy_objectives.dat
# A3 = load('all_accuracy_objectives.dat');
# completed_points = A3(:,1);
# force_objs = A3(:,2);
# atan_objs = A3(:,3);
# n3 = length(A3(:,1));


if __name__=='__main__':
    main()
[]
Rhodolite/Gem.py.UnitTest
Topaz/Core.py
eaa8b6855bcfbb12f67e7eb146928814543ef9d4
#
#   Copyright (c) 2017 Joy Diamond.  All rights reserved.
#
@gem('Topaz.Core')
def gem():
    require_gem('Gem.Global')

    from Gem import gem_global

    gem_global.testing = true

    require_gem('Gem.Cache2')
    require_gem('Gem.DumpCache')
    require_gem('Gem.GeneratedConjureQuadruple')
    require_gem('Gem.Map')
    require_gem('Gem.Method')
    require_gem('Gem.Path')
    require_gem('Gem.System')

    from Gem import create_cache, create_herd_2, create_horde_2, dump_cache_to_string, empty_herd
    from Gem import print_cache, produce_conjure_by_name__V2
    from Gem import produce_conjure_unique_dual, produce_conjure_unique_dual__21
    from Gem import produce_conjure_quadruple__4123
    from Gem import produce_conjure_unique_triple, produce_conjure_unique_triple__312
    from Gem import reference_count, values_tuple_sorted_by_key, write_binary_to_path

    share(
        #
        #   Imported functions
        #
        'create_cache',                       create_cache,
        'create_herd_2',                      create_herd_2,
        'create_horde_2',                     create_horde_2,
        'dump_cache_to_string',               dump_cache_to_string,
        'print_cache',                        print_cache,
        'produce_conjure_by_name__V2',        produce_conjure_by_name__V2,
        'produce_conjure_unique_dual__21',    produce_conjure_unique_dual__21,
        'produce_conjure_unique_dual',        produce_conjure_unique_dual,
        'produce_conjure_unique_dual',        produce_conjure_unique_dual,
        'produce_conjure_quadruple__4123',    produce_conjure_quadruple__4123,
        'produce_conjure_unique_triple__312', produce_conjure_unique_triple__312,
        'produce_conjure_unique_triple',      produce_conjure_unique_triple,
        'reference_count',                    reference_count,
        'values_tuple_sorted_by_key',         values_tuple_sorted_by_key,
        'write_binary_to_path',               write_binary_to_path,

        #
        #   Imported Values
        #
        'empty_herd',                         empty_herd,
    )
[]
kosovojs/wikibooster
app.py
70a9d9d7bf41be9fa5e58d40fba216d9b6df008d
import flask
from flask import Flask
from flask import jsonify
from flask import request
from flask_cors import CORS, cross_origin
from flask import render_template
import mwoauth
import requests_oauthlib
import os
import yaml
import mwapi

from tasks.main import Tasks
from save import Save
from db import DB
from typo.fix import TypoFix

app = Flask(__name__, static_folder="./frontend/build/static", template_folder="./frontend/build")
#app = Flask(__name__)
CORS(app)

user_agent = 'WikiBooster'

__dir__ = os.path.dirname(__file__)

configFile = open(os.path.join(__dir__, 'config.yaml'))
app.config.update(yaml.safe_load(configFile))


def authenticated_session(domain = 'meta.wikimedia.org'):
    if 'oauth_access_token' in flask.session:
        access_token = mwoauth.AccessToken(**flask.session['oauth_access_token'])
        auth = requests_oauthlib.OAuth1(client_key=app.config['CONSUMER_KEY'],
                                        client_secret=app.config['CONSUMER_SECRET'],
                                        resource_owner_key=access_token.key,
                                        resource_owner_secret=access_token.secret)
        return mwapi.Session(host='https://'+domain, auth=auth, user_agent=user_agent)
    else:
        return None


def getUserInfo(domain = 'meta.wikimedia.org'):
    session = authenticated_session(domain)

    if not session:
        return None, None, {'status':'error','message':'not logged in'}

    try:
        userinfo = session.get(action='query',
                               meta='userinfo',
                               uiprop=['groups', 'centralids'])['query']['userinfo']
        return True, session, {'status':'ok','username':userinfo['name']}
    except mwapi.errors.APIError as e:
        if e.code == 'mwoauth-invalid-authorization-invalid-user':
            # user is viewing a batch for a wiki where they do not have a local user account
            # treat as anonymous on the local wiki, but query Meta to find out if they’re a steward
            return None, None, {'status':'error','message':'server error'}
        else:
            raise e

    return None, None, {'status':'error','message':'server error'}


@app.route('/', methods=['GET'])
def index_page():
    return render_template('index.html')


#http://127.0.0.1:5000/task/lvwiki/1/Helēna Mārnija
@app.route('/task/<wiki>/<name>/<page>', methods=['GET'])
def getTaskResult(wiki,name,page):
    tasks = Tasks(wiki)
    articleInfo = tasks.getDataForTask(name,page)

    return jsonify(articleInfo)


@app.route('/testing', methods=['GET'])
def runTests():
    tasks = Tasks('lvwiki')
    articleInfo = tasks.runTests()

    return articleInfo


@app.route('/wikis', methods=['GET'])
def listWikis():
    db = DB()
    wikis = db.getAvailableWikis()

    return jsonify(wikis)


@app.route('/tasks/<wiki>', methods=['GET'])
def listJobs(wiki):
    db = DB()
    articles = db.getTasksForWiki(wiki)

    return jsonify(articles)


@app.route('/task/<wiki>/<task_id>/articles', methods=['GET'])
def listArticles(wiki,task_id):
    db = DB()
    articles = db.get_articles_for_task(wiki,task_id)

    return jsonify(articles)


# @app.route('/typo/<wiki>', methods=['GET'])
def listTypos(wiki):
    db = DB()
    typos = db.getTyposForWiki(wiki)

    return jsonify(typos)


@app.route('/typo/articles', methods=['GET'])
def typo_list_for_wiki():
    db = DB()
    wiki = 'lvwiki'
    typos = db.get_typo_articles(wiki)

    return jsonify(typos)


@app.route('/typo/fix/<article>', methods=['GET'])
def fix_typos(article):
    db = DB()
    typoFixer = TypoFix()
    res = typoFixer.getData('lvwiki', article, db)

    return jsonify(res)


@app.route('/rules/<wiki>', methods=['GET'])
def listRules(wiki):
    db = DB()
    rules = db.getRulesForWiki(wiki)

    return jsonify(rules)


@app.route('/save', methods=['POST'])
def doSave():
    req = request.get_json()
    wiki = req['wiki']
    domain = "{}.wikipedia.org".format(wiki)

    userStatus, session, respFromGettingUserInfo = getUserInfo(domain)

    if not userStatus:
        return jsonify(respFromGettingUserInfo)

    userName = respFromGettingUserInfo['username'] if 'username' in respFromGettingUserInfo else respFromGettingUserInfo['message']

    job = req['job']
    article = req['article']
    result = req['result']
    wikitext = req['wikitext']
    status = req['status']

    handlingSave = Save(session)
    respFromSave = handlingSave.saveArticle(job,article,result,wikitext,status,userName)

    return jsonify(respFromSave)


@app.route('/save_typo', methods=['POST'])
def doSaveTypo():
    req = request.get_json()
    wiki = req['wiki']
    domain = "{}.wikipedia.org".format(wiki.replace('wiki',''))

    userStatus, session, respFromGettingUserInfo = getUserInfo(domain)

    if not userStatus:
        return jsonify(respFromGettingUserInfo)

    userName = respFromGettingUserInfo['username'] if 'username' in respFromGettingUserInfo else respFromGettingUserInfo['message']

    active = req['active']
    case = req['case']
    comment = req['comment']
    dumpsearch = req['dumpsearch']
    minor = req['minor']
    name = req['name']
    regex = req['regex']
    replace_with = req['replace_with']
    search_for = req['search_for']
    test_cases = req['test_cases']
    whole = req['whole']
    id = req['id']

    db = DB()
    typoData = db.saveTypo(active,case,comment,dumpsearch,minor,name,regex,replace_with,search_for,test_cases,whole,wiki,userName,id)

    return jsonify({'status':'ok', 'info':typoData})


@app.route('/save_rule', methods=['POST'])
def saveRule():
    req = request.get_json()
    wiki = req['wiki']
    domain = "{}.wikipedia.org".format(wiki.replace('wiki',''))

    userStatus, session, respFromGettingUserInfo = getUserInfo(domain)

    if not userStatus:
        return jsonify(respFromGettingUserInfo)

    userName = respFromGettingUserInfo['username'] if 'username' in respFromGettingUserInfo else respFromGettingUserInfo['message']

    wiki = req['wiki']
    rule_name = req['rule_name']
    rule_object = req['rule_object']
    rule = req['rule']
    result = req['result']
    id = req['id']

    db = DB()
    db.saveRule(id, wiki, rule_name, rule_object, rule, result)

    return jsonify({'status':'ok'})


@app.route('/info', methods=['GET'])
def user_info():
    userStatus, _, respFromGettingUserInfo = getUserInfo()
    return jsonify(respFromGettingUserInfo)


@app.route('/login')
def login():
    consumer_token = mwoauth.ConsumerToken(app.config['CONSUMER_KEY'], app.config['CONSUMER_SECRET'])
    redirect, request_token = mwoauth.initiate('https://meta.wikimedia.org/w/index.php',
                                               consumer_token,
                                               user_agent=user_agent)
    flask.session['oauth_request_token'] = dict(zip(request_token._fields, request_token))
    return flask.redirect(redirect)


@app.route('/oauth-callback')
def oauth_callback():
    consumer_token = mwoauth.ConsumerToken(app.config['CONSUMER_KEY'], app.config['CONSUMER_SECRET'])
    request_token = mwoauth.RequestToken(**flask.session.pop('oauth_request_token'))
    access_token = mwoauth.complete('https://meta.wikimedia.org/w/index.php',
                                    consumer_token,
                                    request_token,
                                    flask.request.query_string,
                                    user_agent=user_agent)
    flask.session['oauth_access_token'] = dict(zip(access_token._fields, access_token))
    return flask.redirect(flask.url_for('index_page'))


@app.route('/logout')
def logout():
    """Log the user out by clearing their session."""
    flask.session.clear()
    return flask.redirect(flask.url_for('index_page'))


if __name__ == '__main__':
    app.run(debug=True)
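
# A minimal client-side sketch of the JSON endpoints above, assuming the dev
# server runs locally on Flask's default port and that `requests` is available
# (illustrative only; `_client_demo` is hypothetical, not part of this app):
def _client_demo():
    import requests
    base = 'http://127.0.0.1:5000'
    wikis = requests.get(base + '/wikis').json()         # available wikis
    tasks = requests.get(base + '/tasks/lvwiki').json()  # tasks for one wiki
    info = requests.get(base + '/info').json()           # login status / username
    return wikis, tasks, info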
[((20, 6, 20, 98), 'flask.Flask', 'Flask', (), '', False, 'from flask import Flask\n'), ((22, 0, 22, 9), 'flask_cors.CORS', 'CORS', ({(22, 5, 22, 8): 'app'}, {}), '(app)', False, 'from flask_cors import CORS, cross_origin\n'), ((26, 10, 26, 35), 'os.path.dirname', 'os.path.dirname', ({(26, 26, 26, 34): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((28, 18, 28, 54), 'os.path.join', 'os.path.join', ({(28, 31, 28, 38): '__dir__', (28, 40, 28, 53): '"""config.yaml"""'}, {}), "(__dir__, 'config.yaml')", False, 'import os\n'), ((29, 18, 29, 44), 'yaml.safe_load', 'yaml.safe_load', ({(29, 33, 29, 43): 'configFile'}, {}), '(configFile)', False, 'import yaml\n'), ((66, 8, 66, 37), 'flask.render_template', 'render_template', ({(66, 24, 66, 36): '"""index.html"""'}, {}), "('index.html')", False, 'from flask import render_template\n'), ((71, 9, 71, 20), 'tasks.main.Tasks', 'Tasks', ({(71, 15, 71, 19): 'wiki'}, {}), '(wiki)', False, 'from tasks.main import Tasks\n'), ((75, 8, 75, 28), 'flask.jsonify', 'jsonify', ({(75, 16, 75, 27): 'articleInfo'}, {}), '(articleInfo)', False, 'from flask import jsonify\n'), ((79, 9, 79, 24), 'tasks.main.Tasks', 'Tasks', ({(79, 15, 79, 23): '"""lvwiki"""'}, {}), "('lvwiki')", False, 'from tasks.main import Tasks\n'), ((87, 6, 87, 10), 'db.DB', 'DB', ({}, {}), '()', False, 'from db import DB\n'), ((89, 8, 89, 22), 'flask.jsonify', 'jsonify', ({(89, 16, 89, 21): 'wikis'}, {}), '(wikis)', False, 'from flask import jsonify\n'), ((93, 6, 93, 10), 'db.DB', 'DB', ({}, {}), '()', False, 'from db import DB\n'), ((95, 8, 95, 25), 'flask.jsonify', 'jsonify', ({(95, 16, 95, 24): 'articles'}, {}), '(articles)', False, 'from flask import jsonify\n'), ((99, 6, 99, 10), 'db.DB', 'DB', ({}, {}), '()', False, 'from db import DB\n'), ((101, 8, 101, 25), 'flask.jsonify', 'jsonify', ({(101, 16, 101, 24): 'articles'}, {}), '(articles)', False, 'from flask import jsonify\n'), ((106, 6, 106, 10), 'db.DB', 'DB', ({}, {}), '()', False, 'from db import DB\n'), ((108, 8, 108, 22), 'flask.jsonify', 'jsonify', ({(108, 16, 108, 21): 'typos'}, {}), '(typos)', False, 'from flask import jsonify\n'), ((113, 6, 113, 10), 'db.DB', 'DB', ({}, {}), '()', False, 'from db import DB\n'), ((116, 8, 116, 22), 'flask.jsonify', 'jsonify', ({(116, 16, 116, 21): 'typos'}, {}), '(typos)', False, 'from flask import jsonify\n'), ((120, 6, 120, 10), 'db.DB', 'DB', ({}, {}), '()', False, 'from db import DB\n'), ((121, 13, 121, 22), 'typo.fix.TypoFix', 'TypoFix', ({}, {}), '()', False, 'from typo.fix import TypoFix\n'), ((125, 8, 125, 20), 'flask.jsonify', 'jsonify', ({(125, 16, 125, 19): 'res'}, {}), '(res)', False, 'from flask import jsonify\n'), ((129, 6, 129, 10), 'db.DB', 'DB', ({}, {}), '()', False, 'from db import DB\n'), ((131, 8, 131, 22), 'flask.jsonify', 'jsonify', ({(131, 16, 131, 21): 'rules'}, {}), '(rules)', False, 'from flask import jsonify\n'), ((135, 7, 135, 25), 'flask.request.get_json', 'request.get_json', ({}, {}), '()', False, 'from flask import request\n'), ((151, 16, 151, 29), 'save.Save', 'Save', ({(151, 21, 151, 28): 'session'}, {}), '(session)', False, 'from save import Save\n'), ((154, 8, 154, 29), 'flask.jsonify', 'jsonify', ({(154, 16, 154, 28): 'respFromSave'}, {}), '(respFromSave)', False, 'from flask import jsonify\n'), ((158, 7, 158, 25), 'flask.request.get_json', 'request.get_json', ({}, {}), '()', False, 'from flask import request\n'), ((181, 6, 181, 10), 'db.DB', 'DB', ({}, {}), '()', False, 'from db import DB\n'), ((184, 8, 184, 49), 'flask.jsonify', 'jsonify', ({(184, 16, 
184, 48): "{'status': 'ok', 'info': typoData}"}, {}), "({'status': 'ok', 'info': typoData})", False, 'from flask import jsonify\n'), ((188, 7, 188, 25), 'flask.request.get_json', 'request.get_json', ({}, {}), '()', False, 'from flask import request\n'), ((205, 6, 205, 10), 'db.DB', 'DB', ({}, {}), '()', False, 'from db import DB\n'), ((208, 8, 208, 32), 'flask.jsonify', 'jsonify', ({(208, 16, 208, 31): "{'status': 'ok'}"}, {}), "({'status': 'ok'})", False, 'from flask import jsonify\n'), ((214, 8, 214, 40), 'flask.jsonify', 'jsonify', ({(214, 16, 214, 39): 'respFromGettingUserInfo'}, {}), '(respFromGettingUserInfo)', False, 'from flask import jsonify\n'), ((218, 18, 218, 98), 'mwoauth.ConsumerToken', 'mwoauth.ConsumerToken', ({(218, 40, 218, 66): "app.config['CONSUMER_KEY']", (218, 68, 218, 97): "app.config['CONSUMER_SECRET']"}, {}), "(app.config['CONSUMER_KEY'], app.config['CONSUMER_SECRET']\n )", False, 'import mwoauth\n'), ((219, 27, 219, 124), 'mwoauth.initiate', 'mwoauth.initiate', (), '', False, 'import mwoauth\n'), ((221, 8, 221, 32), 'flask.redirect', 'flask.redirect', ({(221, 23, 221, 31): 'redirect'}, {}), '(redirect)', False, 'import flask\n'), ((225, 18, 225, 98), 'mwoauth.ConsumerToken', 'mwoauth.ConsumerToken', ({(225, 40, 225, 66): "app.config['CONSUMER_KEY']", (225, 68, 225, 97): "app.config['CONSUMER_SECRET']"}, {}), "(app.config['CONSUMER_KEY'], app.config['CONSUMER_SECRET']\n )", False, 'import mwoauth\n'), ((227, 16, 227, 156), 'mwoauth.complete', 'mwoauth.complete', (), '', False, 'import mwoauth\n'), ((234, 1, 234, 22), 'flask.session.clear', 'flask.session.clear', ({}, {}), '()', False, 'import flask\n'), ((33, 17, 33, 75), 'mwoauth.AccessToken', 'mwoauth.AccessToken', ({}, {}), "(**flask.session['oauth_access_token'])", False, 'import mwoauth\n'), ((34, 9, 35, 89), 'requests_oauthlib.OAuth1', 'requests_oauthlib.OAuth1', (), '', False, 'import requests_oauthlib\n'), ((36, 9, 36, 80), 'mwapi.Session', 'mwapi.Session', (), '', False, 'import mwapi\n'), ((141, 9, 141, 41), 'flask.jsonify', 'jsonify', ({(141, 17, 141, 40): 'respFromGettingUserInfo'}, {}), '(respFromGettingUserInfo)', False, 'from flask import jsonify\n'), ((164, 9, 164, 41), 'flask.jsonify', 'jsonify', ({(164, 17, 164, 40): 'respFromGettingUserInfo'}, {}), '(respFromGettingUserInfo)', False, 'from flask import jsonify\n'), ((194, 9, 194, 41), 'flask.jsonify', 'jsonify', ({(194, 17, 194, 40): 'respFromGettingUserInfo'}, {}), '(respFromGettingUserInfo)', False, 'from flask import jsonify\n'), ((229, 23, 229, 50), 'flask.url_for', 'flask.url_for', ({(229, 37, 229, 49): '"""index_page"""'}, {}), "('index_page')", False, 'import flask\n'), ((235, 23, 235, 50), 'flask.url_for', 'flask.url_for', ({(235, 37, 235, 49): '"""index_page"""'}, {}), "('index_page')", False, 'import flask\n'), ((226, 40, 226, 80), 'flask.session.pop', 'flask.session.pop', ({(226, 58, 226, 79): '"""oauth_request_token"""'}, {}), "('oauth_request_token')", False, 'import flask\n')]
shelleyyyyu/few_shot
pre_embed.py
0fe54444e820fe3201927e6363682913b6d61028
import numpy as np
from collections import defaultdict, Counter
import random
import json
from tqdm import tqdm


def transX(dataset):
    rel2id = json.load(open(dataset + '/relation2ids'))
    ent2id = json.load(open(dataset + '/ent2ids'))

    with open('../Fast-TransX/' + dataset + '_base/entity2id.txt', 'w') as g1:
        num_ents = len(ent2id.keys())
        g1.write(str(num_ents) + '\n')
        for k, v in ent2id.items():
            g1.write(k + '\t' + str(v) + '\n')

    with open('../Fast-TransX/' + dataset + '_base/relation2id.txt', 'w') as g1:
        num_rels = len(rel2id.keys())
        g1.write(str(num_rels) + '\n')
        for k, v in rel2id.items():
            g1.write(k + '\t' + str(v) + '\n')

    file_name = dataset + '/path_graph'
    train_triples = []
    with open(file_name) as f:
        lines = f.readlines()
        for line in tqdm(lines):
            e1 = line.split('\t')[0]
            e2 = line.rstrip().split('\t')[2]
            rel = line.split('\t')[1]
            train_triples.append([e1,rel,e2])
            train_triples.append([e2,rel+'_inv',e1])

    with open('../Fast-TransX/' + dataset + '_base/train2id.txt', 'w') as g3:
        num_triples = len(train_triples)
        g3.write(str(num_triples) + '\n')
        for triple in train_triples:
            e1, rel, e2 = triple
            g3.write(str(ent2id[e1]) + '\t' + str(ent2id[e2]) + '\t' + str(rel2id[rel]) + '\n')


if __name__ == '__main__':
    transX('Wiki')
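
# Sketch of the Fast-TransX input files written by transX() (illustrative
# only, with made-up ids): each file starts with a row count, followed by
# tab-separated rows; every path_graph triple is also emitted with a
# synthetic '<rel>_inv' inverse relation (assuming those inverse relations
# already exist in relation2ids), so train2id.txt holds twice as many
# triples as path_graph has lines.
#
#   entity2id.txt    relation2id.txt    train2id.txt (e1_id, e2_id, rel_id)
#   -------------    ---------------    -----------------------------------
#   2                2                  2
#   Q1<TAB>0         P26<TAB>0          0<TAB>1<TAB>0
#   Q2<TAB>1         P26_inv<TAB>1      1<TAB>0<TAB>1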
[((30, 20, 30, 31), 'tqdm.tqdm', 'tqdm', ({(30, 25, 30, 30): 'lines'}, {}), '(lines)', False, 'from tqdm import tqdm\n')]
Xinverse/BOTC-Bot
botc/gamemodes/troublebrewing/FortuneTeller.py
1932c649c81a5a1eab735d7abdee0761c2853940
"""Contains the Fortune Teller Character class""" import json import random import discord import datetime from botc import Action, ActionTypes, Townsfolk, Character, Storyteller, RedHerring, \ RecurringAction, Category, StatusList from botc.BOTCUtils import GameLogic from ._utils import TroubleBrewing, TBRole import globvars with open('botc/gamemodes/troublebrewing/character_text.json') as json_file: character_text = json.load(json_file)[TBRole.fortuneteller.value.lower()] with open('botutils/bot_text.json') as json_file: bot_text = json.load(json_file) butterfly = bot_text["esthetics"]["butterfly"] with open('botc/game_text.json') as json_file: strings = json.load(json_file) fortune_teller_nightly = strings["gameplay"]["fortune_teller_nightly"] copyrights_str = strings["misc"]["copyrights"] yes = strings["gameplay"]["yes"] no = strings["gameplay"]["no"] good_link = strings["images"]["good"] evil_link = strings["images"]["evil"] class FortuneTeller(Townsfolk, TroubleBrewing, Character, RecurringAction): """Fortune Teller: Each night, choose 2 players: you learn if either is a Demon. There is 1 good player that registers falsely to you. ===== FORTUNE TELLER ===== true_self = fortune teller ego_self = fortune teller social_self = fortune teller commands: - read <player> and <player> initialize setup? -> NO initialize role? -> YES ----- First night START: override first night instruction? -> YES # default is to send instruction string only => Send query for "read" command ----- Regular night START: override regular night instruction? -> YES # default is to send nothing => Send query for "read" command """ def __init__(self): Character.__init__(self) TroubleBrewing.__init__(self) Townsfolk.__init__(self) self._desc_string = character_text["description"] self._examp_string = character_text["examples"] self._instr_string = character_text["instruction"] self._lore_string = character_text["lore"] self._brief_string = character_text["brief"] self._action = character_text["action"] self._art_link = "https://bloodontheclocktower.com/wiki/images/3/3a/Fortune_Teller_Token.png" self._art_link_cropped = "https://imgur.com/23ZXb1y.png" self._wiki_link = "https://bloodontheclocktower.com/wiki/Fortune_Teller" self._role_enum = TBRole.fortuneteller self._emoji = "<:tbfortuneteller:739317350733578280>" def create_n1_instr_str(self): """Create the instruction field on the opening dm card""" # First line is the character instruction string msg = f"{self.emoji} {self.instruction}" addendum = character_text["n1_addendum"] # Some characters have a line of addendum if addendum: with open("botutils/bot_text.json") as json_file: bot_text = json.load(json_file) scroll_emoji = bot_text["esthetics"]["scroll"] msg += f"\n{scroll_emoji} {addendum}" return msg def add_action_field_n1(self, embed_obj): """Send the stats list n1""" msg = self.action msg += globvars.master_state.game.create_sitting_order_stats_string() embed_obj.add_field(name = butterfly + " **「 Your Action 」**", value = msg, inline = False) return embed_obj def exec_init_role(self, setup): """Assign one of the townsfolks or outsiders as a red herring""" possibilities = setup.townsfolks + setup.outsiders chosen = random.choice(possibilities) chosen.add_status_effect(RedHerring(Storyteller(), chosen)) globvars.logging.info(f">>> Fortune Teller [exec_init_role] Set red herring to {str(chosen)}") def has_finished_night_action(self, player): """Return True if fortune teller has submitted the read action""" if player.is_alive(): current_phase_id = 
globvars.master_state.game._chrono.phase_id received_action = player.action_grid.retrieve_an_action(current_phase_id) return received_action is not None and received_action.action_type == ActionTypes.read return True @GameLogic.requires_two_targets @GameLogic.requires_different_targets @GameLogic.changes_not_allowed async def register_read(self, player, targets): """Read command""" # Must be 2 targets assert len(targets) == 2, "Received a number of targets different than 2 for fortune teller 'read'" action = Action(player, targets, ActionTypes.read, globvars.master_state.game._chrono.phase_id) player.action_grid.register_an_action(action, globvars.master_state.game._chrono.phase_id) msg = butterfly + " " + character_text["feedback"].format(targets[0].game_nametag, targets[1].game_nametag) await player.user.send(msg) async def exec_read(self, fortune_teller_player, read_player_1, read_player_2): """Execute the read action (night ability interaction)""" if fortune_teller_player.is_alive(): # Correct info if not fortune_teller_player.is_droisoned(): response = read_player_1.role.social_self.category == Category.demon or \ read_player_2.role.social_self.category == Category.demon or \ read_player_1.has_status_effect(StatusList.red_herring) or \ read_player_2.has_status_effect(StatusList.red_herring) # Droisoned info else: response = random.choice((True, False)) reply = yes if response else no link = evil_link if response else good_link recipient = fortune_teller_player.user msg = f"***{recipient.name}#{recipient.discriminator}***, the **{self.name}**:" msg += "\n" msg += self.emoji + " " + self.instruction msg += "\n" msg += fortune_teller_nightly.format(reply) embed = discord.Embed(description = msg) embed.set_thumbnail(url = link) embed.set_footer(text = copyrights_str) embed.timestamp = datetime.datetime.utcnow() try: await recipient.send(embed = embed) except discord.Forbidden: pass # If the fortune teller player is dead, then nothing is sent to them else: pass async def process_night_ability(self, player): """Process night actions for the fortune teller character. @player : the Fortune Teller player (Player object) """ phase = globvars.master_state.game._chrono.phase_id action = player.action_grid.retrieve_an_action(phase) # The Fortune teller has submitted an action. We call the execution function immediately if action: assert action.action_type == ActionTypes.read, f"Wrong action type {action} in fortune teller" targets = action.target_player read_player_1 = targets[0] read_player_2 = targets[1] await self.exec_read(player, read_player_1, read_player_2) # The fortune teller has not submitted an action. We will not randomize the action since # the reading ability is a "priviledged" ability else: pass
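
# The truth table behind exec_read(), as a standalone sketch (illustrative
# only; `_read_response_demo` is hypothetical, not part of the bot): a sober
# Fortune Teller sees "demon" if either target registers socially as a demon
# or carries the red-herring status; a droisoned one gets a fair coin flip.
def _read_response_demo(p1_is_demon, p2_is_demon, p1_herring, p2_herring, droisoned):
    import random
    if not droisoned:
        return p1_is_demon or p2_is_demon or p1_herring or p2_herring
    return random.choice((True, False))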
[((17, 15, 17, 35), 'json.load', 'json.load', ({(17, 25, 17, 34): 'json_file'}, {}), '(json_file)', False, 'import json\n'), ((21, 14, 21, 34), 'json.load', 'json.load', ({(21, 24, 21, 33): 'json_file'}, {}), '(json_file)', False, 'import json\n'), ((14, 21, 14, 41), 'json.load', 'json.load', ({(14, 31, 14, 40): 'json_file'}, {}), '(json_file)', False, 'import json\n'), ((59, 8, 59, 32), 'botc.Character.__init__', 'Character.__init__', ({(59, 27, 59, 31): 'self'}, {}), '(self)', False, 'from botc import Action, ActionTypes, Townsfolk, Character, Storyteller, RedHerring, RecurringAction, Category, StatusList\n'), ((61, 8, 61, 32), 'botc.Townsfolk.__init__', 'Townsfolk.__init__', ({(61, 27, 61, 31): 'self'}, {}), '(self)', False, 'from botc import Action, ActionTypes, Townsfolk, Character, Storyteller, RedHerring, RecurringAction, Category, StatusList\n'), ((97, 15, 97, 77), 'globvars.master_state.game.create_sitting_order_stats_string', 'globvars.master_state.game.create_sitting_order_stats_string', ({}, {}), '()', False, 'import globvars\n'), ((105, 17, 105, 45), 'random.choice', 'random.choice', ({(105, 31, 105, 44): 'possibilities'}, {}), '(possibilities)', False, 'import random\n'), ((126, 17, 126, 103), 'botc.Action', 'Action', ({(126, 24, 126, 30): 'player', (126, 32, 126, 39): 'targets', (126, 41, 126, 57): 'ActionTypes.read', (126, 59, 126, 102): 'globvars.master_state.game._chrono.phase_id'}, {}), '(player, targets, ActionTypes.read, globvars.master_state.game.\n _chrono.phase_id)', False, 'from botc import Action, ActionTypes, Townsfolk, Character, Storyteller, RedHerring, RecurringAction, Category, StatusList\n'), ((155, 20, 155, 52), 'discord.Embed', 'discord.Embed', (), '', False, 'import discord\n'), ((158, 30, 158, 56), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ({}, {}), '()', False, 'import datetime\n'), ((87, 27, 87, 47), 'json.load', 'json.load', ({(87, 37, 87, 46): 'json_file'}, {}), '(json_file)', False, 'import json\n'), ((106, 44, 106, 57), 'botc.Storyteller', 'Storyteller', ({}, {}), '()', False, 'from botc import Action, ActionTypes, Townsfolk, Character, Storyteller, RedHerring, RecurringAction, Category, StatusList\n'), ((143, 27, 143, 55), 'random.choice', 'random.choice', ({(143, 41, 143, 54): '(True, False)'}, {}), '((True, False))', False, 'import random\n')]
bjuvensjo/schmetterling
src/schmetterling/build/tests/test_maven.py
0cdbfe4f379a081d9d4711dd21866b90983365cf
from unittest.mock import call, MagicMock, patch

from schmetterling.build.maven import build_multi_modules
from schmetterling.build.maven import create_build_result
from schmetterling.build.maven import create_command
from schmetterling.build.maven import create_multi_modules
from schmetterling.build.maven import create_state
from schmetterling.build.maven import get_maven_infos
from schmetterling.build.maven import get_maven_repos
from schmetterling.build.maven import get_multi_modules
from schmetterling.build.state import BuildState, Build
from schmetterling.setup.state import Repo


def test_build_multi_modules():
    mm = [
        {'updated': 'updated1', 'pom_dir': 'pom_dir1', 'coordinates': 'coordinates1'},
        {'updated': 'updated2', 'pom_dir': 'pom_dir2', 'coordinates': 'coordinates2'},
    ]
    with patch('schmetterling.build.maven.create_command',
               return_value='create_command') as m_create_command, \
            patch('schmetterling.build.maven.run_command') as m_run_command, \
            patch('schmetterling.build.maven.create_build_result',
                  return_value=[['success_coordinates'],
                                ['failure_coordinates']]) as m_create_build_result:
        assert (
            ['success_coordinates', 'success_coordinates'],
            ['failure_coordinates', 'failure_coordinates'],
        ) == build_multi_modules(mm, 'repository_dir', 'settings_file', 'logback_file')
        assert [
            call('updated1', 'pom_dir1/mvn.log', 'repository_dir', 'settings_file', 'logback_file'),
            call('updated2', 'pom_dir2/mvn.log', 'repository_dir', 'settings_file', 'logback_file')
        ] == m_create_command.mock_calls
        assert [
            call('create_command', cwd='pom_dir1'),
            call('create_command', cwd='pom_dir2')
        ] == m_run_command.mock_calls
        assert [
            call('coordinates1', 'updated1', 'pom_dir1/mvn.log'),
            call('coordinates2', 'updated2', 'pom_dir2/mvn.log')
        ] == m_create_build_result.mock_calls


def test_create_command():
    assert str('mvn -Dmaven.repo.local=repository '
               '-s settings.xml '
               '-DcreateChecksum=true '
               '-Dfile.encoding=UTF-8 '
               '-Dsun.jnu.encoding=UTF-8 '
               '-Dlogback.configurationFile=logback.xml '
               '-B -amd -pl mygroup:app.admin,mygroup:app.sign '
               'clean install javadoc:jar source:jar '
               '--fail-at-end | tee mvn.log') == create_command(
        [{'artifact_id': 'app.admin', 'group_id': 'mygroup'},
         {'artifact_id': 'app.sign', 'group_id': 'mygroup'}],
        'mvn.log', 'repository', 'settings.xml', 'logback.xml')


@patch('schmetterling.build.maven.get_summary',
       return_value=(['mygroup:app.admin'], ['app.sign']))
def test_create_build_result(mock_get_summary):
    assert (
        [
            {'artifact_id': 'app.admin', 'group_id': 'mygroup'},
        ],
        [
            {'artifact_id': 'app.sign', 'group_id': 'mygroup'},
            {'artifact_id': 'pipeline.env', 'group_id': 'mygroup'},
        ],
    ) == create_build_result(
        [
            {'artifact_id': 'app.admin', 'group_id': 'mygroup'},
            {'artifact_id': 'app.sign', 'group_id': 'mygroup'},
            {'artifact_id': 'pipeline.env', 'group_id': 'mygroup'},
            {'artifact_id': 'xml.ws', 'group_id': 'mygroup'},
        ],
        [
            {'artifact_id': 'app.admin', 'group_id': 'mygroup'},
            {'artifact_id': 'app.sign', 'group_id': 'mygroup'},
            {'artifact_id': 'pipeline.env', 'group_id': 'mygroup'},
        ],
        'mvn.log',
    )


def test_create_multi_modules():
    with patch('schmetterling.build.maven.makedirs') as m, patch(
            'schmetterling.build.maven.open') as o:
        f = MagicMock()
        o.return_value = MagicMock(__enter__=MagicMock(return_value=f))
        create_multi_modules([
            {'pom_dir': 'pd1', 'pom_content': 'pc1'},
            {'pom_dir': 'pd2', 'pom_content': 'pc2'},
        ])
        assert [call('pd1', exist_ok=True), call('pd2', exist_ok=True)] == m.mock_calls
        assert [call.write('pc1'), call.write('pc2')] == f.mock_calls


def test_create_state():
    state = BuildState('schmetterling.build.maven', [
        Build('mygroup', 'app.admin', '0.0.1-SNAPSHOT', 'app.admin', Build.SUCCESS, 1),
        Build('mygroup', 'pipeline-apache-proxy', '1.0.0-SNAPSHOT', 'pipeline-apache-proxy', Build.FAILURE, 1),
    ])
    assert state == create_state(
        [],
        [{'pom_path': 'app.admin/pom.xml',
          'artifact_id': 'app.admin',
          'group_id': 'mygroup',
          'version': '0.0.1-SNAPSHOT',
          'packaging': 'jar'}],
        [{'pom_path': 'pipeline-apache-proxy/pom.xml',
          'artifact_id': 'pipeline-apache-proxy',
          'group_id': 'mygroup',
          'version': '1.0.0-SNAPSHOT',
          'packaging': 'jar'}],
        1,
    )


def test_get_maven_info():
    with patch('schmetterling.build.maven.get_pom_info', side_effect=lambda x: x):
        repos = [
            MagicMock(status=Repo.STATUS_UPDATED, path='path1'),
            MagicMock(status=Repo.STATUS_UNCHANGED, path='path2'),
        ]
        assert [(True, 'path1/pom.xml'), (False, 'path2/pom.xml')] == get_maven_infos(repos)


def test_get_maven_repos():
    with patch('schmetterling.build.maven.isinstance', return_value=True):
        with patch('schmetterling.build.maven.exists', side_effect=[False, True]):
            m = MagicMock(path='pom_repo', return_value='pom_repo')
            state = [MagicMock(repos=[
                MagicMock(path='non_pom_repo'),
                m,
            ])]
            assert [m] == get_maven_repos(state)


def test_get_multi_modules():
    with patch('schmetterling.build.maven.get_pom', return_value='pom_content'):
        assert [] == get_multi_modules([(False, {})], 'build_dir')
        assert [{
            'coordinates': [{}],
            'pom_content': 'pom_content',
            'pom_dir': 'build_dir/jar-modules',
            'updated': [{}]
        }] == get_multi_modules([(True, {})], 'build_dir')
        assert [{
            'coordinates': [{'packaging': 'jar'}],
            'pom_content': 'pom_content',
            'pom_dir': 'build_dir/jar-modules',
            'updated': [{'packaging': 'jar'}]
        }] == get_multi_modules([(True, {'packaging': 'jar'})], 'build_dir')
        assert [{
            'coordinates': [{'artifact_id': 'super-pom', 'packaging': 'pom'}],
            'pom_content': 'pom_content',
            'pom_dir': 'build_dir/super-pom-modules',
            'updated': [{'artifact_id': 'super-pom', 'packaging': 'pom'}]
        }] == get_multi_modules([(True, {'artifact_id': 'super-pom', 'packaging': 'pom'})], 'build_dir')
        assert [{
            'coordinates': [{'artifact_id': 'pom', 'packaging': 'pom'}],
            'pom_content': 'pom_content',
            'pom_dir': 'build_dir/pom-pom-modules',
            'updated': [{'artifact_id': 'pom', 'packaging': 'pom'}]
        }] == get_multi_modules([(True, {'artifact_id': 'pom', 'packaging': 'pom'})], 'build_dir')
        assert [{
            'coordinates': [{'artifact_id': 'x', 'packaging': 'x'}],
            'pom_content': 'pom_content',
            'pom_dir': 'build_dir/other-modules',
            'updated': [{'artifact_id': 'x', 'packaging': 'x'}]
        }] == get_multi_modules([(True, {'artifact_id': 'x', 'packaging': 'x'})], 'build_dir')
        assert [{
            'coordinates': [{'artifact_id': 'war', 'packaging': 'war'}],
            'pom_content': 'pom_content',
            'pom_dir': 'build_dir/war-modules',
            'updated': [{'artifact_id': 'war', 'packaging': 'war'}]
        }] == get_multi_modules([(True, {'artifact_id': 'war', 'packaging': 'war'})], 'build_dir')
        assert [{
            'coordinates': [{'artifact_id': 'jar1', 'packaging': 'jar'},
                            {'artifact_id': 'jar2'},
                            {'artifact_id': 'jar3'}],
            'pom_content': 'pom_content',
            'pom_dir': 'build_dir/jar-modules',
            'updated': [{'artifact_id': 'jar1', 'packaging': 'jar'},
                        {'artifact_id': 'jar2'}]
        }, {
            'coordinates': [{'artifact_id': 'war', 'packaging': 'war'}],
            'pom_content': 'pom_content',
            'pom_dir': 'build_dir/war-modules',
            'updated': [{'artifact_id': 'war', 'packaging': 'war'}]
        }] == get_multi_modules([(True, {'artifact_id': 'jar1', 'packaging': 'jar'}),
                                 (True, {'artifact_id': 'jar2'}),
                                 (False, {'artifact_id': 'jar3'}),
                                 (True, {'artifact_id': 'war', 'packaging': 'war'})], 'build_dir')
[((73, 1, 75, 55), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import call, MagicMock, patch\n'), ((28, 9, 30, 42), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import call, MagicMock, patch\n'), ((30, 64, 31, 48), 'unittest.mock.patch', 'patch', ({(31, 8, 31, 47): '"""schmetterling.build.maven.run_command"""'}, {}), "('schmetterling.build.maven.run_command')", False, 'from unittest.mock import call, MagicMock, patch\n'), ((31, 67, 35, 11), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import call, MagicMock, patch\n'), ((63, 49, 70, 67), 'schmetterling.build.maven.create_command', 'create_command', ({(64, 8, 70, 10): "[{'artifact_id': 'app.admin', 'group_id': 'mygroup'}, {'artifact_id':\n 'app.sign', 'group_id': 'mygroup'}]", (70, 12, 70, 21): '"""mvn.log"""', (70, 23, 70, 35): '"""repository"""', (70, 37, 70, 51): '"""settings.xml"""', (70, 53, 70, 66): '"""logback.xml"""'}, {}), "([{'artifact_id': 'app.admin', 'group_id': 'mygroup'}, {\n 'artifact_id': 'app.sign', 'group_id': 'mygroup'}], 'mvn.log',\n 'repository', 'settings.xml', 'logback.xml')", False, 'from schmetterling.build.maven import create_command\n'), ((94, 16, 128, 5), 'schmetterling.build.maven.create_build_result', 'create_build_result', ({(95, 8, 112, 9): "[{'artifact_id': 'app.admin', 'group_id': 'mygroup'}, {'artifact_id':\n 'app.sign', 'group_id': 'mygroup'}, {'artifact_id': 'pipeline.env',\n 'group_id': 'mygroup'}, {'artifact_id': 'xml.ws', 'group_id': 'mygroup'}]", (113, 8, 126, 9): "[{'artifact_id': 'app.admin', 'group_id': 'mygroup'}, {'artifact_id':\n 'app.sign', 'group_id': 'mygroup'}, {'artifact_id': 'pipeline.env',\n 'group_id': 'mygroup'}]", (127, 8, 127, 17): '"""mvn.log"""'}, {}), "([{'artifact_id': 'app.admin', 'group_id': 'mygroup'}, {\n 'artifact_id': 'app.sign', 'group_id': 'mygroup'}, {'artifact_id':\n 'pipeline.env', 'group_id': 'mygroup'}, {'artifact_id': 'xml.ws',\n 'group_id': 'mygroup'}], [{'artifact_id': 'app.admin', 'group_id':\n 'mygroup'}, {'artifact_id': 'app.sign', 'group_id': 'mygroup'}, {\n 'artifact_id': 'pipeline.env', 'group_id': 'mygroup'}], 'mvn.log')", False, 'from schmetterling.build.maven import create_build_result\n'), ((132, 9, 132, 52), 'unittest.mock.patch', 'patch', ({(132, 15, 132, 51): '"""schmetterling.build.maven.makedirs"""'}, {}), "('schmetterling.build.maven.makedirs')", False, 'from unittest.mock import call, MagicMock, patch\n'), ((132, 59, 133, 45), 'unittest.mock.patch', 'patch', ({(133, 12, 133, 44): '"""schmetterling.build.maven.open"""'}, {}), "('schmetterling.build.maven.open')", False, 'from unittest.mock import call, MagicMock, patch\n'), ((134, 12, 134, 23), 'unittest.mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from unittest.mock import call, MagicMock, patch\n'), ((137, 8, 146, 10), 'schmetterling.build.maven.create_multi_modules', 'create_multi_modules', ({(137, 29, 146, 9): "[{'pom_dir': 'pd1', 'pom_content': 'pc1'}, {'pom_dir': 'pd2', 'pom_content':\n 'pc2'}]"}, {}), "([{'pom_dir': 'pd1', 'pom_content': 'pc1'}, {'pom_dir':\n 'pd2', 'pom_content': 'pc2'}])", False, 'from schmetterling.build.maven import create_multi_modules\n'), ((160, 20, 177, 5), 'schmetterling.build.maven.create_state', 'create_state', ({(161, 8, 161, 10): '[]', (162, 8, 168, 10): "[{'pom_path': 'app.admin/pom.xml', 'artifact_id': 'app.admin', 'group_id':\n 'mygroup', 'version': '0.0.1-SNAPSHOT', 'packaging': 'jar'}]", (169, 8, 175, 10): "[{'pom_path': 'pipeline-apache-proxy/pom.xml', 'artifact_id':\n 
'pipeline-apache-proxy', 'group_id': 'mygroup', 'version':\n '1.0.0-SNAPSHOT', 'packaging': 'jar'}]", (176, 8, 176, 9): '(1)'}, {}), "([], [{'pom_path': 'app.admin/pom.xml', 'artifact_id':\n 'app.admin', 'group_id': 'mygroup', 'version': '0.0.1-SNAPSHOT',\n 'packaging': 'jar'}], [{'pom_path': 'pipeline-apache-proxy/pom.xml',\n 'artifact_id': 'pipeline-apache-proxy', 'group_id': 'mygroup',\n 'version': '1.0.0-SNAPSHOT', 'packaging': 'jar'}], 1)", False, 'from schmetterling.build.maven import create_state\n'), ((181, 9, 181, 81), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import call, MagicMock, patch\n'), ((191, 9, 191, 73), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import call, MagicMock, patch\n'), ((202, 9, 202, 79), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import call, MagicMock, patch\n'), ((39, 20, 39, 94), 'schmetterling.build.maven.build_multi_modules', 'build_multi_modules', ({(39, 40, 39, 42): 'mm', (39, 44, 39, 60): '"""repository_dir"""', (39, 62, 39, 77): '"""settings_file"""', (39, 79, 39, 93): '"""logback_file"""'}, {}), "(mm, 'repository_dir', 'settings_file', 'logback_file')", False, 'from schmetterling.build.maven import build_multi_modules\n'), ((155, 27, 156, 50), 'schmetterling.build.state.Build', 'Build', ({(155, 33, 155, 42): '"""mygroup"""', (155, 44, 155, 55): '"""app.admin"""', (155, 57, 155, 73): '"""0.0.1-SNAPSHOT"""', (155, 75, 155, 86): '"""app.admin"""', (156, 33, 156, 46): 'Build.SUCCESS', (156, 48, 156, 49): '1'}, {}), "('mygroup', 'app.admin', '0.0.1-SNAPSHOT', 'app.admin', Build.SUCCESS, 1)", False, 'from schmetterling.build.state import BuildState, Build\n'), ((157, 27, 158, 75), 'schmetterling.build.state.Build', 'Build', ({(157, 33, 157, 42): '"""mygroup"""', (157, 44, 157, 67): '"""pipeline-apache-proxy"""', (157, 69, 157, 85): '"""1.0.0-SNAPSHOT"""', (158, 33, 158, 56): '"""pipeline-apache-proxy"""', (158, 58, 158, 71): 'Build.FAILURE', (158, 73, 158, 74): '1'}, {}), "('mygroup', 'pipeline-apache-proxy', '1.0.0-SNAPSHOT',\n 'pipeline-apache-proxy', Build.FAILURE, 1)", False, 'from schmetterling.build.state import BuildState, Build\n'), ((183, 12, 183, 63), 'unittest.mock.MagicMock', 'MagicMock', (), '', False, 'from unittest.mock import call, MagicMock, patch\n'), ((184, 12, 184, 65), 'unittest.mock.MagicMock', 'MagicMock', (), '', False, 'from unittest.mock import call, MagicMock, patch\n'), ((187, 45, 187, 67), 'schmetterling.build.maven.get_maven_infos', 'get_maven_infos', ({(187, 61, 187, 66): 'repos'}, {}), '(repos)', False, 'from schmetterling.build.maven import get_maven_infos\n'), ((192, 13, 192, 81), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import call, MagicMock, patch\n'), ((193, 16, 193, 67), 'unittest.mock.MagicMock', 'MagicMock', (), '', False, 'from unittest.mock import call, MagicMock, patch\n'), ((203, 21, 203, 66), 'schmetterling.build.maven.get_multi_modules', 'get_multi_modules', ({(203, 39, 203, 52): '[(False, {})]', (203, 54, 203, 65): '"""build_dir"""'}, {}), "([(False, {})], 'build_dir')", False, 'from schmetterling.build.maven import get_multi_modules\n'), ((209, 14, 209, 58), 'schmetterling.build.maven.get_multi_modules', 'get_multi_modules', ({(209, 32, 209, 44): '[(True, {})]', (209, 46, 209, 57): '"""build_dir"""'}, {}), "([(True, {})], 'build_dir')", False, 'from schmetterling.build.maven import get_multi_modules\n'), ((219, 14, 221, 25), 'schmetterling.build.maven.get_multi_modules', 'get_multi_modules', ({(219, 32, 221, 
11): "[(True, {'packaging': 'jar'})]", (221, 13, 221, 24): '"""build_dir"""'}, {}), "([(True, {'packaging': 'jar'})], 'build_dir')", False, 'from schmetterling.build.maven import get_multi_modules\n'), ((235, 14, 238, 25), 'schmetterling.build.maven.get_multi_modules', 'get_multi_modules', ({(235, 32, 238, 11): "[(True, {'artifact_id': 'super-pom', 'packaging': 'pom'})]", (238, 13, 238, 24): '"""build_dir"""'}, {}), "([(True, {'artifact_id': 'super-pom', 'packaging': 'pom'})\n ], 'build_dir')", False, 'from schmetterling.build.maven import get_multi_modules\n'), ((250, 14, 253, 25), 'schmetterling.build.maven.get_multi_modules', 'get_multi_modules', ({(250, 32, 253, 11): "[(True, {'artifact_id': 'pom', 'packaging': 'pom'})]", (253, 13, 253, 24): '"""build_dir"""'}, {}), "([(True, {'artifact_id': 'pom', 'packaging': 'pom'})],\n 'build_dir')", False, 'from schmetterling.build.maven import get_multi_modules\n'), ((265, 14, 268, 25), 'schmetterling.build.maven.get_multi_modules', 'get_multi_modules', ({(265, 32, 268, 11): "[(True, {'artifact_id': 'x', 'packaging': 'x'})]", (268, 13, 268, 24): '"""build_dir"""'}, {}), "([(True, {'artifact_id': 'x', 'packaging': 'x'})], 'build_dir'\n )", False, 'from schmetterling.build.maven import get_multi_modules\n'), ((280, 14, 283, 25), 'schmetterling.build.maven.get_multi_modules', 'get_multi_modules', ({(280, 32, 283, 11): "[(True, {'artifact_id': 'war', 'packaging': 'war'})]", (283, 13, 283, 24): '"""build_dir"""'}, {}), "([(True, {'artifact_id': 'war', 'packaging': 'war'})],\n 'build_dir')", False, 'from schmetterling.build.maven import get_multi_modules\n'), ((314, 14, 324, 25), 'schmetterling.build.maven.get_multi_modules', 'get_multi_modules', ({(314, 32, 324, 11): "[(True, {'artifact_id': 'jar1', 'packaging': 'jar'}), (True, {'artifact_id':\n 'jar2'}), (False, {'artifact_id': 'jar3'}), (True, {'artifact_id':\n 'war', 'packaging': 'war'})]", (324, 13, 324, 24): '"""build_dir"""'}, {}), "([(True, {'artifact_id': 'jar1', 'packaging': 'jar'}), (\n True, {'artifact_id': 'jar2'}), (False, {'artifact_id': 'jar3'}), (True,\n {'artifact_id': 'war', 'packaging': 'war'})], 'build_dir')", False, 'from schmetterling.build.maven import get_multi_modules\n'), ((41, 19, 41, 106), 'unittest.mock.call', 'call', ({(41, 24, 41, 34): '"""updated1"""', (41, 36, 41, 54): '"""pom_dir1/mvn.log"""', (41, 56, 41, 72): '"""repository_dir"""', (41, 74, 41, 89): '"""settings_file"""', (41, 91, 41, 105): '"""logback_file"""'}, {}), "('updated1', 'pom_dir1/mvn.log', 'repository_dir', 'settings_file',\n 'logback_file')", False, 'from unittest.mock import call, MagicMock, patch\n'), ((42, 19, 42, 106), 'unittest.mock.call', 'call', ({(42, 24, 42, 34): '"""updated2"""', (42, 36, 42, 54): '"""pom_dir2/mvn.log"""', (42, 56, 42, 72): '"""repository_dir"""', (42, 74, 42, 89): '"""settings_file"""', (42, 91, 42, 105): '"""logback_file"""'}, {}), "('updated2', 'pom_dir2/mvn.log', 'repository_dir', 'settings_file',\n 'logback_file')", False, 'from unittest.mock import call, MagicMock, patch\n'), ((45, 19, 45, 57), 'unittest.mock.call', 'call', (), '', False, 'from unittest.mock import call, MagicMock, patch\n'), ((46, 19, 46, 57), 'unittest.mock.call', 'call', (), '', False, 'from unittest.mock import call, MagicMock, patch\n'), ((49, 19, 49, 71), 'unittest.mock.call', 'call', ({(49, 24, 49, 38): '"""coordinates1"""', (49, 40, 49, 50): '"""updated1"""', (49, 52, 49, 70): '"""pom_dir1/mvn.log"""'}, {}), "('coordinates1', 'updated1', 'pom_dir1/mvn.log')", False, 'from unittest.mock import 
call, MagicMock, patch\n'), ((50, 19, 50, 71), 'unittest.mock.call', 'call', ({(50, 24, 50, 38): '"""coordinates2"""', (50, 40, 50, 50): '"""updated2"""', (50, 52, 50, 70): '"""pom_dir2/mvn.log"""'}, {}), "('coordinates2', 'updated2', 'pom_dir2/mvn.log')", False, 'from unittest.mock import call, MagicMock, patch\n'), ((135, 45, 135, 70), 'unittest.mock.MagicMock', 'MagicMock', (), '', False, 'from unittest.mock import call, MagicMock, patch\n'), ((147, 16, 147, 42), 'unittest.mock.call', 'call', (), '', False, 'from unittest.mock import call, MagicMock, patch\n'), ((148, 16, 148, 42), 'unittest.mock.call', 'call', (), '', False, 'from unittest.mock import call, MagicMock, patch\n'), ((149, 16, 149, 33), 'unittest.mock.call.write', 'call.write', ({(149, 27, 149, 32): '"""pc1"""'}, {}), "('pc1')", False, 'from unittest.mock import call, MagicMock, patch\n'), ((149, 35, 149, 52), 'unittest.mock.call.write', 'call.write', ({(149, 46, 149, 51): '"""pc2"""'}, {}), "('pc2')", False, 'from unittest.mock import call, MagicMock, patch\n'), ((198, 26, 198, 48), 'schmetterling.build.maven.get_maven_repos', 'get_maven_repos', ({(198, 42, 198, 47): 'state'}, {}), '(state)', False, 'from schmetterling.build.maven import get_maven_repos\n'), ((195, 16, 195, 46), 'unittest.mock.MagicMock', 'MagicMock', (), '', False, 'from unittest.mock import call, MagicMock, patch\n')]
FdelMazo/7540rw-Algo1
Copados y Clases/Mastermind_DEBUG.py
8900604873195df9e902ead6bcb67723a8b654c8
# Remove the lines marked DEBUG for the game to work properly
import random

DIGITOS = 4


def mastermind():
    """Main function of the Mastermind game"""
    print("Welcome to Mastermind!")
    print("Instructions: You have to guess a code of {} distinct digits. "
          "Your number of hits is how many digits are correctly positioned; "
          "your number of matches is how many digits are right but misplaced. "
          "Good luck!".format(DIGITOS))
    codigo = elegir_codigo()
    intentos = 1
    propuesta = input("What code do you propose? (or type 'I quit') ")
    retirarse = "I quit"
    while propuesta != codigo and propuesta != retirarse:
        intentos += 1
        aciertos, coincidencias = analizar_propuesta(propuesta, codigo)
        print("Your guess ({}) has {} hits and {} matches.".format(propuesta, aciertos, coincidencias))
        propuesta = input("Propose another code: ")
    if propuesta == retirarse:
        print("The code was: {}".format(codigo))
    else:
        print("You won! You won in {} attempts".format(intentos))


def elegir_codigo():
    """Choose a random code of DIGITOS distinct digits"""
    digitos = ("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")
    codigo = ""
    for i in range(DIGITOS):
        candidato = random.choice(digitos)
        print("[DEBUG] candidate:", candidato)
        while candidato in codigo:
            candidato = random.choice(digitos)
        codigo = codigo + candidato
        print("[DEBUG] the code so far is", codigo)
    return codigo


def analizar_propuesta(propuesta, codigo):
    """Determine hits (aciertos) and matches (coincidencias)"""
    aciertos = 0
    coincidencias = 0
    for i in range(DIGITOS):
        if propuesta[i] == codigo[i]:
            aciertos += 1
        elif propuesta[i] in codigo:
            coincidencias += 1
    return aciertos, coincidencias


mastermind()
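# Worked example (a sketch, kept as comments because the module calls
# mastermind() at import time):
#
#     analizar_propuesta("1243", "1234")  ->  (2, 2)
#
# '1' and '2' sit in the right positions (2 hits), while '4' and '3' both
# appear in the code but are swapped (2 matches).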
[((29, 14, 29, 36), 'random.choice', 'random.choice', ({(29, 28, 29, 35): 'digitos'}, {}), '(digitos)', False, 'import random\n'), ((32, 15, 32, 37), 'random.choice', 'random.choice', ({(32, 29, 32, 36): 'digitos'}, {}), '(digitos)', False, 'import random\n')]
ovnicraft/runa
setup.py
4834b7467314c51c3e8e010b47a10bdfae597a5b
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""The setup script."""

from setuptools import setup, find_packages

with open("README.rst") as readme_file:
    readme = readme_file.read()

with open("HISTORY.rst") as history_file:
    history = history_file.read()

requirements = ["Click>=6.0", "suds2==0.7.1"]

setup_requirements = [
    # TODO(ovnicraft): put setup requirements (distutils extensions, etc.) here
]

test_requirements = [
    # TODO: put package test requirements here
]

setup(
    name="runa",
    version="0.2.10",
    description="Librería para uso de WS del Bus Gubernamental de Ecuador",
    long_description=readme + "\n\n" + history,
    author="Cristian Salamea",
    author_email="[email protected]",
    url="https://github.com/ovnicraft/runa",
    packages=find_packages(include=["runa"]),
    entry_points={"console_scripts": ["runa=runa.cli:main"]},
    include_package_data=True,
    install_requires=requirements,
    license="MIT license",
    zip_safe=False,
    keywords="runa webservices ecuador bgs",
    classifiers=[
        # "3 - Beta" is not a valid trove classifier; the beta stage is "4 - Beta"
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Natural Language :: English",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
    ],
    test_suite="tests",
    tests_require=test_requirements,
)
[((32, 13, 32, 44), 'setuptools.find_packages', 'find_packages', (), '', False, 'from setuptools import setup, find_packages\n')]
RichardA1/Adafruit_Learning_System_Guides
PyPortal_User_Interface/code.py
7d06d8a126f357a431384c3af73339cb46f44c19
import time
import board
import displayio
import busio
from analogio import AnalogIn
import neopixel
import adafruit_adt7410
from adafruit_bitmap_font import bitmap_font
from adafruit_display_text.label import Label
from adafruit_button import Button
import adafruit_touchscreen
from adafruit_pyportal import PyPortal

# ------------- Inputs and Outputs Setup ------------- #
# init. the temperature sensor
i2c_bus = busio.I2C(board.SCL, board.SDA)
adt = adafruit_adt7410.ADT7410(i2c_bus, address=0x48)
adt.high_resolution = True

# init. the light sensor
light_sensor = AnalogIn(board.LIGHT)

pixel = neopixel.NeoPixel(board.NEOPIXEL, 1, brightness=1)
WHITE = 0xffffff
RED = 0xff0000
YELLOW = 0xffff00
GREEN = 0x00ff00
BLUE = 0x0000ff
PURPLE = 0xff00ff
BLACK = 0x000000

# ---------- Sound Effects ------------- #
soundDemo = '/sounds/sound.wav'
soundBeep = '/sounds/beep.wav'
soundTab = '/sounds/tab.wav'

# ------------- Other Helper Functions ------------- #
# Helper for cycling through a number set of 1 to x.
def numberUP(num, max_val):
    num += 1
    if num <= max_val:
        return num
    else:
        return 1

# ------------- Screen Setup ------------- #
pyportal = PyPortal()
display = board.DISPLAY
display.rotation = 270

# Backlight function
# Value between 0 and 1 where 0 is OFF, 0.5 is 50% and 1 is 100% brightness.
def set_backlight(val):
    val = max(0, min(1.0, val))
    board.DISPLAY.auto_brightness = False
    board.DISPLAY.brightness = val

# Set the Backlight
set_backlight(0.3)

# Touchscreen setup
# ------ Rotate 270:
screen_width = 240
screen_height = 320
ts = adafruit_touchscreen.Touchscreen(board.TOUCH_YD, board.TOUCH_YU,
                                      board.TOUCH_XR, board.TOUCH_XL,
                                      calibration=((5200, 59000), (5800, 57000)),
                                      size=(screen_width, screen_height))

# ------------- Display Groups ------------- #
splash = displayio.Group(max_size=15)  # The Main Display Group
view1 = displayio.Group(max_size=15)  # Group for View 1 objects
view2 = displayio.Group(max_size=15)  # Group for View 2 objects
view3 = displayio.Group(max_size=15)  # Group for View 3 objects

def hideLayer(hide_target):
    try:
        splash.remove(hide_target)
    except ValueError:
        pass

def showLayer(show_target):
    try:
        time.sleep(0.1)
        splash.append(show_target)
    except ValueError:
        pass

# ------------- Setup for Images ------------- #
# Display an image until the loop starts
pyportal.set_background('/images/loading.bmp')

bg_group = displayio.Group(max_size=1)
splash.append(bg_group)

icon_group = displayio.Group(max_size=1)
icon_group.x = 180
icon_group.y = 120
icon_group.scale = 1
view2.append(icon_group)

# This will handle switching Images and Icons
def set_image(group, filename):
    """Set the image file for a given group for display.
    This is most useful for Icons or image slideshows.
    :param group: The chosen group
    :param filename: The filename of the chosen image
    """
    print("Set image to ", filename)
    if group:
        group.pop()

    if not filename:
        return  # we're done, no icon desired

    image_file = open(filename, "rb")
    image = displayio.OnDiskBitmap(image_file)
    try:
        image_sprite = displayio.TileGrid(image,
                                          pixel_shader=displayio.ColorConverter())
    except TypeError:
        image_sprite = displayio.TileGrid(image,
                                          pixel_shader=displayio.ColorConverter(),
                                          position=(0, 0))
    group.append(image_sprite)

set_image(bg_group, "/images/BGimage.bmp")

# ---------- Text Boxes ------------- #
# Set the font and preload letters
font = bitmap_font.load_font("/fonts/Helvetica-Bold-16.bdf")
font.load_glyphs(b'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890- ()')

# Default Label styling:
TABS_X = 5
TABS_Y = 50

# Text Label Objects
feed1_label = Label(font, text="Text Window 1", color=0xE39300, max_glyphs=200)
feed1_label.x = TABS_X
feed1_label.y = TABS_Y
view1.append(feed1_label)

feed2_label = Label(font, text="Text Window 2", color=0xFFFFFF, max_glyphs=200)
feed2_label.x = TABS_X
feed2_label.y = TABS_Y
view2.append(feed2_label)

sensors_label = Label(font, text="Data View", color=0x03AD31, max_glyphs=200)
sensors_label.x = TABS_X
sensors_label.y = TABS_Y
view3.append(sensors_label)

sensor_data = Label(font, text="Data View", color=0x03AD31, max_glyphs=100)
sensor_data.x = TABS_X + 15
sensor_data.y = 170
view3.append(sensor_data)

text_hight = Label(font, text="M", color=0x03AD31, max_glyphs=10)

# return a reformatted string with word wrapping using PyPortal.wrap_nicely
def text_box(target, top, string, max_chars):
    text = pyportal.wrap_nicely(string, max_chars)
    new_text = ""
    test = ""
    for w in text:
        new_text += '\n' + w
        test += 'M\n'
    text_hight.text = test  # Odd things happen without this
    glyph_box = text_hight.bounding_box
    target.text = ""  # Odd things happen without this
    target.y = int(glyph_box[3] / 2) + top
    target.text = new_text

# ---------- Display Buttons ------------- #
# Default button styling:
BUTTON_HEIGHT = 40
BUTTON_WIDTH = 80

# We want three buttons across the top of the screen
TAPS_HEIGHT = 40
TAPS_WIDTH = int(screen_width / 3)
TAPS_Y = 0

# We want two big buttons at the bottom of the screen
BIG_BUTTON_HEIGHT = int(screen_height / 3.2)
BIG_BUTTON_WIDTH = int(screen_width / 2)
BIG_BUTTON_Y = int(screen_height - BIG_BUTTON_HEIGHT)

# This group will make it easy for us to read a button press later.
buttons = []

# Main User Interface Buttons
button_view1 = Button(x=0, y=0,
                      width=TAPS_WIDTH, height=TAPS_HEIGHT,
                      label="View1", label_font=font, label_color=0xff7e00,
                      fill_color=0x5c5b5c, outline_color=0x767676,
                      selected_fill=0x1a1a1a, selected_outline=0x2e2e2e,
                      selected_label=0x525252)
buttons.append(button_view1)  # adding this button to the buttons group

button_view2 = Button(x=TAPS_WIDTH, y=0,
                      width=TAPS_WIDTH, height=TAPS_HEIGHT,
                      label="View2", label_font=font, label_color=0xff7e00,
                      fill_color=0x5c5b5c, outline_color=0x767676,
                      selected_fill=0x1a1a1a, selected_outline=0x2e2e2e,
                      selected_label=0x525252)
buttons.append(button_view2)  # adding this button to the buttons group

button_view3 = Button(x=TAPS_WIDTH * 2, y=0,
                      width=TAPS_WIDTH, height=TAPS_HEIGHT,
                      label="View3", label_font=font, label_color=0xff7e00,
                      fill_color=0x5c5b5c, outline_color=0x767676,
                      selected_fill=0x1a1a1a, selected_outline=0x2e2e2e,
                      selected_label=0x525252)
buttons.append(button_view3)  # adding this button to the buttons group

button_switch = Button(x=0, y=BIG_BUTTON_Y,
                       width=BIG_BUTTON_WIDTH, height=BIG_BUTTON_HEIGHT,
                       label="Switch", label_font=font, label_color=0xff7e00,
                       fill_color=0x5c5b5c, outline_color=0x767676,
                       selected_fill=0x1a1a1a, selected_outline=0x2e2e2e,
                       selected_label=0x525252)
buttons.append(button_switch)  # adding this button to the buttons group

button_2 = Button(x=BIG_BUTTON_WIDTH, y=BIG_BUTTON_Y,
                  width=BIG_BUTTON_WIDTH, height=BIG_BUTTON_HEIGHT,
                  label="Button", label_font=font, label_color=0xff7e00,
                  fill_color=0x5c5b5c, outline_color=0x767676,
                  selected_fill=0x1a1a1a, selected_outline=0x2e2e2e,
                  selected_label=0x525252)
buttons.append(button_2)  # adding this button to the buttons group

# Add all of the main buttons to the splash Group
for b in buttons:
    splash.append(b.group)

# Make a button to change the icon image on view2
button_icon = Button(x=150, y=60,
                     width=BUTTON_WIDTH, height=BUTTON_HEIGHT,
                     label="Icon", label_font=font, label_color=0xffffff,
                     fill_color=0x8900ff, outline_color=0xbc55fd,
                     selected_fill=0x5a5a5a, selected_outline=0xff6600,
                     selected_label=0x525252, style=Button.ROUNDRECT)
buttons.append(button_icon)  # adding this button to the buttons group

# Add this button to view2 Group
view2.append(button_icon.group)

# Make a button to play a sound on view3
button_sound = Button(x=150, y=170,
                      width=BUTTON_WIDTH, height=BUTTON_HEIGHT,
                      label="Sound", label_font=font, label_color=0xffffff,
                      fill_color=0x8900ff, outline_color=0xbc55fd,
                      selected_fill=0x5a5a5a, selected_outline=0xff6600,
                      selected_label=0x525252, style=Button.ROUNDRECT)
buttons.append(button_sound)  # adding this button to the buttons group

# Add this button to view3 Group
view3.append(button_sound.group)

#pylint: disable=global-statement
def switch_view(what_view):
    global view_live
    if what_view == 1:
        hideLayer(view2)
        hideLayer(view3)
        button_view1.selected = False
        button_view2.selected = True
        button_view3.selected = True
        showLayer(view1)
        view_live = 1
        print("View1 On")
    elif what_view == 2:
        # global icon
        hideLayer(view1)
        hideLayer(view3)
        button_view1.selected = True
        button_view2.selected = False
        button_view3.selected = True
        showLayer(view2)
        view_live = 2
        print("View2 On")
    else:
        hideLayer(view1)
        hideLayer(view2)
        button_view1.selected = True
        button_view2.selected = True
        button_view3.selected = False
        showLayer(view3)
        view_live = 3
        print("View3 On")
#pylint: enable=global-statement

# Set variables and startup states
button_view1.selected = False
button_view2.selected = True
button_view3.selected = True
showLayer(view1)
hideLayer(view2)
hideLayer(view3)

view_live = 1
icon = 1
icon_name = "Ruby"
button_mode = 1
switch_state = 0
button_switch.label = "OFF"
button_switch.selected = True

# Update our Labels with display text.
text_box(feed1_label, TABS_Y,
         "The text on this screen is wrapped so that all of it fits nicely into a "
         "text box that is ### x ###.", 30)
text_box(feed1_label, TABS_Y,
         'The text on this screen is wrapped so that all of it fits nicely into a '
         'text box that is {} x {}.'
         .format(feed1_label.bounding_box[2], feed1_label.bounding_box[3] * 2), 30)
text_box(feed2_label, TABS_Y, 'Tap on the Icon button to meet a new friend.', 18)
text_box(sensors_label, TABS_Y,
         "This screen can display sensor readings and tap Sound to play a WAV file.", 28)

board.DISPLAY.show(splash)

# ------------- Code Loop ------------- #
while True:
    touch = ts.touch_point
    light = light_sensor.value
    tempC = round(adt.temperature)
    tempF = tempC * 1.8 + 32
    sensor_data.text = 'Touch: {}\nLight: {}\n Temp: {}°F'.format(touch, light, tempF)

    # ------------- Handle Button Press Detection ------------- #
    if touch:  # Only do this if the screen is touched
        # loop with buttons using enumerate() to number each button group as i
        for i, b in enumerate(buttons):
            if b.contains(touch):  # Test each button to see if it was pressed
                print('button%d pressed' % i)
                if i == 0 and view_live != 1:  # only switch if view1 is not already active
                    pyportal.play_file(soundTab)
                    switch_view(1)
                    while ts.touch_point:
                        pass
                if i == 1 and view_live != 2:  # only switch if view2 is not already active
                    pyportal.play_file(soundTab)
                    switch_view(2)
                    while ts.touch_point:
                        pass
                if i == 2 and view_live != 3:  # only switch if view3 is not already active
                    pyportal.play_file(soundTab)
                    switch_view(3)
                    while ts.touch_point:
                        pass
                if i == 3:
                    pyportal.play_file(soundBeep)
                    # Toggle switch button type
                    if switch_state == 0:
                        switch_state = 1
                        b.label = "ON"
                        b.selected = False
                        pixel.fill(WHITE)
                        print("Switch ON")
                    else:
                        switch_state = 0
                        b.label = "OFF"
                        b.selected = True
                        pixel.fill(BLACK)
                        print("Switch OFF")
                    # for debounce
                    while ts.touch_point:
                        pass
                    print("Switch Pressed")
                if i == 4:
                    pyportal.play_file(soundBeep)
                    # Momentary button type
                    b.selected = True
                    print('Button Pressed')
                    button_mode = numberUP(button_mode, 5)
                    if button_mode == 1:
                        pixel.fill(RED)
                    elif button_mode == 2:
                        pixel.fill(YELLOW)
                    elif button_mode == 3:
                        pixel.fill(GREEN)
                    elif button_mode == 4:
                        pixel.fill(BLUE)
                    elif button_mode == 5:
                        pixel.fill(PURPLE)
                    switch_state = 1
                    button_switch.label = "ON"
                    button_switch.selected = False
                    # for debounce
                    while ts.touch_point:
                        pass
                    print("Button released")
                    b.selected = False
                if i == 5 and view_live == 2:  # only if view2 is visible
                    pyportal.play_file(soundBeep)
                    b.selected = True
                    while ts.touch_point:
                        pass
                    print("Icon Button Pressed")
                    icon = numberUP(icon, 3)
                    if icon == 1:
                        icon_name = "Ruby"
                    elif icon == 2:
                        icon_name = "Gus"
                    elif icon == 3:
                        icon_name = "Billie"
                    b.selected = False
                    text_box(feed2_label, TABS_Y,
                             "Every time you tap the Icon button the icon image will "
                             "change. Say hi to {}!".format(icon_name), 18)
                    set_image(icon_group, "/images/" + icon_name + ".bmp")
                if i == 6 and view_live == 3:  # only if view3 is visible
                    b.selected = True
                    while ts.touch_point:
                        pass
                    print("Sound Button Pressed")
                    pyportal.play_file(soundDemo)
                    b.selected = False
[((16, 10, 16, 41), 'busio.I2C', 'busio.I2C', ({(16, 20, 16, 29): 'board.SCL', (16, 31, 16, 40): 'board.SDA'}, {}), '(board.SCL, board.SDA)', False, 'import busio\n'), ((17, 6, 17, 53), 'adafruit_adt7410.ADT7410', 'adafruit_adt7410.ADT7410', (), '', False, 'import adafruit_adt7410\n'), ((21, 15, 21, 36), 'analogio.AnalogIn', 'AnalogIn', ({(21, 24, 21, 35): 'board.LIGHT'}, {}), '(board.LIGHT)', False, 'from analogio import AnalogIn\n'), ((23, 8, 23, 58), 'neopixel.NeoPixel', 'neopixel.NeoPixel', (), '', False, 'import neopixel\n'), ((47, 11, 47, 21), 'adafruit_pyportal.PyPortal', 'PyPortal', ({}, {}), '()', False, 'from adafruit_pyportal import PyPortal\n'), ((65, 5, 69, 73), 'adafruit_touchscreen.Touchscreen', 'adafruit_touchscreen.Touchscreen', (), '', False, 'import adafruit_touchscreen\n'), ((73, 9, 73, 37), 'displayio.Group', 'displayio.Group', (), '', False, 'import displayio\n'), ((74, 8, 74, 36), 'displayio.Group', 'displayio.Group', (), '', False, 'import displayio\n'), ((75, 8, 75, 36), 'displayio.Group', 'displayio.Group', (), '', False, 'import displayio\n'), ((76, 8, 76, 36), 'displayio.Group', 'displayio.Group', (), '', False, 'import displayio\n'), ((97, 11, 97, 38), 'displayio.Group', 'displayio.Group', (), '', False, 'import displayio\n'), ((101, 13, 101, 40), 'displayio.Group', 'displayio.Group', (), '', False, 'import displayio\n'), ((134, 7, 134, 60), 'adafruit_bitmap_font.bitmap_font.load_font', 'bitmap_font.load_font', ({(134, 29, 134, 59): '"""/fonts/Helvetica-Bold-16.bdf"""'}, {}), "('/fonts/Helvetica-Bold-16.bdf')", False, 'from adafruit_bitmap_font import bitmap_font\n'), ((142, 14, 142, 79), 'adafruit_display_text.label.Label', 'Label', (), '', False, 'from adafruit_display_text.label import Label\n'), ((147, 14, 147, 79), 'adafruit_display_text.label.Label', 'Label', (), '', False, 'from adafruit_display_text.label import Label\n'), ((152, 16, 152, 77), 'adafruit_display_text.label.Label', 'Label', (), '', False, 'from adafruit_display_text.label import Label\n'), ((157, 14, 157, 75), 'adafruit_display_text.label.Label', 'Label', (), '', False, 'from adafruit_display_text.label import Label\n'), ((163, 13, 163, 65), 'adafruit_display_text.label.Label', 'Label', (), '', False, 'from adafruit_display_text.label import Label\n'), ((197, 15, 202, 46), 'adafruit_button.Button', 'Button', (), '', False, 'from adafruit_button import Button\n'), ((205, 15, 210, 46), 'adafruit_button.Button', 'Button', (), '', False, 'from adafruit_button import Button\n'), ((213, 15, 218, 46), 'adafruit_button.Button', 'Button', (), '', False, 'from adafruit_button import Button\n'), ((221, 16, 226, 47), 'adafruit_button.Button', 'Button', (), '', False, 'from adafruit_button import Button\n'), ((229, 11, 234, 42), 'adafruit_button.Button', 'Button', (), '', False, 'from adafruit_button import Button\n'), ((243, 14, 248, 69), 'adafruit_button.Button', 'Button', (), '', False, 'from adafruit_button import Button\n'), ((255, 15, 260, 70), 'adafruit_button.Button', 'Button', (), '', False, 'from adafruit_button import Button\n'), ((329, 0, 329, 26), 'board.DISPLAY.show', 'board.DISPLAY.show', ({(329, 19, 329, 25): 'splash'}, {}), '(splash)', False, 'import board\n'), ((122, 12, 122, 46), 'displayio.OnDiskBitmap', 'displayio.OnDiskBitmap', ({(122, 35, 122, 45): 'image_file'}, {}), '(image_file)', False, 'import displayio\n'), ((86, 8, 86, 23), 'time.sleep', 'time.sleep', ({(86, 19, 86, 22): '(0.1)'}, {}), '(0.1)', False, 'import time\n'), ((124, 62, 124, 88), 'displayio.ColorConverter', 
'displayio.ColorConverter', ({}, {}), '()', False, 'import displayio\n'), ((126, 62, 126, 88), 'displayio.ColorConverter', 'displayio.ColorConverter', ({}, {}), '()', False, 'import displayio\n')]
yottatix/btse-python
btse_futures/order.py
1c5019d0a68dff797afc70c4cc32c1950c28af4e
import json

from btse_futures.constants import OrderType, Side, TimeInForce


class Order:
    """
    Class to represent a BTSE Order

    ...

    Attributes
    ----------
    size : int
        order quantity or size. e.g. 1
    price : float
        price. e.g. 7000.0
    side: str
        order side. BUY or SELL
    time_in_force: str
        time the order is in force. Possible options defined in TimeInForce. e.g. GTC
    symbol: str
        instrument symbol. e.g. BTCPFC
    type: str
        order type. "LIMIT", "MARKET", or "OCO"
    txType: str
        transaction type
    postOnly: bool
        Is order post only?
    reduceOnly: bool
        Is order reduce only?
    triggerPrice: float
        Trigger price. Relevant only for LIMIT and OCO order types
    stopPrice: float
        Stop price.
    trailValue: float
        Trail value.
    clOrderId: str
        User defined order id
    trigger: str
        If an order is a stop loss or take profit order, then this parameter
        determines the trigger price. Available values are:
        1. markPrice = Mark Price (Default) and
        2. lastPrice = Last transacted Price

    Documentation: https://www.btse.com/apiexplorer/futures/?shell#tocs_orderformv2
    """

    def __init__(self, size: int, price: float, side: str, time_in_force: str,
                 symbol: str, type: str, txType: str, postOnly: bool,
                 reduceOnly: bool, triggerPrice: float, stopPrice: float = None,
                 trailValue: float = None, clOrderId: str = None,
                 trigger: str = None) -> None:
        assert(isinstance(size, int))
        assert(isinstance(price, float))
        assert(isinstance(side, str))
        assert(isinstance(time_in_force, str))
        assert(isinstance(symbol, str))
        assert(isinstance(type, str))
        assert(isinstance(postOnly, bool))
        assert(isinstance(reduceOnly, bool))
        assert(isinstance(triggerPrice, float))
        self.size = size
        self.price = price
        self.side = side
        self.time_in_force = time_in_force
        self.symbol = symbol
        self.type = type
        self.txType = txType
        self.postOnly = postOnly
        self.reduceOnly = reduceOnly
        self.triggerPrice = triggerPrice
        self.stopPrice = stopPrice
        self.trailValue = trailValue
        self.clOrderId = clOrderId
        self.trigger = trigger

    @property
    def quantity(self):
        return self.size

    def to_json(self):
        json_string = json.dumps(self.order_without_none_values())
        print(f'json string: {json_string}')
        return json_string

    def order_without_none_values(self):
        order_dict = self.__dict__
        for key, value in list(order_dict.items()):
            if value is None:
                del order_dict[key]
        return order_dict


class OpenOrder:
    """
    open order endpoint response format
    https://www.btse.com/apiexplorer/futures/#tocs_positionrespv2_1

    Example:
    --------
    `{
        "orderType": 0,
        "price": 6875,
        "size": 4,
        "side": "BUY",
        "filledSize": 3,
        "orderValue": 20.625,
        "pegPriceMin": 0,
        "pegPriceMax": 0,
        "pegPriceDeviation": 0,
        "cancelDuration": 0,
        "timestamp": 1576661434072,
        "orderID": "string",
        "stealth": 0.2,
        "triggerOrder": true,
        "triggered": true,
        "triggerPrice": 0,
        "triggerOriginalPrice": 0,
        "triggerOrderType": 1001,
        "triggerTrailingStopDeviation": 0,
        "triggerStopPrice": 0,
        "symbol": "string",
        "trailValue": 0,
        "clOrderID": "market001",
        "reduceOnly": true,
        "orderState": "string"
    }`
    """

    def __init__(self) -> None:
        self.orderType = 0
        self.price = 0
        self.size = 0
        self.side = ''
        self.filledSize = 0
        self.orderValue = 0.0
        self.pegPriceMin = 0
        self.pegPriceMax = 0
        self.pegPriceDeviation = 0
        self.cancelDuration = 0
        self.timestamp = 0
        self.orderID = ''
        self.stealth = 0.0
        self.triggerOrder = ''
        self.triggered = ''
        self.triggerPrice = 0
        self.triggerOriginalPrice = 0
        self.triggerOrderType = 0
        self.triggerTrailingStopDeviation = 0
        self.triggerStopPrice = 0
        self.symbol = ''
        self.trailValue = 0
        self.clOrderID = ''
        self.reduceOnly = ''
        self.orderState = ''

    @staticmethod
    def from_dict(data):
        open_order = OpenOrder()
        open_order.orderType = data.get('orderType')
        open_order.price = data.get('price')
        open_order.size = data.get('size')
        open_order.side = data.get('side')
        open_order.filledSize = data.get('filledSize')
        open_order.orderValue = data.get('orderValue')
        open_order.pegPriceMin = data.get('pegPriceMin')
        open_order.pegPriceMax = data.get('pegPriceMax')
        open_order.pegPriceDeviation = data.get('pegPriceDeviation')
        open_order.cancelDuration = data.get('cancelDuration')
        open_order.timestamp = data.get('timestamp')
        open_order.orderID = data.get('orderID')
        open_order.stealth = data.get('stealth')
        open_order.triggerOrder = data.get('triggerOrder')
        open_order.triggered = data.get('triggered')
        open_order.triggerPrice = data.get('triggerPrice')
        open_order.triggerOriginalPrice = data.get('triggerOriginalPrice')
        open_order.triggerOrderType = data.get('triggerOrderType')
        open_order.triggerTrailingStopDeviation = data.get(
            'triggerTrailingStopDeviation')
        open_order.triggerStopPrice = data.get('triggerStopPrice')
        open_order.symbol = data.get('symbol')
        open_order.trailValue = data.get('trailValue')
        open_order.clOrderID = data.get('clOrderID')
        open_order.reduceOnly = data.get('reduceOnly')
        open_order.orderState = data.get('orderState')
        return open_order


class OrderResponseV21:
    """
    Order Response V2.1

    Documentation -- https://www.btse.com/apiexplorer/futures/?shell#tocs_orderrespv2_1
    """

    def __init__(self) -> None:
        self.status = 0
        self.symbol = ''
        self.orderType = 0
        self.price = 0.0
        self.side = ''
        self.size = 0
        self.orderID = ''
        self.timestamp = 0
        self.triggerPrice = 0.0
        self.trigger = ''
        self.deviation = 0.0
        self.stealth = 0.0
        self.message = ''
        self.avgFillPrice = 0.0
        self.fillSize = 0.0
        self.clOrderID = ''

    @staticmethod
    def from_dict(data):
        order_response_v21 = OrderResponseV21()
        order_response_v21.status = data.get('status')
        order_response_v21.symbol = data.get('symbol')
        order_response_v21.orderType = data.get('orderType')
        order_response_v21.price = data.get('price')
        order_response_v21.side = data.get('side')
        order_response_v21.size = data.get('size')
        order_response_v21.orderID = data.get('orderID')
        order_response_v21.timestamp = data.get('timestamp')
        order_response_v21.triggerPrice = data.get('triggerPrice')
        order_response_v21.trigger = data.get('trigger')
        order_response_v21.deviation = data.get('deviation')
        order_response_v21.stealth = data.get('stealth')
        order_response_v21.message = data.get('message')
        order_response_v21.avgFillPrice = data.get('avgFillPrice')
        order_response_v21.fillSize = data.get('fillSize')
        order_response_v21.clOrderID = data.get('clOrderID')
        return order_response_v21
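# --- A minimal usage sketch (not part of the library). The field values below
# are illustrative only, and the txType value in particular is an assumption;
# check OrderType/Side/TimeInForce in btse_futures.constants for real values.
if __name__ == "__main__":
    order = Order(size=1, price=7000.0, side="BUY", time_in_force="GTC",
                  symbol="BTCPFC", type="LIMIT", txType="LIMIT",
                  postOnly=False, reduceOnly=False, triggerPrice=0.0)
    # None-valued optional fields (stopPrice, trailValue, clOrderId, trigger)
    # are stripped by order_without_none_values() before serialization:
    print(order.to_json())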
[]
md-reddevil/blinkpy
tests/mock_responses.py
3c7892385352079227c6251eb88257870bea0bb3
"""Simple mock responses definitions.""" from blinkpy.helpers.util import BlinkURLHandler import blinkpy.helpers.constants as const LOGIN_RESPONSE = { 'region': {'mock': 'Test'}, 'networks': { '1234': {'name': 'test', 'onboarded': True} }, 'authtoken': {'authtoken': 'foobar123', 'message': 'auth'} } class MockResponse: """Class for mock request response.""" def __init__(self, json_data, status_code, raw_data=None): """Initialize mock get response.""" self.json_data = json_data self.status_code = status_code self.raw_data = raw_data def json(self): """Return json data from get_request.""" return self.json_data @property def raw(self): """Return raw data from get request.""" return self.raw_data def mocked_session_send(*args, **kwargs): """Mock session.""" prepped = args[0] url = prepped.url header = prepped.headers method = prepped.method if method == 'GET': expected_token = LOGIN_RESPONSE['authtoken']['authtoken'] if header['TOKEN_AUTH'] != expected_token: response = {'message': 'Not Authorized', 'code': 400} status = 400 elif url == 'use_bad_response': response = {'foo': 'bar'} status = 200 elif url == 'reauth': response = {'message': 'REAUTH', 'code': 777} status = 777 else: response = {'test': 'foo'} status = 200 elif method == 'POST': if url in (const.LOGIN_URL, const.LOGIN_BACKUP_URL): response = LOGIN_RESPONSE status = 200 elif url == 'http://wrong.url/' or url is None: response = {'message': 'Error', 'code': 404} status = 404 else: response = {'message': 'foo', 'code': 200} status = 200 return MockResponse(response, status) class MockURLHandler(BlinkURLHandler): """Mocks URL Handler in blinkpy module.""" pass
[]
steveschulze/Photometry
fits_tools.py
3bc4ce457a270962321176d0e3e288b5a96cd34b
from astropy import coordinates as coord
from astropy import wcs
from astropy.io import fits
from astropy import units as u
from misc import bcolors
import numpy as np
import os
import sys  # needed for sys.exit() in convert_hms_dd (was missing)


def convert_hms_dd(RA, DEC):
    '''
    Convert HMS to DD system
    '''

    if (':' in RA) and (':' in DEC):
        Coord_dd = coord.SkyCoord(RA, DEC, unit=(u.hour, u.degree), frame='icrs')
        RA_dd = Coord_dd.ra.deg
        Dec_dd = Coord_dd.dec.deg
    elif (not (':' in RA) and not (':' in DEC)) and (('.' in RA) and ('.' in DEC)):
        RA_dd, Dec_dd = float(RA), float(DEC)
    else:
        print(bcolors.FAIL + 'Coordinates have wrong format.' + bcolors.ENDC)
        sys.exit()

    return RA_dd, Dec_dd


def get_header(FILE, KEYWORD):
    '''
    Get keyword from fits file
    '''
    header = fits.getheader(FILE)
    return header[KEYWORD]


def pix2arcsec(FITS):
    '''
    Get pixel scale
    '''
    hdu = fits.open(FITS)
    if len(hdu) > 1:
        header = fits.getheader(FITS, 0)
        header += fits.getheader(FITS, 1)
    else:
        header = fits.getheader(FITS)
    hdu_wcs = wcs.WCS(header)
    return np.median(wcs.utils.proj_plane_pixel_scales(hdu_wcs)) * 3600


def sky2xy(FITS, RA=False, DEC=False, CAT=None):
    '''
    Coordinate transformation: sky -> xy
    '''
    if CAT is None:
        if RA != False and DEC != False:
            cmd = ('sky2xy %s %s %s | grep -v off' % (FITS, RA, DEC))
            program_call = os.popen(cmd)
            xy = []
            for line in program_call:
                xy = np.array(line.strip().split()[-2:]).astype(float)
            if len(xy) > 0:
                return xy
    else:
        cmd = ("more %s | awk '{print $1,$2}' > %s"
               % (CAT, CAT.replace(CAT.split('.')[-1], 'reg')))
        os.system(cmd)
        cmd = ("sky2xy %s @%s | grep -v off | awk '{print $5, $6}'"
               % (FITS, CAT.replace(CAT.split('.')[-1], 'reg')))
        cat = os.popen(cmd)
        xy = []
        for line in cat:
            xy.append(list(map(float, line.replace('\n', '').split())))
        return np.array(xy)


def xy2sky(FITSFILE, X, Y):
    '''
    Coordinate transformation: xy -> sky
    '''
    program_call = os.popen('xy2sky %s %s %s' % (FITSFILE, X, Y))
    sky = []
    for line in program_call:
        sky.append(line.strip().split()[:2])
    return sky
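# A quick illustration (a sketch, not part of the module; convert_hms_dd only
# needs astropy, while the shell-based helpers also require the WCSTools
# binaries on PATH). The coordinates below are the Crab Nebula, for example:
if __name__ == '__main__':
    ra_dd, dec_dd = convert_hms_dd('05:34:31.94', '+22:00:52.2')
    print(ra_dd, dec_dd)  # -> approx. 83.633 22.0145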
[((35, 10, 35, 30), 'astropy.io.fits.getheader', 'fits.getheader', ({(35, 25, 35, 29): 'FILE'}, {}), '(FILE)', False, 'from astropy.io import fits\n'), ((44, 8, 44, 23), 'astropy.io.fits.open', 'fits.open', ({(44, 18, 44, 22): 'FITS'}, {}), '(FITS)', False, 'from astropy.io import fits\n'), ((51, 13, 51, 28), 'astropy.wcs.WCS', 'wcs.WCS', ({(51, 21, 51, 27): 'header'}, {}), '(header)', False, 'from astropy import wcs\n'), ((90, 16, 90, 61), 'os.popen', 'os.popen', ({(90, 25, 90, 60): "'xy2sky %s %s %s' % (FITSFILE, X, Y)"}, {}), "('xy2sky %s %s %s' % (FITSFILE, X, Y))", False, 'import os\n'), ((16, 13, 16, 74), 'astropy.coordinates.SkyCoord', 'coord.SkyCoord', (), '', True, 'from astropy import coordinates as coord\n'), ((46, 11, 46, 34), 'astropy.io.fits.getheader', 'fits.getheader', ({(46, 26, 46, 30): 'FITS', (46, 32, 46, 33): '0'}, {}), '(FITS, 0)', False, 'from astropy.io import fits\n'), ((47, 12, 47, 35), 'astropy.io.fits.getheader', 'fits.getheader', ({(47, 27, 47, 31): 'FITS', (47, 33, 47, 34): '(1)'}, {}), '(FITS, 1)', False, 'from astropy.io import fits\n'), ((49, 11, 49, 31), 'astropy.io.fits.getheader', 'fits.getheader', ({(49, 26, 49, 30): 'FITS'}, {}), '(FITS)', False, 'from astropy.io import fits\n'), ((73, 2, 73, 16), 'os.system', 'os.system', ({(73, 12, 73, 15): 'cmd'}, {}), '(cmd)', False, 'import os\n'), ((75, 9, 75, 22), 'os.popen', 'os.popen', ({(75, 18, 75, 21): 'cmd'}, {}), '(cmd)', False, 'import os\n'), ((82, 9, 82, 21), 'numpy.array', 'np.array', ({(82, 18, 82, 20): 'xy'}, {}), '(xy)', True, 'import numpy as np\n'), ((52, 18, 52, 60), 'astropy.wcs.utils.proj_plane_pixel_scales', 'wcs.utils.proj_plane_pixel_scales', ({(52, 52, 52, 59): 'hdu_wcs'}, {}), '(hdu_wcs)', False, 'from astropy import wcs\n'), ((64, 18, 64, 31), 'os.popen', 'os.popen', ({(64, 27, 64, 30): 'cmd'}, {}), '(cmd)', False, 'import os\n')]
neurom-iot/n3ml
test_stbp_snn_eval.py
39c6b50661f293d58b4b37ef613643860724bb24
import argparse

import torch
import torch.nn as nn

import torchvision
import torchvision.transforms as transforms

from n3ml.model import DynamicModel_STBP_SNN


def validate(val_loader, model, encoder, criterion, opt):
    model.eval()

    total_images = 0
    num_corrects = 0
    total_loss = 0

    with torch.no_grad():
        for step, (images, labels) in enumerate(val_loader):
            images = images.cuda()
            labels = labels.cuda()

            preds = model(encoder, images, opt.num_steps)

            labels_ = torch.zeros(torch.numel(labels), 10, device=labels.device)
            labels_ = labels_.scatter_(1, labels.view(-1, 1), 1)

            loss = criterion(preds, labels_)

            num_corrects += torch.argmax(preds, dim=1).eq(labels).sum(dim=0)
            total_loss += loss.cpu().detach().numpy() * images.size(0)
            total_images += images.size(0)

    val_acc = num_corrects.float() / total_images
    val_loss = total_loss / total_images

    return val_acc, val_loss


def app(opt):
    print(opt)

    val_loader = torch.utils.data.DataLoader(
        torchvision.datasets.MNIST(
            opt.data,
            train=False,
            download=True,
            transform=torchvision.transforms.Compose([transforms.ToTensor()])),
        batch_size=opt.batch_size)

    state_dict = torch.load(opt.pretrained)

    model = DynamicModel_STBP_SNN(batch_size=opt.batch_size)
    for m in state_dict['arch']:
        model.add_module(m[0], m[1])

    if torch.cuda.is_available():
        model.cuda()

    encoder = lambda x: (x > torch.rand(x.size(), device=x.device)).float()

    criterion = nn.MSELoss()

    acc, loss = validate(val_loader, model, encoder, criterion, opt)

    print("In test, loss: {} - acc: {}".format(loss, acc))


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--data', default='data')
    parser.add_argument('--batch_size', default=100, type=int)
    parser.add_argument('--num_steps', default=15, type=int)
    parser.add_argument('--pretrained', default='pretrained/stbp_dynamic_acc_9897.pt')

    app(parser.parse_args())
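# Side note (illustrative, not part of n3ml): the ``encoder`` lambda above is
# Bernoulli rate coding -- on each of the opt.num_steps passes, a pixel with
# intensity p in [0, 1] emits a spike with probability p:
#
#     x = torch.tensor([0.0, 0.5, 1.0])
#     spikes = (x > torch.rand(x.size())).float()
#     # 0.0 never fires, 1.0 always fires, 0.5 fires on about half the steps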
[((50, 17, 50, 43), 'torch.load', 'torch.load', ({(50, 28, 50, 42): 'opt.pretrained'}, {}), '(opt.pretrained)', False, 'import torch\n'), ((52, 12, 52, 60), 'n3ml.model.DynamicModel_STBP_SNN', 'DynamicModel_STBP_SNN', (), '', False, 'from n3ml.model import DynamicModel_STBP_SNN\n'), ((56, 7, 56, 32), 'torch.cuda.is_available', 'torch.cuda.is_available', ({}, {}), '()', False, 'import torch\n'), ((61, 16, 61, 28), 'torch.nn.MSELoss', 'nn.MSELoss', ({}, {}), '()', True, 'import torch.nn as nn\n'), ((68, 13, 68, 38), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((18, 9, 18, 24), 'torch.no_grad', 'torch.no_grad', ({}, {}), '()', False, 'import torch\n'), ((24, 34, 24, 53), 'torch.numel', 'torch.numel', ({(24, 46, 24, 52): 'labels'}, {}), '(labels)', False, 'import torch\n'), ((47, 54, 47, 75), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ({}, {}), '()', True, 'import torchvision.transforms as transforms\n'), ((29, 28, 29, 54), 'torch.argmax', 'torch.argmax', (), '', False, 'import torch\n')]
govex/python-lessons
section_07_(files)/read_csv.py
e692f48b6db008a45df0b941dee1e580f5a6c800
# If you're new to file handling, be sure to check out with_open.py first!
# You'll also want to check out read_text.py before this example. This one is a bit more advanced.

with open('read_csv.csv', 'r') as states_file:
    # Instead of leaving the file contents as a string, we're splitting the file into a list at every new line, and we save that list into the variable states
    states = states_file.read().split("\n")

# Since this is a spreadsheet in comma separated values (CSV) format, we can think of states as a list of rows.
# But we'll need to split the columns into a list as well!
for index, state in enumerate(states):
    states[index] = state.split(",")

# Now we have a nested list with all of the information!

# Our file looks like this:
# State, Population Estimate, Percent of Total population
# California, 38332521, 11.91%
# Texas, 26448193, 8.04%
# ...

# Our header row is at states[0], so we can use that to display the information in a prettier way.

for state in states[1:]:  # We use [1:] so we skip the header row.
    # state[0] is the first column in the row, which contains the name of the state.
    print("\n---{0}---".format(state[0]))
    for index, info in enumerate(state[1:]):  # We use [1:] so we don't repeat the state name.
        print("{0}:\t{1}".format(states[0][index + 1], info))

# states is the full list of all of the states. It's a nested list. The outer list contains the rows, each inner list contains the columns in that row.
# states[0] refers to the header row of the list
# So states[0][0] would refer to "State", states[0][1] would refer to "Population Estimate", and states[0][2] would refer to "Percent of total population"

# state is one state within states. state is also a list, containing the name, population, and percentage of that particular state.
# So the first time through the loop, state[0] would refer to "California", state[1] would refer to 38332521, and state[2] would refer to 11.91%
# Since state is being created by the outer for loop above, it gets a new value each time through.

# We're using enumerate to get the index (slicing number) of the column we're on, along with the information.
# That way we can pair the column name with the information, as shown in the print() call inside the inner loop.
# NOTE: Since we're slicing from [1:] in the inner loop, we need to increase the index by + 1, otherwise our headers will be off by one.

# Sample output:

# ---"California"---
# "Population Estimate": 38332521
# "Percent of Total population": "11.91%"

# ---"Texas"---
# "Population Estimate": 26448193
# "Percent of Total population": "8.04%"

# ---"New York"---
# "Population Estimate": 19651127
# "Percent of Total population": "6.19%"
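# Bonus (not part of the original lesson): Python's built-in csv module can do
# the splitting for us, and it also copes with quoted fields. A quick sketch,
# assuming the same read_csv.csv file is present:
import csv

with open('read_csv.csv', 'r') as states_file:
    rows = list(csv.reader(states_file))

# rows is essentially the same nested list we built by hand above,
# with the header row first.
print(rows[0])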
[]
tinve/kaggle_melanoma
kaggle_melanoma/schedulers.py
6d2d16d62a394fd9cc2498bdf1a19ce60fe047eb
import math

from torch.optim.lr_scheduler import _LRScheduler
from torch.optim.optimizer import Optimizer


class PolyLR(_LRScheduler):
    """Sets the learning rate of each parameter group according to poly learning rate policy"""

    def __init__(self, optimizer, max_iter=90000, power=0.9, last_epoch=-1):
        self.max_iter = max_iter
        self.power = power
        super().__init__(optimizer, last_epoch)

    def get_lr(self):
        return [base_lr * (1 - float(self.last_epoch) / self.max_iter) ** self.power
                for base_lr in self.base_lrs]


func_zoo = {
    "cosine_decay": lambda epoch, step, len_epoch, total_epoch:
        0.5 * (math.cos(step * math.pi / (total_epoch * len_epoch)) + 1)
}


class CosineWarmRestart:
    def __init__(
        self,
        optimizer: Optimizer,
        func: str = "cosine_decay",
        warmup: bool = True,
        warmup_epoch: int = 1,
        period: int = 10,
        min_lr: float = 1e-5,
        low_epoch: int = 1,
    ):
        # self.base_lrs = list(map(lambda group: group["lr"], optimizer.param_groups))[0]
        self.base_lrs = [x["lr"] for x in optimizer.param_groups][0]
        self.optimizer = optimizer
        self.warmup = warmup
        self.warmup_epoch = warmup_epoch
        self.period = period
        self.cos_period = period - low_epoch
        self.low_epoch = low_epoch
        self.lr_func = func_zoo[func]
        self.min_lr = min_lr

    def cosine_step(self, current_epoch: int, global_step: int, len_epoch: int) -> float:
        if self.warmup and current_epoch < self.warmup_epoch:
            lr = self.base_lrs * float(1 + global_step) / (self.warmup_epoch * len_epoch)
        else:
            lr = self.base_lrs * self.lr_func(current_epoch, global_step, len_epoch, self.cos_period)
        lr = max(self.min_lr, lr)
        for param_group in self.optimizer.param_groups:
            param_group["lr"] = lr
        return lr

    def step(self, current_epoch: int, global_step: int, len_epoch: int) -> float:
        current_epoch = current_epoch % self.period
        if current_epoch >= self.period - self.low_epoch:
            global_step = len_epoch * self.cos_period
        else:
            global_step = global_step % (self.period * len_epoch)
        return self.cosine_step(current_epoch, global_step, len_epoch)
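# A minimal usage sketch (not from the repo; the dummy parameter below is only
# there to give the optimizer something to hold). PolyLR follows the standard
# torch scheduler protocol, so the lr decays as base_lr * (1 - t/max_iter)**power:
if __name__ == "__main__":
    import torch

    param = torch.nn.Parameter(torch.zeros(1))
    optimizer = torch.optim.SGD([param], lr=0.1)
    scheduler = PolyLR(optimizer, max_iter=100, power=0.9)
    for _ in range(3):
        optimizer.step()
        scheduler.step()
        print(optimizer.param_groups[0]["lr"])  # 0.1 * (1 - t/100) ** 0.9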
[((23, 7, 23, 59), 'math.cos', 'math.cos', ({(23, 16, 23, 58): '(step * math.pi / (total_epoch * len_epoch))'}, {}), '(step * math.pi / (total_epoch * len_epoch))', False, 'import math\n')]
PumpkinYing/GAT
data/data/__init__.py
723a20fcd9f915123d46ef4ef03eeadb6910635a
from .dataset import load_data
[]
federicosapienza/InboxNotionTelegramBot
utils.py
031d5e78cd352dfb692b93f3e0b421695f1dc18e
import json
import logging

logger = logging.getLogger(__name__)

with open('configuration.json') as f:
    config = json.load(f)

TELEGRAM_TOKEN = config["telegram-bot-token"]
NOTION_TOKEN = config["notion-token"]
NOTION_TABLE_URL = config["inbox_table"]["table_url"]


def check_allowed_user(user_id):
    """
    check if allowed user
    :param user_id: telegram user id
    :return True if user is valid, False otherwise
    """
    valid_user = config["allowed_user_id"]
    user_id = str(user_id)
    return user_id == valid_user


def restrict_action(handled_action):
    """
    Wrapper for creating a private bot
    :param handled_action: the action to perform
    """
    def check_private(update, context):
        if not (check_allowed_user(update.message.from_user.id)):
            logging.warning("An unauthorized user attempted to use the bot. username: {}, id: {} .".format(
                update.message.from_user.username,
                update.message.from_user.id
            ))
            return
        else:
            return handled_action(update, context)

    return check_private
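# How the wrapper is meant to be used (a sketch; the handler below is
# hypothetical and not part of this bot):
@restrict_action
def echo(update, context):
    # Only runs for the configured allowed_user_id; everyone else is logged
    # and silently ignored by check_private above.
    update.message.reply_text(update.message.text)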
[((4, 9, 4, 36), 'logging.getLogger', 'logging.getLogger', ({(4, 27, 4, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((7, 13, 7, 25), 'json.load', 'json.load', ({(7, 23, 7, 24): 'f'}, {}), '(f)', False, 'import json\n')]
timgates42/enaml
enaml/core/byteplay/__init__.py
054efe6a4047d84f2fff718d656a64a2363884dc
#------------------------------------------------------------------------------
# Copyright (c) 2013-2018, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#------------------------------------------------------------------------------
from ...compat import USE_WORDCODE

if USE_WORDCODE:
    from .wbyteplay import *
else:
    from .byteplay3 import *
[]
artemigkh/cassiopeia
cassiopeia/datastores/riotapi/match.py
fa78cb8f86ea21857916a707d04de6a05498033e
from time import time
from typing import Type, TypeVar, MutableMapping, Any, Iterable, Generator, Union

import arrow
import datetime
import math

from datapipelines import DataSource, PipelineContext, Query, NotFoundError, validate_query

from .common import RiotAPIService, APINotFoundError
from ...data import Platform, Season, Queue, SEASON_IDS, QUEUE_IDS
from ...dto.match import MatchDto, MatchListDto, TimelineDto
from ..uniquekeys import convert_region_to_platform

T = TypeVar("T")


def _get_current_time(query: MutableMapping[str, Any], context: PipelineContext = None) -> int:
    return int(time()) * 1000


class MatchAPI(RiotAPIService):
    @DataSource.dispatch
    def get(self, type: Type[T], query: MutableMapping[str, Any], context: PipelineContext = None) -> T:
        pass

    @DataSource.dispatch
    def get_many(self, type: Type[T], query: MutableMapping[str, Any], context: PipelineContext = None) -> Iterable[T]:
        pass

    _validate_get_match_query = Query. \
        has("id").as_(int).also. \
        has("platform").as_(Platform)

    @get.register(MatchDto)
    @validate_query(_validate_get_match_query, convert_region_to_platform)
    def get_match(self, query: MutableMapping[str, Any], context: PipelineContext = None) -> MatchDto:
        url = "https://{platform}.api.riotgames.com/lol/match/v4/matches/{id}".format(platform=query["platform"].value.lower(), id=query["id"])
        try:
            app_limiter, method_limiter = self._get_rate_limiter(query["platform"], "matches/id")
            data = self._get(url, {}, app_limiter=app_limiter, method_limiter=method_limiter)
        except APINotFoundError as error:
            raise NotFoundError(str(error)) from error

        data["gameId"] = query["id"]
        data["region"] = query["platform"].region.value
        for p in data["participantIdentities"]:
            aid = p.get("player", {}).get("currentAccountId", None)
            if aid == 0:
                p["player"]["bot"] = True
        return MatchDto(data)

    _validate_get_many_match_query = Query. \
        has("ids").as_(Iterable).also. \
        has("platform").as_(Platform)

    @get_many.register(MatchDto)
    @validate_query(_validate_get_many_match_query, convert_region_to_platform)
    def get_many_match(self, query: MutableMapping[str, Any], context: PipelineContext = None) -> Generator[MatchDto, None, None]:
        def generator():
            for id in query["ids"]:
                url = "https://{platform}.api.riotgames.com/lol/match/v4/matches/{id}".format(platform=query["platform"].value.lower(), id=id)
                try:
                    app_limiter, method_limiter = self._get_rate_limiter(query["platform"], "matches/id")
                    data = self._get(url, {}, app_limiter=app_limiter, method_limiter=method_limiter)
                except APINotFoundError as error:
                    raise NotFoundError(str(error)) from error

                for participant in data["participants"]:
                    participant.setdefault("runes", [])
                for p in data["participantIdentities"]:
                    aid = p.get("player", {}).get("currentAccountId", None)
                    if aid == 0:
                        p["player"]["bot"] = True

                data["gameId"] = id
                data["region"] = query["platform"].region.value
                yield MatchDto(data)

        return generator()

    _validate_get_match_list_query = Query. \
        has("accountId").as_(str).also. \
        has("platform").as_(Platform).also. \
        has("beginTime").as_(int).also. \
        can_have("endTime").as_(int).also. \
        has("beginIndex").as_(int).also. \
        has("maxNumberOfMatches").as_(float).also. \
        can_have("seasons").as_(Iterable).also. \
        can_have("champion.ids").as_(Iterable).also. \
        can_have("queues").as_(Iterable)

    @get.register(MatchListDto)
    @validate_query(_validate_get_match_list_query, convert_region_to_platform)
    def get_match_list(self, query: MutableMapping[str, Any], context: PipelineContext = None) -> MatchListDto:
        params = {}

        riot_index_interval = 100
        riot_date_interval = datetime.timedelta(days=7)

        begin_time = query["beginTime"]  # type: arrow.Arrow
        end_time = query.get("endTime", arrow.now())  # type: arrow.Arrow
        if isinstance(begin_time, int):
            begin_time = arrow.get(begin_time / 1000)
        if isinstance(end_time, int):
            end_time = arrow.get(end_time / 1000)

        def determine_calling_method(begin_time, end_time) -> str:
            """Returns either "by_date" or "by_index"."""
            matches_per_date_interval = 10  # This is an assumption
            seconds_per_day = (60 * 60 * 24)
            riot_date_interval_in_days = riot_date_interval.total_seconds() / seconds_per_day  # in units of days
            npulls_by_date = (end_time - begin_time).total_seconds() / seconds_per_day / riot_date_interval_in_days
            npulls_by_index = (arrow.now() - begin_time).total_seconds() / seconds_per_day / riot_date_interval_in_days * matches_per_date_interval / riot_index_interval
            if math.ceil(npulls_by_date) < math.ceil(npulls_by_index):
                by = "by_date"
            else:
                by = "by_index"
            return by

        calling_method = determine_calling_method(begin_time, end_time)

        if calling_method == "by_date":
            params["beginTime"] = begin_time.timestamp * 1000
            if "endTime" in query:
                params["endTime"] = min((begin_time + riot_date_interval).timestamp * 1000, query["endTime"])
            else:
                params["endTime"] = (begin_time + riot_date_interval).timestamp * 1000
        else:
            params["beginIndex"] = query["beginIndex"]
            params["endIndex"] = query["beginIndex"] + min(riot_index_interval, query["maxNumberOfMatches"])
            params["endIndex"] = int(params["endIndex"])

        if "seasons" in query:
            seasons = {Season(season) for season in query["seasons"]}
            params["season"] = {SEASON_IDS[season] for season in seasons}
        else:
            seasons = set()

        if "champion.ids" in query:
            champions = query["champion.ids"]
            params["champion"] = champions
        else:
            champions = set()

        if "queues" in query:
            queues = {Queue(queue) for queue in query["queues"]}
            params["queue"] = {QUEUE_IDS[queue] for queue in queues}
        else:
            queues = set()

        url = "https://{platform}.api.riotgames.com/lol/match/v4/matchlists/by-account/{accountId}".format(platform=query["platform"].value.lower(), accountId=query["accountId"])
        try:
            app_limiter, method_limiter = self._get_rate_limiter(query["platform"], "matchlists/by-account/accountId")
            data = self._get(url, params, app_limiter=app_limiter, method_limiter=method_limiter)
        except APINotFoundError:
            data = {"matches": []}

        data["accountId"] = query["accountId"]
        data["region"] = query["platform"].region.value
        data["season"] = seasons
        data["champion"] = champions
        data["queue"] = queues
        if calling_method == "by_index":
            data["beginIndex"] = params["beginIndex"]
            data["endIndex"] = params["endIndex"]
            data["maxNumberOfMatches"] = query["maxNumberOfMatches"]
        else:
            data["beginTime"] = params["beginTime"]
            data["endTime"] = params["endTime"]
        for match in data["matches"]:
            match["accountId"] = query["accountId"]
            match["region"] = Platform(match["platformId"]).region.value
        return MatchListDto(data)

    _validate_get_many_match_list_query = Query. \
        has("accountIds").as_(Iterable).also. \
        has("platform").as_(Platform).also. \
        can_have("beginTime").as_(int).also. \
        can_have("endTime").as_(int).also. \
        can_have("beginIndex").as_(int).also. \
        can_have("endIndex").as_(int).also. \
        can_have("seasons").as_(Iterable).also. \
        can_have("champion.ids").as_(Iterable).also. \
        can_have("queues").as_(Iterable)

    @get_many.register(MatchListDto)
    @validate_query(_validate_get_many_match_list_query, convert_region_to_platform)
    def get_many_match_list(self, query: MutableMapping[str, Any], context: PipelineContext = None) -> Generator[MatchListDto, None, None]:
        params = {}

        if "beginIndex" in query:
            params["beginIndex"] = query["beginIndex"]

        if "endIndex" in query:
            params["endIndex"] = query["endIndex"]

        if "seasons" in query:
            seasons = {Season(season) for season in query["seasons"]}
            params["season"] = {SEASON_IDS[season] for season in seasons}
        else:
            seasons = set()

        if "champion.ids" in query:
            params["champion"] = {query["champion.ids"]}

        if "queues" in query:
            queues = {Queue(queue) for queue in query["queues"]}
            params["queue"] = {QUEUE_IDS[queue] for queue in queues}
        else:
            queues = set()

        def generator():
            for id in query["accountIds"]:
                url = "https://{platform}.api.riotgames.com/lol/match/v4/matchlists/by-account/{accountId}".format(platform=query["platform"].value.lower(), accountId=id)
                try:
                    app_limiter, method_limiter = self._get_rate_limiter(query["platform"], "matchlists/by-account/accountId")
                    data = self._get(url, params, app_limiter=app_limiter, method_limiter=method_limiter)
                except APINotFoundError as error:
                    raise NotFoundError(str(error)) from error

                data["accountId"] = id
                data["region"] = query["platform"].region.value
                if "beginIndex" in query:
                    data["beginIndex"] = query["beginIndex"]
                if "endIndex" in query:
                    data["endIndex"] = query["endIndex"]
                if "seasons" in query:
                    data["seasons"] = seasons
                if "champion.ids" in query:
                    data["champion"] = params["champion"]
                if "queues" in query:
                    params["queue"] = queues
                yield MatchListDto(data)

        return generator()

    _validate_get_timeline_query = Query. \
        has("id").as_(int).also. \
        has("platform").as_(Platform)

    @get.register(TimelineDto)
    @validate_query(_validate_get_timeline_query, convert_region_to_platform)
    def get_match_timeline(self, query: MutableMapping[str, Any], context: PipelineContext = None) -> TimelineDto:
        url = "https://{platform}.api.riotgames.com/lol/match/v4/timelines/by-match/{id}".format(platform=query["platform"].value.lower(), id=query["id"])
        try:
            app_limiter, method_limiter = self._get_rate_limiter(query["platform"], "timelines/by-match/id")
            data = self._get(url, {}, app_limiter=app_limiter, method_limiter=method_limiter)
        except APINotFoundError as error:
            raise NotFoundError(str(error)) from error

        data["matchId"] = query["id"]
        data["region"] = query["platform"].region.value
        return TimelineDto(data)

    _validate_get_many_timeline_query = Query. \
        has("ids").as_(Iterable).also. \
        has("platform").as_(Platform)

    @get_many.register(TimelineDto)
    @validate_query(_validate_get_many_timeline_query, convert_region_to_platform)
    def get_many_match_timeline(self, query: MutableMapping[str, Any], context: PipelineContext = None) -> Generator[TimelineDto, None, None]:
        def generator():
            for id in query["ids"]:
                url = "https://{platform}.api.riotgames.com/lol/match/v4/timelines/by-match/{id}".format(platform=query["platform"].value.lower(), id=id)
                try:
                    app_limiter, method_limiter = self._get_rate_limiter(query["platform"], "timelines/by-match/id")
                    data = self._get(url, {}, app_limiter=app_limiter, method_limiter=method_limiter)
                except APINotFoundError as error:
                    raise NotFoundError(str(error)) from error

                data["matchId"] = id
                data["region"] = query["platform"].region.value
                yield TimelineDto(data)

        return generator()
[((14, 4, 14, 16), 'typing.TypeVar', 'TypeVar', ({(14, 12, 14, 15): '"""T"""'}, {}), "('T')", False, 'from typing import Type, TypeVar, MutableMapping, Any, Iterable, Generator, Union\n'), ((35, 5, 35, 74), 'datapipelines.validate_query', 'validate_query', ({(35, 20, 35, 45): '_validate_get_match_query', (35, 47, 35, 73): 'convert_region_to_platform'}, {}), '(_validate_get_match_query, convert_region_to_platform)', False, 'from datapipelines import DataSource, PipelineContext, Query, NotFoundError, validate_query\n'), ((57, 5, 57, 79), 'datapipelines.validate_query', 'validate_query', ({(57, 20, 57, 50): '_validate_get_many_match_query', (57, 52, 57, 78): 'convert_region_to_platform'}, {}), '(_validate_get_many_match_query, convert_region_to_platform)', False, 'from datapipelines import DataSource, PipelineContext, Query, NotFoundError, validate_query\n'), ((93, 5, 93, 79), 'datapipelines.validate_query', 'validate_query', ({(93, 20, 93, 50): '_validate_get_match_list_query', (93, 52, 93, 78): 'convert_region_to_platform'}, {}), '(_validate_get_match_list_query, convert_region_to_platform)', False, 'from datapipelines import DataSource, PipelineContext, Query, NotFoundError, validate_query\n'), ((187, 5, 187, 84), 'datapipelines.validate_query', 'validate_query', ({(187, 20, 187, 55): '_validate_get_many_match_list_query', (187, 57, 187, 83): 'convert_region_to_platform'}, {}), '(_validate_get_many_match_list_query, convert_region_to_platform)', False, 'from datapipelines import DataSource, PipelineContext, Query, NotFoundError, validate_query\n'), ((242, 5, 242, 77), 'datapipelines.validate_query', 'validate_query', ({(242, 20, 242, 48): '_validate_get_timeline_query', (242, 50, 242, 76): 'convert_region_to_platform'}, {}), '(_validate_get_timeline_query, convert_region_to_platform)', False, 'from datapipelines import DataSource, PipelineContext, Query, NotFoundError, validate_query\n'), ((260, 5, 260, 82), 'datapipelines.validate_query', 'validate_query', ({(260, 20, 260, 53): '_validate_get_many_timeline_query', (260, 55, 260, 81): 'convert_region_to_platform'}, {}), '(_validate_get_many_timeline_query, convert_region_to_platform)', False, 'from datapipelines import DataSource, PipelineContext, Query, NotFoundError, validate_query\n'), ((98, 29, 98, 55), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((18, 15, 18, 21), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((101, 40, 101, 51), 'arrow.now', 'arrow.now', ({}, {}), '()', False, 'import arrow\n'), ((103, 25, 103, 53), 'arrow.get', 'arrow.get', ({(103, 35, 103, 52): 'begin_time / 1000'}, {}), '(begin_time / 1000)', False, 'import arrow\n'), ((105, 23, 105, 49), 'arrow.get', 'arrow.get', ({(105, 33, 105, 48): 'end_time / 1000'}, {}), '(end_time / 1000)', False, 'import arrow\n'), ((114, 15, 114, 40), 'math.ceil', 'math.ceil', ({(114, 25, 114, 39): 'npulls_by_date'}, {}), '(npulls_by_date)', False, 'import math\n'), ((114, 43, 114, 69), 'math.ceil', 'math.ceil', ({(114, 53, 114, 68): 'npulls_by_index'}, {}), '(npulls_by_index)', False, 'import math\n'), ((30, 32, 31, 17), 'datapipelines.Query.has', 'Query.has', ({(31, 12, 31, 16): '"""id"""'}, {}), "('id')", False, 'from datapipelines import DataSource, PipelineContext, Query, NotFoundError, validate_query\n'), ((52, 37, 53, 18), 'datapipelines.Query.has', 'Query.has', ({(53, 12, 53, 17): '"""ids"""'}, {}), "('ids')", False, 'from datapipelines import DataSource, PipelineContext, Query, NotFoundError, validate_query\n'), 
((237, 35, 238, 17), 'datapipelines.Query.has', 'Query.has', ({(238, 12, 238, 16): '"""id"""'}, {}), "('id')", False, 'from datapipelines import DataSource, PipelineContext, Query, NotFoundError, validate_query\n'), ((255, 40, 256, 18), 'datapipelines.Query.has', 'Query.has', ({(256, 12, 256, 17): '"""ids"""'}, {}), "('ids')", False, 'from datapipelines import DataSource, PipelineContext, Query, NotFoundError, validate_query\n'), ((113, 31, 113, 42), 'arrow.now', 'arrow.now', ({}, {}), '()', False, 'import arrow\n'), ((81, 37, 82, 24), 'datapipelines.Query.has', 'Query.has', ({(82, 12, 82, 23): '"""accountId"""'}, {}), "('accountId')", False, 'from datapipelines import DataSource, PipelineContext, Query, NotFoundError, validate_query\n'), ((175, 42, 176, 25), 'datapipelines.Query.has', 'Query.has', ({(176, 12, 176, 24): '"""accountIds"""'}, {}), "('accountIds')", False, 'from datapipelines import DataSource, PipelineContext, Query, NotFoundError, validate_query\n')]
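The by_date/by_index choice in get_match_list is a request-count estimate: date pagination costs one request per 7-day window, while index pagination costs one request per 100 matches at an assumed play rate of 10 matches per window. A standalone sketch of the same arithmetic; the 30-day window is an illustrative assumption (and it presumes begin_time is 30 days before now, since the index estimate measures from the present):

import math

riot_index_interval = 100        # matches returned per index page
riot_date_interval_in_days = 7   # one date-window request covers a week
matches_per_date_interval = 10   # assumed play rate, as in the source

window_days = 30                 # assumption: pulling one month of history
npulls_by_date = window_days / riot_date_interval_in_days
npulls_by_index = (window_days / riot_date_interval_in_days
                   * matches_per_date_interval / riot_index_interval)

by = "by_date" if math.ceil(npulls_by_date) < math.ceil(npulls_by_index) else "by_index"
print(by)  # -> "by_index": ceil(4.29) = 5 date pulls vs ceil(0.43) = 1 index pull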
fochoao/cpython
Lib/site-packages/hackedit/vendor/jedi/cache.py
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
""" This caching is very important for speed and memory optimizations. There's nothing really spectacular, just some decorators. The following cache types are available: - module caching (`load_parser` and `save_parser`), which uses pickle and is really important to assure low load times of modules like ``numpy``. - ``time_cache`` can be used to cache something for just a limited time span, which can be useful if there's user interaction and the user cannot react faster than a certain time. This module is one of the reasons why |jedi| is not thread-safe. As you can see there are global variables, which are holding the cache information. Some of these variables are being cleaned after every API usage. """ import time import os import sys import json import hashlib import gc import inspect import shutil import re try: import cPickle as pickle except ImportError: import pickle from jedi import settings from jedi import common from jedi import debug _time_caches = {} # for fast_parser, should not be deleted parser_cache = {} class ParserCacheItem(object): def __init__(self, parser, change_time=None): self.parser = parser if change_time is None: change_time = time.time() self.change_time = change_time def clear_time_caches(delete_all=False): """ Jedi caches many things, that should be completed after each completion finishes. :param delete_all: Deletes also the cache that is normally not deleted, like parser cache, which is important for faster parsing. """ global _time_caches if delete_all: for cache in _time_caches.values(): cache.clear() parser_cache.clear() else: # normally just kill the expired entries, not all for tc in _time_caches.values(): # check time_cache for expired entries for key, (t, value) in list(tc.items()): if t < time.time(): # delete expired entries del tc[key] def time_cache(time_add_setting): """ s This decorator works as follows: Call it with a setting and after that use the function with a callable that returns the key. But: This function is only called if the key is not available. After a certain amount of time (`time_add_setting`) the cache is invalid. """ def _temp(key_func): dct = {} _time_caches[time_add_setting] = dct def wrapper(*args, **kwargs): generator = key_func(*args, **kwargs) key = next(generator) try: expiry, value = dct[key] if expiry > time.time(): return value except KeyError: pass value = next(generator) time_add = getattr(settings, time_add_setting) if key is not None: dct[key] = time.time() + time_add, value return value return wrapper return _temp @time_cache("call_signatures_validity") def cache_call_signatures(evaluator, call, source, user_pos): """This function calculates the cache key.""" index = user_pos[0] - 1 lines = common.splitlines(source) before_cursor = lines[index][:user_pos[1]] other_lines = lines[call.start_pos[0]:index] whole = '\n'.join(other_lines + [before_cursor]) before_bracket = re.match(r'.*\(', whole, re.DOTALL) module_path = call.get_parent_until().path yield None if module_path is None else (module_path, before_bracket, call.start_pos) yield evaluator.eval_element(call) def underscore_memoization(func): """ Decorator for methods:: class A(object): def x(self): if self._x: self._x = 10 return self._x Becomes:: class A(object): @underscore_memoization def x(self): return 10 A now has an attribute ``_x`` written by this decorator. 
""" name = '_' + func.__name__ def wrapper(self): try: return getattr(self, name) except AttributeError: result = func(self) if inspect.isgenerator(result): result = list(result) setattr(self, name, result) return result return wrapper def memoize_method(method): """A normal memoize function.""" def wrapper(self, *args, **kwargs): dct = self.__dict__.setdefault('_memoize_method_dct', {}) key = (args, frozenset(kwargs.items())) try: return dct[key] except KeyError: result = method(self, *args, **kwargs) dct[key] = result return result return wrapper def memoize_function(obj): """ A normal memoize function for memoizing free functions. """ cache = obj.cache = {} def memoizer(*args, **kwargs): key = str(args) + str(kwargs) if key not in cache: cache[key] = obj(*args, **kwargs) return cache[key] return memoizer def cache_star_import(func): @time_cache("star_import_cache_validity") def wrapper(self): yield self.base # The cache key yield func(self) return wrapper def _invalidate_star_import_cache_module(module, only_main=False): """ Important if some new modules are being reparsed """ try: t, modules = _time_caches['star_import_cache_validity'][module] except KeyError: pass else: del _time_caches['star_import_cache_validity'][module] def invalidate_star_import_cache(path): """On success returns True.""" try: parser_cache_item = parser_cache[path] except KeyError: pass else: _invalidate_star_import_cache_module(parser_cache_item.parser.module) def load_parser(path): """ Returns the module or None, if it fails. """ p_time = os.path.getmtime(path) if path else None try: parser_cache_item = parser_cache[path] if not path or p_time <= parser_cache_item.change_time: return parser_cache_item.parser else: # In case there is already a module cached and this module # has to be reparsed, we also need to invalidate the import # caches. _invalidate_star_import_cache_module(parser_cache_item.parser.module) except KeyError: if settings.use_filesystem_cache: return ParserPickling.load_parser(path, p_time) def save_parser(path, parser, pickling=True): try: p_time = None if path is None else os.path.getmtime(path) except OSError: p_time = None pickling = False item = ParserCacheItem(parser, p_time) parser_cache[path] = item if settings.use_filesystem_cache and pickling: ParserPickling.save_parser(path, item) class ParserPickling(object): version = 24 """ Version number (integer) for file system cache. Increment this number when there are any incompatible changes in parser representation classes. For example, the following changes are regarded as incompatible. - Class name is changed. - Class is moved to another module. - Defined slot of the class is changed. """ def __init__(self): self.__index = None self.py_tag = 'cpython-%s%s' % sys.version_info[:2] """ Short name for distinguish Python implementations and versions. It's like `sys.implementation.cache_tag` but for Python < 3.3 we generate something similar. See: http://docs.python.org/3/library/sys.html#sys.implementation .. todo:: Detect interpreter (e.g., PyPy). 
""" def load_parser(self, path, original_changed_time): try: pickle_changed_time = self._index[path] except KeyError: return None if original_changed_time is not None \ and pickle_changed_time < original_changed_time: # the pickle file is outdated return None with open(self._get_hashed_path(path), 'rb') as f: try: gc.disable() parser_cache_item = pickle.load(f) finally: gc.enable() debug.dbg('pickle loaded: %s', path) parser_cache[path] = parser_cache_item return parser_cache_item.parser def save_parser(self, path, parser_cache_item): self.__index = None try: files = self._index except KeyError: files = {} self._index = files with open(self._get_hashed_path(path), 'wb') as f: pickle.dump(parser_cache_item, f, pickle.HIGHEST_PROTOCOL) files[path] = parser_cache_item.change_time self._flush_index() @property def _index(self): if self.__index is None: try: with open(self._get_path('index.json')) as f: data = json.load(f) except (IOError, ValueError): self.__index = {} else: # 0 means version is not defined (= always delete cache): if data.get('version', 0) != self.version: self.clear_cache() self.__index = {} else: self.__index = data['index'] return self.__index def _remove_old_modules(self): # TODO use change = False if change: self._flush_index(self) self._index # reload index def _flush_index(self): data = {'version': self.version, 'index': self._index} with open(self._get_path('index.json'), 'w') as f: json.dump(data, f) self.__index = None def clear_cache(self): shutil.rmtree(self._cache_directory()) def _get_hashed_path(self, path): return self._get_path('%s.pkl' % hashlib.md5(path.encode("utf-8")).hexdigest()) def _get_path(self, file): dir = self._cache_directory() if not os.path.exists(dir): os.makedirs(dir) return os.path.join(dir, file) def _cache_directory(self): return os.path.join(settings.cache_directory, self.py_tag) # is a singleton ParserPickling = ParserPickling()
[((106, 12, 106, 37), 'jedi.common.splitlines', 'common.splitlines', ({(106, 30, 106, 36): 'source'}, {}), '(source)', False, 'from jedi import common\n'), ((111, 21, 111, 56), 're.match', 're.match', ({(111, 30, 111, 37): '""".*\\\\("""', (111, 39, 111, 44): 'whole', (111, 46, 111, 55): 're.DOTALL'}, {}), "('.*\\\\(', whole, re.DOTALL)", False, 'import re\n'), ((210, 13, 210, 35), 'os.path.getmtime', 'os.path.getmtime', ({(210, 30, 210, 34): 'path'}, {}), '(path)', False, 'import os\n'), ((283, 8, 283, 44), 'jedi.debug.dbg', 'debug.dbg', ({(283, 18, 283, 37): '"""pickle loaded: %s"""', (283, 39, 283, 43): 'path'}, {}), "('pickle loaded: %s', path)", False, 'from jedi import debug\n'), ((341, 15, 341, 38), 'os.path.join', 'os.path.join', ({(341, 28, 341, 31): 'dir', (341, 33, 341, 37): 'file'}, {}), '(dir, file)', False, 'import os\n'), ((344, 15, 344, 66), 'os.path.join', 'os.path.join', ({(344, 28, 344, 52): 'settings.cache_directory', (344, 54, 344, 65): 'self.py_tag'}, {}), '(settings.cache_directory, self.py_tag)', False, 'import os\n'), ((44, 26, 44, 37), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((227, 43, 227, 65), 'os.path.getmtime', 'os.path.getmtime', ({(227, 60, 227, 64): 'path'}, {}), '(path)', False, 'import os\n'), ((296, 12, 296, 70), 'pickle.dump', 'pickle.dump', ({(296, 24, 296, 41): 'parser_cache_item', (296, 43, 296, 44): 'f', (296, 46, 296, 69): 'pickle.HIGHEST_PROTOCOL'}, {}), '(parser_cache_item, f, pickle.HIGHEST_PROTOCOL)', False, 'import pickle\n'), ((328, 12, 328, 30), 'json.dump', 'json.dump', ({(328, 22, 328, 26): 'data', (328, 28, 328, 29): 'f'}, {}), '(data, f)', False, 'import json\n'), ((339, 15, 339, 34), 'os.path.exists', 'os.path.exists', ({(339, 30, 339, 33): 'dir'}, {}), '(dir)', False, 'import os\n'), ((340, 12, 340, 28), 'os.makedirs', 'os.makedirs', ({(340, 24, 340, 27): 'dir'}, {}), '(dir)', False, 'import os\n'), ((144, 15, 144, 42), 'inspect.isgenerator', 'inspect.isgenerator', ({(144, 35, 144, 41): 'result'}, {}), '(result)', False, 'import inspect\n'), ((278, 16, 278, 28), 'gc.disable', 'gc.disable', ({}, {}), '()', False, 'import gc\n'), ((279, 36, 279, 50), 'pickle.load', 'pickle.load', ({(279, 48, 279, 49): 'f'}, {}), '(f)', False, 'import pickle\n'), ((281, 16, 281, 27), 'gc.enable', 'gc.enable', ({}, {}), '()', False, 'import gc\n'), ((66, 23, 66, 34), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((88, 28, 88, 39), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((306, 27, 306, 39), 'json.load', 'json.load', ({(306, 37, 306, 38): 'f'}, {}), '(f)', False, 'import json\n'), ((96, 27, 96, 38), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n')]
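memoize_method above keys on (args, frozenset(kwargs.items())) and stashes results in a dict on the instance itself, so the cache dies with the object rather than leaking globally. A small self-contained illustration; the Fib class is hypothetical, not from jedi:

class Fib(object):
    @memoize_method
    def value(self, n):
        # Naive recursion, but each n is computed once per instance.
        return n if n < 2 else self.value(n - 1) + self.value(n - 2)

f = Fib()
print(f.value(30))                           # 832040, fast thanks to memoization
print('_memoize_method_dct' in f.__dict__)   # True: the cache lives on the instance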
adarshrs/Drone-Simulator-for-ROS-Kinetic
sandia_hand/ros/sandia_hand_teleop/simple_grasp/simple_grasp.py
a44eef1bcaacc55539325bba663f0c8abfd7c75b
#!/usr/bin/env python
#
# Software License Agreement (Apache License)
#
# Copyright 2013 Open Source Robotics Foundation
# Author: Morgan Quigley
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import roslib; roslib.load_manifest('sandia_hand_teleop')
import rospy
import sys
from sandia_hand_msgs.srv import SimpleGraspSrv, SimpleGraspSrvResponse, SimpleGraspWithSlew, SimpleGraspWithSlewResponse
from sandia_hand_msgs.msg import SimpleGrasp
from osrf_msgs.msg import JointCommands

g_jc_pub = None
g_jc = JointCommands()
g_prev_jc_target = JointCommands()

def grasp_srv(req):
    grasp_cb(req.grasp)
    return SimpleGraspSrvResponse()

def grasp_slew_srv(req):
    #print "going to %s in %.3f" % (req.grasp.name, req.slew_duration)
    rate = rospy.Rate(100.0)
    t_start = rospy.Time.now()
    t_end = t_start + rospy.Duration(req.slew_duration)
    while rospy.Time.now() < t_end:
        dt = (rospy.Time.now() - t_start).to_sec()
        dt_norm = dt / req.slew_duration
        #print "%.3f" % dt_norm
        grasp_spline(req.grasp.name, req.grasp.closed_amount, dt_norm)
        rate.sleep()
    grasp_spline(req.grasp.name, req.grasp.closed_amount, 1.0)
    return SimpleGraspWithSlewResponse()

def grasp_spline(grasp_name, closed_amount, spline_amount):
    global g_jc_pub, g_jc, g_prev_jc_target
    #print "request: grasp [%s] amount [%f]" % (grasp_name, closed_amount)
    # save some typing
    gn = grasp_name
    x = closed_amount
    if x < 0:
        x = 0
    elif x > 1:
        x = 1
    origin = [0] * 12
    g0 = [0] * 12
    if (gn == "cylindrical"):
        g0 = [0,1.5,1.7, 0,1.5,1.7, 0,1.5,1.7, 0.2,.8,1.2]
    elif (gn == "spherical"):
        origin = [-0.7,0,0, 0.1,0,0, 0.7,0,0, 0,0,0]
        g0 = [0,1.4,1.4, 0,1.4,1.4, 0,1.4,1.4, 0,0.7,0.7]
    elif (gn == "prismatic"):
        origin = [0,1.4,0, 0,1.4,0, 0,1.4,0, -0.1,0.8,-0.8]
        g0 = [0,0,1.4, 0,0,1.4, 0,0,1.4, 0,0,1.4]
    elif (gn == "finger_0_test"):
        g0 = [0,1.5,1.7, 0,0,0, 0,0,0, 0,0,0]
    elif (gn == "number_one"):
        origin = [0,0,0, 0,1.5,1.5, 0,1.5,1.5, 0.4,0.8,1]
    elif (gn == "peace"):
        origin = [-0.2,0,0, 0.05,0,0, 0,1.5,1.5, 0.4,0.8,1]
    elif (gn == "asl_a"):
        origin = [0,1.5,1.5, 0,1.5,1.5, 0,1.5,1.5, 1.5,0.9,0.2]
    elif (gn == "asl_b"):
        origin = [0.1,0,0, 0,0,0, -0.1,0,0, 1,0.8,0.9]
    elif (gn == "asl_c"):
        origin = [0,0.7,0.9, 0,0.7,0.9, 0,0.7,0.9, 0,0.4,0.4]
    elif (gn == "asl_d"):
        origin = [0,0,0, 0,1.5,1.5, 0,1.5,1.5, 0.4,0.8,1]
    elif (gn == "asl_e"):
        origin = [0,1,1.8, 0,1,1.8, 0,1,1.8, 1.5,0.6,1]
    elif (gn == "asl_f"):
        origin = [0,1.3,1.2, 0.1,0,0, 0.2,0,0, 0.3,0.7,0.7]
    elif (gn == "asl_g"):
        origin = [0,1.5,0, 0,1.5,1.5, 0,1.5,1.5, 0,1,-.4]
    elif (gn == "asl_h"):
        origin = [0.1,1.5,0, 0,1.5,0, 0,1.5,1.5, 0,1,0.6]
    elif (gn == "asl_i"):
        origin = [0,1.5,1.5, 0,1.5,1.5, 0,0,0, 1.5,1.0,0.3]
    elif (gn == "asl_j"):
        origin = [0,1.5,1.5, 0,1.5,1.5, 0,0,0, 1.5,1.0,0.3]
        g0 = [0,0,0, 0,0,0, 0,0,0, 0.5,1,1]
        g1 = [0,0,0, 0,0,0, 0,0,0, 0,1,1]
    elif (gn == "asl_k"):
        origin = [0,0,0, 0,1.5,0, 0,1.5,1.5, 1.5,1.0,0.3]
    elif (gn == "asl_l"):
        origin = [0,0,0, 0,1.5,1.5, 0,1.5,1.5, 1.5,0,0]
    elif (gn == "asl_m"):
        origin = [0,1,1.5, 0,1,1.5, 0,1,1.5, 0,1,1]
    elif (gn == "asl_n"):
        origin = [0,1,1.5, 0,1,1.5, 0,1.5,1.5, 0,1,1]
    elif (gn == "asl_o"):
        origin = [0.1,1.3,1.2, 0,1.3,1.2, -0.1,1.3,1.2, 0.2,0.8,0.5]
    elif (gn == "asl_p"):
        origin = [0,0,0, 0,1.5,0, 0,1.5,1.5, 1.5,1,0.3]
    elif (gn == "asl_q"):
        origin = [0,1.3,1.2, 0,1.5,1.5, 0,1.5,1.5, 0.4,0.8,0.5]
    elif (gn == "asl_r"):
        origin = [0.1,0,0, -0.1,0,0, 0,1.5,1.5, 0,1,1]
    elif (gn == "asl_s"):
        origin = [0,1.5,1.5, 0,1.5,1.5, 0,1.5,1.5, 0,1,0.2]
    elif (gn == "asl_t"):
        origin = [-.4,1.3,1.5, 0,1.5,1.5, 0,1.5,1.5, 0.4,1,1]
    elif (gn == "asl_u"):
        origin = [0,0,0, 0,0,0, 0,1.5,1.5, 0,1,1]
    elif (gn == "asl_v"):
        origin = [-0.3,0,0, 0.1,0,0, 0,1.5,1.5, 0,1,1]
    elif (gn == "asl_w"):
        origin = [-0.3,0,0, 0,0,0, 0.3,0,0, 0,1,1]
    elif (gn == "asl_x"):
        origin = [0,0,1.5, 0,1.5,1.5, 0,1.5,1.5, 0,1,1]
    elif (gn == "asl_y"):
        origin = [0,1.5,1.5, 0,1.5,1.5, 0.3,0,0, 1.5,0,0]
    elif (gn == "asl_z"):
        origin = [0,1.0,0, 0,1.5,1.5, 0,1.5,1.5, 0.4,0.8,0.8]
        g0 = [0.3,0.3,0, 0,0,0, 0,0,0, 0,0,0]
        g1 = [-0.3,0.3,0, 0,0,0, 0,0,0, 0,0,0]
    else:
        return None  # bogus
    g_jc.position = [0] * 12
    if (spline_amount < 0):
        spline_amount = 0
    elif (spline_amount > 1):
        spline_amount = 1
    for i in xrange(0, 12):
        target = origin[i] + g0[i] * x
        prev_target = g_prev_jc_target.position[i]
        #g_jc.position[i] = origin[i] + g0[i] * x
        #delta = target - g_prev_jc_target.position[i]
        # compute convex combination between old and new targets
        g_jc.position[i] = (      spline_amount) * target + \
                           (1.0 - spline_amount) * prev_target
    #print "joint state: %s" % (str(g_jc.position))
    g_jc_pub.publish(g_jc)
    if (spline_amount == 1.0):
        for i in xrange(0, 12):
            g_prev_jc_target.position[i] = g_jc.position[i]  # todo: make this better

def grasp_cb(msg):
    grasp_spline(msg.name, msg.closed_amount, 1)

if __name__ == '__main__':
    rospy.init_node('simple_grasp')
    g_jc.name = ["f0_j0", "f0_j1", "f0_j2", "f1_j0", "f1_j1", "f1_j2", "f2_j0", "f2_j1", "f2_j2", "f3_j0", "f3_j1", "f3_j2"]
    g_jc.position = [0] * 12
    g_prev_jc_target.position = [0] * 12
    g_jc_pub = rospy.Publisher('joint_commands', JointCommands, queue_size=1)  # same namespace
    g_jc_srv = rospy.Service('simple_grasp', SimpleGraspSrv, grasp_srv)
    g_sgws_srv = rospy.Service('simple_grasp_with_slew', SimpleGraspWithSlew, grasp_slew_srv)
    g_jc_sub = rospy.Subscriber('simple_grasp', SimpleGrasp, grasp_cb)
    print "simple grasp service is now running."
    rospy.spin()
[]
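The slewing in grasp_spline is linear interpolation per joint: position = s*target + (1-s)*previous, with s swept from 0 to 1 at 100 Hz. A plain-Python sketch of that interpolation, with no ROS dependency; the joint values and step count are made up for illustration:

def slew(prev, target, steps=5):
    # Yield convex combinations moving from prev toward target, as grasp_spline does.
    for k in range(1, steps + 1):
        s = float(k) / steps
        yield [s * t + (1.0 - s) * p for p, t in zip(prev, target)]

for pose in slew([0.0, 0.0, 0.0], [0.0, 1.5, 1.7], steps=3):
    print(["%.2f" % v for v in pose])  # three intermediate poses ending at the target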
edzzn/Manejo_Liberia
ui/ui_prestamo_libros.py
c735d35b32fc53839acfc48d4e088e69983edf16
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'PrestamoDeLibros.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!

from PyQt4 import QtCore, QtGui

try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s

try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)

class Ui_Form(object):
    def setupUi(self, Form):
        Form.setObjectName(_fromUtf8("Form"))
        Form.resize(400, 300)
        self.pushButton = QtGui.QPushButton(Form)
        self.pushButton.setGeometry(QtCore.QRect(140, 70, 121, 41))
        self.pushButton.setObjectName(_fromUtf8("pushButton"))
        self.pushButton_2 = QtGui.QPushButton(Form)
        self.pushButton_2.setGeometry(QtCore.QRect(140, 160, 121, 41))
        self.pushButton_2.setObjectName(_fromUtf8("pushButton_2"))

        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        Form.setWindowTitle(_translate("Form", "Form", None))
        self.pushButton.setText(_translate("Form", "Solicitar", None))
        self.pushButton_2.setText(_translate("Form", "Reservar", None))


if __name__ == "__main__":
    import sys
    app = QtGui.QApplication(sys.argv)
    Form = QtGui.QWidget()
    ui = Ui_Form()
    ui.setupUi(Form)
    Form.show()
    sys.exit(app.exec_())
[((47, 10, 47, 38), 'PyQt4.QtGui.QApplication', 'QtGui.QApplication', ({(47, 29, 47, 37): 'sys.argv'}, {}), '(sys.argv)', False, 'from PyQt4 import QtCore, QtGui\n'), ((48, 11, 48, 26), 'PyQt4.QtGui.QWidget', 'QtGui.QWidget', ({}, {}), '()', False, 'from PyQt4 import QtCore, QtGui\n'), ((20, 15, 20, 79), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', ({(20, 44, 20, 51): 'context', (20, 53, 20, 57): 'text', (20, 59, 20, 67): 'disambig', (20, 69, 20, 78): '_encoding'}, {}), '(context, text, disambig, _encoding)', False, 'from PyQt4 import QtCore, QtGui\n'), ((29, 26, 29, 49), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', ({(29, 44, 29, 48): 'Form'}, {}), '(Form)', False, 'from PyQt4 import QtCore, QtGui\n'), ((32, 28, 32, 51), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', ({(32, 46, 32, 50): 'Form'}, {}), '(Form)', False, 'from PyQt4 import QtCore, QtGui\n'), ((37, 8, 37, 51), 'PyQt4.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', ({(37, 46, 37, 50): 'Form'}, {}), '(Form)', False, 'from PyQt4 import QtCore, QtGui\n'), ((23, 15, 23, 68), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', ({(23, 44, 23, 51): 'context', (23, 53, 23, 57): 'text', (23, 59, 23, 67): 'disambig'}, {}), '(context, text, disambig)', False, 'from PyQt4 import QtCore, QtGui\n'), ((30, 36, 30, 66), 'PyQt4.QtCore.QRect', 'QtCore.QRect', ({(30, 49, 30, 52): '(140)', (30, 54, 30, 56): '(70)', (30, 58, 30, 61): '(121)', (30, 63, 30, 65): '(41)'}, {}), '(140, 70, 121, 41)', False, 'from PyQt4 import QtCore, QtGui\n'), ((33, 38, 33, 69), 'PyQt4.QtCore.QRect', 'QtCore.QRect', ({(33, 51, 33, 54): '(140)', (33, 56, 33, 59): '(160)', (33, 61, 33, 64): '(121)', (33, 66, 33, 68): '(41)'}, {}), '(140, 160, 121, 41)', False, 'from PyQt4 import QtCore, QtGui\n')]
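The generated Ui_Form only lays out the two buttons; behavior is conventionally wired in a subclass so regenerating the .ui file does not clobber it. A hypothetical subclass sketch (the PrestamoForm class and its slot bodies are assumptions, not part of the record):

from PyQt4 import QtGui

class PrestamoForm(QtGui.QWidget, Ui_Form):
    def __init__(self):
        super(PrestamoForm, self).__init__()
        self.setupUi(self)
        self.pushButton.clicked.connect(self.solicitar)    # "Solicitar"
        self.pushButton_2.clicked.connect(self.reservar)   # "Reservar"

    def solicitar(self):
        print("solicitar libro")  # placeholder action

    def reservar(self):
        print("reservar libro")   # placeholder action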
zopefoundation/zope.app.content
src/zope/app/content/__init__.py
d4c0276ff90bceed2156d808ab6b42b85d7b3810
##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Content Type convenience lookup functions."""
from zope.interface import provider
from zope.interface import providedBy
from zope.schema.interfaces import IVocabularyFactory

from zope.app.content.interfaces import IContentType
from zope.componentvocabulary.vocabulary import UtilityVocabulary
from zope.security.proxy import removeSecurityProxy


def queryType(object, interface):
    """Returns the object's interface which implements interface.

    >>> from zope.interface import Interface
    >>> class IContentType(Interface):
    ...     pass
    >>> from zope.interface import Interface, implementer, directlyProvides
    >>> class I(Interface):
    ...     pass
    >>> class J(Interface):
    ...     pass
    >>> directlyProvides(I, IContentType)
    >>> @implementer(I)
    ... class C(object):
    ...     pass
    >>> @implementer(J, I)
    ... class D(object):
    ...     pass
    >>> obj = C()
    >>> c1_ctype = queryType(obj, IContentType)
    >>> c1_ctype.__name__
    'I'
    >>> class I1(I):
    ...     pass
    >>> class I2(I1):
    ...     pass
    >>> class I3(Interface):
    ...     pass
    >>> @implementer(I1)
    ... class C1(object):
    ...     pass
    >>> obj1 = C1()
    >>> c1_ctype = queryType(obj1, IContentType)
    >>> c1_ctype.__name__
    'I'
    >>> @implementer(I2)
    ... class C2(object):
    ...     pass
    >>> obj2 = C2()
    >>> c2_ctype = queryType(obj2, IContentType)
    >>> c2_ctype.__name__
    'I'
    >>> @implementer(I3)
    ... class C3(object):
    ...     pass
    >>> obj3 = C3()

    If Interface doesn't provide `IContentType`, `queryType` returns ``None``.

    >>> c3_ctype = queryType(obj3, IContentType)
    >>> c3_ctype
    >>> c3_ctype is None
    True
    >>> class I4(I):
    ...     pass
    >>> directlyProvides(I4, IContentType)
    >>> @implementer(I4)
    ... class C4(object):
    ...     pass
    >>> obj4 = C4()
    >>> c4_ctype = queryType(obj4, IContentType)
    >>> c4_ctype.__name__
    'I4'
    """
    # Remove the security proxy, so that we can introspect the type of the
    # object's interfaces.
    naked = removeSecurityProxy(object)
    object_iro = providedBy(naked).__iro__
    for iface in object_iro:
        if interface.providedBy(iface):
            return iface

    return None


def queryContentType(object):
    """Returns the interface implemented by object which implements
    :class:`zope.app.content.interfaces.IContentType`.

    >>> from zope.interface import Interface, implementer, directlyProvides
    >>> class I(Interface):
    ...     pass
    >>> directlyProvides(I, IContentType)
    >>> @implementer(I)
    ... class C(object):
    ...     pass
    >>> obj = C()
    >>> c1_ctype = queryContentType(obj)
    >>> c1_ctype.__name__
    'I'
    """
    return queryType(object, IContentType)


@provider(IVocabularyFactory)
class ContentTypesVocabulary(UtilityVocabulary):
    interface = IContentType
[((129, 1, 129, 29), 'zope.interface.provider', 'provider', ({(129, 10, 129, 28): 'IVocabularyFactory'}, {}), '(IVocabularyFactory)', False, 'from zope.interface import provider\n'), ((99, 12, 99, 39), 'zope.security.proxy.removeSecurityProxy', 'removeSecurityProxy', ({(99, 32, 99, 38): 'object'}, {}), '(object)', False, 'from zope.security.proxy import removeSecurityProxy\n'), ((100, 17, 100, 34), 'zope.interface.providedBy', 'providedBy', ({(100, 28, 100, 33): 'naked'}, {}), '(naked)', False, 'from zope.interface import providedBy\n')]
yschiebelhut/ewm-cloud-robotics
python-modules/robcoewmrobotconfigurator/robcoewmrobotconfigurator/run.py
bdf3a6c13850d266b70168912494300c32d4d803
#!/usr/bin/env python3
# encoding: utf-8
#
# Copyright (c) 2019 SAP SE or an SAP affiliate company. All rights reserved.
#
# This file is part of ewm-cloud-robotics
# (see https://github.com/SAP/ewm-cloud-robotics).
#
# This file is licensed under the Apache Software License, v. 2 except as noted
# otherwise in the LICENSE file (https://github.com/SAP/ewm-cloud-robotics/blob/master/LICENSE)
#

"""Run the SAP EWM robot configurator."""

import sys
import signal
import traceback
import logging
import time

from robcoewmrobotconfigurator.ewm_robot_sync import EWMRobotSync
from robcoewmrobotconfigurator.robotconfigcontroller import RobotConfigurationController
from robcoewmrobotconfigurator.robco_robot_api import RobCoRobotAPI

_LOGGER = logging.getLogger(__name__)


class MainLoopController:
    """Control the main loop."""

    def __init__(self):
        """Construct."""
        # Shutdown Handler
        self.shutdown = False
        signal.signal(signal.SIGINT, self.exit_gracefully)
        signal.signal(signal.SIGTERM, self.exit_gracefully)
        # Sleep handler
        self.last_time = time.time()

    def exit_gracefully(self, signum, frame):
        """Set shutdown flag on SIGTERM and SIGINT."""
        self.shutdown = True
        _LOGGER.info('Closing application because signal %s received', signum)

    def sleep(self, seconds: float):
        """Sleep maximum n seconds after the last call."""
        timediff = time.time() - self.last_time
        if timediff < seconds:
            time.sleep(seconds-timediff)
        self.last_time = time.time()


def run_robotconfigurator():
    """Run one instance of the robot configurator."""
    # Register handler to control main loop
    loop_control = MainLoopController()
    # Create CR watcher instances
    k8s_rb = RobCoRobotAPI()
    k8s_rc = RobotConfigurationController()
    # Create EWM robot syncer instance
    robotsync = EWMRobotSync(k8s_rc)
    # Register callback functions
    k8s_rb.register_callback('ConfigurationController', ['ADDED'], k8s_rc.robco_robot_cb)
    k8s_rc.register_callback(
        'EWMRobotSync', ['ADDED', 'MODIFIED', 'REPROCESS'], robotsync.robotconfiguration_cb)
    # Start
    k8s_rb.run()
    k8s_rc.run(reprocess=True)

    _LOGGER.info('SAP EWM Robot Configurator started')

    try:
        # Looping while K8S watchers are running
        while loop_control.shutdown is False:
            # Refresh bearer token when using OAuth
            if robotsync.odataconfig.authorization == robotsync.odataconfig.AUTH_OAUTH:
                robotsync.odatahandler.refresh_access_token()
            # Check if K8S CR handler exception occured
            for k, exc in k8s_rb.thread_exceptions.items():
                _LOGGER.error(
                    'Uncovered exception in "%s" thread of RobCoRobotAPI. Raising it in main '
                    'thread', k)
                raise exc
            for k, exc in k8s_rc.thread_exceptions.items():
                _LOGGER.error(
                    'Uncovered exception in "%s" thread of RobotConfigurationController. Raising '
                    'it in main thread', k)
                raise exc
            # Sleep maximum 1.0 second
            loop_control.sleep(1.0)
    except KeyboardInterrupt:
        _LOGGER.info('Keyboard interrupt - terminating')
    except SystemExit:
        _LOGGER.info('System exit - terminating')
    finally:
        # Stop K8S CR watchers
        _LOGGER.info('Stopping K8S CR watchers')
        k8s_rb.stop_watcher()
        k8s_rc.stop_watcher()
        # Shutdown threadpool executor
        robotsync.executor.shutdown()


if __name__ == '__main__':
    # Create root logger if running as main program
    ROOT_LOGGER = logging.getLogger()
    ROOT_LOGGER.setLevel(logging.INFO)
    # Create console handler and set level to info
    CH = logging.StreamHandler()
    CH.setLevel(logging.INFO)
    # Create formatter
    FORMATTER = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    # Add formatter to ch
    CH.setFormatter(FORMATTER)
    # Add ch to logger
    ROOT_LOGGER.addHandler(CH)
    # Run robot master
    try:
        run_robotconfigurator()
    except Exception:  # pylint: disable=broad-except
        EXC_INFO = sys.exc_info()
        _LOGGER.critical(
            'Unexpected error "%s" - "%s" - TRACEBACK: %s', EXC_INFO[0],
            EXC_INFO[1], traceback.format_exception(*EXC_INFO))
        sys.exit('Application terminated with exception: "{}" - "{}"'.format(
            EXC_INFO[0], EXC_INFO[1]))
[((25, 10, 25, 37), 'logging.getLogger', 'logging.getLogger', ({(25, 28, 25, 36): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((61, 13, 61, 28), 'robcoewmrobotconfigurator.robco_robot_api.RobCoRobotAPI', 'RobCoRobotAPI', ({}, {}), '()', False, 'from robcoewmrobotconfigurator.robco_robot_api import RobCoRobotAPI\n'), ((62, 13, 62, 43), 'robcoewmrobotconfigurator.robotconfigcontroller.RobotConfigurationController', 'RobotConfigurationController', ({}, {}), '()', False, 'from robcoewmrobotconfigurator.robotconfigcontroller import RobotConfigurationController\n'), ((65, 16, 65, 36), 'robcoewmrobotconfigurator.ewm_robot_sync.EWMRobotSync', 'EWMRobotSync', ({(65, 29, 65, 35): 'k8s_rc'}, {}), '(k8s_rc)', False, 'from robcoewmrobotconfigurator.ewm_robot_sync import EWMRobotSync\n'), ((111, 18, 111, 37), 'logging.getLogger', 'logging.getLogger', ({}, {}), '()', False, 'import logging\n'), ((115, 9, 115, 32), 'logging.StreamHandler', 'logging.StreamHandler', ({}, {}), '()', False, 'import logging\n'), ((119, 16, 119, 89), 'logging.Formatter', 'logging.Formatter', ({(119, 34, 119, 88): '"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'}, {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')", False, 'import logging\n'), ((35, 8, 35, 58), 'signal.signal', 'signal.signal', ({(35, 22, 35, 35): 'signal.SIGINT', (35, 37, 35, 57): 'self.exit_gracefully'}, {}), '(signal.SIGINT, self.exit_gracefully)', False, 'import signal\n'), ((36, 8, 36, 59), 'signal.signal', 'signal.signal', ({(36, 22, 36, 36): 'signal.SIGTERM', (36, 38, 36, 58): 'self.exit_gracefully'}, {}), '(signal.SIGTERM, self.exit_gracefully)', False, 'import signal\n'), ((38, 25, 38, 36), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((52, 25, 52, 36), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((47, 19, 47, 30), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((50, 12, 50, 40), 'time.sleep', 'time.sleep', ({(50, 23, 50, 39): '(seconds - timediff)'}, {}), '(seconds - timediff)', False, 'import time\n'), ((130, 19, 130, 33), 'sys.exc_info', 'sys.exc_info', ({}, {}), '()', False, 'import sys\n'), ((133, 12, 133, 49), 'traceback.format_exception', 'traceback.format_exception', ({(133, 39, 133, 48): '*EXC_INFO'}, {}), '(*EXC_INFO)', False, 'import traceback\n')]
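MainLoopController.sleep throttles the loop to at most one iteration per n seconds measured from the previous call, sleeping only for whatever remains of the interval instead of a fixed duration. A trimmed, standalone sketch of the same pattern without the K8s watchers:

import time

class RateLimiter:
    def __init__(self):
        self.last_time = time.time()

    def sleep(self, seconds):
        # Sleep only for the remainder of the interval since the last call.
        elapsed = time.time() - self.last_time
        if elapsed < seconds:
            time.sleep(seconds - elapsed)
        self.last_time = time.time()

limiter = RateLimiter()
for _ in range(3):
    pass               # do the loop's work here...
    limiter.sleep(1.0) # ...then cap the loop at roughly one iteration per second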
DanielSBrown/osf.io
website/addons/forward/views/__init__.py
98dda2ac237377197acacce78274bc0a4ce8f303
from . import config, widget # noqa
[]
crvallance/wlanpi-hwtest
hwtest/automated/usb3_test.py
8858ef6e8fa78767238b968b121b4d5ab2155701
from hwtest.shell_utils import run_command


def test_linux_usb3hub():
    """
    Test for Linux Foundation 3.0 root hub
    in `lsusb` output
    """
    resp = run_command(["lsusb"])
    assert "1d6b:0003" in resp
[((9, 11, 9, 33), 'hwtest.shell_utils.run_command', 'run_command', ({(9, 23, 9, 32): "['lsusb']"}, {}), "(['lsusb'])", False, 'from hwtest.shell_utils import run_command\n')]
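The test keys on USB ID 1d6b:0003, which is the Linux Foundation 3.0 (xHCI) root hub, so it passes on any board whose lsusb output lists such a hub. A companion check for the 2.0 root hub (1d6b:0002) would follow the same pattern; this sketch is illustrative and not part of the repo:

from hwtest.shell_utils import run_command


def test_linux_usb2hub():
    """Hypothetical companion test: Linux Foundation 2.0 root hub in `lsusb` output."""
    resp = run_command(["lsusb"])
    assert "1d6b:0002" in resp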
tp-m/meson
ninjabackend.py
2d1aa395e86848ca948d30d83cc5357777e5b490
# Copyright 2012-2014 The Meson development team # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import backends import environment, mesonlib import build import mlog import dependencies from mesonlib import File from meson_install import InstallData from build import InvalidArguments from coredata import MesonException import os, sys, pickle, re import subprocess, shutil if mesonlib.is_windows(): quote_char = '"' execute_wrapper = 'cmd /c' else: quote_char = "'" execute_wrapper = '' def ninja_quote(text): return text.replace(' ', '$ ').replace(':', '$:') class RawFilename(): def __init__(self, fname): self.fname = fname def split(self, c): return self.fname.split(c) def startswith(self, s): return self.fname.startswith(s) class NinjaBuildElement(): def __init__(self, outfilenames, rule, infilenames): if isinstance(outfilenames, str): self.outfilenames = [outfilenames] else: self.outfilenames = outfilenames assert(isinstance(rule, str)) self.rule = rule if isinstance(infilenames, str): self.infilenames = [infilenames] else: self.infilenames = infilenames self.deps = [] self.orderdeps = [] self.elems = [] def add_dep(self, dep): if isinstance(dep, list): self.deps += dep else: self.deps.append(dep) def add_orderdep(self, dep): if isinstance(dep, list): self.orderdeps += dep else: self.orderdeps.append(dep) def add_item(self, name, elems): if isinstance(elems, str): elems = [elems] self.elems.append((name, elems)) def write(self, outfile): line = 'build %s: %s %s' % (' '.join([ninja_quote(i) for i in self.outfilenames]),\ self.rule, ' '.join([ninja_quote(i) for i in self.infilenames])) if len(self.deps) > 0: line += ' | ' + ' '.join([ninja_quote(x) for x in self.deps]) if len(self.orderdeps) > 0: line += ' || ' + ' '.join([ninja_quote(x) for x in self.orderdeps]) line += '\n' # This is the only way I could find to make this work on all # platforms including Windows command shell. Slash is a dir separator # on Windows, too, so all characters are unambiguous and, more importantly, # do not require quoting. line = line.replace('\\', '/') outfile.write(line) for e in self.elems: (name, elems) = e should_quote = True if name == 'DEPFILE' or name == 'DESC' or name == 'pool': should_quote = False line = ' %s = ' % name q_templ = quote_char + "%s" + quote_char noq_templ = "%s" newelems = [] for i in elems: if not should_quote or i == '&&': # Hackety hack hack templ = noq_templ else: templ = q_templ i = i.replace('\\', '\\\\') if quote_char == '"': i = i.replace('"', '\\"') newelems.append(templ % ninja_quote(i)) line += ' '.join(newelems) line += '\n' outfile.write(line) outfile.write('\n') class NinjaBackend(backends.Backend): def __init__(self, build): super().__init__(build) self.source_suffix_in_objs = True self.ninja_filename = 'build.ninja' self.fortran_deps = {} self.all_outputs = {} def check_outputs(self, elem): for n in elem.outfilenames: if n in self.all_outputs: raise MesonException('Multiple producers for Ninja target "%s". Please rename your targets.' 
% n) self.all_outputs[n] = True def detect_vs_dep_prefix(self, outfile, tempfilename): '''VS writes its dependency in a locale dependent format. Detect the search prefix to use.''' if shutil.which('cl') is None: return outfile outfile.close() open(os.path.join(self.environment.get_scratch_dir(), 'incdetect.c'), 'w').write('''#include<stdio.h> int dummy; ''') pc = subprocess.Popen(['cl', '/showIncludes', '/c', 'incdetect.c'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.environment.get_scratch_dir()) (stdo, _) = pc.communicate() for line in stdo.split(b'\r\n'): if line.endswith(b'stdio.h'): matchstr = b':'.join(line.split(b':')[0:2]) + b':' binfile = open(tempfilename, 'ab') binfile.write(b'msvc_deps_prefix = ' + matchstr + b'\r\n') binfile.close() return open(tempfilename, 'a') raise MesonException('Could not determine vs dep dependency prefix string.') def generate(self, interp): self.interpreter = interp outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename) tempfilename = outfilename + '~' outfile = open(tempfilename, 'w') outfile.write('# This is the build file for project "%s"\n' % self.build.get_project()) outfile.write('# It is autogenerated by the Meson build system.\n') outfile.write('# Do not edit by hand.\n\n') outfile.write('ninja_required_version = 1.5.1\n\n') outfile = self.detect_vs_dep_prefix(outfile, tempfilename) self.generate_rules(outfile) self.generate_phony(outfile) outfile.write('# Build rules for targets\n\n') [self.generate_target(t, outfile) for t in self.build.get_targets().values()] if len(self.build.pot) > 0: outfile.write('# Build rules for localisation.\n\n') self.generate_po(outfile) outfile.write('# Test rules\n\n') self.generate_tests(outfile) outfile.write('# Install rules\n\n') self.generate_install(outfile) if self.environment.coredata.get_builtin_option('coverage'): outfile.write('# Coverage rules\n\n') self.generate_coverage_rules(outfile) outfile.write('# Suffix\n\n') self.generate_ending(outfile) # Only ovewrite the old build file after the new one has been # fully created. outfile.close() os.replace(tempfilename, outfilename) self.generate_compdb() # http://clang.llvm.org/docs/JSONCompilationDatabase.html def generate_compdb(self): ninja_exe = environment.detect_ninja() builddir = self.environment.get_build_dir() jsondb = subprocess.check_output([ninja_exe, '-t', 'compdb', 'c_COMPILER', 'cpp_COMPILER'], cwd=builddir) open(os.path.join(builddir, 'compile_commands.json'), 'wb').write(jsondb) # Get all generated headers. Any source file might need them so # we need to add an order dependency to them. 
def get_generated_headers(self, target): header_deps = [] for gensource in target.get_generated_sources(): if isinstance(gensource, build.CustomTarget): continue for src in gensource.get_outfilelist(): if self.environment.is_header(src): header_deps.append(os.path.join(self.get_target_private_dir(target), src)) for dep in target.link_targets: if isinstance(dep, (build.StaticLibrary, build.SharedLibrary)): header_deps += self.get_generated_headers(dep) return header_deps def generate_target(self, target, outfile): if isinstance(target, build.CustomTarget): self.generate_custom_target(target, outfile) if isinstance(target, build.RunTarget): self.generate_run_target(target, outfile) name = target.get_id() gen_src_deps = [] if name in self.processed_targets: return if isinstance(target, build.Jar): self.generate_jar_target(target, outfile) return if 'rust' in self.environment.coredata.compilers.keys() and self.has_rust(target): self.generate_rust_target(target, outfile) return if 'cs' in self.environment.coredata.compilers.keys() and self.has_cs(target): self.generate_cs_target(target, outfile) return if 'vala' in self.environment.coredata.compilers.keys() and self.has_vala(target): gen_src_deps += self.generate_vala_compile(target, outfile) if 'swift' in self.environment.coredata.compilers.keys() and self.has_swift(target): self.generate_swift_target(target, outfile) return self.scan_fortran_module_outputs(target) # The following deals with C/C++ compilation. (gen_src, gen_other_deps) = self.process_dep_gens(outfile, target) gen_src_deps += gen_src self.process_target_dependencies(target, outfile) self.generate_custom_generator_rules(target, outfile) outname = self.get_target_filename(target) obj_list = [] use_pch = self.environment.coredata.get_builtin_option('use_pch') is_unity = self.environment.coredata.get_builtin_option('unity') if use_pch and target.has_pch(): pch_objects = self.generate_pch(target, outfile) else: pch_objects = [] header_deps = gen_other_deps unity_src = [] unity_deps = [] # Generated sources that must be built before compiling a Unity target. header_deps += self.get_generated_headers(target) for gensource in target.get_generated_sources(): if isinstance(gensource, build.CustomTarget): for src in gensource.output: src = os.path.join(self.get_target_dir(gensource), src) if self.environment.is_source(src) and not self.environment.is_header(src): if is_unity: unity_deps.append(os.path.join(self.environment.get_build_dir(), RawFilename(src))) else: obj_list.append(self.generate_single_compile(target, outfile, RawFilename(src), True, header_deps)) elif self.environment.is_object(src): obj_list.append(src) elif self.environment.is_library(src): pass else: # Assume anything not specifically a source file is a header. This is because # people generate files with weird suffixes (.inc, .fh) that they then include # in their source files. 
header_deps.append(RawFilename(src)) else: for src in gensource.get_outfilelist(): if self.environment.is_object(src): obj_list.append(os.path.join(self.get_target_private_dir(target), src)) elif not self.environment.is_header(src): if is_unity: if self.has_dir_part(src): rel_src = src else: rel_src = os.path.join(self.get_target_private_dir(target), src) unity_deps.append(rel_src) abs_src = os.path.join(self.environment.get_build_dir(), rel_src) unity_src.append(abs_src) else: obj_list.append(self.generate_single_compile(target, outfile, src, True, header_deps=header_deps)) src_list = [] for src in gen_src_deps: src_list.append(src) if is_unity: unity_src.append(os.path.join(self.environment.get_build_dir(), src)) header_deps.append(src) else: # Generated targets are ordered deps because the must exist # before the sources compiling them are used. After the first # compile we get precise dependency info from dep files. # This should work in all cases. If it does not, then just # move them from orderdeps to proper deps. if self.environment.is_header(src): header_deps.append(src) else: obj_list.append(self.generate_single_compile(target, outfile, src, True, [], header_deps)) for src in target.get_sources(): if src.endswith('.vala'): continue if not self.environment.is_header(src): src_list.append(src) if is_unity: abs_src = os.path.join(self.environment.get_build_dir(), src.rel_to_builddir(self.build_to_src)) unity_src.append(abs_src) else: obj_list.append(self.generate_single_compile(target, outfile, src, False, [], header_deps)) obj_list += self.flatten_object_list(target) if is_unity: for src in self.generate_unity_files(target, unity_src): obj_list.append(self.generate_single_compile(target, outfile, src, True, unity_deps + header_deps)) linker = self.determine_linker(target, src_list) elem = self.generate_link(target, outfile, outname, obj_list, linker, pch_objects) self.generate_shlib_aliases(target, self.get_target_dir(target)) elem.write(outfile) self.processed_targets[name] = True def process_target_dependencies(self, target, outfile): for t in target.get_dependencies(): tname = t.get_basename() + t.type_suffix() if not tname in self.processed_targets: self.generate_target(t, outfile) def generate_custom_target(self, target, outfile): (srcs, ofilenames, cmd) = self.eval_custom_target_command(target) deps = [] for i in target.get_dependencies(): # FIXME, should not grab element at zero but rather expand all. if isinstance(i, list): i = i[0] fname = i.get_filename() if isinstance(fname, list): fname = fname[0] deps.append(os.path.join(self.get_target_dir(i), fname)) if target.build_always: deps.append('PHONY') elem = NinjaBuildElement(ofilenames, 'CUSTOM_COMMAND', srcs) for i in target.depend_files: if isinstance(i, mesonlib.File): deps.append(i.rel_to_builddir(self.build_to_src)) else: deps.append(os.path.join(self.build_to_src, i)) elem.add_dep(deps) for d in target.extra_depends: tmp = d.get_filename() if not isinstance(tmp, list): tmp = [tmp] for fname in tmp: elem.add_dep(os.path.join(self.get_target_dir(d), fname)) elem.add_item('COMMAND', cmd) elem.add_item('description', 'Generating %s with a custom command.' 
    def generate_run_target(self, target, outfile):
        runnerscript = os.path.join(self.environment.get_script_dir(), 'commandrunner.py')
        deps = []
        arg_strings = []
        for i in target.args:
            if isinstance(i, str):
                arg_strings.append(i)
            elif isinstance(i, (build.BuildTarget, build.CustomTarget)):
                relfname = self.get_target_filename(i)
                deps.append(relfname)
                arg_strings.append(os.path.join(self.environment.get_build_dir(), relfname))
            else:
                mlog.debug(str(i))
                raise MesonException('Unreachable code in generate_run_target.')
        elem = NinjaBuildElement(target.name, 'CUSTOM_COMMAND', deps)
        cmd = [sys.executable, runnerscript, self.environment.get_source_dir(),
               self.environment.get_build_dir(), target.subdir]
        texe = target.command
        try:
            texe = texe.held_object
        except AttributeError:
            pass
        if isinstance(texe, build.Executable):
            abs_exe = os.path.join(self.environment.get_build_dir(), self.get_target_filename(texe))
            deps.append(self.get_target_filename(texe))
            if self.environment.is_cross_build() \
               and self.environment.cross_info.config['binaries'].get('exe_wrapper', None) is not None:
                cmd += [self.environment.cross_info.config['binaries']['exe_wrapper']]
            cmd.append(abs_exe)
        else:
            cmd.append(target.command)
        cmd += arg_strings
        elem.add_item('COMMAND', cmd)
        elem.add_item('description', 'Running external command %s.' % target.name)
        elem.add_item('pool', 'console')
        elem.write(outfile)
        self.check_outputs(elem)
        self.processed_targets[target.name + target.type_suffix()] = True

    def generate_po(self, outfile):
        for p in self.build.pot:
            (packagename, languages, subdir) = p
            input_file = os.path.join(subdir, 'POTFILES')
            elem = NinjaBuildElement('pot', 'GEN_POT', [])
            elem.add_item('PACKAGENAME', packagename)
            elem.add_item('OUTFILE', packagename + '.pot')
            elem.add_item('FILELIST', os.path.join(self.environment.get_source_dir(), input_file))
            elem.add_item('OUTDIR', os.path.join(self.environment.get_source_dir(), subdir))
            elem.write(outfile)
            self.check_outputs(elem)
            for l in languages:
                infile = os.path.join(self.environment.get_source_dir(), subdir, l + '.po')
                outfilename = os.path.join(subdir, l + '.gmo')
                lelem = NinjaBuildElement(outfilename, 'GEN_GMO', infile)
                lelem.add_item('INFILE', infile)
                lelem.add_item('OUTFILE', outfilename)
                lelem.write(outfile)
                self.check_outputs(lelem)

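    # For a po package 'mypkg' with language 'fi', the elements above serialize
    # to ninja text of roughly this shape (a sketch, paths abbreviated):
    #
    #   build pot: GEN_POT
    #    PACKAGENAME = mypkg
    #    OUTFILE = mypkg.pot
    #    FILELIST = <srcdir>/po/POTFILES
    #    OUTDIR = <srcdir>/po
    #   build po/fi.gmo: GEN_GMO <srcdir>/po/fi.po
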
    def generate_coverage_rules(self, outfile):
        (gcovr_exe, lcov_exe, genhtml_exe) = environment.find_coverage_tools()
        added_rule = False
        if gcovr_exe:
            added_rule = True
            elem = NinjaBuildElement('coverage-xml', 'CUSTOM_COMMAND', '')
            elem.add_item('COMMAND', [gcovr_exe, '-x', '-r', self.environment.get_build_dir(),
                                      '-o', os.path.join(self.environment.get_log_dir(), 'coverage.xml')])
            elem.add_item('DESC', 'Generating XML coverage report.')
            elem.write(outfile)
            elem = NinjaBuildElement('coverage-text', 'CUSTOM_COMMAND', '')
            elem.add_item('COMMAND', [gcovr_exe, '-r', self.environment.get_build_dir(),
                                      '-o', os.path.join(self.environment.get_log_dir(), 'coverage.txt')])
            elem.add_item('DESC', 'Generating text coverage report.')
            elem.write(outfile)
            self.check_outputs(elem)
        if lcov_exe and genhtml_exe:
            added_rule = True
            phony_elem = NinjaBuildElement('coverage-html', 'phony', 'coveragereport/index.html')
            phony_elem.write(outfile)
            elem = NinjaBuildElement('coveragereport/index.html', 'CUSTOM_COMMAND', '')
            command = [lcov_exe, '--directory', self.environment.get_build_dir(),
                       '--capture', '--output-file', 'coverage.info', '--no-checksum',
                       '&&', genhtml_exe, '--prefix', self.environment.get_build_dir(),
                       '--output-directory', self.environment.get_log_dir(),
                       '--title', 'Code coverage', '--legend', '--show-details', 'coverage.info']
            elem.add_item('COMMAND', command)
            elem.add_item('DESC', 'Generating HTML coverage report.')
            self.check_outputs(elem)
            elem.write(outfile)
        if not added_rule:
            mlog.log(mlog.red('Warning:'), 'coverage requested but neither gcovr nor lcov/genhtml found.')

    def generate_install(self, outfile):
        script_root = self.environment.get_script_dir()
        install_script = os.path.join(script_root, 'meson_install.py')
        install_data_file = os.path.join(self.environment.get_scratch_dir(), 'install.dat')
        depfixer = os.path.join(script_root, 'depfixer.py')
        d = InstallData(self.environment.get_source_dir(),
                        self.environment.get_build_dir(),
                        self.environment.get_prefix(), depfixer)
        elem = NinjaBuildElement('install', 'CUSTOM_COMMAND', 'PHONY')
        elem.add_dep('all')
        elem.add_item('DESC', 'Installing files.')
        elem.add_item('COMMAND', [sys.executable, install_script, install_data_file])
        elem.add_item('pool', 'console')
        self.generate_depmf_install(d)
        self.generate_target_install(d)
        self.generate_header_install(d)
        self.generate_man_install(d)
        self.generate_data_install(d)
        self.generate_po_install(d, elem)
        self.generate_custom_install_script(d)
        self.generate_subdir_install(d)
        elem.write(outfile)
        self.check_outputs(elem)
        ofile = open(install_data_file, 'wb')
        pickle.dump(d, ofile)

    def generate_po_install(self, d, elem):
        for p in self.build.pot:
            (package_name, languages, subdir) = p
            # FIXME: assumes only one po package per source
            d.po_package_name = package_name
            for lang in languages:
                rel_src = os.path.join(subdir, lang + '.gmo')
                src_file = os.path.join(self.environment.get_build_dir(), rel_src)
                d.po.append((src_file, self.environment.coredata.get_builtin_option('localedir'), lang))
                elem.add_dep(rel_src)

    def generate_target_install(self, d):
        libdir = self.environment.get_libdir()
        bindir = self.environment.get_bindir()
        should_strip = self.environment.coredata.get_builtin_option('strip')
        for t in self.build.get_targets().values():
            if t.should_install():
                outdir = t.get_custom_install_dir()
                if outdir is None:
                    if isinstance(t, build.Executable):
                        outdir = bindir
                    else:
                        outdir = libdir
                i = [self.get_target_filename(t), outdir, t.get_aliaslist(),
                     should_strip, t.install_rpath]
                d.targets.append(i)

    def generate_custom_install_script(self, d):
        d.install_scripts = self.build.install_scripts

    def generate_header_install(self, d):
        incroot = self.environment.get_includedir()
        headers = self.build.get_headers()
        for h in headers:
            outdir = h.get_custom_install_dir()
            if outdir is None:
                outdir = os.path.join(incroot, h.get_install_subdir())
            for f in h.get_sources():
                abspath = os.path.join(self.environment.get_source_dir(), h.get_source_subdir(), f)
                i = [abspath, outdir]
                d.headers.append(i)

    def generate_man_install(self, d):
        manroot = self.environment.get_mandir()
        man = self.build.get_man()
        for m in man:
            for f in m.get_sources():
                num = f.split('.')[-1]
                subdir = m.get_custom_install_dir()
                if subdir is None:
                    subdir = os.path.join(manroot, 'man' + num)
                srcabs = os.path.join(self.environment.get_source_dir(), m.get_source_subdir(), f)
                dstabs = os.path.join(subdir, f + '.gz')
                i = [srcabs, dstabs]
                d.man.append(i)

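    # The meson_install.py script run by the 'install' target above loads the
    # pickled InstallData back; roughly like this (a sketch, the real script
    # does considerably more):
    #
    #   import pickle
    #   with open(install_data_file, 'rb') as f:
    #       d = pickle.load(f)
    #   for fname, outdir, aliases, should_strip, install_rpath in d.targets:
    #       ...  # copy fname into outdir, then strip / fix rpath as requested
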
    def generate_data_install(self, d):
        data = self.build.get_data()
        for de in data:
            assert(isinstance(de, build.Data))
            subdir = de.install_dir
            for f in de.sources:
                if de.in_sourcetree:
                    srcprefix = self.environment.get_source_dir()
                else:
                    srcprefix = self.environment.get_build_dir()
                srcabs = os.path.join(srcprefix, de.source_subdir, f)
                dstabs = os.path.join(subdir, f)
                i = [srcabs, dstabs]
                d.data.append(i)

    def generate_subdir_install(self, d):
        for sd in self.build.get_install_subdirs():
            src_dir = os.path.join(self.environment.get_source_dir(), sd.source_subdir, sd.installable_subdir)
            dst_dir = os.path.join(self.environment.get_prefix(), sd.install_dir)
            d.install_subdirs.append([src_dir, dst_dir])

    def write_test_suite_targets(self, cmd, outfile):
        suites = {}
        for t in self.build.get_tests():
            for s in t.suite:
                suites[s] = True
        suites = list(suites.keys())
        suites.sort()
        for s in suites:
            if s == '':
                visible_name = 'for top level tests'
            else:
                visible_name = s
            elem = NinjaBuildElement('test-' + s, 'CUSTOM_COMMAND', ['all', 'PHONY'])
            elem.add_item('COMMAND', cmd + ['--suite=' + s])
            elem.add_item('DESC', 'Running test suite %s.' % visible_name)
            elem.add_item('pool', 'console')
            elem.write(outfile)
            self.check_outputs(elem)

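    # write_test_suite_targets above turns each suite name into its own target;
    # for suites 'fast' and 'slow' the emitted ninja text is roughly:
    #
    #   build test-fast: CUSTOM_COMMAND all PHONY
    #    COMMAND = <python> <meson_test.py> <test_data> --suite=fast
    #    pool = console
    #   build test-slow: CUSTOM_COMMAND all PHONY
    #    COMMAND = <python> <meson_test.py> <test_data> --suite=slow
    #    pool = console
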
    def generate_tests(self, outfile):
        self.serialise_tests()
        valgrind = environment.find_valgrind()
        script_root = self.environment.get_script_dir()
        test_script = os.path.join(script_root, 'meson_test.py')
        test_data = os.path.join(self.environment.get_scratch_dir(), 'meson_test_setup.dat')
        cmd = [sys.executable, test_script, test_data]
        elem = NinjaBuildElement('test', 'CUSTOM_COMMAND', ['all', 'PHONY'])
        elem.add_item('COMMAND', cmd)
        elem.add_item('DESC', 'Running all tests.')
        elem.add_item('pool', 'console')
        elem.write(outfile)
        self.check_outputs(elem)
        self.write_test_suite_targets(cmd, outfile)
        if valgrind:
            velem = NinjaBuildElement('test-valgrind', 'CUSTOM_COMMAND', ['all', 'PHONY'])
            velem.add_item('COMMAND', cmd + ['--wrapper=' + valgrind])
            velem.add_item('DESC', 'Running test suite under Valgrind.')
            velem.add_item('pool', 'console')
            velem.write(outfile)
            self.check_outputs(velem)
        # And then benchmarks.
        benchmark_script = os.path.join(script_root, 'meson_benchmark.py')
        benchmark_data = os.path.join(self.environment.get_scratch_dir(), 'meson_benchmark_setup.dat')
        cmd = [sys.executable, benchmark_script, benchmark_data]
        elem = NinjaBuildElement('benchmark', 'CUSTOM_COMMAND', ['all', 'PHONY'])
        elem.add_item('COMMAND', cmd)
        elem.add_item('DESC', 'Running benchmark suite.')
        elem.add_item('pool', 'console')
        elem.write(outfile)
        self.check_outputs(elem)

    def generate_rules(self, outfile):
        outfile.write('# Rules for compiling.\n\n')
        self.generate_compile_rules(outfile)
        outfile.write('# Rules for linking.\n\n')
        if self.environment.is_cross_build():
            self.generate_static_link_rules(True, outfile)
        self.generate_static_link_rules(False, outfile)
        self.generate_dynamic_link_rules(outfile)
        outfile.write('# Other rules\n\n')
        outfile.write('rule CUSTOM_COMMAND\n')
        outfile.write(' command = $COMMAND\n')
        outfile.write(' description = $DESC\n')
        outfile.write(' restat = 1\n\n')
        outfile.write('rule REGENERATE_BUILD\n')
        c = (quote_char + ninja_quote(sys.executable) + quote_char,
             quote_char + ninja_quote(self.environment.get_build_command()) + quote_char,
             quote_char + ninja_quote(self.environment.get_source_dir()) + quote_char,
             quote_char + ninja_quote(self.environment.get_build_dir()) + quote_char)
        outfile.write(" command = %s %s %s %s --backend ninja secret-handshake\n" % c)
        outfile.write(' description = Regenerating build files\n')
        outfile.write(' generator = 1\n\n')
        if len(self.build.pot) > 0:
            self.generate_gettext_rules(outfile)
        outfile.write('\n')

    def generate_gettext_rules(self, outfile):
        rule = 'rule GEN_POT\n'
        command = " command = xgettext --package-name=$PACKAGENAME -p $OUTDIR -f $FILELIST -D '%s' -k_ -o $OUTFILE\n" % \
            self.environment.get_source_dir()
        desc = " description = Creating pot file for package $PACKAGENAME.\n"
        outfile.write(rule)
        outfile.write(command)
        outfile.write(desc)
        outfile.write('\n')
        rule = 'rule GEN_GMO\n'
        command = ' command = msgfmt $INFILE -o $OUTFILE\n'
        desc = ' description = Generating gmo file $OUTFILE\n'
        outfile.write(rule)
        outfile.write(command)
        outfile.write(desc)
        outfile.write('\n')

    def generate_phony(self, outfile):
        outfile.write('# Phony build target, always out of date\n')
        outfile.write('build PHONY: phony\n')
        outfile.write('\n')

    def generate_jar_target(self, target, outfile):
        fname = target.get_filename()
        subdir = target.get_subdir()
        outname_rel = os.path.join(self.get_target_dir(target), fname)
        src_list = target.get_sources()
        class_list = []
        compiler = self.get_compiler_for_source(src_list[0])
        assert(compiler.get_language() == 'java')
        # jar flag letters: c = create, m = manifest, e = entry point, f = output file.
        c = 'c'
        m = ''
        e = ''
        f = 'f'
        main_class = target.get_main_class()
        if main_class != '':
            e = 'e'
        for src in src_list:
            plain_class_path = self.generate_single_java_compile(src, target, compiler, outfile)
            class_list.append(plain_class_path)
        class_dep_list = [os.path.join(self.get_target_private_dir(target), i) for i in class_list]
        jar_rule = 'java_LINKER'
        commands = [c + m + e + f]
        if e != '':
            commands.append(main_class)
        commands.append(self.get_target_filename(target))
        for cls in class_list:
            commands += ['-C', self.get_target_private_dir(target), cls]
        elem = NinjaBuildElement(outname_rel, jar_rule, [])
        elem.add_dep(class_dep_list)
        elem.add_item('ARGS', commands)
        elem.write(outfile)
        self.check_outputs(elem)

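    # The ARGS assembled in generate_jar_target above correspond to a jar
    # invocation of this shape (a sketch; 'Main' and the paths are made up):
    #
    #   jar cef Main prog.jar -C <target-private-dir> Foo.class -C <target-private-dir> Bar.class
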
    def generate_cs_resource_tasks(self, target, outfile):
        args = []
        deps = []
        for r in target.resources:
            rel_sourcefile = os.path.join(self.build_to_src, target.subdir, r)
            if r.endswith('.resources'):
                a = '-resource:' + rel_sourcefile
            elif r.endswith('.txt') or r.endswith('.resx'):
                ofilebase = os.path.splitext(os.path.basename(r))[0] + '.resources'
                ofilename = os.path.join(self.get_target_private_dir(target), ofilebase)
                elem = NinjaBuildElement(ofilename, "CUSTOM_COMMAND", rel_sourcefile)
                elem.add_item('COMMAND', ['resgen', rel_sourcefile, ofilename])
                elem.add_item('DESC', 'Compiling resource %s.' % rel_sourcefile)
                elem.write(outfile)
                self.check_outputs(elem)
                deps.append(ofilename)
                a = '-resource:' + ofilename
            else:
                raise InvalidArguments('Unknown resource file %s.' % r)
            args.append(a)
        return (args, deps)

    def generate_cs_target(self, target, outfile):
        buildtype = self.environment.coredata.get_builtin_option('buildtype')
        fname = target.get_filename()
        outname_rel = os.path.join(self.get_target_dir(target), fname)
        src_list = target.get_sources()
        compiler = self.get_compiler_for_source(src_list[0])
        assert(compiler.get_language() == 'cs')
        rel_srcs = [s.rel_to_builddir(self.build_to_src) for s in src_list]
        deps = []
        commands = target.extra_args.get('cs', [])
        commands += compiler.get_buildtype_args(buildtype)
        if isinstance(target, build.Executable):
            commands.append('-target:exe')
        elif isinstance(target, build.SharedLibrary):
            commands.append('-target:library')
        else:
            raise MesonException('Unknown C# target type.')
        (resource_args, resource_deps) = self.generate_cs_resource_tasks(target, outfile)
        commands += resource_args
        deps += resource_deps
        commands += compiler.get_output_args(outname_rel)
        for l in target.link_targets:
            lname = os.path.join(self.get_target_dir(l), l.get_filename())
            commands += compiler.get_link_args(lname)
            deps.append(lname)
        if '-g' in commands:
            outputs = [outname_rel, outname_rel + '.mdb']
        else:
            outputs = [outname_rel]
        elem = NinjaBuildElement(outputs, 'cs_COMPILER', rel_srcs)
        elem.add_dep(deps)
        elem.add_item('ARGS', commands)
        self.check_outputs(elem)
        elem.write(outfile)

    def generate_single_java_compile(self, src, target, compiler, outfile):
        args = []
        args += compiler.get_buildtype_args(self.environment.coredata.get_builtin_option('buildtype'))
        args += compiler.get_output_args(self.get_target_private_dir(target))
        for i in target.include_dirs:
            for idir in i.get_incdirs():
                args += ['-sourcepath', os.path.join(self.build_to_src, i.curdir, idir)]
        rel_src = src.rel_to_builddir(self.build_to_src)
        plain_class_path = src.fname[:-4] + 'class'
        rel_obj = os.path.join(self.get_target_private_dir(target), plain_class_path)
        element = NinjaBuildElement(rel_obj, compiler.get_language() + '_COMPILER', rel_src)
        element.add_item('ARGS', args)
        element.write(outfile)
        self.check_outputs(element)
        return plain_class_path

    def generate_java_link(self, outfile):
        rule = 'rule java_LINKER\n'
        command = ' command = jar $ARGS\n'
        description = ' description = Creating jar $out.\n'
        outfile.write(rule)
        outfile.write(command)
        outfile.write(description)
        outfile.write('\n')

    def split_vala_sources(self, sources):
        src = []
        vapi_src = []
        for s in sources:
            if s.endswith('.vapi'):
                vapi_src.append(s)
            else:
                src.append(s)
        return (src, vapi_src)

    def determine_dep_vapis(self, target):
        result = []
        for dep in target.link_targets:
            for i in dep.sources:
                if hasattr(i, 'fname'):
                    i = i.fname
                if i.endswith('vala'):
                    vapiname = os.path.splitext(os.path.split(i)[1])[0] + '.vapi'
                    fullname = os.path.join(self.get_target_private_dir(dep), vapiname)
                    result.append(fullname)
                    break
        return result

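    # generate_vala_compile below drives valac roughly like this (a sketch;
    # the exact flags come from the args list built in the method):
    #
    #   valac -d <target-private-dir> -C -H foo.h --vapi=foo.vapi --pkg glib-2.0 foo.vala bar.vala
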
    def generate_vala_compile(self, target, outfile):
        """Vala is compiled into C. Set up all necessary build steps here."""
        valac = self.environment.coredata.compilers['vala']
        (src, vapi_src) = self.split_vala_sources(target.get_sources())
        vapi_src = [x.rel_to_builddir(self.build_to_src) for x in vapi_src]
        extra_dep_files = []
        vala_input_files = []
        for s in src:
            if s.endswith('.vala'):
                vala_input_files.append(s.rel_to_builddir(self.build_to_src))
        namebase = os.path.splitext(os.path.split(vala_input_files[0])[1])[0]
        hname = namebase + '.h'
        vapiname = namebase + '.vapi'
        outputs = [vapiname]
        args = ['-d', self.get_target_private_dir(target)]
        args += ['-C']#, '-o', cname]
        if not isinstance(target, build.Executable):
            outputs.append(hname)
            args += ['-H', hname]
        args += ['--vapi=' + vapiname]
        for src in vala_input_files:
            namebase = os.path.splitext(os.path.split(src)[1])[0] + '.c'
            outputs.append(namebase)
        if self.environment.coredata.get_builtin_option('werror'):
            args += valac.get_werror_args()
        for d in target.external_deps:
            if isinstance(d, dependencies.PkgConfigDependency):
                if d.name == 'glib-2.0' and d.version_requirement is not None \
                   and d.version_requirement.startswith(('>=', '==')):
                    args += ['--target-glib', d.version_requirement[2:]]
                args += ['--pkg', d.name]
        extra_args = []
        for a in target.extra_args.get('vala', []):
            if isinstance(a, File):
                relname = a.rel_to_builddir(self.build_to_src)
                extra_dep_files.append(relname)
                extra_args.append(relname)
            else:
                extra_args.append(a)
        dependency_vapis = self.determine_dep_vapis(target)
        extra_dep_files += dependency_vapis
        args += extra_args
        args += dependency_vapis
        outputs = [os.path.join(self.get_target_private_dir(target), x) for x in outputs]
        element = NinjaBuildElement(outputs, valac.get_language() + '_COMPILER',
                                    vala_input_files + vapi_src)
        element.add_item('ARGS', args)
        element.add_dep(extra_dep_files)
        element.write(outfile)
        self.check_outputs(element)
        return outputs

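    # The rust_COMPILER element written below expands to a command of roughly
    # this shape (a sketch; buildtype flags vary):
    #
    #   rustc --crate-type bin --out-dir <subdir> --emit dep-info --emit link -L <dep-dir> main.rs
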
    def generate_rust_target(self, target, outfile):
        rustc = self.environment.coredata.compilers['rust']
        relsrc = []
        for i in target.get_sources():
            if not rustc.can_compile(i):
                raise InvalidArguments('Rust target %s contains a non-rust source file.' % target.get_basename())
            relsrc.append(i.rel_to_builddir(self.build_to_src))
        target_name = os.path.join(target.subdir, target.get_filename())
        args = ['--crate-type']
        if isinstance(target, build.Executable):
            cratetype = 'bin'
        elif isinstance(target, build.SharedLibrary):
            cratetype = 'rlib'
        elif isinstance(target, build.StaticLibrary):
            cratetype = 'rlib'
        else:
            raise InvalidArguments('Unknown target type for rustc.')
        args.append(cratetype)
        args += rustc.get_buildtype_args(self.environment.coredata.get_builtin_option('buildtype'))
        depfile = target.name + '.d'
        args += ['--out-dir', target.subdir]
        args += ['--emit', 'dep-info', '--emit', 'link']
        orderdeps = [os.path.join(t.subdir, t.get_filename()) for t in target.link_targets]
        linkdirs = {}
        for d in target.link_targets:
            linkdirs[d.subdir] = True
        for d in linkdirs.keys():
            if d == '':
                d = '.'
            args += ['-L', d]
        element = NinjaBuildElement(target_name, 'rust_COMPILER', relsrc)
        if len(orderdeps) > 0:
            element.add_orderdep(orderdeps)
        element.add_item('ARGS', args)
        element.add_item('targetdep', depfile)
        element.add_item('cratetype', cratetype)
        element.write(outfile)
        self.check_outputs(element)

    def swift_module_file_name(self, target):
        return os.path.join(self.get_target_private_dir(target),
                            self.target_swift_modulename(target) + '.swiftmodule')

    def target_swift_modulename(self, target):
        return target.name

    def is_swift_target(self, target):
        for s in target.sources:
            if s.endswith('swift'):
                return True
        return False

    def determine_swift_dep_modules(self, target):
        result = []
        for l in target.link_targets:
            if self.is_swift_target(l):
                result.append(self.swift_module_file_name(l))
        return result

    def determine_swift_dep_dirs(self, target):
        result = []
        for l in target.link_targets:
            result.append(self.get_target_private_dir_abs(l))
        return result

    def get_swift_link_deps(self, target):
        result = []
        for l in target.link_targets:
            result.append(self.get_target_filename(l))
        return result

    def split_swift_generated_sources(self, target):
        all_srcs = []
        for genlist in target.get_generated_sources():
            if isinstance(genlist, build.CustomTarget):
                for ifile in genlist.get_filename():
                    rel = os.path.join(self.get_target_dir(genlist), ifile)
                    all_srcs.append(rel)
            else:
                for ifile in genlist.get_outfilelist():
                    rel = os.path.join(self.get_target_private_dir(target), ifile)
                    all_srcs.append(rel)
        srcs = []
        others = []
        for i in all_srcs:
            if i.endswith('.swift'):
                srcs.append(i)
            else:
                others.append(i)
        return (srcs, others)

    def generate_swift_target(self, target, outfile):
        module_name = self.target_swift_modulename(target)
        swiftc = self.environment.coredata.compilers['swift']
        abssrc = []
        abs_headers = []
        header_imports = []
        for i in target.get_sources():
            if swiftc.can_compile(i):
                relsrc = i.rel_to_builddir(self.build_to_src)
                abss = os.path.normpath(os.path.join(self.environment.get_build_dir(), relsrc))
                abssrc.append(abss)
            elif self.environment.is_header(i):
                relh = i.rel_to_builddir(self.build_to_src)
                absh = os.path.normpath(os.path.join(self.environment.get_build_dir(), relh))
                abs_headers.append(absh)
                header_imports += swiftc.get_header_import_args(absh)
            else:
                raise InvalidArguments('Swift target %s contains a non-swift source file.' % target.get_basename())
        os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True)
        compile_args = swiftc.get_compile_only_args()
        compile_args += swiftc.get_module_args(module_name)
        link_args = swiftc.get_output_args(os.path.join(self.environment.get_build_dir(),
                                                        self.get_target_filename(target)))
        rundir = self.get_target_private_dir(target)
        out_module_name = self.swift_module_file_name(target)
        in_module_files = self.determine_swift_dep_modules(target)
        abs_module_dirs = self.determine_swift_dep_dirs(target)
        module_includes = []
        for x in abs_module_dirs:
            module_includes += swiftc.get_include_args(x)
        link_deps = self.get_swift_link_deps(target)
        abs_link_deps = [os.path.join(self.environment.get_build_dir(), x) for x in link_deps]
        (rel_generated, _) = self.split_swift_generated_sources(target)
        abs_generated = [os.path.join(self.environment.get_build_dir(), x) for x in rel_generated]
        # We need absolute paths because swiftc needs to be invoked in a subdir
        # and this is the easiest way about it.
        objects = [] # Relative to swift invocation dir
        rel_objects = [] # Relative to build.ninja
        for i in abssrc + abs_generated:
            base = os.path.split(i)[1]
            oname = os.path.splitext(base)[0] + '.o'
            objects.append(oname)
            rel_objects.append(os.path.join(self.get_target_private_dir(target), oname))
        # Swiftc does not seem to be able to emit objects and module files in one go.
        elem = NinjaBuildElement(rel_objects, 'swift_COMPILER', abssrc)
        elem.add_dep(in_module_files + rel_generated)
        elem.add_dep(abs_headers)
        elem.add_item('ARGS', compile_args + header_imports + abs_generated + module_includes)
        elem.add_item('RUNDIR', rundir)
        elem.write(outfile)
        self.check_outputs(elem)
        elem = NinjaBuildElement(out_module_name, 'swift_COMPILER', abssrc)
        elem.add_dep(in_module_files + rel_generated)
        elem.add_item('ARGS', compile_args + abs_generated + module_includes + swiftc.get_mod_gen_args())
        elem.add_item('RUNDIR', rundir)
        elem.write(outfile)
        self.check_outputs(elem)
        if isinstance(target, build.StaticLibrary):
            elem = self.generate_link(target, outfile, self.get_target_filename(target),
                                      rel_objects, self.build.static_linker)
            elem.write(outfile)
        elif isinstance(target, build.Executable):
            elem = NinjaBuildElement(self.get_target_filename(target), 'swift_COMPILER', [])
            elem.add_dep(rel_objects)
            elem.add_dep(link_deps)
            elem.add_item('ARGS', link_args + swiftc.get_std_exe_link_args() + objects + abs_link_deps)
            elem.add_item('RUNDIR', rundir)
            elem.write(outfile)
            self.check_outputs(elem)
        else:
            raise MesonException('Swift supports only executable and static library targets.')

    def generate_static_link_rules(self, is_cross, outfile):
        if self.build.has_language('java'):
            if not is_cross:
                self.generate_java_link(outfile)
        if is_cross:
            if self.environment.cross_info.need_cross_compiler():
                static_linker = self.build.static_cross_linker
            else:
                static_linker = self.build.static_linker
            crstr = '_CROSS'
        else:
            static_linker = self.build.static_linker
            crstr = ''
        if static_linker is None:
            return
        rule = 'rule STATIC%s_LINKER\n' % crstr
        if mesonlib.is_windows():
            command_templ = ''' command = %s @$out.rsp
 rspfile = $out.rsp
 rspfile_content = $LINK_ARGS %s $in
'''
        else:
            command_templ = ' command = %s $LINK_ARGS %s $in\n'
        command = command_templ % (' '.join(static_linker.get_exelist()),
                                   ' '.join(static_linker.get_output_args('$out')))
        description = ' description = Static linking library $out\n\n'
        outfile.write(rule)
        outfile.write(command)
        outfile.write(description)

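    # On Windows the link rules above route arguments through a response file
    # to stay under the command-line length limit; the emitted ninja rule is
    # roughly (a sketch):
    #
    #   rule STATIC_LINKER
    #    command = <static-linker> @$out.rsp
    #    rspfile = $out.rsp
    #    rspfile_content = $LINK_ARGS <output-args> $in
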
    def generate_dynamic_link_rules(self, outfile):
        ctypes = [(self.build.compilers, False)]
        if self.environment.is_cross_build():
            if self.environment.cross_info.need_cross_compiler():
                ctypes.append((self.build.cross_compilers, True))
            else:
                # Native compiler masquerades as the cross compiler.
                ctypes.append((self.build.compilers, True))
        else:
            ctypes.append((self.build.cross_compilers, True))
        for (complist, is_cross) in ctypes:
            for compiler in complist:
                langname = compiler.get_language()
                if langname == 'java' or langname == 'vala' or \
                   langname == 'rust' or langname == 'cs':
                    continue
                crstr = ''
                cross_args = []
                if is_cross:
                    crstr = '_CROSS'
                    try:
                        cross_args = self.environment.cross_info.config['properties'][langname + '_link_args']
                    except KeyError:
                        pass
                rule = 'rule %s%s_LINKER\n' % (langname, crstr)
                if mesonlib.is_windows():
                    command_template = ''' command = %s @$out.rsp
 rspfile = $out.rsp
 rspfile_content = %s $ARGS %s $in $LINK_ARGS $aliasing
'''
                else:
                    command_template = ' command = %s %s $ARGS %s $in $LINK_ARGS $aliasing\n'
                command = command_template % (' '.join(compiler.get_linker_exelist()),
                                              ' '.join(cross_args),
                                              ' '.join(compiler.get_linker_output_args('$out')))
                description = ' description = Linking target $out'
                outfile.write(rule)
                outfile.write(command)
                outfile.write(description)
                outfile.write('\n')
        scriptdir = self.environment.get_script_dir()
        outfile.write('\n')
        symrule = 'rule SHSYM\n'
        symcmd = ' command = "%s" "%s" %s %s $CROSS\n' % (ninja_quote(sys.executable),
                                                          ninja_quote(os.path.join(scriptdir, 'symbolextractor.py')),
                                                          '$in', '$out')
        symstat = ' restat = 1\n'
        symdesc = ' description = Generating symbol file $out.\n'
        outfile.write(symrule)
        outfile.write(symcmd)
        outfile.write(symstat)
        outfile.write(symdesc)
        outfile.write('\n')

    def generate_java_compile_rule(self, compiler, outfile):
        rule = 'rule %s_COMPILER\n' % compiler.get_language()
        invoc = ' '.join([ninja_quote(i) for i in compiler.get_exelist()])
        command = ' command = %s $ARGS $in\n' % invoc
        description = ' description = Compiling Java object $in.\n'
        outfile.write(rule)
        outfile.write(command)
        outfile.write(description)
        outfile.write('\n')

    def generate_cs_compile_rule(self, compiler, outfile):
        rule = 'rule %s_COMPILER\n' % compiler.get_language()
        invoc = ' '.join([ninja_quote(i) for i in compiler.get_exelist()])
        command = ' command = %s $ARGS $in\n' % invoc
        description = ' description = Compiling cs target $out.\n'
        outfile.write(rule)
        outfile.write(command)
        outfile.write(description)
        outfile.write('\n')

    def generate_vala_compile_rules(self, compiler, outfile):
        rule = 'rule %s_COMPILER\n' % compiler.get_language()
        invoc = ' '.join([ninja_quote(i) for i in compiler.get_exelist()])
        command = ' command = %s $ARGS $in\n' % invoc
        description = ' description = Compiling Vala source $in.\n'
        restat = ' restat = 1\n' # valac does not rewrite unchanged output files, so let restat take advantage of that.
        outfile.write(rule)
        outfile.write(command)
        outfile.write(description)
        outfile.write(restat)
        outfile.write('\n')

    def generate_rust_compile_rules(self, compiler, outfile):
        rule = 'rule %s_COMPILER\n' % compiler.get_language()
        invoc = ' '.join([ninja_quote(i) for i in compiler.get_exelist()])
        command = ' command = %s $ARGS $in\n' % invoc
        description = ' description = Compiling Rust source $in.\n'
        depfile = ' depfile = $targetdep\n'
        depstyle = ' deps = gcc\n'
        outfile.write(rule)
        outfile.write(command)
        outfile.write(description)
        outfile.write(depfile)
        outfile.write(depstyle)
        outfile.write('\n')

    def generate_swift_compile_rules(self, compiler, outfile):
        rule = 'rule %s_COMPILER\n' % compiler.get_language()
        full_exe = [sys.executable,
                    os.path.join(self.environment.get_script_dir(), 'dirchanger.py'),
                    '$RUNDIR'] + compiler.get_exelist()
        invoc = ' '.join([ninja_quote(i) for i in full_exe])
        command = ' command = %s $ARGS $in\n' % invoc
        description = ' description = Compiling Swift source $in.\n'
        outfile.write(rule)
        outfile.write(command)
        outfile.write(description)
        outfile.write('\n')

    def generate_fortran_dep_hack(self, outfile):
        if mesonlib.is_windows():
            cmd = 'cmd /C ""'
        else:
            cmd = 'true'
        template = '''# Workaround for these issues:
# https://groups.google.com/forum/#!topic/ninja-build/j-2RfBIOd_8
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485
rule FORTRAN_DEP_HACK
 command = %s
 description = Dep hack
 restat = 1

'''
        outfile.write(template % cmd)

    def generate_compile_rule_for(self, langname, compiler, qstr, is_cross, outfile):
        if langname == 'java':
            if not is_cross:
                self.generate_java_compile_rule(compiler, outfile)
            return
        if langname == 'cs':
            if not is_cross:
                self.generate_cs_compile_rule(compiler, outfile)
            return
        if langname == 'vala':
            if not is_cross:
                self.generate_vala_compile_rules(compiler, outfile)
            return
        if langname == 'rust':
            if not is_cross:
                self.generate_rust_compile_rules(compiler, outfile)
            return
        if langname == 'swift':
            if not is_cross:
                self.generate_swift_compile_rules(compiler, outfile)
            return
        if langname == 'fortran':
            self.generate_fortran_dep_hack(outfile)
        if is_cross:
            crstr = '_CROSS'
        else:
            crstr = ''
        rule = 'rule %s%s_COMPILER\n' % (langname, crstr)
        depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
        quoted_depargs = []
        for d in depargs:
            if d != '$out' and d != '$in':
                d = qstr % d
            quoted_depargs.append(d)
        cross_args = []
        if is_cross:
            try:
                cross_args = self.environment.cross_info.config['properties'][langname + '_args']
            except KeyError:
                pass
        if mesonlib.is_windows():
            command_template = ''' command = %s @$out.rsp
 rspfile = $out.rsp
 rspfile_content = %s $ARGS %s %s %s $in
'''
        else:
            command_template = ' command = %s %s $ARGS %s %s %s $in\n'
        command = command_template % (' '.join(compiler.get_exelist()),
                                      ' '.join(cross_args),
                                      ' '.join(quoted_depargs),
                                      ' '.join(compiler.get_output_args('$out')),
                                      ' '.join(compiler.get_compile_only_args()))
        description = ' description = Compiling %s object $out\n' % langname
        if compiler.get_id() == 'msvc':
            deps = ' deps = msvc\n'
        else:
            deps = ' deps = gcc\n'
            deps += ' depfile = $DEPFILE\n'
        outfile.write(rule)
        outfile.write(command)
        outfile.write(deps)
        outfile.write(description)
        outfile.write('\n')

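    # On a non-Windows host, generate_compile_rule_for above emits rules of
    # roughly this shape for gcc-style compilers (the dep flags shown are
    # illustrative; the real ones come from get_dependency_gen_args):
    #
    #   rule c_COMPILER
    #    command = cc $ARGS "-MMD" "-MQ" $out "-MF" $DEPFILE -o $out -c $in
    #    deps = gcc
    #    depfile = $DEPFILE
    #    description = Compiling c object $out
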
    def generate_pch_rule_for(self, langname, compiler, qstr, is_cross, outfile):
        if langname != 'c' and langname != 'cpp':
            return
        if is_cross:
            crstr = '_CROSS'
        else:
            crstr = ''
        rule = 'rule %s%s_PCH\n' % (langname, crstr)
        depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
        cross_args = []
        if is_cross:
            try:
                cross_args = self.environment.cross_info.config['properties'][langname + '_args']
            except KeyError:
                pass
        quoted_depargs = []
        for d in depargs:
            if d != '$out' and d != '$in':
                d = qstr % d
            quoted_depargs.append(d)
        if compiler.get_id() == 'msvc':
            output = ''
        else:
            output = ' '.join(compiler.get_output_args('$out'))
        command = " command = %s %s $ARGS %s %s %s $in\n" % (' '.join(compiler.get_exelist()),
                                                             ' '.join(cross_args),
                                                             ' '.join(quoted_depargs),
                                                             output,
                                                             ' '.join(compiler.get_compile_only_args()))
        description = ' description = Precompiling header %s\n' % '$in'
        if compiler.get_id() == 'msvc':
            deps = ' deps = msvc\n'
        else:
            deps = ' deps = gcc\n'
            deps += ' depfile = $DEPFILE\n'
        outfile.write(rule)
        outfile.write(command)
        outfile.write(deps)
        outfile.write(description)
        outfile.write('\n')

    def generate_compile_rules(self, outfile):
        qstr = quote_char + "%s" + quote_char
        for compiler in self.build.compilers:
            langname = compiler.get_language()
            self.generate_compile_rule_for(langname, compiler, qstr, False, outfile)
            self.generate_pch_rule_for(langname, compiler, qstr, False, outfile)
        if self.environment.is_cross_build():
            # In case we are doing a target-only build, make the native compilers
            # masquerade as cross compilers.
            if self.environment.cross_info.need_cross_compiler():
                cclist = self.build.cross_compilers
            else:
                cclist = self.build.compilers
            for compiler in cclist:
                langname = compiler.get_language()
                self.generate_compile_rule_for(langname, compiler, qstr, True, outfile)
                self.generate_pch_rule_for(langname, compiler, qstr, True, outfile)
        outfile.write('\n')

    def replace_outputs(self, args, private_dir, output_list):
        newargs = []
        regex = re.compile(r'@OUTPUT(\d+)@')
        for arg in args:
            m = regex.search(arg)
            while m is not None:
                index = int(m.group(1))
                src = '@OUTPUT%d@' % index
                arg = arg.replace(src, os.path.join(private_dir, output_list[index]))
                m = regex.search(arg)
            newargs.append(arg)
        return newargs

    def generate_custom_generator_rules(self, target, outfile):
        for genlist in target.get_generated_sources():
            if isinstance(genlist, build.CustomTarget):
                continue # Custom targets have already written their output rules.
            generator = genlist.get_generator()
            exe = generator.get_exe()
            exe_arr = self.exe_object_to_cmd_array(exe)
            infilelist = genlist.get_infilelist()
            outfilelist = genlist.get_outfilelist()
            base_args = generator.get_arglist()
            extra_dependencies = [os.path.join(self.build_to_src, i) for i in genlist.extra_depends]
            for i in range(len(infilelist)):
                if len(generator.outputs) == 1:
                    sole_output = os.path.join(self.get_target_private_dir(target), outfilelist[i])
                else:
                    sole_output = ''
                curfile = infilelist[i]
                infilename = os.path.join(self.build_to_src, curfile)
                outfiles = genlist.get_outputs_for(curfile)
                outfiles = [os.path.join(self.get_target_private_dir(target), of) for of in outfiles]
                args = [x.replace("@INPUT@", infilename).replace('@OUTPUT@', sole_output)
                        for x in base_args]
                args = self.replace_outputs(args, self.get_target_private_dir(target), outfilelist)
                # We have consumed output files, so drop them from the list of remaining outputs.
                if sole_output == '':
                    outfilelist = outfilelist[len(generator.outputs):]
                relout = self.get_target_private_dir(target)
                args = [x.replace("@SOURCE_DIR@", self.build_to_src).replace("@BUILD_DIR@", relout)
                        for x in args]
                final_args = []
                for a in args:
                    if a == '@EXTRA_ARGS@':
                        final_args += genlist.get_extra_args()
                    else:
                        final_args.append(a)
                cmdlist = exe_arr + final_args
                elem = NinjaBuildElement(outfiles, 'CUSTOM_COMMAND', infilename)
                if len(extra_dependencies) > 0:
                    elem.add_dep(extra_dependencies)
                elem.add_item('DESC', 'Generating $out')
                if isinstance(exe, build.BuildTarget):
                    elem.add_dep(self.get_target_filename(exe))
                elem.add_item('COMMAND', cmdlist)
                elem.write(outfile)
                self.check_outputs(elem)

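    # The Fortran module scanner below keys off lines like 'module foo'; a
    # quick illustration of the regex it uses:
    #
    #   modre = re.compile(r"\s*module\s+(\w+)", re.IGNORECASE)
    #   modre.match('MODULE Foo').group(1)    # -> 'Foo'
    #   modre.match('module procedure bar')   # matches, then skipped explicitly
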
    def scan_fortran_module_outputs(self, target):
        compiler = None
        for c in self.build.compilers:
            if c.get_language() == 'fortran':
                compiler = c
                break
        if compiler is None:
            self.fortran_deps[target.get_basename()] = {}
            return
        modre = re.compile(r"\s*module\s+(\w+)", re.IGNORECASE)
        module_files = {}
        for s in target.get_sources():
            # FIXME, does not work for generated Fortran sources,
            # but those are really rare. I hope.
            if not compiler.can_compile(s):
                continue
            for line in open(os.path.join(self.environment.get_source_dir(), s.subdir, s.fname)):
                modmatch = modre.match(line)
                if modmatch is not None:
                    modname = modmatch.group(1)
                    if modname.lower() == 'procedure': # MODULE PROCEDURE construct
                        continue
                    if modname in module_files:
                        raise InvalidArguments('Namespace collision: module %s defined in two files %s and %s.' %
                                               (modname, module_files[modname], s))
                    module_files[modname] = s
        self.fortran_deps[target.get_basename()] = module_files

    def get_fortran_deps(self, compiler, src, target):
        mod_files = []
        usere = re.compile(r"\s*use\s+(\w+)", re.IGNORECASE)
        dirname = self.get_target_private_dir(target)
        tdeps = self.fortran_deps[target.get_basename()]
        for line in open(src):
            usematch = usere.match(line)
            if usematch is not None:
                usename = usematch.group(1)
                if usename not in tdeps:
                    # The module is not provided by any source file. This is due to
                    # a) missing file/typo/etc
                    # b) using a module provided by the compiler, such as OpenMP
                    # There's no easy way to tell which is which (that I know of)
                    # so just ignore this and go on. Ideally we would print a
                    # warning message to the user but this is a common occurrence,
                    # which would lead to lots of distracting noise.
                    continue
                mod_source_file = tdeps[usename]
                # Check if a source uses a module it exports itself.
                # Potential bug if multiple targets have a file with
                # the same name.
                if mod_source_file.fname == os.path.split(src)[1]:
                    continue
                mod_name = compiler.module_name_to_filename(usematch.group(1))
                mod_files.append(os.path.join(dirname, mod_name))
        return mod_files

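    # generate_single_compile below flattens the source path to build the
    # object file name, e.g. (a sketch; the suffix comes from the environment):
    #
    #   'sub/foo.c'  -> '<target-private-dir>/sub_foo.c.o'
    #   'a\\b.cpp'   -> '<target-private-dir>/a_b.cpp.obj'
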
    def generate_single_compile(self, target, outfile, src, is_generated=False, header_deps=[], order_deps=[]):
        if isinstance(src, str) and src.endswith('.h'):
            raise RuntimeError('Attempted to compile a header file: %s' % src)
        if isinstance(src, RawFilename) and src.fname.endswith('.h'):
            raise RuntimeError('Attempted to compile a header file: %s' % src.fname)
        extra_orderdeps = []
        compiler = self.get_compiler_for_source(src)
        commands = self.generate_basic_compiler_args(target, compiler)
        commands += compiler.get_include_args(self.get_target_private_dir(target), False)
        curdir = target.get_subdir()
        tmppath = os.path.normpath(os.path.join(self.build_to_src, curdir))
        commands += compiler.get_include_args(tmppath, False)
        if curdir == '':
            curdir = '.'
        commands += compiler.get_include_args(curdir, False)
        for d in target.external_deps:
            if d.need_threads():
                commands += compiler.thread_flags()
                break
        if isinstance(src, RawFilename):
            rel_src = src.fname
        elif is_generated:
            if self.has_dir_part(src):
                rel_src = src
            else:
                rel_src = os.path.join(self.get_target_private_dir(target), src)
            abs_src = os.path.join(self.environment.get_source_dir(), rel_src)
        else:
            if isinstance(src, File):
                rel_src = src.rel_to_builddir(self.build_to_src)
            else:
                raise build.InvalidArguments('Invalid source type.')
            abs_src = os.path.join(self.environment.get_build_dir(), rel_src)
        if isinstance(src, RawFilename):
            src_filename = src.fname
        elif isinstance(src, File):
            src_filename = src.fname
        elif os.path.isabs(src):
            src_filename = os.path.basename(src)
        else:
            src_filename = src
        obj_basename = src_filename.replace('/', '_').replace('\\', '_')
        rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename)
        rel_obj += '.' + self.environment.get_object_suffix()
        dep_file = compiler.depfile_for_object(rel_obj)
        if self.environment.coredata.get_builtin_option('use_pch'):
            pchlist = target.get_pch(compiler.language)
        else:
            pchlist = []
        if len(pchlist) == 0:
            pch_dep = []
        else:
            arr = []
            i = os.path.join(self.get_target_private_dir(target), compiler.get_pch_name(pchlist[0]))
            arr.append(i)
            pch_dep = arr
        for i in target.get_include_dirs():
            basedir = i.get_curdir()
            for d in i.get_incdirs():
                expdir = os.path.join(basedir, d)
                srctreedir = os.path.join(self.build_to_src, expdir)
                bargs = compiler.get_include_args(expdir, i.is_system)
                sargs = compiler.get_include_args(srctreedir, i.is_system)
                commands += bargs
                commands += sargs
            for d in i.get_extra_build_dirs():
                commands += compiler.get_include_args(d, i.is_system)
        custom_target_include_dirs = []
        for i in target.generated:
            if isinstance(i, build.CustomTarget):
                idir = self.get_target_dir(i)
                if idir not in custom_target_include_dirs:
                    custom_target_include_dirs.append(idir)
        for i in custom_target_include_dirs:
            commands += compiler.get_include_args(i, False)
        if self.environment.coredata.get_builtin_option('use_pch'):
            commands += self.get_pch_include_args(compiler, target)
        crstr = ''
        if target.is_cross:
            crstr = '_CROSS'
        compiler_name = '%s%s_COMPILER' % (compiler.get_language(), crstr)
        extra_deps = []
        if compiler.get_language() == 'fortran':
            extra_deps += self.get_fortran_deps(compiler, abs_src, target)
            # Dependency hack. Remove once multiple outputs in Ninja is fixed:
            # https://groups.google.com/forum/#!topic/ninja-build/j-2RfBIOd_8
            for modname, srcfile in self.fortran_deps[target.get_basename()].items():
                modfile = os.path.join(self.get_target_private_dir(target),
                                       compiler.module_name_to_filename(modname))
                if srcfile == src:
                    depelem = NinjaBuildElement(modfile, 'FORTRAN_DEP_HACK', rel_obj)
                    depelem.write(outfile)
                    self.check_outputs(depelem)
            commands += compiler.get_module_outdir_args(self.get_target_private_dir(target))
        element = NinjaBuildElement(rel_obj, compiler_name, rel_src)
        for d in header_deps:
            if isinstance(d, RawFilename):
                d = d.fname
            elif not self.has_dir_part(d):
                d = os.path.join(self.get_target_private_dir(target), d)
            element.add_dep(d)
        for d in extra_deps:
            element.add_dep(d)
        for d in order_deps:
            if isinstance(d, RawFilename):
                d = d.fname
            elif not self.has_dir_part(d):
                d = os.path.join(self.get_target_private_dir(target), d)
            element.add_orderdep(d)
        element.add_orderdep(pch_dep)
        element.add_orderdep(extra_orderdeps)
        for i in self.get_fortran_orderdeps(target, compiler):
            element.add_orderdep(i)
        element.add_item('DEPFILE', dep_file)
        element.add_item('ARGS', commands)
        element.write(outfile)
        self.check_outputs(element)
        return rel_obj

    def has_dir_part(self, fname):
        return '/' in fname or '\\' in fname

    # Fortran is a bit weird (again). When you link against a library, just compiling a source file
    # requires the mod files that are output when single files are built. To do this right we would need to
    # scan all inputs and write out explicit deps for each file. That is too slow and too much effort so
    # instead just have an ordered dependency on the library. This ensures all required mod files are created.
    # The real deps are then detected via dep file generation from the compiler. This breaks on compilers that
    # produce incorrect dep files but such is life.
    def get_fortran_orderdeps(self, target, compiler):
        if compiler.language != 'fortran':
            return []
        return [os.path.join(self.get_target_dir(lt), lt.get_filename()) for lt in target.link_targets]

    def generate_msvc_pch_command(self, target, compiler, pch):
        if len(pch) != 2:
            raise RuntimeError('MSVC requires one header and one source to produce precompiled headers.')
        header = pch[0]
        source = pch[1]
        pchname = compiler.get_pch_name(header)
        dst = os.path.join(self.get_target_private_dir(target), pchname)
        commands = []
        commands += self.generate_basic_compiler_args(target, compiler)
        just_name = os.path.split(header)[1]
        (objname, pch_args) = compiler.gen_pch_args(just_name, source, dst)
        commands += pch_args
        dep = dst + '.' + compiler.get_depfile_suffix()
        return (commands, dep, dst, [objname])

    def generate_gcc_pch_command(self, target, compiler, pch):
        commands = []
        commands += self.generate_basic_compiler_args(target, compiler)
        dst = os.path.join(self.get_target_private_dir(target),
                           os.path.split(pch)[-1] + '.' + compiler.get_pch_suffix())
        dep = dst + '.' + compiler.get_depfile_suffix()
        return (commands, dep, dst, []) # Gcc does not create an object file during pch generation.

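    # The two PCH helpers above reflect the compilers' differing models (sketch):
    #
    #   gcc-style: header.h -> header.h.gch; no object file is produced.
    #   msvc:      header.h plus a stub source -> a .pch plus an object file
    #              that must later be linked into the target (hence pch_objects).
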
    def generate_pch(self, target, outfile):
        cstr = ''
        pch_objects = []
        if target.is_cross:
            cstr = '_CROSS'
        for lang in ['c', 'cpp']:
            pch = target.get_pch(lang)
            if len(pch) == 0:
                continue
            if '/' not in pch[0] or '/' not in pch[-1]:
                raise build.InvalidArguments('Precompiled header of "%s" must not be in the same directory as source, please put it in a subdirectory.' % target.get_basename())
            compiler = self.get_compiler_for_lang(lang)
            if compiler.id == 'msvc':
                src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[-1])
                (commands, dep, dst, objs) = self.generate_msvc_pch_command(target, compiler, pch)
                extradep = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0])
            else:
                src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0])
                (commands, dep, dst, objs) = self.generate_gcc_pch_command(target, compiler, pch[0])
                extradep = None
            pch_objects += objs
            rulename = compiler.get_language() + cstr + '_PCH'
            elem = NinjaBuildElement(dst, rulename, src)
            if extradep is not None:
                elem.add_dep(extradep)
            elem.add_item('ARGS', commands)
            elem.add_item('DEPFILE', dep)
            elem.write(outfile)
            self.check_outputs(elem)
        return pch_objects

    def generate_shsym(self, outfile, target):
        target_name = self.get_target_filename(target)
        targetdir = self.get_target_private_dir(target)
        symname = os.path.join(targetdir, target_name + '.symbols')
        elem = NinjaBuildElement(symname, 'SHSYM', target_name)
        if self.environment.is_cross_build() and self.environment.cross_info.need_cross_compiler():
            elem.add_item('CROSS', '--cross-host=' + self.environment.cross_info.config['host_machine']['system'])
        elem.write(outfile)
        self.check_outputs(elem)

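    # The .symbols file written by the SHSYM rule above, combined with its
    # restat = 1, lets ninja skip relinking dependents when a shared library's
    # exported symbol list is unchanged; the emitted build statement is roughly:
    #
    #   build <target-private-dir>/libfoo.so.symbols: SHSYM libfoo.so
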
    def generate_link(self, target, outfile, outname, obj_list, linker, extra_args=[]):
        if isinstance(target, build.StaticLibrary):
            linker_base = 'STATIC'
        else:
            linker_base = linker.get_language() # Fixme.
        if isinstance(target, build.SharedLibrary):
            self.generate_shsym(outfile, target)
        crstr = ''
        if target.is_cross:
            crstr = '_CROSS'
        linker_rule = linker_base + crstr + '_LINKER'
        abspath = os.path.join(self.environment.get_build_dir(), target.subdir)
        commands = []
        commands += linker.get_linker_always_args()
        commands += linker.get_buildtype_linker_args(self.environment.coredata.get_builtin_option('buildtype'))
        commands += linker.get_option_link_args(self.environment.coredata.compiler_options)
        if not isinstance(target, build.StaticLibrary):
            commands += self.environment.coredata.external_link_args[linker.get_language()]
        if isinstance(target, build.Executable):
            commands += linker.get_std_exe_link_args()
        elif isinstance(target, build.SharedLibrary):
            commands += linker.get_std_shared_lib_link_args()
            commands += linker.get_pic_args()
            if hasattr(target, 'soversion'):
                soversion = target.soversion
            else:
                soversion = None
            commands += linker.get_soname_args(target.name, abspath, soversion)
        elif isinstance(target, build.StaticLibrary):
            commands += linker.get_std_link_args()
        else:
            raise RuntimeError('Unknown build target type.')
        # Link arguments of static libraries are not put in the command line of
        # the library. They are instead appended to the command line where
        # the static library is used.
        if linker_base == 'STATIC':
            dependencies = []
        else:
            dependencies = target.get_dependencies()
        commands += self.build_target_link_arguments(linker, dependencies)
        for d in target.external_deps:
            if d.need_threads():
                commands += linker.thread_link_flags()
        if not isinstance(target, build.StaticLibrary):
            commands += target.link_args
        # External deps must be last because target link libraries may depend on them.
        if not isinstance(target, build.StaticLibrary):
            for dep in target.get_external_deps():
                commands += dep.get_link_args()
            for d in target.get_dependencies():
                if isinstance(d, build.StaticLibrary):
                    for dep in d.get_external_deps():
                        commands += dep.get_link_args()
        commands += linker.build_rpath_args(self.environment.get_build_dir(),
                                            self.determine_rpath_dirs(target), target.install_rpath)
        if self.environment.coredata.get_builtin_option('coverage'):
            commands += linker.get_coverage_link_args()
        custom_target_libraries = self.get_custom_target_provided_libraries(target)
        commands += extra_args
        commands += custom_target_libraries
        commands = linker.unixtype_flags_to_native(commands)
        dep_targets = [self.get_dependency_filename(t) for t in dependencies]
        dep_targets += [os.path.join(self.environment.source_dir, target.subdir, t)
                        for t in target.link_depends]
        elem = NinjaBuildElement(outname, linker_rule, obj_list)
        elem.add_dep(dep_targets + custom_target_libraries)
        elem.add_item('LINK_ARGS', commands)
        self.check_outputs(elem)
        return elem

    def get_custom_target_provided_libraries(self, target):
        libs = []
        for t in target.get_generated_sources():
            if not isinstance(t, build.CustomTarget):
                continue
            for f in t.output:
                if self.environment.is_library(f):
                    libs.append(os.path.join(self.get_target_dir(t), f))
        return libs

    def determine_rpath_dirs(self, target):
        link_deps = target.get_all_link_deps()
        result = []
        for ld in link_deps:
            prospective = self.get_target_dir(ld)
            if not prospective in result:
                result.append(prospective)
        return result

    def get_dependency_filename(self, t):
        if isinstance(t, build.SharedLibrary):
            return os.path.join(self.get_target_private_dir(t), self.get_target_filename(t) + '.symbols')
        return self.get_target_filename(t)

    def generate_shlib_aliases(self, target, outdir):
        basename = target.get_filename()
        aliases = target.get_aliaslist()
        if not mesonlib.is_windows():
            for alias in aliases:
                aliasfile = os.path.join(self.environment.get_build_dir(), outdir, alias)
                try:
                    os.remove(aliasfile)
                except Exception:
                    pass
                os.symlink(basename, aliasfile)
        else:
            mlog.debug("Library versioning disabled because host does not support symlinks.")

    def generate_gcov_clean(self, outfile):
        gcno_elem = NinjaBuildElement('clean-gcno', 'CUSTOM_COMMAND', 'PHONY')
        script_root = self.environment.get_script_dir()
        clean_script = os.path.join(script_root, 'delwithsuffix.py')
        gcno_elem.add_item('COMMAND', [sys.executable, clean_script, '.', 'gcno'])
        gcno_elem.add_item('description', 'Deleting gcno files')
        gcno_elem.write(outfile)
        self.check_outputs(gcno_elem)
        gcda_elem = NinjaBuildElement('clean-gcda', 'CUSTOM_COMMAND', 'PHONY')
        script_root = self.environment.get_script_dir()
        clean_script = os.path.join(script_root, 'delwithsuffix.py')
        gcda_elem.add_item('COMMAND', [sys.executable, clean_script, '.', 'gcda'])
        gcda_elem.add_item('description', 'Deleting gcda files')
        gcda_elem.write(outfile)
        self.check_outputs(gcda_elem)

    def is_compilable_file(self, filename):
        if filename.endswith('.cpp') or \
           filename.endswith('.c') or \
           filename.endswith('.cxx') or \
           filename.endswith('.cc') or \
           filename.endswith('.C'):
            return True
        return False

    def process_dep_gens(self, outfile, target):
        src_deps = []
        other_deps = []
        for rule in self.dep_rules.values():
            srcs = target.get_original_kwargs().get(rule.src_keyword, [])
            if isinstance(srcs, str):
                srcs = [srcs]
            for src in srcs:
                plainname = os.path.split(src)[1]
                basename = plainname.split('.')[0]
                outname = rule.name_templ.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname)
                outfilename = os.path.join(self.get_target_private_dir(target), outname)
                infilename = os.path.join(self.build_to_src, target.get_source_subdir(), src)
                elem = NinjaBuildElement(outfilename, rule.name, infilename)
                elem.write(outfile)
                self.check_outputs(elem)
                if self.is_compilable_file(outfilename):
                    src_deps.append(outfilename)
                else:
                    other_deps.append(outfilename)
        return (src_deps, other_deps)

    def generate_ending(self, outfile):
        targetlist = [self.get_target_filename(t) for t in self.build.get_targets().values()
                      if not isinstance(t, build.RunTarget)]
        elem = NinjaBuildElement('all', 'phony', targetlist)
        elem.write(outfile)
        self.check_outputs(elem)
        default = 'default all\n\n'
        outfile.write(default)
        ninja_command = environment.detect_ninja()
        if ninja_command is None:
            raise MesonException('Could not detect ninja command')
        elem = NinjaBuildElement('clean', 'CUSTOM_COMMAND', 'PHONY')
        elem.add_item('COMMAND', [ninja_command, '-t', 'clean'])
        elem.add_item('description', 'Cleaning')
        if self.environment.coredata.get_builtin_option('coverage'):
            self.generate_gcov_clean(outfile)
            elem.add_dep('clean-gcda')
            elem.add_dep('clean-gcno')
        elem.write(outfile)
        self.check_outputs(elem)
        deps = self.get_regen_filelist()
        elem = NinjaBuildElement('build.ninja', 'REGENERATE_BUILD', deps)
        elem.add_item('pool', 'console')
        elem.write(outfile)
        elem = NinjaBuildElement(deps, 'phony', '')
        elem.write(outfile)
        self.check_outputs(elem)

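    # generate_ending leaves the tail of build.ninja looking roughly like this
    # (a sketch, dependency lists elided):
    #
    #   build all: phony <every target filename>
    #   default all
    #
    #   build clean: CUSTOM_COMMAND PHONY
    #    COMMAND = <ninja> -t clean
    #    description = Cleaning
    #   build build.ninja: REGENERATE_BUILD <regen dependencies>
    #    pool = console
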
[((27, 3, 27, 24), 'mesonlib.is_windows', 'mesonlib.is_windows', ({}, {}), '()', False, 'import environment, mesonlib\n'), ((159, 14, 159, 84), 'coredata.MesonException', 'MesonException', ({(159, 29, 159, 83): '"""Could not determine vs dep dependency prefix string."""'}, {}), "('Could not determine vs dep dependency prefix string.')", False, 'from coredata import MesonException\n'), ((190, 8, 190, 45), 'os.replace', 'os.replace', ({(190, 19, 190, 31): 'tempfilename', (190, 33, 190, 44): 'outfilename'}, {}), '(tempfilename, outfilename)', False, 'import os, sys, pickle, re\n'), ((195, 20, 195, 46), 'environment.detect_ninja', 'environment.detect_ninja', ({}, {}), '()', False, 'import environment, mesonlib\n'), ((197, 17, 197, 113), 'subprocess.check_output', 'subprocess.check_output', (), '', False, 'import subprocess, shutil\n'), ((426, 45, 426, 78), 'environment.find_coverage_tools', 'environment.find_coverage_tools', ({}, {}), '()', False, 'import environment, mesonlib\n'), ((461, 25, 461, 70), 'os.path.join', 'os.path.join', ({(461, 38, 461, 49): 'script_root', (461, 51, 461, 69): '"""meson_install.py"""'}, {}), "(script_root, 'meson_install.py')", False, 'import os, sys, pickle, re\n'), ((463, 19, 463, 59), 'os.path.join', 'os.path.join', ({(463, 32, 463, 43): 'script_root', (463, 45, 463, 58): '"""depfixer.py"""'}, {}), "(script_root, 'depfixer.py')", False, 'import os, sys, pickle, re\n'), ((484, 8, 484, 29), 'pickle.dump', 'pickle.dump', ({(484, 20, 484, 21): 'd', (484, 23, 484, 28): 'ofile'}, {}), '(d, ofile)', False, 'import os, sys, pickle, re\n'), ((586, 19, 586, 46), 'environment.find_valgrind', 'environment.find_valgrind', ({}, {}), '()', False, 'import environment, mesonlib\n'), ((588, 22, 588, 64), 'os.path.join', 'os.path.join', ({(588, 35, 588, 46): 'script_root', (588, 48, 588, 63): '"""meson_test.py"""'}, {}), "(script_root, 'meson_test.py')", False, 'import os, sys, pickle, re\n'), ((608, 27, 608, 74), 'os.path.join', 'os.path.join', ({(608, 40, 608, 51): 'script_root', (608, 53, 608, 73): '"""meson_benchmark.py"""'}, {}), "(script_root, 'meson_benchmark.py')", False, 'import os, sys, pickle, re\n'), ((1040, 11, 1040, 32), 'mesonlib.is_windows', 'mesonlib.is_windows', ({}, {}), '()', False, 'import environment, mesonlib\n'), ((1171, 11, 1171, 32), 'mesonlib.is_windows', 'mesonlib.is_windows', ({}, {}), '()', False, 'import environment, mesonlib\n'), ((1226, 11, 1226, 32), 'mesonlib.is_windows', 'mesonlib.is_windows', ({}, {}), '()', False, 'import environment, mesonlib\n'), ((1315, 16, 1315, 43), 're.compile', 're.compile', ({(1315, 27, 1315, 42): '"""@OUTPUT(\\\\d+)@"""'}, {}), "('@OUTPUT(\\\\d+)@')", False, 'import os, sys, pickle, re\n'), ((1381, 16, 1381, 63), 're.compile', 're.compile', ({(1381, 27, 1381, 47): '"""\\\\s*module\\\\s+(\\\\w+)"""', (1381, 49, 1381, 62): 're.IGNORECASE'}, {}), "('\\\\s*module\\\\s+(\\\\w+)', re.IGNORECASE)", False, 'import os, sys, pickle, re\n'), ((1402, 16, 1402, 60), 're.compile', 're.compile', ({(1402, 27, 1402, 44): '"""\\\\s*use\\\\s+(\\\\w+)"""', (1402, 46, 1402, 59): 're.IGNORECASE'}, {}), "('\\\\s*use\\\\s+(\\\\w+)', re.IGNORECASE)", False, 'import os, sys, pickle, re\n'), ((1620, 18, 1620, 67), 'os.path.join', 'os.path.join', ({(1620, 31, 1620, 40): 'targetdir', (1620, 42, 1620, 66): "target_name + '.symbols'"}, {}), "(targetdir, target_name + '.symbols')", False, 'import os, sys, pickle, re\n'), ((1738, 27, 1738, 72), 'os.path.join', 'os.path.join', ({(1738, 40, 1738, 51): 'script_root', (1738, 53, 1738, 71): 
'"""delwithsuffix.py"""'}, {}), "(script_root, 'delwithsuffix.py')", False, 'import os, sys, pickle, re\n'), ((1746, 27, 1746, 72), 'os.path.join', 'os.path.join', ({(1746, 40, 1746, 51): 'script_root', (1746, 53, 1746, 71): '"""delwithsuffix.py"""'}, {}), "(script_root, 'delwithsuffix.py')", False, 'import os, sys, pickle, re\n'), ((1794, 24, 1794, 50), 'environment.detect_ninja', 'environment.detect_ninja', ({}, {}), '()', False, 'import environment, mesonlib\n'), ((137, 11, 137, 29), 'shutil.which', 'shutil.which', ({(137, 24, 137, 28): '"""cl"""'}, {}), "('cl')", False, 'import subprocess, shutil\n'), ((408, 25, 408, 57), 'os.path.join', 'os.path.join', ({(408, 38, 408, 44): 'subdir', (408, 46, 408, 56): '"""POTFILES"""'}, {}), "(subdir, 'POTFILES')", False, 'import os, sys, pickle, re\n'), ((701, 29, 701, 78), 'os.path.join', 'os.path.join', ({(701, 42, 701, 59): 'self.build_to_src', (701, 61, 701, 74): 'target.subdir', (701, 76, 701, 77): 'r'}, {}), '(self.build_to_src, target.subdir, r)', False, 'import os, sys, pickle, re\n'), ((1438, 35, 1438, 74), 'os.path.join', 'os.path.join', ({(1438, 48, 1438, 65): 'self.build_to_src', (1438, 67, 1438, 73): 'curdir'}, {}), '(self.build_to_src, curdir)', False, 'import os, sys, pickle, re\n'), ((1572, 20, 1572, 41), 'os.path.split', 'os.path.split', ({(1572, 34, 1572, 40): 'header'}, {}), '(header)', False, 'import os, sys, pickle, re\n'), ((1689, 24, 1690, 54), 'os.path.join', 'os.path.join', ({(1689, 37, 1689, 64): 'self.environment.source_dir', (1690, 37, 1690, 50): 'target.subdir', (1690, 52, 1690, 53): 't'}, {}), '(self.environment.source_dir, target.subdir, t)', False, 'import os, sys, pickle, re\n'), ((1724, 15, 1724, 36), 'mesonlib.is_windows', 'mesonlib.is_windows', ({}, {}), '()', False, 'import environment, mesonlib\n'), ((1733, 12, 1733, 93), 'mlog.debug', 'mlog.debug', ({(1733, 23, 1733, 92): '"""Library versioning disabled because host does not support symlinks."""'}, {}), "(\n 'Library versioning disabled because host does not support symlinks.')", False, 'import mlog\n'), ((1796, 18, 1796, 66), 'coredata.MesonException', 'MesonException', ({(1796, 33, 1796, 65): '"""Could not detect ninja command"""'}, {}), "('Could not detect ninja command')", False, 'from coredata import MesonException\n'), ((131, 22, 131, 113), 'coredata.MesonException', 'MesonException', ({(131, 37, 131, 112): '(\'Multiple producers for Ninja target "%s". Please rename your targets.\' % n)'}, {}), '(\n \'Multiple producers for Ninja target "%s". 
Please rename your targets.\' % n\n )', False, 'from coredata import MesonException\n'), ((418, 30, 418, 62), 'os.path.join', 'os.path.join', ({(418, 43, 418, 49): 'subdir', (418, 51, 418, 61): "l + '.gmo'"}, {}), "(subdir, l + '.gmo')", False, 'import os, sys, pickle, re\n'), ((457, 21, 457, 41), 'mlog.red', 'mlog.red', ({(457, 30, 457, 40): '"""Warning:"""'}, {}), "('Warning:')", False, 'import mlog\n'), ((492, 27, 492, 62), 'os.path.join', 'os.path.join', ({(492, 40, 492, 46): 'subdir', (492, 48, 492, 61): "lang + '.gmo'"}, {}), "(subdir, lang + '.gmo')", False, 'import os, sys, pickle, re\n'), ((540, 25, 540, 56), 'os.path.join', 'os.path.join', ({(540, 38, 540, 44): 'subdir', (540, 46, 540, 55): "f + '.gz'"}, {}), "(subdir, f + '.gz')", False, 'import os, sys, pickle, re\n'), ((554, 25, 554, 69), 'os.path.join', 'os.path.join', ({(554, 38, 554, 47): 'srcprefix', (554, 49, 554, 65): 'de.source_subdir', (554, 67, 554, 68): 'f'}, {}), '(srcprefix, de.source_subdir, f)', False, 'import os, sys, pickle, re\n'), ((555, 25, 555, 48), 'os.path.join', 'os.path.join', ({(555, 38, 555, 44): 'subdir', (555, 46, 555, 47): 'f'}, {}), '(subdir, f)', False, 'import os, sys, pickle, re\n'), ((735, 18, 735, 59), 'coredata.MesonException', 'MesonException', ({(735, 33, 735, 58): '"""Unknown C# target type."""'}, {}), "('Unknown C# target type.')", False, 'from coredata import MesonException\n'), ((986, 19, 986, 35), 'os.path.split', 'os.path.split', ({(986, 33, 986, 34): 'i'}, {}), '(i)', False, 'import os, sys, pickle, re\n'), ((1022, 18, 1022, 94), 'coredata.MesonException', 'MesonException', ({(1022, 33, 1022, 93): '"""Swift supports only executable and static library targets."""'}, {}), "('Swift supports only executable and static library targets.')", False, 'from coredata import MesonException\n'), ((1080, 19, 1080, 40), 'mesonlib.is_windows', 'mesonlib.is_windows', ({}, {}), '()', False, 'import environment, mesonlib\n'), ((1336, 34, 1336, 68), 'os.path.join', 'os.path.join', ({(1336, 47, 1336, 64): 'self.build_to_src', (1336, 66, 1336, 67): 'i'}, {}), '(self.build_to_src, i)', False, 'import os, sys, pickle, re\n'), ((1343, 29, 1343, 69), 'os.path.join', 'os.path.join', ({(1343, 42, 1343, 59): 'self.build_to_src', (1343, 61, 1343, 68): 'curfile'}, {}), '(self.build_to_src, curfile)', False, 'import os, sys, pickle, re\n'), ((1465, 13, 1465, 31), 'os.path.isabs', 'os.path.isabs', ({(1465, 27, 1465, 30): 'src'}, {}), '(src)', False, 'import os, sys, pickle, re\n'), ((1487, 26, 1487, 50), 'os.path.join', 'os.path.join', ({(1487, 39, 1487, 46): 'basedir', (1487, 48, 1487, 49): 'd'}, {}), '(basedir, d)', False, 'import os, sys, pickle, re\n'), ((1488, 29, 1488, 68), 'os.path.join', 'os.path.join', ({(1488, 42, 1488, 59): 'self.build_to_src', (1488, 61, 1488, 67): 'expdir'}, {}), '(self.build_to_src, expdir)', False, 'import os, sys, pickle, re\n'), ((1731, 16, 1731, 47), 'os.symlink', 'os.symlink', ({(1731, 27, 1731, 35): 'basename', (1731, 37, 1731, 46): 'aliasfile'}, {}), '(basename, aliasfile)', False, 'import os, sys, pickle, re\n'), ((198, 13, 198, 60), 'os.path.join', 'os.path.join', ({(198, 26, 198, 34): 'builddir', (198, 36, 198, 59): '"""compile_commands.json"""'}, {}), "(builddir, 'compile_commands.json')", False, 'import os, sys, pickle, re\n'), ((352, 28, 352, 62), 'os.path.join', 'os.path.join', ({(352, 41, 352, 58): 'self.build_to_src', (352, 60, 352, 61): 'i'}, {}), '(self.build_to_src, i)', False, 'import os, sys, pickle, re\n'), ((380, 22, 380, 80), 'coredata.MesonException', 
'MesonException', ({(380, 37, 380, 79): '"""Unreachable code in generate_run_target."""'}, {}), "('Unreachable code in generate_run_target.')", False, 'from coredata import MesonException\n'), ((538, 29, 538, 63), 'os.path.join', 'os.path.join', ({(538, 42, 538, 49): 'manroot', (538, 51, 538, 62): "'man' + num"}, {}), "(manroot, 'man' + num)", False, 'import os, sys, pickle, re\n'), ((715, 22, 715, 71), 'build.InvalidArguments', 'InvalidArguments', ({(715, 39, 715, 70): "('Unknown resource file %s.' % r)"}, {}), "('Unknown resource file %s.' % r)", False, 'from build import InvalidArguments\n'), ((760, 40, 760, 87), 'os.path.join', 'os.path.join', ({(760, 53, 760, 70): 'self.build_to_src', (760, 72, 760, 80): 'i.curdir', (760, 82, 760, 86): 'idir'}, {}), '(self.build_to_src, i.curdir, idir)', False, 'import os, sys, pickle, re\n'), ((812, 36, 812, 70), 'os.path.split', 'os.path.split', ({(812, 50, 812, 69): 'vala_input_files[0]'}, {}), '(vala_input_files[0])', False, 'import os, sys, pickle, re\n'), ((873, 18, 873, 68), 'build.InvalidArguments', 'InvalidArguments', ({(873, 35, 873, 67): '"""Unknown target type for rustc."""'}, {}), "('Unknown target type for rustc.')", False, 'from build import InvalidArguments\n'), ((987, 20, 987, 42), 'os.path.splitext', 'os.path.splitext', ({(987, 37, 987, 41): 'base'}, {}), '(base)', False, 'import os, sys, pickle, re\n'), ((1100, 53, 1100, 98), 'os.path.join', 'os.path.join', ({(1100, 66, 1100, 75): 'scriptdir', (1100, 77, 1100, 97): '"""symbolextractor.py"""'}, {}), "(scriptdir, 'symbolextractor.py')", False, 'import os, sys, pickle, re\n'), ((1321, 39, 1321, 84), 'os.path.join', 'os.path.join', ({(1321, 52, 1321, 63): 'private_dir', (1321, 65, 1321, 83): 'output_list[index]'}, {}), '(private_dir, output_list[index])', False, 'import os, sys, pickle, re\n'), ((1425, 33, 1425, 64), 'os.path.join', 'os.path.join', ({(1425, 46, 1425, 53): 'dirname', (1425, 55, 1425, 63): 'mod_name'}, {}), '(dirname, mod_name)', False, 'import os, sys, pickle, re\n'), ((1459, 22, 1459, 68), 'build.InvalidArguments', 'build.InvalidArguments', ({(1459, 45, 1459, 67): '"""Invalid source type."""'}, {}), "('Invalid source type.')", False, 'import build\n'), ((1466, 27, 1466, 48), 'os.path.basename', 'os.path.basename', ({(1466, 44, 1466, 47): 'src'}, {}), '(src)', False, 'import os, sys, pickle, re\n'), ((1728, 20, 1728, 40), 'os.remove', 'os.remove', ({(1728, 30, 1728, 39): 'aliasfile'}, {}), '(aliasfile)', False, 'import os, sys, pickle, re\n'), ((1769, 28, 1769, 46), 'os.path.split', 'os.path.split', ({(1769, 42, 1769, 45): 'src'}, {}), '(src)', False, 'import os, sys, pickle, re\n'), ((1395, 30, 1396, 83), 'build.InvalidArguments', 'InvalidArguments', ({(1395, 47, 1396, 82): "('Namespace collision: module %s defined in two files %s and %s.' % (\n modname, module_files[modname], s))"}, {}), "(\n 'Namespace collision: module %s defined in two files %s and %s.' 
% (\n modname, module_files[modname], s))", False, 'from build import InvalidArguments\n'), ((1422, 44, 1422, 62), 'os.path.split', 'os.path.split', ({(1422, 58, 1422, 61): 'src'}, {}), '(src)', False, 'import os, sys, pickle, re\n'), ((1582, 27, 1582, 45), 'os.path.split', 'os.path.split', ({(1582, 41, 1582, 44): 'pch'}, {}), '(pch)', False, 'import os, sys, pickle, re\n'), ((824, 40, 824, 58), 'os.path.split', 'os.path.split', ({(824, 54, 824, 57): 'src'}, {}), '(src)', False, 'import os, sys, pickle, re\n'), ((705, 45, 705, 64), 'os.path.basename', 'os.path.basename', ({(705, 62, 705, 63): 'r'}, {}), '(r)', False, 'import os, sys, pickle, re\n'), ((796, 48, 796, 64), 'os.path.split', 'os.path.split', ({(796, 62, 796, 63): 'i'}, {}), '(i)', False, 'import os, sys, pickle, re\n')]
y-tetsu/othello
tests/strategies/common/test_cputime.py
73eabfe22d6b44bbfa0b436e6287e3e7356620f4
"""Tests of cputime.py """ import unittest from reversi.strategies.common import CPU_TIME class TestCputime(unittest.TestCase): """cputime """ def test_cputime(self): self.assertEqual(CPU_TIME, 0.5)
[]
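A hedged illustration of what this constant is for — not taken from the reversi package, whose strategy internals are not part of this record. It only shows the common pattern of consuming a fixed per-move CPU budget like the 0.5-second CPU_TIME asserted above.

# Hypothetical sketch: iterate candidates until the assumed 0.5s budget runs out.
import time

from reversi.strategies.common import CPU_TIME

def search_with_budget(candidates):
    start = time.time()
    best = None
    for move in candidates:
        if time.time() - start > CPU_TIME:
            break  # stop once the per-move CPU budget is exhausted
        best = move
    return best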
coopersigrist/RecurrentNeuralSystem-
experiments/cifar10_recon.py
bd5bb680ec7f2166547709195f7bb3cd52cca5e8
# -*- coding: utf-8 -*-
"""ReNS experiments - CIFAR10

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1byZ4xTfCK2x1Rhkxpl-Vv4sqA-bo4bis

# SETUP
"""

#@title Installing PyTorch
# !pip install torch
# !pip install torchvision

#@title Import Dependencies
import numpy as np
import torch
import torch.nn as nn
import torchvision.datasets as dsets
import torchvision.transforms as transforms
from torch.autograd import Variable
from tqdm import tqdm
from typing import Optional, Union, Tuple, List, Sequence, Iterable
import math
from scipy.spatial.distance import euclidean
from torch.nn.modules.utils import _pair
from torchvision import models
from sklearn.metrics import jaccard_score
import matplotlib.pyplot as plt

from models.models import RegularAutoEncoder, ModulatedAutoEncoder, PseudoRecAutoEncoder

"""# TRAINING"""

batch_size = 32
num_epochs = 5

transform = transforms.Compose(
    [transforms.ToTensor(),
     transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])

# Load CIFAR10 data.
train_data = dsets.CIFAR10(root='./data', train=True,
                           transform=transform, download=True)

test_data = dsets.CIFAR10(root='./data', train=False,
                         transform=transform)

train_gen = torch.utils.data.DataLoader(dataset=train_data,
                                        batch_size=batch_size,
                                        shuffle=True)

test_gen = torch.utils.data.DataLoader(dataset=test_data,
                                       batch_size=batch_size,
                                       shuffle=False)

reflexor_size = 500
image_size = 32
channels = 3

# net = recurrentLayer(784, 784, 10, 5, 10, 0)
net1 = RegularAutoEncoder(channels * image_size ** 2, channels * image_size ** 2, reflexor_size)
net2 = ModulatedAutoEncoder(channels * image_size ** 2, channels * image_size ** 2, reflexor_size)
net3 = PseudoRecAutoEncoder(channels * image_size ** 2, channels * image_size ** 2, reflexor_size)

lr = .0001  # size of step

loss_function = nn.MSELoss()

# Unnormalize the image to display it
def img_fix(img):
  return np.transpose((img / 2 + 0.5).numpy(), (1, 2, 0))

# Commented out IPython magic to ensure Python compatibility.
train_losses = [[], [], []]
test_losses = [[], [], []]
real_imgs = [[], [], []]
reconstructed_imgs = [[], [], []]
param_counts = np.ones(3)
steps = [[], [], []]

for num, net in enumerate([net1, net2, net3]):
  optimizer = torch.optim.Adam(net.parameters(), lr=lr)
  param_counts[num] = sum(p.numel() for p in net.parameters() if p.requires_grad)
  for epoch in range(num_epochs):
    for i, (images, labels) in enumerate(train_gen):
      # images = Variable(images.view(-1,28*28))
      labels = Variable(images.view(-1, 3 * image_size ** 2))

      optimizer.zero_grad()
      outputs = net(images)
      loss = loss_function(outputs, labels)
      loss.backward()
      optimizer.step()

      if (i+1) % 300 == 0:
        temp_loss = loss.item()
        print('Epoch [%d/%d], Step [%d/%d], Loss: %.4f'
              % (epoch+1, num_epochs, i+1, len(train_data)//batch_size, temp_loss))
        dupe = Variable(outputs[0].data, requires_grad=False)
        # plt.imshow(img_fix(images[0]))
        # plt.show()
        # plt.imshow(img_fix(dupe.view(3, image_size, image_size)))
        # plt.show()
        train_losses[num].append(temp_loss)
        steps[num].append((50000 * epoch) + ((i + 1) * batch_size))
        real_imgs[num].append(img_fix(images[0]))
        reconstructed_imgs[num].append(img_fix(dupe.view(3, image_size, image_size)))

    # Test Data
    score = 0
    for images, labels in test_gen:
      # images = Variable(images.view(-1,784))
      output = net(images)
      score += loss_function(output, images.view(-1, 3 * image_size ** 2)).item()
    test_losses[num].append(score)

plt.plot(steps[0], train_losses[0], label="Baseline")
plt.plot(steps[1], train_losses[1], label="Modulated")
plt.plot(steps[2], train_losses[2], label="Recurrent with Modulation")
plt.xlabel('Iteration')
plt.ylabel('Loss')
plt.title('Training loss history')
plt.legend()
plt.show()

# Test losses are recorded once per epoch, so plot them against epochs
# rather than against the (far more numerous) training-step markers.
epochs = list(range(1, num_epochs + 1))
plt.plot(epochs, test_losses[0], label="Baseline")
plt.plot(epochs, test_losses[1], label="Modulated")
plt.plot(epochs, test_losses[2], label="Recurrent with Modulation")
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.title('Testing loss history')
plt.legend()
plt.show()

for num, count in enumerate(param_counts):
  param_counts[num] /= 1000

plt.bar(["Base", "Modulated", "ReNS"], param_counts)
plt.xlabel('Model')
plt.ylabel('# of thousands of Parameters')
plt.show()

from mpl_toolkits.axes_grid1 import ImageGrid

num_smaples = len(real_imgs[0])

for num in [0, 1, 2]:
  fig = plt.figure(figsize=(20., 20.))
  grid = ImageGrid(fig, 111,  # similar to subplot(111)
                   nrows_ncols=(2, num_smaples),  # creates 2x2 grid of axes
                   axes_pad=0.1,  # pad between axes in inch.
                   )

  for ax, im in zip(grid, real_imgs[num] + reconstructed_imgs[num]):
    # Iterating over the grid returns the Axes.
    ax.imshow(im)
    ax.axis("off")

  plt.show()
[((45, 13, 46, 63), 'torchvision.datasets.CIFAR10', 'dsets.CIFAR10', (), '', True, 'import torchvision.datasets as dsets\n'), ((48, 12, 49, 45), 'torchvision.datasets.CIFAR10', 'dsets.CIFAR10', (), '', True, 'import torchvision.datasets as dsets\n'), ((51, 12, 53, 60), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (), '', False, 'import torch\n'), ((55, 11, 57, 54), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (), '', False, 'import torch\n'), ((64, 7, 64, 96), 'models.models.RegularAutoEncoder', 'RegularAutoEncoder', ({(64, 26, 64, 52): 'channels * image_size ** 2', (64, 54, 64, 80): 'channels * image_size ** 2', (64, 82, 64, 95): 'reflexor_size'}, {}), '(channels * image_size ** 2, channels * image_size ** 2,\n reflexor_size)', False, 'from models.models import RegularAutoEncoder, ModulatedAutoEncoder, PseudoRecAutoEncoder\n'), ((65, 7, 65, 98), 'models.models.ModulatedAutoEncoder', 'ModulatedAutoEncoder', ({(65, 28, 65, 54): 'channels * image_size ** 2', (65, 56, 65, 82): 'channels * image_size ** 2', (65, 84, 65, 97): 'reflexor_size'}, {}), '(channels * image_size ** 2, channels * image_size ** 2,\n reflexor_size)', False, 'from models.models import RegularAutoEncoder, ModulatedAutoEncoder, PseudoRecAutoEncoder\n'), ((66, 7, 66, 98), 'models.models.PseudoRecAutoEncoder', 'PseudoRecAutoEncoder', ({(66, 28, 66, 54): 'channels * image_size ** 2', (66, 56, 66, 82): 'channels * image_size ** 2', (66, 84, 66, 97): 'reflexor_size'}, {}), '(channels * image_size ** 2, channels * image_size ** 2,\n reflexor_size)', False, 'from models.models import RegularAutoEncoder, ModulatedAutoEncoder, PseudoRecAutoEncoder\n'), ((71, 16, 71, 28), 'torch.nn.MSELoss', 'nn.MSELoss', ({}, {}), '()', True, 'import torch.nn as nn\n'), ((84, 15, 84, 25), 'numpy.ones', 'np.ones', ({(84, 23, 84, 24): '3'}, {}), '(3)', True, 'import numpy as np\n'), ((130, 0, 130, 54), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((131, 0, 131, 55), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((132, 0, 132, 71), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((133, 0, 133, 23), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(133, 11, 133, 22): '"""Iteration"""'}, {}), "('Iteration')", True, 'import matplotlib.pyplot as plt\n'), ((134, 0, 134, 18), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(134, 11, 134, 17): '"""Loss"""'}, {}), "('Loss')", True, 'import matplotlib.pyplot as plt\n'), ((135, 0, 135, 34), 'matplotlib.pyplot.title', 'plt.title', ({(135, 10, 135, 33): '"""Training loss history"""'}, {}), "('Training loss history')", True, 'import matplotlib.pyplot as plt\n'), ((136, 0, 136, 12), 'matplotlib.pyplot.legend', 'plt.legend', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((137, 0, 137, 10), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((139, 0, 139, 53), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((140, 0, 140, 54), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((141, 0, 141, 70), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((142, 0, 142, 23), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(142, 11, 142, 22): '"""Iteration"""'}, {}), "('Iteration')", True, 'import matplotlib.pyplot as plt\n'), ((143, 0, 143, 18), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(143, 11, 
143, 17): '"""Loss"""'}, {}), "('Loss')", True, 'import matplotlib.pyplot as plt\n'), ((144, 0, 144, 33), 'matplotlib.pyplot.title', 'plt.title', ({(144, 10, 144, 32): '"""Testing loss history"""'}, {}), "('Testing loss history')", True, 'import matplotlib.pyplot as plt\n'), ((145, 0, 145, 12), 'matplotlib.pyplot.legend', 'plt.legend', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((146, 0, 146, 10), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((152, 0, 152, 52), 'matplotlib.pyplot.bar', 'plt.bar', ({(152, 8, 152, 37): "['Base', 'Modulated', 'ReNS']", (152, 39, 152, 51): 'param_counts'}, {}), "(['Base', 'Modulated', 'ReNS'], param_counts)", True, 'import matplotlib.pyplot as plt\n'), ((153, 0, 153, 19), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(153, 11, 153, 18): '"""Model"""'}, {}), "('Model')", True, 'import matplotlib.pyplot as plt\n'), ((154, 0, 154, 42), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(154, 11, 154, 41): '"""# of thousands of Parameters"""'}, {}), "('# of thousands of Parameters')", True, 'import matplotlib.pyplot as plt\n'), ((155, 0, 155, 10), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((163, 8, 163, 37), 'matplotlib.pyplot.figure', 'plt.figure', (), '', True, 'import matplotlib.pyplot as plt\n'), ((164, 9, 167, 19), 'mpl_toolkits.axes_grid1.ImageGrid', 'ImageGrid', (), '', False, 'from mpl_toolkits.axes_grid1 import ImageGrid\n'), ((174, 2, 174, 12), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((41, 5, 41, 26), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ({}, {}), '()', True, 'import torchvision.transforms as transforms\n'), ((42, 5, 42, 59), 'torchvision.transforms.Normalize', 'transforms.Normalize', ({(42, 26, 42, 41): '(0.5, 0.5, 0.5)', (42, 43, 42, 58): '(0.5, 0.5, 0.5)'}, {}), '((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))', True, 'import torchvision.transforms as transforms\n'), ((108, 15, 108, 61), 'torch.autograd.Variable', 'Variable', (), '', False, 'from torch.autograd import Variable\n')]
ameoba/horizon
horizon/forms/__init__.py
ff9e367c98a8bb79f10914abffaaa04b0a461819
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# FIXME(gabriel): Legacy imports for API compatibility.
from django.forms import *  # noqa
from django.forms import widgets

# Convenience imports for public API components.
from horizon.forms.base import DateForm  # noqa
from horizon.forms.base import SelfHandlingForm  # noqa
from horizon.forms.base import SelfHandlingMixin  # noqa
from horizon.forms.fields import DynamicChoiceField  # noqa
from horizon.forms.fields import DynamicTypedChoiceField  # noqa
from horizon.forms.views import ModalFormMixin  # noqa
from horizon.forms.views import ModalFormView  # noqa

assert widgets
assert SelfHandlingMixin
assert SelfHandlingForm
assert DateForm
assert ModalFormView
assert ModalFormMixin
assert DynamicTypedChoiceField
assert DynamicChoiceField
[]
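A hedged sketch, not from Horizon itself, of how these re-exports are typically consumed: a SelfHandlingForm subclass declares Django fields and implements handle(), which the modal form views invoke on valid submission. The class, field, and return value here are illustrative assumptions.

from horizon import forms


class RenameForm(forms.SelfHandlingForm):
    # CharField comes from the django.forms re-export above
    name = forms.CharField(max_length=255, label="New Name")

    def handle(self, request, data):
        # Perform the actual rename via the appropriate API client here;
        # returning a truthy value signals success to ModalFormView.
        return True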
noironetworks/heat
heat/tests/test_rpc_listener_client.py
7cdadf1155f4d94cf8f967635b98e4012a7acfb7
#    Licensed under the Apache License, Version 2.0 (the "License");
#    you may not use this file except in compliance with the License.
#    You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS,
#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
#    implied.
#    See the License for the specific language governing permissions and
#    limitations under the License.

import mock
import oslo_messaging as messaging

from heat.rpc import api as rpc_api
from heat.rpc import listener_client as rpc_client
from heat.tests import common


class ListenerClientTest(common.HeatTestCase):

    @mock.patch('heat.common.messaging.get_rpc_client',
                return_value=mock.Mock())
    def test_engine_alive_ok(self, rpc_client_method):
        mock_rpc_client = rpc_client_method.return_value
        mock_prepare_method = mock_rpc_client.prepare
        mock_prepare_client = mock_prepare_method.return_value
        mock_cnxt = mock.Mock()

        listener_client = rpc_client.EngineListenerClient('engine-007')
        rpc_client_method.assert_called_once_with(
            version=rpc_client.EngineListenerClient.BASE_RPC_API_VERSION,
            topic=rpc_api.LISTENER_TOPIC, server='engine-007',
        )
        mock_prepare_method.assert_called_once_with(timeout=2)
        self.assertEqual(mock_prepare_client, listener_client._client,
                         "Failed to create RPC client")

        ret = listener_client.is_alive(mock_cnxt)
        self.assertTrue(ret)
        mock_prepare_client.call.assert_called_once_with(mock_cnxt,
                                                         'listening')

    @mock.patch('heat.common.messaging.get_rpc_client',
                return_value=mock.Mock())
    def test_engine_alive_timeout(self, rpc_client_method):
        mock_rpc_client = rpc_client_method.return_value
        mock_prepare_method = mock_rpc_client.prepare
        mock_prepare_client = mock_prepare_method.return_value
        mock_cnxt = mock.Mock()

        listener_client = rpc_client.EngineListenerClient('engine-007')
        rpc_client_method.assert_called_once_with(
            version=rpc_client.EngineListenerClient.BASE_RPC_API_VERSION,
            topic=rpc_api.LISTENER_TOPIC, server='engine-007',
        )
        mock_prepare_method.assert_called_once_with(timeout=2)
        self.assertEqual(mock_prepare_client, listener_client._client,
                         "Failed to create RPC client")

        mock_prepare_client.call.side_effect = messaging.MessagingTimeout(
            'too slow')

        ret = listener_client.is_alive(mock_cnxt)
        self.assertFalse(ret)
        mock_prepare_client.call.assert_called_once_with(mock_cnxt,
                                                         'listening')
[((30, 20, 30, 31), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((32, 26, 32, 71), 'heat.rpc.listener_client.EngineListenerClient', 'rpc_client.EngineListenerClient', ({(32, 58, 32, 70): '"""engine-007"""'}, {}), "('engine-007')", True, 'from heat.rpc import listener_client as rpc_client\n'), ((53, 20, 53, 31), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((55, 26, 55, 71), 'heat.rpc.listener_client.EngineListenerClient', 'rpc_client.EngineListenerClient', ({(55, 58, 55, 70): '"""engine-007"""'}, {}), "('engine-007')", True, 'from heat.rpc import listener_client as rpc_client\n'), ((65, 47, 66, 23), 'oslo_messaging.MessagingTimeout', 'messaging.MessagingTimeout', ({(66, 12, 66, 22): '"""too slow"""'}, {}), "('too slow')", True, 'import oslo_messaging as messaging\n'), ((25, 29, 25, 40), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((48, 29, 48, 40), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n')]
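A behavioral sketch inferred from the two tests above, not copied from Heat's listener_client module: is_alive() reports True when the 'listening' RPC succeeds and swallows a MessagingTimeout as "engine not alive" rather than letting it propagate.

# Inferred behavior only; the real method lives on EngineListenerClient and
# uses its prepared RPC client.
import oslo_messaging as messaging


def is_alive(prepared_client, ctxt):
    try:
        prepared_client.call(ctxt, 'listening')
        return True
    except messaging.MessagingTimeout:
        return False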
akshitsingla/amadeus-python
amadeus/travel/trip_parser_jobs/_status.py
d8f3595e556b674998156f98d8a318045bb4c21c
from amadeus.client.decorator import Decorator


class TripParserStatus(Decorator, object):
    def __init__(self, client, job_id):
        Decorator.__init__(self, client)
        self.job_id = job_id

    def get(self, **params):
        '''
        Returns the parsing status and the link to the result
        in case of successful parsing.

        .. code-block:: python

            amadeus.travel.trip_parser_jobs.status('XXX').get()

        :rtype: amadeus.Response
        :raises amadeus.ResponseError: if the request could not be completed
        '''
        return self.client.get(
            '/v2/travel/trip-parser-jobs/{0}'.format(self.job_id),
            **params)
[((6, 8, 6, 40), 'amadeus.client.decorator.Decorator.__init__', 'Decorator.__init__', ({(6, 27, 6, 31): 'self', (6, 33, 6, 39): 'client'}, {}), '(self, client)', False, 'from amadeus.client.decorator import Decorator\n')]
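A usage sketch grounded in the docstring above; the credentials and the job id 'XXX' are placeholders, and the Client/ResponseError entry points are the standard amadeus-python ones.

from amadeus import Client, ResponseError

amadeus = Client(client_id='YOUR_API_KEY', client_secret='YOUR_API_SECRET')

try:
    # Poll the parsing status for a previously submitted trip-parser job
    response = amadeus.travel.trip_parser_jobs.status('XXX').get()
    print(response.data)
except ResponseError as error:
    print(error)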
meyerweb/wpt
tools/third_party/iniconfig/testing/test_iniconfig.py
f04261533819893c71289614c03434c06856c13e
import py
import pytest

from iniconfig import IniConfig, ParseError, __all__ as ALL
from iniconfig import iscommentline
from textwrap import dedent


check_tokens = {
    'section': (
        '[section]',
        [(0, 'section', None, None)]
    ),
    'value': (
        'value = 1',
        [(0, None, 'value', '1')]
    ),
    'value in section': (
        '[section]\nvalue=1',
        [(0, 'section', None, None), (1, 'section', 'value', '1')]
    ),
    'value with continuation': (
        'names =\n Alice\n Bob',
        [(0, None, 'names', 'Alice\nBob')]
    ),
    'value with aligned continuation': (
        'names = Alice\n'
        '        Bob',
        [(0, None, 'names', 'Alice\nBob')]
    ),
    'blank line': (
        '[section]\n\nvalue=1',
        [(0, 'section', None, None), (2, 'section', 'value', '1')]
    ),
    'comment': (
        '# comment',
        []
    ),
    'comment on value': (
        'value = 1',
        [(0, None, 'value', '1')]
    ),
    'comment on section': (
        '[section] #comment',
        [(0, 'section', None, None)]
    ),
    'comment2': (
        '; comment',
        []
    ),
    'comment2 on section': (
        '[section] ;comment',
        [(0, 'section', None, None)]
    ),
    'pseudo section syntax in value': (
        'name = value []',
        [(0, None, 'name', 'value []')]
    ),
    'assignment in value': (
        'value = x = 3',
        [(0, None, 'value', 'x = 3')]
    ),
    'use of colon for name-values': (
        'name: y',
        [(0, None, 'name', 'y')]
    ),
    'use of colon without space': (
        'value:y=5',
        [(0, None, 'value', 'y=5')]
    ),
    'equality gets precedence': (
        'value=xyz:5',
        [(0, None, 'value', 'xyz:5')]
    ),
}


@pytest.fixture(params=sorted(check_tokens))
def input_expected(request):
    return check_tokens[request.param]


@pytest.fixture
def input(input_expected):
    return input_expected[0]


@pytest.fixture
def expected(input_expected):
    return input_expected[1]


def parse(input):
    # only for testing purposes - _parse() does not use state except path
    ini = object.__new__(IniConfig)
    ini.path = "sample"
    return ini._parse(input.splitlines(True))


def parse_a_error(input):
    return py.test.raises(ParseError, parse, input)


def test_tokenize(input, expected):
    parsed = parse(input)
    assert parsed == expected


def test_parse_empty():
    parsed = parse("")
    assert not parsed
    ini = IniConfig("sample", "")
    assert not ini.sections


def test_ParseError():
    e = ParseError("filename", 0, "hello")
    assert str(e) == "filename:1: hello"


def test_continuation_needs_perceeding_token():
    excinfo = parse_a_error(' Foo')
    assert excinfo.value.lineno == 0


def test_continuation_cant_be_after_section():
    excinfo = parse_a_error('[section]\n Foo')
    assert excinfo.value.lineno == 1


def test_section_cant_be_empty():
    excinfo = parse_a_error('[]')
    assert excinfo.value.lineno == 0


@py.test.mark.parametrize('line', [
    '!!',
])
def test_error_on_weird_lines(line):
    parse_a_error(line)


def test_iniconfig_from_file(tmpdir):
    path = tmpdir/'test.txt'
    path.write('[metadata]\nname=1')
    config = IniConfig(path=path)
    assert list(config.sections) == ['metadata']
    config = IniConfig(path, "[diff]")
    assert list(config.sections) == ['diff']
    with pytest.raises(TypeError):
        IniConfig(data=path.read())


def test_iniconfig_section_first(tmpdir):
    with pytest.raises(ParseError) as excinfo:
        IniConfig("x", data='name=1')
    assert excinfo.value.msg == "no section header defined"


def test_iniconig_section_duplicate_fails():
    with pytest.raises(ParseError) as excinfo:
        IniConfig("x", data='[section]\n[section]')
    assert 'duplicate section' in str(excinfo.value)


def test_iniconfig_duplicate_key_fails():
    with pytest.raises(ParseError) as excinfo:
        IniConfig("x", data='[section]\nname = Alice\nname = bob')
    assert 'duplicate name' in str(excinfo.value)


def test_iniconfig_lineof():
    config = IniConfig("x.ini", data=(
        '[section]\n'
        'value = 1\n'
        '[section2]\n'
        '# comment\n'
        'value =2'
    ))

    assert config.lineof('missing') is None
    assert config.lineof('section') == 1
    assert config.lineof('section2') == 3
    assert config.lineof('section', 'value') == 2
    assert config.lineof('section2', 'value') == 5

    assert config['section'].lineof('value') == 2
    assert config['section2'].lineof('value') == 5


def test_iniconfig_get_convert():
    config = IniConfig("x", data='[section]\nint = 1\nfloat = 1.1')
    assert config.get('section', 'int') == '1'
    assert config.get('section', 'int', convert=int) == 1


def test_iniconfig_get_missing():
    config = IniConfig("x", data='[section]\nint = 1\nfloat = 1.1')
    assert config.get('section', 'missing', default=1) == 1
    assert config.get('section', 'missing') is None


def test_section_get():
    config = IniConfig("x", data='[section]\nvalue=1')
    section = config['section']
    assert section.get('value', convert=int) == 1
    assert section.get('value', 1) == "1"
    assert section.get('missing', 2) == 2


def test_missing_section():
    config = IniConfig("x", data='[section]\nvalue=1')
    with pytest.raises(KeyError):
        config["other"]


def test_section_getitem():
    config = IniConfig("x", data='[section]\nvalue=1')
    assert config['section']['value'] == '1'
    assert config['section']['value'] == '1'


def test_section_iter():
    config = IniConfig("x", data='[section]\nvalue=1')
    names = list(config['section'])
    assert names == ['value']
    items = list(config['section'].items())
    assert items == [('value', '1')]


def test_config_iter():
    config = IniConfig("x.ini", data=dedent('''
        [section1]
        value=1
        [section2]
        value=2
    '''))
    l = list(config)
    assert len(l) == 2
    assert l[0].name == 'section1'
    assert l[0]['value'] == '1'
    assert l[1].name == 'section2'
    assert l[1]['value'] == '2'


def test_config_contains():
    config = IniConfig("x.ini", data=dedent('''
        [section1]
        value=1
        [section2]
        value=2
    '''))
    assert 'xyz' not in config
    assert 'section1' in config
    assert 'section2' in config


def test_iter_file_order():
    config = IniConfig("x.ini", data="""
[section2] #cpython dict ordered before section
value = 1
value2 = 2 # dict ordered before value
[section]
a = 1
b = 2
""")
    l = list(config)
    secnames = [x.name for x in l]
    assert secnames == ['section2', 'section']
    assert list(config['section2']) == ['value', 'value2']
    assert list(config['section']) == ['a', 'b']


def test_example_pypirc():
    config = IniConfig("pypirc", data=dedent('''
        [distutils]
        index-servers =
            pypi
            other

        [pypi]
        repository: <repository-url>
        username: <username>
        password: <password>

        [other]
        repository: http://example.com/pypi
        username: <username>
        password: <password>
    '''))
    distutils, pypi, other = list(config)
    assert distutils["index-servers"] == "pypi\nother"
    assert pypi['repository'] == '<repository-url>'
    assert pypi['username'] == '<username>'
    assert pypi['password'] == '<password>'
    assert ['repository', 'username', 'password'] == list(other)


def test_api_import():
    assert ALL == ['IniConfig', 'ParseError']


@pytest.mark.parametrize("line", [
    "#qwe",
    " #qwe",
    ";qwe",
    " ;qwe",
])
def test_iscommentline_true(line):
    assert iscommentline(line)
[((138, 1, 140, 6), 'py.test.mark.parametrize', 'py.test.mark.parametrize', ({(138, 26, 138, 32): '"""line"""', (138, 34, 140, 5): "['!!']"}, {}), "('line', ['!!'])", False, 'import py\n'), ((307, 1, 312, 2), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(307, 25, 307, 31): '"""line"""', (307, 33, 312, 1): "['#qwe', ' #qwe', ';qwe', ' ;qwe']"}, {}), "('line', ['#qwe', ' #qwe', ';qwe', ' ;qwe'])", False, 'import pytest\n'), ((103, 11, 103, 51), 'py.test.raises', 'py.test.raises', ({(103, 26, 103, 36): 'ParseError', (103, 38, 103, 43): 'parse', (103, 45, 103, 50): 'input'}, {}), '(ParseError, parse, input)', False, 'import py\n'), ((114, 10, 114, 33), 'iniconfig.IniConfig', 'IniConfig', ({(114, 20, 114, 28): '"""sample"""', (114, 30, 114, 32): '""""""'}, {}), "('sample', '')", False, 'from iniconfig import IniConfig, ParseError, __all__ as ALL\n'), ((119, 8, 119, 42), 'iniconfig.ParseError', 'ParseError', ({(119, 19, 119, 29): '"""filename"""', (119, 31, 119, 32): '0', (119, 34, 119, 41): '"""hello"""'}, {}), "('filename', 0, 'hello')", False, 'from iniconfig import IniConfig, ParseError, __all__ as ALL\n'), ((149, 13, 149, 33), 'iniconfig.IniConfig', 'IniConfig', (), '', False, 'from iniconfig import IniConfig, ParseError, __all__ as ALL\n'), ((151, 13, 151, 38), 'iniconfig.IniConfig', 'IniConfig', ({(151, 23, 151, 27): 'path', (151, 29, 151, 37): '"""[diff]"""'}, {}), "(path, '[diff]')", False, 'from iniconfig import IniConfig, ParseError, __all__ as ALL\n'), ((177, 13, 183, 6), 'iniconfig.IniConfig', 'IniConfig', (), '', False, 'from iniconfig import IniConfig, ParseError, __all__ as ALL\n'), ((196, 13, 196, 67), 'iniconfig.IniConfig', 'IniConfig', (), '', False, 'from iniconfig import IniConfig, ParseError, __all__ as ALL\n'), ((202, 13, 202, 67), 'iniconfig.IniConfig', 'IniConfig', (), '', False, 'from iniconfig import IniConfig, ParseError, __all__ as ALL\n'), ((208, 13, 208, 54), 'iniconfig.IniConfig', 'IniConfig', (), '', False, 'from iniconfig import IniConfig, ParseError, __all__ as ALL\n'), ((216, 13, 216, 54), 'iniconfig.IniConfig', 'IniConfig', (), '', False, 'from iniconfig import IniConfig, ParseError, __all__ as ALL\n'), ((222, 13, 222, 54), 'iniconfig.IniConfig', 'IniConfig', (), '', False, 'from iniconfig import IniConfig, ParseError, __all__ as ALL\n'), ((228, 13, 228, 54), 'iniconfig.IniConfig', 'IniConfig', (), '', False, 'from iniconfig import IniConfig, ParseError, __all__ as ALL\n'), ((263, 13, 270, 4), 'iniconfig.IniConfig', 'IniConfig', (), '', False, 'from iniconfig import IniConfig, ParseError, __all__ as ALL\n'), ((314, 11, 314, 30), 'iniconfig.iscommentline', 'iscommentline', ({(314, 25, 314, 29): 'line'}, {}), '(line)', False, 'from iniconfig import iscommentline\n'), ((153, 9, 153, 33), 'pytest.raises', 'pytest.raises', ({(153, 23, 153, 32): 'TypeError'}, {}), '(TypeError)', False, 'import pytest\n'), ((158, 9, 158, 34), 'pytest.raises', 'pytest.raises', ({(158, 23, 158, 33): 'ParseError'}, {}), '(ParseError)', False, 'import pytest\n'), ((159, 8, 159, 37), 'iniconfig.IniConfig', 'IniConfig', (), '', False, 'from iniconfig import IniConfig, ParseError, __all__ as ALL\n'), ((164, 9, 164, 34), 'pytest.raises', 'pytest.raises', ({(164, 23, 164, 33): 'ParseError'}, {}), '(ParseError)', False, 'import pytest\n'), ((165, 8, 165, 51), 'iniconfig.IniConfig', 'IniConfig', (), '', False, 'from iniconfig import IniConfig, ParseError, __all__ as ALL\n'), ((170, 9, 170, 34), 'pytest.raises', 'pytest.raises', ({(170, 23, 170, 33): 'ParseError'}, {}), 
'(ParseError)', False, 'import pytest\n'), ((171, 8, 171, 66), 'iniconfig.IniConfig', 'IniConfig', (), '', False, 'from iniconfig import IniConfig, ParseError, __all__ as ALL\n'), ((217, 9, 217, 32), 'pytest.raises', 'pytest.raises', ({(217, 23, 217, 31): 'KeyError'}, {}), '(KeyError)', False, 'import pytest\n'), ((236, 37, 241, 8), 'textwrap.dedent', 'dedent', ({(236, 44, 241, 7): '"""\n [section1]\n value=1\n [section2]\n value=2\n """'}, {}), '(\n """\n [section1]\n value=1\n [section2]\n value=2\n """\n )', False, 'from textwrap import dedent\n'), ((251, 37, 256, 8), 'textwrap.dedent', 'dedent', ({(251, 44, 256, 7): '"""\n [section1]\n value=1\n [section2]\n value=2\n """'}, {}), '(\n """\n [section1]\n value=1\n [section2]\n value=2\n """\n )', False, 'from textwrap import dedent\n'), ((279, 38, 294, 8), 'textwrap.dedent', 'dedent', ({(279, 45, 294, 7): '"""\n [distutils]\n index-servers =\n pypi\n other\n\n [pypi]\n repository: <repository-url>\n username: <username>\n password: <password>\n\n [other]\n repository: http://example.com/pypi\n username: <username>\n password: <password>\n """'}, {}), '(\n """\n [distutils]\n index-servers =\n pypi\n other\n\n [pypi]\n repository: <repository-url>\n username: <username>\n password: <password>\n\n [other]\n repository: http://example.com/pypi\n username: <username>\n password: <password>\n """\n )', False, 'from textwrap import dedent\n')]
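A small usage sketch derived from the tests above; the file name, section, and values are illustrative placeholders, but every call shown is exercised by the test suite.

from iniconfig import IniConfig

config = IniConfig("settings.ini",
                   data='[server]\nport = 8080\nhost = localhost')

# Values are strings unless converted explicitly
assert config.get('server', 'port', convert=int) == 8080
assert config['server']['host'] == 'localhost'

# lineof() reports 1-based source line numbers
assert config.lineof('server', 'port') == 2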
natebragg/java-sketch
jskparser/jskparser/util.py
f5ac26f2cc46ae4556f9a61c55afd37f55c961ff
import os

from subprocess import call

from . import glob2

pwd = os.path.dirname(__file__)

def get_files_from_path(path, ext):
    # use set to remove duplicate files. weird...but it happens
    if os.path.isfile(path): return set([os.path.abspath(path)])
    else:  # i.e., folder
        files = glob2.glob(os.path.abspath(os.path.join(path, "**/*.{}".format(ext))))
        return set(sorted(files))  # to guarantee the order of files read

"""
handling javajskparser AST
"""
def toAST(files, ext, add_libs):
    prg_files = []
    for f in files:
        prg_files.extend(get_files_from_path(f, "java"))
    if not prg_files: exit('jskparser.util: File(s) not found!')
    java_in = os.path.abspath(os.path.join(pwd, '../tests/ir_asts/API.java'))
    json_out = os.path.abspath(os.path.join(pwd, '../tests/ir_asts/java.json'))
    if add_libs:
        obj_path = os.path.abspath(os.path.join(pwd, '../../model/lang/Object.java'))
        str_path = os.path.abspath(os.path.join(pwd, '../../model/lang/String.java'))
        num_path = os.path.abspath(os.path.join(pwd, '../../model/lang/Number.java'))
        int_path = os.path.abspath(os.path.join(pwd, '../../model/lang/Integer.java'))
        char_path = os.path.abspath(os.path.join(pwd, '../../model/lang/Character.java'))
        itbl_path = os.path.abspath(os.path.join(pwd, '../../model/lang/Iterable.java'))
        iter_path = os.path.abspath(os.path.join(pwd, '../../model/util/Iterator.java'))
        arr_path = os.path.abspath(os.path.join(pwd, '../../model/util/Arrays.java'))
        list_path = os.path.abspath(os.path.join(pwd, '../../model/util/List.java'))
        alist_path = os.path.abspath(os.path.join(pwd, '../../model/util/ArrayList.java'))
        llist_path = os.path.abspath(os.path.join(pwd, '../../model/util/LinkedList.java'))
        hmap_path = os.path.abspath(os.path.join(pwd, '../../model/util/HashMap.java'))
        hset_path = os.path.abspath(os.path.join(pwd, '../../model/util/HashSet.java'))
        if obj_path not in prg_files: prg_files.append(obj_path)
        if str_path not in prg_files: prg_files.append(str_path)
        if num_path not in prg_files: prg_files.append(num_path)
        if int_path not in prg_files: prg_files.append(int_path)
        if char_path not in prg_files: prg_files.append(char_path)
        if itbl_path not in prg_files: prg_files.append(itbl_path)
        if iter_path not in prg_files: prg_files.append(iter_path)
        if arr_path not in prg_files: prg_files.append(arr_path)
        if list_path not in prg_files: prg_files.append(list_path)
        if alist_path not in prg_files: prg_files.append(alist_path)
        if llist_path not in prg_files: prg_files.append(llist_path)
        if hmap_path not in prg_files: prg_files.append(hmap_path)
        if hset_path not in prg_files: prg_files.append(hset_path)
    api = ""
    for fname in prg_files:
        with open(fname, 'r') as fd:
            api += fd.read()
    with open(java_in, 'w') as fd:
        fd.write(api)
    # this classpath stuff seems awful. Jsonify is hardcoded, passing a
    # single string to subprocess.call is platform dependent, and shell=True
    # can be a security vulnerability (if allowed to take user input).
    # This just got a whole lot nastier
    cmd = 'cd ' + pwd + '/..; /usr/bin/java -cp .:javaparser/javaparser-core/target/classes:$HOME/.m2/repository/com/cedarsoftware/json-io/4.3.0/json-io-4.3.0.jar jskparser.Jsonify ' + java_in + ' ' + json_out
    ret = call(cmd, shell=True)
    if ret != 0: exit('Problem parsing.')
    return json_out
[((6, 6, 6, 31), 'os.path.dirname', 'os.path.dirname', ({(6, 22, 6, 30): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((10, 7, 10, 27), 'os.path.isfile', 'os.path.isfile', ({(10, 22, 10, 26): 'path'}, {}), '(path)', False, 'import os\n'), ((63, 10, 63, 31), 'subprocess.call', 'call', (), '', False, 'from subprocess import call\n'), ((23, 30, 23, 76), 'os.path.join', 'os.path.join', ({(23, 43, 23, 46): 'pwd', (23, 48, 23, 75): '"""../tests/ir_asts/API.java"""'}, {}), "(pwd, '../tests/ir_asts/API.java')", False, 'import os\n'), ((24, 31, 24, 78), 'os.path.join', 'os.path.join', ({(24, 44, 24, 47): 'pwd', (24, 49, 24, 77): '"""../tests/ir_asts/java.json"""'}, {}), "(pwd, '../tests/ir_asts/java.json')", False, 'import os\n'), ((26, 35, 26, 84), 'os.path.join', 'os.path.join', ({(26, 48, 26, 51): 'pwd', (26, 53, 26, 83): '"""../../model/lang/Object.java"""'}, {}), "(pwd, '../../model/lang/Object.java')", False, 'import os\n'), ((27, 35, 27, 84), 'os.path.join', 'os.path.join', ({(27, 48, 27, 51): 'pwd', (27, 53, 27, 83): '"""../../model/lang/String.java"""'}, {}), "(pwd, '../../model/lang/String.java')", False, 'import os\n'), ((28, 35, 28, 84), 'os.path.join', 'os.path.join', ({(28, 48, 28, 51): 'pwd', (28, 53, 28, 83): '"""../../model/lang/Number.java"""'}, {}), "(pwd, '../../model/lang/Number.java')", False, 'import os\n'), ((29, 35, 29, 85), 'os.path.join', 'os.path.join', ({(29, 48, 29, 51): 'pwd', (29, 53, 29, 84): '"""../../model/lang/Integer.java"""'}, {}), "(pwd, '../../model/lang/Integer.java')", False, 'import os\n'), ((30, 36, 30, 88), 'os.path.join', 'os.path.join', ({(30, 49, 30, 52): 'pwd', (30, 54, 30, 87): '"""../../model/lang/Character.java"""'}, {}), "(pwd, '../../model/lang/Character.java')", False, 'import os\n'), ((31, 36, 31, 87), 'os.path.join', 'os.path.join', ({(31, 49, 31, 52): 'pwd', (31, 54, 31, 86): '"""../../model/lang/Iterable.java"""'}, {}), "(pwd, '../../model/lang/Iterable.java')", False, 'import os\n'), ((32, 36, 32, 87), 'os.path.join', 'os.path.join', ({(32, 49, 32, 52): 'pwd', (32, 54, 32, 86): '"""../../model/util/Iterator.java"""'}, {}), "(pwd, '../../model/util/Iterator.java')", False, 'import os\n'), ((33, 35, 33, 84), 'os.path.join', 'os.path.join', ({(33, 48, 33, 51): 'pwd', (33, 53, 33, 83): '"""../../model/util/Arrays.java"""'}, {}), "(pwd, '../../model/util/Arrays.java')", False, 'import os\n'), ((34, 36, 34, 83), 'os.path.join', 'os.path.join', ({(34, 49, 34, 52): 'pwd', (34, 54, 34, 82): '"""../../model/util/List.java"""'}, {}), "(pwd, '../../model/util/List.java')", False, 'import os\n'), ((35, 37, 35, 89), 'os.path.join', 'os.path.join', ({(35, 50, 35, 53): 'pwd', (35, 55, 35, 88): '"""../../model/util/ArrayList.java"""'}, {}), "(pwd, '../../model/util/ArrayList.java')", False, 'import os\n'), ((36, 37, 36, 90), 'os.path.join', 'os.path.join', ({(36, 50, 36, 53): 'pwd', (36, 55, 36, 89): '"""../../model/util/LinkedList.java"""'}, {}), "(pwd, '../../model/util/LinkedList.java')", False, 'import os\n'), ((37, 36, 37, 86), 'os.path.join', 'os.path.join', ({(37, 49, 37, 52): 'pwd', (37, 54, 37, 85): '"""../../model/util/HashMap.java"""'}, {}), "(pwd, '../../model/util/HashMap.java')", False, 'import os\n'), ((38, 36, 38, 86), 'os.path.join', 'os.path.join', ({(38, 49, 38, 52): 'pwd', (38, 54, 38, 85): '"""../../model/util/HashSet.java"""'}, {}), "(pwd, '../../model/util/HashSet.java')", False, 'import os\n'), ((10, 41, 10, 62), 'os.path.abspath', 'os.path.abspath', ({(10, 57, 10, 61): 'path'}, {}), '(path)', False, 'import os\n')]
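A hedged sketch of the safer invocation the comment in toAST() alludes to: build an argv list and pass cwd= instead of prefixing 'cd ...;' and using shell=True. The helper name is hypothetical; the paths and classpath are taken from the command string above, with os.path.expandvars resolving $HOME without a shell.

import os
from subprocess import call

def run_jsonify(pwd, java_in, json_out):
    classpath = os.path.expandvars(
        '.:javaparser/javaparser-core/target/classes:'
        '$HOME/.m2/repository/com/cedarsoftware/json-io/4.3.0/json-io-4.3.0.jar')
    argv = ['/usr/bin/java', '-cp', classpath, 'jskparser.Jsonify',
            java_in, json_out]
    # cwd= replaces the 'cd pwd/..;' prefix; no shell interpretation occurs
    return call(argv, cwd=os.path.join(pwd, '..'))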
SNeugber/fiftyone
fiftyone/core/patches.py
a50be47bbbf189e4bbdcd631b93c4c9cbf41c6b7
""" Patches views. | Copyright 2017-2021, Voxel51, Inc. | `voxel51.com <https://voxel51.com/>`_ | """ from copy import deepcopy import eta.core.utils as etau import fiftyone.core.aggregations as foa import fiftyone.core.dataset as fod import fiftyone.core.fields as fof import fiftyone.core.labels as fol import fiftyone.core.media as fom import fiftyone.core.sample as fos import fiftyone.core.view as fov _SINGLE_TYPES_MAP = { fol.Detections: fol.Detection, fol.Polylines: fol.Polyline, } _PATCHES_TYPES = (fol.Detections, fol.Polylines) _NO_MATCH_ID = "" class _PatchView(fos.SampleView): @property def _sample_id(self): return self._doc.sample_id def save(self): super().save() self._view._sync_source_sample(self) class PatchView(_PatchView): """A patch in a :class:`PatchesView`. :class:`PatchView` instances should not be created manually; they are generated by iterating over :class:`PatchesView` instances. Args: doc: a :class:`fiftyone.core.odm.DatasetSampleDocument` view: the :class:`PatchesView` that the patch belongs to selected_fields (None): a set of field names that this view is restricted to excluded_fields (None): a set of field names that are excluded from this view filtered_fields (None): a set of field names of list fields that are filtered in this view """ pass class EvaluationPatchView(_PatchView): """A patch in an :class:`EvaluationPatchesView`. :class:`EvaluationPatchView` instances should not be created manually; they are generated by iterating over :class:`EvaluationPatchesView` instances. Args: doc: a :class:`fiftyone.core.odm.DatasetSampleDocument` view: the :class:`EvaluationPatchesView` that the patch belongs to selected_fields (None): a set of field names that this view is restricted to excluded_fields (None): a set of field names that are excluded from this view filtered_fields (None): a set of field names of list fields that are filtered in this view """ pass class _PatchesView(fov.DatasetView): def __init__( self, source_collection, patches_stage, patches_dataset, _stages=None ): if _stages is None: _stages = [] self._source_collection = source_collection self._patches_stage = patches_stage self._patches_dataset = patches_dataset self.__stages = _stages def __copy__(self): return self.__class__( self._source_collection, deepcopy(self._patches_stage), self._patches_dataset, _stages=deepcopy(self.__stages), ) @property def _base_view(self): return self.__class__( self._source_collection, self._patches_stage, self._patches_dataset, ) @property def _dataset(self): return self._patches_dataset @property def _root_dataset(self): return self._source_collection._root_dataset @property def _stages(self): return self.__stages @property def _all_stages(self): return ( self._source_collection.view()._all_stages + [self._patches_stage] + self.__stages ) @property def _label_fields(self): raise NotImplementedError("subclass must implement _label_fields") @property def _element_str(self): return "patch" @property def _elements_str(self): return "patches" @property def name(self): return self.dataset_name + "-patches" @classmethod def _get_default_sample_fields( cls, include_private=False, use_db_fields=False ): fields = super()._get_default_sample_fields( include_private=include_private, use_db_fields=use_db_fields ) if use_db_fields: return fields + ("_sample_id",) return fields + ("sample_id",) def set_values(self, field_name, *args, **kwargs): field = field_name.split(".", 1)[0] must_sync = field in self._label_fields # The `set_values()` operation could change the contents of 
this view, # so we first record the sample IDs that need to be synced if must_sync and self._stages: ids = self.values("_id") else: ids = None super().set_values(field_name, *args, **kwargs) if must_sync: self._sync_source_field(field, ids=ids) def save(self, fields=None): """Overwrites the object patches in the source dataset with the contents of the view. If this view contains any additional fields that were not extracted from the source dataset, these fields are not saved. .. warning:: This will permanently delete any omitted, filtered, or otherwise modified patches from the source dataset. Args: fields (None): an optional field or list of fields to save. If specified, only these fields are overwritten """ if etau.is_str(fields): fields = [fields] super().save(fields=fields) if fields is None: fields = self._label_fields else: fields = [l for l in fields if l in self._label_fields] # # IMPORTANT: we sync the contents of `_patches_dataset`, not `self` # here because the `save()` call above updated the dataset, which means # this view may no longer have the same contents (e.g., if `skip()` is # involved) # self._sync_source_root(fields) def reload(self): self._root_dataset.reload() # # Regenerate the patches dataset # # This assumes that calling `load_view()` when the current patches # dataset has been deleted will cause a new one to be generated # self._patches_dataset.delete() _view = self._patches_stage.load_view(self._source_collection) self._patches_dataset = _view._patches_dataset def _sync_source_sample(self, sample): for field in self._label_fields: self._sync_source_sample_field(sample, field) def _sync_source_sample_field(self, sample, field): label_type = self._patches_dataset._get_label_field_type(field) is_list_field = issubclass(label_type, fol._LABEL_LIST_FIELDS) doc = sample._doc.field_to_mongo(field) if is_list_field: doc = doc[label_type._LABEL_LIST_FIELD] self._source_collection._set_labels_by_id( field, [sample.sample_id], [doc] ) def _sync_source_field(self, field, ids=None): _, label_path = self._patches_dataset._get_label_field_path(field) if ids is not None: view = self._patches_dataset.mongo( [{"$match": {"_id": {"$in": ids}}}] ) else: view = self._patches_dataset sample_ids, docs = view.aggregate( [foa.Values("sample_id"), foa.Values(label_path, _raw=True)] ) self._source_collection._set_labels_by_id(field, sample_ids, docs) def _sync_source_root(self, fields): for field in fields: self._sync_source_root_field(field) def _sync_source_root_field(self, field): _, id_path = self._get_label_field_path(field, "id") label_path = id_path.rsplit(".", 1)[0] # # Sync label updates # sample_ids, docs, label_ids = self._patches_dataset.aggregate( [ foa.Values("sample_id"), foa.Values(label_path, _raw=True), foa.Values(id_path, unwind=True), ] ) self._source_collection._set_labels_by_id(field, sample_ids, docs) # # Sync label deletions # _, src_id_path = self._source_collection._get_label_field_path( field, "id" ) src_ids = self._source_collection.values(src_id_path, unwind=True) delete_ids = set(src_ids) - set(label_ids) if delete_ids: self._source_collection._dataset.delete_labels( ids=delete_ids, fields=field ) def _get_ids_map(self, field): label_type = self._patches_dataset._get_label_field_type(field) is_list_field = issubclass(label_type, fol._LABEL_LIST_FIELDS) _, id_path = self._get_label_field_path(field, "id") sample_ids, label_ids = self.values(["id", id_path]) ids_map = {} if is_list_field: for sample_id, _label_ids in zip(sample_ids, label_ids): if not 
_label_ids: continue for label_id in _label_ids: ids_map[label_id] = sample_id else: for sample_id, label_id in zip(sample_ids, label_ids): if not label_id: continue ids_map[label_id] = sample_id return ids_map class PatchesView(_PatchesView): """A :class:`fiftyone.core.view.DatasetView` of patches from a :class:`fiftyone.core.dataset.Dataset`. Patches views contain an ordered collection of patch samples, each of which contains a subset of a sample of the parent dataset corresponding to a single object or logical grouping of of objects. Patches retrieved from patches views are returned as :class:`PatchView` objects. Args: source_collection: the :class:`fiftyone.core.collections.SampleCollection` from which this view was created patches_stage: the :class:`fiftyone.core.stages.ToPatches` stage that defines how the patches were extracted patches_dataset: the :class:`fiftyone.core.dataset.Dataset` that serves the patches in this view """ _SAMPLE_CLS = PatchView def __init__( self, source_collection, patches_stage, patches_dataset, _stages=None ): super().__init__( source_collection, patches_stage, patches_dataset, _stages=_stages ) self._patches_field = patches_stage.field @property def _label_fields(self): return [self._patches_field] @property def patches_field(self): """The field from which the patches in this view were extracted.""" return self._patches_field class EvaluationPatchesView(_PatchesView): """A :class:`fiftyone.core.view.DatasetView` containing evaluation patches from a :class:`fiftyone.core.dataset.Dataset`. Evalation patches views contain an ordered collection of evaluation examples, each of which contains the ground truth and/or predicted labels for a true positive, false positive, or false negative example from an evaluation run on the underlying dataset. Patches retrieved from patches views are returned as :class:`EvaluationPatchView` objects. Args: source_collection: the :class:`fiftyone.core.collections.SampleCollection` from which this view was created patches_stage: the :class:`fiftyone.core.stages.ToEvaluationPatches` stage that defines how the patches were extracted patches_dataset: the :class:`fiftyone.core.dataset.Dataset` that serves the patches in this view """ _SAMPLE_CLS = EvaluationPatchView def __init__( self, source_collection, patches_stage, patches_dataset, _stages=None ): super().__init__( source_collection, patches_stage, patches_dataset, _stages=_stages ) eval_key = patches_stage.eval_key eval_info = source_collection.get_evaluation_info(eval_key) self._gt_field = eval_info.config.gt_field self._pred_field = eval_info.config.pred_field @property def _label_fields(self): return [self._gt_field, self._pred_field] @property def gt_field(self): """The ground truth field for the evaluation patches in this view.""" return self._gt_field @property def pred_field(self): """The predictions field for the evaluation patches in this view.""" return self._pred_field def make_patches_dataset( sample_collection, field, keep_label_lists=False, name=None ): """Creates a dataset that contains one sample per object patch in the specified field of the collection. Fields other than ``field`` and the default sample fields will not be included in the returned dataset. A ``sample_id`` field will be added that records the sample ID from which each patch was taken. 
Args: sample_collection: a :class:`fiftyone.core.collections.SampleCollection` field: the patches field, which must be of type :class:`fiftyone.core.labels.Detections` or :class:`fiftyone.core.labels.Polylines` keep_label_lists (False): whether to store the patches in label list fields of the same type as the input collection rather than using their single label variants name (None): a name for the returned dataset Returns: a :class:`fiftyone.core.dataset.Dataset` """ if keep_label_lists: field_type = sample_collection._get_label_field_type(field) else: field_type = _get_single_label_field_type(sample_collection, field) dataset = fod.Dataset(name, _patches=True) dataset.media_type = fom.IMAGE dataset.add_sample_field( "sample_id", fof.ObjectIdField, db_field="_sample_id" ) dataset.add_sample_field( field, fof.EmbeddedDocumentField, embedded_doc_type=field_type ) patches_view = _make_patches_view( sample_collection, field, keep_label_lists=keep_label_lists ) _write_samples(dataset, patches_view) return dataset def _get_single_label_field_type(sample_collection, field): label_type = sample_collection._get_label_field_type(field) if label_type not in _SINGLE_TYPES_MAP: raise ValueError("Unsupported label field type %s" % label_type) return _SINGLE_TYPES_MAP[label_type] def make_evaluation_dataset(sample_collection, eval_key, name=None): """Creates a dataset based on the results of the evaluation with the given key that contains one sample for each true positive, false positive, and false negative example in the input collection, respectively. True positive examples will result in samples with both their ground truth and predicted fields populated, while false positive/negative examples will only have one of their corresponding predicted/ground truth fields populated, respectively. If multiple predictions are matched to a ground truth object (e.g., if the evaluation protocol includes a crowd attribute), then all matched predictions will be stored in the single sample along with the ground truth object. The returned dataset will also have top-level ``type`` and ``iou`` fields populated based on the evaluation results for that example, as well as a ``sample_id`` field recording the sample ID of the example, and a ``crowd`` field if the evaluation protocol defines a crowd attribute. .. note:: The returned dataset will contain patches for the contents of the input collection, which may differ from the view on which the ``eval_key`` evaluation was performed. This may exclude some labels that were evaluated and/or include labels that were not evaluated. If you would like to see patches for the exact view on which an evaluation was performed, first call :meth:`load_evaluation_view() <fiftyone.core.collections.SampleCollection.load_evaluation_view>` to load the view and then convert to patches. 
Args: sample_collection: a :class:`fiftyone.core.collections.SampleCollection` eval_key: an evaluation key that corresponds to the evaluation of ground truth/predicted fields that are of type :class:`fiftyone.core.labels.Detections` or :class:`fiftyone.core.labels.Polylines` name (None): a name for the returned dataset Returns: a :class:`fiftyone.core.dataset.Dataset` """ # Parse evaluation info eval_info = sample_collection.get_evaluation_info(eval_key) pred_field = eval_info.config.pred_field gt_field = eval_info.config.gt_field if hasattr(eval_info.config, "iscrowd"): crowd_attr = eval_info.config.iscrowd else: crowd_attr = None pred_type = sample_collection._get_label_field_type(pred_field) gt_type = sample_collection._get_label_field_type(gt_field) # Setup dataset with correct schema dataset = fod.Dataset(name, _patches=True) dataset.media_type = fom.IMAGE dataset.add_sample_field( pred_field, fof.EmbeddedDocumentField, embedded_doc_type=pred_type ) dataset.add_sample_field( gt_field, fof.EmbeddedDocumentField, embedded_doc_type=gt_type ) dataset.add_sample_field( "sample_id", fof.ObjectIdField, db_field="_sample_id" ) dataset.add_sample_field("type", fof.StringField) dataset.add_sample_field("iou", fof.FloatField) if crowd_attr is not None: dataset.add_sample_field("crowd", fof.BooleanField) # Add ground truth patches gt_view = _make_eval_view( sample_collection, eval_key, gt_field, crowd_attr=crowd_attr ) _write_samples(dataset, gt_view) # Merge matched predictions _merge_matched_labels(dataset, sample_collection, eval_key, pred_field) # Add unmatched predictions unmatched_pred_view = _make_eval_view( sample_collection, eval_key, pred_field, skip_matched=True ) _add_samples(dataset, unmatched_pred_view) return dataset def _make_patches_view(sample_collection, field, keep_label_lists=False): if sample_collection._is_frames: raise ValueError( "Creating patches views into frame views is not yet supported" ) if sample_collection._is_frame_field(field): raise ValueError( "Frame label patches cannot be directly extracted; you must first " "convert your video dataset to frames via `to_frames()`" ) label_type = sample_collection._get_label_field_type(field) if issubclass(label_type, _PATCHES_TYPES): list_field = field + "." + label_type._LABEL_LIST_FIELD else: raise ValueError( "Invalid label field type %s. Extracting patches is only " "supported for the following types: %s" % (label_type, _PATCHES_TYPES) ) pipeline = [ { "$project": { "_id": True, "_sample_id": "$_id", "_media_type": True, "filepath": True, "metadata": True, "tags": True, field + "._cls": True, list_field: True, } }, {"$unwind": "$" + list_field}, {"$set": {"_rand": {"$rand": {}}}}, {"$set": {"_id": "$" + list_field + "._id"}}, ] if keep_label_lists: pipeline.append({"$set": {list_field: ["$" + list_field]}}) else: pipeline.append({"$set": {field: "$" + list_field}}) return sample_collection.mongo(pipeline) def _make_eval_view( sample_collection, eval_key, field, skip_matched=False, crowd_attr=None ): eval_type = field + "." + eval_key eval_id = field + "." + eval_key + "_id" eval_iou = field + "." + eval_key + "_iou" view = _make_patches_view(sample_collection, field) if skip_matched: view = view.mongo( [ { "$match": { "$expr": { "$or": [ {"$eq": ["$" + eval_id, _NO_MATCH_ID]}, {"$not": {"$gt": ["$" + eval_id, None]}}, ] } } } ] ) view = view.mongo( [{"$set": {"type": "$" + eval_type, "iou": "$" + eval_iou}}] ) if crowd_attr is not None: crowd_path1 = "$" + field + "." 
+ crowd_attr # @todo remove Attributes usage crowd_path2 = "$" + field + ".attributes." + crowd_attr + ".value" view = view.mongo( [ { "$set": { "crowd": { "$cond": { "if": {"$gt": [crowd_path1, None]}, "then": {"$toBool": crowd_path1}, "else": { "$cond": { "if": {"$gt": [crowd_path2, None]}, "then": {"$toBool": crowd_path2}, "else": None, } }, } } } } ] ) return _upgrade_labels(view, field) def _upgrade_labels(view, field): tmp_field = "_" + field label_type = view._get_label_field_type(field) return view.mongo( [ {"$set": {tmp_field: "$" + field}}, {"$unset": field}, { "$set": { field: { "_cls": label_type.__name__, label_type._LABEL_LIST_FIELD: ["$" + tmp_field], } } }, {"$unset": tmp_field}, ] ) def _merge_matched_labels(dataset, src_collection, eval_key, field): field_type = src_collection._get_label_field_type(field) list_field = field + "." + field_type._LABEL_LIST_FIELD eval_id = eval_key + "_id" eval_field = list_field + "." + eval_id pipeline = src_collection._pipeline(detach_frames=True) pipeline.extend( [ {"$project": {list_field: True}}, {"$unwind": "$" + list_field}, { "$match": { "$expr": { "$and": [ {"$gt": ["$" + eval_field, None]}, {"$ne": ["$" + eval_field, _NO_MATCH_ID]}, ] } } }, { "$group": { "_id": {"$toObjectId": "$" + eval_field}, "_labels": {"$push": "$" + list_field}, } }, { "$project": { field: { "_cls": field_type.__name__, field_type._LABEL_LIST_FIELD: "$_labels", } }, }, { "$merge": { "into": dataset._sample_collection_name, "on": "_id", "whenMatched": "merge", "whenNotMatched": "discard", } }, ] ) src_collection._dataset._aggregate(pipeline=pipeline, attach_frames=False) def _write_samples(dataset, src_collection): pipeline = src_collection._pipeline(detach_frames=True) pipeline.append({"$out": dataset._sample_collection_name}) src_collection._dataset._aggregate(pipeline=pipeline, attach_frames=False) def _add_samples(dataset, src_collection): pipeline = src_collection._pipeline(detach_frames=True) pipeline.append( { "$merge": { "into": dataset._sample_collection_name, "on": "_id", "whenMatched": "keepExisting", "whenNotMatched": "insert", } } ) src_collection._dataset._aggregate(pipeline=pipeline, attach_frames=False)
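# --- Hedged usage sketch (not part of the original module) ---
# Exercises make_evaluation_dataset() above. Assumes the FiftyOne
# quickstart dataset (it ships `ground_truth`/`predictions` Detections
# fields) and an evaluation run with eval_key="eval".
if __name__ == "__main__":
    import fiftyone.zoo as foz

    _dataset = foz.load_zoo_dataset("quickstart")
    _dataset.evaluate_detections(
        "predictions", gt_field="ground_truth", eval_key="eval"
    )
    _eval_patches = make_evaluation_dataset(_dataset, "eval")
    # one sample per TP/FP/FN, with `type`, `iou`, and `sample_id` populated
    print(_eval_patches.count_values("type"))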
[((439, 14, 439, 46), 'fiftyone.core.dataset.Dataset', 'fod.Dataset', (), '', True, 'import fiftyone.core.dataset as fod\n'), ((522, 14, 522, 46), 'fiftyone.core.dataset.Dataset', 'fod.Dataset', (), '', True, 'import fiftyone.core.dataset as fod\n'), ((188, 11, 188, 30), 'eta.core.utils.is_str', 'etau.is_str', ({(188, 23, 188, 29): 'fields'}, {}), '(fields)', True, 'import eta.core.utils as etau\n'), ((94, 12, 94, 41), 'copy.deepcopy', 'deepcopy', ({(94, 21, 94, 40): 'self._patches_stage'}, {}), '(self._patches_stage)', False, 'from copy import deepcopy\n'), ((96, 20, 96, 43), 'copy.deepcopy', 'deepcopy', ({(96, 29, 96, 42): 'self.__stages'}, {}), '(self.__stages)', False, 'from copy import deepcopy\n'), ((248, 13, 248, 36), 'fiftyone.core.aggregations.Values', 'foa.Values', ({(248, 24, 248, 35): '"""sample_id"""'}, {}), "('sample_id')", True, 'import fiftyone.core.aggregations as foa\n'), ((248, 38, 248, 71), 'fiftyone.core.aggregations.Values', 'foa.Values', (), '', True, 'import fiftyone.core.aggregations as foa\n'), ((267, 16, 267, 39), 'fiftyone.core.aggregations.Values', 'foa.Values', ({(267, 27, 267, 38): '"""sample_id"""'}, {}), "('sample_id')", True, 'import fiftyone.core.aggregations as foa\n'), ((268, 16, 268, 49), 'fiftyone.core.aggregations.Values', 'foa.Values', (), '', True, 'import fiftyone.core.aggregations as foa\n'), ((269, 16, 269, 48), 'fiftyone.core.aggregations.Values', 'foa.Values', (), '', True, 'import fiftyone.core.aggregations as foa\n')]
ocesaulo/cookiecutter-ocn_sci
{{cookiecutter.repo_name}}/setup.py
d41e826f56ba67cfde878ffc8188d497214a5f5b
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The setup script."""
from os import path

from setuptools import setup, find_packages

# `path` and `here` were referenced below but never imported/defined
here = path.abspath(path.dirname(__file__))

# `__version__` was referenced below but never defined; the value is a
# placeholder kept in one obvious place to bump
__version__ = '0.1.0'

with open('README.rst') as readme_file:
    readme = readme_file.read()

{%- set license_classifiers = {
    'MIT license': 'License :: OSI Approved :: MIT License',
    'BSD license': 'License :: OSI Approved :: BSD License',
    'ISC license': 'License :: OSI Approved :: ISC License (ISCL)',
    'Apache Software License 2.0': 'License :: OSI Approved :: Apache Software License',
    'GNU General Public License v3': 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)'
} %}

# get the dependencies and installs
with open(path.join(here, 'requirements.txt'), encoding='utf-8') as f:
    all_reqs = f.read().split('\n')

install_requires = [x.strip() for x in all_reqs if 'git+' not in x]
dependency_links = [x.strip().replace('git+', '')
                    for x in all_reqs if x.startswith('git+')]

# no trailing comma here: it would turn the list into a one-element tuple
test_requirements = ['pytest']
setup_requirements = ['pytest-runner']

setup(
    name='{{ cookiecutter.repo_name }}',
    version=__version__,
    description="{{ cookiecutter.project_short_description }}",
    long_description=readme,
    author="{{ cookiecutter.full_name.replace('\"', '\\\"') }}",
    author_email='{{ cookiecutter.email }}',
    url='https://github.com/{{ cookiecutter.github_username }}/{{ cookiecutter.repo_name }}',
    packages=find_packages(include=['{{ cookiecutter.repo_name }}'],
                           exclude=('docs', 'tests*',)),
{%- if cookiecutter.open_source_license in license_classifiers %}
    license="{{ cookiecutter.open_source_license }}",
{%- endif %}
    install_requires=install_requires,
    dependency_links=dependency_links,
    setup_requires=setup_requirements,
    test_suite='tests',
    tests_require=test_requirements,
    keywords='{{ cookiecutter.repo_name }}',
    classifiers=[
{%- if cookiecutter.open_source_license in license_classifiers %}
        '{{ license_classifiers[cookiecutter.open_source_license] }}',
{%- endif %}
        'Programming Language :: Python :: 3.6',
    ]
)
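# --- Hedged illustration (not part of the template) ---
# What the requirements.txt parsing above yields for a sample file; the
# package names are hypothetical. Note that blank lines survive the
# `'git+' not in x` filter, so a trailing newline adds an empty entry.
#
#   sample_reqs = ['numpy>=1.15', 'git+https://github.com/user/pkg.git#egg=pkg', '']
#   [x.strip() for x in sample_reqs if 'git+' not in x]
#   # -> ['numpy>=1.15', '']
#   [x.strip().replace('git+', '') for x in sample_reqs if x.startswith('git+')]
#   # -> ['https://github.com/user/pkg.git#egg=pkg']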
[]
zopefoundation/zope.app.debug
src/zope/app/debug/debug.py
4f31e98f6a633f089bf132dd55cb3ead0270887b
############################################################################## # # Copyright (c) 2002 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## """Code to initialize the application server """ from __future__ import print_function __docformat__ = 'restructuredtext' import base64 import time import sys from pdb import Pdb from io import BytesIO from zope.publisher.publish import publish as _publish, debug_call from zope.publisher.browser import TestRequest, setDefaultSkin from zope.app.publication.browser import BrowserPublication from zope.app.appsetup import config, database try: from time import process_time as time_process_time # pragma: PY3 except ImportError: from time import clock as time_process_time # pragma: PY2 try: import urllib.parse as urllib # pragma: PY3 except ImportError: import urllib # pragma: PY2 try: text_type = unicode # pragma: PY2 except NameError: text_type = str # pragma: PY3 class Debugger(object): pdb = Pdb def __init__(self, db=None, config_file=None, stdout=None): if db is None and config_file is None: db = 'Data.fs' config_file = 'site.zcml' if config_file is not None: config(config_file) self.db = database(db) self.stdout = stdout @classmethod def fromDatabase(cls, db): inst = cls.__new__(cls) inst.db = db return inst def root(self): """Get the top-level application object The object returned is connected to an open database connection. 
""" from zope.app.publication.zopepublication import ZopePublication return self.db.open().root()[ZopePublication.root_name] def _request(self, path='/', stdin='', basic=None, environment=None, form=None, request=None, publication=BrowserPublication): """Create a request """ env = {} if isinstance(stdin, text_type): stdin = stdin.encode("utf-8") if isinstance(stdin, bytes): stdin = BytesIO(stdin) p = path.split('?') if len(p) == 1: env['PATH_INFO'] = p[0] elif len(p) == 2: env['PATH_INFO'], env['QUERY_STRING'] = p else: raise ValueError("Too many ?s in path", path) env['PATH_INFO'] = urllib.unquote(env['PATH_INFO']) if environment is not None: env.update(environment) if basic: basic_bytes = basic.encode('ascii') if not isinstance( basic, bytes) else basic basic64_bytes = base64.b64encode(basic_bytes) basic64 = basic64_bytes.decode('ascii').strip() env['HTTP_AUTHORIZATION'] = "Basic %s" % basic64 pub = publication(self.db) if request is not None: request = request(stdin, env) else: request = TestRequest(stdin, env) setDefaultSkin(request) request.setPublication(pub) if form: request.form.update(form) return request def publish(self, path='/', stdin='', *args, **kw): t, pt = time.time(), time_process_time() request = self._request(path, stdin, *args, **kw) # agroszer: 2008.feb.1.: if a retry occurs in the publisher, # the response will be LOST, so we must accept the returned request request = _publish(request) getStatus = getattr(request.response, 'getStatus', lambda: None) headers = sorted(request.response.getHeaders()) print( 'Status %s\r\n%s\r\n\r\n%s' % ( request.response.getStatusString(), '\r\n'.join([("%s: %s" % h) for h in headers]), request.response.consumeBody(), ), file=self.stdout or sys.stdout) return time.time() - t, time_process_time() - pt, getStatus() def run(self, *args, **kw): t, pt = time.time(), time_process_time() request = self._request(*args, **kw) # agroszer: 2008.feb.1.: if a retry occurs in the publisher, # the response will be LOST, so we must accept the returned request request = _publish(request, handle_errors=False) getStatus = getattr(request.response, 'getStatus', lambda: None) return time.time() - t, time_process_time() - pt, getStatus() def debug(self, *args, **kw): out = self.stdout or sys.stdout class ZopePdb(self.Pdb): done_pub = False done_ob = False def do_pub(self, arg): if self.done_pub: print('pub already done.', file=out) return self.do_s('') self.do_s('') self.do_c('') self.done_pub = True def do_ob(self, arg): if self.done_ob: print('ob already done.', file=out) return self.do_pub('') self.do_c('') self.done_ob = True dbg = ZopePdb() request = self._request(*args, **kw) fbreak(dbg, _publish) fbreak(dbg, debug_call) print('* Type c<cr> to jump to published object call.', file=out) dbg.runcall(_publish, request) return dbg def getlineno(code): return code.co_firstlineno def fbreak(db, meth): try: meth = meth.__func__ except AttributeError: pass code = meth.__code__ lineno = getlineno(code) filename = code.co_filename db.set_break(filename, lineno)
[((59, 18, 59, 30), 'zope.app.appsetup.database', 'database', ({(59, 27, 59, 29): 'db'}, {}), '(db)', False, 'from zope.app.appsetup import config, database\n'), ((100, 27, 100, 59), 'urllib.unquote', 'urllib.unquote', ({(100, 42, 100, 58): "env['PATH_INFO']"}, {}), "(env['PATH_INFO'])", False, 'import urllib\n'), ((132, 18, 132, 35), 'zope.publisher.publish.publish', '_publish', ({(132, 27, 132, 34): 'request'}, {}), '(request)', True, 'from zope.publisher.publish import publish as _publish, debug_call\n'), ((150, 18, 150, 56), 'zope.publisher.publish.publish', '_publish', (), '', True, 'from zope.publisher.publish import publish as _publish, debug_call\n'), ((58, 12, 58, 31), 'zope.app.appsetup.config', 'config', ({(58, 19, 58, 30): 'config_file'}, {}), '(config_file)', False, 'from zope.app.appsetup import config, database\n'), ((90, 20, 90, 34), 'io.BytesIO', 'BytesIO', ({(90, 28, 90, 33): 'stdin'}, {}), '(stdin)', False, 'from io import BytesIO\n'), ((108, 28, 108, 57), 'base64.b64encode', 'base64.b64encode', ({(108, 45, 108, 56): 'basic_bytes'}, {}), '(basic_bytes)', False, 'import base64\n'), ((117, 22, 117, 45), 'zope.publisher.browser.TestRequest', 'TestRequest', ({(117, 34, 117, 39): 'stdin', (117, 41, 117, 44): 'env'}, {}), '(stdin, env)', False, 'from zope.publisher.browser import TestRequest, setDefaultSkin\n'), ((118, 12, 118, 35), 'zope.publisher.browser.setDefaultSkin', 'setDefaultSkin', ({(118, 27, 118, 34): 'request'}, {}), '(request)', False, 'from zope.publisher.browser import TestRequest, setDefaultSkin\n'), ((126, 16, 126, 27), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((126, 29, 126, 48), 'time.clock', 'time_process_time', ({}, {}), '()', True, 'from time import clock as time_process_time\n'), ((146, 16, 146, 27), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((146, 29, 146, 48), 'time.clock', 'time_process_time', ({}, {}), '()', True, 'from time import clock as time_process_time\n'), ((143, 15, 143, 26), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((143, 32, 143, 51), 'time.clock', 'time_process_time', ({}, {}), '()', True, 'from time import clock as time_process_time\n'), ((153, 15, 153, 26), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((153, 32, 153, 51), 'time.clock', 'time_process_time', ({}, {}), '()', True, 'from time import clock as time_process_time\n')]
terryli710/SIIM-ACR-Pneumothorax-Classification
transfer_learning.py
8b278a9885b71c919d7064b2df42863b53f7adf3
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Mon May 18 22:42:54 2020 @author: mike """ import numpy as np import tensorflow as tf from tensorflow import keras from sklearn.model_selection import train_test_split from tensorflow.keras.applications import VGG16 from tensorflow.keras import layers from sklearn.preprocessing import OneHotEncoder from skimage.transform import resize import matplotlib.pyplot as plt train_data = np.load("train_data.npy") x_data = np.zeros((210,204,204,3)) y_data = np.zeros(210) for i in range(210): img = train_data[i,1:].reshape(1024,1024) img_resized = resize(img,(204,204)) y_data[i] = train_data[i,0] x_data[i,:,:,0] = img_resized.astype(int) x_data[i,:,:,1] = img_resized.astype(int) x_data[i,:,:,2] = img_resized.astype(int) x_train, x_test, y_train, y_test = train_test_split( x_data, y_data, test_size=0.2, random_state=42) y_train = OneHotEncoder().fit_transform(y_train.reshape(-1,1)).toarray() y_test = OneHotEncoder().fit_transform(y_test.reshape(-1,1)).toarray() base_model = VGG16(include_top=False, weights='vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5', input_shape=(204, 204, 3)) base_model.trainable = False inputs = tf.keras.Input(shape=(204, 204, 3)) x = base_model(inputs) x = tf.keras.layers.Flatten()(x) x = tf.keras.layers.Dense(256, activation='relu')(x) x = tf.keras.layers.Dense(64, activation='relu')(x) outputs = tf.keras.layers.Dense(2, activation='softmax')(x) model = keras.Model(inputs, outputs) model.summary() model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=0.001),loss="binary_crossentropy",metrics=["accuracy"]) model.fit(x_train, y_train, batch_size=16, epochs=5) pred = model.predict(x_train) score = model.evaluate(x_test, y_test, verbose=0) print(score[0],score[1])
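# --- Optional fine-tuning sketch (an addition, not in the original script) ---
# A common follow-up to the frozen-base training above: unfreeze VGG16 and
# continue for a couple of epochs at a much lower learning rate.
base_model.trainable = True
model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=1e-5),
              loss="binary_crossentropy", metrics=["accuracy"])
model.fit(x_train, y_train, batch_size=16, epochs=2)
print(model.evaluate(x_test, y_test, verbose=0))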
[((21, 13, 21, 38), 'numpy.load', 'np.load', ({(21, 21, 21, 37): '"""train_data.npy"""'}, {}), "('train_data.npy')", True, 'import numpy as np\n'), ((23, 9, 23, 34), 'numpy.zeros', 'np.zeros', ({(23, 18, 23, 33): '(210, 204, 204, 3)'}, {}), '((210, 204, 204, 3))', True, 'import numpy as np\n'), ((24, 9, 24, 22), 'numpy.zeros', 'np.zeros', ({(24, 18, 24, 21): '210'}, {}), '(210)', True, 'import numpy as np\n'), ((34, 35, 35, 55), 'sklearn.model_selection.train_test_split', 'train_test_split', (), '', False, 'from sklearn.model_selection import train_test_split\n'), ((45, 13, 46, 45), 'tensorflow.keras.applications.VGG16', 'VGG16', (), '', False, 'from tensorflow.keras.applications import VGG16\n'), ((49, 9, 49, 44), 'tensorflow.keras.Input', 'tf.keras.Input', (), '', True, 'import tensorflow as tf\n'), ((55, 8, 55, 36), 'tensorflow.keras.Model', 'keras.Model', ({(55, 20, 55, 26): 'inputs', (55, 28, 55, 35): 'outputs'}, {}), '(inputs, outputs)', False, 'from tensorflow import keras\n'), ((28, 18, 28, 39), 'skimage.transform.resize', 'resize', ({(28, 25, 28, 28): 'img', (28, 29, 28, 38): '(204, 204)'}, {}), '(img, (204, 204))', False, 'from skimage.transform import resize\n'), ((51, 4, 51, 29), 'tensorflow.keras.layers.Flatten', 'tf.keras.layers.Flatten', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((52, 4, 52, 49), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (), '', True, 'import tensorflow as tf\n'), ((53, 4, 53, 48), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (), '', True, 'import tensorflow as tf\n'), ((54, 10, 54, 56), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (), '', True, 'import tensorflow as tf\n'), ((61, 24, 61, 68), 'tensorflow.keras.optimizers.SGD', 'tf.keras.optimizers.SGD', (), '', True, 'import tensorflow as tf\n'), ((39, 10, 39, 25), 'sklearn.preprocessing.OneHotEncoder', 'OneHotEncoder', ({}, {}), '()', False, 'from sklearn.preprocessing import OneHotEncoder\n'), ((40, 9, 40, 24), 'sklearn.preprocessing.OneHotEncoder', 'OneHotEncoder', ({}, {}), '()', False, 'from sklearn.preprocessing import OneHotEncoder\n')]
erexer/polyaxon
core/tests/test_polyflow/test_workflows/test_hyperband.py
be14dae1ed56d568983388736bcdaf27a7baa4a4
#!/usr/bin/python # # Copyright 2018-2020 Polyaxon, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import pytest from marshmallow.exceptions import ValidationError from tests.utils import BaseTestCase, assert_equal_dict from polyaxon.polyflow.matrix import V1Hyperband from polyaxon.polyflow.optimization import V1Optimization, V1OptimizationMetric @pytest.mark.workflow_mark class TestWorkflowV1Hyperbands(BaseTestCase): def test_hyperband_config(self): config_dict = { "kind": "hyperband", "maxIterations": 10, "eta": 3, "resource": {"name": "steps", "type": "int"}, "resume": False, "metric": V1OptimizationMetric( name="loss", optimization=V1Optimization.MINIMIZE ).to_dict(), "params": {"lr": {"kind": "choice", "value": [[0.1], [0.9]]}}, } config = V1Hyperband.from_dict(config_dict) assert_equal_dict(config.to_dict(), config_dict) # Raises for negative values config_dict["maxIterations"] = 0 with self.assertRaises(ValidationError): V1Hyperband.from_dict(config_dict) config_dict["maxIterations"] = -0.5 with self.assertRaises(ValidationError): V1Hyperband.from_dict(config_dict) config_dict["maxIterations"] = 3 # Add numRuns percent config_dict["eta"] = -0.5 with self.assertRaises(ValidationError): V1Hyperband.from_dict(config_dict) config_dict["eta"] = 2.9 config = V1Hyperband.from_dict(config_dict) assert_equal_dict(config.to_dict(), config_dict)
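# --- Hedged illustration (not part of the test suite) ---
# Standard Hyperband bracket arithmetic (Li et al.) for the eta=3,
# maxIterations=10 config above; Polyaxon's internal scheduling may differ.
if __name__ == "__main__":
    import math

    eta, max_iterations = 3, 10
    s_max = int(math.log(max_iterations) / math.log(eta))  # -> 2
    for s in range(s_max, -1, -1):
        n = int(math.ceil((s_max + 1) / (s + 1) * eta ** s))  # initial configs
        r = max_iterations * eta ** -s  # initial resource per config
        print("bracket s=%d: n=%d, r=%.2f" % (s, n, r))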
[((40, 17, 40, 51), 'polyaxon.polyflow.matrix.V1Hyperband.from_dict', 'V1Hyperband.from_dict', ({(40, 39, 40, 50): 'config_dict'}, {}), '(config_dict)', False, 'from polyaxon.polyflow.matrix import V1Hyperband\n'), ((59, 17, 59, 51), 'polyaxon.polyflow.matrix.V1Hyperband.from_dict', 'V1Hyperband.from_dict', ({(59, 39, 59, 50): 'config_dict'}, {}), '(config_dict)', False, 'from polyaxon.polyflow.matrix import V1Hyperband\n'), ((46, 12, 46, 46), 'polyaxon.polyflow.matrix.V1Hyperband.from_dict', 'V1Hyperband.from_dict', ({(46, 34, 46, 45): 'config_dict'}, {}), '(config_dict)', False, 'from polyaxon.polyflow.matrix import V1Hyperband\n'), ((50, 12, 50, 46), 'polyaxon.polyflow.matrix.V1Hyperband.from_dict', 'V1Hyperband.from_dict', ({(50, 34, 50, 45): 'config_dict'}, {}), '(config_dict)', False, 'from polyaxon.polyflow.matrix import V1Hyperband\n'), ((56, 12, 56, 46), 'polyaxon.polyflow.matrix.V1Hyperband.from_dict', 'V1Hyperband.from_dict', ({(56, 34, 56, 45): 'config_dict'}, {}), '(config_dict)', False, 'from polyaxon.polyflow.matrix import V1Hyperband\n'), ((35, 22, 37, 13), 'polyaxon.polyflow.optimization.V1OptimizationMetric', 'V1OptimizationMetric', (), '', False, 'from polyaxon.polyflow.optimization import V1Optimization, V1OptimizationMetric\n')]
fatimatswanya/fatimaCSC102
Class Work oop.py
cab70bd696d39a9e16bcb57e0180e872be4f49bc
class Student:

    studentLevel = 'first year computer science 2020/2021 session'
    studentCounter = 0
    registeredCourse = 'csc102'

    def __init__(self, thename, thematricno, thesex, thehostelname, theage, thecsc102examscore):
        self.name = thename
        self.matricno = thematricno
        self.sex = thesex
        self.hostelname = thehostelname
        self.age = theage
        self.csc102examscore = thecsc102examscore
        Student.studentCounter = Student.studentCounter + 1

    def getName(self):
        return self.name

    def setName(self, thenewName):
        self.name = thenewName

    def agedeterminer(self):
        if self.age > 16:
            print('Student is above 16')

    def finalscore(self):
        if self.csc102examscore < 45:
            print('You will carryover this course, sorry')
        else:
            print('You have passed')

    @classmethod
    def course(cls):
        # a classmethod must accept `cls`; the original parameter-less
        # version raised a TypeError when called
        print(f'Students registered course is {cls.registeredCourse}')

    @staticmethod
    def PAUNanthem():
        print('Pau, here we come, Pau, here we come ')

    @staticmethod
    def ODDorEVEN(num):
        if num % 2 == 0:
            print('Number is even')
        else:
            print('Number is odd')

    @classmethod
    def studentnum(cls):
        print(cls.studentCounter)


# age and exam score are passed as numbers so the `>` and `<`
# comparisons above do not raise a TypeError
student1 = Student('James Kaka', '021074', 'M', 'Amethyst', 16, 49)
print(student1.getName())
student1.setName('James Gaga')
print(student1.getName())
Student.PAUNanthem()
[]
Fozar/clickhouse-sqlalchemy
clickhouse_sqlalchemy/drivers/reflection.py
88fd630856655cc470430b365dce7e85516abf62
from sqlalchemy.engine import reflection from clickhouse_sqlalchemy import Table, engines class ClickHouseInspector(reflection.Inspector): def reflect_table(self, table, *args, **kwargs): # This check is necessary to support direct instantiation of # `clickhouse_sqlalchemy.Table` and then reflection of it. if not isinstance(table, Table): table.metadata.remove(table) ch_table = Table._make_from_standard( table, _extend_on=kwargs.get('_extend_on') ) else: ch_table = table super(ClickHouseInspector, self).reflect_table( ch_table, *args, **kwargs ) with self._operation_context() as conn: schema = conn.schema_for_object(ch_table) self._reflect_engine(ch_table.name, schema, ch_table) def _reflect_engine(self, table_name, schema, table): should_reflect = ( self.dialect.supports_engine_reflection and self.dialect.engine_reflection ) if not should_reflect: return engine_cls_by_name = {e.__name__: e for e in engines.__all__} e = self.get_engine(table_name, schema=table.schema) if not e: raise ValueError("Cannot find engine for table '%s'" % table_name) engine_cls = engine_cls_by_name.get(e['engine']) if engine_cls is not None: engine = engine_cls.reflect(table, **e) engine._set_parent(table) else: table.engine = None def get_engine(self, table_name, schema=None, **kw): with self._operation_context() as conn: return self.dialect.get_engine( conn, table_name, schema=schema, info_cache=self.info_cache, **kw )
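# --- Hedged usage sketch (not part of the original module) ---
# Reflecting a table through the inspector above via SQLAlchemy's normal
# autoload path; the URL and table name are placeholders and assume a
# reachable ClickHouse server.
if __name__ == '__main__':
    from sqlalchemy import MetaData, create_engine

    from clickhouse_sqlalchemy import Table

    engine = create_engine('clickhouse+native://localhost/default')
    metadata = MetaData()
    # autoload routes through ClickHouseInspector.reflect_table(), which
    # attaches the reflected ClickHouse engine to `events.engine`
    events = Table('events', metadata, autoload_with=engine)
    print(events.engine)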
[]
abdul-khalid/pydisque
tests/test_disque.py
a9b5caa6dac0621a0174d168f4a04c88d0e2f8b5
""" Unit Tests for the pydisque module. Currently, most of these tests require a fresh instance of Disque to be valid and pass. """ import unittest import json import time import random import six from pydisque.client import Client from redis.exceptions import ResponseError class TestDisque(unittest.TestCase): """TestCase class for pydisque.""" testID = None def setUp(self): """Setup the tests.""" self.client = Client(['localhost:7711']) self.client.connect() self.testID = "%d.%d" % (time.time(), random.randint(1000, 1000000)) def test_publish_and_receive(self): """Test the most important functions of pydisque.""" t1 = str(time.time()) self.client.add_job("test_q", t1, timeout=100) jobs = self.client.get_job(['test_q']) assert len(jobs) == 1 for queue_name, job_id, job in jobs: assert job == six.b(t1) self.client.ack_job(job_id) assert len(self.client.get_job(['test_q'], timeout=100)) == 0 def test_nack(self): """Fetch the queue, return a job, check that it's back.""" t1 = str(time.time()) queuename = "test_nack." + self.testID self.client.add_job(queuename, str(t1), timeout=100) jobs = self.client.get_job([queuename]) # NACK the first read assert len(jobs) == 1 for queue_name, job_id, job in jobs: assert len(jobs) == 1 assert job == six.b(t1) self.client.nack_job(job_id) # this time ACK it jobs = self.client.get_job([queuename]) assert len(jobs) == 1 for queue_name, job_id, job in jobs: assert job == six.b(t1) self.client.ack_job(job_id) assert len(self.client.get_job([queuename], timeout=100)) == 0 def test_qpeek(self): """ Test qpeek. Ran into some problems with an ENQUEUE/DEQUEUE test that was using qpeek, checking core functionality of qpeek(). """ queuename = "test_qpeek-%s" % self.testID job_id = self.client.add_job(queuename, "Peek A Boo") peeked = self.client.qpeek(queuename, 1) assert peeked[0][1] == job_id def test_qscan(self): """ Test the qscan function. This test relies on add_job() being functional, and the local disque not being a disque proxy to a mesh. TODO: unique the queues with self.testID. """ t1 = str(time.time()) self.client.add_job("q1", t1, timeout=100) self.client.add_job("q2", t1, timeout=100) qb = self.client.qscan() assert qb[0] assert qb[1] assert six.b("q1") in qb[1] assert six.b("q2") in qb[1] def test_jscan(self): """Simple test of the jscan function.""" t1 = time.time() queuename = "test_jscan-%s" % self.testID j1 = self.client.add_job(queuename, str(t1), timeout=100) jerbs = self.client.jscan(queue=queuename) assert j1 in jerbs[1] def test_del_job(self): """Simple test of del_job, needs qpeek. FIXME: This function has grown ugly. """ t1 = time.time() queuename = "test_del_job-%s" % self.testID j1 = self.client.add_job(queuename, str(t1)) jerbs = self.client.qpeek(queuename, 1) jlist = [] for item in jerbs: jlist.append(item[1]) assert j1 in jlist self.client.del_job(j1) jerbs = self.client.qpeek(queuename, 1) jlist = [] for item in jerbs: jlist.append(item[1]) assert j1 not in jerbs def test_qlen(self): """Simple test of qlen.""" queuename = "test_qlen-%s" % self.testID lengthOfTest = 100 test_job = "Useless Job." 
for x in range(lengthOfTest): self.client.add_job(queuename, test_job) assert self.client.qlen(queuename) == lengthOfTest def test_qstat(self): """Testing QSTAT (default behavior).""" queuename = "test_qstat-%s" % self.testID testqueue = ["a", "b", "c"] for x in testqueue: self.client.add_job(queuename, x) stat = self.client.qstat(queuename) # check the basics assert 'jobs-in' in stat assert 'jobs-out' in stat def test_qstat_dict(self): """Testing QSTAT's (new dict behavior).""" queuename = "test_qstat_dict-%s" % self.testID testqueue = ["a", "b", "c"] for x in testqueue: self.client.add_job(queuename, x) stat = self.client.qstat(queuename, True) assert stat.get('jobs-in', None) is not None assert stat.get('jobs-out', None) is not None def test_shownack(self): """Test that NACK and SHOW work appropriately.""" queuename = "test_show-%s" % self.testID test_job = "Show me." self.client.add_job(queuename, test_job) jobs = self.client.get_job([queuename]) for queue_name, job_id, job in jobs: self.client.nack_job(job_id) shown = self.client.show(job_id, True) assert shown.get('body') == test_job assert shown.get('nacks') == 1 def test_pause(self): """Test that a PAUSE message is acknowledged.""" queuename = "test_show-%s" % self.testID test_job = "Jerbs, they are a thing" self.client.pause(queuename, kw_in=True) try: job_id = self.client.add_job(queuename, test_job) except ResponseError: pass # can we add a job again? self.client.pause(queuename, kw_none=True) job_id = self.client.add_job(queuename, test_job) jobs = self.client.get_job([queuename]) # TODO(canardleteer): add a test of PAUSE SHOW def test_get_job(self): queue_name = "test_get_job." + self.testID job = str(time.time()) job_id = self.client.add_job(queue_name, job) expected = [(queue_name, job_id, job)] got = self.client.get_job([queue_name], withcounters=False) assert expected == got def test_get_job_withcounters(self): queue_name = "test_get_job." + self.testID job = str(time.time()) job_id = self.client.add_job(queue_name, job) nacks = 0 additional_deliveries = 0 expected = [(queue_name, job_id, job, nacks, additional_deliveries)] got = self.client.get_job([queue_name], withcounters=True) assert expected == got if __name__ == '__main__': unittest.main()
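# --- Hedged usage sketch (not part of the test suite) ---
# The minimal produce/consume/ack cycle these tests exercise; assumes a
# Disque node on localhost:7711 as in setUp(). Note the tests compare
# payloads against six.b(...), i.e. jobs come back as bytes on Python 3.
#
#   client = Client(['localhost:7711'])
#   client.connect()
#   client.add_job('sketch_q', 'hello', timeout=100)
#   for queue_name, job_id, job in client.get_job(['sketch_q']):
#       print(queue_name, job_id, job)
#       client.ack_job(job_id)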
[((232, 4, 232, 19), 'unittest.main', 'unittest.main', ({}, {}), '()', False, 'import unittest\n'), ((25, 22, 25, 48), 'pydisque.client.Client', 'Client', ({(25, 29, 25, 47): "['localhost:7711']"}, {}), "(['localhost:7711'])", False, 'from pydisque.client import Client\n'), ((98, 13, 98, 24), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((110, 13, 110, 24), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((32, 17, 32, 28), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((43, 17, 43, 28), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((83, 17, 83, 28), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((93, 15, 93, 26), 'six.b', 'six.b', ({(93, 21, 93, 25): '"""q1"""'}, {}), "('q1')", False, 'import six\n'), ((94, 15, 94, 26), 'six.b', 'six.b', ({(94, 21, 94, 25): '"""q2"""'}, {}), "('q2')", False, 'import six\n'), ((212, 18, 212, 29), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((222, 18, 222, 29), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((27, 33, 27, 44), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((28, 33, 28, 62), 'random.randint', 'random.randint', ({(28, 48, 28, 52): '(1000)', (28, 54, 28, 61): '(1000000)'}, {}), '(1000, 1000000)', False, 'import random\n'), ((37, 26, 37, 35), 'six.b', 'six.b', ({(37, 32, 37, 34): 't1'}, {}), '(t1)', False, 'import six\n'), ((51, 26, 51, 35), 'six.b', 'six.b', ({(51, 32, 51, 34): 't1'}, {}), '(t1)', False, 'import six\n'), ((57, 26, 57, 35), 'six.b', 'six.b', ({(57, 32, 57, 34): 't1'}, {}), '(t1)', False, 'import six\n')]
samirsahoo007/Naive-Bayes-and-Decision-Tree-Classifiers
src/runner.py
619c5c0b17438d1014f7ca7e4ce13cc44c45de3c
# -*- coding: utf-8 -*- # """*********************************************************************************************""" # FileName [ runner.py ] # Synopsis [ main program that runs the 'Naive Bayes' and 'Decision Tree' training / testing ] # Author [ Ting-Wei Liu (Andi611) ] # Copyright [ Copyleft(c), NTUEE, NTU, Taiwan ] """*********************************************************************************************""" ############### # IMPORTATION # ############### import os import csv import argparse import numpy as np from data_loader import data_loader from classifiers import naive_bayes_runner from classifiers import decision_tree_runner ################## # CONFIGURATIONS # ################## def get_config(): parser = argparse.ArgumentParser(description='descrip_msg') classifier = parser.add_argument_group('classifier') classifier.add_argument('--classifier', type=str, default='', help='classifier to be specified by user') classifier.add_argument('--naive_bayes', action='store_true', help='enable Naive Bayes classification mode') classifier.add_argument('--decision_tree', action='store_true', help='enable Decision Tree classification mode') mode_args = parser.add_argument_group('mode') mode_args.add_argument('--search_opt', action='store_true', help='search for optimal parameters for classifiers') mode_args.add_argument('--run_all', action='store_true', help='run all distribution assumption for the Naive Bayes classifier') mode_args.add_argument('--visualize_tree', action='store_true', help='plot and visualize the Decision Tree classifier') data_args = parser.add_argument_group('data') data_args.add_argument('--data_news', action='store_true', help='Training and testing on the News dataset') data_args.add_argument('--data_mushroom', action='store_true', help='Training and testing on the Mushroom dataset') data_args.add_argument('--data_income', action='store_true', help='Training and testing on the Income dataset') path_args = parser.add_argument_group('train_path') path_args.add_argument('--train_path', type=str, default='', help='training path to be specified by user') path_args.add_argument('--train_path_news', type=str, default='../data/news/news_train.csv', help='path to the News training dataset') path_args.add_argument('--train_path_mushroom', type=str, default='../data/mushroom/mushroom_train.csv', help='path to the Mushroom training dataset') path_args.add_argument('--train_path_income', type=str, default='../data/income/income_train.csv', help='path to the Income training dataset') path_args = parser.add_argument_group('test_path') path_args.add_argument('--test_path', type=str, default='', help='testing path to be specified by user') path_args.add_argument('--test_path_news', type=str, default='../data/news/news_test.csv', help='path to the News testing dataset') path_args.add_argument('--test_path_mushroom', type=str, default='../data/mushroom/mushroom_test.csv', help='path to the Mushroom testing dataset') path_args.add_argument('--test_path_income', type=str, default='../data/income/income_test.csv', help='path to the Income testing dataset') path_args = parser.add_argument_group('output_path') path_args.add_argument('--output_path', type=str, default='../result/output.csv', help='path to save model prediction') args = parser.parse_args() args = error_handling(args) return args ################## # ERROR HANDLING # ################## def error_handling(args): if args.classifier != '': args.naive_bayes = True if args.classifier == 'N' else False 
        args.decision_tree = True if args.classifier == 'D' else False
    if args.naive_bayes and args.decision_tree:
        raise AssertionError('Please choose only one classifier at a time, or specify a valid classifier!')
    if sum([args.search_opt, args.run_all, args.visualize_tree]) > 1:
        raise AssertionError('Please choose one mode at a time!')
    # fixed: the original checked the nonexistent attribute `args.income`
    # and only fired when all three datasets were selected
    if sum([args.data_news, args.data_mushroom, args.data_income]) > 1:
        raise AssertionError('Please choose one and only one dataset at a time!')
    if args.train_path != '' and args.test_path != '':
        if not os.path.isfile(args.train_path) or not os.path.isfile(args.test_path):
            raise AssertionError('The given file path is invalid!')
        if args.data_news:
            args.train_path_news = args.train_path
            args.test_path_news = args.test_path
        elif args.data_mushroom:
            args.train_path_mushroom = args.train_path
            args.test_path_mushroom = args.test_path
        elif args.data_income:
            args.train_path_income = args.train_path
            args.test_path_income = args.test_path
        else:
            raise AssertionError('Must choose a dataset!')
    return args


#################
# OUTPUT WRITER #
#################
def output_writer(path, result):
    with open(path, 'w') as f:
        file = csv.writer(f, delimiter=',', quotechar='\r')
        for item in result:
            file.writerow([int(item)])
        print('Results have been successfully saved to: %s' % (path))
    return True


########
# MAIN #
########
"""
    main function
"""
def main():
    args = get_config()
    loader = data_loader(args)

    #---fetch data---#
    if args.data_news:
        train_x, train_y, test_x, test_y = loader.fetch_news()
        MODEL = 'NEWS'
    elif args.data_mushroom:
        train_x, train_y, test_x, test_y = loader.fetch_mushroom()
        MODEL = 'MUSHROOM'
    elif args.data_income:
        train_x, train_y, test_x, test_y = loader.fetch_income()  # -> test_y == None
        MODEL = 'INCOME'

    ###############
    # NAIVE BAYES #
    ###############
    if args.naive_bayes:

        #---construct model---#
        naive_bayes = naive_bayes_runner(MODEL, train_x, train_y, test_x, test_y)

        #---modes---#
        if args.search_opt:
            naive_bayes.search_alpha()
        elif args.run_all:
            naive_bayes.run_best_all()
        else:
            pred_y = naive_bayes.run_best()
            output_writer(args.output_path, pred_y)

    #################
    # DECISION TREE #
    #################
    if args.decision_tree:

        #---construct model---#
        decision_tree = decision_tree_runner(MODEL, train_x, train_y, test_x, test_y)

        #---modes---#
        if args.search_opt:
            decision_tree.search_max_depth()
        elif args.visualize_tree:
            decision_tree.visualize()
        else:
            pred_y = decision_tree.run_best()
            output_writer(args.output_path, pred_y)


if __name__ == '__main__':
    main()
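# --- Hedged usage examples (not part of the original script) ---
# Typical invocations given the argparse flags defined in get_config():
#
#   python runner.py --naive_bayes --data_news
#   python runner.py --decision_tree --data_mushroom --search_opt
#   python runner.py --classifier N --data_income \
#       --train_path ../data/income/income_train.csv \
#       --test_path ../data/income/income_test.csv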
[((26, 10, 26, 60), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((114, 10, 114, 27), 'data_loader.data_loader', 'data_loader', ({(114, 22, 114, 26): 'args'}, {}), '(args)', False, 'from data_loader import data_loader\n'), ((98, 9, 98, 53), 'csv.writer', 'csv.writer', (), '', False, 'import csv\n'), ((132, 16, 132, 75), 'classifiers.naive_bayes_runner', 'naive_bayes_runner', ({(132, 35, 132, 40): 'MODEL', (132, 42, 132, 49): 'train_x', (132, 51, 132, 58): 'train_y', (132, 60, 132, 66): 'test_x', (132, 68, 132, 74): 'test_y'}, {}), '(MODEL, train_x, train_y, test_x, test_y)', False, 'from classifiers import naive_bayes_runner\n'), ((148, 18, 148, 79), 'classifiers.decision_tree_runner', 'decision_tree_runner', ({(148, 39, 148, 44): 'MODEL', (148, 46, 148, 53): 'train_x', (148, 55, 148, 62): 'train_y', (148, 64, 148, 70): 'test_x', (148, 72, 148, 78): 'test_y'}, {}), '(MODEL, train_x, train_y, test_x, test_y)', False, 'from classifiers import decision_tree_runner\n'), ((77, 9, 77, 40), 'os.path.isfile', 'os.path.isfile', ({(77, 24, 77, 39): 'args.train_path'}, {}), '(args.train_path)', False, 'import os\n'), ((77, 48, 77, 78), 'os.path.isfile', 'os.path.isfile', ({(77, 63, 77, 77): 'args.test_path'}, {}), '(args.test_path)', False, 'import os\n')]
Nick-AhSen/iGibson
igibson/metrics/agent.py
c6854f11eec5d935fa3ef3d6d4852c6571beab4b
import copy

import numpy as np
import pybullet as p

from igibson.metrics.metric_base import MetricBase


class BehaviorRobotMetric(MetricBase):
    def __init__(self):
        self.initialized = False
        self.state_cache = {}
        self.next_state_cache = {}

        self.agent_pos = {part: [] for part in ["left_hand", "right_hand", "body"]}
        self.agent_grasping = {part: [] for part in ["left_hand", "right_hand"]}

        self.agent_local_pos = {part: [] for part in ["left_hand", "right_hand"]}

        self.agent_reset = {part: [] for part in ["left_hand", "right_hand", "body"]}

        self.delta_agent_work = {part: [] for part in ["left_hand", "right_hand", "body"]}
        self.delta_agent_distance = {part: [] for part in ["left_hand", "right_hand", "body"]}
        self.delta_agent_grasp_distance = {part: [] for part in ["left_hand", "right_hand"]}

        self.clip = 0.2

    def step_callback(self, igbhvr_act_inst, _):
        robot = igbhvr_act_inst.simulator.robots[0]
        agent_work = {part: 0 for part in ["left_hand", "right_hand", "body"]}
        agent_distance = {part: 0 for part in ["left_hand", "right_hand", "body"]}

        for part in ["left_hand", "right_hand", "body"]:
            self.next_state_cache[part] = {
                "position": np.array(p.getBasePositionAndOrientation(robot.parts[part].get_body_id())[0]),
            }

        if not self.initialized:
            self.state_cache = copy.deepcopy(self.next_state_cache)
            self.initialized = True

        # branches must be mutually exclusive (if/elif); the original second
        # `if` re-appended when both hands reset, desyncing the per-step lists
        if robot.action[19] > 0 and robot.action[27] > 0:
            self.agent_reset["left_hand"].append(True)
            self.agent_reset["right_hand"].append(True)
            self.agent_reset["body"].append(True)
        elif robot.action[19] > 0:
            self.agent_reset["left_hand"].append(True)
            self.agent_reset["right_hand"].append(False)
            self.agent_reset["body"].append(True)
        elif robot.action[27] > 0:
            self.agent_reset["left_hand"].append(False)
            self.agent_reset["right_hand"].append(True)
            self.agent_reset["body"].append(True)
        else:
            self.agent_reset["left_hand"].append(False)
            self.agent_reset["right_hand"].append(False)
            self.agent_reset["body"].append(False)

        for part in self.state_cache:
            delta_pos = np.linalg.norm(self.next_state_cache[part]["position"] - self.state_cache[part]["position"])
            self.agent_pos[part].append(list(self.state_cache[part]["position"]))
            # Exclude agent teleports
            delta_pos = np.clip(delta_pos, -self.clip, self.clip)

            if robot.parts[part].movement_cid is None:
                force = 0
                work = 0
            else:
                force = p.getConstraintState(robot.parts[part].movement_cid)
                work = np.abs((delta_pos * np.linalg.norm(force)))

            distance = np.abs(delta_pos)
            if part in ["left_hand", "right_hand"]:
                self.agent_local_pos[part].append(list(robot.parts[part].get_local_position_orientation()[0]))
            if part in ["left_hand", "right_hand"] and (
                len(p.getContactPoints(robot.parts[part].get_body_id())) > 0
                or robot.parts[part].object_in_hand is not None
            ):
                self.delta_agent_grasp_distance[part].append(distance)
                self.agent_grasping[part].append(True)
            elif part in ["left_hand", "right_hand"]:
                self.delta_agent_grasp_distance[part].append(0)
                self.agent_grasping[part].append(False)

            agent_work[part] = work
            agent_distance[part] = distance

            self.delta_agent_work[part].append(work)
            self.delta_agent_distance[part].append(distance)

        self.state_cache = copy.deepcopy(self.next_state_cache)

    def gather_results(self):
        return {
            "agent_distance": {
                "timestep": self.delta_agent_distance,
            },
            "grasp_distance": {
                "timestep": self.delta_agent_grasp_distance,
            },
            "work": {
                "timestep": self.delta_agent_work,
            },
            "pos": {
                "timestep": self.agent_pos,
            },
            "local_pos": {
                "timestep": self.agent_local_pos,
            },
            "grasping": {
                "timestep": self.agent_grasping,
            },
            "reset": {
                "timestep": self.agent_reset,
            },
        }


class 
FetchRobotMetric(MetricBase): def __init__(self): self.initialized = False self.state_cache = {} self.next_state_cache = {} self.agent_pos = {part: [] for part in ["gripper", "body"]} self.agent_grasping = {part: [] for part in ["gripper"]} self.agent_local_pos = {part: [] for part in ["gripper"]} self.delta_agent_distance = {part: [] for part in ["gripper", "body"]} self.delta_agent_grasp_distance = {part: [] for part in ["gripper"]} self.clip = 0.2 def step_callback(self, igbhvr_act_inst, _): robot = igbhvr_act_inst.simulator.robots[0] agent_distance = {part: 0 for part in self.agent_pos} self.next_state_cache = { "gripper": {"position": robot.get_end_effector_position()}, "body": {"position": robot.get_position()}, } if not self.initialized: self.state_cache = copy.deepcopy(self.next_state_cache) self.initialized = True self.agent_pos["body"].append(list(self.state_cache["body"]["position"])) delta_pos = np.linalg.norm( np.array(self.next_state_cache["body"]["position"]) - self.state_cache["body"]["position"] ) distance = np.abs(delta_pos) self.delta_agent_distance["body"].append(distance) self.agent_pos["gripper"].append(list(self.state_cache["gripper"]["position"])) delta_pos = np.linalg.norm( self.next_state_cache["gripper"]["position"] - self.state_cache["gripper"]["position"] ) gripper_distance = np.abs(delta_pos) self.delta_agent_distance["gripper"].append(gripper_distance) self.agent_local_pos["gripper"].append(list(robot.get_relative_eef_position())) contacts = p.getContactPoints(bodyA=robot.robot_ids[0], linkIndexA=robot.eef_link_id) if len(contacts) > 0: self.delta_agent_grasp_distance["gripper"].append(gripper_distance) self.agent_grasping["gripper"].append(True) else: self.delta_agent_grasp_distance["gripper"].append(0) self.agent_grasping["gripper"].append(False) self.state_cache = copy.deepcopy(self.next_state_cache) def gather_results(self): return { "agent_distance": { "timestep": self.delta_agent_distance, }, "grasp_distance": { "timestep": self.delta_agent_grasp_distance, }, "pos": { "timestep": self.agent_pos, }, "local_pos": { "timestep": self.agent_local_pos, }, "grasping": { "timestep": self.agent_grasping, }, }
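# --- Hedged illustration (not part of the original module) ---
# The per-step bookkeeping above reduces to work = |clip(dx) * ||F|||
# and distance = |clip(dx)|; a toy check with made-up numbers:
if __name__ == "__main__":
    _dx = np.clip(0.35, -0.2, 0.2)  # teleport-sized jumps are clipped to 0.2
    _force = np.array([1.0, 2.0, 2.0])  # stand-in constraint reaction force
    print(np.abs(_dx * np.linalg.norm(_force)))  # work -> 0.6
    print(np.abs(_dx))  # distance -> 0.2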
[((91, 27, 91, 63), 'copy.deepcopy', 'copy.deepcopy', ({(91, 41, 91, 62): 'self.next_state_cache'}, {}), '(self.next_state_cache)', False, 'import copy\n'), ((153, 19, 153, 36), 'numpy.abs', 'np.abs', ({(153, 26, 153, 35): 'delta_pos'}, {}), '(delta_pos)', True, 'import numpy as np\n'), ((157, 20, 159, 9), 'numpy.linalg.norm', 'np.linalg.norm', ({(158, 12, 158, 98): "self.next_state_cache['gripper']['position'] - self.state_cache['gripper'][\n 'position']"}, {}), "(self.next_state_cache['gripper']['position'] - self.\n state_cache['gripper']['position'])", True, 'import numpy as np\n'), ((160, 27, 160, 44), 'numpy.abs', 'np.abs', ({(160, 34, 160, 43): 'delta_pos'}, {}), '(delta_pos)', True, 'import numpy as np\n'), ((165, 19, 165, 93), 'pybullet.getContactPoints', 'p.getContactPoints', (), '', True, 'import pybullet as p\n'), ((173, 27, 173, 63), 'copy.deepcopy', 'copy.deepcopy', ({(173, 41, 173, 62): 'self.next_state_cache'}, {}), '(self.next_state_cache)', False, 'import copy\n'), ((40, 31, 40, 67), 'copy.deepcopy', 'copy.deepcopy', ({(40, 45, 40, 66): 'self.next_state_cache'}, {}), '(self.next_state_cache)', False, 'import copy\n'), ((61, 24, 61, 116), 'numpy.linalg.norm', 'np.linalg.norm', ({(61, 39, 61, 115): "self.next_state_cache[part]['position'] - self.state_cache[part]['position']"}, {}), "(self.next_state_cache[part]['position'] - self.state_cache[\n part]['position'])", True, 'import numpy as np\n'), ((64, 24, 64, 65), 'numpy.clip', 'np.clip', ({(64, 32, 64, 41): 'delta_pos', (64, 43, 64, 53): '-self.clip', (64, 55, 64, 64): 'self.clip'}, {}), '(delta_pos, -self.clip, self.clip)', True, 'import numpy as np\n'), ((72, 23, 72, 40), 'numpy.abs', 'np.abs', ({(72, 30, 72, 39): 'delta_pos'}, {}), '(delta_pos)', True, 'import numpy as np\n'), ((146, 31, 146, 67), 'copy.deepcopy', 'copy.deepcopy', ({(146, 45, 146, 66): 'self.next_state_cache'}, {}), '(self.next_state_cache)', False, 'import copy\n'), ((69, 24, 69, 76), 'pybullet.getConstraintState', 'p.getConstraintState', ({(69, 45, 69, 75): 'robot.parts[part].movement_cid'}, {}), '(robot.parts[part].movement_cid)', True, 'import pybullet as p\n'), ((151, 12, 151, 63), 'numpy.array', 'np.array', ({(151, 21, 151, 62): "self.next_state_cache['body']['position']"}, {}), "(self.next_state_cache['body']['position'])", True, 'import numpy as np\n'), ((70, 43, 70, 64), 'numpy.linalg.norm', 'np.linalg.norm', ({(70, 58, 70, 63): 'force'}, {}), '(force)', True, 'import numpy as np\n')]
Arahabica/font-subset-css
fontslice/__init__.py
393b9a452af49c2168c7a9f84983e4170937ea67
import sys from .main import ( _chunk_list, _get_unicode_range_hash, convert_unicode_range, get_120_unicode_ranges, get_unicode_ranges_from_text, generate_css, main, ) __all__ = [ "_chunk_list", "_get_unicode_range_hash", "convert_unicode_range", "get_120_unicode_ranges", "get_unicode_ranges_from_text", "generate_css", "main", ] if __name__ == "__main__": sys.exit(main())
[]
MrJaatt/ttkbootstrap
src/ttkbootstrap/dialogs/dialogs.py
4e837d64859e5a230ef0500faddbb2c384f5b9d4
""" This module contains various base dialog base classes that can be used to create custom dialogs for the end user. These classes serve as the basis for the pre-defined static helper methods in the `Messagebox`, and `Querybox` container classes. """ import calendar import textwrap from datetime import datetime from tkinter import font import ttkbootstrap as ttk from ttkbootstrap import utility from ttkbootstrap.icons import Icon from ttkbootstrap.constants import * from tkinter import BaseWidget from ttkbootstrap.localization import MessageCatalog class Dialog(BaseWidget): """A simple dialog base class.""" def __init__(self, parent=None, title="", alert=False): """ Parameters: parent (Widget): Makes the window the logical parent of the message box. The messagebox is displayed on top of its parent window. title (str): The string displayed as the title of the message box. This option is ignored on Mac OS X, where platform guidelines forbid the use of a title on this kind of dialog. alert (bool): Ring the display's bell when the dialog is shown. """ BaseWidget._setup(self, parent, {}) self._winsys = self.master.tk.call("tk", "windowingsystem") self._toplevel = None self._title = title or " " self._result = None self._alert = alert self._initial_focus = None def _locate(self): toplevel = self._toplevel master = toplevel.master screen_height = toplevel.winfo_screenheight() screen_width = toplevel.winfo_screenwidth() toplevel.update_idletasks() if master.winfo_viewable(): m_width = master.winfo_width() m_height = master.winfo_height() m_x = master.winfo_rootx() m_y = master.winfo_rooty() else: m_width = screen_width m_height = screen_height m_x = m_y = 0 w_width = toplevel.winfo_reqwidth() w_height = toplevel.winfo_reqheight() x = int(m_x + (m_width - w_width) * 0.45) y = int(m_y + (m_height - w_height) * 0.3) if x + w_width > screen_width: x = screen_width - w_width elif x < 0: x = 0 if y + w_height > screen_height: y = screen_height - w_height elif y < 0: y = 0 toplevel.geometry(f"+{x}+{y}") def show(self): """Show the popup dialog""" self._result = None self.build() self._locate() self._toplevel.deiconify() if self._alert: self._toplevel.bell() if self._initial_focus: self._initial_focus.focus_force() self._toplevel.grab_set() self._toplevel.wait_window() def create_body(self, master): """Create the dialog body. This method should be overridden and is called by the `build` method. Set the `self._initial_focus` for the widget that should receive the initial focus. Parameters: master (Widget): The parent widget. """ raise NotImplementedError def create_buttonbox(self, master): """Create the dialog button box. This method should be overridden and is called by the `build` method. Set the `self._initial_focus` for the button that should receive the intial focus. Parameters: master (Widget): The parent widget. 
""" raise NotImplementedError def build(self): """Build the dialog from settings""" # setup toplevel based on widowing system if self._winsys == "win32": self._toplevel = ttk.Toplevel( transient=self.master, title=self._title, resizable=(0, 0), minsize=(250, 15), iconify=True, ) else: self._toplevel = ttk.Toplevel( transient=self.master, title=self._title, resizable=(0, 0), windowtype="dialog", iconify=True, ) self._toplevel.withdraw() # reset the iconify state # bind <Escape> event to window close self._toplevel.bind("<Escape>", lambda _: self._toplevel.destroy()) # set position of popup from parent window #self._locate() # create widgets self.create_body(self._toplevel) self.create_buttonbox(self._toplevel) # update the window before showing self._toplevel.update_idletasks() @property def result(self): """Returns the result of the dialog.""" return self._result class MessageDialog(Dialog): """A simple modal dialog class that can be used to build simple message dialogs. Displays a message and a set of buttons. Each of the buttons in the message window is identified by a unique symbolic name. After the message window is popped up, the message box awaits for the user to select one of the buttons. Then it returns the symbolic name of the selected button. Use a `Toplevel` widget for more advanced modal dialog designs. """ def __init__( self, message, title=" ", buttons=None, command=None, width=50, parent=None, alert=False, default=None, padding=(20, 20), icon=None, **kwargs ): """ Parameters: message (str): A message to display in the message box. title (str): The string displayed as the title of the message box. This option is ignored on Mac OS X, where platform guidelines forbid the use of a title on this kind of dialog. buttons (List[str]): A list of buttons to appear at the bottom of the popup messagebox. The buttons can be a list of strings which will define the symbolic name and the button text. `['OK', 'Cancel']`. Alternatively, you can assign a bootstyle to each button by using the colon to separate the button text and the bootstyle. If no colon is found, then the style is set to 'primary' by default. `['OK:success','Cancel:danger']`. command (Tuple[Callable, str]): The function to invoke when the user closes the dialog. The actual command is a tuple that consists of the function to call and the symbolic name of the button that closes the dialog. width (int): The maximum number of characters per line in the message. If the text stretches beyond the limit, the line will break at the word. parent (Widget): Makes the window the logical parent of the message box. The messagebox is displayed on top of its parent window. alert (bool): Ring the display's bell when the dialog is shown. default (str): The symbolic name of the default button. The default button is invoked when the the <Return> key is pressed. If no default is provided, the right-most button in the button list will be set as the default., padding (Union[int, Tuple[int]]): The amount of space between the border and the widget contents. icon (str): An image path, path-like object or image data to be displayed to the left of the text. **kwargs (Dict): Other optional keyword arguments. 
        Example:

            ```python
            root = tk.Tk()

            md = MessageDialog("Displays a message with buttons.")
            md.show()
            ```
        """
        super().__init__(parent, title, alert)
        self._message = message
        self._command = command
        self._width = width
        self._alert = alert
        # store the symbolic name itself; the original `(default,)` tuple
        # could never compare equal to a button's text below
        self._default = default
        self._padding = padding
        self._icon = icon
        self._localize = kwargs.get('localize')

        if buttons is None:
            self._buttons = [
                f"{MessageCatalog.translate('Cancel')}:secondary",
                f"{MessageCatalog.translate('OK')}:primary"
            ]
        else:
            self._buttons = buttons

    def create_body(self, master):
        """Overrides the parent method; adds the message section."""
        container = ttk.Frame(master, padding=self._padding)
        if self._icon:
            try:
                # assume this is image data
                self._img = ttk.PhotoImage(data=self._icon)
                icon_lbl = ttk.Label(container, image=self._img)
                icon_lbl.pack(side=LEFT, padx=5)
            except:
                try:
                    # assume this is a file path
                    self._img = ttk.PhotoImage(file=self._icon)
                    icon_lbl = ttk.Label(container, image=self._img)
                    icon_lbl.pack(side=LEFT, padx=5)
                except:
                    # icon is neither data nor a valid file path
                    print('MessageDialog icon is invalid')

        if self._message:
            for msg in self._message.split("\n"):
                message = "\n".join(textwrap.wrap(msg, width=self._width))
                message_label = ttk.Label(container, text=message)
                message_label.pack(pady=(0, 3), fill=X, anchor=N)
        container.pack(fill=X, expand=True)

    def create_buttonbox(self, master):
        """Overrides the parent method; adds the message buttonbox"""
        frame = ttk.Frame(master, padding=(5, 5))

        button_list = []

        for i, button in enumerate(self._buttons[::-1]):
            cnf = button.split(":")
            if len(cnf) == 2:
                text, bootstyle = cnf
            else:
                text = cnf[0]
                bootstyle = "secondary"

            if self._localize == True:
                text = MessageCatalog.translate(text)

            btn = ttk.Button(frame, bootstyle=bootstyle, text=text)
            # capture this button as a default argument; a plain closure
            # would late-bind every <Return> to the last button created
            btn.bind("<Return>", lambda _, b=btn: b.invoke())
            btn.configure(command=lambda b=btn: self.on_button_press(b))
            btn.pack(padx=2, side=RIGHT)
            btn.lower()  # set focus traversal left-to-right
            button_list.append(btn)

            if self._default is not None and text == self._default:
                self._initial_focus = btn
            elif self._default is None and i == 0:
                self._initial_focus = btn

        # bind default button to return key press and set focus
        self._toplevel.bind("<Return>", lambda _, b=btn: b.invoke())
        self._toplevel.bind("<KP_Enter>", lambda _, b=btn: b.invoke())

        ttk.Separator(self._toplevel).pack(fill=X)
        frame.pack(side=BOTTOM, fill=X, anchor=S)

        if not self._initial_focus:
            self._initial_focus = button_list[0]

    def on_button_press(self, button):
        """Save result, destroy the toplevel, and execute command."""
        self._result = button["text"]
        command = self._command
        if command is not None:
            command()
        self._toplevel.destroy()

    def show(self):
        """Create and display the popup messagebox."""
        super().show()


class QueryDialog(Dialog):
    """A simple modal dialog class that can be used to build simple
    data input dialogs. Displays a prompt, an input box, and a set of
    buttons. Additional data manipulation can be performed on the
    user input post-hoc by overriding the `apply` method.

    Use a `Toplevel` widget for more advanced modal dialog designs.
    """

    def __init__(
        self,
        prompt,
        title=" ",
        initialvalue="",
        minvalue=None,
        maxvalue=None,
        width=65,
        datatype=str,
        padding=(20, 20),
        parent=None,
    ):
        """
        Parameters:

            prompt (str):
                A message to display in the message box above the entry
                widget.

            title (str):
                The string displayed as the title of the message box.
                This option is ignored on Mac OS X, where platform
                guidelines forbid the use of a title on this kind of
                dialog.
initialvalue (Any): The initial value in the entry widget. minvalue (Any): The minimum allowed value. Only valid for int and float data types. maxvalue (Any): The maximum allowed value. Only valid for int and float data types. width (int): The maximum number of characters per line in the message. If the text stretches beyond the limit, the line will break at the word. parent (Widget): Makes the window the logical parent of the message box. The messagebox is displayed on top of its parent window. padding (Union[int, Tuple[int]]): The amount of space between the border and the widget contents. datatype (Union[int, str, float]): The data type used to validate the entry value. """ super().__init__(parent, title) self._prompt = prompt self._initialvalue = initialvalue self._minvalue = minvalue self._maxvalue = maxvalue self._width = width self._datatype = datatype self._padding = padding self._result = None def create_body(self, master): """Overrides the parent method; adds the message and input section.""" frame = ttk.Frame(master, padding=self._padding) if self._prompt: for p in self._prompt.split("\n"): prompt = "\n".join(textwrap.wrap(p, width=self._width)) prompt_label = ttk.Label(frame, text=prompt) prompt_label.pack(pady=(0, 5), fill=X, anchor=N) entry = ttk.Entry(master=frame) entry.insert(END, self._initialvalue) entry.pack(pady=(0, 5), fill=X) entry.bind("<Return>", self.on_submit) entry.bind("<KP_Enter>", self.on_submit) entry.bind("<Escape>", self.on_cancel) frame.pack(fill=X, expand=True) self._initial_focus = entry def create_buttonbox(self, master): """Overrides the parent method; adds the message buttonbox""" frame = ttk.Frame(master, padding=(5, 10)) submit = ttk.Button( master=frame, bootstyle="primary", text=MessageCatalog.translate("Submit"), command=self.on_submit, ) submit.pack(padx=5, side=RIGHT) submit.lower() # set focus traversal left-to-right cancel = ttk.Button( master=frame, bootstyle="secondary", text=MessageCatalog.translate("Cancel"), command=self.on_cancel, ) cancel.pack(padx=5, side=RIGHT) cancel.lower() # set focus traversal left-to-right ttk.Separator(self._toplevel).pack(fill=X) frame.pack(side=BOTTOM, fill=X, anchor=S) def on_submit(self, *_): """Save result, destroy the toplevel, and apply any post-hoc data manipulations.""" self._result = self._initial_focus.get() valid_result = self.validate() if not valid_result: return # keep toplevel open for valid response self._toplevel.destroy() self.apply() def on_cancel(self, *_): """Close the toplevel and return empty.""" self._toplevel.destroy() return def validate(self): """Validate the data This method is called automatically to validate the data before the dialog is destroyed. Can be subclassed and overridden. 
""" # no default checks required for string data types if self._datatype not in [float, int, complex]: return True # convert result to appropriate data type try: self._result = self._datatype(self._result) except ValueError: msg = MessageCatalog.translate('Should be of data type') Messagebox.ok( message=f"{msg} `{self._datatype}`", title=MessageCatalog.translate("Invalid data type"), ) return False # max value range if self._maxvalue is not None: if self._result > self._maxvalue: msg = MessageCatalog.translate('Number cannot be greater than') Messagebox.ok( message=f"{msg} {self._maxvalue}", title=MessageCatalog.translate("Out of range"), ) return False # min value range if self._minvalue is not None: if self._result < self._minvalue: msg = MessageCatalog.translate('Number cannot be less than') Messagebox.ok( message=f"{msg} {self._minvalue}", title=MessageCatalog.translate("Out of range"), ) return False # valid result return True def apply(self): """Process the data. This method is called automatically to process the data after the dialog is destroyed. By default, it does nothing. """ pass # override class DatePickerDialog: """A dialog that displays a calendar popup and returns the selected date as a datetime object. The current date is displayed by default unless the `startdate` parameter is provided. The month can be changed by clicking the chevrons to the left and right of the month-year title. Left-click the arrow to move the calendar by one month. Right-click the arrow to move the calendar by one year. Right-click the title to reset the calendar to the start date. The starting weekday can be changed with the `firstweekday` parameter for geographies that do not start the calendar on Sunday, which is the default. The widget grabs focus and all screen events until released. If you want to cancel a date selection, click the 'X' button at the top-right corner of the widget. The bootstyle api may be used to change the style of the widget. The available colors include -> primary, secondary, success, info, warning, danger, light, dark. ![](../../assets/dialogs/date-picker-dialog.png) """ def __init__( self, parent=None, title=" ", firstweekday=6, startdate=None, bootstyle=PRIMARY, ): """ Parameters: parent (Widget): The parent widget; the popup will appear to the bottom-right of the parent widget. If no parent is provided, the widget is centered on the screen. title (str): The text that appears on the titlebar. firstweekday (int): Specifies the first day of the week. 0=Monday, 1=Tuesday, etc... startdate (datetime): The date to be in focus when the widget is displayed. bootstyle (str): The following colors can be used to change the color of the title and hover / pressed color -> primary, secondary, info, warning, success, danger, light, dark. 
""" self.parent = parent self.root = ttk.Toplevel( title=title, transient=self.parent, resizable=(False, False), topmost=True, minsize=(226, 1), iconify=True ) self.firstweekday = firstweekday self.startdate = startdate or datetime.today().date() self.bootstyle = bootstyle or PRIMARY self.date_selected = self.startdate self.date = startdate or self.date_selected self.calendar = calendar.Calendar(firstweekday=firstweekday) self.titlevar = ttk.StringVar() self.datevar = ttk.IntVar() self._setup_calendar() self.root.grab_set() self.root.wait_window() def _setup_calendar(self): """Setup the calendar widget""" # create the widget containers self.frm_calendar = ttk.Frame( master=self.root, padding=0, borderwidth=0, relief=FLAT ) self.frm_calendar.pack(fill=BOTH, expand=YES) self.frm_title = ttk.Frame(self.frm_calendar, padding=(3, 3)) self.frm_title.pack(fill=X) self.frm_header = ttk.Frame(self.frm_calendar, bootstyle=SECONDARY) self.frm_header.pack(fill=X) # setup the toplevel widget self.root.withdraw() # reset the iconify state self.frm_calendar.update_idletasks() # actualize geometry # create visual components self._draw_titlebar() self._draw_calendar() # make toplevel visible self._set_window_position() self.root.deiconify() def _update_widget_bootstyle(self): self.frm_title.configure(bootstyle=self.bootstyle) self.title.configure(bootstyle=f"{self.bootstyle}-inverse") self.prev_period.configure(style=f"Chevron.{self.bootstyle}.TButton") self.next_period.configure(style=f"Chevron.{self.bootstyle}.TButton") def _draw_calendar(self): self._update_widget_bootstyle() self._set_title() self._current_month_days() self.frm_dates = ttk.Frame(self.frm_calendar) self.frm_dates.pack(fill=BOTH, expand=YES) for row, weekday_list in enumerate(self.monthdays): for col, day in enumerate(weekday_list): self.frm_dates.columnconfigure(col, weight=1) if day == 0: ttk.Label( master=self.frm_dates, text=self.monthdates[row][col].day, anchor=CENTER, padding=5, bootstyle=SECONDARY, ).grid(row=row, column=col, sticky=NSEW) else: if all( [ day == self.date_selected.day, self.date.month == self.date_selected.month, self.date.year == self.date_selected.year, ] ): day_style = "secondary-toolbutton" else: day_style = f"{self.bootstyle}-calendar" def selected(x=row, y=col): self._on_date_selected(x, y) btn = ttk.Radiobutton( master=self.frm_dates, variable=self.datevar, value=day, text=day, bootstyle=day_style, padding=5, command=selected, ) btn.grid(row=row, column=col, sticky=NSEW) def _draw_titlebar(self): """Draw the calendar title bar which includes the month title and the buttons that increment and decrement the selected month. In addition to the previous and next MONTH commands that are assigned to the button press, a "right-click" event is assigned to each button that causes the calendar to move to the previous and next YEAR. 
""" # create and pack the title and action buttons self.prev_period = ttk.Button( master=self.frm_title, text="«", command=self.on_prev_month ) self.prev_period.pack(side=LEFT) self.title = ttk.Label( master=self.frm_title, textvariable=self.titlevar, anchor=CENTER, font="-weight bold", ) self.title.pack(side=LEFT, fill=X, expand=YES) self.next_period = ttk.Button( master=self.frm_title, text="»", command=self.on_next_month, ) self.next_period.pack(side=LEFT) # bind "year" callbacks to action buttons self.prev_period.bind("<Button-3>", self.on_prev_year, "+") self.next_period.bind("<Button-3>", self.on_next_year, "+") self.title.bind("<Button-1>", self.on_reset_date) # create and pack days of the week header for col in self._header_columns(): ttk.Label( master=self.frm_header, text=col, anchor=CENTER, padding=5, bootstyle=(SECONDARY, INVERSE), ).pack(side=LEFT, fill=X, expand=YES) def _set_title(self): _titledate = f'{self.date.strftime("%B %Y")}' self.titlevar.set(value=_titledate) def _current_month_days(self): """Fetch the day numbers and dates for all days in the current month. `monthdays` is a list of days as integers, and `monthdates` is a list of `datetime` objects. """ self.monthdays = self.calendar.monthdayscalendar( year=self.date.year, month=self.date.month ) self.monthdates = self.calendar.monthdatescalendar( year=self.date.year, month=self.date.month ) def _header_columns(self): """Create and return a list of weekdays to be used as a header in the calendar. The order of the weekdays is based on the `firstweekday` property. Returns: List[str]: A list of weekday column names for the calendar header. """ weekdays = [MessageCatalog.translate("Mo"), MessageCatalog.translate("Tu"), MessageCatalog.translate("We"), MessageCatalog.translate("Th"), MessageCatalog.translate("Fr"), MessageCatalog.translate("Sa"), MessageCatalog.translate("Su")] header = weekdays[self.firstweekday :] + weekdays[: self.firstweekday] return header def _on_date_selected(self, row, col): """Callback for selecting a date. An index is assigned to each date button that corresponds to the dates in the `monthdates` matrix. When the user clicks a button to select a date, the index from this button is used to lookup the date value of the button based on the row and column index reference. This value is saved in the `date_selected` property and the `Toplevel` is destroyed. Parameters: index (Tuple[int, int]): A row and column index of the date selected; to be found in the `monthdates` matrix. 

        Returns:

            datetime:
                The date selected.
        """
        self.date_selected = self.monthdates[row][col]
        self.root.destroy()

    def _selection_callback(func):
        """Calls the decorated `func` and redraws the calendar."""

        def inner(self, *args):
            func(self, *args)
            self.frm_dates.destroy()
            self._draw_calendar()

        return inner

    @_selection_callback
    def on_next_month(self):
        """Increment the calendar data to the next month"""
        year, month = self._nextmonth(self.date.year, self.date.month)
        self.date = datetime(year=year, month=month, day=1).date()

    @_selection_callback
    def on_next_year(self, *_):
        """Increment the calendar data to the next year"""
        year = self.date.year + 1
        month = self.date.month
        self.date = datetime(year=year, month=month, day=1).date()

    @_selection_callback
    def on_prev_month(self):
        """Decrement the calendar data to the previous month"""
        year, month = self._prevmonth(self.date.year, self.date.month)
        self.date = datetime(year=year, month=month, day=1).date()

    @_selection_callback
    def on_prev_year(self, *_):
        """Decrement the calendar data to the previous year"""
        year = self.date.year - 1
        month = self.date.month
        self.date = datetime(year=year, month=month, day=1).date()

    @_selection_callback
    def on_reset_date(self, *_):
        """Set the calendar to the start date"""
        self.date = self.startdate

    def _set_window_position(self):
        """Move the window to the bottom-right of the parent widget, or
        to the middle of the screen if no parent is provided.
        """
        width = self.root.winfo_reqwidth()
        height = self.root.winfo_reqheight()
        if self.parent:
            xpos = self.parent.winfo_rootx() + self.parent.winfo_width()
            ypos = self.parent.winfo_rooty() + self.parent.winfo_height()
            self.root.geometry(f"+{xpos}+{ypos}")
        else:
            # center on the screen
            xpos = self.root.winfo_screenwidth() // 2 - width // 2
            ypos = self.root.winfo_screenheight() // 2 - height // 2
            self.root.geometry(f"+{xpos}+{ypos}")

    @staticmethod
    def _nextmonth(year, month):
        if month == 12:
            return year + 1, 1
        else:
            return year, month + 1

    @staticmethod
    def _prevmonth(year, month):
        if month == 1:
            return year - 1, 12
        else:
            return year, month - 1
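
# Sketch (illustrative, not part of the original module): a Monday-first
# date picker anchored to a hypothetical `button` widget; the chosen
# date is read back from `date_selected` once the popup closes.
def _demo_date_picker(button):
    dialog = DatePickerDialog(parent=button, firstweekday=0)
    return dialog.date_selected  # a `date` object
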

class FontDialog(Dialog):
    """A dialog that displays a variety of options for choosing a font.

    This dialog constructs and returns a `Font` object based on the
    options selected by the user. The initial font is based on OS
    settings and will vary.

    The font object is returned when the **Ok** button is pressed and
    can be passed to any widget that accepts a _font_ configuration
    option.

    ![](../../assets/dialogs/querybox-get-font.png)
    """

    def __init__(self, title="Font Selector", parent=None):
        title = MessageCatalog.translate(title)
        super().__init__(parent=parent, title=title)
        self._style = ttk.Style()
        self._default = font.nametofont("TkDefaultFont")
        self._actual = self._default.actual()
        self._size = ttk.Variable(value=self._actual["size"])
        self._family = ttk.Variable(value=self._actual["family"])
        self._slant = ttk.Variable(value=self._actual["slant"])
        self._weight = ttk.Variable(value=self._actual["weight"])
        self._overstrike = ttk.Variable(value=self._actual["overstrike"])
        self._underline = ttk.Variable(value=self._actual["underline"])
        self._preview_font = font.Font()
        self._slant.trace_add("write", self._update_font_preview)
        self._weight.trace_add("write", self._update_font_preview)
        self._overstrike.trace_add("write", self._update_font_preview)
        self._underline.trace_add("write", self._update_font_preview)

        _headingfont = font.nametofont("TkHeadingFont")
        _headingfont.configure(weight="bold")

        self._update_font_preview()

        self._families = set([self._family.get()])
        for f in font.families():
            if all([f, not f.startswith("@"), "emoji" not in f.lower()]):
                self._families.add(f)

    def create_body(self, master):
        width = utility.scale_size(master, 600)
        height = utility.scale_size(master, 500)
        self._toplevel.geometry(f"{width}x{height}")

        family_size_frame = ttk.Frame(master, padding=10)
        family_size_frame.pack(fill=X, anchor=N)
        self._initial_focus = self._font_families_selector(family_size_frame)
        self._font_size_selector(family_size_frame)
        self._font_options_selectors(master, padding=10)
        self._font_preview(master, padding=10)

    def create_buttonbox(self, master):
        container = ttk.Frame(master, padding=(5, 10))
        container.pack(fill=X)

        ok_btn = ttk.Button(
            master=container,
            bootstyle="primary",
            text=MessageCatalog.translate("OK"),
            command=self._on_submit,
        )
        ok_btn.pack(side=RIGHT, padx=5)
        ok_btn.bind("<Return>", lambda _: ok_btn.invoke())

        cancel_btn = ttk.Button(
            master=container,
            bootstyle="secondary",
            text=MessageCatalog.translate("Cancel"),
            command=self._on_cancel,
        )
        cancel_btn.pack(side=RIGHT, padx=5)
        cancel_btn.bind("<Return>", lambda _: cancel_btn.invoke())

    def _font_families_selector(self, master):
        container = ttk.Frame(master)
        container.pack(fill=BOTH, expand=YES, side=LEFT)

        header = ttk.Label(
            container,
            text=MessageCatalog.translate("Family"),
            font="TkHeadingFont",
        )
        header.pack(fill=X, pady=(0, 2), anchor=N)

        listbox = ttk.Treeview(
            master=container,
            height=5,
            show="",
            columns=[0],
        )
        listbox.column(0, width=utility.scale_size(listbox, 250))
        listbox.pack(side=LEFT, fill=BOTH, expand=YES)

        listbox_vbar = ttk.Scrollbar(
            container,
            command=listbox.yview,
            orient=VERTICAL,
            bootstyle="rounded",
        )
        listbox_vbar.pack(side=RIGHT, fill=Y)
        listbox.configure(yscrollcommand=listbox_vbar.set)

        for f in self._families:
            listbox.insert("", iid=f, index=END, tags=[f], values=[f])
            listbox.tag_configure(f, font=(f, self._size.get()))

        iid = self._family.get()
        listbox.selection_set(iid)  # select default value
        listbox.see(iid)  # ensure default is visible
        listbox.bind(
            "<<TreeviewSelect>>", lambda e: self._on_select_font_family(e)
        )
        return listbox

    def _font_size_selector(self, master):
        container = ttk.Frame(master)
        container.pack(side=LEFT, fill=Y, padx=(10, 0))

        header = ttk.Label(
            container,
            text=MessageCatalog.translate("Size"),
            font="TkHeadingFont",
        )
        header.pack(fill=X, pady=(0, 2), anchor=N)

        sizes_listbox = ttk.Treeview(container, height=7, columns=[0], show="")
        sizes_listbox.column(0, width=utility.scale_size(sizes_listbox, 24))

        sizes = [*range(8, 13), *range(13, 30, 2), 36, 48, 72]
        for s in sizes:
            sizes_listbox.insert("", iid=s, index=END, values=[s])

        iid = self._size.get()
        sizes_listbox.selection_set(iid)
        sizes_listbox.see(iid)
        sizes_listbox.bind(
            "<<TreeviewSelect>>", lambda e: self._on_select_font_size(e)
        )

        sizes_listbox_vbar = ttk.Scrollbar(
            master=container,
            orient=VERTICAL,
            command=sizes_listbox.yview,
            bootstyle="round",
        )
        sizes_listbox.configure(yscrollcommand=sizes_listbox_vbar.set)
        sizes_listbox.pack(side=LEFT, fill=Y, expand=YES, anchor=N)
        sizes_listbox_vbar.pack(side=LEFT, fill=Y, expand=YES)

    def _font_options_selectors(self, master, padding: int):
        container = ttk.Frame(master, padding=padding)
        container.pack(fill=X, padx=2, pady=2, anchor=N)

        weight_lframe = ttk.Labelframe(
            container, text=MessageCatalog.translate("Weight"), padding=5
        )
        weight_lframe.pack(side=LEFT, fill=X, expand=YES)
        opt_normal = ttk.Radiobutton(
            master=weight_lframe,
            text=MessageCatalog.translate("normal"),
            value="normal",
            variable=self._weight,
        )
        opt_normal.invoke()
        opt_normal.pack(side=LEFT, padx=5, pady=5)
        opt_bold = ttk.Radiobutton(
            master=weight_lframe,
            text=MessageCatalog.translate("bold"),
            value="bold",
            variable=self._weight,
        )
        opt_bold.pack(side=LEFT, padx=5, pady=5)

        slant_lframe = ttk.Labelframe(
            container, text=MessageCatalog.translate("Slant"), padding=5
        )
        slant_lframe.pack(side=LEFT, fill=X, padx=10, expand=YES)
        opt_roman = ttk.Radiobutton(
            master=slant_lframe,
            text=MessageCatalog.translate("roman"),
            value="roman",
            variable=self._slant,
        )
        opt_roman.invoke()
        opt_roman.pack(side=LEFT, padx=5, pady=5)
        opt_italic = ttk.Radiobutton(
            master=slant_lframe,
            text=MessageCatalog.translate("italic"),
            value="italic",
            variable=self._slant,
        )
        opt_italic.pack(side=LEFT, padx=5, pady=5)

        effects_lframe = ttk.Labelframe(
            container, text=MessageCatalog.translate("Effects"), padding=5
        )
        effects_lframe.pack(side=LEFT, padx=(2, 0), fill=X, expand=YES)
        opt_underline = ttk.Checkbutton(
            master=effects_lframe,
            text=MessageCatalog.translate("underline"),
            variable=self._underline,
        )
        opt_underline.pack(side=LEFT, padx=5, pady=5)
        opt_overstrike = ttk.Checkbutton(
            master=effects_lframe,
            text=MessageCatalog.translate("overstrike"),
            variable=self._overstrike,
        )
        opt_overstrike.pack(side=LEFT, padx=5, pady=5)

    def _font_preview(self, master, padding: int):
        container = ttk.Frame(master, padding=padding)
        container.pack(fill=BOTH, expand=YES, anchor=N)

        header = ttk.Label(
            container,
            text=MessageCatalog.translate("Preview"),
            font="TkHeadingFont",
        )
        header.pack(fill=X, pady=2, anchor=N)

        content = MessageCatalog.translate(
            "The quick brown fox jumps over the lazy dog."
        )
        self._preview_text = ttk.Text(
            master=container,
            height=3,
            font=self._preview_font,
            highlightbackground=self._style.colors.primary,
        )
        self._preview_text.insert(END, content)
        self._preview_text.pack(fill=BOTH, expand=YES)
        container.pack_propagate(False)

    def _on_select_font_family(self, e):
        tree: ttk.Treeview = self._toplevel.nametowidget(e.widget)
        fontfamily = tree.selection()[0]
        self._family.set(value=fontfamily)
        self._update_font_preview()

    def _on_select_font_size(self, e):
        tree: ttk.Treeview = self._toplevel.nametowidget(e.widget)
        fontsize = tree.selection()[0]
        self._size.set(value=fontsize)
        self._update_font_preview()

    def _on_submit(self) -> font.Font:
        self._toplevel.destroy()
        return self.result

    def _on_cancel(self):
        self._toplevel.destroy()

    def _update_font_preview(self, *_):
        family = self._family.get()
        size = self._size.get()
        slant = self._slant.get()
        overstrike = self._overstrike.get()
        underline = self._underline.get()

        self._preview_font.config(
            family=family,
            size=size,
            slant=slant,
            overstrike=overstrike,
            underline=underline,
        )
        try:
            self._preview_text.configure(font=self._preview_font)
        except Exception:
            pass
        self._result = self._preview_font


class Messagebox:
    """This class contains various static methods that show popups with
    a message to the end user with various arrangements of buttons and
    alert options."""

    @staticmethod
    def show_info(message, title=" ", parent=None, **kwargs):
        """Display a modal dialog box with an OK button and an INFO
        icon.

        ![](../../assets/dialogs/messagebox-show-info.png)

        Parameters:

            message (str):
                A message to display in the message box.

            title (str):
                The string displayed as the title of the messagebox.
                This option is ignored on Mac OS X, where platform
                guidelines forbid the use of a title on this kind of
                dialog.

            parent (Union[Window, Toplevel]):
                Makes the window the logical parent of the message box.
                The message box is displayed on top of its parent
                window.

            **kwargs (Dict):
                Other optional keyword arguments.
        """
        sd = MessageDialog(
            message=message,
            title=title,
            parent=parent,
            buttons=["OK:primary"],
            icon=Icon.info,
            localize=True,
            **kwargs,
        )
        sd.show()

    @staticmethod
    def show_warning(message, title=" ", parent=None, **kwargs):
        """Display a modal dialog box with an OK button and a warning
        icon. Also will ring the display bell.

        ![](../../assets/dialogs/messagebox-show-warning.png)

        Parameters:

            message (str):
                A message to display in the message box.

            title (str):
                The string displayed as the title of the messagebox.
                This option is ignored on Mac OS X, where platform
                guidelines forbid the use of a title on this kind of
                dialog.

            parent (Union[Window, Toplevel]):
                Makes the window the logical parent of the message box.
                The message box is displayed on top of its parent
                window.

            **kwargs (Dict):
                Other optional keyword arguments.
        """
        sd = MessageDialog(
            message=message,
            title=title,
            parent=parent,
            buttons=["OK:primary"],
            icon=Icon.warning,
            alert=True,
            localize=True,
            **kwargs,
        )
        sd.show()

    @staticmethod
    def show_error(message, title=" ", parent=None, **kwargs):
        """Display a modal dialog box with an OK button and an error
        icon. Also will ring the display bell.

        ![](../../assets/dialogs/messagebox-show-error.png)

        Parameters:

            message (str):
                A message to display in the message box.

            title (str):
                The string displayed as the title of the messagebox.
                This option is ignored on Mac OS X, where platform
                guidelines forbid the use of a title on this kind of
                dialog.

            parent (Union[Window, Toplevel]):
                Makes the window the logical parent of the message box.
                The message box is displayed on top of its parent
                window.

            **kwargs (Dict):
                Other optional keyword arguments.
        """
        sd = MessageDialog(
            message=message,
            title=title,
            parent=parent,
            buttons=["OK:primary"],
            icon=Icon.error,
            alert=True,
            localize=True,
            **kwargs,
        )
        sd.show()

    @staticmethod
    def show_question(
        message,
        title=" ",
        parent=None,
        buttons=["No:secondary", "Yes:primary"],
        **kwargs,
    ):
        """Display a modal dialog box with yes, no buttons and a
        question icon. Also will ring the display bell. You may also
        change the button scheme using the `buttons` parameter.

        ![](../../assets/dialogs/messagebox-show-question.png)

        Parameters:

            message (str):
                A message to display in the message box.

            title (str):
                The string displayed as the title of the messagebox.
                This option is ignored on Mac OS X, where platform
                guidelines forbid the use of a title on this kind of
                dialog.

            parent (Union[Window, Toplevel]):
                Makes the window the logical parent of the message box.
                The message box is displayed on top of its parent
                window.

            buttons (List[str]):
                A list of buttons to appear at the bottom of the popup
                messagebox. The buttons can be a list of strings which
                will define the symbolic name and the button text.
                `['OK', 'Cancel']`. Alternatively, you can assign a
                bootstyle to each button by using the colon to separate
                the button text and the bootstyle. If no colon is
                found, then the style is set to 'primary' by default.
                `['Yes:success', 'No:danger']`.

            **kwargs (Dict):
                Other optional keyword arguments.

        Returns:

            Union[str, None]:
                The symbolic name of the button pressed, or None if the
                window is closed without pressing a button.
        """
        sd = MessageDialog(
            message=message,
            title=title,
            parent=parent,
            buttons=buttons,
            icon=Icon.question,
            alert=True,
            localize=True,
            **kwargs,
        )
        sd.show()
        return sd.result

    @staticmethod
    def ok(message, title=" ", alert=False, parent=None, **kwargs):
        """Display a modal dialog box with an OK button and an optional
        bell alert.

        ![](../../assets/dialogs/messagebox-ok.png)

        Parameters:

            message (str):
                A message to display in the message box.

            title (str):
                The string displayed as the title of the messagebox.
                This option is ignored on Mac OS X, where platform
                guidelines forbid the use of a title on this kind of
                dialog.

            alert (bool):
                Specifies whether to ring the display bell.

            parent (Union[Window, Toplevel]):
                Makes the window the logical parent of the message box.
                The message box is displayed on top of its parent
                window.

            **kwargs (Dict):
                Other optional keyword arguments.
        """
        sd = MessageDialog(
            title=title,
            message=message,
            parent=parent,
            alert=alert,
            buttons=["OK:primary"],
            localize=True,
            **kwargs,
        )
        sd.show()

    @staticmethod
    def okcancel(message, title=" ", alert=False, parent=None, **kwargs):
        """Displays a modal dialog box with OK and Cancel buttons and
        return the symbolic name of the button pressed.

        ![](../../assets/dialogs/messagebox-ok-cancel.png)

        Parameters:

            message (str):
                A message to display in the message box.

            title (str):
                The string displayed as the title of the messagebox.
                This option is ignored on Mac OS X, where platform
                guidelines forbid the use of a title on this kind of
                dialog.

            alert (bool):
                Specifies whether to ring the display bell.

            parent (Union[Window, Toplevel]):
                Makes the window the logical parent of the message box.
                The message box is displayed on top of its parent
                window.

            **kwargs (Dict):
                Other optional keyword arguments.

        Returns:

            Union[str, None]:
                The symbolic name of the button pressed, or None if the
                window is closed without pressing a button.
        """
        sd = MessageDialog(
            title=title,
            message=message,
            parent=parent,
            alert=alert,
            localize=True,
            **kwargs,
        )
        sd.show()
        return sd.result

    @staticmethod
    def yesno(message, title=" ", alert=False, parent=None, **kwargs):
        """Display a modal dialog box with YES and NO buttons and
        return the symbolic name of the button pressed.

        ![](../../assets/dialogs/messagebox-yes-no.png)

        Parameters:

            message (str):
                A message to display in the message box.

            title (str):
                The string displayed as the title of the messagebox.
                This option is ignored on Mac OS X, where platform
                guidelines forbid the use of a title on this kind of
                dialog.

            alert (bool):
                Specifies whether to ring the display bell.

            parent (Union[Window, Toplevel]):
                Makes the window the logical parent of the message box.
                The message box is displayed on top of its parent
                window.

            **kwargs (Dict):
                Other optional keyword arguments.

        Returns:

            Union[str, None]:
                The symbolic name of the button pressed, or None if the
                window is closed without pressing a button.
        """
        sd = MessageDialog(
            title=title,
            message=message,
            parent=parent,
            buttons=["No", "Yes:primary"],
            alert=alert,
            localize=True,
            **kwargs,
        )
        sd.show()
        return sd.result

    @staticmethod
    def yesnocancel(message, title=" ", alert=False, parent=None, **kwargs):
        """Display a modal dialog box with YES, NO, and Cancel buttons,
        and return the symbolic name of the button pressed.

        ![](../../assets/dialogs/messagebox-yes-no-cancel.png)

        Parameters:

            message (str):
                A message to display in the message box.

            title (str):
                The string displayed as the title of the messagebox.
                This option is ignored on Mac OS X, where platform
                guidelines forbid the use of a title on this kind of
                dialog.

            alert (bool):
                Specifies whether to ring the display bell.

            parent (Union[Window, Toplevel]):
                Makes the window the logical parent of the message box.
                The message box is displayed on top of its parent
                window.

            **kwargs (Dict):
                Optional keyword arguments.

        Returns:

            Union[str, None]:
                The symbolic name of the button pressed, or None if the
                window is closed without pressing a button.
        """
        sd = MessageDialog(
            title=title,
            message=message,
            parent=parent,
            alert=alert,
            buttons=["Cancel", "No", "Yes:primary"],
            localize=True,
            **kwargs,
        )
        sd.show()
        return sd.result

    @staticmethod
    def retrycancel(message, title=" ", alert=False, parent=None, **kwargs):
        """Display a modal dialog box with RETRY and Cancel buttons;
        returns the symbolic name of the button pressed.

        ![](../../assets/dialogs/messagebox-retry-cancel.png)

        Parameters:

            message (str):
                A message to display in the message box.

            title (str):
                The string displayed as the title of the messagebox.
                This option is ignored on Mac OS X, where platform
                guidelines forbid the use of a title on this kind of
                dialog.

            alert (bool):
                Specifies whether to ring the display bell.

            parent (Union[Window, Toplevel]):
                Makes the window the logical parent of the message box.
                The message box is displayed on top of its parent
                window.

            **kwargs (Dict):
                Other optional keyword arguments.

        Returns:

            Union[str, None]:
                The symbolic name of the button pressed, or None if the
                window is closed without pressing a button.
        """
        sd = MessageDialog(
            title=title,
            message=message,
            parent=parent,
            alert=alert,
            buttons=["Cancel", "Retry:primary"],
            localize=True,
            **kwargs,
        )
        sd.show()
        return sd.result
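
# Sketch (illustrative, not part of the original module): branching on
# the symbolic button name that `Messagebox.yesno` returns. Because the
# dialogs are created with `localize=True`, the result is compared
# against the translated label rather than a hard-coded "Yes".
def _demo_confirm_exit(parent=None):
    answer = Messagebox.yesno("Save changes before exiting?", parent=parent)
    return answer == MessageCatalog.translate("Yes")
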

class Querybox:
    """This class contains various static methods that request data
    from the end user."""

    @staticmethod
    def get_color(
        parent=None,
        title="Color Chooser",
        initialcolor=None,
    ):
        """Show a color picker and return the selected color when the
        user presses OK.

        ![](../../assets/dialogs/querybox-get-color.png)

        Parameters:

            parent (Widget):
                The parent widget.

            title (str):
                Optional text that appears on the titlebar.

            initialcolor (str):
                The initial color to display in the 'Current' color
                frame.

        Returns:

            Tuple[rgb, hsl, hex]:
                The selected color in various color models.
        """
        from ttkbootstrap.dialogs.colorchooser import ColorChooserDialog

        cd = ColorChooserDialog(parent, title, initialcolor)
        cd.show()
        return cd.result

    @staticmethod
    def get_date(
        parent=None,
        title=" ",
        firstweekday=6,
        startdate=None,
        bootstyle="primary",
    ):
        """Shows a calendar popup and returns the selection.

        ![](../../assets/dialogs/querybox-get-date.png)

        Parameters:

            parent (Widget):
                The parent widget; the popup will appear to the
                bottom-right of the parent widget. If no parent is
                provided, the widget is centered on the screen.

            title (str):
                The text that appears on the popup titlebar.

            firstweekday (int):
                Specifies the first day of the week. `0` is Monday,
                `6` is Sunday (the default).

            startdate (datetime):
                The date to be in focus when the widget is displayed.

            bootstyle (str):
                The following colors can be used to change the color of
                the title and hover / pressed color -> primary,
                secondary, info, warning, success, danger, light, dark.

        Returns:

            datetime:
                The date selected; the current date if no date is
                selected.
        """
        chooser = DatePickerDialog(
            parent=parent,
            title=title,
            firstweekday=firstweekday,
            startdate=startdate,
            bootstyle=bootstyle,
        )
        return chooser.date_selected

    @staticmethod
    def get_string(
        prompt="", title=" ", initialvalue=None, parent=None, **kwargs
    ):
        """Request a string type input from the user.

        ![](../../assets/dialogs/querybox-get-string.png)

        Parameters:

            prompt (str):
                A message to display in the message box above the entry
                widget.

            title (str):
                The string displayed as the title of the message box.
                This option is ignored on Mac OS X, where platform
                guidelines forbid the use of a title on this kind of
                dialog.

            initialvalue (Any):
                The initial value in the entry widget.

            parent (Widget):
                Makes the window the logical parent of the message box.
                The messagebox is displayed on top of its parent
                window.

            **kwargs (Dict):
                Other optional keyword arguments.

        Returns:

            str:
                The string value of the entry widget.
        """
        initialvalue = initialvalue or ""
        dialog = QueryDialog(
            prompt, title, initialvalue, parent=parent, **kwargs
        )
        dialog.show()
        return dialog._result

    @staticmethod
    def get_integer(
        prompt="",
        title=" ",
        initialvalue=None,
        minvalue=None,
        maxvalue=None,
        parent=None,
        **kwargs,
    ):
        """Request an integer type input from the user.

        ![](../../assets/dialogs/querybox-get-integer.png)

        Parameters:

            prompt (str):
                A message to display in the message box above the entry
                widget.

            title (str):
                The string displayed as the title of the message box.
                This option is ignored on Mac OS X, where platform
                guidelines forbid the use of a title on this kind of
                dialog.

            initialvalue (int):
                The initial value in the entry widget.

            minvalue (int):
                The minimum allowed value.

            maxvalue (int):
                The maximum allowed value.

            parent (Widget):
                Makes the window the logical parent of the message box.
                The messagebox is displayed on top of its parent
                window.

            **kwargs (Dict):
                Other optional keyword arguments.

        Returns:

            int:
                The integer value of the entry widget.
        """
        initialvalue = initialvalue or ""
        dialog = QueryDialog(
            prompt,
            title,
            initialvalue,
            minvalue,
            maxvalue,
            datatype=int,
            parent=parent,
            **kwargs,
        )
        dialog.show()
        return dialog._result

    @staticmethod
    def get_float(
        prompt="",
        title=" ",
        initialvalue=None,
        minvalue=None,
        maxvalue=None,
        parent=None,
        **kwargs,
    ):
        """Request a float type input from the user.

        ![](../../assets/dialogs/querybox-get-float.png)

        Parameters:

            prompt (str):
                A message to display in the message box above the entry
                widget.

            title (str):
                The string displayed as the title of the message box.
                This option is ignored on Mac OS X, where platform
                guidelines forbid the use of a title on this kind of
                dialog.

            initialvalue (float):
                The initial value in the entry widget.

            minvalue (float):
                The minimum allowed value.

            maxvalue (float):
                The maximum allowed value.

            parent (Widget):
                Makes the window the logical parent of the message box.
                The messagebox is displayed on top of its parent
                window.

            **kwargs (Dict):
                Other optional keyword arguments.

        Returns:

            float:
                The float value of the entry widget.
""" initialvalue = initialvalue or "" dialog = QueryDialog( prompt, title, initialvalue, minvalue, maxvalue, datatype=float, parent=parent, **kwargs, ) dialog.show() return dialog._result @staticmethod def get_font(parent=None, **kwargs): """Request a customized font ![](../../assets/dialogs/querybox-get-font.png) Parameters: parent (Widget): Makes the window the logical parent of the dialog box. The dialog is displayed on top of its parent window. **kwargs (Dict): Other keyword arguments. Returns: Font: A font object. """ dialog = FontDialog(parent=parent, **kwargs) dialog.show() return dialog.result
venky4121994/ga-learner-dsmp-repo
Google-Play-Store-App-Rating/code.py
1bef03489931eece0d5ecb9ce0501dfeb558dc59
# --------------
# Importing header files
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns

# Code starts here
data = pd.read_csv(path)
data.hist(['Rating'])
data = data[data['Rating'] <= 5]
data.hist(['Rating'])
# Code ends here


# --------------
# Code starts here
total_null = data.isnull().sum()
percent_null = (total_null / data.isnull().count())
missing_data = pd.concat([total_null, percent_null], keys=['Total', 'Percent'], axis=1)
print(missing_data)
data.dropna(inplace=True)
total_null_1 = data.isnull().sum()
percent_null_1 = (total_null_1 / data.isnull().count())
missing_data_1 = pd.concat([total_null_1, percent_null_1], keys=['Total', 'Percent'], axis=1)
print(missing_data_1)
# Code ends here


# --------------
# Code starts here
plt.figure(figsize=(10, 20))
catplot = sns.catplot(x="Category", y="Rating", data=data, kind="box", height=10)
catplot.set_xticklabels(rotation=90)
plt.title('Rating vs Category [BoxPlot]', size=20)
# Code ends here


# --------------
# Importing header files
from sklearn.preprocessing import MinMaxScaler, LabelEncoder

# Code starts here
print(data['Installs'])
data['Installs'] = data['Installs'].str.replace('+', '')
data['Installs'] = data['Installs'].str.replace(',', '')
data['Installs'] = data['Installs'].astype('int32')
le = LabelEncoder()
data['Installs'] = le.fit_transform(data['Installs'])
graph = sns.regplot(data['Installs'], data['Rating'], data=data)
graph.set_title('Rating vs Installs [Boxplot]')
plt.show()
# Code ends here


# --------------
# Code starts here
print(data['Price'].value_counts())
data['Price'] = data['Price'].str.replace('$', '')
data['Price'] = data['Price'].astype('float32')
graph2 = sns.regplot(data['Price'], data['Rating'], data=data)
graph2.set_title('Rating vs Price [RegPlot]')
# Code ends here


# --------------
# Code starts here
print(len(data['Genres'].unique()), "genres")
data['Genres'] = data['Genres'].str.split(';').str[0]
gr_mean = data[['Genres', 'Rating']].groupby(['Genres'], as_index=False).mean()
print(gr_mean.describe())
gr_mean = gr_mean.sort_values('Rating')
print(gr_mean.head(1))
# Code ends here


# --------------
# Code starts here
data['Last Updated'] = pd.to_datetime(data['Last Updated'])
data['Last Updated Days'] = (data['Last Updated'].max() - data['Last Updated']).dt.days
plt.figure(figsize=(10, 10))
sns.regplot(x="Last Updated Days", y="Rating", color='lightpink', data=data)
plt.title('Rating vs Last Updated [Regplot]', size=20)
# Code ends here
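

# --------------
# Sketch (added for illustration, not part of the graded solution): the
# Installs-cleaning steps above, checked on a toy frame. `regex=False`
# is passed explicitly here so '+' is treated literally regardless of
# the pandas version.
toy = pd.DataFrame({'Installs': ['1,000+', '500+']})
toy['Installs'] = (toy['Installs']
                   .str.replace('+', '', regex=False)
                   .str.replace(',', '', regex=False)
                   .astype('int32'))
print(toy['Installs'].tolist())  # -> [1000, 500]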
Banguiskode/nerds
converters/brat2iob.py
366420b2ec57bf790562de62a79f4973cbd6b3ed
import argparse
import operator
import os
import re
import shutil
import spacy
import tempfile

from nerds.utils import spans_to_tokens, get_logger


def segment_text_to_sentences(text_file, sentence_splitter):
    """ Segment text into sentences. Text is provided by BRAT in
        .txt file.

        Args:
            text_file (str): the full path to the BRAT .txt file.
            sentence_splitter (spacy LM): SpaCy EN language model.

        Returns:
            sentences (list((int, int, str))): list of sentence spans.
                Spans are triples of (start_offset, end_offset, text),
                where offset is relative to the text.
    """
    sentences = []
    ftext = open(text_file, "r")
    for line in ftext:
        splits = sentence_splitter(line.strip())
        for sent in splits.sents:
            sentences.append((sent.start_char, sent.end_char, sent.text))
    ftext.close()
    return sentences


def parse_text_annotations(ann_file):
    """ Parses BRAT annotations provided in the .ann file and converts
        them to annotation spans of (start_position, end_position,
        entity_class).

        Args:
            ann_file (str): full path to the BRAT .ann file.

        Returns:
            annotations (list((int, int, str))): list of annotation
                spans. Spans are triples of (start_offset, end_offset,
                entity_class) where offset is relative to the text.
    """
    annots = []
    fann = open(ann_file, "r")
    for line in fann:
        cols = re.split(r"\s+", line.strip())
        if not cols[0].startswith("T"):
            continue
        annots.append((int(cols[2]), int(cols[3]), cols[1]))
    fann.close()
    return annots


def apply_annotations(sentences, annotations, tokenizer):
    """ Apply annotation spans to the sentence spans to create a list
        of tokens and tags.

        Args:
            sentences (list((int, int, str))): list of sentence spans.
            annotations (list((int, int, str))): list of annotation spans.
            tokenizer (spacy LM): SpaCy EN language model.

        Returns:
            tokens_tags_list (list((list(str), list(str)))): list of
                lists of token-tag pairs. Each list of token-tag pairs
                corresponds to a single sentence.
    """
    tokens_tags_list = []
    for sent_start, sent_end, sent_text in sentences:
        sent_annots = [a for a in annotations
                       if a[0] >= sent_start and a[1] <= sent_end]
        # convert document offsets to sentence offsets
        sent_annots = [(s[0] - sent_start, s[1] - sent_start, s[2])
                       for s in sent_annots]
        tokens, tags = spans_to_tokens(sent_text, sent_annots, tokenizer)
        tokens_tags_list.append(zip(tokens, tags))
    return tokens_tags_list


def convert_brat_to_iob(input_dir, output_file, nlp):
    """ Convenience convertor function.

        Args:
            input_dir (str): the directory where the BRAT .txt and .ann
                files are located.
            output_file (str): the full path name of file to write
                output in IOB format to.
            nlp (SpaCy LM): reference to the SpaCy EN model.

        Returns:
            None.
    """
    fout = open(output_file, "w")
    for text_file in os.listdir(input_dir):
        # only process .txt and .ann pairs in specified directory
        if not text_file.endswith(".txt"):
            continue
        annot_file = text_file[:-4] + ".ann"
        if not os.path.exists(os.path.join(input_dir, annot_file)):
            # do not process file if no corresponding .ann file
            continue
        # process file pair
        logger.info("Processing file: {:s}".format(text_file))
        sentences = segment_text_to_sentences(
            os.path.join(input_dir, text_file), nlp)
        annotations = parse_text_annotations(
            os.path.join(input_dir, annot_file))
        tokens_tags_list = apply_annotations(sentences, annotations, nlp)
        for tokens_tags in tokens_tags_list:
            for token, tag in tokens_tags:
                fout.write("{:s}\t{:s}\n".format(token, tag))
            fout.write("\n")
    fout.close()


def do_self_test(nlp):
    """ Simple self-test with small dataset to prove that this works
        okay.
    """
    text = "Pierre Vinken, 61 years old, will join the board as a nonexecutive director, Nov. 29. Mr. Vinken is chairman of Elsevier N.V., the Dutch publishing group."
    annotations = [
        "T1 PER 0 13 Pierre Vinken",
        "T2 PER 86 96 Mr. Vinken",
        "T3 DATE 15 27 61 years old",
        "T4 DATE 77 84 Nov. 29",
        "T5 ORG 112 125 Elsevier N.V.",
        "T6 NORP 131 136 Dutch"
    ]
    input_dir = tempfile.mkdtemp(dir="/tmp")
    ftext = open(os.path.join(input_dir, "test.txt"), "w")
    ftext.write(text)
    ftext.close()
    fann = open(os.path.join(input_dir, "test.ann"), "w")
    for line in annotations:
        fann.write(line + "\n")
    fann.close()
    output_file = os.path.join(input_dir, "test.iob")
    convert_brat_to_iob(input_dir, output_file, nlp)
    fout = open(output_file, "r")
    for line in fout:
        logger.warn(line.strip())
    shutil.rmtree(input_dir)


################################ main ################################
#
# usage: brat2iob.py [-h] [-i INPUT_DIR] [-o OUTPUT_FILE] [-t]
#
# Script to convert BRAT annotations to IOB (NERDS) format.
#
# optional arguments:
#   -h, --help            show this help message and exit
#   -i INPUT_DIR, --input_dir INPUT_DIR
#                         Directory to store BRAT .txt and .ann files.
#   -o OUTPUT_FILE, --output_file OUTPUT_FILE
#                         Output file to write IOB output to.
#   -t, --test            Runs self test.
######################################################################

parser = argparse.ArgumentParser(
    description="Script to convert BRAT annotations to IOB (NERDS) format.")
parser.add_argument("-i", "--input_dir",
                    help="Directory to store BRAT .txt and .ann files.")
parser.add_argument("-o", "--output_file",
                    help="Output file to write IOB output to.")
parser.add_argument("-t", "--test", help="Runs self test.",
                    action="store_true")
args = parser.parse_args()

logger = get_logger()

input_dir = args.input_dir
output_file = args.output_file
self_test = args.test

nlp = spacy.load("en")

if self_test:
    logger.info("Executing self test...")
    do_self_test(nlp)
else:
    logger.info("Reading BRAT .txt and .ann files from: {:s}".format(input_dir))
    logger.info("Writing IOB tokens/tags to file: {:s}".format(output_file))
    convert_brat_to_iob(input_dir, output_file, nlp)
[((156, 9, 157, 76), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((163, 9, 163, 21), 'nerds.utils.get_logger', 'get_logger', ({}, {}), '()', False, 'from nerds.utils import spans_to_tokens, get_logger\n'), ((169, 6, 169, 22), 'spacy.load', 'spacy.load', ({(169, 17, 169, 21): '"""en"""'}, {}), "('en')", False, 'import spacy\n'), ((95, 21, 95, 42), 'os.listdir', 'os.listdir', ({(95, 32, 95, 41): 'input_dir'}, {}), '(input_dir)', False, 'import os\n'), ((127, 16, 127, 44), 'tempfile.mkdtemp', 'tempfile.mkdtemp', (), '', False, 'import tempfile\n'), ((135, 18, 135, 53), 'os.path.join', 'os.path.join', ({(135, 31, 135, 40): 'input_dir', (135, 42, 135, 52): '"""test.iob"""'}, {}), "(input_dir, 'test.iob')", False, 'import os\n'), ((140, 4, 140, 28), 'shutil.rmtree', 'shutil.rmtree', ({(140, 18, 140, 27): 'input_dir'}, {}), '(input_dir)', False, 'import shutil\n'), ((76, 23, 76, 73), 'nerds.utils.spans_to_tokens', 'spans_to_tokens', ({(76, 39, 76, 48): 'sent_text', (76, 50, 76, 61): 'sent_annots', (76, 63, 76, 72): 'tokenizer'}, {}), '(sent_text, sent_annots, tokenizer)', False, 'from nerds.utils import spans_to_tokens, get_logger\n'), ((128, 17, 128, 52), 'os.path.join', 'os.path.join', ({(128, 30, 128, 39): 'input_dir', (128, 41, 128, 51): '"""test.txt"""'}, {}), "(input_dir, 'test.txt')", False, 'import os\n'), ((131, 16, 131, 51), 'os.path.join', 'os.path.join', ({(131, 29, 131, 38): 'input_dir', (131, 40, 131, 50): '"""test.ann"""'}, {}), "(input_dir, 'test.ann')", False, 'import os\n'), ((105, 46, 105, 80), 'os.path.join', 'os.path.join', ({(105, 59, 105, 68): 'input_dir', (105, 70, 105, 79): 'text_file'}, {}), '(input_dir, text_file)', False, 'import os\n'), ((106, 45, 106, 80), 'os.path.join', 'os.path.join', ({(106, 58, 106, 67): 'input_dir', (106, 69, 106, 79): 'annot_file'}, {}), '(input_dir, annot_file)', False, 'import os\n'), ((100, 30, 100, 65), 'os.path.join', 'os.path.join', ({(100, 43, 100, 52): 'input_dir', (100, 54, 100, 64): 'annot_file'}, {}), '(input_dir, annot_file)', False, 'import os\n')]
zjsteyn/kraken
kraken/lib/util.py
eaa9f4290db5425ddf80d0aebfa3944713558ab5
""" Ocropus's magic PIL-numpy array conversion routines. They express slightly different behavior from PIL.Image.toarray(). """ import unicodedata import numpy as np from PIL import Image __all__ = ['pil2array', 'array2pil'] def pil2array(im: Image.Image, alpha: int = 0) -> np.array: if im.mode == '1': return np.array(im.convert('L')) return np.array(im) def array2pil(a: np.array) -> Image: if a.dtype == np.dtype("B"): if a.ndim == 2: return Image.frombytes("L", (a.shape[1], a.shape[0]), a.tostring()) elif a.ndim == 3: return Image.frombytes("RGB", (a.shape[1], a.shape[0]), a.tostring()) else: raise Exception("bad image rank") elif a.dtype == np.dtype('float32'): return Image.frombytes("F", (a.shape[1], a.shape[0]), a.tostring()) else: raise Exception("unknown image type") def is_bitonal(im: Image.Image) -> bool: """ Tests a PIL.Image for bitonality. Args: im (PIL.Image.Image): Image to test Returns: True if the image contains only two different color values. False otherwise. """ return im.getcolors(2) is not None and len(im.getcolors(2)) == 2 def get_im_str(im: Image.Image) -> str: return im.filename if hasattr(im, 'filename') else str(im) def is_printable(char: str) -> bool: """ Determines if a chode point is printable/visible when printed. Args: char (str): Input code point. Returns: True if printable, False otherwise. """ letters = ('LC', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu') numbers = ('Nd', 'Nl', 'No') punctuation = ('Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps') symbol = ('Sc', 'Sk', 'Sm', 'So') printable = letters + numbers + punctuation + symbol return unicodedata.category(char) in printable def make_printable(char: str) -> str: """ Takes a Unicode code point and return a printable representation of it. Args: char (str): Input code point Returns: Either the original code point, the name of the code point if it is a combining mark, whitespace etc., or the hex code if it is a control symbol. """ if not char or is_printable(char): return char elif unicodedata.category(char) in ('Cc', 'Cs', 'Co'): return '0x{:x}'.format(ord(char)) else: return unicodedata.name(char)
[((16, 11, 16, 23), 'numpy.array', 'np.array', ({(16, 20, 16, 22): 'im'}, {}), '(im)', True, 'import numpy as np\n'), ((20, 18, 20, 31), 'numpy.dtype', 'np.dtype', ({(20, 27, 20, 30): '"""B"""'}, {}), "('B')", True, 'import numpy as np\n'), ((69, 11, 69, 37), 'unicodedata.category', 'unicodedata.category', ({(69, 32, 69, 36): 'char'}, {}), '(char)', False, 'import unicodedata\n'), ((29, 20, 29, 39), 'numpy.dtype', 'np.dtype', ({(29, 29, 29, 38): '"""float32"""'}, {}), "('float32')", True, 'import numpy as np\n'), ((86, 9, 86, 35), 'unicodedata.category', 'unicodedata.category', ({(86, 30, 86, 34): 'char'}, {}), '(char)', False, 'import unicodedata\n'), ((89, 15, 89, 37), 'unicodedata.name', 'unicodedata.name', ({(89, 32, 89, 36): 'char'}, {}), '(char)', False, 'import unicodedata\n')]
hao44le/ico_top_holder_analysis
analysis/calculate_holding_amount.py
aeeab01c90e4446b424c52c33a68ccb814123121
import sys
sys.path.insert(0, '..')

from data.whale_data import exchnage_accounts
from data.html_helper import check_if_address_name_exists
from data.whale_eth_tx_data import *
from data.whale_token_tx_data import identify_investor_type_token

holding_account = "holding_account"
deposit_account = 'deposit_account'
withdraw_account = "withdraw_account"

in_type = "IN"
out_type = "OUT"

all_acc_types = dict()
for acc in exchnage_accounts:
    all_acc_types[acc] = exchange_type


def update_y_array(X, y, timestamp, amount):
    target_index = 0
    for i in range(len(X)):
        x_time = X[i]
        if timestamp < x_time:
            target_index = i
            break
    for i in range(target_index, len(y)):
        y[i] += amount
    return y


def perform_bfs_on_accounts(out_txs, top_holder_type, acc, m_type='OUT'):
    print("\t" + m_type)
    unique_out = set()
    for out in out_txs:
        unique_out.add(out[3])
    unique_out = list(unique_out)[:5]
    for out in unique_out:
        print("\t" + out)
        if out not in all_acc_types:
            investor_type = identify_investor_type(out)
            if investor_type == affliate_type:
                investor_type = identify_investor_type_token(out)
            print("\t\t{}".format(investor_type))
        else:
            investor_type = all_acc_types[out]
        if investor_type == exchange_type:
            top_holder_type[acc] = deposit_account if m_type == "OUT" else withdraw_account
        all_acc_types[out] = investor_type
    if acc not in top_holder_type:
        top_holder_type[acc] = holding_account
    return top_holder_type


def calculate_holding_amount(X, escape_accounts, txs):
    top_holder_type = dict()
    for acc in txs:
        tx = txs[acc]
        if acc in escape_accounts:
            continue

        # if the account has never sent tokens out, treat it as a holding
        # account and skip further classification
        out_txs = [item for item in tx if item[2] == 'OUT']
        if len(out_txs) == 0:
            print("\tholding account")
            top_holder_type[acc] = holding_account
            continue

    # build all trace Y: holding_amount, deposit_amount, withdraw_amount
    amount_trace_y = [0] * len(X)
    for holder in txs:
        if holder in escape_accounts:
            continue
        if holder not in top_holder_type:
            print("{} not identified! ".format(holder))
            continue

        holder_type = top_holder_type[holder]
        holder_txs = txs[holder]
        print("{} {}".format(holder, holder_type))
        for tx in holder_txs:
            [timestamp, from_a, tx_type, to_a, amount] = tx
            if holder_type == holding_account:
                if tx_type == in_type:
                    amount_trace_y = update_y_array(X, amount_trace_y, timestamp, amount)
                else:
                    amount_trace_y = update_y_array(X, amount_trace_y, timestamp, -amount)
    return amount_trace_y
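# --- Illustrative sketch (not part of the original analysis): update_y_array
# adds `amount` to every bucket starting at the first time bucket later than
# `timestamp`, so the cumulative balance curve shifts from that point on.
# Values here are hypothetical.
#
#     X = [100, 200, 300, 400]   # bucket timestamps
#     y = [0, 0, 0, 0]
#     y = update_y_array(X, y, timestamp=150, amount=10)
#     print(y)                   # [0, 10, 10, 10]; first bucket > 150 is index 1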
[((2, 0, 2, 23), 'sys.path.insert', 'sys.path.insert', ({(2, 16, 2, 17): '(0)', (2, 18, 2, 22): '""".."""'}, {}), "(0, '..')", False, 'import sys\n'), ((44, 32, 44, 65), 'data.whale_token_tx_data.identify_investor_type_token', 'identify_investor_type_token', ({(44, 61, 44, 64): 'out'}, {}), '(out)', False, 'from data.whale_token_tx_data import identify_investor_type_token\n')]
JBoRu/TextBox-1
textbox/trainer/trainer.py
0dcbaa153acc507e3d55075312d7ca5d23146e03
# @Time   : 2020/11/14
# @Author : Junyi Li, Gaole He
# @Email  : [email protected]

# UPDATE:
# @Time   : 2020/12/2, 2020/11/27, 2020/12/3, 2020/12/26
# @Author : Jinhao Jiang, Xiaoxuan Hu, Tianyi Tang, Jinhao Jiang
# @Email  : [email protected], [email protected], [email protected], [email protected]

r"""
textbox.trainer.trainer
################################
"""

import os

import torch
import torch.optim as optim
import numpy as np
import matplotlib.pyplot as plt
import copy
import math

from torch.utils.data import DataLoader
from time import time
from logging import getLogger

from textbox.module.Optimizer.optim import ScheduledOptim
from textbox.evaluator import NgramEvaluator, TranslationEvaluator, SummarizationEvaluator
from textbox.utils import ensure_dir, early_stopping


class AbstractTrainer(object):
    r"""Trainer Class is used to manage the training and evaluation processes of text generation system models.
    AbstractTrainer is an abstract class in which the fit() and evaluate() method should be implemented according
    to different training and evaluation strategies.
    """

    def __init__(self, config, model):
        self.config = config
        self.model = model

    def fit(self, train_data):
        r"""Train the model based on the train data.
        """
        raise NotImplementedError('Method [next] should be implemented.')

    def evaluate(self, eval_data):
        r"""Evaluate the model based on the eval data.
        """
        raise NotImplementedError('Method [next] should be implemented.')


class Trainer(AbstractTrainer):
    r"""The basic Trainer for basic training and evaluation strategies in text generation systems.
    This class defines common functions for training and evaluation processes of most text generation system models,
    including fit(), evaluate(), resume_checkpoint() and some other features helpful for model training and evaluation.

    Generally speaking, this class can serve most text generation system models, if the training process of the model
    is to simply optimize a single loss without involving any complex training strategies, such as adversarial learning,
    pre-training and so on.

    Initializing the Trainer needs two parameters: `config` and `model`. `config` records the parameters information
    for controlling training and evaluation, such as `learning_rate`, `epochs`, `eval_step` and so on.
    More information can be found in [placeholder]. `model` is the instantiated object of a Model Class.
    """

    def __init__(self, config, model):
        super(Trainer, self).__init__(config, model)

        self.logger = getLogger()
        self.learner = config['learner']
        self.learning_rate = config['learning_rate']
        self.epochs = config['epochs']
        self.eval_step = min(config['eval_step'], self.epochs)
        self.stopping_step = config['stopping_step']
        self.test_batch_size = config['eval_batch_size']
        self.device = config['device']
        self.embedding_size = config['embedding_size']
        self.warmup_steps = config['warmup_steps']
        self.checkpoint_dir = config['checkpoint_dir']
        ensure_dir(self.checkpoint_dir)
        saved_model_file = self.config['filename'] + '.pth'
        self.saved_model_file = os.path.join(self.checkpoint_dir, saved_model_file)

        self.generated_text_dir = config['generated_text_dir']
        ensure_dir(self.generated_text_dir)
        saved_text_file = self.config['filename'] + '.txt'
        self.saved_text_file = os.path.join(self.generated_text_dir, saved_text_file)

        self.start_epoch = 0
        self.cur_step = 0
        self.best_valid_score = 100000000
        self.best_valid_result = None
        self.train_loss_dict = dict()
        self.optimizer = self._build_optimizer()
        self.task_type = config['task_type'].lower()
        if self.task_type == "translation":
            self.evaluator = TranslationEvaluator(config)
        elif self.task_type == "summarization":
            self.evaluator = SummarizationEvaluator(config)
        else:
            self.evaluator = NgramEvaluator(config)

        self.item_tensor = None
        self.tot_item_num = None
        self.iid_field = config['ITEM_ID_FIELD']

    def _build_optimizer(self):
        r"""Init the Optimizer

        Returns:
            torch.optim: the optimizer
        """
        if self.learner.lower() == 'adam':
            optimizer = optim.Adam(self.model.parameters(), lr=self.learning_rate)
        elif self.learner.lower() == 'sgd':
            optimizer = optim.SGD(self.model.parameters(), lr=self.learning_rate)
        elif self.learner.lower() == 'adagrad':
            optimizer = optim.Adagrad(self.model.parameters(), lr=self.learning_rate)
        elif self.learner.lower() == 'rmsprop':
            optimizer = optim.RMSprop(self.model.parameters(), lr=self.learning_rate)
        elif self.learner.lower() == 'schedule':
            optimizer = ScheduledOptim(
                optim.Adam(self.model.parameters(), betas=(0.9, 0.98), eps=1e-09),
                self.learning_rate, self.embedding_size, self.warmup_steps)
        else:
            self.logger.warning('Received unrecognized optimizer, set default Adam optimizer')
            optimizer = optim.Adam(self.model.parameters(), lr=self.learning_rate)
        return optimizer

    def _train_epoch(self, train_data, epoch_idx):
        r"""Train the model in an epoch

        Args:
            train_data (DataLoader): the train data
            epoch_idx (int): the current epoch id

        Returns:
            float/tuple: The sum of loss returned by all batches in this epoch.
            If the loss in each batch contains multiple parts and the model return these multiple parts loss
            instead of the sum of loss, it will return a tuple which includes the sum of loss in each part.
        """
        self.model.train()
        total_loss = None

        for batch_idx, data in enumerate(train_data):
            self.optimizer.zero_grad()
            losses = self.model.calculate_loss(data, epoch_idx=epoch_idx)
            if isinstance(losses, tuple):
                loss = sum(losses)
                loss_tuple = tuple(per_loss.item() for per_loss in losses)
                total_loss = loss_tuple if total_loss is None else tuple(map(sum, zip(total_loss, loss_tuple)))
            else:
                loss = losses
                total_loss = losses.item() if total_loss is None else total_loss + losses.item()
            self._check_nan(loss)
            loss.backward()
            self.optimizer.step()
        train_loss = total_loss / len(train_data)
        return train_loss

    def _valid_epoch(self, valid_data):
        r"""Valid the model with valid data

        Args:
            valid_data (DataLoader): the valid data

        Returns:
            float: valid score
            dict: valid result
        """
        self.model.eval()
        total_loss = None

        for batch_idx, data in enumerate(valid_data):
            losses = self.model.calculate_loss(data)
            if isinstance(losses, tuple):
                loss = sum(losses)
                loss_tuple = tuple(per_loss.item() for per_loss in losses)
                total_loss = loss_tuple if total_loss is None else tuple(map(sum, zip(total_loss, loss_tuple)))
            else:
                loss = losses
                total_loss = losses.item() if total_loss is None else total_loss + losses.item()
            self._check_nan(loss)
        valid_loss = total_loss / len(valid_data)
        ppl = np.exp(valid_loss)
        return valid_loss, ppl

    def _save_checkpoint(self, epoch):
        r"""Store the model parameters information and training information.

        Args:
            epoch (int): the current epoch id
        """
        state = {
            'config': self.config,
            'epoch': epoch,
            'cur_step': self.cur_step,
            'best_valid_score': self.best_valid_score,
            'state_dict': self.model.state_dict(),
            'optimizer': self.optimizer.state_dict(),
        }
        torch.save(state, self.saved_model_file)

    def _save_generated_text(self, generated_corpus):
        r"""Store the generated text by our model.

        Args:
            corpus (list of string list):
        """
        with open(self.saved_text_file, 'w') as fin:
            for tokens in generated_corpus:
                fin.write(' '.join(tokens) + '\n')

    def resume_checkpoint(self, resume_file):
        r"""Load the model parameters information and training information.

        Args:
            resume_file (file): the checkpoint file
        """
        resume_file = str(resume_file)
        checkpoint = torch.load(resume_file)
        self.start_epoch = checkpoint['epoch'] + 1
        self.cur_step = checkpoint['cur_step']
        self.best_valid_score = checkpoint['best_valid_score']

        # load architecture params from checkpoint
        if checkpoint['config']['model'].lower() != self.config['model'].lower():
            self.logger.warning('Architecture configuration given in config file is different from that of checkpoint. '
                                'This may yield an exception while state_dict is being loaded.')
        self.model.load_state_dict(checkpoint['state_dict'])

        # load optimizer state from checkpoint only when optimizer type is not changed
        self.optimizer.load_state_dict(checkpoint['optimizer'])
        message_output = 'Checkpoint loaded. Resume training from epoch {}'.format(self.start_epoch)
        self.logger.info(message_output)

    def _check_nan(self, loss):
        if torch.isnan(loss):
            raise ValueError('Training loss is nan')

    def _generate_train_loss_output(self, epoch_idx, s_time, e_time, losses, train_info=""):
        train_loss_output = "epoch %d %straining [time: %.2fs, " % (epoch_idx, train_info, e_time - s_time)
        if isinstance(losses, tuple):
            for idx, loss in enumerate(losses):
                train_loss_output += 'train_loss%d: %.4f, ' % (idx + 1, loss)
            train_loss_output = train_loss_output[:-2]
        else:
            train_loss_output += "train loss: %.4f" % losses
        return train_loss_output + ']'

    def fit(self, train_data, valid_data=None, verbose=True, saved=True):
        r"""Train the model based on the train data and the valid data.

        Args:
            train_data (DataLoader): the train data
            valid_data (DataLoader, optional): the valid data, default: None.
                                               If it's None, the early_stopping is invalid.
            verbose (bool, optional): whether to write training and evaluation information to logger, default: True
            saved (bool, optional): whether to save the model parameters, default: True

        Returns:
            (float, dict): best valid score and best valid result. If valid_data is None, it returns (-1, None)
        """
        for epoch_idx in range(self.start_epoch, self.epochs):
            # train
            training_start_time = time()
            train_loss = self._train_epoch(train_data, epoch_idx)
            self.train_loss_dict[epoch_idx] = sum(train_loss) if isinstance(train_loss, tuple) else train_loss
            training_end_time = time()
            self._save_checkpoint(epoch_idx)
            train_loss_output = \
                self._generate_train_loss_output(epoch_idx, training_start_time, training_end_time, train_loss)
            if verbose:
                self.logger.info(train_loss_output)

            # eval
            if self.eval_step <= 0 or not valid_data:
                if saved:
                    self._save_checkpoint(epoch_idx)
                    update_output = 'Saving current: %s' % self.saved_model_file
                    if verbose:
                        self.logger.info(update_output)
                continue
            if (epoch_idx + 1) % self.eval_step == 0:
                valid_start_time = time()
                with torch.no_grad():
                    valid_score, valid_result = self._valid_epoch(valid_data)  # valid_loss, ppl
                self.best_valid_score, self.cur_step, stop_flag, update_flag = early_stopping(
                    valid_score, self.best_valid_score, self.cur_step,
                    max_step=self.stopping_step, bigger=False)
                # better models are supposed to provide smaller perplexity and loss
                valid_end_time = time()
                valid_score_output = "epoch %d evaluating [time: %.2fs, valid_loss: %f]" % \
                                     (epoch_idx, valid_end_time - valid_start_time, valid_score)
                valid_result_output = 'valid ppl: {}'.format(valid_result)
                if verbose:
                    self.logger.info(valid_score_output)
                    self.logger.info(valid_result_output)
                if update_flag:
                    if saved:
                        self._save_checkpoint(epoch_idx)
                        update_output = 'Saving current best: %s' % self.saved_model_file
                        if verbose:
                            self.logger.info(update_output)
                    self.best_valid_result = valid_result

                if stop_flag:
                    stop_output = 'Finished training, best eval result in epoch %d' % \
                                  (epoch_idx - self.cur_step * self.eval_step)
                    if verbose:
                        self.logger.info(stop_output)
                    break
        return self.best_valid_score, self.best_valid_result

    def _evaluate_nll_test(self, eval_data):
        r"""Calculate the negative log-likelihood of the eval_data.

        Args:
            eval_data (DataLoader): the eval data.

        Returns:
            Float: NLL_test of the eval data.
        """
        total_loss = 0
        for epoch_idx, eval_batch in enumerate(eval_data):
            nll_test = self.model.calculate_nll_test(eval_batch, epoch_idx)
            total_loss += float(nll_test)
        return total_loss / len(eval_data)

    @torch.no_grad()
    def evaluate(self, eval_data, load_best_model=True, model_file=None):
        r"""Evaluate the model based on the eval data.

        Args:
            eval_data (DataLoader): the eval data
            load_best_model (bool, optional): whether load the best model in the training process, default: True.
                                              It should be set True, if users want to test the model after training.
            model_file (str, optional): the saved model file, default: None. If users want to test the previously
                                        trained model file, they can set this parameter.

        Returns:
            dict: eval result, key is the eval metric and value in the corresponding metric value
        """
        if load_best_model:
            if model_file:
                checkpoint_file = model_file
            else:
                checkpoint_file = self.saved_model_file
            checkpoint = torch.load(checkpoint_file)
            self.model.load_state_dict(checkpoint['state_dict'])
            message_output = 'Loading model structure and parameters from {}'.format(checkpoint_file)
            self.logger.info(message_output)

        self.model.eval()

        with torch.no_grad():
            generate_corpus = self.model.generate(eval_data)
        self._save_generated_text(generate_corpus)
        reference_corpus = eval_data.get_reference()
        result = self.evaluator.evaluate(generate_corpus, reference_corpus)
        result['nll_test'] = self._evaluate_nll_test(eval_data)
        return result

    def plot_train_loss(self, show=True, save_path=None):
        r"""Plot the train loss in each epoch

        Args:
            show (bool, optional): whether to show this figure, default: True
            save_path (str, optional): the data path to save the figure, default: None.
                                       If it's None, it will not be saved.
        """
        epochs = list(self.train_loss_dict.keys())
        epochs.sort()
        values = [float(self.train_loss_dict[epoch]) for epoch in epochs]
        plt.plot(epochs, values)
        plt.xticks(epochs)
        plt.xlabel('Epoch')
        plt.ylabel('Loss')
        if show:
            plt.show()
        if save_path:
            plt.savefig(save_path)


class UnconditionalTrainer(Trainer):
    r"""UnconditionalTrainer is designed for RNN, which is a typical unconditional generator.
    """

    def __init__(self, config, model):
        super(UnconditionalTrainer, self).__init__(config, model)
""" def __init__(self, config, model): super(GANTrainer, self).__init__(config, model) self.optimizer = None self.g_optimizer = self._build_module_optimizer(self.model.generator) self.d_optimizer = self._build_module_optimizer(self.model.discriminator) self.grad_clip = config['grad_clip'] self.g_pretraining_epochs = config['g_pretraining_epochs'] self.d_pretraining_epochs = config['d_pretraining_epochs'] self.d_sample_num = config['d_sample_num'] self.d_sample_training_epochs = config['d_sample_training_epochs'] self.adversarail_training_epochs = config['adversarail_training_epochs'] self.adversarail_d_epochs = config['adversarail_d_epochs'] self.g_pretraining_loss_dict = dict() self.d_pretraining_loss_dict = dict() self.max_length = config['max_seq_length'] + 2 self.pad_idx = model.pad_idx def _build_module_optimizer(self, module): r"""Init the Module Optimizer Args: module (torch.nn.Mudule): Mudule class of torch.nn needed optimizer Returns: torch.optim: the optimizer """ if self.learner.lower() == 'adam': optimizer = optim.Adam(module.parameters(), lr=self.learning_rate) elif self.learner.lower() == 'sgd': optimizer = optim.SGD(module.parameters(), lr=self.learning_rate) elif self.learner.lower() == 'adagrad': optimizer = optim.Adagrad(module.parameters(), lr=self.learning_rate) elif self.learner.lower() == 'rmsprop': optimizer = optim.RMSprop(module.parameters(), lr=self.learning_rate) else: self.logger.warning('Received unrecognized optimizer, set default Adam optimizer') optimizer = optim.Adam(module.parameters(), lr=self.learning_rate) return optimizer def _optimize_step(self, losses, total_loss, model, opt): r"""The opt uses the cliped losses to conduct an optimize step to optimize model and sum up losses to the total_loss. Args: losses (torch.Tensor or tuple): The loss to be backward. total_loss (Float): Total loss in an epoch. model (torch.nn.Mudule): The model to be optimized. opt (torch.optim): The optimizer of the model. Returns: torch.Tensor or tuple: Total loss in an epoch, shape: []. """ if isinstance(losses, tuple): loss = sum(losses) loss_tuple = tuple(per_loss.item() for per_loss in losses) total_loss = loss_tuple if total_loss is None else tuple(map(sum, zip(total_loss, loss_tuple))) else: loss = losses total_loss = losses.item() if total_loss is None else total_loss + losses.item() self._check_nan(loss) opt.zero_grad() loss.backward() torch.nn.utils.clip_grad_norm_(model.parameters(), self.grad_clip) opt.step() return total_loss def _save_checkpoint(self, epoch): state = { 'config': self.config, 'epoch': epoch, 'cur_step': self.cur_step, 'best_valid_score': self.best_valid_score, 'state_dict': self.model.state_dict() } torch.save(state, self.saved_model_file) def _add_pad(self, data): r"""Pad the data to the max length of corpus. Args: data (torch.Tensor): The data to be padded, shape: [batch_size, max_batch_length]. Returns: torch.Tensor: The padded data, shape: [batch_size, max_seq_length]. """ batch_size = data.shape[0] padded_data = torch.full((batch_size, self.max_length), self.pad_idx, dtype=torch.long, device=self.device) padded_data[:, : data.shape[1]] = data return padded_data def _get_real_data(self, train_data): r"""Get the target text index of the corpus train_datas. Args: train_data (DataLoader): the train data. Returns: torch.Tensor: The target text index, shape: [batch_size, max_batch_length]. 
""" real_datas = [] for corpus in train_data: real_data = corpus['target_idx'] real_data = self._add_pad(real_data) real_datas.append(real_data) real_datas = torch.cat(real_datas, dim=0) return real_datas def _g_train_epoch(self, train_data, epoch_idx): r"""Train the generator module in an epoch Args: train_data (DataLoader): the train data epoch_idx (int): the current epoch id Returns: float/tuple: The sum of loss returned by all batches in this epoch. If the loss in each batch contains multiple parts and the model return these multiple parts loss instead of the sum of loss, It will return a tuple which includes the sum of loss in each part. """ self.model.generator.train() total_loss = None for batch_idx, data in enumerate(train_data): losses = self.model.calculate_g_train_loss(data, epoch_idx=epoch_idx) total_loss = self._optimize_step(losses, total_loss, self.model.generator, self.g_optimizer) total_loss = [l / len(train_data) for l in total_loss] if isinstance(total_loss, tuple) else total_loss / len( train_data) total_loss = tuple(total_loss) if isinstance(total_loss, list) else total_loss return total_loss def _d_train_epoch(self, train_data, epoch_idx): r"""Train the discriminator module in an epoch Args: train_data (DataLoader): the train data epoch_idx (int): the current epoch id Returns: float/tuple: The sum of loss returned by all batches in this epoch. If the loss in each batch contains multiple parts and the model return these multiple parts loss instead of the sum of loss, It will return a tuple which includes the sum of loss in each part. """ self.model.discriminator.train() total_loss = None real_data = self._get_real_data(train_data) real_dataloader = DataLoader(real_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True) fake_data = self.model.sample(self.d_sample_num) fake_dataloader = DataLoader(fake_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True) for _ in range(self.d_sample_training_epochs): # d_epoch for real_data, fake_data in zip(real_dataloader, fake_dataloader): losses = self.model.calculate_d_train_loss(real_data, fake_data, epoch_idx=epoch_idx) total_loss = self._optimize_step(losses, total_loss, self.model.discriminator, self.d_optimizer) return total_loss / min(len(real_dataloader), len(fake_dataloader)) / self.d_sample_training_epochs def _adversarial_train_epoch(self, train_data, epoch_idx): r"""Adversarial training in an epoch Args: train_data (DataLoader): the train data epoch_idx (int): the current epoch id Returns: float/tuple: The sum of loss returned by all batches in this epoch. If the loss in each batch contains multiple parts and the model return these multiple parts loss instead of the sum of loss, It will return a tuple which includes the sum of loss in each part. 
""" self.model.generator.train() total_loss = None losses = self.model.calculate_g_adversarial_loss(epoch_idx=epoch_idx) total_loss = self._optimize_step(losses, total_loss, self.model.generator, self.g_optimizer) for epoch_idx in range(self.adversarail_d_epochs): self._d_train_epoch(train_data, epoch_idx=epoch_idx) return total_loss def fit(self, train_data, valid_data=None, verbose=True, saved=True): # generator pretraining if verbose: self.logger.info("Start generator pretraining...") for epoch_idx in range(self.g_pretraining_epochs): training_start_time = time() train_loss = self._g_train_epoch(train_data, epoch_idx) self.g_pretraining_loss_dict[epoch_idx] = sum(train_loss) if isinstance(train_loss, tuple) else train_loss training_end_time = time() train_loss_output = \ self._generate_train_loss_output(epoch_idx, training_start_time, training_end_time, train_loss, "generator pre") if verbose: self.logger.info(train_loss_output) if verbose: self.logger.info("End generator pretraining...") # discriminator pretraining if verbose: self.logger.info("Start discriminator pretraining...") for epoch_idx in range(self.d_pretraining_epochs): training_start_time = time() train_loss = self._d_train_epoch(train_data, epoch_idx) self.d_pretraining_loss_dict[epoch_idx] = sum(train_loss) if isinstance(train_loss, tuple) else train_loss training_end_time = time() train_loss_output = \ self._generate_train_loss_output(epoch_idx, training_start_time, training_end_time, train_loss, "discriminator pre") if verbose: self.logger.info(train_loss_output) if verbose: self.logger.info("End discriminator pretraining...") # adversarial training if verbose: self.logger.info("Start adversarial training...") for epoch_idx in range(self.adversarail_training_epochs): training_start_time = time() train_loss = self._adversarial_train_epoch(train_data, epoch_idx) self.train_loss_dict[epoch_idx] = sum(train_loss) if isinstance(train_loss, tuple) else train_loss training_end_time = time() train_loss_output = \ self._generate_train_loss_output(epoch_idx, training_start_time, training_end_time, train_loss) if verbose: self.logger.info(train_loss_output) if verbose: self.logger.info("End adversarial pretraining...") self._save_checkpoint(self.adversarail_training_epochs) return -1, None class TextGANTrainer(GANTrainer): r"""TextGANTrainer is designed for TextGAN. 
""" def __init__(self, config, model): super(TextGANTrainer, self).__init__(config, model) self.adversarail_g_epochs = config['adversarail_g_epochs'] def _d_train_epoch(self, train_data, epoch_idx): self.model.discriminator.train() total_loss = None real_data = self._get_real_data(train_data) real_dataloader = DataLoader(real_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True) for _ in range(self.d_sample_training_epochs): for idx, real_data in enumerate(real_dataloader): fake_data, z = self.model.sample() losses = self.model.calculate_d_train_loss(real_data, fake_data, z, epoch_idx=epoch_idx) total_loss = self._optimize_step(losses, total_loss, self.model.discriminator, self.d_optimizer) if (idx * self.model.batch_size >= self.d_sample_num): break return total_loss / min(len(real_dataloader), self.d_sample_num // self.model.batch_size) / self.d_sample_training_epochs def _adversarial_train_epoch(self, train_data, epoch_idx): self.model.generator.train() total_loss = None real_data = self._get_real_data(train_data) real_dataloader = DataLoader(real_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True) for idx, real_data in enumerate(real_dataloader): if (idx == self.adversarail_g_epochs): break losses = self.model.calculate_g_adversarial_loss(real_data, epoch_idx=epoch_idx) total_loss = self._optimize_step(losses, total_loss, self.model.generator, self.g_optimizer) for epoch_idx in range(self.adversarail_d_epochs): self._d_train_epoch(train_data, epoch_idx=epoch_idx) return total_loss / min(len(real_dataloader), self.adversarail_g_epochs) class RankGANTrainer(GANTrainer): r"""RankGANTrainer is designed for RankGAN. """ def __init__(self, config, model): super(RankGANTrainer, self).__init__(config, model) def _d_train_epoch(self, train_data, epoch_idx): r"""Train the discriminator module in an epoch Args: train_data (DataLoader): the train data epoch_idx (int): the current epoch id Returns: float/tuple: The sum of loss returned by all batches in this epoch. If the loss in each batch contains multiple parts and the model return these multiple parts loss instead of the sum of loss, It will return a tuple which includes the sum of loss in each part. """ self.model.discriminator.train() total_loss = None real_data = self._get_real_data(train_data) real_dataloader = DataLoader(real_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True) fake_data = self.model.sample(self.d_sample_num) fake_dataloader = DataLoader(fake_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True) ref_index = np.random.randint(0, real_data.shape[0], size=self.model.ref_size) ref_data = real_data[ref_index] # ref_size * l for _ in range(self.d_sample_training_epochs): for real_data, fake_data in zip(real_dataloader, fake_dataloader): losses = self.model.calculate_d_train_loss(real_data, fake_data, ref_data, epoch_idx=epoch_idx) total_loss = self._optimize_step(losses, total_loss, self.model.discriminator, self.d_optimizer) return total_loss / min(len(real_dataloader), len(fake_dataloader)) / self.d_sample_training_epochs def _adversarial_train_epoch(self, train_data, epoch_idx): r"""Adversarial training in an epoch Args: train_data (DataLoader): the train data epoch_idx (int): the current epoch id Returns: float/tuple: The sum of loss returned by all batches in this epoch. 
If the loss in each batch contains multiple parts and the model return these multiple parts loss instead of the sum of loss, It will return a tuple which includes the sum of loss in each part. """ self.model.generator.train() total_loss = None real_data = self._get_real_data(train_data) ref_index = np.random.randint(0, real_data.shape[0], size=self.model.ref_size) ref_data = real_data[ref_index] # ref_size * l losses = self.model.calculate_g_adversarial_loss(ref_data, epoch_idx=epoch_idx) total_loss = self._optimize_step(losses, total_loss, self.model.generator, self.g_optimizer) d_loss = 0 for epoch_idx in range(self.adversarail_d_epochs): d_loss += self._d_train_epoch(train_data, epoch_idx=epoch_idx) d_loss = d_loss / self.adversarail_d_epochs return total_loss class ConditionalTrainer(Trainer): r"""ConditionalTrainer is designed for seq2seq testing, which is a typically used setting. """ def __init__(self, config, model): super(ConditionalTrainer, self).__init__(config, model) @torch.no_grad() def evaluate(self, eval_data, load_best_model=True, model_file=None): r"""Evaluate the model based on the eval data. Args: eval_data (DataLoader): the eval data load_best_model (bool, optional): whether load the best model in the training process, default: True. It should be set True, if users want to test the model after training. model_file (str, optional): the saved model file, default: None. If users want to test the previously trained model file, they can set this parameter. Returns: dict: eval result, key is the eval metric and value in the corresponding metric value """ if load_best_model: if model_file: checkpoint_file = model_file else: checkpoint_file = self.saved_model_file checkpoint = torch.load(checkpoint_file) self.model.load_state_dict(checkpoint['state_dict']) message_output = 'Loading model structure and parameters from {}'.format(checkpoint_file) self.logger.info(message_output) self.model.eval() generate_corpus = self.model.generate(eval_data) self._save_generated_text(generate_corpus) reference_corpus = eval_data.get_reference() result = self.evaluator.evaluate(generate_corpus, reference_corpus) return result class MaskGANTrainer(GANTrainer): r""" Trainer specifically designed for MaskGAN training process. 
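# --- Illustrative sketch (not part of TextBox): how RankGANTrainer draws its
# reference set. np.random.randint samples row indices with replacement, and
# fancy indexing gathers those rows into a [ref_size, seq_len] tensor.
# Shapes here are hypothetical.
#
#     import numpy as np
#     import torch
#     real_data = torch.arange(20).reshape(10, 2)                # 10 sequences
#     ref_index = np.random.randint(0, real_data.shape[0], size=4)  # e.g. [7 0 3 3]
#     ref_data = real_data[ref_index]
#     print(ref_data.shape)  # torch.Size([4, 2])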
""" def __init__(self, config, model): super(MaskGANTrainer, self).__init__(config, model) self.max_length = config["max_seq_length"] self.eos_token_idx = model.eos_idx self.adversarail_c_epochs = config['adversarail_c_epochs'] self.g_mask_pretraining_epochs = config['g_mask_pretraining_epochs'] self.g_lr = config['gen_learning_rate'] self.d_lr = config['dis_learning_rate'] self.c_lr = config['critic_learning_rate'] self.g_optimizer = self._build_module_optimizer_(self.model.generator, self.g_lr) self.d_optimizer = self._build_module_optimizer_(self.model.discriminator, self.d_lr) self.c_optimizer = self._build_module_optimizer_(self.model.discriminator.critic_fc_linear, self.c_lr) self.pre_lm_weight = config["pre_lm_weight"] self.pretrain_lm_epochs = config["pretrain_lm_epochs"] self.checkp = config['checkp'] def _build_module_optimizer_(self, module, lr): r""" Init the Module Optimizer with specified learning rate Returns: torch.optim: the optimizer """ if self.learner.lower() == 'adam': optimizer = optim.Adam(module.parameters(), lr) elif self.learner.lower() == 'sgd': optimizer = optim.SGD(module.parameters(), lr) elif self.learner.lower() == 'adagrad': optimizer = optim.Adagrad(module.parameters(), lr) elif self.learner.lower() == 'rmsprop': optimizer = optim.RMSprop(module.parameters(), lr) else: self.logger.warning('Received unrecognized optimizer, set default Adam optimizer') optimizer = optim.Adam(module.parameters(), lr) return optimizer def _optimize_step(self, losses, total_loss, model, opt, retain_graph=False): r""" Add retain_graph option """ if isinstance(losses, tuple): loss = sum(losses) loss_tuple = tuple(per_loss.item() for per_loss in losses) total_loss = loss_tuple if total_loss is None else tuple(map(sum, zip(total_loss, loss_tuple))) else: loss = losses total_loss = losses.item() if total_loss is None else total_loss + losses.item() self._check_nan(loss) opt.zero_grad() loss.backward(retain_graph=retain_graph) torch.nn.utils.clip_grad_norm_(model.parameters(), self.grad_clip) opt.step() return total_loss def _generate_train_loss_output(self, epoch_idx, s_time, e_time, losses, train_info=""): r""" Specified for maskgan output """ train_loss_output = "%straining [time: %.2fs, " % (train_info, e_time - s_time) if isinstance(losses, dict): for key, loss in losses.items(): train_loss_output += '%s: %.4f, ' % (key, loss) train_loss_output = train_loss_output[:-2] else: train_loss_output += "train loss: %.4f" % losses return train_loss_output + ']' def pretrain_lm(self, train_data, valid_data, verbose): r""" Pretrain rnn-based Language Model with teacher forcing mechanism """ def lm_forward(data): r""" One iteration of LM forward """ input = data[:, :-1] # bs * self.max_len - 1 target = data[:, 1:] bs, seq_len = target.size() lengths = torch.tensor([seq_len] * bs) target_present = torch.ones_like(input).byte() device = target.device lengths = lengths.cuda(device) # pretaining encoder_outputs = pre_train_lm(input, lengths, target, target_present, pretrain=True) logit = pre_train_lm.vocab_linear(encoder_outputs) logit = logit.permute([0, 2, 1]) lossf = torch.nn.CrossEntropyLoss() loss = lossf(logit, target) return loss pre_train_lm = self.model.generator lm_opt = self._build_module_optimizer_(pre_train_lm, lr=0.001) for epoch in range(self.pretrain_lm_epochs): total_loss = None real_data = self._get_real_data(train_data) # bs * self.max_len real_dataloader = DataLoader(real_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True) for batch_idx, data in 
enumerate(real_dataloader): loss = lm_forward(data) total_loss = self._optimize_step(loss, total_loss, pre_train_lm, lm_opt) total_loss = total_loss / len(real_dataloader) if verbose: self.logger.info("Epoch {}/{} of LM pretraining loss: {} ".format(epoch+1, self.pretrain_lm_epochs, total_loss)) ppl = 0.0 if (epoch+1) % 1 == 0: pre_train_lm.eval() validate_data = self._get_real_data(valid_data) # bs * self.max_len validate_dataloader = DataLoader(validate_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True) ppl = 0.0 for batch_idx, data in enumerate(validate_dataloader): cross_entropy_loss = lm_forward(data) ppl += math.exp(cross_entropy_loss.item()) ppl = ppl / len(validate_dataloader) pre_train_lm.train() if verbose: self.logger.info("Epoch {}/{} of LM pretraining PPL: {}...".format(epoch + 1, self.pretrain_lm_epochs, ppl)) if ppl < 110: state_dict = { 'embedder': pre_train_lm.embedder, 'encoder': pre_train_lm.encoder.encoder, 'vocab_linear': pre_train_lm.vocab_linear } self.pre_lm_weight = "saved/pretrain_lm_weight" + str(epoch+1) + ".pkl" torch.save(state_dict, self.pre_lm_weight) if verbose: self.logger.info("End LM pretraining. PPL: {}".format(ppl)) self.logger.info("Weigth saved in {}".format(self.pre_lm_weight)) return pre_train_lm, ppl def _g_train_epoch(self, train_data, epoch_idx): self.model.generator.train() total_loss = None real_data = self._get_real_data(train_data) # bs * self.max_len real_dataloader = DataLoader(real_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True) for batch_idx, data in enumerate(real_dataloader): loss = self.model.calculate_g_train_loss(data, epoch_idx=epoch_idx) total_loss = self._optimize_step(loss, total_loss, self.model.generator, self.g_optimizer) total_loss = total_loss / len(real_dataloader) return total_loss def _get_validate_ppl(self, validate_data, epoch_idx): self.model.generator.eval() ppl = 0.0 validate_data = self._get_real_data(validate_data) # bs * self.max_len validate_dataloader = DataLoader(validate_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True) for batch_idx, data in enumerate(validate_dataloader): loss = self.model.calculate_g_train_loss(data, epoch_idx=epoch_idx, validate=True) ppl += math.exp(loss.item()) ppl = ppl / len(validate_dataloader) self.model.generator.train() return ppl def _d_train_epoch(self, train_data, epoch_idx): self.model.discriminator.train() total_loss = None real_data = self._get_real_data(train_data) real_dataloader = DataLoader(real_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True) for batch_idx, data in enumerate(real_dataloader): losses = self.model.calculate_d_train_loss(data, epoch_idx=epoch_idx) total_loss = self._optimize_step(losses, total_loss, self.model.discriminator, self.d_optimizer) return total_loss / len(real_dataloader) def _adversarial_train_epoch(self, train_data, epoch_idx): r""" Specified for MaskGAN adversarial training """ dis_total_loss = None gen_total_loss = None critic_total_loss = None g_num = 0.0 d_num = 0.0 real_data = self._get_real_data(train_data) real_dataloader = DataLoader(real_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True) dis_train_data = copy.deepcopy(real_dataloader) gen_train_data = copy.deepcopy(real_dataloader) c_train_data = copy.deepcopy(real_dataloader) dis_train_data = iter(dis_train_data) gen_train_data = iter(gen_train_data) _ = next(dis_train_data) # have one offset for g_x in gen_train_data: g_num += 1 for _ in range(3): d_num += 1 try: d_x = 
next(dis_train_data) except StopIteration: del dis_train_data dis_train_data = copy.deepcopy(real_dataloader) dis_train_data = iter(dis_train_data) d_x = next(dis_train_data) losses = self.model.calculate_d_train_loss(d_x, epoch_idx=_) dis_total_loss = self._optimize_step(losses, dis_total_loss, self.model.discriminator, self.d_optimizer) gen_losses, critic_losses = self.model.calculate_g_adversarial_loss(g_x, epoch_idx=g_num) gen_total_loss = self._optimize_step(gen_losses, gen_total_loss, self.model.generator, self.g_optimizer) critic_total_loss = self._optimize_step(critic_losses, critic_total_loss, self.model.discriminator.critic_fc_linear, self.c_optimizer) return {"dis_loss": dis_total_loss / d_num, "gen_loss": gen_total_loss / g_num, "critic_loss": critic_total_loss / g_num} def _evaluate_nll_test(self, eval_data): total_loss = 0 real_data = self._get_real_data(eval_data) real_dataloader = DataLoader(real_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True) for batch_idx, data in enumerate(real_dataloader): nll_test = self.model.calculate_nll_test(data, batch_idx) total_loss += float(nll_test) return total_loss / len(eval_data) def _add_eos(self, data, length): batch_size, pad_seq_len = data.size() padded_data = torch.full((batch_size, self.max_length), self.eos_token_idx, dtype=torch.long, device=self.device) for i in range(batch_size): l = int(length[i].cpu().data) if l == self.max_length+2: padded_data[i, :] = data[i, 1:l-1] else: padded_data[i, 0:l-1] = data[i, 1:l] return padded_data def _get_real_data(self, train_data): real_datas = [] for corpus in train_data: real_data = corpus['target_idx'] # bs*batch_max_seq_len length = corpus['target_length'] real_data = self._add_eos(real_data, length) real_datas.append(real_data) real_datas = torch.cat(real_datas, dim=0) return real_datas def _save_checkpoint(self, epoch, postfix=None): state = { 'config': self.config, 'epoch': epoch, 'cur_step': self.cur_step, 'best_valid_score': self.best_valid_score, 'state_dict': self.model.state_dict(), 'g_opt': self.g_optimizer.state_dict(), 'd_opt': self.d_optimizer.state_dict(), 'c_opt':self.c_optimizer.state_dict() } if postfix is not None: path = self.saved_model_file + "_" + str(epoch) + "_" + postfix torch.save(state, path) return path else: torch.save(state, self.saved_model_file) def _load_generated_text(self): r""" Load the generated text by our model to log. 
""" with open(self.saved_text_file, 'r') as fin: samples = [] for i in range(5): text = fin.readline() samples.append(text) return samples def fit(self, train_data, valid_data=None, verbose=True, saved=True): # generator pretraining if self.checkp is not None: checkpoint = torch.load(self.checkp) self.model.load_state_dict(checkpoint['state_dict']) self.d_optimizer.load_state_dict(checkpoint["d_opt"]) self.g_optimizer.load_state_dict(checkpoint["g_opt"]) epoch_check = checkpoint['epoch'] if verbose: self.logger.info("Load checkpoint file from: {}".format(self.checkp)) else: if self.pre_lm_weight is None: if verbose: self.logger.info("Start LM pretraining...") pretrain_lm, ppl = self.pretrain_lm(train_data, valid_data, verbose) pretrain_lm = torch.load(self.pre_lm_weight) embedder = pretrain_lm['embedder'].state_dict() lstm = pretrain_lm['encoder'].state_dict() vocab_linear = pretrain_lm['vocab_linear'].state_dict() self.model.generator.embedder.load_state_dict(embedder) self.model.generator.encoder.encoder.load_state_dict(lstm) self.model.generator.decoder.decoder.load_state_dict(lstm) self.model.generator.vocab_linear.load_state_dict(vocab_linear) self.model.discriminator.encoder.encoder.load_state_dict(lstm) self.model.discriminator.decoder.decoder.load_state_dict(lstm) if verbose: self.logger.info("Load pretrained LM weight") else: pretrain_lm = torch.load(self.pre_lm_weight) embedder = pretrain_lm['embedder'].state_dict() lstm = pretrain_lm['encoder'].state_dict() vocab_linear = pretrain_lm['vocab_linear'].state_dict() self.model.generator.embedder.load_state_dict(embedder) self.model.generator.encoder.encoder.load_state_dict(lstm) self.model.generator.decoder.decoder.load_state_dict(lstm) self.model.generator.vocab_linear.load_state_dict(vocab_linear) self.model.discriminator.encoder.encoder.load_state_dict(lstm) self.model.discriminator.decoder.decoder.load_state_dict(lstm) if verbose: self.logger.info("Load pretrained LM weight from: {}".format(self.pre_lm_weight)) if verbose: self.logger.info("Start generator mask pretraining...") for epoch_idx in range(self.g_mask_pretraining_epochs): training_start_time = time() train_loss = self._g_train_epoch(train_data, epoch_idx) self.g_pretraining_loss_dict[epoch_idx] = sum(train_loss) if isinstance(train_loss, tuple) else train_loss training_end_time = time() train_loss_output = \ self._generate_train_loss_output(epoch_idx, training_start_time, training_end_time, train_loss, "generator pre") if verbose: self.logger.info(train_loss_output) ppl = self._get_validate_ppl(valid_data, epoch_idx) if verbose: self.logger.info( "Epoch {}/{} of mask pretraining PPL: {}...".format(epoch_idx + 1, self.g_mask_pretraining_epochs, ppl)) if ppl <= 90: if verbose: path = self._save_checkpoint(epoch_idx + 1, postfix="pretrain_gen") self.logger.info(">>>> [Pretrain Gen] PPL: {} save weight in {}".format(ppl, path)) self.logger.info("End generator mask pretraining...") break if (epoch_idx) % 10 == 0: self.logger.info(">>>> [Pretrain Gen] Save pretrain gen check in epoch %d ..." 
% (epoch_idx + 1)) path = self._save_checkpoint(epoch_idx + 1, postfix="pretrain_gen") self.model.eval() test_result = self.evaluate(valid_data, model_file=path) self.model.train() sample = self._load_generated_text() tmp = "\n" for i, s in enumerate(sample): tmp += str(i) tmp += ": " tmp += s.strip() tmp += "\n" self.logger.info('>>>> [Pretrain Gen] test result: {}'.format(test_result)) self.logger.info('>>>> [Pretrain Gen] test result samples: {}'.format(tmp)) # discriminator pretraining if verbose: self.logger.info("Start discriminator pretraining...") for epoch_idx in range(self.d_pretraining_epochs): training_start_time = time() train_loss = self._d_train_epoch(train_data, epoch_idx) self.d_pretraining_loss_dict[epoch_idx] = sum(train_loss) if isinstance(train_loss, tuple) else train_loss training_end_time = time() train_loss_output = \ self._generate_train_loss_output(epoch_idx, training_start_time, training_end_time, train_loss, "discriminator pre") if verbose: self.logger.info(train_loss_output) if verbose: self.logger.info("End discriminator pretraining...") # adversarial training if verbose: self.logger.info("Start adversarial training...") for epoch_idx in range(self.adversarail_training_epochs): training_start_time = time() train_loss = self._adversarial_train_epoch(train_data, epoch_idx) self.train_loss_dict[epoch_idx] = sum(train_loss) if isinstance(train_loss, tuple) else train_loss training_end_time = time() train_loss_output = \ self._generate_train_loss_output(epoch_idx, training_start_time, training_end_time, train_loss) if verbose: self.logger.info(train_loss_output) if (epoch_idx+1) % 10 == 0: path = self._save_checkpoint((epoch_idx + 1), postfix="adv_train") self.model.eval() test_result = self.evaluate(valid_data, model_file=path) self.model.train() sample = self._load_generated_text() tmp = "\n" for i, s in enumerate(sample): tmp += str(i) tmp += ": " tmp += s.strip() tmp += "\n" self.logger.info('>>>>>> [Adv] test result: {}'.format(test_result)) self.logger.info('>>>>>> [Adv] test result samples: {}'.format(tmp)) if verbose: self.logger.info("End adversarial pretraining...") self._save_checkpoint(self.adversarail_training_epochs) return -1, None class LeakGANTrainer(GANTrainer): r"""Specified for leakgan trainer """ def __init__(self, config, model): super(LeakGANTrainer, self).__init__(config, model) self.interleaved_pretrain_epoch = config['interleaved_pretrain_epoch'] self.adversarail_g_epochs = config['adversarail_g_epochs'] gen_lr = config['generator_lr'] # 0.001 dis_lr = config['discriminator_lr'] # 0.00005 self.g_optimizer = self._build_module_optimizer_(self.model.generator, gen_lr) # (manager_opt, worker_opt) self.d_optimizer = self._build_module_optimizer_(self.model.discriminator, dis_lr) self.iters_num = config['iter_num'] self.end_idx = model.end_idx def _build_module_optimizer_(self, module, learing_rate): r"""Specified for leakgan """ multi_flag = False if module._get_name() == 'LeakGANGenerator': manager_params, worker_params = module.split_params() multi_flag = True if self.learner.lower() == 'adam': if multi_flag: manager_opt = optim.Adam(manager_params, lr=learing_rate) worker_opt = optim.Adam(worker_params, lr=learing_rate) else: optimizer = optim.Adam(module.parameters(), lr=learing_rate) elif self.learner.lower() == 'sgd': if multi_flag: manager_opt = optim.SGD(manager_params, lr=learing_rate) worker_opt = optim.SGD(worker_params, lr=learing_rate) else: optimizer = optim.SGD(module.parameters(), lr=learing_rate) elif self.learner.lower() == 
'adagrad':
            if multi_flag:
                manager_opt = optim.Adagrad(manager_params, lr=learing_rate)
                worker_opt = optim.Adagrad(worker_params, lr=learing_rate)
            else:
                optimizer = optim.Adagrad(module.parameters(), lr=learing_rate)
        elif self.learner.lower() == 'rmsprop':
            if multi_flag:
                manager_opt = optim.RMSprop(manager_params, lr=learing_rate)
                worker_opt = optim.RMSprop(worker_params, lr=learing_rate)
            else:
                optimizer = optim.RMSprop(module.parameters(), lr=learing_rate)
        else:
            self.logger.warning('Received unrecognized optimizer, set default Adam optimizer')
            if multi_flag:
                manager_opt = optim.Adam(manager_params, lr=learing_rate)
                worker_opt = optim.Adam(worker_params, lr=learing_rate)
            else:
                optimizer = optim.Adam(module.parameters(), lr=learing_rate)

        if multi_flag:
            return (manager_opt, worker_opt)
        else:
            return optimizer

    def _optimize_step(self, losses, total_loss, model, opt):
        r"""Specified for leakgan optimize
        """
        if isinstance(losses, tuple):
            loss = sum(losses)
            loss_tuple = tuple(per_loss.item() for per_loss in losses)
            total_loss = loss_tuple if total_loss is None else tuple(map(sum, zip(total_loss, loss_tuple)))
        else:
            loss = losses
            total_loss = losses.item() if total_loss is None else total_loss + losses.item()
        self._check_nan(loss)

        if isinstance(losses, tuple):
            # Back-propagate each loss with its own optimizer; every backward
            # pass except the last one retains the graph so later passes can
            # still traverse it.
            for i, (o, loss) in enumerate(zip(opt, losses)):
                o.zero_grad()
                loss.backward(retain_graph=True if i < len(opt) - 1 else False)
                torch.nn.utils.clip_grad_norm_(model.parameters(), self.grad_clip)
                o.step()
        else:
            opt.zero_grad()
            losses.backward()
            torch.nn.utils.clip_grad_norm_(model.parameters(), self.grad_clip)
            opt.step()

        return total_loss

    def _generate_train_loss_output(self, epoch_idx, s_time, e_time, losses, train_info=""):
        r"""Specified for leakgan output format
        """
        train_loss_output = "%straining [time: %.2fs, " % (train_info, e_time - s_time)
        if isinstance(losses, dict):
            for key, loss in losses.items():
                train_loss_output += '%s: %.4f, ' % (key, loss)
            train_loss_output = train_loss_output[:-2]
        else:
            train_loss_output += "train loss: %.4f" % losses
        return train_loss_output + ']'

    def _add_eos(self, data, length):
        batch_size = data.shape[0]
        padded_data = torch.full((batch_size, self.max_length), self.end_idx, dtype=torch.long, device=self.device)
        for i in range(batch_size):
            data_len = length[i].cpu().data  # avoid shadowing the built-in len()
            padded_data[i, :data_len] = data[i, :data_len]
        return padded_data

    def _get_real_data(self, train_data):
        r"""Specified for leakgan, which uses eos_idx padding, not pad_idx
        """
        real_datas = []
        for corpus in train_data:
            real_data = corpus['target_idx']
            length = corpus['target_length']
            real_data = self._add_eos(real_data, length)
            real_datas.append(real_data)

        real_datas = torch.cat(real_datas, dim=0)
        return real_datas

    def _adversarial_train_epoch(self, train_data, epoch_idx):
        r"""Specified for leakgan adversarial training
        """
        self.model.generator.train()
        total_g_loss = None
        total_d_loss = 0
        total_d_acc = 0
        adv_mana_loss = 0
        adv_work_loss = 0
        adv_d_loss = 0

        for e in range(self.adversarail_g_epochs):
            losses = self.model.calculate_g_adversarial_loss(epoch_idx=e)
            total_g_loss = self._optimize_step(losses, total_g_loss, self.model.generator, self.g_optimizer)
        adv_mana_loss, adv_work_loss = total_g_loss
        adv_mana_loss = adv_mana_loss / self.adversarail_g_epochs
        adv_work_loss = adv_work_loss / self.adversarail_g_epochs

        for e in range(self.adversarail_d_epochs):
            loss_dict = self._d_train_epoch(train_data, epoch_idx=epoch_idx)
            total_d_loss = total_d_loss + loss_dict['total_loss']
            total_d_acc = total_d_acc + loss_dict['train_acc']
        adv_d_loss = total_d_loss / self.adversarail_d_epochs
        adv_c_loss = total_d_acc / self.adversarail_d_epochs
        return {"mana_loss": adv_mana_loss, "work_loss": adv_work_loss, "dis_loss": adv_d_loss, "train_acc": adv_c_loss}

    def _g_train_epoch(self, train_data, epoch_idx):
        total_loss = None
        real_data = self._get_real_data(train_data)
        real_dataloader = DataLoader(real_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True)
        for batch_idx, data in enumerate(real_dataloader):
            # interaction = interaction.to(self.device)
            losses = self.model.calculate_g_train_loss(data, epoch_idx=epoch_idx)
            total_loss = self._optimize_step(losses, total_loss, self.model.generator, self.g_optimizer)
        total_loss = [l / len(real_dataloader) for l in total_loss] if isinstance(total_loss, tuple) \
            else total_loss / len(train_data)
        mana_loss, work_loss = total_loss
        return {"mana_loss": mana_loss, "work_loss": work_loss}

    def _d_train_epoch(self, train_data, epoch_idx):
        total_loss = None
        total_acc = 0
        real_data = self._get_real_data(train_data)
        real_dataloader = DataLoader(real_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True)
        # No need to sample self.d_sample_num sequences, because the
        # discriminator is only trained on self.d_sample_training_epochs batches.
        d_sample_num = (self.d_sample_training_epochs + 1) * self.model.batch_size
        fake_data = self.model.sample(d_sample_num)
        fake_dataloader = DataLoader(fake_data, batch_size=self.model.batch_size, shuffle=True, drop_last=True)

        idx = 0
        for real_data, fake_data in zip(real_dataloader, fake_dataloader):
            # self.model.discriminator.eval()  # pretraining does not use dropout
            if idx == self.d_sample_training_epochs:
                break
            losses, acc = self.model.calculate_d_train_loss(real_data, fake_data, epoch_idx=epoch_idx)
            total_loss = self._optimize_step(losses, total_loss, self.model.discriminator, self.d_optimizer)
            total_acc = total_acc + acc
            idx += 1

        total_loss = total_loss / self.d_sample_training_epochs
        total_acc = total_acc / self.d_sample_training_epochs
        return {"total_loss": total_loss, "train_acc": total_acc}

    def fit(self, train_data, valid_data=None, verbose=True, saved=True):
        # pretraining
        if verbose:
            self.logger.info(">> Start pretraining")
        # generator pretraining
        for epoch_idx in range(self.g_pretraining_epochs):  # 80
            if verbose:
                self.logger.info(">>>> [Pretrain Gen] Start %d / %d epochs generator pretraining" % (
                    epoch_idx + 1, self.g_pretraining_epochs))
            training_start_time = time()
            train_loss = self._g_train_epoch(train_data, epoch_idx)
            training_end_time = time()
            train_loss_output = \
                self._generate_train_loss_output(epoch_idx + 1, training_start_time, training_end_time, train_loss,
                                                 "generator pre")
            train_loss_output = ">>>> " + train_loss_output
            if verbose:
                self.logger.info(train_loss_output)

        # discriminator pretraining
        for epoch_idx in range(self.d_pretraining_epochs):  # 5
            if verbose:
                self.logger.info(">>>> [Pretrain Dis] Start %d / %d epochs discriminator pretraining..." % (
                    epoch_idx + 1, self.d_pretraining_epochs))
            training_start_time = time()
            train_loss = self._d_train_epoch(train_data, epoch_idx)
            training_end_time = time()
            train_loss_output = \
                self._generate_train_loss_output(epoch_idx, training_start_time, training_end_time, train_loss,
                                                 "discriminator pre")
            train_loss_output = ">>>> " + train_loss_output
            if verbose:
                self.logger.info(train_loss_output)

        if verbose:
            self.logger.info(">> End pretraining")

        # adversarial training
        if verbose:
            self.logger.info(">> Start adversarial training")
        for epoch in range(int(self.iters_num / self.adversarail_training_epochs)):
            if verbose:
                self.logger.info(">>>> [Adv] Start epoch %d / 10 interleaved adversarial training" % (epoch + 1))
            for epoch_idx in range(self.adversarail_training_epochs):
                if verbose:
                    self.logger.info(">>>>>> [Adv] Start epoch %d / %d adversarial training" % (
                        epoch_idx + 1, self.adversarail_training_epochs))
                training_start_time = time()
                train_loss = self._adversarial_train_epoch(train_data, epoch_idx)
                # self.train_loss_dict[epoch_idx] = sum(train_loss) if isinstance(train_loss, tuple) else train_loss
                training_end_time = time()
                train_loss_output = \
                    self._generate_train_loss_output((epoch_idx + 1), training_start_time, training_end_time,
                                                     train_loss, train_info="adv ")
                train_loss_output = ">>>>>> " + train_loss_output
                if verbose:
                    self.logger.info(train_loss_output)

            # gen pretrain
            for epoch_idx in range(5):
                if verbose:
                    self.logger.info(">>>>>> [Adv] Start epoch %d / 5 pretrain generator" % (epoch_idx + 1))
                training_start_time = time()
                train_loss = self._g_train_epoch(train_data, epoch_idx)
                training_end_time = time()
                train_loss_output = \
                    self._generate_train_loss_output((epoch_idx + 1), training_start_time, training_end_time,
                                                     train_loss, "adv generator pre")
                train_loss_output = ">>>>>> " + train_loss_output
                if verbose:
                    self.logger.info(train_loss_output)

            # dis pretrain
            for epoch_idx in range(5):  # d_steps
                if verbose:
                    self.logger.info(">>>>>> [Adv] Start epoch %d / 5 pretrain discriminator" % (epoch_idx + 1))
                training_start_time = time()
                train_loss = self._d_train_epoch(train_data, epoch_idx)
                training_end_time = time()
                train_loss_output = \
                    self._generate_train_loss_output((epoch_idx + 1), training_start_time, training_end_time,
                                                     train_loss, "adv discriminator pre")
                train_loss_output = ">>>>>> " + train_loss_output
                if verbose:
                    self.logger.info(train_loss_output)

        self._save_checkpoint(self.adversarail_training_epochs)
        return -1, None
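# Illustrative only (not part of the original file): a tiny, self-contained
# sketch of what _add_eos above produces, using made-up values end_idx=2 and
# max_length=5. Each sequence is copied into a buffer pre-filled with the EOS
# index, so every position past a sequence's length reads as EOS, not PAD.
#
#     import torch
#     data = torch.tensor([[5, 6, 7, 0], [8, 9, 0, 0]])
#     length = torch.tensor([3, 2])
#     padded = torch.full((2, 5), 2, dtype=torch.long)
#     for i in range(2):
#         n = int(length[i])
#         padded[i, :n] = data[i, :n]
#     # padded == [[5, 6, 7, 2, 2],
#     #            [8, 9, 2, 2, 2]]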
[((334, 5, 334, 20), 'torch.no_grad', 'torch.no_grad', ({}, {}), '()', False, 'import torch\n'), ((751, 5, 751, 20), 'torch.no_grad', 'torch.no_grad', ({}, {}), '()', False, 'import torch\n'), ((72, 22, 72, 33), 'logging.getLogger', 'getLogger', ({}, {}), '()', False, 'from logging import getLogger\n'), ((83, 8, 83, 39), 'textbox.utils.ensure_dir', 'ensure_dir', ({(83, 19, 83, 38): 'self.checkpoint_dir'}, {}), '(self.checkpoint_dir)', False, 'from textbox.utils import ensure_dir, early_stopping\n'), ((85, 32, 85, 83), 'os.path.join', 'os.path.join', ({(85, 45, 85, 64): 'self.checkpoint_dir', (85, 66, 85, 82): 'saved_model_file'}, {}), '(self.checkpoint_dir, saved_model_file)', False, 'import os\n'), ((88, 8, 88, 43), 'textbox.utils.ensure_dir', 'ensure_dir', ({(88, 19, 88, 42): 'self.generated_text_dir'}, {}), '(self.generated_text_dir)', False, 'from textbox.utils import ensure_dir, early_stopping\n'), ((90, 31, 90, 85), 'os.path.join', 'os.path.join', ({(90, 44, 90, 67): 'self.generated_text_dir', (90, 69, 90, 84): 'saved_text_file'}, {}), '(self.generated_text_dir, saved_text_file)', False, 'import os\n'), ((185, 14, 185, 32), 'numpy.exp', 'np.exp', ({(185, 21, 185, 31): 'valid_loss'}, {}), '(valid_loss)', True, 'import numpy as np\n'), ((203, 8, 203, 48), 'torch.save', 'torch.save', ({(203, 19, 203, 24): 'state', (203, 26, 203, 47): 'self.saved_model_file'}, {}), '(state, self.saved_model_file)', False, 'import torch\n'), ((223, 21, 223, 44), 'torch.load', 'torch.load', ({(223, 32, 223, 43): 'resume_file'}, {}), '(resume_file)', False, 'import torch\n'), ((240, 11, 240, 28), 'torch.isnan', 'torch.isnan', ({(240, 23, 240, 27): 'loss'}, {}), '(loss)', False, 'import torch\n'), ((379, 8, 379, 32), 'matplotlib.pyplot.plot', 'plt.plot', ({(379, 17, 379, 23): 'epochs', (379, 25, 379, 31): 'values'}, {}), '(epochs, values)', True, 'import matplotlib.pyplot as plt\n'), ((380, 8, 380, 26), 'matplotlib.pyplot.xticks', 'plt.xticks', ({(380, 19, 380, 25): 'epochs'}, {}), '(epochs)', True, 'import matplotlib.pyplot as plt\n'), ((381, 8, 381, 27), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(381, 19, 381, 26): '"""Epoch"""'}, {}), "('Epoch')", True, 'import matplotlib.pyplot as plt\n'), ((382, 8, 382, 26), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(382, 19, 382, 25): '"""Loss"""'}, {}), "('Loss')", True, 'import matplotlib.pyplot as plt\n'), ((480, 8, 480, 48), 'torch.save', 'torch.save', ({(480, 19, 480, 24): 'state', (480, 26, 480, 47): 'self.saved_model_file'}, {}), '(state, self.saved_model_file)', False, 'import torch\n'), ((492, 22, 492, 115), 'torch.full', 'torch.full', (), '', False, 'import torch\n'), ((511, 21, 511, 49), 'torch.cat', 'torch.cat', (), '', False, 'import torch\n'), ((552, 26, 552, 111), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((554, 26, 554, 111), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((649, 26, 649, 111), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((665, 26, 665, 111), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((701, 26, 701, 111), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((703, 26, 703, 111), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((705, 20, 705, 86), 'numpy.random.randint', 
'np.random.randint', (), '', True, 'import numpy as np\n'), ((730, 20, 730, 86), 'numpy.random.randint', 'np.random.randint', (), '', True, 'import numpy as np\n'), ((920, 26, 920, 111), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((931, 30, 931, 119), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((943, 26, 943, 111), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((959, 26, 959, 111), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((961, 25, 961, 55), 'copy.deepcopy', 'copy.deepcopy', ({(961, 39, 961, 54): 'real_dataloader'}, {}), '(real_dataloader)', False, 'import copy\n'), ((962, 25, 962, 55), 'copy.deepcopy', 'copy.deepcopy', ({(962, 39, 962, 54): 'real_dataloader'}, {}), '(real_dataloader)', False, 'import copy\n'), ((963, 23, 963, 53), 'copy.deepcopy', 'copy.deepcopy', ({(963, 37, 963, 52): 'real_dataloader'}, {}), '(real_dataloader)', False, 'import copy\n'), ((991, 26, 991, 111), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((999, 22, 999, 121), 'torch.full', 'torch.full', (), '', False, 'import torch\n'), ((1016, 21, 1016, 49), 'torch.cat', 'torch.cat', (), '', False, 'import torch\n'), ((1280, 22, 1280, 115), 'torch.full', 'torch.full', (), '', False, 'import torch\n'), ((1296, 21, 1296, 49), 'torch.cat', 'torch.cat', (), '', False, 'import torch\n'), ((1327, 26, 1327, 111), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((1342, 26, 1342, 111), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((1347, 26, 1347, 111), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((100, 29, 100, 57), 'textbox.evaluator.TranslationEvaluator', 'TranslationEvaluator', ({(100, 50, 100, 56): 'config'}, {}), '(config)', False, 'from textbox.evaluator import NgramEvaluator, TranslationEvaluator, SummarizationEvaluator\n'), ((268, 34, 268, 40), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((271, 32, 271, 38), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((353, 25, 353, 52), 'torch.load', 'torch.load', ({(353, 36, 353, 51): 'checkpoint_file'}, {}), '(checkpoint_file)', False, 'import torch\n'), ((359, 13, 359, 28), 'torch.no_grad', 'torch.no_grad', ({}, {}), '()', False, 'import torch\n'), ((384, 12, 384, 22), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((386, 12, 386, 34), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(386, 24, 386, 33): 'save_path'}, {}), '(save_path)', True, 'import matplotlib.pyplot as plt\n'), ((590, 34, 590, 40), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((593, 32, 593, 38), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((606, 34, 606, 40), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((609, 32, 609, 38), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((622, 34, 622, 40), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((625, 32, 625, 38), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((770, 25, 770, 52), 'torch.load', 'torch.load', ({(770, 36, 770, 51): 
'checkpoint_file'}, {}), '(checkpoint_file)', False, 'import torch\n'), ((862, 22, 862, 50), 'torch.tensor', 'torch.tensor', ({(862, 35, 862, 49): '[seq_len] * bs'}, {}), '([seq_len] * bs)', False, 'import torch\n'), ((871, 20, 871, 47), 'torch.nn.CrossEntropyLoss', 'torch.nn.CrossEntropyLoss', ({}, {}), '()', False, 'import torch\n'), ((880, 30, 880, 115), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((1032, 12, 1032, 35), 'torch.save', 'torch.save', ({(1032, 23, 1032, 28): 'state', (1032, 30, 1032, 34): 'path'}, {}), '(state, path)', False, 'import torch\n'), ((1035, 12, 1035, 52), 'torch.save', 'torch.save', ({(1035, 23, 1035, 28): 'state', (1035, 30, 1035, 51): 'self.saved_model_file'}, {}), '(state, self.saved_model_file)', False, 'import torch\n'), ((1050, 25, 1050, 48), 'torch.load', 'torch.load', ({(1050, 36, 1050, 47): 'self.checkp'}, {}), '(self.checkp)', False, 'import torch\n'), ((1094, 34, 1094, 40), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1097, 32, 1097, 38), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1135, 34, 1135, 40), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1138, 32, 1138, 38), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1151, 34, 1151, 40), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1154, 32, 1154, 38), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1373, 34, 1373, 40), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1375, 32, 1375, 38), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1388, 34, 1388, 40), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1390, 32, 1390, 38), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((102, 29, 102, 59), 'textbox.evaluator.SummarizationEvaluator', 'SummarizationEvaluator', ({(102, 52, 102, 58): 'config'}, {}), '(config)', False, 'from textbox.evaluator import NgramEvaluator, TranslationEvaluator, SummarizationEvaluator\n'), ((104, 29, 104, 51), 'textbox.evaluator.NgramEvaluator', 'NgramEvaluator', ({(104, 44, 104, 50): 'config'}, {}), '(config)', False, 'from textbox.evaluator import NgramEvaluator, TranslationEvaluator, SummarizationEvaluator\n'), ((287, 35, 287, 41), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((291, 79, 293, 62), 'textbox.utils.early_stopping', 'early_stopping', (), '', False, 'from textbox.utils import ensure_dir, early_stopping\n'), ((295, 33, 295, 39), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((894, 38, 894, 127), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((1063, 30, 1063, 60), 'torch.load', 'torch.load', ({(1063, 41, 1063, 59): 'self.pre_lm_weight'}, {}), '(self.pre_lm_weight)', False, 'import torch\n'), ((1077, 30, 1077, 60), 'torch.load', 'torch.load', ({(1077, 41, 1077, 59): 'self.pre_lm_weight'}, {}), '(self.pre_lm_weight)', False, 'import torch\n'), ((1206, 30, 1206, 73), 'torch.optim.Adam', 'optim.Adam', (), '', True, 'import torch.optim as optim\n'), ((1207, 29, 1207, 71), 'torch.optim.Adam', 'optim.Adam', (), '', True, 'import torch.optim as optim\n'), ((1411, 38, 1411, 44), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1414, 36, 1414, 42), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), 
((1427, 38, 1427, 44), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1429, 36, 1429, 42), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1442, 38, 1442, 44), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1444, 36, 1444, 42), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((288, 21, 288, 36), 'torch.no_grad', 'torch.no_grad', ({}, {}), '()', False, 'import torch\n'), ((863, 29, 863, 51), 'torch.ones_like', 'torch.ones_like', ({(863, 45, 863, 50): 'input'}, {}), '(input)', False, 'import torch\n'), ((910, 20, 910, 62), 'torch.save', 'torch.save', ({(910, 31, 910, 41): 'state_dict', (910, 43, 910, 61): 'self.pre_lm_weight'}, {}), '(state_dict, self.pre_lm_weight)', False, 'import torch\n'), ((1212, 30, 1212, 72), 'torch.optim.SGD', 'optim.SGD', (), '', True, 'import torch.optim as optim\n'), ((1213, 29, 1213, 70), 'torch.optim.SGD', 'optim.SGD', (), '', True, 'import torch.optim as optim\n'), ((977, 37, 977, 67), 'copy.deepcopy', 'copy.deepcopy', ({(977, 51, 977, 66): 'real_dataloader'}, {}), '(real_dataloader)', False, 'import copy\n'), ((1218, 30, 1218, 76), 'torch.optim.Adagrad', 'optim.Adagrad', (), '', True, 'import torch.optim as optim\n'), ((1219, 29, 1219, 74), 'torch.optim.Adagrad', 'optim.Adagrad', (), '', True, 'import torch.optim as optim\n'), ((1224, 30, 1224, 76), 'torch.optim.RMSprop', 'optim.RMSprop', (), '', True, 'import torch.optim as optim\n'), ((1225, 29, 1225, 74), 'torch.optim.RMSprop', 'optim.RMSprop', (), '', True, 'import torch.optim as optim\n'), ((1231, 30, 1231, 73), 'torch.optim.Adam', 'optim.Adam', (), '', True, 'import torch.optim as optim\n'), ((1232, 29, 1232, 71), 'torch.optim.Adam', 'optim.Adam', (), '', True, 'import torch.optim as optim\n')]
EurusEurus/RSSerpent
rsserpent/plugins/builtin/__init__.py
fd7aaf67b80b2b48c14b1a3efe733374b0012338
from ...models import Persona, Plugin

from . import example, example_cache, example_ratelimit, example_with_args

plugin = Plugin(
    name="rsserpent-plugin-builtin",
    author=Persona(
        name="queensferryme",
        link="https://github.com/queensferryme",
        email="[email protected]",
    ),
    repository="https://github.com/RSSerpent/RSSerpent",
    prefix="/_",
    routers={
        example.path: example.provider,
        example_cache.path: example_cache.provider,
        example_ratelimit.path: example_ratelimit.provider,
        example_with_args.path: example_with_args.provider,
    },
)

__all__ = ("plugin",)
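# Illustrative only (not part of the original file): a plausible shape for one
# of the router modules imported above, e.g. `example`. The real definitions
# live elsewhere in the RSSerpent repo; the names and signature below are
# assumptions, not the library's confirmed API.
#
#     # example.py
#     path = "/example"
#
#     async def provider() -> dict:
#         return {"title": "Example Feed", "items": []}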
[]
luisroel91/libdib_assesment
data_processing/process_xls.py
c969cfecbce1243b457961ffafe5caaea7bb5149
import pandas as pd

# Define our header
col_names = [
    "year",
    "num_males_with_income",
    "male_median_income_curr_dollars",
    "male_median_income_2019_dollars",
    "num_females_with_income",
    "female_median_income_curr_dollars",
    "female_median_income_2019_dollars",
]

# Load Asian census data XLS, skipping all headers
dfa = pd.read_excel(
    r'p08a.xlsx',
    skiprows=8,
    # Make sure PD doesn't use header row for our DF
    header=None,
    # Define col names
    names=col_names,
)

# Load White census data XLS, skipping all headers
dfw = pd.read_excel(
    r'p08w.xlsx',
    skiprows=8,
    # Make sure PD doesn't use header row for our DF
    header=None,
    # Define col names
    names=col_names
)

# Splinter off rows into age group DFs for both sets of data
dfa1524 = dfa.iloc[:20]
dfa2534 = dfa.iloc[25:45]
dfa3544 = dfa.iloc[50:70]
dfa4554 = dfa.iloc[75:95]
dfa5564 = dfa.iloc[100:120]
dfa6574 = dfa.iloc[125:145]
dfa75 = dfa.iloc[150:170]

dfw1524 = dfw.iloc[:20]
dfw2534 = dfw.iloc[25:45]
dfw3544 = dfw.iloc[50:70]
dfw4554 = dfw.iloc[75:95]
dfw5564 = dfw.iloc[100:120]
dfw6574 = dfw.iloc[125:145]
dfw75 = dfw.iloc[150:170]

# Add Age Range col to each DF
dfa1524.insert(0, 'age_range', '15-24')
dfa2534.insert(0, 'age_range', '25-34')
dfa3544.insert(0, 'age_range', '35-44')
dfa4554.insert(0, 'age_range', '45-54')
dfa5564.insert(0, 'age_range', '55-64')
dfa6574.insert(0, 'age_range', '65-74')
dfa75.insert(0, 'age_range', 'Over 75')

dfw1524.insert(0, 'age_range', '15-24')
dfw2534.insert(0, 'age_range', '25-34')
dfw3544.insert(0, 'age_range', '35-44')
dfw4554.insert(0, 'age_range', '45-54')
dfw5564.insert(0, 'age_range', '55-64')
dfw6574.insert(0, 'age_range', '65-74')
dfw75.insert(0, 'age_range', 'Over 75')

# Stack cleaned DF's vertically
dfa = pd.concat([
    dfa1524,
    dfa2534,
    dfa3544,
    dfa4554,
    dfa5564,
    dfa6574,
    dfa75
], axis=0)
dfw = pd.concat([
    dfw1524,
    dfw2534,
    dfw3544,
    dfw4554,
    dfw5564,
    dfw6574,
    dfw75
], axis=0)

# Add Race col
dfa.insert(0, 'race', 'asian')
dfw.insert(0, 'race', 'white')

# Clean garbage chars in Year col using regex
dfa['year'] = dfa['year'].replace(to_replace=r'(\s\(\d+\))', value='', regex=True)
dfw['year'] = dfw['year'].replace(to_replace=r'(\s\(\d+\))', value='', regex=True)

# Stack our cleaned + normalized data into a single DF
df = pd.concat([
    dfa,
    dfw
], axis=0)

# Convert the DF col types to conform to our CensusRecord model
df = df.astype({
    "race": str,
    "age_range": str,
    "year": int,
    "num_males_with_income": int,
    "male_median_income_curr_dollars": float,
    "male_median_income_2019_dollars": float,
    "num_females_with_income": int,
    "female_median_income_curr_dollars": float,
    "female_median_income_2019_dollars": float,
})

# Pickle the DF
df.to_pickle("./res.pkl")
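# Illustrative only (not part of the original script): what the year-cleaning
# regex above does, on a made-up cell value. Census year cells can carry
# footnote markers such as "2019 (41)"; the pattern strips the " (41)" part.
#
#     import re
#     re.sub(r'(\s\(\d+\))', '', '2019 (41)')  # -> '2019'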
[((15, 6, 22, 1), 'pandas.read_excel', 'pd.read_excel', (), '', True, 'import pandas as pd\n'), ((24, 6, 31, 1), 'pandas.read_excel', 'pd.read_excel', (), '', True, 'import pandas as pd\n'), ((67, 6, 75, 10), 'pandas.concat', 'pd.concat', (), '', True, 'import pandas as pd\n'), ((77, 6, 85, 10), 'pandas.concat', 'pd.concat', (), '', True, 'import pandas as pd\n'), ((96, 5, 99, 10), 'pandas.concat', 'pd.concat', (), '', True, 'import pandas as pd\n')]
Szymon-Budziak/WDI_exercises_solutions
Section_1/Exercise_16.py
51ffc9ec8b3cd6809bd55e98ecb8aed759c2d460
""" Dany jest ciąg określony wzorem: A[n+1] = (A[n] % 2) ∗ (3 ∗ A[n] + 1) + (1 − A[n] % 2) ∗ A[n] / 2. Startując z dowolnej liczby naturalnej > 1 ciąg ten osiąga wartość 1. Napisać program, który znajdzie wyraz początkowy z przedziału 2-10000 dla którego wartość 1 jest osiągalna po największej liczbie kroków. """ a0 = 2 m = 1 for a0 in range(2, 10000): n = 0 while a0 != 1: a0 = (((a0 % 2) * (3 * a0 + 1)) + ((1 - (a0 % 2)) * (a0 / 2))) n += 1 if n > m: m = n a0 += 1 print(m)
[]
evlog/SysPy
SysPy_ver/funcs/_var_declaration.py
d1ee6e2ca60492d20339c0016a9c24d027170553
""" ***************************************************************************** * H E A D E R I N F O R M A T I O N * * ***************************************************************************** Project Name: SysPy (System Python) http://cgi.di.uoa.gr/~evlog/syspy.html File Name: _var_declaration.py Created by: Evangelos Logaras ***************************************************************************** * C O P Y R I G H T N O T I C E * * ***************************************************************************** This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; version 2.1 of the License, a copy of which is available from http://www.gnu.org/licenses/old-licenses/lgpl-2.1.txt. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA ***************************************************************************** * D E S C R I P T I O N * * ***************************************************************************** Variable declaration when a variable assignment is tracked. """ from pdb import * def var_declaration(assign_lines_count, token_struct, assign_lines, signals, process_vars): """ FUNCTION: var_declaration(a int, b(), c[], d[], e[]) a: assign lines counter integer b: token's tupple c: list containing the VHDL code d: list containing the signal statements e: list containing Variable declaration when a variable assignment is tracked. """ # Python's variable declerations #---------------------------------------------------------------------------------------------------------------------------------- count0 = 0 count1 = 0 process_vars_d = [] vars0 = [] var0 = '' var1 = '' #---------------------------------------------------------------------------------------------------------------------------------- print("process_vars:", process_vars) # Erasing duplicated registrations in "process_vars[]" #---------------------------------------------------------------------------------------------------------------------------------- for i in range(len(process_vars)): vars0 = [] #flag_process_vars = 0 if ((process_vars[i][0] == "name_left") or (process_vars[i][0] == "name_right")): var0 = process_vars[i][1].replace('=', '') var0 = var0.replace('! 
', '') var0 = var0.replace('>', '') var0 = var0.replace('<', '') var0 = var0.replace(' ', '') vars0.append(var0) elif (process_vars[i][0] == "name_right_binary_slice"): var0 = process_vars[i][1][0] vars0.append(var0) elif (process_vars[i][0] == "name_right_binary_slice_var0"): var0 = process_vars[i][1][0] vars0.append(var0) var0 = process_vars[i][1][1] vars0.append(var0) elif (process_vars[i][0] == "name_right_binary_slice_var1"): var0 = process_vars[i][1][0] vars0.append(var0) var0 = process_vars[i][1][2] vars0.append(var0) elif (process_vars[i][0] == "name_right_binary_slice_var01"): var0 = process_vars[i][1][0] vars0.append(var0) var0 = process_vars[i][1][1] vars0.append(var0) var0 = process_vars[i][1][2] vars0.append(var0) elif (process_vars[i][0] == "name_right_item"): var0 = process_vars[i][1][0] vars0.append(var0) elif (process_vars[i][0] == "name_right_item_var"): var0 = process_vars[i][1][0] vars0.append(var0) var0 = process_vars[i][1][1] vars0.append(var0) elif (process_vars[i][0] == "name_right_array_binary_item"): var0 = process_vars[i][1][0] vars0.append(var0) elif (process_vars[i][0] == "name_right_array_binary_item_var0"): var0 = process_vars[i][1][0] vars0.append(var0) var0 = process_vars[i][1][1] vars0.append(var0) elif (process_vars[i][0] == "name_right_array_binary_item_var1"): var0 = process_vars[i][1][0] vars0.append(var0) var0 = process_vars[i][1][2] vars0.append(var0) elif (process_vars[i][0] == "name_right_array_binary_item_var01"): var0 = process_vars[i][1][0] vars0.append(var0) var0 = process_vars[i][1][1] vars0.append(var0) var0 = process_vars[i][1][2] vars0.append(var0) elif (process_vars[i][0] == "name_right_array_binary_slice"): var0 = process_vars[i][1][0] vars0.append(var0) elif (process_vars[i][0] == "name_right_array_binary_slice_var0"): var0 = process_vars[i][1][0] vars0.append(var0) var0 = process_vars[i][1][1] vars0.append(var0) elif (process_vars[i][0] == "name_right_array_binary_slice_var1"): var0 = process_vars[i][1][0] vars0.append(var0) var0 = process_vars[i][1][2] vars0.append(var0) elif (process_vars[i][0] == "name_right_array_binary_slice_var2"): var0 = process_vars[i][1][0] vars0.append(var0) var0 = process_vars[i][1][3] vars0.append(var0) elif (process_vars[i][0] == "name_right_array_binary_slice_var01"): var0 = process_vars[i][1][0] vars0.append(var0) var0 = process_vars[i][1][1] vars0.append(var0) var0 = process_vars[i][1][2] vars0.append(var0) elif (process_vars[i][0] == "name_right_array_binary_slice_var02"): var0 = process_vars[i][1][0] vars0.append(var0) var0 = process_vars[i][1][1] vars0.append(var0) var0 = process_vars[i][1][3] vars0.append(var0) elif (process_vars[i][0] == "name_right_array_binary_slice_var12"): var0 = process_vars[i][1][0] vars0.append(var0) var0 = process_vars[i][1][2] vars0.append(var0) var0 = process_vars[i][1][3] vars0.append(var0) flag_process_vars = 0 for n in range(0, len(vars0)): for j in range(len(process_vars_d)): if ((process_vars_d[j][0] == "name_left") or (process_vars_d[j][0] == "name_right")): var1 = process_vars_d[j][1].replace('=', '') var1 = var1.replace('! 
', '') var1 = var1.replace('>', '') var1 = var1.replace('<', '') var1 = var1.replace(' ', '') elif (process_vars_d[j][0] == "name_right_binary_slice"): var1 = process_vars_d[j][1][0] elif (process_vars_d[j][0] == "name_right_binary_slice_var0"): var1 = process_vars_d[j][1] elif (process_vars_d[j][0] == "name_right_binary_slice_var1"): var1 = process_vars_d[j][1] elif (process_vars_d[j][0] == "name_right_binary_slice_var01"): var1 = process_vars_d[j][1] elif (process_vars_d[j][0] == "name_right_item"): var1 = process_vars_d[j][1][0] elif (process_vars_d[j][0] == "name_right_item_var"): var1 = process_vars_d[j][1] elif (process_vars_d[j][0] == "name_right_array_binary_item"): var1 = process_vars_d[j][1][0] elif (process_vars_d[j][0] == "name_right_array_binary_item_var0"): var1 = process_vars_d[j][1] elif (process_vars_d[j][0] == "name_right_array_binary_item_var1"): var1 = process_vars_d[j][1] elif (process_vars_d[j][0] == "name_right_array_binary_item_var01"): var1 = process_vars_d[j][1] elif (process_vars_d[j][0] == "name_right_array_binary_slice"): var1 = process_vars_d[j][1][0] elif (process_vars_d[j][0] == "name_right_array_binary_slice_var0"): var1 = process_vars_d[j][1] elif (process_vars_d[j][0] == "name_right_array_binary_slice_var1"): var1 = process_vars_d[j][1] elif (process_vars_d[j][0] == "name_right_array_binary_slice_var2"): var1 = process_vars_d[j][1] elif (process_vars_d[j][0] == "name_right_array_binary_slice_var01"): var1 = process_vars_d[j][1] elif (process_vars_d[j][0] == "name_right_array_binary_slice_var02"): var1 = process_vars_d[j][1] elif (process_vars_d[j][0] == "name_right_array_binary_slice_var12"): var1 = process_vars_d[j][1] if (vars0[n] == var1): if (n == 0): flag_process_vars += 1 if (n == 1): flag_process_vars += 2 if (n == 2): flag_process_vars += 4 if ((process_vars[i][0] == "name_left") or (process_vars[i][0] == "name_right")): if (flag_process_vars == 0): process_vars_d.append(process_vars[i]) elif (process_vars[i][0] == "name_right_binary_slice"): if (flag_process_vars == 0): process_vars_d.append(process_vars[i]) elif (process_vars[i][0] == "name_right_binary_slice_var0"): if (flag_process_vars == 0): process_vars_d.append(["name_right_binary_slice_var0", process_vars[i][1][0]]) process_vars_d.append(["name_right_binary_slice_var0", process_vars[i][1][1]]) elif (flag_process_vars == 1): process_vars_d.append(["name_right_binary_slice_var0", process_vars[i][1][1]]) elif (flag_process_vars == 2): process_vars_d.append(["name_right_binary_slice_var0", process_vars[i][1][0]]) elif (flag_process_vars == 3): pass elif (process_vars[i][0] == "name_right_binary_slice_var1"): if (flag_process_vars == 0): process_vars_d.append(["name_right_binary_slice_var1", process_vars[i][1][0]]) process_vars_d.append(["name_right_binary_slice_var1", process_vars[i][1][2]]) elif (flag_process_vars == 1): process_vars_d.append(["name_right_binary_slice_var1", process_vars[i][1][2]]) elif (flag_process_vars == 2): process_vars_d.append(["name_right_binary_slice_var1", process_vars[i][1][0]]) elif (flag_process_vars == 4): pass elif (process_vars[i][0] == "name_right_binary_slice_var01"): if (flag_process_vars == 0): process_vars_d.append(["name_right_binary_slice_var01", process_vars[i][1][0]]) process_vars_d.append(["name_right_binary_slice_var01", process_vars[i][1][1]]) process_vars_d.append(["name_right_binary_slice_var01", process_vars[i][1][2]]) elif (flag_process_vars == 1): process_vars_d.append(["name_right_binary_slice_var01", process_vars[i][1][1]]) 
process_vars_d.append(["name_right_binary_slice_var01", process_vars[i][1][2]]) elif (flag_process_vars == 2): process_vars_d.append(["name_right_binary_slice_var01", process_vars[i][1][0]]) process_vars_d.append(["name_right_binary_slice_var01", process_vars[i][1][2]]) elif (flag_process_vars == 3): process_vars_d.append(["name_right_binary_slice_var01", process_vars[i][1][2]]) elif (flag_process_vars == 4): process_vars_d.append(["name_right_binary_slice_var01", process_vars[i][1][0]]) process_vars_d.append(["name_right_binary_slice_var01", process_vars[i][1][1]]) elif (flag_process_vars == 5): process_vars_d.append(["name_right_binary_slice_var01", process_vars[i][1][1]]) elif (flag_process_vars == 6): process_vars_d.append(["name_right_binary_slice_var01", process_vars[i][1][0]]) elif (flag_process_vars == 7): pass elif (process_vars[i][0] == "name_right_item"): if (flag_process_vars == 0): process_vars_d.append(process_vars[i]) elif (process_vars[i][0] == "name_right_item_var"): if (flag_process_vars == 0): process_vars_d.append(["name_right_item_var", process_vars[i][1][0]]) process_vars_d.append(["name_right_item_var", process_vars[i][1][1]]) elif (flag_process_vars == 1): process_vars_d.append(["name_right_item_var", process_vars[i][1][1]]) elif (flag_process_vars == 2): process_vars_d.append(["name_right_item_var", process_vars[i][1][0]]) elif (flag_process_vars == 3): pass elif (process_vars[i][0] == "name_right_array_binary_item"): if (flag_process_vars == 0): process_vars_d.append(process_vars[i]) elif (process_vars[i][0] == "name_right_array_binary_item_var0"): if (flag_process_vars == 0): process_vars_d.append(["name_right_array_binary_item_var0", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_item_var0", process_vars[i][1][1]]) elif (flag_process_vars == 1): process_vars_d.append(["name_right_array_binary_item_var0", process_vars[i][1][1]]) elif (flag_process_vars == 2): process_vars_d.append(["name_right_array_binary_item_var0", process_vars[i][1][0]]) elif (flag_process_vars == 3): pass elif (process_vars[i][0] == "name_right_array_binary_item_var1"): if (flag_process_vars == 0): process_vars_d.append(["name_right_array_binary_item_var1", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_item_var1", process_vars[i][1][2]]) elif (flag_process_vars == 1): process_vars_d.append(["name_right_array_binary_item_var1", process_vars[i][1][2]]) elif (flag_process_vars == 2): process_vars_d.append(["name_right_array_binary_item_var1", process_vars[i][1][0]]) elif (flag_process_vars == 3): pass elif (process_vars[i][0] == "name_right_array_binary_item_var01"): if (flag_process_vars == 0): process_vars_d.append(["name_right_array_binary_item_var01", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_item_var01", process_vars[i][1][1]]) process_vars_d.append(["name_right_array_binary_item_var01", process_vars[i][1][2]]) elif (flag_process_vars == 1): process_vars_d.append(["name_right_array_binary_item_var01", process_vars[i][1][1]]) process_vars_d.append(["name_right_array_binary_item_var01", process_vars[i][1][2]]) elif (flag_process_vars == 2): process_vars_d.append(["name_right_array_binary_item_var01", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_item_var01", process_vars[i][1][2]]) elif (flag_process_vars == 3): process_vars_d.append(["name_right_array_binary_item_var01", process_vars[i][1][2]]) elif (flag_process_vars == 4): process_vars_d.append(["name_right_array_binary_item_var01", 
process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_item_var01", process_vars[i][1][1]]) elif (flag_process_vars == 5): process_vars_d.append(["name_right_array_binary_item_var01", process_vars[i][1][1]]) elif (flag_process_vars == 6): process_vars_d.append(["name_right_array_binary_item_var01", process_vars[i][1][0]]) elif (flag_process_vars == 7): pass elif (process_vars[i][0] == "name_right_array_binary_slice"): if (flag_process_vars == 0): process_vars_d.append(process_vars[i]) elif (process_vars[i][0] == "name_right_array_binary_slice_var0"): if (flag_process_vars == 0): process_vars_d.append(["name_right_array_binary_slice_var0", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_slice_var0", process_vars[i][1][1]]) elif (flag_process_vars == 1): process_vars_d.append(["name_right_array_binary_slice_var0", process_vars[i][1][1]]) elif (flag_process_vars == 2): process_vars_d.append(["name_right_array_binary_slice_var0", process_vars[i][1][0]]) elif (flag_process_vars == 3): pass elif (process_vars[i][0] == "name_right_array_binary_slice_var1"): if (flag_process_vars == 0): process_vars_d.append(["name_right_array_binary_slice_var1", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_slice_var1", process_vars[i][1][2]]) elif (flag_process_vars == 1): process_vars_d.append(["name_right_array_binary_slice_var1", process_vars[i][1][2]]) elif (flag_process_vars == 2): process_vars_d.append(["name_right_array_binary_slice_var1", process_vars[i][1][0]]) elif (flag_process_vars == 3): pass elif (process_vars[i][0] == "name_right_array_binary_slice_var2"): if (flag_process_vars == 0): process_vars_d.append(["name_right_array_binary_slice_var2", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_slice_var2", process_vars[i][1][3]]) elif (flag_process_vars == 1): process_vars_d.append(["name_right_array_binary_slice_var2", process_vars[i][1][3]]) elif (flag_process_vars == 2): process_vars_d.append(["name_right_array_binary_slice_var2", process_vars[i][1][0]]) elif (flag_process_vars == 3): pass elif (process_vars[i][0] == "name_right_array_binary_slice_var01"): if (flag_process_vars == 0): process_vars_d.append(["name_right_array_binary_slice_var01", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_slice_var01", process_vars[i][1][1]]) process_vars_d.append(["name_right_array_binary_slice_var01", process_vars[i][1][2]]) elif (flag_process_vars == 1): process_vars_d.append(["name_right_array_binary_slice_var01", process_vars[i][1][1]]) process_vars_d.append(["name_right_array_binary_slice_var01", process_vars[i][1][2]]) elif (flag_process_vars == 2): process_vars_d.append(["name_right_array_binary_slice_var01", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_slice_var01", process_vars[i][1][2]]) elif (flag_process_vars == 3): process_vars_d.append(["name_right_array_binary_slice_var01", process_vars[i][1][2]]) elif (flag_process_vars == 4): process_vars_d.append(["name_right_array_binary_slice_var01", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_slice_var01", process_vars[i][1][1]]) elif (flag_process_vars == 5): process_vars_d.append(["name_right_array_binary_slice_var01", process_vars[i][1][1]]) elif (flag_process_vars == 6): process_vars_d.append(["name_right_array_binary_slice_var01", process_vars[i][1][0]]) elif (flag_process_vars == 7): pass elif (process_vars[i][0] == "name_right_array_binary_slice_var02"): if (flag_process_vars == 
0): process_vars_d.append(["name_right_array_binary_slice_var02", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_slice_var02", process_vars[i][1][1]]) process_vars_d.append(["name_right_array_binary_slice_var02", process_vars[i][1][3]]) elif (flag_process_vars == 1): process_vars_d.append(["name_right_array_binary_slice_var02", process_vars[i][1][1]]) process_vars_d.append(["name_right_array_binary_slice_var02", process_vars[i][1][3]]) elif (flag_process_vars == 2): process_vars_d.append(["name_right_array_binary_slice_var02", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_slice_var02", process_vars[i][1][3]]) elif (flag_process_vars == 3): process_vars_d.append(["name_right_array_binary_slice_var02", process_vars[i][1][3]]) elif (flag_process_vars == 4): process_vars_d.append(["name_right_array_binary_slice_var02", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_slice_var02", process_vars[i][1][1]]) elif (flag_process_vars == 5): process_vars_d.append(["name_right_array_binary_slice_var02", process_vars[i][1][1]]) elif (flag_process_vars == 6): process_vars_d.append(["name_right_array_binary_slice_var02", process_vars[i][1][0]]) elif (flag_process_vars == 7): pass elif (process_vars[i][0] == "name_right_array_binary_slice_var12"): if (flag_process_vars == 0): process_vars_d.append(["name_right_array_binary_slice_var12", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_slice_var12", process_vars[i][1][2]]) process_vars_d.append(["name_right_array_binary_slice_var12", process_vars[i][1][3]]) elif (flag_process_vars == 1): process_vars_d.append(["name_right_array_binary_slice_var12", process_vars[i][1][2]]) process_vars_d.append(["name_right_array_binary_slice_var12", process_vars[i][1][3]]) elif (flag_process_vars == 2): process_vars_d.append(["name_right_array_binary_slice_var12", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_slice_var12", process_vars[i][1][3]]) elif (flag_process_vars == 3): process_vars_d.append(["name_right_array_binary_slice_var12", process_vars[i][1][3]]) elif (flag_process_vars == 4): process_vars_d.append(["name_right_array_binary_slice_var12", process_vars[i][1][0]]) process_vars_d.append(["name_right_array_binary_slice_var12", process_vars[i][1][2]]) elif (flag_process_vars == 5): process_vars_d.append(["name_right_array_binary_slice_var12", process_vars[i][1][2]]) elif (flag_process_vars == 6): process_vars_d.append(["name_right_array_binary_slice_var12", process_vars[i][1][0]]) elif (flag_process_vars == 7): pass process_vars = process_vars_d #---------------------------------------------------------------------------------------------------------------------------------- j = assign_lines_count for m in range(0, len(process_vars)): if ((process_vars[m][0] == "name_left") or (process_vars[m][0] == "name_right")): t = process_vars[m][1].replace('=', '') t = t.replace(' ', '') elif (process_vars[m][0] == "name_right_binary_slice"): t = process_vars[m][1][0] elif (process_vars[m][0] == "name_right_binary_slice_var0"): t = process_vars[m][1] elif (process_vars[m][0] == "name_right_binary_slice_var1"): t = process_vars[m][1] elif (process_vars[m][0] == "name_right_binary_slice_var01"): t = process_vars[m][1] elif (process_vars[m][0] == "name_right_item"): t = process_vars[m][1][0] elif (process_vars[m][0] == "name_right_item_var"): t = process_vars[m][1] elif (process_vars[m][0] == "name_right_array_binary_item"): t = process_vars[m][1][0] elif 
(process_vars[m][0] == "name_right_array_binary_item_var0"): t = process_vars[m][1] elif (process_vars[m][0] == "name_right_array_binary_item_var1"): t = process_vars[m][1] elif (process_vars[m][0] == "name_right_array_binary_item_var01"): t = process_vars[m][1] elif (process_vars[m][0] == "name_right_array_binary_slice"): t = process_vars[m][1][0] elif (process_vars[m][0] == "name_right_array_binary_slice_var0"): t = process_vars[m][1] elif (process_vars[m][0] == "name_right_array_binary_slice_var1"): t = process_vars[m][1] elif (process_vars[m][0] == "name_right_array_binary_slice_var2"): t = process_vars[m][1] elif (process_vars[m][0] == "name_right_array_binary_slice_var01"): t = process_vars[m][1] elif (process_vars[m][0] == "name_right_array_binary_slice_var02"): t = process_vars[m][1] elif (process_vars[m][0] == "name_right_array_binary_slice_var12"): t = process_vars[m][1] for i in range (0, len(signals)): if (t == signals[i]['N']): if (signals[i]['D'] == 'v'): L = signals[i]['L'].__doc__ n = signals[i]['N'].__doc__ if (m == 0): sp = '' while 1: if (assign_lines[j][0] == "process_sens_list"): assign_lines[j][0] = assign_lines[j][0] + "_var" for k in range(0, assign_lines[j][4]): sp = sp + ' ' assign_lines[j][1] = assign_lines[j][1].replace("begin", '') assign_lines[j][1] = assign_lines[j][1] + "\n\n" + sp + "-- Variables" assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "-------------------------------------------------------------------" if (signals[i]['T'] == 'b'): if (L.find("int") == 0): if (n.find("list") == 0): for k in range(len(signals_intr[i]['N'])): if (signals[i].has_key('V') == False): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'][k] + ": std_logic;\n" elif (signals[i].has_key('V') == True): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'][k] + ": std_logic := '" + signals[i]['V'] + "';\n" elif (n.find("str") == 0): if (signals[i].has_key('V') == False): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'] + ": std_logic;\n" elif (signals[i].has_key('V') == True): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'] + ": std_logic := '" + signals[i]['V'] + "';\n" elif (L.find("list") == 0): if (n.find("list") == 0): for k in range(len(signals[i]['N'])): if (signals[i].has_key('V') == False): if (signals[i]['L'][0] > signals[i]['L'][1]): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'][k] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) + " downto " + str(int(signals[i]['L'][1])) + ");\n" else: assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'][k] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) + " to " + str(int(signals[i]['L'][1])) + ");\n" elif (signals[i].has_key('V') == True): if (signals_intr[i]['L'][0] > signals_intr[i]['L'][1]): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'][k] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) + " downto " + str(int(signals[i]['L'][1])) + ") := \"" + signals[i]['V'] + "\";\n" else: assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'][k] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) + " to " + str(int(signals[i]['L'][1])) + ") := '" + signals[i]['V'] + "';\n" elif (n.find("str") == 0): if (signals[i].has_key('V') == False): if (signals[i]['L'][0] > signals[i]['L'][1]): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " 
+ signals[i]['N'] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) + " downto " + str(int(signals[i]['L'][1])) + ");\n" else: assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) + " to " + str(int(signals[i]['L'][1])) + ");\n" elif (signals[i].has_key('V') == True): if (signals[i]['L'][0] > signals[i]['L'][1]): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) + " downto " + str(int(signals[i]['L'][1])) + ") := \"" + signals[i]['V'] + "\";\n" else: assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) + " to " + str(int(signals[i]['L'][1])) + ") := '" + signals[i]['V'] + "';\n" break elif (signals[i]['T'] == "int"): if (n.find("str") == 0): if (signals[i].has_key('V') == False): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'] + ": integer range " + str(signals[i]['L'][0]) + " to " + str(signals[i]['L'][1]) + ";\n" elif (signals[i].has_key('V') == True): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'] + ": integer range " + str(signals[i]['L'][0]) + " to " + str(signals[i]['L'][1]) + " := " + str(signals[i]['V']) + ";\n" elif (n.find("list") == 0): for k in range(len(signals[i]['N'])): if (signals[i].has_key('V') == False): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'][k] + ": integer range " + str(signals[i]['L'][0]) + " to " + str(signals[i]['L'][1]) + ";\n" elif (signals_intr[i].has_key('V') == True): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'][k] + ": integer range " + str(signals[i]['L'][0]) + " to " + str(signals[i]['L'][1]) + " := " + str(signals[i]['V']) + ";\n" break elif (signals[i]['T'] == "arrb"): if (n.find("str") == 0): if (signals[i]['L'][1][0] > signals[i]['L'][1][1]): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "type type" + str(count0) + " is array (" + str(signals[i]['L'][0][0]) + " to " + str(signals[i]['L'][0][1]) + ") of std_logic_vector(" + str(signals_intr[i]['L'][1][0]) + " downto " + str(signals_intr[i]['L'][1][1]) + ");\n" elif (signals[i]['L'][1][0] < signals[i]['L'][1][1]): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "type type" + str(count0) + " is array (" + str(signals[i]['L'][0][0]) + " to " + str(signals[i]['L'][0][1]) + ") of std_logic_vector(" + str(signals_intr[i]['L'][1][0]) + " to " + str(signals_intr[i]['L'][1][1]) + ");\n" if (signals[i].has_key('V') == False): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'] + ": " + "type" + str(count0) + ";\n" elif (signals[i].has_key('V') == True): v = signals[i]['V'].__doc__ if (v.find("str") == 0): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'] + ": " + "type" + str(count0) + ": \"" + signals[i]['V'] + "\";\n" elif(v.find("list") == 0): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'] + ": " + "type" + str(count0) + ": {" for k in range(0, (signals[i]['L'][0][1] + 1)): if (k == signals[i]['L'][0][1]): assign_lines[j][1] = assign_lines[j][1] + "\"" + signals[i]['V'][k] + "\"};\n" elif (k != signals[i]['L'][0][1]): assign_lines[j][1] = assign_lines[j][1] + "\"" + signals[i]['V'][k] + "\", " count0 = count0 + 1 break elif (signals[i]['T'] == "arri"): if (n.find("str") == 0): 
assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "type type" + str(count0) + " is array (" + str(signals[i]['L'][0][0]) + " to " + str(signals[i]['L'][0][1]) + ") of integer range " + str(signals[i]['L'][1][0]) + " to " + str(signals[i]['L'][1][1]) + ";\n" if (signals[i].has_key('V') == False): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'] + ": " + "type" + str(count0) + ";\n" elif (signals[i].has_key('V') == True): v = signals[i]['V'].__doc__ if (v.find("str") == 0): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'] + ": " + "type" + str(count0) + ": " + str(signals[i]['V']) + ";\n" elif(v.find("list") == 0): assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "variable " + signals[i]['N'] + ": " + "type" + str(count0) + ": {" for k in range(0, (signals_intr[i]['L'][0][1] + 1)): if (k == signals[i]['L'][0][1]): assign_lines[j][1] = assign_lines[j][1] + signals[i]['V'][k] + "};\n" elif (j != signals[i]['L'][0][1]): assign_lines[j][1] = assign_lines[j][1] + signals[i]['V'][k] + ", " count0 = count0 + 1 break elif (signals[i]['T'] == 's'): v = signals[i]['V'].__doc__ assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "type state_type" + str(count1) + " is (" if (v.find("str") == 0): assign_lines[j][1] = assign_lines[j][1] + signals[i]['V'] + ");\n" elif (v.find("list") == 0): for k in range(len(signals[i]['V'])): if (k == (len(signals[i]['V']) - 1)): assign_lines[j][1] = assign_lines[j][1] + signals[i]['V'][k] + ");\n" else: assign_lines[j][1] = assign_lines[j][1] + signals[i]['V'][k] + ", " assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "signal " + args[i]['N'] + ": state_type" + str(count1) + ";\n" count1 = count1 + 1 break elif (j == 0): break j = j - 1 elif (m != 0): if (signals[i]['T'] == 'b'): if (L.find("int") == 0): if (n.find("list") == 0): for k in range(len(signals_intr[i]['N'])): if (signals[i].has_key('V') == False): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'][k] + ": std_logic;\n" elif (signals[i].has_key('V') == True): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'][k] + ": std_logic := '" + signals[i]['V'] + "';\n" elif (n.find("str") == 0): if (signals[i].has_key('V') == False): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'] + ": std_logic;\n" elif (signals[i].has_key('V') == True): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'] + ": std_logic := '" + signals[i]['V'] + "';\n" elif (L.find("list") == 0): if (n.find("list") == 0): for k in range(len(signals[i]['N'])): if (signals[i].has_key('V') == False): if (signals[i]['L'][0] > signals[i]['L'][1]): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'][k] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) + " downto " + str(int(signals[i]['L'][1])) + ");\n" else: assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'][k] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) + " to " + str(int(signals[i]['L'][1])) + ");\n" elif (signals[i].has_key('V') == True): if (signals_intr[i]['L'][0] > signals_intr[i]['L'][1]): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'][k] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) + " downto " + str(int(signals[i]['L'][1])) + ") := \"" + signals[i]['V'] + "\";\n" else: assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'][k] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) 
+ " to " + str(int(signals[i]['L'][1])) + ") := '" + signals[i]['V'] + "';\n" elif (n.find("str") == 0): if (signals[i].has_key('V') == False): if (signals[i]['L'][0] > signals[i]['L'][1]): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) + " downto " + str(int(signals[i]['L'][1])) + ");\n" else: assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) + " to " + str(int(signals[i]['L'][1])) + ");\n" elif (signals[i].has_key('V') == True): if (signals[i]['L'][0] > signals[i]['L'][1]): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) + " downto " + str(int(signals[i]['L'][1])) + ") := \"" + signals[i]['V'] + "\";\n" else: assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'] + ": std_logic_vector(" + str(int(signals[i]['L'][0])) + " to " + str(int(signals[i]['L'][1])) + ") := '" + signals[i]['V'] + "';\n" elif (signals[i]['T'] == "int"): if (n.find("str") == 0): if (signals[i].has_key('V') == False): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'] + ": integer range " + str(signals[i]['L'][0]) + " to " + str(signals[i]['L'][1]) + ";\n" elif (signals[i].has_key('V') == True): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'] + ": integer range " + str(signals[i]['L'][0]) + " to " + str(signals[i]['L'][1]) + " := " + str(signals[i]['V']) + ";\n" elif (n.find("list") == 0): for k in range(len(signals[i]['N'])): if (signals[i].has_key('V') == False): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'][k] + ": integer range " + str(signals[i]['L'][0]) + " to " + str(signals[i]['L'][1]) + ";\n" elif (signals_intr[i].has_key('V') == True): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'][k] + ": integer range " + str(signals[i]['L'][0]) + " to " + str(signals[i]['L'][1]) + " := " + str(signals[i]['V']) + ";\n" elif (signals[i]['T'] == "arrb"): if (n.find("str") == 0): if (signals[i]['L'][1][0] > signals[i]['L'][1][1]): assign_lines[j][1] = assign_lines[j][1] + sp + "type typev" + str(count0) + " is array (" + str(signals[i]['L'][0][0]) + " to " + str(signals[i]['L'][0][1]) + ") of std_logic_vector(" + str(signals[i]['L'][1][0]) + " downto " + str(signals[i]['L'][1][1]) + ");\n" elif (signals[i]['L'][1][0] < signals[i]['L'][1][1]): assign_lines[j][1] = assign_lines[j][1] + sp + "type typev" + str(count0) + " is array (" + str(signals[i]['L'][0][0]) + " to " + str(signals[i]['L'][0][1]) + ") of std_logic_vector(" + str(signals_intr[i]['L'][1][0]) + " to " + str(signals_intr[i]['L'][1][1]) + ");\n" if (signals[i].has_key('V') == False): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'] + ": " + "typev" + str(count0) + ";\n" elif (signals[i].has_key('V') == True): v = signals[i]['V'].__doc__ if (v.find("str") == 0): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'] + ": " + "typev" + str(count0) + ": \"" + signals[i]['V'] + "\";\n" elif(v.find("list") == 0): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'] + ": " + "typev" + str(count0) + ": {" for k in range(0, (signals[i]['L'][0][1] + 1)): if (k == signals[i]['L'][0][1]): assign_lines[j][1] = assign_lines[j][1] + "\"" + signals[i]['V'][k] + "\"};\n" elif (k != signals[i]['L'][0][1]): assign_lines[j][1] 
= assign_lines[j][1] + "\"" + signals[i]['V'][k] + "\", " count0 = count0 + 1 elif (signals[i]['T'] == "arri"): if (n.find("str") == 0): assign_lines[j][1] = assign_lines[j][1] + sp + "type typev" + str(count0) + " is array (" + str(signals[i]['L'][0][0]) + " to " + str(signals[i]['L'][0][1]) + ") of integer range " + str(signals[i]['L'][1][0]) + " to " + str(signals[i]['L'][1][1]) + ";\n" if (signals[i].has_key('V') == False): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'] + ": " + "typev" + str(count0) + ";\n" elif (signals[i].has_key('V') == True): v = signals[i]['V'].__doc__ if (v.find("str") == 0): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'] + ": " + "typev" + str(count0) + ": " + str(signals[i]['V']) + ";\n" elif(v.find("list") == 0): assign_lines[j][1] = assign_lines[j][1] + sp + "variable " + signals[i]['N'] + ": " + "typev" + str(count0) + ": {" for k in range(0, (signals[i]['L'][0][1] + 1)): if (k == signals[i]['L'][0][1]): assign_lines[j][1] = assign_lines[j][1] + str(signals[i]['V'][k]) + "};\n" elif (j != signals[i]['L'][0][1]): assign_lines[j][1] = assign_lines[j][1] + str(signals[i]['V'][k]) + ", " count0 = count0 + 1 elif (signals[i]['T'] == 's'): v = signals[i]['V'].__doc__ assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "type state_typev" + str(count1) + " is (" if (v.find("str") == 0): assign_lines[j][1] = assign_lines[j][1] + signals[i]['V'] + ");\n" elif (v.find("list") == 0): for k in range(len(signals[i]['V'])): if (k == (len(signals[i]['V']) - 1)): assign_lines[j][1] = assign_lines[j][1] + signals[i]['V'][k] + ");\n" else: assign_lines[j][1] = assign_lines[j][1] + signals[i]['V'][k] + ", " assign_lines[j][1] = assign_lines[j][1] + "\n" + sp + "signal " + args[i]['N'] + ": state_typev" + str(count1) + ";\n" count1 = count1 + 1 if (len(process_vars) > 0): assign_lines[j][1] = assign_lines[j][1] + sp + "-------------------------------------------------------------------" assign_lines[j][1] = assign_lines[j][1] + "\n\n" + sp + "begin\n\n"
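# Illustrative only (not part of the original file): given a made-up signal
# entry such as {'N': "count", 'D': 'v', 'T': "int", 'L': [0, 255]} with no
# 'V' key, the code above splices a declaration like the following into the
# process text of the generated VHDL:
#
#     -- Variables
#     -------------------------------------------------------------------
#     variable count: integer range 0 to 255;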
[]
MaggieIllustrations/softuni-github-programming
Giraffe/Functions.py
f5695cb14602f3d2974359f6d8734332acc650d3
def say_hi(name, age):
    print("Hello " + name + ", you are " + age)


say_hi("Mike", "35")


def cube(num):  # function
    return num * num * num


result = cube(4)  # variable
print(result)
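# Illustrative only (not part of the original file): cube() returns a value,
# so it can be used directly inside another expression, while say_hi() only
# prints and returns None.
print(cube(3))  # 27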
[]
wipfli/airspaces
airspace_surgery.py
c2e01615fa6a065895ed04b8f342a38732e9196b
import glob
import json

path_in = './airspaces/'
path_out = './airspaces_processed/'

filenames = [path.split('/')[-1] for path in glob.glob(path_in + '*')]

remove = {
    'france_fr.geojson': [
        314327,
        314187,
        314360,
        314359,
        314362,
        314361,
        314364,
        314363,
        314333,
        314329,
        314331,
    ],
    'germany_de.geojson': [
        307563,
        307638,
        307639,
        307640,
    ]
}

replacements = {
    'france_fr.geojson': [
        ['Bale10 119.35', 'Bale 10 TMA 130.9'],
        ['Bale1 119.35', 'Bale 1 TMA 130.9'],
        ['Bale2 119.35', 'Bale 2 TMA 130.9'],
        ['Bale3 119.35', 'Bale 3 TMA 130.9'],
        ['Bale4 119.35', 'Bale 4 TMA 130.9'],
        ['Bale5 119.35', 'Bale 5 TMA 130.9'],
        ['Bale5 119.35', 'Bale 5 TMA 130.9'],
        ['Bale6 119.35', 'Bale 6 TMA 130.9'],
        ['Bale7 119.35', 'Bale 7 TMA 130.9'],
        ['Bale8 119.35', 'Bale 8 TMA 130.9'],
        ['Bale9 119.35', 'Bale 9 TMA 130.9'],
        ['Bale AZ4T1 134.67', 'Bale T1 TMA HX 134.68'],
        ['Bale AZ4T2 134.67', 'Bale T2 TMA HX 134.68'],
        ['Bale AZ4T3 134.67', 'Bale T3 TMA HX 134.68'],
        ['CTR BALE', 'Bale CTR 118.3']
    ],
    'switzerland_ch.geojson': [
        ['ZURICH 10 TMA 118.1', 'ZURICH 10 TMA 124.7'],
        ['ZURICH 11 TMA 118.1', 'ZURICH 11 TMA 124.7'],
        ['ZURICH 12 TMA 118.1', 'ZURICH 12 TMA 124.7'],
        ['ZURICH 13 TMA 118.1', 'ZURICH 13 TMA 124.7'],
        ['ZURICH 14 TMA 118.1', 'ZURICH 14 TMA HX 127.755'],
        ['ZURICH 15 TMA 118.1', 'ZURICH 15 TMA HX 127.755'],
        ['ZURICH 1 TMA 118.1', 'ZURICH 1 TMA 124.7'],
        ['ZURICH 2 CTR 118.1', 'ZURICH 2 CTR HX 118.975'],
        ['ZURICH 2 TMA 118.1', 'ZURICH 2 TMA 124.7'],
        ['ZURICH 3 TMA 118.1', 'ZURICH 3 TMA 124.7'],
        ['ZURICH 4A TMA 118.1', 'ZURICH 4A TMA 124.7'],
        ['ZURICH 4B TMA 118.1', 'ZURICH 4B TMA 124.7'],
        ['ZURICH 4C TMA 118.1', 'ZURICH 4C TMA 124.7'],
        ['ZURICH 5 TMA 118.1', 'ZURICH 5 TMA 124.7'],
        ['ZURICH 6 TMA 118.1', 'ZURICH 6 TMA 124.7'],
        ['ZURICH 7 TMA 118.1', 'ZURICH 7 TMA 124.7'],
        ['ZURICH 8 TMA 118.1', 'ZURICH 8 TMA 124.7'],
        ['ZURICH 9 TMA 118.1', 'ZURICH 9 TMA 124.7'],
        ['BERN 1 TMA 121.025', 'BERN 1 TMA HX 127.325'],
        ['BERN 2 TMA 121.025', 'BERN 2 TMA HX 127.325'],
        ['BERN CTR 121.025', 'BERN CTR HX 121.025'],
        ['EMMEN 1 CTR 120.425', 'EMMEN 1 CTR HX 120.425'],
        ['EMMEN 1 TMA 120.425', 'EMMEN 1 TMA HX 134.130'],
        ['EMMEN 2 CTR 120.425', 'EMMEN 2 CTR HX 120.425'],
        ['EMMEN 2 TMA 120.425', 'EMMEN 2 TMA HX 134.130'],
        ['EMMEN 3 TMA 120.425', 'EMMEN 3 TMA HX 134.130'],
        ['EMMEN 4 TMA 120.425', 'EMMEN 4 TMA HX 134.130'],
        ['EMMEN 5 TMA 120.425', 'EMMEN 5 TMA HX 134.130'],
        ['EMMEN 6 TMA 120.425', 'EMMEN 6 TMA HX 134.130'],
    ]
}

for filename in filenames:
    print(filename)
    with open(path_in + filename) as f:
        data = json.load(f)

    if filename in replacements:
        targets = [r[0] for r in replacements[filename]]
        for feature in data['features']:
            if feature['properties']['N'] in targets:
                print('replace ' + feature['properties']['N'] + '...')
                feature['properties']['N'] = next(x for x in replacements[filename] if x[0] == feature['properties']['N'])[1]

    if filename in remove:
        features_out = [f for f in data['features'] if int(f['properties']['ID']) not in remove[filename]]
    else:
        features_out = data['features']

    print('removed ' + str(len(data['features']) - len(features_out)) + ' features')

    geojson = {
        'type': 'FeatureCollection',
        'features': features_out
    }

    print('write ' + filename + '...')
    with open(path_out + filename, 'w') as f:
        json.dump(geojson, f)

all_features = []
for filename in filenames:
    print('read ' + filename + '...')
    with open(path_out + filename) as f:
        all_features += json.load(f)['features']

print('write airspaces.geojson...')
with open('airspaces.geojson', 'w') as f:
    json.dump({
        'type': 'FeatureCollection',
        'features': all_features
    }, f)

print('done')
[((122, 4, 125, 9), 'json.dump', 'json.dump', ({(122, 14, 125, 5): "{'type': 'FeatureCollection', 'features': all_features}", (125, 7, 125, 8): 'f'}, {}), "({'type': 'FeatureCollection', 'features': all_features}, f)", False, 'import json\n'), ((7, 45, 7, 69), 'glob.glob', 'glob.glob', ({(7, 55, 7, 68): "(path_in + '*')"}, {}), "(path_in + '*')", False, 'import glob\n'), ((89, 15, 89, 27), 'json.load', 'json.load', ({(89, 25, 89, 26): 'f'}, {}), '(f)', False, 'import json\n'), ((111, 8, 111, 29), 'json.dump', 'json.dump', ({(111, 18, 111, 25): 'geojson', (111, 27, 111, 28): 'f'}, {}), '(geojson, f)', False, 'import json\n'), ((117, 24, 117, 36), 'json.load', 'json.load', ({(117, 34, 117, 35): 'f'}, {}), '(f)', False, 'import json\n')]
lidenghong1/SmallReptileTraining
AndroidSpider/spider_main.py
a1bfb81c9969edfb7554acc50370c0cb036da690
from AndroidSpider import url_manager, html_downloader, html_parser, html_output

'''
Crawl Baidu Baike for the keyword "Android": fetch related entries and their
summaries and output them as an HTML table page.
Extra module: BeautifulSoup
'''


class SpiderMain(object):
    def __init__(self):
        self.urls = url_manager.UrlManager()
        self.downloader = html_downloader.HtmlDownLoader()
        self.parser = html_parser.HtmlParser()
        self.out_put = html_output.HtmlOutput()

    def craw(self, root_url):
        count = 1
        self.urls.add_new_url(root_url)
        while self.urls.has_new_url():
            try:
                new_url = self.urls.get_new_url()
                print("craw %d : %s" % (count, new_url))
                headers = {
                    "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.100 Safari/537.36"
                }
                html_content = self.downloader.download(new_url, retry_count=2, headers=headers)
                new_urls, new_data = self.parser.parse(new_url, html_content, "utf-8")
                self.urls.add_new_urls(new_urls)
                self.out_put.collect_data(new_data)
                if count >= 30:
                    break
                count = count + 1
            except Exception as e:
                print("craw failed!\n" + str(e))
        self.out_put.output_html()


if __name__ == "__main__":
    rootUrl = "http://baike.baidu.com/item/Android"
    objSpider = SpiderMain()
    objSpider.craw(rootUrl)
[((11, 20, 11, 44), 'AndroidSpider.url_manager.UrlManager', 'url_manager.UrlManager', ({}, {}), '()', False, 'from AndroidSpider import url_manager, html_downloader, html_parser, html_output\n'), ((12, 26, 12, 58), 'AndroidSpider.html_downloader.HtmlDownLoader', 'html_downloader.HtmlDownLoader', ({}, {}), '()', False, 'from AndroidSpider import url_manager, html_downloader, html_parser, html_output\n'), ((13, 22, 13, 46), 'AndroidSpider.html_parser.HtmlParser', 'html_parser.HtmlParser', ({}, {}), '()', False, 'from AndroidSpider import url_manager, html_downloader, html_parser, html_output\n'), ((14, 23, 14, 47), 'AndroidSpider.html_output.HtmlOutput', 'html_output.HtmlOutput', ({}, {}), '()', False, 'from AndroidSpider import url_manager, html_downloader, html_parser, html_output\n')]
trompamusic/ce-queries-template
trompace/mutations/__init__.py
cc5ae69d0e76623bfd72e9453f569f6624bf7c3b
MUTATION = '''mutation {{ {mutation} }}'''


def _verify_additional_type(additionaltype):
    """Check that the input to additionaltype is a list of strings.
    If it is empty, raise ValueError.
    If it is a string, convert it to a list of strings."""
    if additionaltype is None:
        return None
    if isinstance(additionaltype, str):
        additionaltype = [additionaltype]
    if len(additionaltype) == 0:
        raise ValueError("additionaltype must be a non-empty list")
    return additionaltype
[]
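A quick usage sketch for _verify_additional_type above; the values are made up for illustration and are not part of the source module:

# Illustrative only: how the helper normalizes its input.
assert _verify_additional_type(None) is None
assert _verify_additional_type("CreativeWork") == ["CreativeWork"]   # str -> [str]
assert _verify_additional_type(["A", "B"]) == ["A", "B"]             # list passes through
try:
    _verify_additional_type([])        # empty list is rejected
except ValueError as err:
    print(err)                         # "additionaltype must be a non-empty list"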
CapitalOneDevExchangeHackathon/Financial-Fitness
Web_App/infrastructure/infra.py
54a2203d6b3d96687d822247b040613b644874f2
import boto
import boto3
from config import Config

dynamodb = boto3.resource('dynamodb',
                          aws_access_key_id=Config.AWS_KEY,
                          aws_secret_access_key=Config.AWS_SECRET_KEY,
                          region_name=Config.REGION)
table = dynamodb.Table('user_details')

tables = boto3.resource('dynamodb', aws_access_key_id=Config.AWS_KEY,
                        aws_secret_access_key=Config.AWS_SECRET_KEY,
                        region_name=Config.REGION).Table('user_details')
print(tables.creation_date_time)


def main():
    print("29.7604267")


def insert_into_db(user):
    print(user.lastname)
    try:
        table.put_item(
            Item={
                'pin': user.pin,
                'firstname': user.firstname,
                'lastname': user.lastname,
            }
        )
    except Exception as E:
        print(E)
        return False
    return True


if __name__ == "__main__":
    main()
[((5, 11, 8, 52), 'boto3.resource', 'boto3.resource', (), '', False, 'import boto3\n'), ((11, 9, 12, 97), 'boto3.resource', 'boto3.resource', (), '', False, 'import boto3\n')]
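A minimal usage sketch for insert_into_db above. The user stand-in is hypothetical (the function only reads pin, firstname and lastname), and actually running it needs valid AWS credentials in Config:

# Illustrative only: any object exposing pin/firstname/lastname works.
from types import SimpleNamespace

user = SimpleNamespace(pin='1234', firstname='Ada', lastname='Lovelace')
if insert_into_db(user):
    print('stored in DynamoDB table user_details')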
ndarwin314/symbolicPy
numberTheory/natural.py
ce2e48bf1557b5995db6c324ada9fbd4767df1e3
# TODO: implement algorithms in c++ or something to make them fast
[]
TeaPackCZ/RobotZed
SelfTests.py
7ac8bfb14a6c2e5887f8fed299ad87b384701c54
import os
import unittest
from Logger import Logger


class TestLogger(unittest.TestCase):
    def test_file_handling(self):
        testLog = Logger("testLog")
        ## Check if program can create and open file
        self.assertTrue(testLog.opened)
        returns = testLog.close()
        ## Check if logger correctly signs bool OPENED and returns
        ## 0 as success.
        self.assertFalse(testLog.opened)
        self.assertEqual(returns, 0)
        returns = testLog.close()
        ## Check if logger returns 1 when trying to close already
        ## closed file
        self.assertEqual(returns, 1)
        ## Do cleanup:
        os.remove(testLog.name)

    def test_logging(self):
        testLog = Logger("testLog")
        testPhrase = "TestLine\r\n"
        testLog.save_line(testPhrase)
        testLog.close()
        logfile = open(testLog.name)
        content = logfile.read()
        logfile.close()
        saved = content.split(" : ")
        ## Check if saved data corresponds
        self.assertEqual(saved[1], testPhrase)
        ## cleanup
        os.remove(testLog.name)


from gpsNavigation import gpsModule, gpsPoint


class TestGPSNavigation(unittest.TestCase):
    def test_gps_angles(self):
        gpsMod = gpsModule()

        A = gpsPoint(10, 10)
        B = gpsPoint(10.1, 10.1)
        distance, azimut = gpsMod.GPSData.getDirAndDist(A, B)
        self.assertEqual(distance, 15623.0)
        self.assertEqual(azimut, 45.0)

        B = gpsPoint(10.0, 10.1)
        distance, azimut = gpsMod.GPSData.getDirAndDist(A, B)
        self.assertEqual(distance, 10963.0)
        self.assertEqual(azimut, 90.0)

        B = gpsPoint(9.9, 10.1)
        distance, azimut = gpsMod.GPSData.getDirAndDist(A, B)
        self.assertEqual(distance, 15625.0)
        self.assertEqual(azimut, 135.0)

        B = gpsPoint(9.9, 10.0)
        distance, azimut = gpsMod.GPSData.getDirAndDist(A, B)
        self.assertEqual(distance, 11132.0)
        self.assertEqual(azimut, 180.0)

        B = gpsPoint(9.9, 9.9)
        distance, azimut = gpsMod.GPSData.getDirAndDist(A, B)
        self.assertEqual(distance, 15625.0)
        self.assertEqual(azimut, 225.0)

        B = gpsPoint(10.0, 9.9)
        distance, azimut = gpsMod.GPSData.getDirAndDist(A, B)
        self.assertEqual(distance, 10963.0)
        self.assertEqual(azimut, 270.0)

        B = gpsPoint(10.1, 9.9)
        distance, azimut = gpsMod.GPSData.getDirAndDist(A, B)
        self.assertEqual(distance, 15623.0)
        self.assertEqual(azimut, 315.0)

        B = gpsPoint(10.1, 10.0)
        distance, azimut = gpsMod.GPSData.getDirAndDist(A, B)
        self.assertEqual(distance, 11132.0)
        self.assertEqual(azimut, 0)


if __name__ == '__main__':
    unittest.main()
[((85, 4, 85, 19), 'unittest.main', 'unittest.main', ({}, {}), '()', False, 'import unittest\n'), ((7, 18, 7, 35), 'Logger.Logger', 'Logger', ({(7, 25, 7, 34): '"""testLog"""'}, {}), "('testLog')", False, 'from Logger import Logger\n'), ((20, 8, 20, 31), 'os.remove', 'os.remove', ({(20, 18, 20, 30): 'testLog.name'}, {}), '(testLog.name)', False, 'import os\n'), ((23, 18, 23, 35), 'Logger.Logger', 'Logger', ({(23, 25, 23, 34): '"""testLog"""'}, {}), "('testLog')", False, 'from Logger import Logger\n'), ((34, 8, 34, 31), 'os.remove', 'os.remove', ({(34, 18, 34, 30): 'testLog.name'}, {}), '(testLog.name)', False, 'import os\n'), ((39, 17, 39, 28), 'gpsNavigation.gpsModule', 'gpsModule', ({}, {}), '()', False, 'from gpsNavigation import gpsModule, gpsPoint\n'), ((41, 12, 41, 27), 'gpsNavigation.gpsPoint', 'gpsPoint', ({(41, 21, 41, 23): '10', (41, 24, 41, 26): '10'}, {}), '(10, 10)', False, 'from gpsNavigation import gpsModule, gpsPoint\n'), ((42, 12, 42, 31), 'gpsNavigation.gpsPoint', 'gpsPoint', ({(42, 21, 42, 25): '10.1', (42, 26, 42, 30): '10.1'}, {}), '(10.1, 10.1)', False, 'from gpsNavigation import gpsModule, gpsPoint\n'), ((48, 12, 48, 31), 'gpsNavigation.gpsPoint', 'gpsPoint', ({(48, 21, 48, 25): '10.0', (48, 26, 48, 30): '10.1'}, {}), '(10.0, 10.1)', False, 'from gpsNavigation import gpsModule, gpsPoint\n'), ((53, 12, 53, 30), 'gpsNavigation.gpsPoint', 'gpsPoint', ({(53, 21, 53, 24): '9.9', (53, 25, 53, 29): '10.1'}, {}), '(9.9, 10.1)', False, 'from gpsNavigation import gpsModule, gpsPoint\n'), ((58, 12, 58, 30), 'gpsNavigation.gpsPoint', 'gpsPoint', ({(58, 21, 58, 24): '9.9', (58, 25, 58, 29): '10.0'}, {}), '(9.9, 10.0)', False, 'from gpsNavigation import gpsModule, gpsPoint\n'), ((63, 12, 63, 29), 'gpsNavigation.gpsPoint', 'gpsPoint', ({(63, 21, 63, 24): '9.9', (63, 25, 63, 28): '9.9'}, {}), '(9.9, 9.9)', False, 'from gpsNavigation import gpsModule, gpsPoint\n'), ((68, 12, 68, 30), 'gpsNavigation.gpsPoint', 'gpsPoint', ({(68, 21, 68, 25): '10.0', (68, 26, 68, 29): '9.9'}, {}), '(10.0, 9.9)', False, 'from gpsNavigation import gpsModule, gpsPoint\n'), ((73, 12, 73, 30), 'gpsNavigation.gpsPoint', 'gpsPoint', ({(73, 21, 73, 25): '10.1', (73, 26, 73, 29): '9.9'}, {}), '(10.1, 9.9)', False, 'from gpsNavigation import gpsModule, gpsPoint\n'), ((78, 12, 78, 31), 'gpsNavigation.gpsPoint', 'gpsPoint', ({(78, 21, 78, 25): '10.1', (78, 26, 78, 30): '10.0'}, {}), '(10.1, 10.0)', False, 'from gpsNavigation import gpsModule, gpsPoint\n')]
Abijithkrishna/manga-py
manga_py/parser.py
03b142ecb944ef37a36e5095ffa580209021e3b0
from logging import warning

from requests import get

from .info import Info
from .provider import Provider
from .providers import get_provider


class Parser:
    def __init__(self, args: dict):
        self.params = args

    def init_provider(
            self,
            chapter_progress: callable = None,
            global_progress: callable = None,
            log: callable = None,
            quest: callable = None,
            info: Info = None,
            quest_password: callable = None,
    ):
        original_url = self.params.get('url', '')
        provider_url = self.params.get('force_provider', None)
        provider = get_provider(provider_url or original_url)
        if isinstance(provider, bool):
            raise AttributeError('Provider not found')

        # update url (if redirect)
        self.provider = provider(info)  # type: Provider
        self.provider.original_url = original_url
        real_url = self.check_url(original_url)
        if self.provider.allow_auto_change_url():
            if real_url != original_url:
                warning('Manga url changed! New url: {}'.format(real_url))
                self.params['url'] = real_url

        self.provider.quiet = self.params.get('quiet', False)

        self.provider.set_chapter_progress_callback(chapter_progress)
        self.provider.set_global_progress_callback(global_progress)
        self.provider.set_log_callback(log)
        self.provider.set_quest_callback(quest)
        self.provider.set_quest_password_callback(quest_password)

    def start(self):
        self.provider.process(self.params['url'], self.params)

    def check_url(self, url):
        proxy = self.params.get('proxy', None)
        proxies = {
            'http': proxy,
            'https': proxy,
        } if proxy else None

        with get(url, stream=True, proxies=proxies) as response:
            _url = response.url
            if url != _url:
                url = _url

        return url
[((62, 13, 62, 51), 'requests.get', 'get', (), '', False, 'from requests import get\n')]
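A hedged usage sketch for Parser above; the URL is a placeholder and which providers resolve depends on manga_py's providers package:

# Illustrative only: build a Parser, resolve a provider, run it.
params = {'url': 'https://example.com/manga/some-title', 'quiet': True}
parser = Parser(params)
parser.init_provider(log=print)   # any callable can serve as the log callback
parser.start()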
pwelzel/bornhack-website
src/villages/migrations/0008_auto_20161228_2209.py
af794e6a2fba06e09626259c7768feb30ff394be
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-28 22:09
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('villages', '0007_village_camp'),
    ]

    operations = [
        migrations.AlterField(
            model_name='village',
            name='camp',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='camps.Camp'),
        ),
    ]
[((19, 18, 19, 97), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n')]
sindhumadhadi09/CustomerMgmt
customers/views.py
db8b27ad6ceb8050843dc33509dc2b6c2ed2c1e2
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views import generic
from django.utils import timezone

from .models import Customer


class IndexView(generic.ListView):
    template_name = 'customers/index.html'
    context_object_name = 'customers_list'

    def get_queryset(self):
        return Customer.objects.all()


class CustomerView(generic.TemplateView):
    template_name = 'customers/detail.html'


def add_customer(request):
    customer = Customer()
    customer.customer_firstname = request.POST['fname']
    customer.customer_lastname = request.POST['lname']
    customer.customer_address = request.POST['address']
    customer.customer_city = request.POST['city']
    customer.customer_zipcode = request.POST['zip']
    customer.customer_state = request.POST['state']
    customer.save()
    return HttpResponseRedirect(reverse('customers:index'))


def delete_customer(request, customer_id):
    p = Customer.objects.get(pk=customer_id)
    p.delete()
    return HttpResponseRedirect(reverse('customers:index'))
[((28, 32, 28, 58), 'django.urls.reverse', 'reverse', ({(28, 40, 28, 57): '"""customers:index"""'}, {}), "('customers:index')", False, 'from django.urls import reverse\n'), ((33, 32, 33, 58), 'django.urls.reverse', 'reverse', ({(33, 40, 33, 57): '"""customers:index"""'}, {}), "('customers:index')", False, 'from django.urls import reverse\n')]
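The views above resolve reverse('customers:index'), which implies a URLconf along these lines; this is a plausible sketch, not the project's actual urls.py (the 'add' and 'delete' route names are assumptions):

# Hypothetical customers/urls.py consistent with the reverse() calls above.
from django.urls import path
from . import views

app_name = 'customers'
urlpatterns = [
    path('', views.IndexView.as_view(), name='index'),
    path('add/', views.add_customer, name='add'),
    path('<int:customer_id>/delete/', views.delete_customer, name='delete'),
]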
yuriks/salt
salt/ext/tornado/test/import_test.py
d2a5bd8adddb98ec1718d79384aa13b4f37e8028
# flake8: noqa
# pylint: skip-file
from __future__ import absolute_import, division, print_function

from salt.ext.tornado.test.util import unittest


class ImportTest(unittest.TestCase):
    def test_import_everything(self):
        # Some of our modules are not otherwise tested. Import them
        # all (unless they have external dependencies) here to at
        # least ensure that there are no syntax errors.
        import tornado.auth
        import tornado.autoreload
        import tornado.concurrent
        import tornado.escape
        import tornado.gen
        import tornado.http1connection
        import tornado.httpclient
        import tornado.httpserver
        import tornado.httputil
        import tornado.ioloop
        import tornado.iostream
        import tornado.locale
        import tornado.log
        import tornado.netutil
        import tornado.options
        import tornado.process
        import tornado.simple_httpclient
        import tornado.stack_context
        import tornado.tcpserver
        import tornado.tcpclient
        import tornado.template
        import tornado.testing
        import tornado.util
        import tornado.web
        import tornado.websocket
        import tornado.wsgi

    # for modules with dependencies, if those dependencies can be loaded,
    # load them too.
    def test_import_pycurl(self):
        try:
            import pycurl  # type: ignore
        except ImportError:
            pass
        else:
            import tornado.curl_httpclient
[]
fossabot/butterfree
butterfree/configs/db/metastore_config.py
8a7da8c540b51c6560b2825cb926c40a351f202b
"""Holds configurations to read and write with Spark to AWS S3."""

import os
from typing import Any, Dict, List, Optional

from pyspark.sql import DataFrame

from butterfree.configs import environment
from butterfree.configs.db import AbstractWriteConfig
from butterfree.dataframe_service import extract_partition_values


class MetastoreConfig(AbstractWriteConfig):
    """Configuration for Spark metastore database stored.

    By default the configuration is for AWS S3.

    Attributes:
        path: database root location.
        mode: writing mode used by writers.
        format_: expected stored file format.
        file_system: file schema uri, like: s3a, file.

    """

    def __init__(
        self,
        path: str = None,
        mode: str = None,
        format_: str = None,
        file_system: str = None,
    ):
        self.path = path
        self.mode = mode
        self.format_ = format_
        self.file_system = file_system

    @property
    def path(self) -> Optional[str]:
        """Bucket name."""
        return self.__path

    @path.setter
    def path(self, value: str) -> None:
        self.__path = value or environment.get_variable("FEATURE_STORE_S3_BUCKET")

    @property
    def format_(self) -> Optional[str]:
        """Expected stored file format."""
        return self.__format

    @format_.setter
    def format_(self, value: str) -> None:
        self.__format = value or "parquet"

    @property
    def mode(self) -> Optional[str]:
        """Writing mode used by writers."""
        return self.__mode

    @mode.setter
    def mode(self, value: str) -> None:
        self.__mode = value or "overwrite"

    @property
    def file_system(self) -> Optional[str]:
        """File schema URI used to build paths, e.g. s3a or file."""
        return self.__file_system

    @file_system.setter
    def file_system(self, value: str) -> None:
        self.__file_system = value or "s3a"

    def get_options(self, key: str) -> Dict[Optional[str], Optional[str]]:
        """Get options for Metastore.

        Options will be a dictionary with the write and read configuration
        for Spark Metastore.

        Args:
            key: path to save data into Metastore.

        Returns:
            Options configuration for Metastore.

        """
        return {
            "mode": self.mode,
            "format_": self.format_,
            "path": os.path.join(f"{self.file_system}://{self.path}/", key),
        }

    def get_path_with_partitions(self, key: str, dataframe: DataFrame) -> List:
        """Get options for AWS S3 from partitioned parquet file.

        Options will be a dictionary with the write and read configuration
        for Spark to AWS S3.

        Args:
            key: path to save data into AWS S3 bucket.
            dataframe: spark dataframe containing data from a feature set.

        Returns:
            A list of string for file-system backed data sources.

        """
        path_list = []
        dataframe_values = extract_partition_values(
            dataframe, partition_columns=["year", "month", "day"]
        )
        for row in dataframe_values:
            path_list.append(
                f"{self.file_system}://{self.path}/{key}/year={row['year']}/"
                f"month={row['month']}/day={row['day']}"
            )

        return path_list

    def translate(self, schema: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Translate feature set spark schema to the corresponding database."""
        pass
[((107, 27, 109, 9), 'butterfree.dataframe_service.extract_partition_values', 'extract_partition_values', (), '', False, 'from butterfree.dataframe_service import extract_partition_values\n'), ((45, 31, 45, 82), 'butterfree.configs.environment.get_variable', 'environment.get_variable', ({(45, 56, 45, 81): '"""FEATURE_STORE_S3_BUCKET"""'}, {}), "('FEATURE_STORE_S3_BUCKET')", False, 'from butterfree.configs import environment\n'), ((90, 20, 90, 75), 'os.path.join', 'os.path.join', ({(90, 33, 90, 69): 'f"""{self.file_system}://{self.path}/"""', (90, 71, 90, 74): 'key'}, {}), "(f'{self.file_system}://{self.path}/', key)", False, 'import os\n')]
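A short sketch of get_options above; the bucket name is a placeholder, and the defaults ('overwrite', 'parquet', 's3a') come from the property setters:

# Illustrative only: with an explicit bucket, get_options assembles the path.
config = MetastoreConfig(path="my-feature-bucket")
print(config.get_options("my_feature_set"))
# {'mode': 'overwrite', 'format_': 'parquet',
#  'path': 's3a://my-feature-bucket/my_feature_set'}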
johanngan/special_relativity
examples/2-objects.py
cd372c7460d2c0d4040c81bc1bd0090086dba735
#!/usr/bin/env python3
import sys
sys.path.append('..')

import specrel.geom as geom
import specrel.spacetime.physical as phy
import specrel.visualize as vis

# Shared parameters
include_grid = True
include_legend = True
tlim = (0, 2)
xlim = (-2, 2)

# A stationary point object
stationary = phy.MovingObject(0, draw_options={'label': '$v = 0$'})
## Alternate:
# direction = (1, 0)
# point = (0, 0)
# stationary = geom.Line(direction, point, draw_options={'label': '$v = 0$'})
title = 'Stationary object'
p = vis.stplot(stationary, title=title, tlim=tlim, xlim=xlim,
    grid=include_grid, legend=include_legend)
p.save('2-objects_stationary_point.png')
p.show()

# A stationary point object, animated
anim = vis.stanimate(stationary, title=title, tlim=tlim, xlim=xlim,
    grid=include_grid, legend=include_legend)
anim.save('2-objects_stationary_point_anim.mp4')
anim.show()

# A stationary point object, animated with worldline
anim = vis.stanimate_with_worldline(stationary, title=title,
    tlim=tlim, xlim=xlim, grid=include_grid, legend=include_legend,
    legend_loc='upper right')
anim.save('2-objects_stationary_point_anim_worldline.mp4')
anim.show()

# A bunch of moving point objects, animated
moving = phy.MovingObject(0, velocity=1/2,
    draw_options={'color': 'red', 'label': '$v = c/2$'})
light = phy.MovingObject(0, velocity=1,
    draw_options={'color': 'gold', 'label': '$v = c$'})
ftl = phy.MovingObject(0, velocity=3/2,
    draw_options={'color': 'cyan', 'label': '$v = 3c/2$'})
objects = geom.Collection([stationary, moving, light, ftl])
title = 'Various objects'
anim = vis.stanimate_with_worldline(objects, title=title,
    current_time_color='magenta', tlim=tlim, xlim=xlim,
    grid=include_grid, legend=include_legend, legend_loc='upper left')
anim.save('2-objects_moving_points.mp4')
anim.show()

# A moving meterstick
meterstick = phy.MovingObject(-1/2, length=1, velocity=1/2,
    draw_options={'label': 'Meterstick'})
# # Alternate:
# direction = (1, 1/2)
# left = geom.Line(direction, (0, -1/2))
# right = geom.Line(direction, (0, 1/2))
# meterstick = geom.Ribbon(left, right, draw_options={'label': 'Meterstick'})
title = 'Moving meterstick ($v = c/2$)'
anim = vis.stanimate_with_worldline(meterstick, title=title, tlim=tlim,
    xlim=xlim, grid=include_grid, legend=include_legend,
    legend_loc='upper left')
anim.save('2-objects_moving_meterstick.mp4')
anim.show()
[((3, 0, 3, 21), 'sys.path.append', 'sys.path.append', ({(3, 16, 3, 20): '""".."""'}, {}), "('..')", False, 'import sys\n'), ((16, 13, 16, 67), 'specrel.spacetime.physical.MovingObject', 'phy.MovingObject', (), '', True, 'import specrel.spacetime.physical as phy\n'), ((22, 4, 23, 45), 'specrel.visualize.stplot', 'vis.stplot', (), '', True, 'import specrel.visualize as vis\n'), ((28, 7, 29, 45), 'specrel.visualize.stanimate', 'vis.stanimate', (), '', True, 'import specrel.visualize as vis\n'), ((34, 7, 36, 29), 'specrel.visualize.stanimate_with_worldline', 'vis.stanimate_with_worldline', (), '', True, 'import specrel.visualize as vis\n'), ((41, 9, 42, 56), 'specrel.spacetime.physical.MovingObject', 'phy.MovingObject', (), '', True, 'import specrel.spacetime.physical as phy\n'), ((43, 8, 44, 55), 'specrel.spacetime.physical.MovingObject', 'phy.MovingObject', (), '', True, 'import specrel.spacetime.physical as phy\n'), ((45, 6, 46, 58), 'specrel.spacetime.physical.MovingObject', 'phy.MovingObject', (), '', True, 'import specrel.spacetime.physical as phy\n'), ((47, 10, 47, 59), 'specrel.geom.Collection', 'geom.Collection', ({(47, 26, 47, 58): '[stationary, moving, light, ftl]'}, {}), '([stationary, moving, light, ftl])', True, 'import specrel.geom as geom\n'), ((49, 7, 51, 51), 'specrel.visualize.stanimate_with_worldline', 'vis.stanimate_with_worldline', (), '', True, 'import specrel.visualize as vis\n'), ((56, 13, 57, 41), 'specrel.spacetime.physical.MovingObject', 'phy.MovingObject', (), '', True, 'import specrel.spacetime.physical as phy\n'), ((64, 7, 66, 28), 'specrel.visualize.stanimate_with_worldline', 'vis.stanimate_with_worldline', (), '', True, 'import specrel.visualize as vis\n')]
mfkiwl/OpenXcvr
firmware/modulator.py
9bea6efd03cd246f16982f0fadafed684ac5ce1c
from baremetal import *
from math import pi, sin, cos
import sys
from scale import scale
from settings import *
from ssb import ssb_polar


def modulator(clk, audio, audio_stb, settings):
    audio_bits = audio.subtype.bits

    # AM modulation
    am_mag = Unsigned(12).constant(0) + audio + 2048
    am_phase = Signed(32).constant(0)
    am_stb = audio_stb

    # FM modulation
    fm_mag = Unsigned(12).constant(4095)
    frequency = Signed(32).constant(0) + audio
    nfm_scaled_frequency = frequency * (2**(32-audio_bits) * 5 / 50)
    nfm_phase = nfm_scaled_frequency.subtype.register(clk, en=audio_stb, init=0)
    nfm_phase.d(nfm_phase + nfm_scaled_frequency)
    scaled_frequency = frequency * (2**(32-audio_bits) * 8 / 50)
    fm_phase = scaled_frequency.subtype.register(clk, en=audio_stb, init=0)
    fm_phase.d(fm_phase + scaled_frequency)
    fm_stb = Boolean().register(clk, d=audio_stb, init=0)

    # ssb
    ssb_mag, ssb_phase, ssb_stb = ssb_polar(clk, audio, audio_stb, settings.mode == LSB)
    ssb_mag <<= 1
    ssb_phase = Signed(32).constant(0) + ssb_phase
    ssb_phase <<= (32 - audio_bits)

    # cw modulation
    cw_mag = Unsigned(12).constant(0)
    cw_phase = Signed(32).constant(0)
    cw_stb = audio_stb

    # mode switching
    magnitude = Unsigned(12).select(settings.mode, am_mag, fm_mag, fm_mag, ssb_mag, ssb_mag, cw_mag)
    phase = Signed(32).select(settings.mode, am_phase, nfm_phase, fm_phase, ssb_phase, ssb_phase, cw_phase)
    stb = Boolean().select(settings.mode, am_stb, fm_stb, fm_stb, ssb_stb, ssb_stb, cw_stb)

    return magnitude, phase, audio_stb


import numpy as np
from matplotlib import pyplot as plt


def test_modulator(stimulus, mode):
    settings = Settings()
    settings.mode = Unsigned(3).input("filter_mode")
    clk = Clock("clk")
    audio_in = Signed(12).input("i_data_in")
    audio_stb_in = Boolean().input("stb_in")
    i, q, stb = modulator(clk, audio_in, audio_stb_in, settings)

    # simulate
    clk.initialise()
    settings.mode.set(mode)

    response = []
    for data in stimulus:
        for j in range(200):
            audio_stb_in.set(j == 199)
            audio_in.set(data)
            clk.tick()
            if stb.get():
                print(i.get(), q.get())
                if i.get() is None or q.get() is None:
                    continue
                response.append(i.get()*(2**20) + 1j*q.get())

    response = np.array(response)
    plt.title("Modulator")
    plt.xlabel("Time (samples)")
    plt.ylabel("Value")
    a, = plt.plot(np.real(response), label="I")
    b, = plt.plot(np.imag(response), label="Q")
    c, = plt.plot(stimulus*(2**20), label="Audio Input")
    plt.legend(handles=[a, b, c])
    plt.show()


if __name__ == "__main__" and "sim" in sys.argv:

    # mode am stim am
    stimulus = (
        np.sin(np.arange(1000)*2.0*pi*0.02)*1023 +
        np.sin(np.arange(1000)*2.0*pi*0.03)*1023
    )

    # test_modulator(stimulus, FM)
    # test_modulator(stimulus, FM)
    # test_modulator(stimulus, NBFM)
    test_modulator(stimulus, USB)
[]
pierredup/sentry
tests/sentry/auth/test_helper.py
0145e4b3bc0e775bf3482fe65f5e1a689d0dbb80
from __future__ import absolute_import

from six.moves.urllib.parse import urlencode

from django.test import RequestFactory
from django.contrib.auth.models import AnonymousUser

from sentry.auth.helper import handle_new_user
from sentry.models import AuthProvider, InviteStatus, OrganizationMember
from sentry.testutils import TestCase
from sentry.utils.compat import mock


class HandleNewUserTest(TestCase):
    @mock.patch("sentry.analytics.record")
    def test_simple(self, mock_record):
        provider = "dummy"
        request = RequestFactory().post("/auth/sso/")
        request.user = AnonymousUser()

        auth_provider = AuthProvider.objects.create(
            organization=self.organization, provider=provider
        )
        identity = {"id": "1234", "email": "[email protected]", "name": "Morty"}

        auth_identity = handle_new_user(auth_provider, self.organization, request, identity)
        user = auth_identity.user

        assert user.email == identity["email"]
        assert OrganizationMember.objects.filter(organization=self.organization, user=user).exists()

        signup_record = [r for r in mock_record.call_args_list if r[0][0] == "user.signup"]
        assert signup_record == [
            mock.call(
                "user.signup", user_id=user.id, source="sso", provider=provider, referrer="in-app"
            )
        ]

    def test_associated_existing_member_invite_by_email(self):
        request = RequestFactory().post("/auth/sso/")
        request.user = AnonymousUser()

        provider = AuthProvider.objects.create(organization=self.organization, provider="dummy")
        identity = {"id": "1234", "email": "[email protected]", "name": "Morty"}

        member = OrganizationMember.objects.create(
            organization=self.organization, email=identity["email"]
        )

        auth_identity = handle_new_user(provider, self.organization, request, identity)

        assigned_member = OrganizationMember.objects.get(
            organization=self.organization, user=auth_identity.user
        )

        assert assigned_member.id == member.id

    def test_associated_existing_member_invite_request(self):
        request = RequestFactory().post("/auth/sso/")
        request.user = AnonymousUser()

        provider = AuthProvider.objects.create(organization=self.organization, provider="dummy")
        identity = {"id": "1234", "email": "[email protected]", "name": "Morty"}

        member = self.create_member(
            organization=self.organization,
            email=identity["email"],
            invite_status=InviteStatus.REQUESTED_TO_BE_INVITED.value,
        )

        auth_identity = handle_new_user(provider, self.organization, request, identity)

        assert OrganizationMember.objects.filter(
            organization=self.organization,
            user=auth_identity.user,
            invite_status=InviteStatus.APPROVED.value,
        ).exists()

        assert not OrganizationMember.objects.filter(id=member.id).exists()

    def test_associate_pending_invite(self):
        provider = AuthProvider.objects.create(organization=self.organization, provider="dummy")
        identity = {"id": "1234", "email": "[email protected]", "name": "Morty"}

        # The org member invite should have a non matching email, but the
        # member id and token will match from the cookie, allowing association
        member = OrganizationMember.objects.create(
            organization=self.organization, email="[email protected]", token="abc"
        )

        request = RequestFactory().post("/auth/sso/")
        request.user = AnonymousUser()
        request.COOKIES["pending-invite"] = urlencode(
            {"memberId": member.id, "token": member.token, "url": ""}
        )

        auth_identity = handle_new_user(provider, self.organization, request, identity)

        assigned_member = OrganizationMember.objects.get(
            organization=self.organization, user=auth_identity.user
        )

        assert assigned_member.id == member.id
[((14, 5, 14, 42), 'sentry.utils.compat.mock.patch', 'mock.patch', ({(14, 16, 14, 41): '"""sentry.analytics.record"""'}, {}), "('sentry.analytics.record')", False, 'from sentry.utils.compat import mock\n'), ((18, 23, 18, 38), 'django.contrib.auth.models.AnonymousUser', 'AnonymousUser', ({}, {}), '()', False, 'from django.contrib.auth.models import AnonymousUser\n'), ((20, 24, 22, 9), 'sentry.models.AuthProvider.objects.create', 'AuthProvider.objects.create', (), '', False, 'from sentry.models import AuthProvider, InviteStatus, OrganizationMember\n'), ((25, 24, 25, 92), 'sentry.auth.helper.handle_new_user', 'handle_new_user', ({(25, 40, 25, 53): 'auth_provider', (25, 55, 25, 72): 'self.organization', (25, 74, 25, 81): 'request', (25, 83, 25, 91): 'identity'}, {}), '(auth_provider, self.organization, request, identity)', False, 'from sentry.auth.helper import handle_new_user\n'), ((40, 23, 40, 38), 'django.contrib.auth.models.AnonymousUser', 'AnonymousUser', ({}, {}), '()', False, 'from django.contrib.auth.models import AnonymousUser\n'), ((42, 19, 42, 96), 'sentry.models.AuthProvider.objects.create', 'AuthProvider.objects.create', (), '', False, 'from sentry.models import AuthProvider, InviteStatus, OrganizationMember\n'), ((45, 17, 47, 9), 'sentry.models.OrganizationMember.objects.create', 'OrganizationMember.objects.create', (), '', False, 'from sentry.models import AuthProvider, InviteStatus, OrganizationMember\n'), ((49, 24, 49, 87), 'sentry.auth.helper.handle_new_user', 'handle_new_user', ({(49, 40, 49, 48): 'provider', (49, 50, 49, 67): 'self.organization', (49, 69, 49, 76): 'request', (49, 78, 49, 86): 'identity'}, {}), '(provider, self.organization, request, identity)', False, 'from sentry.auth.helper import handle_new_user\n'), ((51, 26, 53, 9), 'sentry.models.OrganizationMember.objects.get', 'OrganizationMember.objects.get', (), '', False, 'from sentry.models import AuthProvider, InviteStatus, OrganizationMember\n'), ((59, 23, 59, 38), 'django.contrib.auth.models.AnonymousUser', 'AnonymousUser', ({}, {}), '()', False, 'from django.contrib.auth.models import AnonymousUser\n'), ((61, 19, 61, 96), 'sentry.models.AuthProvider.objects.create', 'AuthProvider.objects.create', (), '', False, 'from sentry.models import AuthProvider, InviteStatus, OrganizationMember\n'), ((70, 24, 70, 87), 'sentry.auth.helper.handle_new_user', 'handle_new_user', ({(70, 40, 70, 48): 'provider', (70, 50, 70, 67): 'self.organization', (70, 69, 70, 76): 'request', (70, 78, 70, 86): 'identity'}, {}), '(provider, self.organization, request, identity)', False, 'from sentry.auth.helper import handle_new_user\n'), ((81, 19, 81, 96), 'sentry.models.AuthProvider.objects.create', 'AuthProvider.objects.create', (), '', False, 'from sentry.models import AuthProvider, InviteStatus, OrganizationMember\n'), ((86, 17, 88, 9), 'sentry.models.OrganizationMember.objects.create', 'OrganizationMember.objects.create', (), '', False, 'from sentry.models import AuthProvider, InviteStatus, OrganizationMember\n'), ((91, 23, 91, 38), 'django.contrib.auth.models.AnonymousUser', 'AnonymousUser', ({}, {}), '()', False, 'from django.contrib.auth.models import AnonymousUser\n'), ((92, 44, 94, 9), 'six.moves.urllib.parse.urlencode', 'urlencode', ({(93, 12, 93, 69): "{'memberId': member.id, 'token': member.token, 'url': ''}"}, {}), "({'memberId': member.id, 'token': member.token, 'url': ''})", False, 'from six.moves.urllib.parse import urlencode\n'), ((96, 24, 96, 87), 'sentry.auth.helper.handle_new_user', 'handle_new_user', ({(96, 40, 96, 48): 'provider', (96, 50, 96, 67): 'self.organization', (96, 69, 96, 76): 'request', (96, 78, 96, 86): 'identity'}, {}), '(provider, self.organization, request, identity)', False, 'from sentry.auth.helper import handle_new_user\n'), ((98, 26, 100, 9), 'sentry.models.OrganizationMember.objects.get', 'OrganizationMember.objects.get', (), '', False, 'from sentry.models import AuthProvider, InviteStatus, OrganizationMember\n'), ((17, 18, 17, 34), 'django.test.RequestFactory', 'RequestFactory', ({}, {}), '()', False, 'from django.test import RequestFactory\n'), ((29, 15, 29, 91), 'sentry.models.OrganizationMember.objects.filter', 'OrganizationMember.objects.filter', (), '', False, 'from sentry.models import AuthProvider, InviteStatus, OrganizationMember\n'), ((33, 12, 35, 13), 'sentry.utils.compat.mock.call', 'mock.call', (), '', False, 'from sentry.utils.compat import mock\n'), ((39, 18, 39, 34), 'django.test.RequestFactory', 'RequestFactory', ({}, {}), '()', False, 'from django.test import RequestFactory\n'), ((58, 18, 58, 34), 'django.test.RequestFactory', 'RequestFactory', ({}, {}), '()', False, 'from django.test import RequestFactory\n'), ((72, 15, 76, 9), 'sentry.models.OrganizationMember.objects.filter', 'OrganizationMember.objects.filter', (), '', False, 'from sentry.models import AuthProvider, InviteStatus, OrganizationMember\n'), ((90, 18, 90, 34), 'django.test.RequestFactory', 'RequestFactory', ({}, {}), '()', False, 'from django.test import RequestFactory\n'), ((78, 19, 78, 66), 'sentry.models.OrganizationMember.objects.filter', 'OrganizationMember.objects.filter', (), '', False, 'from sentry.models import AuthProvider, InviteStatus, OrganizationMember\n')]
richo/groundstation
groundstation/broadcast_events/__init__.py
7ed48dd355051ee6b71164fc801e3893c09d11db
from broadcast_ping import BroadcastPing

EVENT_TYPES = {
    "PING": BroadcastPing,
}


class UnknownBroadcastEvent(Exception):
    pass


def new_broadcast_event(data):
    event_type, payload = data.split(" ", 1)
    if event_type not in EVENT_TYPES:
        raise UnknownBroadcastEvent(event_type)
    return EVENT_TYPES[event_type](payload)
[]
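A quick illustrative call for new_broadcast_event above (the payload string is made up):

# Illustrative only: "PING" dispatches to BroadcastPing; unknown types raise.
event = new_broadcast_event("PING node-1234")   # -> BroadcastPing("node-1234")
try:
    new_broadcast_event("BOGUS payload")
except UnknownBroadcastEvent as err:
    print("unknown event type:", err)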
dougzor/mbta_python
mbta_python/__init__.py
f277f48f8bf8048cb5c9c6307e672c37292e57f7
import datetime

import requests

from mbta_python.models import Stop, Direction, Schedule, Mode, \
    TripSchedule, Alert, StopWithMode, Prediction

HOST = "http://realtime.mbta.com/developer/api/v2"


def datetime_to_epoch(dt):
    epoch = datetime.datetime.utcfromtimestamp(0)
    return int((dt - epoch).total_seconds())


class MBTASDK(object):
    """Wrapper around calls to the MBTA Realtime API"""
    def __init__(self, api_key):
        self.api_key = api_key

    def _make_request(self, path, params):
        url = "{}/{}".format(HOST, path)
        response = requests.get(url, params=params)

        data = response.json()
        error = data.get("error")
        if error:
            raise Exception(error["message"])

        return response.json()

    def get_stops_by_location(self, latitude, longitude):
        """Get a List of Stops sorted by proximity to the given
        latitude and longitude.
        """
        params = {
            "lat": latitude,
            "lon": longitude,
            "api_key": self.api_key,
            "format": "json"
        }

        data = self._make_request("stopsbylocation", params)
        stops = [Stop(stop_data) for stop_data in data["stop"]]
        return stops

    def get_stops_by_route(self, route_id):
        """Return a List of Directions for the route_id that contain
        a list of Stops that Direction and Route serve.
        """
        params = {
            "route": route_id,
            "api_key": self.api_key,
            "format": "json"
        }

        data = self._make_request("stopsbyroute", params)
        return [Direction(d) for d in data["direction"]]

    def get_routes_by_stop(self, stop_id):
        """Return a list of routes that serve a particular stop."""
        params = {
            "stop": stop_id,
            "api_key": self.api_key,
            "format": "json"
        }

        data = self._make_request("routesbystop", params)
        return StopWithMode(data)

    def get_schedules_by_stop(self, stop_id, route_id=None, direction_id=None,
                              date=None, max_time=None, max_trips=None):
        """Return scheduled arrivals and departures for a direction
        and route for a particular stop.

        stop_id - Stop ID
        route_id - Route ID. If not included then the schedule for all
            routes serving the stop will be returned.
        direction_id - Direction ID. If included then route must also be
            included; if not included then the schedule for all directions
            of the route serving the stop will be returned.
        date - Time after which the schedule should be returned. If included
            then it must be within the next seven (7) days. If not included
            then the schedule starting from the current datetime will be
            returned.
        max_time - Defines the maximum range of time (in minutes) within
            which trips will be returned. If not included defaults to 60.
        max_trips - Defines the number of trips to return. Integer between
            1 and 100. If not included defaults to 5.
        """
        params = {
            "stop": stop_id,
            "api_key": self.api_key,
            "format": "json",
            "route": route_id,
            "direction": direction_id,
            "datetime": datetime_to_epoch(date) if date else None,
            "max_time": max_time,
            "max_trips": max_trips
        }

        data = self._make_request("schedulebystop", params)
        return Schedule(data)

    def get_schedules_by_routes(self, route_ids, date=None,
                                max_time=None, max_trips=None):
        """Return the scheduled arrivals and departures in a direction
        for a particular route or routes.

        route_ids - List of Route IDs, or a single Route ID
        date - Time after which the schedule should be returned. If included
            then it must be within the next seven (7) days. If not included
            then the schedule starting from the current datetime will be
            returned.
        max_time - Defines the maximum range of time (in minutes) within
            which trips will be returned. If not included defaults to 60.
        max_trips - Defines the number of trips to return. Integer between
            1 and 100. If not included defaults to 5.
        """
        if not isinstance(route_ids, list):
            route_ids = [route_ids]

        params = {
            "routes": ",".join(route_ids),
            "api_key": self.api_key,
            "format": "json",
            "datetime": datetime_to_epoch(date) if date else None,
            "max_time": max_time,
            "max_trips": max_trips
        }

        data = self._make_request("schedulebyroutes", params)
        return [Mode(m) for m in data["mode"]]

    def get_schedules_by_trip(self, trip_id, date=None):
        """Return the scheduled arrivals and departures for a
        particular trip.

        trip_id - Trip ID
        date - Time after which the schedule should be returned. If included
            then it must be within the next seven (7) days. If not included
            then the schedule starting from the current datetime will be
            returned.
        """
        params = {
            "trip": trip_id,
            "api_key": self.api_key,
            "format": "json",
            "datetime": datetime_to_epoch(date) if date else None,
        }

        data = self._make_request("schedulebytrip", params)
        return TripSchedule(data)

    def get_predictions_by_stop(self, stop_id, include_access_alerts=False,
                                include_service_alerts=True):
        """Return predicted arrivals and departures in the next hour
        for a direction and route for a particular stop.

        stop_id - Stop ID
        include_access_alerts - Whether or not alerts pertaining to
            accessibility (elevators, escalators) should be returned
        include_service_alerts - Whether or not service alerts should
            be returned
        """
        params = {
            "stop": stop_id,
            "api_key": self.api_key,
            "format": "json",
            "include_access_alerts": include_access_alerts,
            "include_service_alerts": include_service_alerts
        }

        data = self._make_request("predictionsbystop", params)
        return Prediction(data)

    def get_predictions_by_routes(self, route_ids, include_access_alerts=False,
                                  include_service_alerts=True):
        """Return predictions for upcoming trips (including trips already
        underway) in a direction for a particular route or routes.

        route_ids - List of Route IDs, or a single Route ID
        include_access_alerts - Whether or not alerts pertaining to
            accessibility (elevators, escalators) should be returned
        include_service_alerts - Whether or not service alerts should
            be returned
        """
        if not isinstance(route_ids, list):
            route_ids = [route_ids]

        params = {
            "routes": ",".join(route_ids),
            "api_key": self.api_key,
            "format": "json",
            "include_access_alerts": include_access_alerts,
            "include_service_alerts": include_service_alerts
        }

        data = self._make_request("predictionsbyroutes", params)
        return Prediction(data)

    def get_vehicles_by_routes(self, route_ids, include_access_alerts=False,
                               include_service_alerts=True):
        """Return vehicle positions for upcoming trips (including trips
        already underway) in a direction for a particular route or routes.

        route_ids - List of Route IDs, or a single Route ID
        include_access_alerts - Whether or not alerts pertaining to
            accessibility (elevators, escalators) should be returned
        include_service_alerts - Whether or not service alerts should
            be returned
        """
        if not isinstance(route_ids, list):
            route_ids = [route_ids]

        params = {
            "routes": ",".join(route_ids),
            "api_key": self.api_key,
            "format": "json",
            "include_access_alerts": include_access_alerts,
            "include_service_alerts": include_service_alerts
        }

        data = self._make_request("vehiclesbyroutes", params)
        return [Mode(m) for m in data]

    def get_predictions_by_trip(self, trip_id):
        """Return the predicted arrivals and departures for a particular trip.

        trip_id - Trip ID
        """
        params = {
            "trip": trip_id,
            "api_key": self.api_key,
            "format": "json"
        }

        data = self._make_request("predictionsbytrip", params)
        return TripSchedule(data)

    def get_vehicles_by_trip(self, trip_id):
        """Return the predicted vehicle positions for a particular trip.

        trip_id - Trip ID
        """
        params = {
            "trip": trip_id,
            "api_key": self.api_key,
            "format": "json"
        }

        data = self._make_request("vehiclesbytrip", params)
        return TripSchedule(data)
[((11, 12, 11, 49), 'datetime.datetime.utcfromtimestamp', 'datetime.datetime.utcfromtimestamp', ({(11, 47, 11, 48): '0'}, {}), '(0)', False, 'import datetime\n'), ((23, 19, 23, 51), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((74, 15, 74, 33), 'mbta_python.models.StopWithMode', 'StopWithMode', ({(74, 28, 74, 32): 'data'}, {}), '(data)', False, 'from mbta_python.models import Stop, Direction, Schedule, Mode, TripSchedule, Alert, StopWithMode, Prediction\n'), ((110, 15, 110, 29), 'mbta_python.models.Schedule', 'Schedule', ({(110, 24, 110, 28): 'data'}, {}), '(data)', False, 'from mbta_python.models import Stop, Direction, Schedule, Mode, TripSchedule, Alert, StopWithMode, Prediction\n'), ((163, 15, 163, 33), 'mbta_python.models.TripSchedule', 'TripSchedule', ({(163, 28, 163, 32): 'data'}, {}), '(data)', False, 'from mbta_python.models import Stop, Direction, Schedule, Mode, TripSchedule, Alert, StopWithMode, Prediction\n'), ((186, 15, 186, 31), 'mbta_python.models.Prediction', 'Prediction', ({(186, 26, 186, 30): 'data'}, {}), '(data)', False, 'from mbta_python.models import Stop, Direction, Schedule, Mode, TripSchedule, Alert, StopWithMode, Prediction\n'), ((212, 15, 212, 31), 'mbta_python.models.Prediction', 'Prediction', ({(212, 26, 212, 30): 'data'}, {}), '(data)', False, 'from mbta_python.models import Stop, Direction, Schedule, Mode, TripSchedule, Alert, StopWithMode, Prediction\n'), ((251, 15, 251, 33), 'mbta_python.models.TripSchedule', 'TripSchedule', ({(251, 28, 251, 32): 'data'}, {}), '(data)', False, 'from mbta_python.models import Stop, Direction, Schedule, Mode, TripSchedule, Alert, StopWithMode, Prediction\n'), ((264, 15, 264, 33), 'mbta_python.models.TripSchedule', 'TripSchedule', ({(264, 28, 264, 32): 'data'}, {}), '(data)', False, 'from mbta_python.models import Stop, Direction, Schedule, Mode, TripSchedule, Alert, StopWithMode, Prediction\n'), ((46, 17, 46, 32), 'mbta_python.models.Stop', 'Stop', ({(46, 22, 46, 31): 'stop_data'}, {}), '(stop_data)', False, 'from mbta_python.models import Stop, Direction, Schedule, Mode, TripSchedule, Alert, StopWithMode, Prediction\n'), ((62, 16, 62, 28), 'mbta_python.models.Direction', 'Direction', ({(62, 26, 62, 27): 'd'}, {}), '(d)', False, 'from mbta_python.models import Stop, Direction, Schedule, Mode, TripSchedule, Alert, StopWithMode, Prediction\n'), ((140, 16, 140, 23), 'mbta_python.models.Mode', 'Mode', ({(140, 21, 140, 22): 'm'}, {}), '(m)', False, 'from mbta_python.models import Stop, Direction, Schedule, Mode, TripSchedule, Alert, StopWithMode, Prediction\n'), ((238, 16, 238, 23), 'mbta_python.models.Mode', 'Mode', ({(238, 21, 238, 22): 'm'}, {}), '(m)', False, 'from mbta_python.models import Stop, Direction, Schedule, Mode, TripSchedule, Alert, StopWithMode, Prediction\n')]
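A hedged usage sketch for the SDK above; the API key and coordinates are placeholders, and the call performs a live HTTP request against the v2 API:

# Illustrative only: find stops near downtown Boston.
sdk = MBTASDK("YOUR-API-KEY")
stops = sdk.get_stops_by_location(42.3601, -71.0589)
for stop in stops[:5]:
    print(stop)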
piotrwinkler/breast_density_classifier
density_model_torch_custom.py
4d47dd98bb0a839cea8b9aef242f5af5db84f06f
import argparse
import glob
import os

import numpy as np
import torch
from sklearn.metrics import accuracy_score

import models_torch as models
import utils

EXPERIMENT_DATA_DIR = "/tmp/mgr"


def inference(parameters, verbose=True) -> int:
    # resolve device
    device = torch.device(
        "cuda:{}".format(parameters["gpu_number"]) if parameters["device_type"] == "gpu" else "cpu"
    )

    # load input images
    datum_l_cc = utils.load_images(parameters['image_path'], 'L-CC')
    datum_r_cc = utils.load_images(parameters['image_path'], 'R-CC')
    datum_l_mlo = utils.load_images(parameters['image_path'], 'L-MLO')
    datum_r_mlo = utils.load_images(parameters['image_path'], 'R-MLO')

    # construct models and prepare data
    if parameters["model_type"] == 'cnn':
        model = models.BaselineBreastModel(device, nodropout_probability=1.0, gaussian_noise_std=0.0).to(device)
        model.load_state_dict(torch.load(parameters["model_path"]))
        x = {
            "L-CC": torch.Tensor(datum_l_cc).permute(0, 3, 1, 2).to(device),
            "L-MLO": torch.Tensor(datum_l_mlo).permute(0, 3, 1, 2).to(device),
            "R-CC": torch.Tensor(datum_r_cc).permute(0, 3, 1, 2).to(device),
            "R-MLO": torch.Tensor(datum_r_mlo).permute(0, 3, 1, 2).to(device),
        }
    elif parameters["model_type"] == 'histogram':
        model = models.BaselineHistogramModel(num_bins=parameters["bins_histogram"]).to(device)
        model.load_state_dict(torch.load(parameters["model_path"]))
        x = torch.Tensor(utils.histogram_features_generator([
            datum_l_cc, datum_r_cc, datum_l_mlo, datum_r_mlo
        ], parameters)).to(device)
    else:
        raise RuntimeError(parameters["model_type"])

    # run prediction
    with torch.no_grad():
        prediction_density = model(x).cpu().numpy()

    if verbose:
        # nicely prints out the predictions
        print('Density prediction:\n'
              '\tAlmost entirely fatty (0):\t\t\t' + str(prediction_density[0, 0]) + '\n'
              '\tScattered areas of fibroglandular density (1):\t' + str(prediction_density[0, 1]) + '\n'
              '\tHeterogeneously dense (2):\t\t\t' + str(prediction_density[0, 2]) + '\n'
              '\tExtremely dense (3):\t\t\t\t' + str(prediction_density[0, 3]) + '\n')

    return np.argmax(prediction_density[0]) + 1  # return density in scope 1 to 4


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Run Inference')
    parser.add_argument('model_type')
    parser.add_argument('--bins-histogram', default=50)
    parser.add_argument('--model-path', default=None)
    parser.add_argument('--device-type', default="cpu")
    # parser.add_argument('--image-path', default="images/")
    args = parser.parse_args()

    parameters_ = {
        "model_type": args.model_type,
        "bins_histogram": args.bins_histogram,
        "model_path": args.model_path,
        "device_type": args.device_type,
        # "image_path": args.image_path,
    }

    if parameters_["model_path"] is None:
        if args.model_type == "histogram":
            parameters_["model_path"] = "saved_models/BreastDensity_BaselineHistogramModel/model.p"
        if args.model_type == "cnn":
            parameters_["model_path"] = "saved_models/BreastDensity_BaselineBreastModel/model.p"

    predicted_values = []
    real_values = []
    predicted_values_two_classes = []
    real_values_two_classes = []
    two_classes_mapping = {1: 0, 2: 0, 3: 1, 4: 1}
    for dir in glob.glob(f"{EXPERIMENT_DATA_DIR}/*/"):
        parameters_["image_path"] = dir
        predicted_density = inference(parameters_)

        with open(os.path.join(dir, "density.txt")) as file:
            real_density = int(file.read())

        print(f"Predicted density: {predicted_density}")
        print(f"Real density: {real_density}\n")

        print(f"Predicted density (2 cls): {two_classes_mapping[predicted_density]}")
        print(f"Real density (2 cls): {two_classes_mapping[real_density]}\n")

        predicted_values.append(predicted_density)
        real_values.append(real_density)

        predicted_values_two_classes.append(two_classes_mapping[predicted_density])
        real_values_two_classes.append(two_classes_mapping[real_density])

    print(f"Total accuracy: {accuracy_score(real_values, predicted_values)}")
    print(f"Total accuracy two classes: {accuracy_score(real_values_two_classes, predicted_values_two_classes)}")

"""
python density_model_torch_custom.py histogram
python density_model_torch_custom.py cnn
"""
[((25, 17, 25, 68), 'utils.load_images', 'utils.load_images', ({(25, 35, 25, 59): "parameters['image_path']", (25, 61, 25, 67): '"""L-CC"""'}, {}), "(parameters['image_path'], 'L-CC')", False, 'import utils\n'), ((26, 17, 26, 68), 'utils.load_images', 'utils.load_images', ({(26, 35, 26, 59): "parameters['image_path']", (26, 61, 26, 67): '"""R-CC"""'}, {}), "(parameters['image_path'], 'R-CC')", False, 'import utils\n'), ((27, 18, 27, 70), 'utils.load_images', 'utils.load_images', ({(27, 36, 27, 60): "parameters['image_path']", (27, 62, 27, 69): '"""L-MLO"""'}, {}), "(parameters['image_path'], 'L-MLO')", False, 'import utils\n'), ((28, 18, 28, 70), 'utils.load_images', 'utils.load_images', ({(28, 36, 28, 60): "parameters['image_path']", (28, 62, 28, 69): '"""R-MLO"""'}, {}), "(parameters['image_path'], 'R-MLO')", False, 'import utils\n'), ((66, 13, 66, 65), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((96, 15, 96, 53), 'glob.glob', 'glob.glob', ({(96, 25, 96, 52): 'f"""{EXPERIMENT_DATA_DIR}/*/"""'}, {}), "(f'{EXPERIMENT_DATA_DIR}/*/')", False, 'import glob\n'), ((50, 9, 50, 24), 'torch.no_grad', 'torch.no_grad', ({}, {}), '()', False, 'import torch\n'), ((61, 11, 61, 43), 'numpy.argmax', 'np.argmax', ({(61, 21, 61, 42): 'prediction_density[0]'}, {}), '(prediction_density[0])', True, 'import numpy as np\n'), ((33, 30, 33, 66), 'torch.load', 'torch.load', ({(33, 41, 33, 65): "parameters['model_path']"}, {}), "(parameters['model_path'])", False, 'import torch\n'), ((32, 16, 32, 101), 'models_torch.BaselineBreastModel', 'models.BaselineBreastModel', (), '', True, 'import models_torch as models\n'), ((42, 30, 42, 66), 'torch.load', 'torch.load', ({(42, 41, 42, 65): "parameters['model_path']"}, {}), "(parameters['model_path'])", False, 'import torch\n'), ((100, 18, 100, 50), 'os.path.join', 'os.path.join', ({(100, 31, 100, 34): 'dir', (100, 36, 100, 49): '"""density.txt"""'}, {}), "(dir, 'density.txt')", False, 'import os\n'), ((114, 29, 114, 74), 'sklearn.metrics.accuracy_score', 'accuracy_score', ({(114, 44, 114, 55): 'real_values', (114, 57, 114, 73): 'predicted_values'}, {}), '(real_values, predicted_values)', False, 'from sklearn.metrics import accuracy_score\n'), ((115, 41, 115, 110), 'sklearn.metrics.accuracy_score', 'accuracy_score', ({(115, 56, 115, 79): 'real_values_two_classes', (115, 81, 115, 109): 'predicted_values_two_classes'}, {}), '(real_values_two_classes, predicted_values_two_classes)', False, 'from sklearn.metrics import accuracy_score\n'), ((41, 16, 41, 84), 'models_torch.BaselineHistogramModel', 'models.BaselineHistogramModel', (), '', True, 'import models_torch as models\n'), ((43, 25, 45, 22), 'utils.histogram_features_generator', 'utils.histogram_features_generator', ({(43, 60, 45, 9): '[datum_l_cc, datum_r_cc, datum_l_mlo, datum_r_mlo]', (45, 11, 45, 21): 'parameters'}, {}), '([datum_l_cc, datum_r_cc, datum_l_mlo,\n datum_r_mlo], parameters)', False, 'import utils\n'), ((35, 20, 35, 44), 'torch.Tensor', 'torch.Tensor', ({(35, 33, 35, 43): 'datum_l_cc'}, {}), '(datum_l_cc)', False, 'import torch\n'), ((36, 21, 36, 46), 'torch.Tensor', 'torch.Tensor', ({(36, 34, 36, 45): 'datum_l_mlo'}, {}), '(datum_l_mlo)', False, 'import torch\n'), ((37, 20, 37, 44), 'torch.Tensor', 'torch.Tensor', ({(37, 33, 37, 43): 'datum_r_cc'}, {}), '(datum_r_cc)', False, 'import torch\n'), ((38, 21, 38, 46), 'torch.Tensor', 'torch.Tensor', ({(38, 34, 38, 45): 'datum_r_mlo'}, {}), '(datum_r_mlo)', False, 'import torch\n')]
yifatdzigan/ESMValTool
esmvaltool/diag_scripts/ensclus/ens_anom.py
83320b0e0b24ddde965599961bb80428e180a731
"""Computation of ensemble anomalies based on a desired value."""

import os
import numpy as np
from scipy import stats

# User-defined packages
from read_netcdf import read_iris, save_n_2d_fields
from sel_season_area import sel_area, sel_season


def ens_anom(filenames, dir_output, name_outputs, varname, numens, season,
             area, extreme):
    """Ensemble anomalies.

    Computation of the ensemble anomalies based on the desired value
    from the input variable (it can be the percentile, mean, maximum,
    standard deviation or trend)
    OUTPUT: NetCDF files of ensemble mean of climatology, selected value
    and anomaly maps.
    """
    print('The name of the output files will be <variable>_{0}.txt'
          .format(name_outputs))
    print('Number of ensemble members: {0}'.format(numens))

    outfiles = []
    # Reading the netCDF file of 3Dfield, for all the ensemble members
    var_ens = []
    for ens in range(numens):
        ifile = filenames[ens]
        # print('ENSEMBLE MEMBER %s' %ens)
        var, varunits, lat, lon, dates, _ = read_iris(ifile)

        # Conversion from kg m-2 s-1 to mm/day
        if varunits == 'kg m-2 s-1':
            var = var * 86400  # there are 86400 seconds in a day
            varunits = 'mm/day'

        # Selecting a season (DJF,DJFM,NDJFM,JJA)
        var_season, _ = sel_season(var, dates, season)

        # Selecting only [latS-latN, lonW-lonE] box region
        var_area, lat_area, lon_area = sel_area(lat, lon, var_season, area)

        var_ens.append(var_area)

    if varunits == 'kg m-2 s-1':
        print('\nPrecipitation rate units were converted from kg m-2 s-1 '
              'to mm/day')

    print('The variable is {0} ({1})'.format(varname, varunits))
    print('Original var shape: (time x lat x lon)={0}'.format(var.shape))
    print('var shape after selecting season {0} and area {1}: '
          '(time x lat x lon)={2}'.format(season, area, var_area.shape))

    if extreme == 'mean':
        # Compute the time mean over the entire period, for each ens member
        varextreme_ens = [np.nanmean(var_ens[i], axis=0)
                          for i in range(numens)]
    elif len(extreme.split("_")) == 2:
        # Compute the chosen percentile over the period, for each ens member
        quant = int(extreme.partition("th")[0])
        varextreme_ens = [np.nanpercentile(var_ens[i], quant, axis=0)
                          for i in range(numens)]
    elif extreme == 'maximum':
        # Compute the maximum value over the period, for each ensemble member
        varextreme_ens = [np.nanmax(var_ens[i], axis=0)
                          for i in range(numens)]
    elif extreme == 'std':
        # Compute the standard deviation over the period, for each ens member
        varextreme_ens = [np.nanstd(var_ens[i], axis=0)
                          for i in range(numens)]
    elif extreme == 'trend':
        # Compute the linear trend over the period, for each ensemble member
        trendmap = np.empty((var_ens[0].shape[1], var_ens[0].shape[2]))
        trendmap_ens = []
        for i in range(numens):
            for jla in range(var_ens[0].shape[1]):
                for jlo in range(var_ens[0].shape[2]):
                    slope, _, _, _, _ = \
                        stats.linregress(range(var_ens[0].shape[0]),
                                         var_ens[i][:, jla, jlo])
                    trendmap[jla, jlo] = slope
            trendmap_ens.append(trendmap.copy())
        varextreme_ens = trendmap_ens

    varextreme_ens_np = np.array(varextreme_ens)
    print('Anomalies are computed with respect to the {0}'.format(extreme))

    # Compute and save the anomalies with respect to the ensemble
    ens_anomalies = varextreme_ens_np - np.nanmean(varextreme_ens_np, axis=0)
    varsave = 'ens_anomalies'
    ofile = os.path.join(dir_output, 'ens_anomalies_{0}.nc'
                         .format(name_outputs))
    # print(ofile)
    print('ens_anomalies shape: (numens x lat x lon)={0}'
          .format(ens_anomalies.shape))
    save_n_2d_fields(lat_area, lon_area, ens_anomalies, varsave,
                     varunits, ofile)
    outfiles.append(ofile)

    # Compute and save the climatology
    vartimemean_ens = [np.mean(var_ens[i], axis=0) for i in range(numens)]
    ens_climatologies = np.array(vartimemean_ens)
    varsave = 'ens_climatologies'
    ofile = os.path.join(dir_output, 'ens_climatologies_{0}.nc'
                         .format(name_outputs))
    save_n_2d_fields(lat_area, lon_area, ens_climatologies, varsave,
                     varunits, ofile)
    outfiles.append(ofile)

    ens_extreme = varextreme_ens_np
    varsave = 'ens_extreme'
    ofile = os.path.join(dir_output, 'ens_extreme_{0}.nc'.format(name_outputs))
    save_n_2d_fields(lat_area, lon_area, ens_extreme, varsave,
                     varunits, ofile)
    outfiles.append(ofile)

    return outfiles
[((89, 24, 89, 48), 'numpy.array', 'np.array', ({(89, 33, 89, 47): 'varextreme_ens'}, {}), '(varextreme_ens)', True, 'import numpy as np\n'), ((100, 4, 101, 37), 'read_netcdf.save_n_2d_fields', 'save_n_2d_fields', ({(100, 21, 100, 29): 'lat_area', (100, 31, 100, 39): 'lon_area', (100, 41, 100, 54): 'ens_anomalies', (100, 56, 100, 63): 'varsave', (101, 21, 101, 29): 'varunits', (101, 31, 101, 36): 'ofile'}, {}), '(lat_area, lon_area, ens_anomalies, varsave, varunits, ofile)', False, 'from read_netcdf import read_iris, save_n_2d_fields\n'), ((105, 24, 105, 49), 'numpy.array', 'np.array', ({(105, 33, 105, 48): 'vartimemean_ens'}, {}), '(vartimemean_ens)', True, 'import numpy as np\n'), ((109, 4, 110, 37), 'read_netcdf.save_n_2d_fields', 'save_n_2d_fields', ({(109, 21, 109, 29): 'lat_area', (109, 31, 109, 39): 'lon_area', (109, 41, 109, 58): 'ens_climatologies', (109, 60, 109, 67): 'varsave', (110, 21, 110, 29): 'varunits', (110, 31, 110, 36): 'ofile'}, {}), '(lat_area, lon_area, ens_climatologies, varsave, varunits,\n ofile)', False, 'from read_netcdf import read_iris, save_n_2d_fields\n'), ((115, 4, 116, 37), 'read_netcdf.save_n_2d_fields', 'save_n_2d_fields', ({(115, 21, 115, 29): 'lat_area', (115, 31, 115, 39): 'lon_area', (115, 41, 115, 52): 'ens_extreme', (115, 54, 115, 61): 'varsave', (116, 21, 116, 29): 'varunits', (116, 31, 116, 36): 'ofile'}, {}), '(lat_area, lon_area, ens_extreme, varsave, varunits, ofile)', False, 'from read_netcdf import read_iris, save_n_2d_fields\n'), ((32, 44, 32, 60), 'read_netcdf.read_iris', 'read_iris', ({(32, 54, 32, 59): 'ifile'}, {}), '(ifile)', False, 'from read_netcdf import read_iris, save_n_2d_fields\n'), ((40, 24, 40, 54), 'sel_season_area.sel_season', 'sel_season', ({(40, 35, 40, 38): 'var', (40, 40, 40, 45): 'dates', (40, 47, 40, 53): 'season'}, {}), '(var, dates, season)', False, 'from sel_season_area import sel_area, sel_season\n'), ((43, 39, 43, 75), 'sel_season_area.sel_area', 'sel_area', ({(43, 48, 43, 51): 'lat', (43, 53, 43, 56): 'lon', (43, 58, 43, 68): 'var_season', (43, 70, 43, 74): 'area'}, {}), '(lat, lon, var_season, area)', False, 'from sel_season_area import sel_area, sel_season\n'), ((93, 40, 93, 77), 'numpy.nanmean', 'np.nanmean', (), '', True, 'import numpy as np\n'), ((104, 23, 104, 50), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n'), ((58, 26, 58, 56), 'numpy.nanmean', 'np.nanmean', (), '', True, 'import numpy as np\n'), ((64, 26, 64, 69), 'numpy.nanpercentile', 'np.nanpercentile', (), '', True, 'import numpy as np\n'), ((69, 26, 69, 55), 'numpy.nanmax', 'np.nanmax', (), '', True, 'import numpy as np\n'), ((73, 26, 73, 55), 'numpy.nanstd', 'np.nanstd', (), '', True, 'import numpy as np\n'), ((77, 19, 77, 71), 'numpy.empty', 'np.empty', ({(77, 28, 77, 70): '(var_ens[0].shape[1], var_ens[0].shape[2])'}, {}), '((var_ens[0].shape[1], var_ens[0].shape[2]))', True, 'import numpy as np\n')]
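A hedged invocation sketch for ens_anom above; the file names, variable, season, area and output settings are placeholders (the exact area format is whatever sel_area expects):

# Illustrative only: 75th-percentile anomalies for a 3-member ensemble.
files = ['ens0.nc', 'ens1.nc', 'ens2.nc']
outfiles = ens_anom(files, dir_output='/tmp/out', name_outputs='pr_DJF',
                    varname='pr', numens=len(files), season='DJF',
                    area='EU', extreme='75th_percentile')
print(outfiles)   # anomaly, climatology and extreme NetCDF paths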
Te-k/Pytition
pytition/petition/models.py
16ebce01b491b72ed387709d9b705f7cb0d5476f
from django.db import models
from django.utils.html import mark_safe, strip_tags
from django.utils.text import slugify
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from django.core.exceptions import ValidationError
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from django.conf import settings
from django.contrib.auth.hashers import get_hasher
from django.db import transaction
from django.urls import reverse
from django.db.models import Q

from tinymce import models as tinymce_models
from colorfield.fields import ColorField

import html


class Petition(models.Model):

    NO = "no gradient"
    RIGHT = "to right"
    BOTTOM = "to bottom"
    BOTTOM_RIGHT = "to bottom right"
    BOTTOM_LEFT = "to bottom left"
    LINEAR_GRADIENT_CHOICES = (
        (NO, "no gradient"),
        (RIGHT, "to right"),
        (BOTTOM, "to bottom"),
        (BOTTOM_RIGHT, "to bottom right"),
        (BOTTOM_LEFT, "to bottom left")
    )

    MAIL = "MAIL"
    POST = "POST"
    GET = "GET"
    NEWSLETTER_SUBSCRIBE_METHOD_CHOICES = (
        (MAIL, "MAIL"),
        (POST, "POST"),
        (GET, "GET")
    )

    title = models.TextField(verbose_name=ugettext_lazy("Title"))
    text = tinymce_models.HTMLField(blank=True)
    side_text = tinymce_models.HTMLField(blank=True)
    target = models.IntegerField(default=500)
    linear_gradient_direction = models.CharField(choices=LINEAR_GRADIENT_CHOICES, max_length=15,
                                                 default=NO, blank=True)
    gradient_from = ColorField(blank=True)
    gradient_to = ColorField(blank=True)
    bgcolor = ColorField(blank=True)
    footer_text = tinymce_models.HTMLField(blank=True)
    footer_links = tinymce_models.HTMLField(blank=True)
    twitter_description = models.CharField(max_length=200, blank=True)
    twitter_image = models.CharField(max_length=500, blank=True)
    has_newsletter = models.BooleanField(default=False)
    newsletter_subscribe_http_data = models.TextField(blank=True)
    newsletter_subscribe_http_mailfield = models.CharField(max_length=100, blank=True)
    newsletter_subscribe_http_url = models.CharField(max_length=1000, blank=True)
    newsletter_subscribe_mail_subject = models.CharField(max_length=1000, blank=True)
    newsletter_subscribe_mail_from = models.CharField(max_length=500, blank=True)
    newsletter_subscribe_mail_to = models.CharField(max_length=500, blank=True)
    newsletter_subscribe_method = models.CharField(choices=NEWSLETTER_SUBSCRIBE_METHOD_CHOICES,
                                                   max_length=4, default=MAIL)
    newsletter_subscribe_mail_smtp_host = models.CharField(max_length=100, default='localhost', blank=True)
    newsletter_subscribe_mail_smtp_port = models.IntegerField(default=25, blank=True)
    newsletter_subscribe_mail_smtp_user = models.CharField(max_length=200, blank=True)
    newsletter_subscribe_mail_smtp_password = models.CharField(max_length=200, blank=True)
    newsletter_subscribe_mail_smtp_tls = models.BooleanField(default=False)
    newsletter_subscribe_mail_smtp_starttls = models.BooleanField(default=False)
    org_twitter_handle = models.CharField(max_length=20, blank=True)
    published = models.BooleanField(default=False)
    newsletter_text = models.CharField(max_length=1000, blank=True)
    sign_form_footer = models.TextField(blank=True)
    confirmation_email_sender = models.CharField(max_length=100, blank=True)
    confirmation_email_smtp_host = models.CharField(max_length=100, default='localhost', blank=True)
    confirmation_email_smtp_port = models.IntegerField(default=25, blank=True)
    confirmation_email_smtp_user = models.CharField(max_length=200, blank=True)
    confirmation_email_smtp_password = models.CharField(max_length=200, blank=True)
    confirmation_email_smtp_tls = models.BooleanField(default=False)
    confirmation_email_smtp_starttls = models.BooleanField(default=False)
    use_custom_email_settings = models.BooleanField(default=False)
    salt = models.TextField(blank=True)
    slugs = models.ManyToManyField('SlugModel', blank=True, through='SlugOwnership')

    def prepopulate_from_template(self, template):
        for field in self._meta.fields:
            if hasattr(self, field.name) and hasattr(template, field.name):
                template_value = getattr(template, field.name)
                if template_value is not None and template_value != "":
                    setattr(self, field.name, template_value)

    def save(self, *args, **kwargs):
        super().save(*args, **kwargs)
        if not self.salt:
            hasher = get_hasher()
            self.salt = hasher.salt().decode('utf-8')
            super().save()

    def slugify(self):
        if self.slugs.count() == 0:
            slugtext = slugify(self.raw_title)
            # let's search for slug collisions
            filters = {'slugs__slug': slugtext}
            if self.organization_set.count() > 0:
                org = self.organization_set.first()
                filters.update({'organization__name': org.name})
            else:
                user = self.pytitionuser_set.first()
                filters.update({'pytitionuser__user__username': user.user.username})
            results = Petition.objects.filter(**filters)
            if results.count() > 0:
                raise ValueError(_("This slug is already used by another petition from this organization/user"))
            slug = SlugModel(slug=slugify(slugtext))
            slug.save()
            self.slugs.add(slug)
            self.save()

    @classmethod
    def by_id(cls, id):
        try:
            return Petition.objects.get(pk=id)
        except Petition.DoesNotExist:
            return None

    def get_signature_number(self, confirmed=None):
        signatures = self.signature_set
        if confirmed is not None:
            signatures = signatures.filter(confirmed=confirmed)
        return signatures.count()

    def already_signed(self, email):
        signature_number = Signature.objects.filter(petition=self.id)\
            .filter(confirmed=True).filter(email=email).count()
        return signature_number > 0

    def confirm_signature(self, conf_hash):
        signature = Signature.objects.filter(petition=self.id).get(confirmation_hash=conf_hash)
        if signature:
            # Now confirm the signature corresponding to this hash
            signature.confirm()
            signature.save()
            return _("Thank you for confirming your signature!")
        else:
            return None

    def add_slug(self, slugtext):
        with transaction.atomic():
            slugtext = slugify(slugtext)
            slug = SlugModel.objects.create(slug=slugtext)
            if self.owner_type == "org":
                SlugOwnership.objects.create(slug=slug, petition=self, organization=self.owner)
            elif self.owner_type == "user":
                SlugOwnership.objects.create(slug=slug, petition=self, user=self.owner)
            else:
                raise ValueError(_("This petition has no owner, cannot add slug!"))

    def del_slug(self, slug):
        slug.delete()

    def publish(self):
        self.published = True
        self.save()

    def unpublish(self):
        self.published = False
        self.save()

    @property
    def owner_type(self):
        if self.organization_set.count() > 0:
            return "org"
        elif self.pytitionuser_set.count() > 0:
            return "user"
        else:
            return "no_owner"

    @property
    def owner(self):
        if self.organization_set.count() > 0:
            return self.organization_set.first()
        elif self.pytitionuser_set.count() > 0:
            return self.pytitionuser_set.first()
        else:
            return None

    @property
    def signature_number(self):
        return self.get_signature_number(True)

    @property
    def raw_twitter_description(self):
        return html.unescape(mark_safe(strip_tags(self.twitter_description)))

    @property
    def raw_text(self):
        return html.unescape(mark_safe(strip_tags(self.text)))

    @property
    def raw_title(self):
        return html.unescape(mark_safe(strip_tags(self.title).strip()))

    def __str__(self):
        return self.raw_title

    def __repr__(self):
        return self.raw_title

    @property
    def url(self):
        slugs = self.slugs.all()
        if len(slugs) == 0:
            # If there is no slug, ugly url
            return reverse('detail', kwargs={'petition_id': self.id})
        else:
            if self.organization_set.count() > 0:
                # This petition is owned by an Organization
                org = self.organization_set.first()
                return reverse("slug_show_petition",
                               kwargs={"orgslugname": org.slugname,
                                       "petitionname": slugs[0]})
            elif self.pytitionuser_set.count() > 0:
                # This petition is owned by a PytitionUser
                user = self.pytitionuser_set.first()
                return reverse("slug_show_petition",
                               kwargs={"username": user.user.username,
                                       "petitionname": slugs[0]})
            else:
                # This is a BUG!
                raise ValueError(_("This petition is buggy. Sorry about that!"))


class SlugOwnership(models.Model):
    petition = models.ForeignKey(Petition, on_delete=models.CASCADE)
    slug = models.ForeignKey('SlugModel', on_delete=models.CASCADE)
    user = models.ForeignKey('PytitionUser', on_delete=models.CASCADE, blank=True, null=True, default=None)
    organization = models.ForeignKey('Organization', on_delete=models.CASCADE, blank=True, null=True,
                                     default=None)

    class Meta:
        constraints = [
            models.UniqueConstraint(fields=['slug', 'organization'], name="unique_slugnameperorg",
                                    condition=Q(user=None)),
            models.UniqueConstraint(fields=['slug', 'user'], name="unique_slugnameperuser",
                                    condition=Q(organization=None)),
        ]


class Signature(models.Model):
    first_name = models.CharField(max_length=50, verbose_name=ugettext_lazy("First name"))
    last_name = models.CharField(max_length=50, verbose_name=ugettext_lazy("Last name"))
    phone = models.CharField(max_length=20, blank=True, verbose_name=ugettext_lazy("Phone number"))
    email = models.EmailField(verbose_name=ugettext_lazy("Email address"))
    confirmation_hash = models.CharField(max_length=128)
    confirmed = models.BooleanField(default=False, verbose_name=ugettext_lazy("Confirmed"))
    petition = models.ForeignKey(Petition, on_delete=models.CASCADE, verbose_name=ugettext_lazy("Petition"))
    subscribed_to_mailinglist = models.BooleanField(default=False,
                                                    verbose_name=ugettext_lazy("Subscribed to mailing list"))
    date = models.DateTimeField(blank=True, auto_now_add=True, verbose_name=ugettext_lazy("Date"))
    ipaddress = models.TextField(blank=True, null=True)

    def clean(self):
        if self.petition.already_signed(self.email):
            if self.petition.signature_set.filter(email=self.email).get(confirmed=True).id != self.id:
                raise ValidationError(_("You already signed the petition"))

    def save(self, *args, **kwargs):
        self.clean()
        if self.confirmed:
            # invalidating other signatures from same email
            Signature.objects.filter(petition=self.petition).filter(email=self.email)\
                .exclude(id=self.id).delete()
        super().save(*args, **kwargs)

    def confirm(self):
        self.confirmed = True

    def __str__(self):
        return html.unescape("[{}:{}] {} {}".format(self.petition.id, "OK" if self.confirmed else "..",
                                                    self.first_name, self.last_name))

    def __repr__(self):
        return html.unescape("[{}:{}] {} {}".format(self.petition.id, "OK" if self.confirmed else "..",
                                                    self.first_name, self.last_name))


class PetitionTemplate(models.Model):
    NO = "no gradient"
    RIGHT = "to right"
    BOTTOM = "to bottom"
    BOTTOM_RIGHT = "to bottom right"
    BOTTOM_LEFT = "to bottom left"
    LINEAR_GRADIENT_CHOICES = (
        (NO, "no gradient"),
        (RIGHT, "to right"),
        (BOTTOM, "to bottom"),
        (BOTTOM_RIGHT, "to bottom right"),
        (BOTTOM_LEFT, "to bottom left")
    )

    MAIL = "MAIL"
    POST = "POST"
    GET = "GET"
    NEWSLETTER_SUBSCRIBE_METHOD_CHOICES = (
        (MAIL, "MAIL"),
        (POST, "POST"),
        (GET, "GET")
    )

    name = models.CharField(max_length=50, verbose_name=ugettext_lazy("Name"), db_index=True)
    text = tinymce_models.HTMLField(blank=True)
    side_text = tinymce_models.HTMLField(blank=True)
    target = models.IntegerField(blank=True, null=True)
    linear_gradient_direction = models.CharField(choices=LINEAR_GRADIENT_CHOICES, max_length=15,
                                                 default=NO, blank=True)
    gradient_from = ColorField(blank=True)
    gradient_to = ColorField(blank=True)
    bgcolor = ColorField(blank=True)
    footer_text = tinymce_models.HTMLField(blank=True)
    footer_links = tinymce_models.HTMLField(blank=True)
    twitter_description = models.CharField(max_length=200, blank=True)
    twitter_image = models.CharField(max_length=500, blank=True)
    has_newsletter = models.BooleanField(default=False)
    newsletter_subscribe_http_data = models.TextField(blank=True)
    newsletter_subscribe_http_mailfield = models.CharField(max_length=100, blank=True)
    newsletter_subscribe_http_url = models.CharField(max_length=1000, blank=True)
    newsletter_subscribe_mail_subject = models.CharField(max_length=1000, blank=True)
    newsletter_subscribe_mail_from = models.EmailField(max_length=500, blank=True)
    newsletter_subscribe_mail_to = models.EmailField(max_length=500, blank=True)
    newsletter_subscribe_method = models.CharField(choices=NEWSLETTER_SUBSCRIBE_METHOD_CHOICES,
                                                   max_length=4, default=MAIL)
    newsletter_subscribe_mail_smtp_host = models.CharField(max_length=100, default='localhost', blank=True)
    newsletter_subscribe_mail_smtp_port = models.IntegerField(default=25)
    newsletter_subscribe_mail_smtp_user = models.CharField(max_length=200, blank=True)
    newsletter_subscribe_mail_smtp_password = models.CharField(max_length=200, blank=True)
    newsletter_subscribe_mail_smtp_tls = models.BooleanField(default=False)
    newsletter_subscribe_mail_smtp_starttls = models.BooleanField(default=False)
    org_twitter_handle = models.CharField(max_length=20, blank=True)
    newsletter_text = models.CharField(max_length=1000, blank=True)
    sign_form_footer = models.TextField(blank=True)
    confirmation_email_sender = models.EmailField(max_length=100, blank=True)
    confirmation_email_smtp_host = models.CharField(max_length=100, default='localhost', blank=True)
    confirmation_email_smtp_port = models.IntegerField(default=25, blank=True)
    confirmation_email_smtp_user = models.CharField(max_length=200, blank=True)
    confirmation_email_smtp_password = models.CharField(max_length=200, blank=True)
    confirmation_email_smtp_tls = models.BooleanField(default=False)
    confirmation_email_smtp_starttls = models.BooleanField(default=False)
    use_custom_email_settings = models.BooleanField(default=False)

    def __str__(self):
        return self.name

    def __repr__(self):
        return self.name

    class Meta:
        index_together = ["id", ]


class SlugModel(models.Model):
    slug = models.SlugField(max_length=200)

    class Meta:
        constraints = [
            models.UniqueConstraint(fields=['slug'], name='unique_slugname')
        ]

    def __str__(self):
        return self.slug

    def __repr__(self):
        return self.slug


class Organization(models.Model):
    name = models.CharField(max_length=200, verbose_name=ugettext_lazy("Name"), unique=True)
    petition_templates = models.ManyToManyField(PetitionTemplate, through='TemplateOwnership',
                                                through_fields=['organization', 'template'], blank=True,
                                                verbose_name=ugettext_lazy("Petition templates"))
    petitions = models.ManyToManyField(Petition, blank=True, verbose_name=ugettext_lazy("Petitions"))
    default_template = models.ForeignKey(PetitionTemplate, blank=True, null=True, related_name='+',
                                         verbose_name=ugettext_lazy("Default petition template"),
                                         to_field='id', on_delete=models.SET_NULL)
    slugname = models.SlugField(max_length=200, unique=True)

    def drop(self):
        with transaction.atomic():
            petitions = list(self.petitions.all())
            templates = list(self.petition_templates.all())
            self.delete()
            for petition in petitions:
                petition.delete()
            for template in templates:
                template.delete()

    def add_member(self, member):
        member.organizations.add(self)
        permission = Permission.objects.create(organization=self)
        permission.save()
        member.permissions.add(permission)
        member.save()

    def __str__(self):
        return self.name

    def __repr__(self):
        return self.name

    def save(self, *args, **kwargs):
        self.slugname = slugify(self.name)
        super(Organization, self).save(*args, **kwargs)

    @property
    def kind(self):
        return "org"

    @property
    def fullname(self):
        return self.name


class Permission(models.Model):
    organization = models.ForeignKey(Organization, on_delete=models.CASCADE,
                                     verbose_name=ugettext_lazy("Organization related to these permissions"))
    can_add_members = models.BooleanField(default=False)
    can_remove_members = models.BooleanField(default=False)
    can_create_petitions = models.BooleanField(default=False)
    can_modify_petitions = models.BooleanField(default=False)
    can_delete_petitions = models.BooleanField(default=False)
    can_create_templates = models.BooleanField(default=False)
    can_modify_templates = models.BooleanField(default=False)
    can_delete_templates = models.BooleanField(default=False)
    can_view_signatures = models.BooleanField(default=False)
    can_modify_signatures = models.BooleanField(default=False)
    can_delete_signatures = models.BooleanField(default=False)
    can_modify_permissions = models.BooleanField(default=False)

    def set_all(self, value):
        self.can_add_members = value
        self.can_remove_members = value
        self.can_create_petitions = value
        self.can_modify_petitions = value
        self.can_delete_petitions = value
        self.can_create_templates = value
        self.can_modify_templates = value
        self.can_delete_templates = value
        self.can_view_signatures = value
        self.can_modify_signatures = value
        self.can_delete_signatures = value
        self.can_modify_permissions = value
        self.save()

    def __str__(self):
        ret = "{orgname} : ".format(orgname=self.organization.name)
        if self.user.count() > 0:
            ret = ret + "{username}".format(username=self.user.all()[0].name)
        else:
            ret = ret + "None"
        return ret

    def __repr__(self):
        return self.__str__()


class PytitionUser(models.Model):
    petitions = models.ManyToManyField(Petition, blank=True)
    organizations = models.ManyToManyField(Organization, related_name="members", blank=True)
    user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE,
                                related_name="pytitionuser")
    permissions = models.ManyToManyField(Permission, related_name="user", blank=True)
    invitations = models.ManyToManyField(Organization, related_name="invited", blank=True)
    petition_templates = models.ManyToManyField(PetitionTemplate, blank=True, through='TemplateOwnership',
                                                through_fields=['user', 'template'],
                                                verbose_name=ugettext_lazy("Petition templates"))
    default_template = models.ForeignKey(PetitionTemplate, blank=True, null=True, related_name='+',
                                         verbose_name=ugettext_lazy("Default petition template"),
                                         to_field='id', on_delete=models.SET_NULL)

    def has_right(self, right, petition=None, org=None):
        if petition:
            if petition in self.petitions.all():
                return True
            try:
                if not org:
                    org = Organization.objects.get(petitions=petition, members=self)
                permissions = self.permissions.get(organization=org)
                return getattr(permissions, right)
            except:
                return False
        if org:
            try:
                permissions = self.permissions.get(organization=org)
                return getattr(permissions, right)
            except:
                return False
        return False

    def drop(self):
        with transaction.atomic():
            orgs = list(self.organizations.all())
            petitions = list(self.petitions.all())
            templates = list(self.petition_templates.all())
            self.delete()
            for org in orgs:
                if org.members.count() == 0:
                    org.drop()
            for petition in petitions:
                petition.delete()
            for template in templates:
                template.delete()

    @property
    def is_authenticated(self):
        return self.user.is_authenticated

    @property
    def name(self):
        return self.username

    @property
    def username(self):
        return self.user.username

    @property
    def get_full_name(self):
        return self.user.get_full_name()

    @property
    def fullname(self):
        return self.get_full_name

    @property
    def kind(self):
        return "user"

    def __str__(self):
        return self.get_full_name

    def __repr__(self):
        return self.get_full_name


@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_user_profile(sender, instance, created, **kwargs):
    if created:
        PytitionUser.objects.create(user=instance)


@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def save_user_profile(sender, instance, **kwargs):
    instance.pytitionuser.save()


@receiver(post_save, sender=Organization)
def save_organization_slugname(sender, instance, **kwargs):
    if not instance.slugname:
        slugtext = slugify(instance.name)
        instance.slugname = slugtext
        instance.save()


@receiver(post_delete, sender=PytitionUser)
def post_delete_user(sender, instance, *args, **kwargs):
    if instance.user:  # just in case user is not specified
        instance.user.delete()


class TemplateOwnership(models.Model):
    user = models.ForeignKey(PytitionUser, blank=True, null=True, on_delete=models.CASCADE)
    organization = models.ForeignKey(Organization, blank=True, null=True, on_delete=models.CASCADE)
    template = models.ForeignKey(PetitionTemplate, to_field='id', on_delete=models.CASCADE)

    def clean(self):
        if self.user is None and self.organization is None:
            raise ValidationError(_("The template needs to be owned by a User or an Organization. "
                                    "It cannot hang around alone by itself."))

    #class Meta:
    #    unique_together = (("user", "template"), ("organization", "template"))
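# --- Hedged usage sketch (not part of the original module) ---
# Rough illustration of how these models fit together, e.g. from a Django
# shell with migrations applied and Pytition's URLconf loaded; every value
# below ("alice", "Example Org", "Save the bees") is made up.
from django.contrib.auth import get_user_model

def _demo():
    auth_user = get_user_model().objects.create_user(username="alice")
    alice = auth_user.pytitionuser          # created by the post_save receiver
    org = Organization.objects.create(name="Example Org")  # save() slugifies the name
    org.add_member(alice)                   # also creates a Permission row for the org
    petition = Petition.objects.create(title="Save the bees", target=1000)
    org.petitions.add(petition)
    petition.slugify()                      # derives a slug from the title
    return petition.url                     # slug-based URL resolved via reverse()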
[((556, 1, 556, 53), 'django.dispatch.receiver', 'receiver', (), '', False, 'from django.dispatch import receiver\n'), ((562, 1, 562, 53), 'django.dispatch.receiver', 'receiver', (), '', False, 'from django.dispatch import receiver\n'), ((567, 1, 567, 41), 'django.dispatch.receiver', 'receiver', (), '', False, 'from django.dispatch import receiver\n'), ((575, 1, 575, 43), 'django.dispatch.receiver', 'receiver', (), '', False, 'from django.dispatch import receiver\n'), ((49, 11, 49, 47), 'tinymce.models.HTMLField', 'tinymce_models.HTMLField', (), '', True, 'from tinymce import models as tinymce_models\n'), ((50, 16, 50, 52), 'tinymce.models.HTMLField', 'tinymce_models.HTMLField', (), '', True, 'from tinymce import models as tinymce_models\n'), ((51, 13, 51, 45), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import models\n'), ((52, 32, 52, 120), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((53, 20, 53, 42), 'colorfield.fields.ColorField', 'ColorField', (), '', False, 'from colorfield.fields import ColorField\n'), ((54, 18, 54, 40), 'colorfield.fields.ColorField', 'ColorField', (), '', False, 'from colorfield.fields import ColorField\n'), ((55, 14, 55, 36), 'colorfield.fields.ColorField', 'ColorField', (), '', False, 'from colorfield.fields import ColorField\n'), ((56, 18, 56, 54), 'tinymce.models.HTMLField', 'tinymce_models.HTMLField', (), '', True, 'from tinymce import models as tinymce_models\n'), ((57, 19, 57, 55), 'tinymce.models.HTMLField', 'tinymce_models.HTMLField', (), '', True, 'from tinymce import models as tinymce_models\n'), ((58, 26, 58, 70), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((59, 20, 59, 64), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((60, 21, 60, 55), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((61, 37, 61, 65), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import models\n'), ((62, 42, 62, 86), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((63, 36, 63, 81), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((64, 40, 64, 85), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((65, 37, 65, 81), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((66, 35, 66, 79), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((67, 34, 68, 64), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((69, 42, 69, 107), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((70, 42, 70, 85), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import models\n'), ((71, 42, 71, 86), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((72, 46, 72, 90), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((73, 41, 73, 75), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((74, 46, 74, 80), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 
'from django.db import models\n'), ((75, 25, 75, 68), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((76, 16, 76, 50), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((77, 22, 77, 67), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((78, 23, 78, 51), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import models\n'), ((79, 32, 79, 76), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((80, 35, 80, 100), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((81, 35, 81, 78), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import models\n'), ((82, 35, 82, 79), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((83, 39, 83, 83), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((84, 34, 84, 68), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((85, 39, 85, 73), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((86, 32, 86, 66), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((87, 11, 87, 39), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import models\n'), ((88, 12, 88, 84), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (), '', False, 'from django.db import models\n'), ((241, 15, 241, 68), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((242, 11, 242, 67), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((243, 11, 243, 107), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((244, 19, 244, 115), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((259, 24, 259, 56), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((264, 16, 264, 55), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import models\n'), ((318, 11, 318, 47), 'tinymce.models.HTMLField', 'tinymce_models.HTMLField', (), '', True, 'from tinymce import models as tinymce_models\n'), ((319, 16, 319, 52), 'tinymce.models.HTMLField', 'tinymce_models.HTMLField', (), '', True, 'from tinymce import models as tinymce_models\n'), ((320, 13, 320, 55), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import models\n'), ((321, 32, 321, 120), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((322, 20, 322, 42), 'colorfield.fields.ColorField', 'ColorField', (), '', False, 'from colorfield.fields import ColorField\n'), ((323, 18, 323, 40), 'colorfield.fields.ColorField', 'ColorField', (), '', False, 'from colorfield.fields import ColorField\n'), ((324, 14, 324, 36), 'colorfield.fields.ColorField', 'ColorField', (), '', False, 'from colorfield.fields import ColorField\n'), ((325, 18, 325, 54), 'tinymce.models.HTMLField', 'tinymce_models.HTMLField', (), '', True, 'from tinymce import models as tinymce_models\n'), ((326, 19, 
326, 55), 'tinymce.models.HTMLField', 'tinymce_models.HTMLField', (), '', True, 'from tinymce import models as tinymce_models\n'), ((327, 26, 327, 70), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((328, 20, 328, 64), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((329, 21, 329, 55), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((330, 37, 330, 65), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import models\n'), ((331, 42, 331, 86), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((332, 36, 332, 81), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((333, 40, 333, 85), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((334, 37, 334, 82), 'django.db.models.EmailField', 'models.EmailField', (), '', False, 'from django.db import models\n'), ((335, 35, 335, 80), 'django.db.models.EmailField', 'models.EmailField', (), '', False, 'from django.db import models\n'), ((336, 34, 337, 64), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((338, 42, 338, 107), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((339, 42, 339, 73), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import models\n'), ((340, 42, 340, 86), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((341, 46, 341, 90), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((342, 41, 342, 75), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((343, 46, 343, 80), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((344, 25, 344, 68), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((345, 22, 345, 67), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((346, 23, 346, 51), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import models\n'), ((347, 32, 347, 77), 'django.db.models.EmailField', 'models.EmailField', (), '', False, 'from django.db import models\n'), ((348, 35, 348, 100), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((349, 35, 349, 78), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import models\n'), ((350, 35, 350, 79), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((351, 39, 351, 83), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((352, 34, 352, 68), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((353, 39, 353, 73), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((354, 32, 354, 66), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((367, 11, 367, 43), 'django.db.models.SlugField', 'models.SlugField', (), '', False, 'from 
django.db import models\n'), ((390, 15, 390, 60), 'django.db.models.SlugField', 'models.SlugField', (), '', False, 'from django.db import models\n'), ((436, 22, 436, 56), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((437, 25, 437, 59), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((438, 27, 438, 61), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((439, 27, 439, 61), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((440, 27, 440, 61), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((441, 27, 441, 61), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((442, 27, 442, 61), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((443, 27, 443, 61), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((444, 26, 444, 60), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((445, 28, 445, 62), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((446, 28, 446, 62), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((447, 29, 447, 63), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((478, 16, 478, 60), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (), '', False, 'from django.db import models\n'), ((479, 20, 479, 92), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (), '', False, 'from django.db import models\n'), ((480, 11, 480, 112), 'django.db.models.OneToOneField', 'models.OneToOneField', (), '', False, 'from django.db import models\n'), ((481, 18, 481, 85), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (), '', False, 'from django.db import models\n'), ((482, 18, 482, 90), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (), '', False, 'from django.db import models\n'), ((581, 11, 581, 91), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((582, 19, 582, 99), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((583, 15, 583, 91), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((429, 24, 429, 42), 'django.utils.text.slugify', 'slugify', ({(429, 32, 429, 41): 'self.name'}, {}), '(self.name)', False, 'from django.utils.text import slugify\n'), ((570, 19, 570, 41), 'django.utils.text.slugify', 'slugify', ({(570, 27, 570, 40): 'instance.name'}, {}), '(instance.name)', False, 'from django.utils.text import slugify\n'), ((48, 42, 48, 64), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(48, 56, 48, 63): '"""Title"""'}, {}), "('Title')", False, 'from django.utils.translation import ugettext_lazy\n'), ((101, 21, 101, 33), 'django.contrib.auth.hashers.get_hasher', 'get_hasher', ({}, {}), '()', False, 'from django.contrib.auth.hashers import get_hasher\n'), ((107, 23, 107, 46), 'django.utils.text.slugify', 'slugify', ({(107, 31, 107, 45): 'self.raw_title'}, {}), '(self.raw_title)', False, 'from 
django.utils.text import slugify\n'), ((150, 19, 150, 64), 'django.utils.translation.ugettext', '_', ({(150, 21, 150, 63): '"""Thank you for confirming your signature!"""'}, {}), "('Thank you for confirming your signature!')", True, 'from django.utils.translation import ugettext as _\n'), ((155, 13, 155, 33), 'django.db.transaction.atomic', 'transaction.atomic', ({}, {}), '()', False, 'from django.db import transaction\n'), ((156, 23, 156, 40), 'django.utils.text.slugify', 'slugify', ({(156, 31, 156, 39): 'slugtext'}, {}), '(slugtext)', False, 'from django.utils.text import slugify\n'), ((221, 19, 221, 69), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((255, 62, 255, 89), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(255, 76, 255, 88): '"""First name"""'}, {}), "('First name')", False, 'from django.utils.translation import ugettext_lazy\n'), ((256, 61, 256, 87), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(256, 75, 256, 86): '"""Last name"""'}, {}), "('Last name')", False, 'from django.utils.translation import ugettext_lazy\n'), ((257, 69, 257, 98), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(257, 83, 257, 97): '"""Phone number"""'}, {}), "('Phone number')", False, 'from django.utils.translation import ugettext_lazy\n'), ((258, 43, 258, 73), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(258, 57, 258, 72): '"""Email address"""'}, {}), "('Email address')", False, 'from django.utils.translation import ugettext_lazy\n'), ((260, 64, 260, 90), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(260, 78, 260, 89): '"""Confirmed"""'}, {}), "('Confirmed')", False, 'from django.utils.translation import ugettext_lazy\n'), ((261, 82, 261, 107), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(261, 96, 261, 106): '"""Petition"""'}, {}), "('Petition')", False, 'from django.utils.translation import ugettext_lazy\n'), ((262, 80, 262, 123), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(262, 94, 262, 122): '"""Subscribed to mailing list"""'}, {}), "('Subscribed to mailing list')", False, 'from django.utils.translation import ugettext_lazy\n'), ((263, 76, 263, 97), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(263, 90, 263, 96): '"""Date"""'}, {}), "('Date')", False, 'from django.utils.translation import ugettext_lazy\n'), ((317, 56, 317, 77), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(317, 70, 317, 76): '"""Name"""'}, {}), "('Name')", False, 'from django.utils.translation import ugettext_lazy\n'), ((371, 12, 371, 76), 'django.db.models.UniqueConstraint', 'models.UniqueConstraint', (), '', False, 'from django.db import models\n'), ((382, 57, 382, 78), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(382, 71, 382, 77): '"""Name"""'}, {}), "('Name')", False, 'from django.utils.translation import ugettext_lazy\n'), ((385, 61, 385, 96), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(385, 75, 385, 95): '"""Petition templates"""'}, {}), "('Petition templates')", False, 'from django.utils.translation import ugettext_lazy\n'), ((386, 74, 386, 100), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(386, 88, 386, 99): '"""Petitions"""'}, {}), "('Petitions')", False, 'from django.utils.translation import ugettext_lazy\n'), ((388, 54, 388, 96), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(388, 68, 388, 95): '"""Default petition template"""'}, {}), "('Default petition template')", 
False, 'from django.utils.translation import ugettext_lazy\n'), ((393, 13, 393, 33), 'django.db.transaction.atomic', 'transaction.atomic', ({}, {}), '()', False, 'from django.db import transaction\n'), ((417, 28, 417, 46), 'django.utils.text.slugify', 'slugify', ({(417, 36, 417, 45): 'self.name'}, {}), '(self.name)', False, 'from django.utils.text import slugify\n'), ((435, 50, 435, 108), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(435, 64, 435, 107): '"""Organization related to these permissions"""'}, {}), "('Organization related to these permissions')", False, 'from django.utils.translation import ugettext_lazy\n'), ((485, 61, 485, 96), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(485, 75, 485, 95): '"""Petition templates"""'}, {}), "('Petition templates')", False, 'from django.utils.translation import ugettext_lazy\n'), ((487, 54, 487, 96), 'django.utils.translation.ugettext_lazy', 'ugettext_lazy', ({(487, 68, 487, 95): '"""Default petition template"""'}, {}), "('Default petition template')", False, 'from django.utils.translation import ugettext_lazy\n'), ((512, 13, 512, 33), 'django.db.transaction.atomic', 'transaction.atomic', ({}, {}), '()', False, 'from django.db import transaction\n'), ((200, 39, 200, 75), 'django.utils.html.strip_tags', 'strip_tags', ({(200, 50, 200, 74): 'self.twitter_description'}, {}), '(self.twitter_description)', False, 'from django.utils.html import mark_safe, strip_tags\n'), ((204, 39, 204, 60), 'django.utils.html.strip_tags', 'strip_tags', ({(204, 50, 204, 59): 'self.text'}, {}), '(self.text)', False, 'from django.utils.html import mark_safe, strip_tags\n'), ((226, 23, 228, 57), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((587, 34, 588, 77), 'django.utils.translation.ugettext', '_', ({(587, 36, 588, 76): '"""The template needs to be owned by a User or an Organization.It cannot hang around alone by itself."""'}, {}), "('The template needs to be owned by a User or an Organization.It cannot hang around alone by itself.'\n )", True, 'from django.utils.translation import ugettext as _\n'), ((118, 33, 118, 111), 'django.utils.translation.ugettext', '_', ({(118, 35, 118, 110): '"""This slug is already used by another petition from this organization/user"""'}, {}), "('This slug is already used by another petition from this organization/user')", True, 'from django.utils.translation import ugettext as _\n'), ((120, 34, 120, 51), 'django.utils.text.slugify', 'slugify', ({(120, 42, 120, 50): 'slugtext'}, {}), '(slugtext)', False, 'from django.utils.text import slugify\n'), ((232, 23, 234, 57), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((248, 109, 248, 121), 'django.db.models.Q', 'Q', (), '', False, 'from django.db.models import Q\n'), ((250, 46, 250, 66), 'django.db.models.Q', 'Q', (), '', False, 'from django.db.models import Q\n'), ((269, 38, 269, 74), 'django.utils.translation.ugettext', '_', ({(269, 40, 269, 73): '"""You already signed the petition"""'}, {}), "('You already signed the petition')", True, 'from django.utils.translation import ugettext as _\n'), ((163, 33, 163, 82), 'django.utils.translation.ugettext', '_', ({(163, 35, 163, 81): '"""This petition has no owner, cannot add slug!"""'}, {}), "('This petition has no owner, cannot add slug!')", True, 'from django.utils.translation import ugettext as _\n'), ((208, 39, 208, 61), 'django.utils.html.strip_tags', 'strip_tags', ({(208, 50, 208, 60): 'self.title'}, {}), '(self.title)', False, 
'from django.utils.html import mark_safe, strip_tags\n'), ((237, 33, 237, 79), 'django.utils.translation.ugettext', '_', ({(237, 35, 237, 78): '"""This petition is buggy. Sorry about that!"""'}, {}), "('This petition is buggy. Sorry about that!')", True, 'from django.utils.translation import ugettext as _\n')]
JohnShullTopDev/generating-traning-data-for-healthcare-machine-learningcare-
bin/socialhistory.py
d0ffb26e1b99204a796df905b50c8caf01417f69
import csv

from testdata import SOCIALHISTORY_FILE
from testdata import rndDate
from patient import Patient

SMOKINGCODES = {
    '428041000124106': 'Current some day smoker',
    '266919005': 'Never smoker',
    '449868002': 'Current every day smoker',
    '266927001': 'Unknown if ever smoked',
    '8517006': 'Former smoker'
}


class SocialHistory(object):
    """Create instances of SocialHistory; also maintains
       socialHistory by patient id"""

    socialHistories = {}  # Dictionary of socialHistory by patient ID

    @classmethod
    def load(cls):
        """Loads patient SocialHistory"""
        # Loop through socialHistories and build patient socialHistory lists:
        histories = csv.reader(open(SOCIALHISTORY_FILE, 'U'), dialect='excel-tab')
        header = next(histories)
        for history in histories:
            cls(dict(zip(header, history)))  # Create a socialHistory instance

    def __init__(self, p):
        self.pid = p['PID']
        self.id = p['ID']
        self.smokingStatusCode = p['SMOKINGSTATUSCODE']
        self.smokingStatusText = SMOKINGCODES[self.smokingStatusCode]

        # Append socialHistory to the patient's socialHistory list:
        if self.pid in self.__class__.socialHistories:
            raise ValueError("Found >1 socialHistory for a patient")
        else:
            self.__class__.socialHistories[self.pid] = self

    def toJSON(self, prefix=""):
        if prefix:
            prefix += "-"
        patient = Patient.mpi[self.pid]

        return {
            "request": {
                "method": "PUT",
                "url": "Observation/" + prefix + "smokingstatus-" + self.id
            },
            "resource": {
                "id": prefix + "smokingstatus-" + self.id,
                "resourceType": "Observation",
                "status": "final",
                "identifier": [
                    {
                        "use": "official",
                        "system": "http://www.bmc.nl/zorgportal/identifiers/observations",
                        "value": prefix + self.id
                    }
                ],
                "text": {
                    "status": "generated",
                    "div": '<div xmlns="http://www.w3.org/1999/xhtml">' +
                           'Tobacco smoking status: %s</div>' % self.smokingStatusText
                },
                "performer": [
                    {
                        "reference": "Practitioner/" + prefix + "Practitioner-" + patient.gp
                    }
                ],
                "effectiveDateTime": rndDate(2016).isoformat(),
                "code": {
                    "coding": [
                        {
                            "system": "http://loinc.org",
                            "code": "72166-2",
                            "display": "Tobacco smoking status"
                        }
                    ],
                    "text": "Tobacco smoking status"
                },
                "subject": {
                    "reference": "Patient/" + prefix + self.pid
                },
                "category": [
                    {
                        "coding": [
                            {
                                "system": "http://hl7.org/fhir/observation-category",
                                "code": "social-history",
                                "display": "Social History"
                            }
                        ],
                        "text": "Social History"
                    }
                ],
                "valueCodeableConcept": {
                    "coding": [
                        {
                            "system": "http://snomed.info/sct",
                            "code": self.smokingStatusCode,
                            "display": self.smokingStatusText
                        }
                    ],
                    "text": self.smokingStatusText
                }
            }
        }
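# --- Hedged usage sketch (not part of the original module) ---
# Assuming Patient.load() has already populated Patient.mpi, a FHIR
# transaction bundle of every loaded smoking-status observation could be
# assembled like this (the "demo" prefix is an arbitrary example value):
def build_smoking_status_bundle(prefix="demo"):
    SocialHistory.load()
    entries = [h.toJSON(prefix) for h in SocialHistory.socialHistories.values()]
    return {"resourceType": "Bundle", "type": "transaction", "entry": entries}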
[((74, 37, 74, 50), 'testdata.rndDate', 'rndDate', ({(74, 45, 74, 49): '(2016)'}, {}), '(2016)', False, 'from testdata import rndDate\n')]
nirobio/puzzles
Python X/Dictionaries in python.py
fda8c84d8eefd93b40594636fb9b7f0fde02b014
{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "# dictionaries, look-up tables & key-value pairs\n", "# d = {} OR d = dict()\n", "# e.g. d = {\"George\": 24, \"Tom\": 32}\n", "\n", "d = {}\n", "\n" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "d[\"George\"] = 24" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "d[\"Tom\"] = 32\n", "d[\"Jenny\"] = 16" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "{'George': 24, 'Tom': 32, 'Jenny': 16}\n" ] } ], "source": [ "print(d)" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [ { "ename": "NameError", "evalue": "name 'Jenny' is not defined", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", "\u001b[0;32m<ipython-input-5-0bdfff196d23>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mJenny\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", "\u001b[0;31mNameError\u001b[0m: name 'Jenny' is not defined" ] } ], "source": [ "print(d[Jenny])" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "32\n" ] } ], "source": [ "print(d[\"Tom\"])" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [], "source": [ "d[\"Jenny\"] = 20" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "20\n" ] } ], "source": [ "print(d[\"Jenny\"])" ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [], "source": [ "# keys are strings or numbers \n", "\n", "d[10] = 100" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "100\n" ] } ], "source": [ "print(d[10])" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [], "source": [ "# how to iterate over key-value pairs" ] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "key:\n", "George\n", "value:\n", "24\n", "\n", "key:\n", "Tom\n", "value:\n", "32\n", "\n", "key:\n", "Jenny\n", "value:\n", "20\n", "\n", "key:\n", "10\n", "value:\n", "100\n", "\n" ] } ], "source": [ " for key, value in d.items():\n", " print(\"key:\")\n", " print(key)\n", " print(\"value:\")\n", " print(value)\n", " print(\"\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.6" } }, "nbformat": 4, "nbformat_minor": 4 }
[]
LiamBindle/spack
lib/spack/spack/test/cache_fetch.py
e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

import pytest

from llnl.util.filesystem import mkdirp, touch

import spack.config
from spack.fetch_strategy import CacheURLFetchStrategy, NoCacheError
from spack.stage import Stage


@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
def test_fetch_missing_cache(tmpdir, _fetch_method):
    """Ensure a missing cache file raises an error."""
    testpath = str(tmpdir)
    with spack.config.override('config:url_fetch_method', _fetch_method):
        fetcher = CacheURLFetchStrategy(url='file:///not-a-real-cache-file')
        with Stage(fetcher, path=testpath):
            with pytest.raises(NoCacheError, match=r'No cache'):
                fetcher.fetch()


@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
def test_fetch(tmpdir, _fetch_method):
    """Ensure a fetch after expanding is effectively a no-op."""
    testpath = str(tmpdir)
    cache = os.path.join(testpath, 'cache.tar.gz')
    touch(cache)
    url = 'file:///{0}'.format(cache)
    with spack.config.override('config:url_fetch_method', _fetch_method):
        fetcher = CacheURLFetchStrategy(url=url)
        with Stage(fetcher, path=testpath) as stage:
            source_path = stage.source_path
            mkdirp(source_path)
            fetcher.fetch()
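# --- Hedged sketch (not part of the original test module) ---
# The pattern under test, outside pytest: create a cache file, point a
# CacheURLFetchStrategy at it, and fetch inside a Stage. The workdir path
# is illustrative; this simply mirrors test_fetch above using names
# already imported in this module.
def demo_cache_fetch(workdir):
    cache = os.path.join(workdir, 'cache.tar.gz')
    touch(cache)
    fetcher = CacheURLFetchStrategy(url='file:///{0}'.format(cache))
    with Stage(fetcher, path=workdir) as stage:
        mkdirp(stage.source_path)
        fetcher.fetch()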
[((17, 1, 17, 61), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(17, 25, 17, 40): '"""_fetch_method"""', (17, 42, 17, 60): "['curl', 'urllib']"}, {}), "('_fetch_method', ['curl', 'urllib'])", False, 'import pytest\n'), ((28, 1, 28, 61), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(28, 25, 28, 40): '"""_fetch_method"""', (28, 42, 28, 60): "['curl', 'urllib']"}, {}), "('_fetch_method', ['curl', 'urllib'])", False, 'import pytest\n'), ((32, 12, 32, 50), 'os.path.join', 'os.path.join', ({(32, 25, 32, 33): 'testpath', (32, 35, 32, 49): '"""cache.tar.gz"""'}, {}), "(testpath, 'cache.tar.gz')", False, 'import os\n'), ((33, 4, 33, 16), 'llnl.util.filesystem.touch', 'touch', ({(33, 10, 33, 15): 'cache'}, {}), '(cache)', False, 'from llnl.util.filesystem import mkdirp, touch\n'), ((22, 18, 22, 76), 'spack.fetch_strategy.CacheURLFetchStrategy', 'CacheURLFetchStrategy', (), '', False, 'from spack.fetch_strategy import CacheURLFetchStrategy, NoCacheError\n'), ((36, 18, 36, 48), 'spack.fetch_strategy.CacheURLFetchStrategy', 'CacheURLFetchStrategy', (), '', False, 'from spack.fetch_strategy import CacheURLFetchStrategy, NoCacheError\n'), ((23, 13, 23, 42), 'spack.stage.Stage', 'Stage', (), '', False, 'from spack.stage import Stage\n'), ((37, 13, 37, 42), 'spack.stage.Stage', 'Stage', (), '', False, 'from spack.stage import Stage\n'), ((39, 12, 39, 31), 'llnl.util.filesystem.mkdirp', 'mkdirp', ({(39, 19, 39, 30): 'source_path'}, {}), '(source_path)', False, 'from llnl.util.filesystem import mkdirp, touch\n'), ((24, 17, 24, 63), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n')]
hanhanwu/Hanhan-Spark-Python
temp_range_sql.py
a04c33100742acffa2ad11d1937ea05c44688427
__author__ = 'hanhanw'

import sys

from pyspark import SparkConf, SparkContext
from pyspark.sql.context import SQLContext
from pyspark.sql.types import StructType, StructField, StringType, DoubleType

conf = SparkConf().setAppName("temp range sql")
sc = SparkContext(conf=conf)
sqlContext = SQLContext(sc)
assert sc.version >= '1.5.1'

inputs1 = sys.argv[1]
output = sys.argv[2]


def get_range(recordings):
    recordings.registerTempTable('Recordings')
    dfrange = sqlContext.sql("""
        SELECT r1.DateTime, r1.StationID, (r1.DataValue-r2.DataValue) AS Range
        FROM
        (SELECT StationID, DateTime, Observation, DataValue FROM Recordings
         WHERE Observation='TMAX') r1
        JOIN
        (SELECT StationID, DateTime, Observation, DataValue FROM Recordings
         WHERE Observation='TMIN') r2
        ON (r1.StationID = r2.StationID AND r1.DateTime = r2.DateTime)
        """)
    dfrange.registerTempTable('RangeTable')

    df_maxrange = sqlContext.sql("""
        SELECT DateTime, MAX(Range) AS MaxRange
        FROM RangeTable
        GROUP BY DateTime
        """)
    df_maxrange.registerTempTable('MaxRange')

    df_result = sqlContext.sql("""
        SELECT t1.DateTime as DateTime, t1.StationID as StationID, t2.MaxRange as MaxRange
        FROM RangeTable t1
        JOIN MaxRange t2
        ON (t1.DateTime = t2.DateTime AND t1.Range = t2.MaxRange)
        """)
    return df_result


def main():
    temp_schema = StructType([
        StructField('StationID', StringType(), False),
        StructField('DateTime', StringType(), False),
        StructField('Observation', StringType(), False),
        StructField('DataValue', DoubleType(), False),
        StructField('MFlag', StringType(), True),
        StructField('QFlag', StringType(), True),
        StructField('SFlag', StringType(), True),
        StructField('OBSTime', StringType(), True),
    ])

    df = sqlContext.read.format('com.databricks.spark.csv').options(header='false').load(inputs1, schema=temp_schema)
    df = df.filter(df.QFlag == '')
    dfrange = get_range(df)
    result = dfrange.rdd.map(lambda r: str(r.DateTime)+' '+str(r.StationID)+' '+str(r.MaxRange))
    outdata = result.sortBy(lambda r: r[0]).coalesce(1)
    outdata.saveAsTextFile(output)


if __name__ == "__main__":
    main()
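# --- Hedged usage note (not part of the original script) ---
# A typical invocation with spark-submit; the input and output paths are
# illustrative placeholders:
#
#   spark-submit temp_range_sql.py hdfs:///data/weather-1 hdfs:///out/temp-range
#
# inputs1 is a directory of GHCN-style tab-less CSV recordings, and output
# receives a single text file of "DateTime StationID MaxRange" lines, one
# per date, via the self-join of TMAX and TMIN rows in get_range().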
[((10, 5, 10, 28), 'pyspark.SparkContext', 'SparkContext', (), '', False, 'from pyspark import SparkConf, SparkContext\n'), ((11, 13, 11, 27), 'pyspark.sql.context.SQLContext', 'SQLContext', ({(11, 24, 11, 26): 'sc'}, {}), '(sc)', False, 'from pyspark.sql.context import SQLContext\n'), ((9, 7, 9, 18), 'pyspark.SparkConf', 'SparkConf', ({}, {}), '()', False, 'from pyspark import SparkConf, SparkContext\n'), ((49, 29, 49, 41), 'pyspark.sql.types.StringType', 'StringType', ({}, {}), '()', False, 'from pyspark.sql.types import StructType, StructField, StringType, DoubleType\n'), ((50, 28, 50, 40), 'pyspark.sql.types.StringType', 'StringType', ({}, {}), '()', False, 'from pyspark.sql.types import StructType, StructField, StringType, DoubleType\n'), ((51, 31, 51, 43), 'pyspark.sql.types.StringType', 'StringType', ({}, {}), '()', False, 'from pyspark.sql.types import StructType, StructField, StringType, DoubleType\n'), ((52, 29, 52, 41), 'pyspark.sql.types.DoubleType', 'DoubleType', ({}, {}), '()', False, 'from pyspark.sql.types import StructType, StructField, StringType, DoubleType\n'), ((53, 25, 53, 37), 'pyspark.sql.types.StringType', 'StringType', ({}, {}), '()', False, 'from pyspark.sql.types import StructType, StructField, StringType, DoubleType\n'), ((54, 25, 54, 37), 'pyspark.sql.types.StringType', 'StringType', ({}, {}), '()', False, 'from pyspark.sql.types import StructType, StructField, StringType, DoubleType\n'), ((55, 25, 55, 37), 'pyspark.sql.types.StringType', 'StringType', ({}, {}), '()', False, 'from pyspark.sql.types import StructType, StructField, StringType, DoubleType\n'), ((56, 27, 56, 39), 'pyspark.sql.types.StringType', 'StringType', ({}, {}), '()', False, 'from pyspark.sql.types import StructType, StructField, StringType, DoubleType\n')]
Pompino/react-components-23KB
container/pyf/graphqltypes/Event.py
3201a417c5160e1b77f29fc1eac74ae9dc10d6ad
from typing_extensions import Required
#from sqlalchemy.sql.sqltypes import Boolean
from graphene import ObjectType, String, Field, ID, List, DateTime, Mutation, Boolean, Int

from models.EventsRelated.EventModel import EventModel

from graphqltypes.Utils import extractSession


class EventType(ObjectType):
    id = ID()
    name = String()

    lastchange = DateTime()
    externalId = String()

    users = List('graphqltypes.User.UserType')

    def resolve_users(parent, info):
        session = extractSession(info)
        dbRecord = session.query(EventModel).get(parent.id)
        return dbRecord.users

    groups = List('graphqltypes.Group.GroupType')

    def resolve_groups(parent, info):
        session = extractSession(info)
        dbRecord = session.query(EventModel).get(parent.id)
        return dbRecord.groups

    rooms = List('graphqltypes.Room.RoomType')

    def resolve_rooms(parent, info):
        session = extractSession(info)
        dbRecord = session.query(EventModel).get(parent.id)
        return dbRecord.rooms
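# --- Hedged sketch (not part of the original module) ---
# One way EventType might be exposed on a root query type; the class name
# "EventQuery", the field name and the resolver below are illustrative
# assumptions, reusing only names already imported in this module.
class EventQuery(ObjectType):
    event_by_id = Field(EventType, id=Int(required=True))

    def resolve_event_by_id(parent, info, id):
        session = extractSession(info)
        return session.query(EventModel).get(id)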
[((10, 9, 10, 13), 'graphene.ID', 'ID', ({}, {}), '()', False, 'from graphene import ObjectType, String, Field, ID, List, DateTime, Mutation, Boolean, Int\n'), ((11, 11, 11, 19), 'graphene.String', 'String', ({}, {}), '()', False, 'from graphene import ObjectType, String, Field, ID, List, DateTime, Mutation, Boolean, Int\n'), ((13, 17, 13, 27), 'graphene.DateTime', 'DateTime', ({}, {}), '()', False, 'from graphene import ObjectType, String, Field, ID, List, DateTime, Mutation, Boolean, Int\n'), ((14, 17, 14, 25), 'graphene.String', 'String', ({}, {}), '()', False, 'from graphene import ObjectType, String, Field, ID, List, DateTime, Mutation, Boolean, Int\n'), ((16, 12, 16, 46), 'graphene.List', 'List', ({(16, 17, 16, 45): '"""graphqltypes.User.UserType"""'}, {}), "('graphqltypes.User.UserType')", False, 'from graphene import ObjectType, String, Field, ID, List, DateTime, Mutation, Boolean, Int\n'), ((22, 13, 22, 49), 'graphene.List', 'List', ({(22, 18, 22, 48): '"""graphqltypes.Group.GroupType"""'}, {}), "('graphqltypes.Group.GroupType')", False, 'from graphene import ObjectType, String, Field, ID, List, DateTime, Mutation, Boolean, Int\n'), ((28, 12, 28, 46), 'graphene.List', 'List', ({(28, 17, 28, 45): '"""graphqltypes.Room.RoomType"""'}, {}), "('graphqltypes.Room.RoomType')", False, 'from graphene import ObjectType, String, Field, ID, List, DateTime, Mutation, Boolean, Int\n'), ((18, 18, 18, 38), 'graphqltypes.Utils.extractSession', 'extractSession', ({(18, 33, 18, 37): 'info'}, {}), '(info)', False, 'from graphqltypes.Utils import extractSession\n'), ((24, 18, 24, 38), 'graphqltypes.Utils.extractSession', 'extractSession', ({(24, 33, 24, 37): 'info'}, {}), '(info)', False, 'from graphqltypes.Utils import extractSession\n'), ((30, 18, 30, 38), 'graphqltypes.Utils.extractSession', 'extractSession', ({(30, 33, 30, 37): 'info'}, {}), '(info)', False, 'from graphqltypes.Utils import extractSession\n')]
kokosing/hue
desktop/core/ext-py/openpyxl-2.3.0-b2/openpyxl/drawing/shape.py
2307f5379a35aae9be871e836432e6f45138b3d9
from __future__ import absolute_import
# Copyright (c) 2010-2015 openpyxl

from openpyxl.styles.colors import Color, BLACK, WHITE
from openpyxl.utils.units import (
    pixels_to_EMU,
    EMU_to_pixels,
    short_color,
)
from openpyxl.compat import deprecated
from openpyxl.xml.functions import Element, SubElement, tostring
from openpyxl.xml.constants import (
    DRAWING_NS,
    SHEET_DRAWING_NS,
    CHART_NS,
    CHART_DRAWING_NS,
    PKG_REL_NS
)
from openpyxl.compat.strings import safe_string


class Shape(object):
    """ a drawing inside a chart
        coordinates are specified by the user in the axis units
    """

    MARGIN_LEFT = 6 + 13 + 1
    MARGIN_BOTTOM = 17 + 11

    FONT_WIDTH = 7
    FONT_HEIGHT = 8

    ROUND_RECT = 'roundRect'
    RECT = 'rect'

    # other shapes to define:
    '''
    "line" "lineInv" "triangle" "rtTriangle" "diamond" "parallelogram"
    "trapezoid" "nonIsoscelesTrapezoid" "pentagon" "hexagon" "heptagon"
    "octagon" "decagon" "dodecagon" "star4" "star5" "star6" "star7" "star8"
    "star10" "star12" "star16" "star24" "star32" "roundRect" "round1Rect"
    "round2SameRect" "round2DiagRect" "snipRoundRect" "snip1Rect"
    "snip2SameRect" "snip2DiagRect" "plaque" "ellipse" "teardrop" "homePlate"
    "chevron" "pieWedge" "pie" "blockArc" "donut" "noSmoking" "rightArrow"
    "leftArrow" "upArrow" "downArrow" "stripedRightArrow" "notchedRightArrow"
    "bentUpArrow" "leftRightArrow" "upDownArrow" "leftUpArrow"
    "leftRightUpArrow" "quadArrow" "leftArrowCallout" "rightArrowCallout"
    "upArrowCallout" "downArrowCallout" "leftRightArrowCallout"
    "upDownArrowCallout" "quadArrowCallout" "bentArrow" "uturnArrow"
    "circularArrow" "leftCircularArrow" "leftRightCircularArrow"
    "curvedRightArrow" "curvedLeftArrow" "curvedUpArrow" "curvedDownArrow"
    "swooshArrow" "cube" "can" "lightningBolt" "heart" "sun" "moon"
    "smileyFace" "irregularSeal1" "irregularSeal2" "foldedCorner" "bevel"
    "frame" "halfFrame" "corner" "diagStripe" "chord" "arc" "leftBracket"
    "rightBracket" "leftBrace" "rightBrace" "bracketPair" "bracePair"
    "straightConnector1" "bentConnector2" "bentConnector3" "bentConnector4"
    "bentConnector5" "curvedConnector2" "curvedConnector3" "curvedConnector4"
    "curvedConnector5" "callout1" "callout2" "callout3" "accentCallout1"
    "accentCallout2" "accentCallout3" "borderCallout1" "borderCallout2"
    "borderCallout3" "accentBorderCallout1" "accentBorderCallout2"
    "accentBorderCallout3" "wedgeRectCallout" "wedgeRoundRectCallout"
    "wedgeEllipseCallout" "cloudCallout" "cloud" "ribbon" "ribbon2"
    "ellipseRibbon" "ellipseRibbon2" "leftRightRibbon" "verticalScroll"
    "horizontalScroll" "wave" "doubleWave" "plus" "flowChartProcess"
    "flowChartDecision" "flowChartInputOutput" "flowChartPredefinedProcess"
    "flowChartInternalStorage" "flowChartDocument" "flowChartMultidocument"
    "flowChartTerminator" "flowChartPreparation" "flowChartManualInput"
    "flowChartManualOperation" "flowChartConnector" "flowChartPunchedCard"
    "flowChartPunchedTape" "flowChartSummingJunction" "flowChartOr"
    "flowChartCollate" "flowChartSort" "flowChartExtract" "flowChartMerge"
    "flowChartOfflineStorage" "flowChartOnlineStorage" "flowChartMagneticTape"
    "flowChartMagneticDisk" "flowChartMagneticDrum" "flowChartDisplay"
    "flowChartDelay" "flowChartAlternateProcess" "flowChartOffpageConnector"
    "actionButtonBlank" "actionButtonHome" "actionButtonHelp"
    "actionButtonInformation" "actionButtonForwardNext"
    "actionButtonBackPrevious" "actionButtonEnd" "actionButtonBeginning"
    "actionButtonReturn" "actionButtonDocument" "actionButtonSound"
    "actionButtonMovie" "gear6" "gear9" "funnel" "mathPlus" "mathMinus"
    "mathMultiply" "mathDivide" "mathEqual" "mathNotEqual" "cornerTabs"
    "squareTabs" "plaqueTabs" "chartX" "chartStar" "chartPlus"
    '''

    @deprecated("Chart Drawings need a complete rewrite")
    def __init__(self,
                 chart,
                 coordinates=((0, 0), (1, 1)),
                 text=None,
                 scheme="accent1"):
        self.chart = chart
        self.coordinates = coordinates  # in axis units
        self.text = text
        self.scheme = scheme
        self.style = Shape.RECT
        self.border_width = 0
        self.border_color = BLACK  # "F3B3C5"
        self.color = WHITE
        self.text_color = BLACK

    @property
    def border_color(self):
        return self._border_color

    @border_color.setter
    def border_color(self, color):
        self._border_color = short_color(color)

    @property
    def color(self):
        return self._color

    @color.setter
    def color(self, color):
        self._color = short_color(color)

    @property
    def text_color(self):
        return self._text_color

    @text_color.setter
    def text_color(self, color):
        self._text_color = short_color(color)

    @property
    def border_width(self):
        return self._border_width

    @border_width.setter
    def border_width(self, w):
        self._border_width = w

    @property
    def coordinates(self):
        """Return coordinates in axis units"""
        return self._coordinates

    @coordinates.setter
    def coordinates(self, coords):
        """ set shape coordinates in percentages (left, top, right, bottom)
        """
        # this needs refactoring to reflect changes in charts
        self.axis_coordinates = coords
        (x1, y1), (x2, y2) = coords  # bottom left, top right
        drawing_width = pixels_to_EMU(self.chart.drawing.width)
        drawing_height = pixels_to_EMU(self.chart.drawing.height)
        plot_width = drawing_width * self.chart.width
        plot_height = drawing_height * self.chart.height

        margin_left = self.chart._get_margin_left() * drawing_width
        xunit = plot_width / self.chart.get_x_units()

        margin_top = self.chart._get_margin_top() * drawing_height
        yunit = self.chart.get_y_units()

        x_start = (margin_left + (float(x1) * xunit)) / drawing_width
        y_start = ((margin_top + plot_height - (float(y1) * yunit))
                   / drawing_height)

        x_end = (margin_left + (float(x2) * xunit)) / drawing_width
        y_end = ((margin_top + plot_height - (float(y2) * yunit))
                 / drawing_height)

        # allow user to specify y's in whatever order
        # Excel expects y_end to be lower
        if y_end < y_start:
            y_end, y_start = y_start, y_end

        self._coordinates = (
            self._norm_pct(x_start), self._norm_pct(y_start),
            self._norm_pct(x_end), self._norm_pct(y_end)
        )

    @staticmethod
    def _norm_pct(pct):
        """ force shapes to appear by truncating too large sizes """
        if pct > 1:
            return 1
        elif pct < 0:
            return 0
        return pct


class ShapeWriter(object):
    """ one file per shape """

    def __init__(self, shapes):
        self._shapes = shapes

    def write(self, shape_id):
        root = Element('{%s}userShapes' % CHART_NS)

        for shape in self._shapes:
            anchor = SubElement(root, '{%s}relSizeAnchor' % CHART_DRAWING_NS)

            xstart, ystart, xend, yend = shape.coordinates

            _from = SubElement(anchor, '{%s}from' % CHART_DRAWING_NS)
            SubElement(_from, '{%s}x' % CHART_DRAWING_NS).text = str(xstart)
            SubElement(_from, '{%s}y' % CHART_DRAWING_NS).text = str(ystart)

            _to = SubElement(anchor, '{%s}to' % CHART_DRAWING_NS)
            SubElement(_to, '{%s}x' % CHART_DRAWING_NS).text = str(xend)
            SubElement(_to, '{%s}y' % CHART_DRAWING_NS).text = str(yend)

            sp = SubElement(anchor, '{%s}sp' % CHART_DRAWING_NS, {'macro': '', 'textlink': ''})
            nvspr = SubElement(sp, '{%s}nvSpPr' % CHART_DRAWING_NS)
            SubElement(nvspr, '{%s}cNvPr' % CHART_DRAWING_NS, {'id': str(shape_id), 'name': 'shape %s' % shape_id})
            SubElement(nvspr, '{%s}cNvSpPr' % CHART_DRAWING_NS)

            sppr = SubElement(sp, '{%s}spPr' % CHART_DRAWING_NS)
            frm = SubElement(sppr, '{%s}xfrm' % DRAWING_NS)
            # no transformation
            SubElement(frm, '{%s}off' % DRAWING_NS, {'x': '0', 'y': '0'})
            SubElement(frm, '{%s}ext' % DRAWING_NS, {'cx': '0', 'cy': '0'})

            prstgeom = SubElement(sppr, '{%s}prstGeom' % DRAWING_NS, {'prst': str(shape.style)})
            SubElement(prstgeom, '{%s}avLst' % DRAWING_NS)

            fill = SubElement(sppr, '{%s}solidFill' % DRAWING_NS)
            SubElement(fill, '{%s}srgbClr' % DRAWING_NS, {'val': shape.color})

            border = SubElement(sppr, '{%s}ln' % DRAWING_NS, {'w': str(shape._border_width)})
            sf = SubElement(border, '{%s}solidFill' % DRAWING_NS)
            SubElement(sf, '{%s}srgbClr' % DRAWING_NS, {'val': shape.border_color})

            self._write_style(sp)
            self._write_text(sp, shape)

            shape_id += 1

        return tostring(root)

    def _write_text(self, node, shape):
        """ write text in the shape """
        tx_body = SubElement(node, '{%s}txBody' % CHART_DRAWING_NS)
        SubElement(tx_body, '{%s}bodyPr' % DRAWING_NS, {'vertOverflow': 'clip'})
        SubElement(tx_body, '{%s}lstStyle' % DRAWING_NS)
        p = SubElement(tx_body, '{%s}p' % DRAWING_NS)
        if shape.text:
            r = SubElement(p, '{%s}r' % DRAWING_NS)
            rpr = SubElement(r, '{%s}rPr' % DRAWING_NS, {'lang': 'en-US'})
            fill = SubElement(rpr, '{%s}solidFill' % DRAWING_NS)
            SubElement(fill, '{%s}srgbClr' % DRAWING_NS, {'val': shape.text_color})
            SubElement(r, '{%s}t' % DRAWING_NS).text = shape.text
        else:
            SubElement(p, '{%s}endParaRPr' % DRAWING_NS, {'lang': 'en-US'})

    def _write_style(self, node):
        """ write style theme """
        style = SubElement(node, '{%s}style' % CHART_DRAWING_NS)

        ln_ref = SubElement(style, '{%s}lnRef' % DRAWING_NS, {'idx': '2'})
        scheme_clr = SubElement(ln_ref, '{%s}schemeClr' % DRAWING_NS, {'val': 'accent1'})
        SubElement(scheme_clr, '{%s}shade' % DRAWING_NS, {'val': '50000'})

        fill_ref = SubElement(style, '{%s}fillRef' % DRAWING_NS, {'idx': '1'})
        SubElement(fill_ref, '{%s}schemeClr' % DRAWING_NS, {'val': 'accent1'})

        effect_ref = SubElement(style, '{%s}effectRef' % DRAWING_NS, {'idx': '0'})
        SubElement(effect_ref, '{%s}schemeClr' % DRAWING_NS, {'val': 'accent1'})

        font_ref = SubElement(style, '{%s}fontRef' % DRAWING_NS, {'idx': 'minor'})
        SubElement(font_ref, '{%s}schemeClr' % DRAWING_NS, {'val': 'lt1'})
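# Hypothetical usage sketch (the chart object is illustrative; note that Shape
# is deprecated upstream, per the @deprecated decorator above):
#
#   shape = Shape(chart, coordinates=((0, 0), (10, 100)), text='peak')
#   shape.color = 'CCCCCC'                        # normalised via short_color()
#   xml = ShapeWriter([shape]).write(shape_id=1)  # serialised userShapes XML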
[((228, 5, 228, 57), 'openpyxl.compat.deprecated', 'deprecated', ({(228, 16, 228, 56): '"Chart Drawings need a complete rewrite"'}, {}), "('Chart Drawings need a complete rewrite')", False, 'from openpyxl.compat import deprecated\n'), ((250, 29, 250, 47), 'openpyxl.utils.units.short_color', 'short_color', ({(250, 41, 250, 46): 'color'}, {}), '(color)', False, 'from openpyxl.utils.units import pixels_to_EMU, EMU_to_pixels, short_color\n'), ((258, 22, 258, 40), 'openpyxl.utils.units.short_color', 'short_color', ({(258, 34, 258, 39): 'color'}, {}), '(color)', False, 'from openpyxl.utils.units import pixels_to_EMU, EMU_to_pixels, short_color\n'), ((266, 27, 266, 45), 'openpyxl.utils.units.short_color', 'short_color', ({(266, 39, 266, 44): 'color'}, {}), '(color)', False, 'from openpyxl.utils.units import pixels_to_EMU, EMU_to_pixels, short_color\n'), ((288, 24, 288, 63), 'openpyxl.utils.units.pixels_to_EMU', 'pixels_to_EMU', ({(288, 38, 288, 62): 'self.chart.drawing.width'}, {}), '(self.chart.drawing.width)', False, 'from openpyxl.utils.units import pixels_to_EMU, EMU_to_pixels, short_color\n'), ((289, 25, 289, 65), 'openpyxl.utils.units.pixels_to_EMU', 'pixels_to_EMU', ({(289, 39, 289, 64): 'self.chart.drawing.height'}, {}), '(self.chart.drawing.height)', False, 'from openpyxl.utils.units import pixels_to_EMU, EMU_to_pixels, short_color\n'), ((340, 15, 340, 51), 'openpyxl.xml.functions.Element', 'Element', ({(340, 23, 340, 50): "'{%s}userShapes' % CHART_NS"}, {}), "('{%s}userShapes' % CHART_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((381, 15, 381, 29), 'openpyxl.xml.functions.tostring', 'tostring', ({(381, 24, 381, 28): 'root'}, {}), '(root)', False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((386, 18, 386, 67), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(386, 29, 386, 33): 'node', (386, 35, 386, 66): "'{%s}txBody' % CHART_DRAWING_NS"}, {}), "(node, '{%s}txBody' % CHART_DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((387, 8, 387, 79), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(387, 19, 387, 26): 'tx_body', (387, 28, 387, 53): "('{%s}bodyPr' % DRAWING_NS)", (387, 55, 387, 78): "{'vertOverflow': 'clip'}"}, {}), "(tx_body, '{%s}bodyPr' % DRAWING_NS, {'vertOverflow': 'clip'})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((388, 8, 388, 56), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(388, 19, 388, 26): 'tx_body', (388, 28, 388, 55): "('{%s}lstStyle' % DRAWING_NS)"}, {}), "(tx_body, '{%s}lstStyle' % DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((389, 12, 389, 53), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(389, 23, 389, 30): 'tx_body', (389, 32, 389, 52): "'{%s}p' % DRAWING_NS"}, {}), "(tx_body, '{%s}p' % DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((403, 16, 403, 64), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(403, 27, 403, 31): 'node', (403, 33, 403, 63): "'{%s}style' % CHART_DRAWING_NS"}, {}), "(node, '{%s}style' % CHART_DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((405, 17, 405, 73), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(405, 28, 405, 33): 'style', (405, 35, 405, 59): "'{%s}lnRef' % DRAWING_NS", (405, 61, 405, 72): "{'idx': '2'}"}, {}), "(style, '{%s}lnRef' % DRAWING_NS, {'idx': '2'})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((406, 21, 406, 88), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(406, 32, 406, 38): 'ln_ref', (406, 40, 406, 68): "'{%s}schemeClr' % DRAWING_NS", (406, 70, 406, 87): "{'val': 'accent1'}"}, {}), "(ln_ref, '{%s}schemeClr' % DRAWING_NS, {'val': 'accent1'})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((407, 8, 407, 73), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(407, 19, 407, 29): 'scheme_clr', (407, 31, 407, 55): "('{%s}shade' % DRAWING_NS)", (407, 57, 407, 72): "{'val': '50000'}"}, {}), "(scheme_clr, '{%s}shade' % DRAWING_NS, {'val': '50000'})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((409, 19, 409, 77), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(409, 30, 409, 35): 'style', (409, 37, 409, 63): "'{%s}fillRef' % DRAWING_NS", (409, 65, 409, 76): "{'idx': '1'}"}, {}), "(style, '{%s}fillRef' % DRAWING_NS, {'idx': '1'})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((410, 8, 410, 77), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(410, 19, 410, 27): 'fill_ref', (410, 29, 410, 57): "('{%s}schemeClr' % DRAWING_NS)", (410, 59, 410, 76): "{'val': 'accent1'}"}, {}), "(fill_ref, '{%s}schemeClr' % DRAWING_NS, {'val': 'accent1'})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((412, 21, 412, 81), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(412, 32, 412, 37): 'style', (412, 39, 412, 67): "'{%s}effectRef' % DRAWING_NS", (412, 69, 412, 80): "{'idx': '0'}"}, {}), "(style, '{%s}effectRef' % DRAWING_NS, {'idx': '0'})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((413, 8, 413, 79), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(413, 19, 413, 29): 'effect_ref', (413, 31, 413, 59): "('{%s}schemeClr' % DRAWING_NS)", (413, 61, 413, 78): "{'val': 'accent1'}"}, {}), "(effect_ref, '{%s}schemeClr' % DRAWING_NS, {'val': 'accent1'})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((415, 19, 415, 81), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(415, 30, 415, 35): 'style', (415, 37, 415, 63): "'{%s}fontRef' % DRAWING_NS", (415, 65, 415, 80): "{'idx': 'minor'}"}, {}), "(style, '{%s}fontRef' % DRAWING_NS, {'idx': 'minor'})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((416, 8, 416, 73), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(416, 19, 416, 27): 'font_ref', (416, 29, 416, 57): "('{%s}schemeClr' % DRAWING_NS)", (416, 59, 416, 72): "{'val': 'lt1'}"}, {}), "(font_ref, '{%s}schemeClr' % DRAWING_NS, {'val': 'lt1'})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((343, 21, 343, 77), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(343, 32, 343, 36): 'root', (343, 38, 343, 76): "'{%s}relSizeAnchor' % CHART_DRAWING_NS"}, {}), "(root, '{%s}relSizeAnchor' % CHART_DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((347, 20, 347, 69), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(347, 31, 347, 37): 'anchor', (347, 39, 347, 68): "'{%s}from' % CHART_DRAWING_NS"}, {}), "(anchor, '{%s}from' % CHART_DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((351, 18, 351, 65), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(351, 29, 351, 35): 'anchor', (351, 37, 351, 64): "'{%s}to' % CHART_DRAWING_NS"}, {}), "(anchor, '{%s}to' % CHART_DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((355, 17, 355, 93), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(355, 28, 355, 34): 'anchor', (355, 36, 355, 63): "'{%s}sp' % CHART_DRAWING_NS", (355, 65, 355, 92): "{'macro': '', 'textlink': ''}"}, {}), "(anchor, '{%s}sp' % CHART_DRAWING_NS, {'macro': '', 'textlink': ''})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((356, 20, 356, 67), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(356, 31, 356, 33): 'sp', (356, 35, 356, 66): "'{%s}nvSpPr' % CHART_DRAWING_NS"}, {}), "(sp, '{%s}nvSpPr' % CHART_DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((358, 12, 358, 63), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(358, 23, 358, 28): 'nvspr', (358, 30, 358, 62): "('{%s}cNvSpPr' % CHART_DRAWING_NS)"}, {}), "(nvspr, '{%s}cNvSpPr' % CHART_DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((360, 19, 360, 64), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(360, 30, 360, 32): 'sp', (360, 34, 360, 63): "'{%s}spPr' % CHART_DRAWING_NS"}, {}), "(sp, '{%s}spPr' % CHART_DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((361, 18, 361, 60), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(361, 29, 361, 33): 'sppr', (361, 35, 361, 58): "'{%s}xfrm' % DRAWING_NS"}, {}), "(sppr, '{%s}xfrm' % DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((363, 12, 363, 71), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(363, 23, 363, 26): 'frm', (363, 28, 363, 50): "('{%s}off' % DRAWING_NS)", (363, 52, 363, 70): "{'x': '0', 'y': '0'}"}, {}), "(frm, '{%s}off' % DRAWING_NS, {'x': '0', 'y': '0'})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((364, 12, 364, 73), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(364, 23, 364, 26): 'frm', (364, 28, 364, 50): "('{%s}ext' % DRAWING_NS)", (364, 52, 364, 72): "{'cx': '0', 'cy': '0'}"}, {}), "(frm, '{%s}ext' % DRAWING_NS, {'cx': '0', 'cy': '0'})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((367, 12, 367, 58), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(367, 23, 367, 31): 'prstgeom', (367, 33, 367, 57): "('{%s}avLst' % DRAWING_NS)"}, {}), "(prstgeom, '{%s}avLst' % DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((369, 19, 369, 67), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(369, 30, 369, 34): 'sppr', (369, 36, 369, 64): "'{%s}solidFill' % DRAWING_NS"}, {}), "(sppr, '{%s}solidFill' % DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((370, 12, 370, 77), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(370, 23, 370, 27): 'fill', (370, 29, 370, 55): "('{%s}srgbClr' % DRAWING_NS)", (370, 57, 370, 76): "{'val': shape.color}"}, {}), "(fill, '{%s}srgbClr' % DRAWING_NS, {'val': shape.color})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((373, 17, 373, 65), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(373, 28, 373, 34): 'border', (373, 36, 373, 64): "'{%s}solidFill' % DRAWING_NS"}, {}), "(border, '{%s}solidFill' % DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((374, 12, 374, 82), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(374, 23, 374, 25): 'sf', (374, 27, 374, 53): "('{%s}srgbClr' % DRAWING_NS)", (374, 55, 374, 81): "{'val': shape.border_color}"}, {}), "(sf, '{%s}srgbClr' % DRAWING_NS, {'val': shape.border_color})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((391, 16, 391, 51), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(391, 27, 391, 28): 'p', (391, 30, 391, 50): "'{%s}r' % DRAWING_NS"}, {}), "(p, '{%s}r' % DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((392, 18, 392, 73), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(392, 29, 392, 30): 'r', (392, 32, 392, 54): "'{%s}rPr' % DRAWING_NS", (392, 56, 392, 72): "{'lang': 'en-US'}"}, {}), "(r, '{%s}rPr' % DRAWING_NS, {'lang': 'en-US'})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((393, 19, 393, 64), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(393, 30, 393, 33): 'rpr', (393, 35, 393, 63): "'{%s}solidFill' % DRAWING_NS"}, {}), "(rpr, '{%s}solidFill' % DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((394, 12, 394, 82), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(394, 23, 394, 27): 'fill', (394, 29, 394, 55): "('{%s}srgbClr' % DRAWING_NS)", (394, 57, 394, 81): "{'val': shape.text_color}"}, {}), "(fill, '{%s}srgbClr' % DRAWING_NS, {'val': shape.text_color})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((398, 12, 398, 74), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(398, 23, 398, 24): 'p', (398, 26, 398, 55): "('{%s}endParaRPr' % DRAWING_NS)", (398, 57, 398, 73): "{'lang': 'en-US'}"}, {}), "(p, '{%s}endParaRPr' % DRAWING_NS, {'lang': 'en-US'})", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((348, 12, 348, 57), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(348, 23, 348, 28): '_from', (348, 30, 348, 56): "('{%s}x' % CHART_DRAWING_NS)"}, {}), "(_from, '{%s}x' % CHART_DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((349, 12, 349, 57), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(349, 23, 349, 28): '_from', (349, 30, 349, 56): "('{%s}y' % CHART_DRAWING_NS)"}, {}), "(_from, '{%s}y' % CHART_DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((352, 12, 352, 55), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(352, 23, 352, 26): '_to', (352, 28, 352, 54): "('{%s}x' % CHART_DRAWING_NS)"}, {}), "(_to, '{%s}x' % CHART_DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((353, 12, 353, 55), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(353, 23, 353, 26): '_to', (353, 28, 353, 54): "('{%s}y' % CHART_DRAWING_NS)"}, {}), "(_to, '{%s}y' % CHART_DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n'), ((396, 12, 396, 47), 'openpyxl.xml.functions.SubElement', 'SubElement', ({(396, 23, 396, 24): 'r', (396, 26, 396, 46): "('{%s}t' % DRAWING_NS)"}, {}), "(r, '{%s}t' % DRAWING_NS)", False, 'from openpyxl.xml.functions import Element, SubElement, tostring\n')]
elowy01/igsr_analysis
scripts/VCF/FILTER/subset_vcf.py
ffea4885227c2299f886a4f41e70b6e1f6bb43da
from VcfQC import VcfQC
from ReseqTrackDB import File
from ReseqTrackDB import ReseqTrackDB

import argparse
import os
import logging
import datetime

# get command line arguments
parser = argparse.ArgumentParser(description='Script to subset a VCF by excluding the variants within the regions defined by a BED file')

'''
Reseqtrack DB connection parameters
'''
parser.add_argument('--hostname', type=str, required=True, help='Hostname for ReseqTrack DB')
parser.add_argument('--username', type=str, required=True, help='User for ReseqTrack DB')
parser.add_argument('--port', type=int, required=True, help='Port number in the ReseqTrack DB')
parser.add_argument('--pwd', type=str, help='PWD for the ReseqTrack DB')
parser.add_argument('--db', type=str, required=True, help='DB name in the ReseqTrack DB')

parser.add_argument('--type', type=str, required=True, help='Type of the new VCF file')
parser.add_argument('--vcftools_folder', type=str, required=True, help='Folder containing the VCFtools binary')
parser.add_argument('--bgzip_folder', type=str, required=True, help='Folder containing the bgzip binary')
parser.add_argument('--filename', type=str, required=True, help='Name (without the fullpath) of the VCF file that will be analysed. It assumes that the filename format is for example lc_bams.gatk.xxxx.vcf.gz, where lc_bams is the analysis group and gatk is the method used')
parser.add_argument('--bed', type=str, required=True, help='BED file containing the coordinates to exclude')
parser.add_argument('--outsuffix', type=str, required=True, help='Suffix for vcf output file. i.e. no_cms or no_offtarget')
parser.add_argument('--outdir', type=str, required=True, help='Directory used to put the output files.')

args = parser.parse_args()

if __name__ == '__main__':
    if not os.path.isdir(args.outdir):
        raise Exception("Output dir does not exist: %s" % args.outdir)

    hostname = args.hostname
    username = args.username
    db = args.db
    port = args.port
    pwd = args.pwd

    reseqdb = ReseqTrackDB(host=hostname, user=username, port=port, pwd=pwd, db=db)

    file = reseqdb.fetch_file_by_filename(args.filename)

    # constructing the out filename
    now = datetime.datetime.now().strftime('%Y%m%d')
    bits = os.path.basename(file.name).split('.')
    outprefix = bits[0] + "." + bits[1] + "." + args.outsuffix + "." + now

    log_filename = "subset_vcf_%s.log" % outprefix

    logger = logging.getLogger("subset_vcf")
    logger.setLevel(logging.INFO)

    # create the logging file handler
    fh = logging.FileHandler(log_filename)

    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    fh.setFormatter(formatter)

    # add handler to logger object
    logger.addHandler(fh)

    logger.info("Program started")

    vcfQC = VcfQC(vcf=file.path, bgzip_folder=args.bgzip_folder, vcftools_folder=args.vcftools_folder)
    vcffile = vcfQC.subset_vcf(bed=args.bed, outprefix=outprefix, outdir=args.outdir, create_index=True)

    f = File(path=vcffile, type=args.type, host_id=1, withdrawn=0)
    f.store(reseqdb, do_md5=True)

    logger.info("Done!")
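# Hypothetical invocation (host, credentials and paths are placeholders), built
# from the argparse options defined above:
#
#   python subset_vcf.py --hostname mysql-host --username reseq_user \
#       --port 4197 --pwd secret --db reseqtrack_db --type NO_OFFTARGET_VCF \
#       --vcftools_folder /opt/vcftools/bin --bgzip_folder /opt/htslib/bin \
#       --filename lc_bams.gatk.20160101.vcf.gz --bed offtarget.bed \
#       --outsuffix no_offtarget --outdir /tmp/out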
[((13, 9, 13, 137), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((44, 14, 44, 79), 'ReseqTrackDB.ReseqTrackDB', 'ReseqTrackDB', (), '', False, 'from ReseqTrackDB import ReseqTrackDB\n'), ((55, 13, 55, 44), 'logging.getLogger', 'logging.getLogger', ({(55, 31, 55, 43): '"""subset_vcf"""'}, {}), "('subset_vcf')", False, 'import logging\n'), ((59, 9, 59, 42), 'logging.FileHandler', 'logging.FileHandler', ({(59, 29, 59, 41): 'log_filename'}, {}), '(log_filename)', False, 'import logging\n'), ((61, 16, 61, 89), 'logging.Formatter', 'logging.Formatter', ({(61, 34, 61, 88): '"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'}, {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')", False, 'import logging\n'), ((69, 12, 69, 100), 'VcfQC.VcfQC', 'VcfQC', (), '', False, 'from VcfQC import VcfQC\n'), ((72, 6, 72, 61), 'ReseqTrackDB.File', 'File', (), '', False, 'from ReseqTrackDB import File\n'), ((35, 7, 35, 33), 'os.path.isdir', 'os.path.isdir', ({(35, 21, 35, 32): 'args.outdir'}, {}), '(args.outdir)', False, 'import os\n'), ((49, 10, 49, 33), 'datetime.datetime.now', 'datetime.datetime.now', ({}, {}), '()', False, 'import datetime\n'), ((50, 10, 50, 37), 'os.path.basename', 'os.path.basename', ({(50, 27, 50, 36): 'file.name'}, {}), '(file.name)', False, 'import os\n')]
Acidburn0zzz/helloworld
controllers/restart.py
9d88357658c55dadf9d4c6f923b63e8cb6207f75
import os

from base import BaseHandler

class RestartHandler(BaseHandler):
  def get(self):
    if not self.authenticate(superuser=True):
      return

    os.system('touch ' + self.application.settings["restart_path"])
    self.redirect(self.get_argument("next"))
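# Hypothetical wiring sketch, assuming a Tornado application (as the
# get/redirect/settings API above suggests); the route and path are
# illustrative. The handler touches settings["restart_path"], a file whose
# mtime a process supervisor is expected to watch to trigger a reload:
#
#   application = tornado.web.Application(
#       [(r"/restart", RestartHandler)],
#       restart_path="/tmp/app.restart",
#   )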
[((10, 4, 10, 67), 'os.system', 'os.system', ({(10, 14, 10, 66): "('touch ' + self.application.settings['restart_path'])"}, {}), "('touch ' + self.application.settings['restart_path'])", False, 'import os\n')]
badock/nova-tidb
nova/tests/unit/conductor/tasks/test_migrate.py
4c4591f2cd887fdc22828e12f0c297c051bbd912
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import mock

from nova.compute import rpcapi as compute_rpcapi
from nova.conductor.tasks import migrate
from nova import objects
from nova.scheduler import client as scheduler_client
from nova.scheduler import utils as scheduler_utils
from nova import test
from nova.tests.unit.conductor.test_conductor import FakeContext
from nova.tests.unit import fake_flavor
from nova.tests.unit import fake_instance


class MigrationTaskTestCase(test.NoDBTestCase):
    def setUp(self):
        super(MigrationTaskTestCase, self).setUp()
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = FakeContext(self.user_id, self.project_id)
        self.flavor = fake_flavor.fake_flavor_obj(self.context)
        self.flavor.extra_specs = {'extra_specs': 'fake'}
        inst = fake_instance.fake_db_instance(image_ref='image_ref',
                                               instance_type=self.flavor)
        inst_object = objects.Instance(
            flavor=self.flavor,
            numa_topology=None,
            pci_requests=None,
            system_metadata={'image_hw_disk_bus': 'scsi'})
        self.instance = objects.Instance._from_db_object(
            self.context, inst_object, inst, [])
        self.request_spec = objects.RequestSpec(image=objects.ImageMeta())
        self.hosts = [dict(host='host1', nodename=None, limits={})]
        self.filter_properties = {'limits': {},
                                  'retry': {'num_attempts': 1,
                                            'hosts': [['host1', None]]}}
        self.reservations = []
        self.clean_shutdown = True

    def _generate_task(self):
        return migrate.MigrationTask(self.context, self.instance, self.flavor,
                                     self.request_spec,
                                     self.reservations, self.clean_shutdown,
                                     compute_rpcapi.ComputeAPI(),
                                     scheduler_client.SchedulerClient())

    @mock.patch.object(objects.RequestSpec, 'from_components')
    @mock.patch.object(scheduler_utils, 'setup_instance_group')
    @mock.patch.object(scheduler_client.SchedulerClient, 'select_destinations')
    @mock.patch.object(compute_rpcapi.ComputeAPI, 'prep_resize')
    @mock.patch.object(objects.Quotas, 'from_reservations')
    def test_execute(self, quotas_mock, prep_resize_mock, sel_dest_mock,
                     sig_mock, request_spec_from_components):
        sel_dest_mock.return_value = self.hosts
        task = self._generate_task()
        request_spec_from_components.return_value = self.request_spec
        legacy_request_spec = self.request_spec.to_legacy_request_spec_dict()
        task.execute()

        quotas_mock.assert_called_once_with(self.context, self.reservations,
                                            instance=self.instance)
        sig_mock.assert_called_once_with(self.context, legacy_request_spec,
                                         self.filter_properties)
        task.scheduler_client.select_destinations.assert_called_once_with(
            self.context, self.request_spec)
        prep_resize_mock.assert_called_once_with(
            self.context, self.instance, legacy_request_spec['image'],
            self.flavor, self.hosts[0]['host'], self.reservations,
            request_spec=legacy_request_spec,
            filter_properties=self.filter_properties,
            node=self.hosts[0]['nodename'],
            clean_shutdown=self.clean_shutdown)
        self.assertFalse(quotas_mock.return_value.rollback.called)

    def test_rollback(self):
        task = self._generate_task()
        task.quotas = mock.MagicMock()
        task.rollback()
        task.quotas.rollback.assert_called_once_with()
[((57, 5, 57, 62), 'mock.patch.object', 'mock.patch.object', ({(57, 23, 57, 42): 'objects.RequestSpec', (57, 44, 57, 61): '"""from_components"""'}, {}), "(objects.RequestSpec, 'from_components')", False, 'import mock\n'), ((58, 5, 58, 63), 'mock.patch.object', 'mock.patch.object', ({(58, 23, 58, 38): 'scheduler_utils', (58, 40, 58, 62): '"""setup_instance_group"""'}, {}), "(scheduler_utils, 'setup_instance_group')", False, 'import mock\n'), ((59, 5, 59, 79), 'mock.patch.object', 'mock.patch.object', ({(59, 23, 59, 55): 'scheduler_client.SchedulerClient', (59, 57, 59, 78): '"""select_destinations"""'}, {}), "(scheduler_client.SchedulerClient, 'select_destinations')", False, 'import mock\n'), ((60, 5, 60, 64), 'mock.patch.object', 'mock.patch.object', ({(60, 23, 60, 48): 'compute_rpcapi.ComputeAPI', (60, 50, 60, 63): '"""prep_resize"""'}, {}), "(compute_rpcapi.ComputeAPI, 'prep_resize')", False, 'import mock\n'), ((61, 5, 61, 59), 'mock.patch.object', 'mock.patch.object', ({(61, 23, 61, 37): 'objects.Quotas', (61, 39, 61, 58): '"""from_reservations"""'}, {}), "(objects.Quotas, 'from_reservations')", False, 'import mock\n'), ((31, 23, 31, 65), 'nova.tests.unit.conductor.test_conductor.FakeContext', 'FakeContext', ({(31, 35, 31, 47): 'self.user_id', (31, 49, 31, 64): 'self.project_id'}, {}), '(self.user_id, self.project_id)', False, 'from nova.tests.unit.conductor.test_conductor import FakeContext\n'), ((32, 22, 32, 63), 'nova.tests.unit.fake_flavor.fake_flavor_obj', 'fake_flavor.fake_flavor_obj', ({(32, 50, 32, 62): 'self.context'}, {}), '(self.context)', False, 'from nova.tests.unit import fake_flavor\n'), ((34, 15, 35, 72), 'nova.tests.unit.fake_instance.fake_db_instance', 'fake_instance.fake_db_instance', (), '', False, 'from nova.tests.unit import fake_instance\n'), ((36, 22, 40, 58), 'nova.objects.Instance', 'objects.Instance', (), '', False, 'from nova import objects\n'), ((41, 24, 42, 48), 'nova.objects.Instance._from_db_object', 'objects.Instance._from_db_object', ({(42, 12, 42, 24): 'self.context', (42, 26, 42, 37): 'inst_object', (42, 39, 42, 43): 'inst', (42, 45, 42, 47): '[]'}, {}), '(self.context, inst_object, inst, [])', False, 'from nova import objects\n'), ((87, 22, 87, 38), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((54, 37, 54, 64), 'nova.compute.rpcapi.ComputeAPI', 'compute_rpcapi.ComputeAPI', ({}, {}), '()', True, 'from nova.compute import rpcapi as compute_rpcapi\n'), ((55, 37, 55, 71), 'nova.scheduler.client.SchedulerClient', 'scheduler_client.SchedulerClient', ({}, {}), '()', True, 'from nova.scheduler import client as scheduler_client\n'), ((43, 54, 43, 73), 'nova.objects.ImageMeta', 'objects.ImageMeta', ({}, {}), '()', False, 'from nova import objects\n')]
maxmac12/BlackHatPython
CH7_GitCmdAndCtrl/modules/environment.py
60044c65ffc2f1216cbf92c2ec850a4e2e9ca5bf
import os

def run(**kwargs):
    print("[*] In environment module.")
    return str(os.environ)
[]
rywjhzd/Cataloging-and-Visualizing-Cradles-of-Planet-Formation
diskcatalog/core/views.py
6d59ea9d9a07630721e19c554651bae2775962ac
from django.shortcuts import render
from .models import Disk
import os


def index(request):
    context = {}
    disk_list = Disk.objects.all()
    context['disk_list'] = disk_list
    return render(request, 'index.html', context)

#def index(request):
#    module_dir = os.path.dirname(__file__)
#    file_path = os.path.join(module_dir, 'data.txt')
#    disk_list = open(file_path, 'r')
#    data = data_file.read()
#    context = {'disk_list': data}
#    return render(request, 'index.html', context)
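# Hypothetical URLconf wiring (module layout inferred from the app path
# diskcatalog/core/; the route name is illustrative):
#
#   # core/urls.py
#   from django.urls import path
#   from . import views
#
#   urlpatterns = [
#       path('', views.index, name='index'),
#   ]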
[((10, 11, 10, 49), 'django.shortcuts.render', 'render', ({(10, 18, 10, 25): 'request', (10, 27, 10, 39): '"""index.html"""', (10, 41, 10, 48): 'context'}, {}), "(request, 'index.html', context)", False, 'from django.shortcuts import render\n')]
guswynn/materialize
misc/python/materialize/checks/insert_select.py
f433173ed71f511d91311769ec58c2d427dd6c3b
# Copyright Materialize, Inc. and contributors. All rights reserved.
#
# Use of this software is governed by the Business Source License
# included in the LICENSE file at the root of this repository.
#
# As of the Change Date specified in that file, in accordance with
# the Business Source License, use of this software will be governed
# by the Apache License, Version 2.0.

from textwrap import dedent
from typing import List

from materialize.checks.actions import Testdrive
from materialize.checks.checks import Check


class InsertSelect(Check):
    def initialize(self) -> Testdrive:
        return Testdrive(
            dedent(
                """
                > CREATE TABLE insert_select_destination (f1 STRING);

                > CREATE TABLE insert_select_source_table (f1 STRING);
                > INSERT INTO insert_select_source_table SELECT 'T1' || generate_series FROM generate_series(1,10000);
                """
            )
        )

    def manipulate(self) -> List[Testdrive]:
        return [
            Testdrive(dedent(s))
            for s in [
                """
                > INSERT INTO insert_select_source_table SELECT 'T2' || generate_series FROM generate_series(1, 10000);
                > INSERT INTO insert_select_destination SELECT * FROM insert_select_source_table;
                """,
                """
                > INSERT INTO insert_select_source_table SELECT 'T3' || generate_series FROM generate_series(1, 10000);
                > INSERT INTO insert_select_destination SELECT * FROM insert_select_source_table;
                """,
            ]
        ]

    def validate(self) -> Testdrive:
        return Testdrive(
            dedent(
                """
                > SELECT LEFT(f1, 2), COUNT(*), COUNT(DISTINCT f1) FROM insert_select_destination GROUP BY LEFT(f1, 2);
                T1 20000 10000
                T2 20000 10000
                T3 10000 10000
                """
            )
        )
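# Why validate() expects these counts, worked through from the steps above:
# the first INSERT ... SELECT copies T1 rows 1-10000 and T2 rows 1-10000 into
# the destination; the second copies T1, T2 and the new T3 rows again, so the
# destination ends with T1: 20000, T2: 20000 and T3: 10000 rows, each prefix
# group containing 10000 distinct values.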
[((19, 12, 26, 13), 'textwrap.dedent', 'dedent', ({(20, 16, 25, 15): '"""\n > CREATE TABLE insert_select_destination (f1 STRING);\n\n > CREATE TABLE insert_select_source_table (f1 STRING);\n > INSERT INTO insert_select_source_table SELECT \'T1\' || generate_series FROM generate_series(1,10000);\n """'}, {}), '(\n """\n > CREATE TABLE insert_select_destination (f1 STRING);\n\n > CREATE TABLE insert_select_source_table (f1 STRING);\n > INSERT INTO insert_select_source_table SELECT \'T1\' || generate_series FROM generate_series(1,10000);\n """\n )', False, 'from textwrap import dedent\n'), ((48, 12, 55, 13), 'textwrap.dedent', 'dedent', ({(49, 16, 54, 14): '"""\n > SELECT LEFT(f1, 2), COUNT(*), COUNT(DISTINCT f1) FROM insert_select_destination GROUP BY LEFT(f1, 2);\n T1 20000 10000\n T2 20000 10000\n T3 10000 10000\n """'}, {}), '(\n """\n > SELECT LEFT(f1, 2), COUNT(*), COUNT(DISTINCT f1) FROM insert_select_destination GROUP BY LEFT(f1, 2);\n T1 20000 10000\n T2 20000 10000\n T3 10000 10000\n """\n )', False, 'from textwrap import dedent\n'), ((31, 22, 31, 31), 'textwrap.dedent', 'dedent', ({(31, 29, 31, 30): 's'}, {}), '(s)', False, 'from textwrap import dedent\n')]