repo_name (string, 7-94 chars) | repo_path (string, 4-237 chars) | repo_head_hexsha (string, 40 chars) | content (string, 10-680k chars) | apis (string, 2-840k chars)
---|---|---|---|---
sweetsbeats/starter-snake-python | brute/brute_build.py | e7cb56a3a623a324f4b5ef956020990e8c61f871 | from cffi import FFI
ffibuilder = FFI()
ffibuilder.cdef("""
int test(int t);
""")
ffibuilder.set_source("_pi_cffi",
"""
#include "brute.h"
""",
sources=['brute.c'])
if __name__ == "__main__":
    ffibuilder.compile(verbose=True)
| [((3, 13, 3, 18), 'cffi.FFI', 'FFI', ({}, {}), '()', False, 'from cffi import FFI\n')] |
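A minimal usage sketch for the brute_build.py row above (not part of the dataset itself): in cffi's out-of-line API mode, running the build script produces a _pi_cffi extension whose lib object exposes the function declared in the cdef block. The argument value here is arbitrary.

# Hedged sketch: import the extension generated by ffibuilder.compile() above
# and call the C function declared in the cdef block.
from _pi_cffi import lib

result = lib.test(42)  # int test(int t), implemented in brute.c
print(result)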
JNotelddim/python-snake | src/board.py | da95339d3a982040a84422e5f7b95453095a4450 | """Board Module"""
import copy
from typing import Tuple, List
from src.coordinate import Coordinate
from src.snake import Snake
class Board:
"""Track the cooardinates for all snakes and food in the game."""
def __init__(self, data):
self._data = data
self._snakes = None
self._foods = None
@property
def snakes(self) -> List[Snake]:
"""Retreive the list of snakes from the board data."""
if self._snakes is None:
snakes = [Snake(snake_data) for snake_data in self._data['snakes']]
self._snakes = snakes
return self._snakes
@property
def foods(self) -> List[Coordinate]:
"""Retreive the list of food from the board data."""
if self._foods is None:
self._foods = [Coordinate(food_data) for food_data in self._data['food']]
return self._foods
@property
def width(self) -> int:
"""Get width of the board -- note: it's a square."""
return self._data['width']
def is_coordinate_in_bounds(self, coordinate) -> bool:
"""Check whether or not the Coordinate is within the bounds of the Board."""
is_wall = (coordinate.x == -1 or coordinate.x == self.width
or coordinate.y == -1 or coordinate.y == self.width)
return not is_wall
def get_other_snakes(self, exclude_id) -> List[Snake]:
"""Get the List of Snakes whose IDs don't match the given ID."""
return [snake for snake in self.snakes if snake.id != exclude_id]
def advance_snake_along_path(self, snake_id: str, path: List[Coordinate]):
"""Return a new board with our snake advanced along given path."""
new_board = copy.deepcopy(self)
return new_board.__help_advance_snake_along_path(snake_id, path)
def __help_advance_snake_along_path(self, snake_id: str, path: List[Coordinate]):
"""Do the actual advancement of the snake along the path."""
me = next((snake for snake in self.snakes if snake.id == snake_id), None)
if not me:
raise ValueError("No snake for given id!")
me.coordinates += path
me.coordinates = me.coordinates[len(path):]
me.coordinates.reverse()
me.coordinates.append(me.coordinates[-1])
print("new coords:")
for coord in me.coordinates:
print(coord)
return self
| [((46, 20, 46, 39), 'copy.deepcopy', 'copy.deepcopy', ({(46, 34, 46, 38): 'self'}, {}), '(self)', False, 'import copy\n'), ((18, 22, 18, 39), 'src.snake.Snake', 'Snake', ({(18, 28, 18, 38): 'snake_data'}, {}), '(snake_data)', False, 'from src.snake import Snake\n'), ((26, 27, 26, 48), 'src.coordinate.Coordinate', 'Coordinate', ({(26, 38, 26, 47): 'food_data'}, {}), '(food_data)', False, 'from src.coordinate import Coordinate\n')] |
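A hedged usage sketch for the Board class above: the constructor takes the raw game payload, which evidently carries 'snakes', 'food', and 'width' keys. Since the real Coordinate and Snake schemas live in other files of the repo, a stand-in point object (an assumption, not the repo's class) is used here just to exercise the bounds check.

board = Board({'snakes': [], 'food': [], 'width': 11})

class Point:  # stand-in for src.coordinate.Coordinate (assumed shape: .x/.y)
    def __init__(self, x, y):
        self.x, self.y = x, y

print(board.is_coordinate_in_bounds(Point(0, 0)))   # True: inside the board
print(board.is_coordinate_in_bounds(Point(11, 5)))  # False: x == width is a wall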
CLARIN-PL/personalized-nlp | personalized_nlp/datasets/wiki/base.py | 340294300f93d12cabc59b055ff2548df8f4081a | import os
import zipfile
from typing import List
import pandas as pd
import urllib
from personalized_nlp.settings import STORAGE_DIR
from personalized_nlp.utils.data_splitting import split_texts
from personalized_nlp.datasets.datamodule_base import BaseDataModule
class WikiDataModule(BaseDataModule):
def __init__(
self,
split_sizes: List[float] = [0.55, 0.15, 0.15, 0.15],
**kwargs,
):
super().__init__(**kwargs)
self.data_dir = STORAGE_DIR / 'wiki_data'
self.annotation_column = ''
self.word_stats_annotation_column = ''
self.embeddings_path = ''
self.train_split_names = ['present', 'past']
self.val_split_names = ['future1']
self.test_split_names = ['future2']
self.split_sizes = split_sizes
os.makedirs(self.data_dir / 'embeddings', exist_ok=True)
@property
def class_dims(self):
return [2]
@property
def texts_clean(self):
texts = self.data.text.to_list()
texts = [c.replace('NEWLINE_TOKEN', ' ') for c in texts]
return texts
def _remap_column_names(self, df):
mapping = {'rev_id': 'text_id',
'worker_id': 'annotator_id', 'comment': 'text'}
df.columns = [mapping.get(col, col) for col in df.columns]
return df
def prepare_data(self) -> None:
self.data = pd.read_csv(
self.data_dir / (self.annotation_column + '_annotated_comments.tsv'), sep='\t')
self.data = self._remap_column_names(self.data)
self.data['text'] = self.data['text'].str.replace(
'NEWLINE_TOKEN', ' ')
self.annotators = pd.read_csv(
self.data_dir / (self.annotation_column + '_worker_demographics.tsv'), sep='\t')
self.annotators = self._remap_column_names(self.annotators)
self.annotations = pd.read_csv(
self.data_dir / (self.annotation_column + '_annotations.tsv'), sep='\t')
self.annotations = self._remap_column_names(self.annotations)
self._assign_splits()
personal_df = self.annotations_with_data.loc[self.annotations_with_data.split == 'past']
self.compute_annotator_biases(personal_df)
def _assign_splits(self):
self.data = split_texts(self.data, self.split_sizes) | [((33, 8, 33, 64), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((53, 20, 54, 91), 'pandas.read_csv', 'pd.read_csv', (), '', True, 'import pandas as pd\n'), ((59, 26, 60, 92), 'pandas.read_csv', 'pd.read_csv', (), '', True, 'import pandas as pd\n'), ((63, 27, 64, 84), 'pandas.read_csv', 'pd.read_csv', (), '', True, 'import pandas as pd\n'), ((73, 20, 73, 60), 'personalized_nlp.utils.data_splitting.split_texts', 'split_texts', ({(73, 32, 73, 41): 'self.data', (73, 43, 73, 59): 'self.split_sizes'}, {}), '(self.data, self.split_sizes)', False, 'from personalized_nlp.utils.data_splitting import split_texts\n')] |
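A hedged illustration of the _remap_column_names helper above: it renames the Wikipedia detox column names to the ones the rest of the pipeline expects while leaving unknown columns untouched. The sample frame below is illustrative only.

import pandas as pd

df = pd.DataFrame(columns=['rev_id', 'worker_id', 'comment', 'split'])
mapping = {'rev_id': 'text_id', 'worker_id': 'annotator_id', 'comment': 'text'}
df.columns = [mapping.get(col, col) for col in df.columns]
print(list(df.columns))  # ['text_id', 'annotator_id', 'text', 'split']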
dlanghorne0428/StudioMusicPlayer | App/migrations/0010_remove_user_percentage_preferences_user_preferences.py | 54dabab896b96d90b68d6435edfd52fe6a866bc2 | # Generated by Django 4.0 on 2022-03-03 02:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('App', '0009_alter_song_holiday_alter_songfileinput_holiday'),
]
operations = [
migrations.RemoveField(
model_name='user',
name='percentage_preferences',
),
migrations.AddField(
model_name='user',
name='preferences',
field=models.JSONField(null=True),
),
]
| [((13, 8, 16, 9), 'django.db.migrations.RemoveField', 'migrations.RemoveField', (), '', False, 'from django.db import migrations, models\n'), ((20, 18, 20, 45), 'django.db.models.JSONField', 'models.JSONField', (), '', False, 'from django.db import migrations, models\n')] |
Rudeus3Greyrat/admin-management | venv/Lib/site-packages/captcha/conf/settings.py | 7e81d2b1908afa3ea57a82c542c9aebb1d0ffd23 | import os
import warnings
from django.conf import settings
CAPTCHA_FONT_PATH = getattr(settings, 'CAPTCHA_FONT_PATH', os.path.normpath(os.path.join(os.path.dirname(__file__), '..', 'fonts/Vera.ttf')))
CAPTCHA_FONT_SIZE = getattr(settings, 'CAPTCHA_FONT_SIZE', 22)
CAPTCHA_LETTER_ROTATION = getattr(settings, 'CAPTCHA_LETTER_ROTATION', (-35, 35))
CAPTCHA_BACKGROUND_COLOR = getattr(settings, 'CAPTCHA_BACKGROUND_COLOR', '#ffffff')
CAPTCHA_FOREGROUND_COLOR = getattr(settings, 'CAPTCHA_FOREGROUND_COLOR', '#001100')
CAPTCHA_CHALLENGE_FUNCT = getattr(settings, 'CAPTCHA_CHALLENGE_FUNCT', 'captcha.helpers.random_char_challenge')
CAPTCHA_NOISE_FUNCTIONS = getattr(settings, 'CAPTCHA_NOISE_FUNCTIONS', ('captcha.helpers.noise_arcs', 'captcha.helpers.noise_dots',))
CAPTCHA_FILTER_FUNCTIONS = getattr(settings, 'CAPTCHA_FILTER_FUNCTIONS', ('captcha.helpers.post_smooth',))
CAPTCHA_WORDS_DICTIONARY = getattr(settings, 'CAPTCHA_WORDS_DICTIONARY', '/usr/share/dict/words')
CAPTCHA_PUNCTUATION = getattr(settings, 'CAPTCHA_PUNCTUATION', '''_"',.;:-''')
CAPTCHA_FLITE_PATH = getattr(settings, 'CAPTCHA_FLITE_PATH', None)
CAPTCHA_SOX_PATH = getattr(settings, 'CAPTCHA_SOX_PATH', None)
CAPTCHA_TIMEOUT = getattr(settings, 'CAPTCHA_TIMEOUT', 5) # Minutes
CAPTCHA_LENGTH = int(getattr(settings, 'CAPTCHA_LENGTH', 4)) # Chars
# CAPTCHA_IMAGE_BEFORE_FIELD = getattr(settings, 'CAPTCHA_IMAGE_BEFORE_FIELD', True)
CAPTCHA_DICTIONARY_MIN_LENGTH = getattr(settings, 'CAPTCHA_DICTIONARY_MIN_LENGTH', 0)
CAPTCHA_DICTIONARY_MAX_LENGTH = getattr(settings, 'CAPTCHA_DICTIONARY_MAX_LENGTH', 99)
CAPTCHA_IMAGE_SIZE = getattr(settings, 'CAPTCHA_IMAGE_SIZE', None)
CAPTCHA_IMAGE_TEMPLATE = getattr(settings, 'CAPTCHA_IMAGE_TEMPLATE', 'captcha/image.html')
CAPTCHA_HIDDEN_FIELD_TEMPLATE = getattr(settings, 'CAPTCHA_HIDDEN_FIELD_TEMPLATE', 'captcha/hidden_field.html')
CAPTCHA_TEXT_FIELD_TEMPLATE = getattr(settings, 'CAPTCHA_TEXT_FIELD_TEMPLATE', 'captcha/text_field.html')
if getattr(settings, 'CAPTCHA_FIELD_TEMPLATE', None):
msg = ("CAPTCHA_FIELD_TEMPLATE setting is deprecated in favor of widget's template_name.")
warnings.warn(msg, DeprecationWarning)
CAPTCHA_FIELD_TEMPLATE = getattr(settings, 'CAPTCHA_FIELD_TEMPLATE', None)
if getattr(settings, 'CAPTCHA_OUTPUT_FORMAT', None):
msg = ("CAPTCHA_OUTPUT_FORMAT setting is deprecated in favor of widget's template_name.")
warnings.warn(msg, DeprecationWarning)
CAPTCHA_OUTPUT_FORMAT = getattr(settings, 'CAPTCHA_OUTPUT_FORMAT', None)
CAPTCHA_MATH_CHALLENGE_OPERATOR = getattr(settings, 'CAPTCHA_MATH_CHALLENGE_OPERATOR', '*')
CAPTCHA_GET_FROM_POOL = getattr(settings, 'CAPTCHA_GET_FROM_POOL', False)
CAPTCHA_GET_FROM_POOL_TIMEOUT = getattr(settings, 'CAPTCHA_GET_FROM_POOL_TIMEOUT', 5)
CAPTCHA_TEST_MODE = getattr(settings, 'CAPTCHA_TEST_MODE', False)
# Failsafe
if CAPTCHA_DICTIONARY_MIN_LENGTH > CAPTCHA_DICTIONARY_MAX_LENGTH:
CAPTCHA_DICTIONARY_MIN_LENGTH, CAPTCHA_DICTIONARY_MAX_LENGTH = CAPTCHA_DICTIONARY_MAX_LENGTH, CAPTCHA_DICTIONARY_MIN_LENGTH
def _callable_from_string(string_or_callable):
if callable(string_or_callable):
return string_or_callable
else:
return getattr(__import__('.'.join(string_or_callable.split('.')[:-1]), {}, {}, ['']), string_or_callable.split('.')[-1])
def get_challenge(generator=None):
return _callable_from_string(generator or CAPTCHA_CHALLENGE_FUNCT)
def noise_functions():
if CAPTCHA_NOISE_FUNCTIONS:
return map(_callable_from_string, CAPTCHA_NOISE_FUNCTIONS)
return []
def filter_functions():
if CAPTCHA_FILTER_FUNCTIONS:
return map(_callable_from_string, CAPTCHA_FILTER_FUNCTIONS)
return []
| [((30, 4, 30, 42), 'warnings.warn', 'warnings.warn', ({(30, 18, 30, 21): 'msg', (30, 23, 30, 41): 'DeprecationWarning'}, {}), '(msg, DeprecationWarning)', False, 'import warnings\n'), ((34, 4, 34, 42), 'warnings.warn', 'warnings.warn', ({(34, 18, 34, 21): 'msg', (34, 23, 34, 41): 'DeprecationWarning'}, {}), '(msg, DeprecationWarning)', False, 'import warnings\n'), ((6, 89, 6, 114), 'os.path.dirname', 'os.path.dirname', ({(6, 105, 6, 113): '__file__'}, {}), '(__file__)', False, 'import os\n')] |
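The _callable_from_string helper above resolves a dotted path such as 'captcha.helpers.noise_dots' to the callable it names via __import__. A hedged equivalent using importlib, shown only to clarify the mechanism (not the library's actual code):

import importlib

def callable_from_string(string_or_callable):
    # Already a callable: return it unchanged, mirroring the original helper.
    if callable(string_or_callable):
        return string_or_callable
    module_path, _, attr = string_or_callable.rpartition('.')
    return getattr(importlib.import_module(module_path), attr)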
joevandyk/pilbox | pilbox/test/app_test.py | b84732a78e5bdb2d24bf7ef4177d45806ac03ea6 | from __future__ import absolute_import, division, print_function, \
with_statement
import logging
import os.path
import time
import tornado.escape
import tornado.gen
import tornado.ioloop
from tornado.test.util import unittest
from tornado.testing import AsyncHTTPTestCase, gen_test
import tornado.web
from pilbox.app import PilboxApplication
from pilbox.errors import SignatureError, ClientError, HostError, \
BackgroundError, DimensionsError, FilterError, FormatError, ModeError, \
PositionError, QualityError, UrlError, ImageFormatError, FetchError
from pilbox.signature import sign
from pilbox.test import image_test
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
try:
import cv
except ImportError:
cv = None
logger = logging.getLogger("tornado.application")
class _AppAsyncMixin(object):
def fetch_error(self, code, *args, **kwargs):
response = self.fetch(*args, **kwargs)
self.assertEqual(response.code, code)
self.assertEqual(response.headers.get("Content-Type", None),
"application/json")
return tornado.escape.json_decode(response.body)
def fetch_success(self, *args, **kwargs):
response = self.fetch(*args, **kwargs)
self.assertEqual(response.code, 200)
return response
def get_image_resize_cases(self):
cases = image_test.get_image_resize_cases()
m = dict(background="bg", filter="filter", format="fmt",
position="pos", quality="q")
for i, case in enumerate(cases):
path = "/test/data/%s" % os.path.basename(case["source_path"])
cases[i]["source_query_params"] = dict(
url=self.get_url(path),
w=case["width"] or "",
h=case["height"] or "",
mode=case["mode"])
for k in m.keys():
if k in case:
cases[i]["source_query_params"][m.get(k)] = case[k]
if case.get("format") in ["jpeg", "jpg"]:
cases[i]["content_type"] = "image/jpeg"
elif case.get("format") == "png":
cases[i]["content_type"] = "image/png"
elif case.get("format") == "webp":
cases[i]["content_type"] = "image/webp"
else:
cases[i]["content_type"] = None
return cases
class _PilboxTestApplication(PilboxApplication):
def get_handlers(self):
path = os.path.join(os.path.dirname(__file__), "data")
handlers = [(r"/test/data/test-delayed.jpg", _DelayedHandler),
(r"/test/data/(.*)",
tornado.web.StaticFileHandler,
{"path": path})]
handlers.extend(super(_PilboxTestApplication, self).get_handlers())
return handlers
class _DelayedHandler(tornado.web.RequestHandler):
@tornado.web.asynchronous
@tornado.gen.engine
def get(self):
delay = time.time() + float(self.get_argument("delay", 0.0))
yield tornado.gen.Task(
tornado.ioloop.IOLoop.instance().add_timeout, delay)
self.finish()
class AppTest(AsyncHTTPTestCase, _AppAsyncMixin):
def get_app(self):
return _PilboxTestApplication()
def test_missing_url(self):
qs = urlencode(dict(w=1, h=1))
resp = self.fetch_error(400, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), UrlError.get_code())
def test_missing_dimensions(self):
qs = urlencode(dict(url="http://foo.co/x.jpg"))
resp = self.fetch_error(400, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), DimensionsError.get_code())
def test_invalid_width(self):
qs = urlencode(dict(url="http://foo.co/x.jpg", w="a", h=1))
resp = self.fetch_error(400, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), DimensionsError.get_code())
def test_invalid_height(self):
qs = urlencode(dict(url="http://foo.co/x.jpg", w=1, h="a"))
resp = self.fetch_error(400, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), DimensionsError.get_code())
def test_invalid_mode(self):
qs = urlencode(dict(url="http://foo.co/x.jpg", w=1, h=1, mode="foo"))
resp = self.fetch_error(400, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), ModeError.get_code())
def test_invalid_hexadecimal_background(self):
qs = urlencode(dict(url="http://foo.co/x.jpg", w=1, h=1,
mode="fill", bg="r"))
resp = self.fetch_error(400, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), BackgroundError.get_code())
def test_invalid_long_background(self):
qs = urlencode(dict(url="http://foo.co/x.jpg", w=1, h=1,
mode="fill", bg="0f0f0f0f0"))
resp = self.fetch_error(400, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), BackgroundError.get_code())
def test_invalid_position(self):
qs = urlencode(dict(url="http://foo.co/x.jpg", w=1, h=1, pos="foo"))
resp = self.fetch_error(400, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), PositionError.get_code())
def test_invalid_filter(self):
qs = urlencode(dict(url="http://foo.co/x.jpg", w=1, h=1, filter="bar"))
resp = self.fetch_error(400, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), FilterError.get_code())
def test_invalid_format(self):
qs = urlencode(dict(url="http://foo.co/x.jpg", w=1, h=1, fmt="foo"))
resp = self.fetch_error(400, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), FormatError.get_code())
def test_invalid_integer_quality(self):
qs = urlencode(dict(url="http://foo.co/x.jpg", w=1, h=1, q="a"))
resp = self.fetch_error(400, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), QualityError.get_code())
def test_outofbounds_quality(self):
qs = urlencode(dict(url="http://foo.co/x.jpg", w=1, h=1, q=200))
resp = self.fetch_error(400, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), QualityError.get_code())
def test_unsupported_image_format(self):
path = "/test/data/test-bad-format.gif"
qs = urlencode(dict(url=self.get_url(path), w=1, h=1))
resp = self.fetch_error(415, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), ImageFormatError.get_code())
def test_not_found(self):
path = "/test/data/test-not-found.jpg"
qs = urlencode(dict(url=self.get_url(path), w=1, h=1))
resp = self.fetch_error(404, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), FetchError.get_code())
def test_not_connect(self):
qs = urlencode(dict(url="http://a.com/a.jpg", w=1, h=1))
resp = self.fetch_error(404, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), FetchError.get_code())
def test_invalid_protocol(self):
path = os.path.join(os.path.dirname(__file__), "data", "test1.jpg")
qs = urlencode(dict(url="file://%s" % path, w=1, h=1))
resp = self.fetch_error(400, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), UrlError.get_code())
def test_valid(self):
cases = self.get_image_resize_cases()
for case in cases:
if case.get("mode") == "crop" and case.get("position") == "face":
continue
self._assert_expected_resize(case)
@unittest.skipIf(cv is None, "OpenCV is not installed")
def test_valid_face(self):
cases = self.get_image_resize_cases()
for case in cases:
if case.get("mode") == "crop" and case.get("position") == "face":
self._assert_expected_resize(case)
def _assert_expected_resize(self, case):
qs = urlencode(case["source_query_params"])
resp = self.fetch_success("/?%s" % qs)
msg = "/?%s does not match %s" \
% (qs, case["expected_path"])
if case["content_type"]:
self.assertEqual(resp.headers.get("Content-Type", None),
case["content_type"])
with open(case["expected_path"], "rb") as expected:
self.assertEqual(resp.buffer.read(), expected.read(), msg)
class AppRestrictedTest(AsyncHTTPTestCase, _AppAsyncMixin):
KEY = "abcdef"
NAME = "abc"
def get_app(self):
return _PilboxTestApplication(
client_name=self.NAME,
client_key=self.KEY,
allowed_hosts=["foo.co", "bar.io", "localhost"])
def test_missing_client_name(self):
params = dict(url="http://foo.co/x.jpg", w=1, h=1)
qs = sign(self.KEY, urlencode(params))
resp = self.fetch_error(403, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), ClientError.get_code())
def test_bad_client_name(self):
params = dict(url="http://foo.co/x.jpg", w=1, h=1, client="123")
qs = sign(self.KEY, urlencode(params))
resp = self.fetch_error(403, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), ClientError.get_code())
def test_missing_signature(self):
params = dict(url="http://foo.co/x.jpg", w=1, h=1, client=self.NAME)
qs = urlencode(params)
resp = self.fetch_error(403, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), SignatureError.get_code())
def test_bad_signature(self):
params = dict(url="http://foo.co/x.jpg", w=1, h=1,
client=self.NAME, sig="abc123")
qs = urlencode(params)
resp = self.fetch_error(403, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), SignatureError.get_code())
def test_bad_host(self):
params = dict(url="http://bar.co/x.jpg", w=1, h=1, client=self.NAME)
qs = sign(self.KEY, urlencode(params))
resp = self.fetch_error(403, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), HostError.get_code())
def test_valid(self):
cases = self.get_image_resize_cases()
for case in cases:
if case.get("mode") == "crop" and case.get("position") == "face":
continue
params = case["source_query_params"]
params["client"] = self.NAME
qs = sign(self.KEY, urlencode(params))
resp = self.fetch_success("/?%s" % qs)
msg = "/?%s does not match %s" \
% (qs, case["expected_path"])
with open(case["expected_path"], "rb") as expected:
self.assertEqual(resp.buffer.read(), expected.read(), msg)
class AppSlowTest(AsyncHTTPTestCase, _AppAsyncMixin):
def get_app(self):
return _PilboxTestApplication(timeout=0.5)
def test_timeout(self):
url = self.get_url("/test/data/test-delayed.jpg?delay=1.0")
qs = urlencode(dict(url=url, w=1, h=1))
        resp = self.fetch_error(404, "/?%s" % qs)
self.assertEqual(resp.get("error_code"), FetchError.get_code())
| [((33, 9, 33, 49), 'logging.getLogger', 'logging.getLogger', ({(33, 27, 33, 48): '"tornado.application"'}, {}), "('tornado.application')", False, 'import logging\n'), ((192, 5, 192, 59), 'tornado.test.util.unittest.skipIf', 'unittest.skipIf', ({(192, 21, 192, 31): '(cv is None)', (192, 33, 192, 58): '"OpenCV is not installed"'}, {}), "(cv is None, 'OpenCV is not installed')", False, 'from tornado.test.util import unittest\n'), ((50, 16, 50, 51), 'pilbox.test.image_test.get_image_resize_cases', 'image_test.get_image_resize_cases', ({}, {}), '()', False, 'from pilbox.test import image_test\n'), ((200, 13, 200, 51), 'urllib.parse.urlencode', 'urlencode', ({(200, 23, 200, 50): "case['source_query_params']"}, {}), "(case['source_query_params'])", False, 'from urllib.parse import urlencode\n'), ((235, 13, 235, 30), 'urllib.parse.urlencode', 'urlencode', ({(235, 23, 235, 29): 'params'}, {}), '(params)', False, 'from urllib.parse import urlencode\n'), ((242, 13, 242, 30), 'urllib.parse.urlencode', 'urlencode', ({(242, 23, 242, 29): 'params'}, {}), '(params)', False, 'from urllib.parse import urlencode\n'), ((90, 16, 90, 27), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((103, 49, 103, 68), 'pilbox.errors.UrlError.get_code', 'UrlError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((108, 49, 108, 75), 'pilbox.errors.DimensionsError.get_code', 'DimensionsError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((113, 49, 113, 75), 'pilbox.errors.DimensionsError.get_code', 'DimensionsError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((118, 49, 118, 75), 'pilbox.errors.DimensionsError.get_code', 'DimensionsError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((123, 49, 123, 69), 'pilbox.errors.ModeError.get_code', 'ModeError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((129, 49, 129, 75), 'pilbox.errors.BackgroundError.get_code', 'BackgroundError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((135, 49, 135, 75), 'pilbox.errors.BackgroundError.get_code', 'BackgroundError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((140, 49, 140, 73), 'pilbox.errors.PositionError.get_code', 'PositionError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((145, 49, 145, 71), 'pilbox.errors.FilterError.get_code', 'FilterError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((150, 49, 150, 71), 'pilbox.errors.FormatError.get_code', 'FormatError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((155, 49, 155, 72), 'pilbox.errors.QualityError.get_code', 'QualityError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((160, 49, 160, 72), 'pilbox.errors.QualityError.get_code', 'QualityError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((166, 49, 166, 76), 'pilbox.errors.ImageFormatError.get_code', 'ImageFormatError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((172, 49, 172, 70), 'pilbox.errors.FetchError.get_code', 'FetchError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((177, 49, 177, 70), 'pilbox.errors.FetchError.get_code', 'FetchError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((183, 49, 183, 68), 'pilbox.errors.UrlError.get_code', 'UrlError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((223, 28, 223, 45), 'urllib.parse.urlencode', 'urlencode', ({(223, 38, 223, 44): 'params'}, {}), '(params)', False, 'from urllib.parse import urlencode\n'), ((225, 49, 225, 71), 'pilbox.errors.ClientError.get_code', 'ClientError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((229, 28, 229, 45), 'urllib.parse.urlencode', 'urlencode', ({(229, 38, 229, 44): 'params'}, {}), '(params)', False, 'from urllib.parse import urlencode\n'), ((231, 49, 231, 71), 'pilbox.errors.ClientError.get_code', 'ClientError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((237, 49, 237, 74), 'pilbox.errors.SignatureError.get_code', 'SignatureError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((244, 49, 244, 74), 'pilbox.errors.SignatureError.get_code', 'SignatureError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((248, 28, 248, 45), 'urllib.parse.urlencode', 'urlencode', ({(248, 38, 248, 44): 'params'}, {}), '(params)', False, 'from urllib.parse import urlencode\n'), ((250, 49, 250, 69), 'pilbox.errors.HostError.get_code', 'HostError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((275, 49, 275, 70), 'pilbox.errors.FetchError.get_code', 'FetchError.get_code', ({}, {}), '()', False, 'from pilbox.errors import SignatureError, ClientError, HostError, BackgroundError, DimensionsError, FilterError, FormatError, ModeError, PositionError, QualityError, UrlError, ImageFormatError, FetchError\n'), ((259, 32, 259, 49), 'urllib.parse.urlencode', 'urlencode', ({(259, 42, 259, 48): 'params'}, {}), '(params)', False, 'from urllib.parse import urlencode\n')] |
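A hedged sketch of the signed-request flow that the AppRestrictedTest cases above exercise: the query string is urlencoded and then passed through pilbox.signature.sign with the shared client key, exactly as the tests do. The parameter values below are illustrative only.

from urllib.parse import urlencode
from pilbox.signature import sign

key = "abcdef"  # shared secret, matching AppRestrictedTest.KEY above
params = dict(url="http://foo.co/x.jpg", w=100, h=100, client="abc")
signed_qs = sign(key, urlencode(params))  # query string now carries the signature
request_path = "/?%s" % signed_qs        # the path the tests fetch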
Neelraj21/phython | hackathon/darkmattertemperaturedistribution/example.py | 68a2cedccae694eb84880f3aa55cc01d458e055e | #!/usr/bin/env python
from scipy import *
from pylab import *
#from pylab import imshow
#!
#! Some graphical explorations of the Julia sets with python and pyreport
#!#########################################################################
#$
#$ We start by defining a function J:
#$ \[ J_c : z \rightarrow z^2 + c \]
#$
def J(c):
return lambda z : z**2 + c
[x,y] = ogrid[ -1:1:0.002, -1:1:0.002 ]
z = x + y *1j
#! If we study the divergence of function J under repeated iteration
#! depending on its inital conditions we get a very pretty graph
threshTime = zeros_like(z)
for i in range(40):
z = J(0.285)(z)
threshTime += z*conj(z) > 4
figure(0)
axes([0,0,1,1])
axis('off')
imshow(threshTime)
bone()
show()
#! We can also do that systematicaly for other values of c:
axes([0,0,1,1])
axis('off')
rcParams.update({'figure.figsize': [10.5,5]})
c_values = (0.285 + 0.013j, 0.45 - 0.1428j, -0.70176 -0.3842j,
-0.835-0.2321j, -0.939 +0.167j, -0.986+0.87j)
for i,c in enumerate(c_values):
threshTime = zeros_like(z)
z = x + y *1j
for n in range(40):
z = J(c)(z)
threshTime += z*conj(z) > 4
subplot(2,3,i+1)
imshow(threshTime)
axis('off')
show()
| [] |
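A hedged restatement of the escape-time quantity the Julia-set script above accumulates: threshTime counts, per grid point, how many of the 40 iterates have left the disk of radius 2, using z*conj(z) > 4 as the test, since |z| > 2 guarantees divergence of the iteration for the parameter values |c| < 2 used here.

\[
  T(z_0) \;=\; \#\{\, n \le 40 \;:\; z_n \bar z_n = |z_n|^2 > 4 \,\},
  \qquad z_{n+1} = z_n^2 + c .
\]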
codepointtku/respa | resources/migrations/0126_add_field_disallow_overlapping_reservations_per_user.py | bb9cd8459d5562569f976dbc609ec41ceecc8023 | # Generated by Django 2.2.21 on 2021-06-23 12:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('resources', '0125_add_timmi_payload_model'),
]
operations = [
migrations.AddField(
model_name='unit',
name='disallow_overlapping_reservations_per_user',
field=models.BooleanField(default=False, verbose_name='Disallow overlapping reservations in this unit per user.'),
),
]
| [((17, 18, 17, 125), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import migrations, models\n')] |
marv1913/lora_multihop | src/lora_multihop/module_config.py | ef07493c2f763d07161fa25d4b884ef79b94afa4 | import logging
from lora_multihop import serial_connection, variables
def config_module(configuration=variables.MODULE_CONFIG):
if serial_connection.execute_command(configuration, [variables.STATUS_OK]):
serial_connection.execute_command('AT+SEND=1', [variables.STATUS_OK])
serial_connection.execute_command('a', ['AT,SENDING', 'AT,SENDED'])
logging.debug('module config successfully set')
return True
logging.warning("could not set module config")
return False
def set_address(address):
cmd = f'AT+ADDR={address}'
if serial_connection.execute_command(serial_connection.str_to_bytes(cmd), [variables.STATUS_OK]):
logging.debug(f'module address successfully set to: {address}')
return True
logging.warning("could not set module address")
return False
def get_current_address():
serial_connection.execute_command(serial_connection.str_to_bytes(variables.GET_ADDR))
addr = serial_connection.response_q.get(variables.COMMAND_VERIFICATION_TIMEOUT)
addr = serial_connection.bytes_to_str(addr)
addr_as_list = addr.split(variables.LORA_MODULE_DELIMITER)
if addr_as_list[0].strip() != 'AT' or addr_as_list[2].strip() != 'OK':
raise ValueError('could not get address of module')
return addr_as_list[1]
| [((7, 7, 7, 78), 'lora_multihop.serial_connection.execute_command', 'serial_connection.execute_command', ({(7, 41, 7, 54): 'configuration', (7, 56, 7, 77): '[variables.STATUS_OK]'}, {}), '(configuration, [variables.STATUS_OK])', False, 'from lora_multihop import serial_connection, variables\n'), ((12, 4, 12, 50), 'logging.warning', 'logging.warning', ({(12, 20, 12, 49): '"""could not set module config"""'}, {}), "('could not set module config')", False, 'import logging\n'), ((21, 4, 21, 51), 'logging.warning', 'logging.warning', ({(21, 20, 21, 50): '"""could not set module address"""'}, {}), "('could not set module address')", False, 'import logging\n'), ((27, 11, 27, 83), 'lora_multihop.serial_connection.response_q.get', 'serial_connection.response_q.get', ({(27, 44, 27, 82): 'variables.COMMAND_VERIFICATION_TIMEOUT'}, {}), '(variables.COMMAND_VERIFICATION_TIMEOUT)', False, 'from lora_multihop import serial_connection, variables\n'), ((28, 11, 28, 47), 'lora_multihop.serial_connection.bytes_to_str', 'serial_connection.bytes_to_str', ({(28, 42, 28, 46): 'addr'}, {}), '(addr)', False, 'from lora_multihop import serial_connection, variables\n'), ((8, 8, 8, 77), 'lora_multihop.serial_connection.execute_command', 'serial_connection.execute_command', ({(8, 42, 8, 53): '"""AT+SEND=1"""', (8, 55, 8, 76): '[variables.STATUS_OK]'}, {}), "('AT+SEND=1', [variables.STATUS_OK])", False, 'from lora_multihop import serial_connection, variables\n'), ((9, 8, 9, 75), 'lora_multihop.serial_connection.execute_command', 'serial_connection.execute_command', ({(9, 42, 9, 45): '"""a"""', (9, 47, 9, 74): "['AT,SENDING', 'AT,SENDED']"}, {}), "('a', ['AT,SENDING', 'AT,SENDED'])", False, 'from lora_multihop import serial_connection, variables\n'), ((10, 8, 10, 55), 'logging.debug', 'logging.debug', ({(10, 22, 10, 54): '"""module config successfully set"""'}, {}), "('module config successfully set')", False, 'import logging\n'), ((18, 41, 18, 76), 'lora_multihop.serial_connection.str_to_bytes', 'serial_connection.str_to_bytes', ({(18, 72, 18, 75): 'cmd'}, {}), '(cmd)', False, 'from lora_multihop import serial_connection, variables\n'), ((19, 8, 19, 71), 'logging.debug', 'logging.debug', ({(19, 22, 19, 70): 'f"""module address successfully set to: {address}"""'}, {}), "(f'module address successfully set to: {address}')", False, 'import logging\n'), ((26, 38, 26, 88), 'lora_multihop.serial_connection.str_to_bytes', 'serial_connection.str_to_bytes', ({(26, 69, 26, 87): 'variables.GET_ADDR'}, {}), '(variables.GET_ADDR)', False, 'from lora_multihop import serial_connection, variables\n')] |
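A hedged illustration of the frame format that get_current_address() above appears to parse: judging from the checks on the first and last fields, the module replies with a delimiter-separated 'AT,<address>,OK' frame, and the middle field is the address. The frame and the ',' delimiter below are assumptions; a real frame comes from the serial port.

raw = 'AT,21,OK'  # hypothetical reply; LORA_MODULE_DELIMITER assumed to be ','
parts = raw.split(',')
if parts[0].strip() != 'AT' or parts[2].strip() != 'OK':
    raise ValueError('could not get address of module')
address = parts[1]  # '21'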
ferdianap/Eris_test | eris/script/ferdian.py | c2a00d65f816ad6d48a65c14b4bea4f3d081b86b | #!/usr/bin/env python
# Copyright (c) 2013-2014, Rethink Robotics
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the Rethink Robotics nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
copied from
Baxter RSDK Joint Position Example: file playback
"""
from __future__ import print_function
import sys
import rospy
import baxter_interface
from baxter_interface import CHECK_VERSION
import glob
from std_srvs.srv import Empty
def try_float(x):
try:
return float(x)
except ValueError:
return None
def clean_line(line, names):
"""
Cleans a single line of recorded joint positions
@param line: the line described in a list to process
@param names: joint name keys
"""
#convert the line of strings to a float or None
line = [try_float(x) for x in line.rstrip().split(',')]
#zip the values with the joint names
combined = zip(names[1:], line[1:])
#take out any tuples that have a none value
cleaned = [x for x in combined if x[1] is not None]
#convert it to a dictionary with only valid commands
command = dict(cleaned)
left_command = dict((key, command[key]) for key in command.keys()
if key[:-2] == 'left_')
right_command = dict((key, command[key]) for key in command.keys()
if key[:-2] == 'right_')
return (command, left_command, right_command, line)
def map_file(filename, loops=1):
"""
Loops through csv file
@param filename: the file to play
@param loops: number of times to loop
values < 0 mean 'infinite'
    Does not loop indefinitely, but only until the file is read
    and processed. Reads each line, splits it into columns and
    formats each line into a controller command in the form of
    name/value pairs. Names come from the column headers;
    the first column is the time stamp.
"""
left = baxter_interface.Limb('left')
right = baxter_interface.Limb('right')
grip_left = baxter_interface.Gripper('left', CHECK_VERSION)
grip_right = baxter_interface.Gripper('right', CHECK_VERSION)
rate = rospy.Rate(1000)
if grip_left.error():
grip_left.reset()
if grip_right.error():
grip_right.reset()
if (not grip_left.calibrated() and
grip_left.type() != 'custom'):
grip_left.calibrate()
if (not grip_right.calibrated() and
grip_right.type() != 'custom'):
grip_right.calibrate()
print("Playing back: %s" % (filename,))
with open(filename, 'r') as f:
lines = f.readlines()
keys = lines[0].rstrip().split(',')
l = 0
# If specified, repeat the file playback 'loops' number of times
while loops < 1 or l < loops:
i = 0
l += 1
print("Moving to start position...")
_cmd, lcmd_start, rcmd_start, _raw = clean_line(lines[1], keys)
left.move_to_joint_positions(lcmd_start)
right.move_to_joint_positions(rcmd_start)
start_time = rospy.get_time()
for values in lines[1:]:
i += 1
loopstr = str(loops) if loops > 0 else "forever"
sys.stdout.write("\r Record %d of %d, loop %d of %s" %
(i, len(lines) - 1, l, loopstr))
sys.stdout.flush()
cmd, lcmd, rcmd, values = clean_line(values, keys)
#command this set of commands until the next frame
while (rospy.get_time() - start_time) < values[0]:
if rospy.is_shutdown():
print("\n Aborting - ROS shutdown")
return False
if len(lcmd):
left.set_joint_positions(lcmd)
if len(rcmd):
right.set_joint_positions(rcmd)
if ('left_gripper' in cmd and
grip_left.type() != 'custom'):
grip_left.command_position(cmd['left_gripper'])
if ('right_gripper' in cmd and
grip_right.type() != 'custom'):
grip_right.command_position(cmd['right_gripper'])
rate.sleep()
        print()
return True
def main():
dir = '/home/ros-baxter/sequence1/'
fam = 'no'
ext = '.rec'
#fname = fam+'*'+ext
#fam_list = glob.glob(ext)
#print(fam_list)
rospy.init_node("ferdian_file_playback")
client = rospy.ServiceProxy("ferdian_example_service",Empty)
rs = baxter_interface.RobotEnable(CHECK_VERSION)
rs.enable()
rospy.loginfo("waiting for service")
rospy.wait_for_service("ferdian_example_service")
rospy.loginfo("service available")
#put your loop here
for file in sorted(glob.glob('./sequence1/*.rec')):
map_file(file)
rospy.loginfo("sending signal...") # to the image processing node
#for x in range(0, 3):
# map_file("AtoE.rec")
res = client()
rospy.loginfo("service returned")
###
if __name__ == '__main__':
main()
| [((95, 11, 95, 40), 'baxter_interface.Limb', 'baxter_interface.Limb', ({(95, 33, 95, 39): '"""left"""'}, {}), "('left')", False, 'import baxter_interface\n'), ((96, 12, 96, 42), 'baxter_interface.Limb', 'baxter_interface.Limb', ({(96, 34, 96, 41): '"""right"""'}, {}), "('right')", False, 'import baxter_interface\n'), ((97, 16, 97, 63), 'baxter_interface.Gripper', 'baxter_interface.Gripper', ({(97, 41, 97, 47): '"""left"""', (97, 49, 97, 62): 'CHECK_VERSION'}, {}), "('left', CHECK_VERSION)", False, 'import baxter_interface\n'), ((98, 17, 98, 65), 'baxter_interface.Gripper', 'baxter_interface.Gripper', ({(98, 42, 98, 49): '"""right"""', (98, 51, 98, 64): 'CHECK_VERSION'}, {}), "('right', CHECK_VERSION)", False, 'import baxter_interface\n'), ((99, 11, 99, 27), 'rospy.Rate', 'rospy.Rate', ({(99, 22, 99, 26): '1000'}, {}), '(1000)', False, 'import rospy\n'), ((162, 1, 162, 41), 'rospy.init_node', 'rospy.init_node', ({(162, 17, 162, 40): '"""ferdian_file_playback"""'}, {}), "('ferdian_file_playback')", False, 'import rospy\n'), ((163, 10, 163, 61), 'rospy.ServiceProxy', 'rospy.ServiceProxy', ({(163, 29, 163, 54): '"""ferdian_example_service"""', (163, 55, 163, 60): 'Empty'}, {}), "('ferdian_example_service', Empty)", False, 'import rospy\n'), ((164, 6, 164, 49), 'baxter_interface.RobotEnable', 'baxter_interface.RobotEnable', ({(164, 35, 164, 48): 'CHECK_VERSION'}, {}), '(CHECK_VERSION)', False, 'import baxter_interface\n'), ((166, 1, 166, 37), 'rospy.loginfo', 'rospy.loginfo', ({(166, 15, 166, 36): '"""waiting for service"""'}, {}), "('waiting for service')", False, 'import rospy\n'), ((167, 1, 167, 50), 'rospy.wait_for_service', 'rospy.wait_for_service', ({(167, 24, 167, 49): '"""ferdian_example_service"""'}, {}), "('ferdian_example_service')", False, 'import rospy\n'), ((168, 1, 168, 35), 'rospy.loginfo', 'rospy.loginfo', ({(168, 15, 168, 34): '"""service available"""'}, {}), "('service available')", False, 'import rospy\n'), ((176, 1, 176, 34), 'rospy.loginfo', 'rospy.loginfo', ({(176, 15, 176, 33): '"""service returned"""'}, {}), "('service returned')", False, 'import rospy\n'), ((127, 21, 127, 37), 'rospy.get_time', 'rospy.get_time', ({}, {}), '()', False, 'import rospy\n'), ((170, 20, 170, 50), 'glob.glob', 'glob.glob', ({(170, 30, 170, 49): '"""./sequence1/*.rec"""'}, {}), "('./sequence1/*.rec')", False, 'import glob\n'), ((172, 2, 172, 36), 'rospy.loginfo', 'rospy.loginfo', ({(172, 16, 172, 35): '"""sending signal..."""'}, {}), "('sending signal...')", False, 'import rospy\n'), ((133, 12, 133, 30), 'sys.stdout.flush', 'sys.stdout.flush', ({}, {}), '()', False, 'import sys\n'), ((138, 19, 138, 38), 'rospy.is_shutdown', 'rospy.is_shutdown', ({}, {}), '()', False, 'import rospy\n'), ((137, 19, 137, 35), 'rospy.get_time', 'rospy.get_time', ({}, {}), '()', False, 'import rospy\n')] |
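A hedged sketch of the .rec recording format that map_file() and clean_line() above consume: a CSV whose header names the timestamp column followed by joint (and gripper) names, with one row of floats per frame. The joint names and values below are illustrative only.

header = "time,left_s0,left_s1,right_s0,right_s1,left_gripper"
frame = "0.10,0.1234,-0.5020,0.0871,-0.4987,100.0"

names = header.rstrip().split(',')
line = [float(x) for x in frame.rstrip().split(',')]
command = dict(zip(names[1:], line[1:]))  # joint name -> commanded position
# line[0] (here 0.10) is the frame's timestamp, used to pace playback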
zhangjun0x01/Alink | core/src/main/python/akdl/entry/base_entry.py | c1cd3380bed29a4be4eb058a7462213869c02387 | import abc
from typing import Dict, Callable
import tensorflow as tf
from flink_ml_framework.context import Context
from flink_ml_framework.java_file import *
from ..runner import tf_helper, io_helper
from ..runner.output_writer import DirectOutputWriter
try:
from flink_ml_tensorflow.tensorflow_context import TFContext
except:
from flink_ml_tensorflow2.tensorflow_context import TFContext
# noinspection PyUnresolvedReferences
from tensorflow_io.core.python.ops import core_ops
__all__ = ['TF1_TYPE', 'TF2_TYPE']
TF1_TYPE = 'tf1'
TF2_TYPE = 'tf2'
class BaseEntry(abc.ABC):
def __init__(self, func_name, engine_type):
self.func_name = func_name
self.engine_type = engine_type
@staticmethod
def get_func_by_name(func_name):
"""
Get function by the func name
:param func_name: func name
:return: function
"""
if '.' not in func_name:
if func_name in globals():
return globals()[func_name]
else:
raise RuntimeError('cannot find function[{}]'.format(func_name))
else:
module_name, func_name = func_name.rsplit('.', 1)
import importlib
# load the module, will raise ImportError if module cannot be loaded
m = importlib.import_module(module_name)
# get the class, will raise AttributeError if class cannot be found
c = getattr(m, func_name)
return c
@abc.abstractmethod
def construct_args(self, **kwargs):
pass
def is_batch(self):
return True
def post_process(self, **kwargs):
pass
def entry_func(self, context: Context):
tf_context = TFContext(context)
properties = tf_context.properties
print('properties', properties, flush=True)
# intra_op_parallelism is set by akdl, because there is a bug in TensorFlow 1.x
# See: https://stackoverflow.com/questions/34426268/restricting-number-of-cores-used
intra_op_parallelism = int(properties['ALINK:intra_op_parallelism'])
if self.engine_type == TF1_TYPE:
tf_helper.set_intra_op_parallelism(intra_op_parallelism_threads=intra_op_parallelism)
elif self.engine_type == TF2_TYPE:
tf.config.threading.set_intra_op_parallelism_threads(intra_op_parallelism)
num_workers = int(properties['ALINK:num_workers'])
work_dir = properties['ALINK:work_dir']
cluster, task_type, task_index = tf_context.export_estimator_cluster()
if self.is_batch():
java_queue_file = JavaFile(context.from_java(), context.to_java())
dataset_file = os.path.join(work_dir, 'dataset.tfrecords')
dataset, dataset_length = io_helper.convert_java_queue_file_to_repeatable_dataset(java_queue_file,
dataset_file)
print("number of records: " + str(dataset_length), flush=True)
dataset_fn: Callable[[], tf.data.TFRecordDataset] = lambda: tf.data.TFRecordDataset(dataset_file)
else:
dataset_fn: Callable[[], tf.data.TFRecordDataset] = lambda: tf_context.flink_stream_dataset()
dataset = None
dataset_file = None
dataset_length = None
saved_model_dir = os.path.join(work_dir, 'savedmodel')
user_params: Dict = json.loads(properties['ALINK:user_defined_params'])
for i in range(1, 1024):
key = "ALINK:bc_" + str(i)
if key in properties:
user_params[key] = context.properties[key]
key = "ALINK:model_dir"
if key in properties:
user_params[key] = properties[key]
output_writer = DirectOutputWriter(tf_context.from_java(), tf_context.to_java())
locals_copy = locals().copy()
locals_copy.pop("self")
print("locals_copy = ", locals_copy, flush=True)
args = self.construct_args(**locals_copy)
func = self.get_func_by_name(self.func_name)
func(args)
print("task_type = {}, task_index = {}: done tf_user_main".format(task_type, task_index), flush=True)
local_vars = locals().copy()
local_vars.pop('self')
self.post_process(**local_vars)
print("task_type = {}, task_index = {}: exit".format(task_type, task_index), flush=True)
output_writer.close()
| [((63, 21, 63, 39), 'flink_ml_tensorflow2.tensorflow_context.TFContext', 'TFContext', ({(63, 31, 63, 38): 'context'}, {}), '(context)', False, 'from flink_ml_tensorflow2.tensorflow_context import TFContext\n'), ((47, 16, 47, 52), 'importlib.import_module', 'importlib.import_module', ({(47, 40, 47, 51): 'module_name'}, {}), '(module_name)', False, 'import importlib\n'), ((73, 12, 73, 86), 'tensorflow.config.threading.set_intra_op_parallelism_threads', 'tf.config.threading.set_intra_op_parallelism_threads', ({(73, 65, 73, 85): 'intra_op_parallelism'}, {}), '(intra_op_parallelism)', True, 'import tensorflow as tf\n'), ((85, 72, 85, 109), 'tensorflow.data.TFRecordDataset', 'tf.data.TFRecordDataset', ({(85, 96, 85, 108): 'dataset_file'}, {}), '(dataset_file)', True, 'import tensorflow as tf\n')] |
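A hedged sketch of how a concrete entry might subclass the abstract BaseEntry above: construct_args receives the locals captured inside entry_func (which include dataset_fn, user_params, and saved_model_dir) and packs whatever the user function expects. The class name and the argument selection below are illustrative assumptions.

class ExampleTrainEntry(BaseEntry):
    def construct_args(self, **kwargs):
        # Pick the pieces of entry_func's locals that the user function needs.
        return {
            'dataset_fn': kwargs['dataset_fn'],
            'user_params': kwargs['user_params'],
            'saved_model_dir': kwargs['saved_model_dir'],
        }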
jbcurtin/cassandra-orm | corm-tests/test_corm_api.py | 2c5540de36166c81832c1ccd0ee40c52e598e05c | import pytest
ENCODING = 'utf-8'
@pytest.fixture(scope='function', autouse=True)
def setup_case(request):
def destroy_case():
from corm import annihilate_keyspace_tables, SESSIONS
annihilate_keyspace_tables('mykeyspace')
for keyspace_name, session in SESSIONS.copy().items():
if keyspace_name in ['global']:
continue
session.shutdown()
del SESSIONS[keyspace_name]
request.addfinalizer(destroy_case)
def test_initial_api():
from corm import register_table, insert, sync_schema
from corm.models import CORMBase
class TestModel(CORMBase):
__keyspace__ = 'mykeyspace'
something: str
other: str
register_table(TestModel)
sync_schema()
one = TestModel('one', 'two')
two = TestModel('one', 'two')
three = TestModel('one', 'three')
insert([one, two, three])
def test_keyspace_api():
import hashlib
import uuid
from corm import register_table, insert, sync_schema, \
keyspace_exists, keyspace_destroy, keyspace_create
from corm.datatypes import CassandraKeyspaceStrategy
from corm.models import CORMBase
    # Keyspace names seem to need to start with a letter, hence the "abc_" prefix
keyspace_name = hashlib.md5(str(uuid.uuid4()).encode(ENCODING)).hexdigest()
keyspace_name = f'abc_{keyspace_name}'
assert keyspace_exists(keyspace_name) is False
keyspace_create(keyspace_name, CassandraKeyspaceStrategy.Simple)
assert keyspace_exists(keyspace_name) is True
keyspace_destroy(keyspace_name)
assert keyspace_exists(keyspace_name) is False
class TestModelKeyspace(CORMBase):
__keyspace__ = keyspace_name
item: str
register_table(TestModelKeyspace)
assert keyspace_exists(keyspace_name) is False
sync_schema()
assert keyspace_exists(keyspace_name) is True
one = TestModelKeyspace('one')
insert([one])
keyspace_destroy(keyspace_name)
assert keyspace_exists(keyspace_name) is False
def test_float_api():
from corm import register_table, insert, sync_schema, select
from corm.models import CORMBase
class TestModelFloat(CORMBase):
__keyspace__ = 'mykeyspace'
input_one: float
register_table(TestModelFloat)
sync_schema()
data = 324.593998934
one = TestModelFloat(data)
insert([one])
for idx, entry in enumerate(select(TestModelFloat)):
assert entry.input_one == data
def test_boolean_api():
from corm import register_table, insert, sync_schema
from corm.models import CORMBase
from datetime import datetime
class TestModelBoolean(CORMBase):
__keyspace__ = 'mykeyspace'
item: str
created: datetime
value: bool
register_table(TestModelBoolean)
sync_schema()
one = TestModelBoolean('one', datetime.utcnow(), True)
two = TestModelBoolean('two', datetime.utcnow(), False)
insert([one, two])
def test_datetime_api():
from corm import register_table, insert, sync_schema
from corm.models import CORMBase
from datetime import datetime
class TestModelDatetime(CORMBase):
__keyspace__ = 'mykeyspace'
item: str
created: datetime
register_table(TestModelDatetime)
sync_schema()
one = TestModelDatetime('one', datetime.utcnow())
two = TestModelDatetime('two', datetime.utcnow())
insert([one, two])
def test_set_api():
from corm import register_table, insert, sync_schema
from corm.models import CORMBase
from corm.annotations import Set
class TestModelSet(CORMBase):
__keyspace__ = 'mykeyspace'
something: str
other: Set
register_table(TestModelSet)
sync_schema()
one = TestModelSet('one', {'first'})
two = TestModelSet('two', {'last', 'second-to-last'})
three = TestModelSet('three', {'last', 'second-to-last', 'last'})
four = TestModelSet('four', ['one', 'two', 'three', 'four'])
insert([one, two, three, four])
def test_select_api():
import random
from corm import register_table, insert, sync_schema, select
from corm.models import CORMBase
from corm.annotations import Set
from datetime import datetime
MAX_INT = 1000
class TestModelSelect(CORMBase):
__keyspace__ = 'mykeyspace'
random_number: int
created: datetime
register_table(TestModelSelect)
sync_schema()
insert_later = []
values = []
for idx in range(0, 100):
values.append({
'random_number': random.randint(0, MAX_INT),
'created': datetime.utcnow()
})
entry = TestModelSelect(values[-1]['random_number'], values[-1]['created'])
insert_later.append(entry)
if len(insert_later) > 20:
insert(insert_later)
insert_later = []
insert(insert_later)
for idx, entry in enumerate(select(TestModelSelect, fetch_size=100)):
assert isinstance(entry, TestModelSelect)
# Order is not consistent
# assert entry.random_number == values[idx]['random_number']
# assert entry.created == values[idx]['created']
assert idx > 0
def test_select_where_api():
import random
from corm import register_table, insert, sync_schema, select, where
from corm.models import CORMBase
from datetime import datetime
MAX_INT = 99999
class TestModelSelectSource(CORMBase):
__keyspace__ = 'mykeyspace'
random_number: int
created: datetime
one: str
two: str
class TestModelSelectPivot(CORMBase):
__keyspace__ = 'mykeyspace'
random_number: int
created: datetime
one: str
two: str
source: TestModelSelectSource
# TODO: Build UserType integration
# register_table(TestModelSelectSource)
# register_table(TestModelSelectPivot)
def test_alter_table_api():
from corm import register_table, insert, sync_schema, select, obtain_session
from corm.models import CORMBase
from datetime import datetime
# Create Table or Delete Column on existing Table
class TestModelAlter(CORMBase):
__keyspace__ = 'mykeyspace'
random_number: int
created: datetime
register_table(TestModelAlter)
sync_schema()
COL_CQL = f'''
SELECT
column_name, type
FROM
system_schema.columns
WHERE
table_name = '{TestModelAlter._corm_details.table_name}'
AND
keyspace_name = '{TestModelAlter._corm_details.keyspace}'
'''
rows = [(row.column_name, row.type) for row in obtain_session('mykeyspace').execute(COL_CQL)]
assert len(rows) == 3
# Add Column on existing Table
class TestModelAlter(CORMBase):
__keyspace__ = 'mykeyspace'
random_number: int
created: datetime
new_column: str
register_table(TestModelAlter)
sync_schema()
rows = [(row.column_name, row.type) for row in obtain_session('mykeyspace').execute(COL_CQL)]
assert len(rows) == 4
def test_not_ordered_by_pk_field():
import random
from corm import register_table, insert, sync_schema, select, obtain_session
from corm.models import CORMBase
from datetime import datetime
class TestNotOrderedByPkField(CORMBase):
__keyspace__ = 'mykeyspace'
__primary_keys__ = ['one', 'two', 'three']
random_number: int
created: datetime
one: str
two: str
three: str
register_table(TestNotOrderedByPkField)
sync_schema()
first_entry = TestNotOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'beta')
gamma = TestNotOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'gamma')
delta = TestNotOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'delta')
second_entry = TestNotOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'alpha')
insert([first_entry, gamma, delta, second_entry])
for idx, entry in enumerate(select(TestNotOrderedByPkField)):
if idx == 0:
assert entry.three != 'alpha'
def test_ordered_by_pk_field():
import random
from corm import register_table, insert, sync_schema, select, obtain_session
from corm.models import CORMBase
from corm.datatypes import TableOrdering
from datetime import datetime
class TestOrderedByPkField(CORMBase):
__keyspace__ = 'mykeyspace'
__primary_keys__ = ['one', 'two', 'three']
__ordered_by_primary_keys__ = TableOrdering.DESC
random_number: int
created: datetime
one: str
two: str
three: str
register_table(TestOrderedByPkField)
sync_schema()
first_entry = TestOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'beta')
second_entry = TestOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'alpha')
gamma = TestOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'gamma')
delta = TestOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'delta')
insert([first_entry, second_entry, delta, gamma])
for idx, entry in enumerate(select(TestOrderedByPkField)):
if idx == 0:
assert entry.three == 'alpha'
elif idx == 1:
assert entry.three == 'beta'
elif idx == 2:
assert entry.three == 'delta'
elif idx == 3:
assert entry.three == 'gamma'
def test_corm_auth():
import os
os.environ['CLUSTER_PORT'] = '9043'
os.environ['CLUSTER_USERNAME'] = 'cassandra'
os.environ['CLUSTER_PASSWORD'] = 'cassandra'
from corm import register_table, insert, sync_schema
from corm.models import CORMBase
class TestCORMAuth(CORMBase):
one: str
__keyspace__ = 'test_corm_auth'
register_table(TestCORMAuth)
sync_schema()
def test_corm_enum():
import enum
from corm import register_table, insert, sync_schema, select
from corm.models import CORMBase
class OptionList(enum.Enum):
One = 'one'
Two = 'two'
class TestCormEnum(CORMBase):
__keyspace__ = 'test_corm_enum'
option: OptionList
register_table(TestCormEnum)
sync_schema()
first = TestCormEnum(OptionList.One)
second = TestCormEnum(OptionList.Two)
insert([first, second])
for idx, entry in enumerate(select(TestCormEnum)):
assert entry.option in OptionList.__members__.values()
def test_corm_where():
import enum
from corm import register_table, insert, sync_schema, select, where, cp, Operator
from corm.models import CORMBase
class OptionList(enum.Enum):
One = 'one'
Two = 'two'
class TestCORMWhere(CORMBase):
__keyspace__ = 'test_corm_where'
option: OptionList
score: int
register_table(TestCORMWhere)
sync_schema()
one = TestCORMWhere(OptionList.One, 1)
two = TestCORMWhere(OptionList.One, 2)
three = TestCORMWhere(OptionList.Two, 3)
four = TestCORMWhere(OptionList.Two, 4)
insert([one, two, three, four])
for idx, entry in enumerate(where(TestCORMWhere, [cp(Operator.Equal, 'score', 4)])):
assert idx == 0
assert entry.score == 4
assert entry.option == OptionList.Two
for idx, entry in enumerate(where(TestCORMWhere, [cp(Operator.Equal, 'score', 1)])):
assert idx == 0
assert entry.score == 1
assert entry.option == OptionList.One
for idx, entry in enumerate(where(TestCORMWhere, [cp(Operator.Equal, 'option', OptionList.One)])):
assert idx in [0, 1]
assert entry.score in [1, 2]
assert entry.option == OptionList.One
for idx, entry in enumerate(where(TestCORMWhere, [cp(Operator.Equal, 'option', OptionList.Two)])):
assert idx in [0, 1]
assert entry.score in [3, 4]
assert entry.option == OptionList.Two
def test_corm_uuid():
import uuid
from corm import register_table, insert, sync_schema, select
from corm.models import CORMBase
class TestCORMUUID(CORMBase):
__keyspace__ = 'mykeyspace'
identity_test: uuid.UUID
register_table(TestCORMUUID)
sync_schema()
one = TestCORMUUID(uuid.uuid4())
insert([one])
for entry in select(TestCORMUUID):
assert isinstance(entry.identity_test, uuid.UUID)
| [((5, 1, 5, 47), 'pytest.fixture', 'pytest.fixture', (), '', False, 'import pytest\n'), ((30, 4, 30, 29), 'corm.register_table', 'register_table', ({(30, 19, 30, 28): 'TestModel'}, {}), '(TestModel)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((31, 4, 31, 17), 'corm.sync_schema', 'sync_schema', ({}, {}), '()', False, 'from corm import register_table, insert, sync_schema, select\n'), ((35, 4, 35, 29), 'corm.insert', 'insert', ({(35, 11, 35, 28): '[one, two, three]'}, {}), '([one, two, three])', False, 'from corm import register_table, insert, sync_schema, select\n'), ((50, 4, 50, 68), 'corm.keyspace_create', 'keyspace_create', ({(50, 20, 50, 33): 'keyspace_name', (50, 35, 50, 67): 'CassandraKeyspaceStrategy.Simple'}, {}), '(keyspace_name, CassandraKeyspaceStrategy.Simple)', False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((52, 4, 52, 35), 'corm.keyspace_destroy', 'keyspace_destroy', ({(52, 21, 52, 34): 'keyspace_name'}, {}), '(keyspace_name)', False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((60, 4, 60, 37), 'corm.register_table', 'register_table', ({(60, 19, 60, 36): 'TestModelKeyspace'}, {}), '(TestModelKeyspace)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((62, 4, 62, 17), 'corm.sync_schema', 'sync_schema', ({}, {}), '()', False, 'from corm import register_table, insert, sync_schema, select\n'), ((65, 4, 65, 17), 'corm.insert', 'insert', ({(65, 11, 65, 16): '[one]'}, {}), '([one])', False, 'from corm import register_table, insert, sync_schema, select\n'), ((66, 4, 66, 35), 'corm.keyspace_destroy', 'keyspace_destroy', ({(66, 21, 66, 34): 'keyspace_name'}, {}), '(keyspace_name)', False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((78, 4, 78, 34), 'corm.register_table', 'register_table', ({(78, 19, 78, 33): 'TestModelFloat'}, {}), '(TestModelFloat)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((79, 4, 79, 17), 'corm.sync_schema', 'sync_schema', ({}, {}), '()', False, 'from corm import register_table, insert, sync_schema, select\n'), ((82, 4, 82, 17), 'corm.insert', 'insert', ({(82, 11, 82, 16): '[one]'}, {}), '([one])', False, 'from corm import register_table, insert, sync_schema, select\n'), ((99, 4, 99, 36), 'corm.register_table', 'register_table', ({(99, 19, 99, 35): 'TestModelBoolean'}, {}), '(TestModelBoolean)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((100, 4, 100, 17), 'corm.sync_schema', 'sync_schema', ({}, {}), '()', False, 'from corm import register_table, insert, sync_schema, select\n'), ((103, 4, 103, 22), 'corm.insert', 'insert', ({(103, 11, 103, 21): '[one, two]'}, {}), '([one, two])', False, 'from corm import register_table, insert, sync_schema, select\n'), ((117, 4, 117, 37), 'corm.register_table', 'register_table', ({(117, 19, 117, 36): 'TestModelDatetime'}, {}), '(TestModelDatetime)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((118, 4, 118, 17), 'corm.sync_schema', 'sync_schema', ({}, {}), '()', False, 'from corm import register_table, insert, sync_schema, select\n'), ((121, 4, 121, 22), 'corm.insert', 'insert', ({(121, 11, 121, 21): '[one, two]'}, {}), '([one, two])', False, 'from corm import register_table, insert, sync_schema, select\n'), ((134, 4, 134, 32), 'corm.register_table', 'register_table', ({(134, 
19, 134, 31): 'TestModelSet'}, {}), '(TestModelSet)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((135, 4, 135, 17), 'corm.sync_schema', 'sync_schema', ({}, {}), '()', False, 'from corm import register_table, insert, sync_schema, select\n'), ((140, 4, 140, 35), 'corm.insert', 'insert', ({(140, 11, 140, 34): '[one, two, three, four]'}, {}), '([one, two, three, four])', False, 'from corm import register_table, insert, sync_schema, select\n'), ((156, 4, 156, 35), 'corm.register_table', 'register_table', ({(156, 19, 156, 34): 'TestModelSelect'}, {}), '(TestModelSelect)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((157, 4, 157, 17), 'corm.sync_schema', 'sync_schema', ({}, {}), '()', False, 'from corm import register_table, insert, sync_schema, select\n'), ((171, 4, 171, 24), 'corm.insert', 'insert', ({(171, 11, 171, 23): 'insert_later'}, {}), '(insert_later)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((221, 4, 221, 34), 'corm.register_table', 'register_table', ({(221, 19, 221, 33): 'TestModelAlter'}, {}), '(TestModelAlter)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((222, 4, 222, 17), 'corm.sync_schema', 'sync_schema', ({}, {}), '()', False, 'from corm import register_table, insert, sync_schema, select\n'), ((245, 4, 245, 34), 'corm.register_table', 'register_table', ({(245, 19, 245, 33): 'TestModelAlter'}, {}), '(TestModelAlter)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((246, 4, 246, 17), 'corm.sync_schema', 'sync_schema', ({}, {}), '()', False, 'from corm import register_table, insert, sync_schema, select\n'), ((268, 4, 268, 43), 'corm.register_table', 'register_table', ({(268, 19, 268, 42): 'TestNotOrderedByPkField'}, {}), '(TestNotOrderedByPkField)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((269, 4, 269, 17), 'corm.sync_schema', 'sync_schema', ({}, {}), '()', False, 'from corm import register_table, insert, sync_schema, select\n'), ((275, 4, 275, 53), 'corm.insert', 'insert', ({(275, 11, 275, 52): '[first_entry, gamma, delta, second_entry]'}, {}), '([first_entry, gamma, delta, second_entry])', False, 'from corm import register_table, insert, sync_schema, select\n'), ((299, 4, 299, 40), 'corm.register_table', 'register_table', ({(299, 19, 299, 39): 'TestOrderedByPkField'}, {}), '(TestOrderedByPkField)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((300, 4, 300, 17), 'corm.sync_schema', 'sync_schema', ({}, {}), '()', False, 'from corm import register_table, insert, sync_schema, select\n'), ((306, 4, 306, 53), 'corm.insert', 'insert', ({(306, 11, 306, 52): '[first_entry, second_entry, delta, gamma]'}, {}), '([first_entry, second_entry, delta, gamma])', False, 'from corm import register_table, insert, sync_schema, select\n'), ((333, 4, 333, 32), 'corm.register_table', 'register_table', ({(333, 19, 333, 31): 'TestCORMAuth'}, {}), '(TestCORMAuth)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((334, 4, 334, 17), 'corm.sync_schema', 'sync_schema', ({}, {}), '()', False, 'from corm import register_table, insert, sync_schema, select\n'), ((351, 4, 351, 32), 'corm.register_table', 'register_table', ({(351, 19, 351, 31): 'TestCormEnum'}, {}), '(TestCormEnum)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((352, 4, 352, 17), 'corm.sync_schema', 'sync_schema', ({}, {}), '()', False, 'from corm import register_table, 
insert, sync_schema, select\n'), ((356, 4, 356, 27), 'corm.insert', 'insert', ({(356, 11, 356, 26): '[first, second]'}, {}), '([first, second])', False, 'from corm import register_table, insert, sync_schema, select\n'), ((378, 4, 378, 33), 'corm.register_table', 'register_table', ({(378, 19, 378, 32): 'TestCORMWhere'}, {}), '(TestCORMWhere)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((379, 4, 379, 17), 'corm.sync_schema', 'sync_schema', ({}, {}), '()', False, 'from corm import register_table, insert, sync_schema, select\n'), ((384, 4, 384, 35), 'corm.insert', 'insert', ({(384, 11, 384, 34): '[one, two, three, four]'}, {}), '([one, two, three, four])', False, 'from corm import register_table, insert, sync_schema, select\n'), ((419, 4, 419, 32), 'corm.register_table', 'register_table', ({(419, 19, 419, 31): 'TestCORMUUID'}, {}), '(TestCORMUUID)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((420, 4, 420, 17), 'corm.sync_schema', 'sync_schema', ({}, {}), '()', False, 'from corm import register_table, insert, sync_schema, select\n'), ((422, 4, 422, 17), 'corm.insert', 'insert', ({(422, 11, 422, 16): '[one]'}, {}), '([one])', False, 'from corm import register_table, insert, sync_schema, select\n'), ((423, 17, 423, 37), 'corm.select', 'select', ({(423, 24, 423, 36): 'TestCORMUUID'}, {}), '(TestCORMUUID)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((9, 8, 9, 48), 'corm.annihilate_keyspace_tables', 'annihilate_keyspace_tables', ({(9, 35, 9, 47): '"""mykeyspace"""'}, {}), "('mykeyspace')", False, 'from corm import annihilate_keyspace_tables, SESSIONS\n'), ((49, 11, 49, 41), 'corm.keyspace_exists', 'keyspace_exists', ({(49, 27, 49, 40): 'keyspace_name'}, {}), '(keyspace_name)', False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((51, 11, 51, 41), 'corm.keyspace_exists', 'keyspace_exists', ({(51, 27, 51, 40): 'keyspace_name'}, {}), '(keyspace_name)', False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((53, 11, 53, 41), 'corm.keyspace_exists', 'keyspace_exists', ({(53, 27, 53, 40): 'keyspace_name'}, {}), '(keyspace_name)', False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((61, 11, 61, 41), 'corm.keyspace_exists', 'keyspace_exists', ({(61, 27, 61, 40): 'keyspace_name'}, {}), '(keyspace_name)', False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((63, 11, 63, 41), 'corm.keyspace_exists', 'keyspace_exists', ({(63, 27, 63, 40): 'keyspace_name'}, {}), '(keyspace_name)', False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((67, 11, 67, 41), 'corm.keyspace_exists', 'keyspace_exists', ({(67, 27, 67, 40): 'keyspace_name'}, {}), '(keyspace_name)', False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((83, 32, 83, 54), 'corm.select', 'select', ({(83, 39, 83, 53): 'TestModelFloat'}, {}), '(TestModelFloat)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((101, 34, 101, 51), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime\n'), ((102, 34, 102, 51), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import 
datetime\n'), ((119, 35, 119, 52), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime\n'), ((120, 35, 120, 52), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime\n'), ((172, 32, 172, 71), 'corm.select', 'select', (), '', False, 'from corm import register_table, insert, sync_schema, select\n'), ((271, 42, 271, 66), 'random.randint', 'random.randint', ({(271, 57, 271, 58): '0', (271, 60, 271, 65): '99999'}, {}), '(0, 99999)', False, 'import random\n'), ((271, 68, 271, 85), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime\n'), ((272, 36, 272, 60), 'random.randint', 'random.randint', ({(272, 51, 272, 52): '0', (272, 54, 272, 59): '99999'}, {}), '(0, 99999)', False, 'import random\n'), ((272, 62, 272, 79), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime\n'), ((273, 36, 273, 60), 'random.randint', 'random.randint', ({(273, 51, 273, 52): '0', (273, 54, 273, 59): '99999'}, {}), '(0, 99999)', False, 'import random\n'), ((273, 62, 273, 79), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime\n'), ((274, 43, 274, 67), 'random.randint', 'random.randint', ({(274, 58, 274, 59): '0', (274, 61, 274, 66): '99999'}, {}), '(0, 99999)', False, 'import random\n'), ((274, 69, 274, 86), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime\n'), ((276, 32, 276, 63), 'corm.select', 'select', ({(276, 39, 276, 62): 'TestNotOrderedByPkField'}, {}), '(TestNotOrderedByPkField)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((302, 39, 302, 63), 'random.randint', 'random.randint', ({(302, 54, 302, 55): '0', (302, 57, 302, 62): '99999'}, {}), '(0, 99999)', False, 'import random\n'), ((302, 65, 302, 82), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime\n'), ((303, 40, 303, 64), 'random.randint', 'random.randint', ({(303, 55, 303, 56): '0', (303, 58, 303, 63): '99999'}, {}), '(0, 99999)', False, 'import random\n'), ((303, 66, 303, 83), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime\n'), ((304, 33, 304, 57), 'random.randint', 'random.randint', ({(304, 48, 304, 49): '0', (304, 51, 304, 56): '99999'}, {}), '(0, 99999)', False, 'import random\n'), ((304, 59, 304, 76), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime\n'), ((305, 33, 305, 57), 'random.randint', 'random.randint', ({(305, 48, 305, 49): '0', (305, 51, 305, 56): '99999'}, {}), '(0, 99999)', False, 'import random\n'), ((305, 59, 305, 76), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime\n'), ((307, 32, 307, 60), 'corm.select', 'select', ({(307, 39, 307, 59): 'TestOrderedByPkField'}, {}), '(TestOrderedByPkField)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((358, 32, 358, 52), 'corm.select', 'select', ({(358, 39, 358, 51): 'TestCormEnum'}, {}), '(TestCormEnum)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((421, 23, 421, 35), 'uuid.uuid4', 'uuid.uuid4', ({}, {}), '()', False, 'import uuid\n'), ((168, 12, 168, 32), 'corm.insert', 'insert', ({(168, 19, 168, 31): 'insert_later'}, {}), '(insert_later)', False, 'from corm import register_table, insert, sync_schema, select\n'), ((10, 
38, 10, 53), 'corm.SESSIONS.copy', 'SESSIONS.copy', ({}, {}), '()', False, 'from corm import annihilate_keyspace_tables, SESSIONS\n'), ((162, 29, 162, 55), 'random.randint', 'random.randint', ({(162, 44, 162, 45): '(0)', (162, 47, 162, 54): 'MAX_INT'}, {}), '(0, MAX_INT)', False, 'import random\n'), ((163, 23, 163, 40), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime\n'), ((386, 54, 386, 84), 'corm.cp', 'cp', ({(386, 57, 386, 71): 'Operator.Equal', (386, 73, 386, 80): '"""score"""', (386, 82, 386, 83): '(4)'}, {}), "(Operator.Equal, 'score', 4)", False, 'from corm import register_table, insert, sync_schema, select, where, cp, Operator\n'), ((391, 54, 391, 84), 'corm.cp', 'cp', ({(391, 57, 391, 71): 'Operator.Equal', (391, 73, 391, 80): '"""score"""', (391, 82, 391, 83): '(1)'}, {}), "(Operator.Equal, 'score', 1)", False, 'from corm import register_table, insert, sync_schema, select, where, cp, Operator\n'), ((396, 54, 396, 98), 'corm.cp', 'cp', ({(396, 57, 396, 71): 'Operator.Equal', (396, 73, 396, 81): '"""option"""', (396, 83, 396, 97): 'OptionList.One'}, {}), "(Operator.Equal, 'option', OptionList.One)", False, 'from corm import register_table, insert, sync_schema, select, where, cp, Operator\n'), ((401, 54, 401, 98), 'corm.cp', 'cp', ({(401, 57, 401, 71): 'Operator.Equal', (401, 73, 401, 81): '"""option"""', (401, 83, 401, 97): 'OptionList.Two'}, {}), "(Operator.Equal, 'option', OptionList.Two)", False, 'from corm import register_table, insert, sync_schema, select, where, cp, Operator\n'), ((234, 51, 234, 79), 'corm.obtain_session', 'obtain_session', ({(234, 66, 234, 78): '"""mykeyspace"""'}, {}), "('mykeyspace')", False, 'from corm import register_table, insert, sync_schema, select, obtain_session\n'), ((248, 51, 248, 79), 'corm.obtain_session', 'obtain_session', ({(248, 66, 248, 78): '"""mykeyspace"""'}, {}), "('mykeyspace')", False, 'from corm import register_table, insert, sync_schema, select, obtain_session\n'), ((47, 36, 47, 48), 'uuid.uuid4', 'uuid.uuid4', ({}, {}), '()', False, 'import uuid\n')] |
UCSB-dataScience-ProjectGroup/movie_rating_prediction | src/utilities/getInfo.py | c0c29c0463dccc6ad286bd59e77993fdf0d05fb2 | import json
import os
from utilities.SaveLoadJson import SaveLoadJson as SLJ
from utilities.LineCount import LineCount as LC
import subprocess
from geolite2 import geolite2
class getData:
#Get Data Functions ------------------------------------------------------
@staticmethod
def getDATA():
result = {"requests":{},
"time":'',
"cpuload":'',
"uptime":'',
"temp":'',
"ip":''}
result["requests"]=getData.getRequests()
time = getData.getTime().split('\t')
result["time"] = time[0]
result["cpuload"]=time[1]
result["uptime"]=getData.getUptime()
result["temp"]=getData.getTemp()
result["ip"]=getData.getIP()
return json.dumps(result)
@staticmethod
def getRequests():
data = SLJ.load('dataStore.txt')
return {"totalRequests":str(data["totalRequests"]),
"totalQueries":str(data["totalQueries"]),
"totalAdjusts":str(data["totalAdjusts"])}
@staticmethod
def getTime():
proc = subprocess.Popen(['uptime'],stdout=subprocess.PIPE, shell=False)
(out, err) = proc.communicate()
return (str(out)[1:9] + '\t' +
str(float(str(out).split(',')[4])*100)+'%')
@staticmethod
def getUptime():
proc = subprocess.Popen(['uptime', '-p'],stdout=subprocess.PIPE, shell=False)
(out, err) = proc.communicate()
return str(out)
@staticmethod
def getTemp():
proc = subprocess.Popen(['vcgencmd', 'measure_temp'],stdout=subprocess.PIPE, shell=False)
(out,err) = proc.communicate()
return str(out)[5:-1]
@staticmethod
def getIP():
proc = subprocess.Popen(['hostname', '-I'],stdout=subprocess.PIPE, shell=False)
(out, err) = proc.communicate()
return str(out)
#Get Access Functions ---------------------------------------------------
@staticmethod
def getAccess():
result={"Countries":dict(),
"CountrySrs":dict(),
"devices":dict(),
"mostRecentSearch":'',
"mostRecentAcc":'',
"mostRecentIP":'',
"recentSearches":[],
"Users":0}
lastNum = 200
total=0
mostRecentIP = ''
mostRecentAcc = ''
mostRecentSearch = ''
Cname='Unknown'
Sname='Unknown'
Ctyname='Unknown'
ips=dict()
logFile = 'utilities/access.log'
newFile='utilities/new.log'
#f = open(newFile, 'w')
with open(logFile, 'r') as lf:
for temp in lf:
line = temp.split(';')
if len(line) > 1:
if line[2] == '200':
if 'GET /find' in line[3]:
#f.write(temp)
mostRecentIP=line[0]
mostRecentAcc=line[1]
reader = geolite2.reader()
loc = reader.get(line[0])
Cname = loc['country']['names']['en']
if 'subdivisions' in loc:
Sname = loc['subdivisions'][0]['names']['en']
else:
Sname='Unknown'
if 'city' in loc:
Ctyname = loc['city']['names']['en']
else:
Ctyname='Unknown'
if Cname not in result["Countries"]:
result["Countries"][Cname]=dict()
result["CountrySrs"][Cname]=0
if Sname not in result["Countries"][Cname]:
result["Countries"][Cname][Sname]=dict()
if Ctyname not in result["Countries"][Cname][Sname]:
result["Countries"][Cname][Sname][Ctyname] = []
result["CountrySrs"][Cname]+=1
total+=1
search = (line[3].split(' ')[1][6:]).replace('%20',' ')
mostRecentSearch=search
if search not in result["Countries"][Cname][Sname][Ctyname]:
result["Countries"][Cname][Sname][Ctyname].append(search)
if len(result["Countries"][Cname][Sname][Ctyname]) >= lastNum:
result["Countries"][Cname][Sname][Ctyname].pop(0)
if search not in result["recentSearches"]:
result["recentSearches"].insert(0,search)
if len(result["recentSearches"]) >= lastNum:
result["recentSearches"].pop(-1)
ips[line[0]]=1
device=(line[4].split('('))
if len(device)>1:
device=device[1]
else:
device="Unknown"
if device not in result["devices"]:
result["devices"][device]=0
result["devices"][device]+=1
#f.close()
#Most recent stuff
result["mostRecentIP"]=mostRecentIP
result["mostRecentAcc"]=mostRecentAcc
result["mostRecentSearch"]=mostRecentSearch
result["mostRecentLoc"]=str(Ctyname+', '+Sname+', '+Cname)
#Unique Users
for key, value in ips.items():
result["Users"]+=1
#Device percents
for key, value in result["devices"].items():
percnt = (float(value)/float(total))*100
result["devices"][key]=format(percnt, '.2f')
#Country percents
for key, value in result["CountrySrs"].items():
percnt = (float(value)/float(total))*100
result["CountrySrs"][key]=format(percnt,'.2f')
#os.system("sudo mv -f "+newFile+" "+logFile)
return json.dumps(result)
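# NOTE: getAccess() assumes semicolon-delimited access.log records shaped like
#   <ip>;<timestamp>;<status>;<request line>;<user agent>
# e.g. "1.2.3.4;12/Mar/2020:10:00:00;200;GET /find?word=hi HTTP/1.1;Mozilla/5.0 (X11; Linux x86_64)"
# (sample values are made up; the fields map to line[0]..line[4] in the parser above)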
| [((29, 15, 29, 33), 'json.dumps', 'json.dumps', ({(29, 26, 29, 32): 'result'}, {}), '(result)', False, 'import json\n'), ((33, 15, 33, 40), 'utilities.SaveLoadJson.SaveLoadJson.load', 'SLJ.load', ({(33, 24, 33, 39): '"""dataStore.txt"""'}, {}), "('dataStore.txt')", True, 'from utilities.SaveLoadJson import SaveLoadJson as SLJ\n'), ((40, 15, 40, 79), 'subprocess.Popen', 'subprocess.Popen', (), '', False, 'import subprocess\n'), ((47, 15, 47, 85), 'subprocess.Popen', 'subprocess.Popen', (), '', False, 'import subprocess\n'), ((53, 15, 53, 97), 'subprocess.Popen', 'subprocess.Popen', (), '', False, 'import subprocess\n'), ((59, 15, 59, 87), 'subprocess.Popen', 'subprocess.Popen', (), '', False, 'import subprocess\n'), ((169, 15, 169, 33), 'json.dumps', 'json.dumps', ({(169, 26, 169, 32): 'result'}, {}), '(result)', False, 'import json\n'), ((101, 37, 101, 54), 'geolite2.geolite2.reader', 'geolite2.reader', ({}, {}), '()', False, 'from geolite2 import geolite2\n')] |
KalifiaBillal/NeMo | nemo/collections/nlp/losses/__init__.py | 4fc670ad0c886be2623247921d4311ba30f486f8 | from nemo.collections.nlp.losses.sgd_loss import SGDDialogueStateLoss
| [] |
MrAGi/netrunner-cambridge | netrunner/test_settings.py | bae0603486c2aa5a980e8e19207452fb01ec2193 | # -*- coding: utf-8 -*-
from .settings import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ['LOCAL_DB_NAME'],
'USER': os.environ['LOCAL_DB_USER'],
'PASSWORD': os.environ['LOCAL_DB_PASSWORD'],
'HOST': '127.0.0.1',
'PORT': '5432',
}
} | [] |
thalles-dreissig20/Quebra_Cabeca | Python_Exercicios/calcula_terreno.py | eeb9458dbabac72d9867e5ec5d7f1aa9b5993d79 | def area(larg, comp):
a = larg * comp
    print(f'The area is {a}')
print('Land plot control')
print('-' * 20)
l = float(input('What is the width of the plot: '))
c = float(input('What is the length of the plot: '))
area(l, c) | []
romulogoleniesky/Python_C_E_V | Desafios/desafio_041.py | 2dcf5fb3505a20443788a284c52114c6434118ce | import datetime
ano = (datetime.datetime.now()).year
nasc = int(input("Digite o seu ano de nascimento: "))
categoria = 0
if (ano - nasc) <= 9:
categoria = str("MIRIM")
elif 9 < (ano - nasc) <= 14:
categoria = str("INFANTIL")
elif 14 < (ano - nasc) <= 19 :
categoria = str("JUNIOR")
elif 19 < (ano - nasc) <= 25:
categoria = str("SÊNIOR")
else:
categoria = str("MASTER")
print(f"A categoria do atleta é {str(categoria)}.")
| [((2, 7, 2, 30), 'datetime.datetime.now', 'datetime.datetime.now', ({}, {}), '()', False, 'import datetime\n')] |
RecoHut-Stanzas/S168471 | eval/metrics.py | 7e0ac621c36f839e1df6876ec517d0ad00672790 | import torch
def ndcg_binary_at_k_batch_torch(X_pred, heldout_batch, k=100, device='cpu'):
"""
    Normalized Discounted Cumulative Gain@k for predictions [B, I] and ground-truth [B, I], with binary relevance.
    ASSUMPTION: all the 0's in heldout_batch indicate 0 relevance.
"""
batch_users = X_pred.shape[0] # batch_size
_, idx_topk = torch.topk(X_pred, k, dim=1, sorted=True)
tp = 1. / torch.log2(torch.arange(2, k + 2, device=device).float())
heldout_batch_nonzero = (heldout_batch > 0).float()
DCG = (heldout_batch_nonzero[torch.arange(batch_users, device=device).unsqueeze(1), idx_topk] * tp).sum(dim=1)
heldout_nonzero = (heldout_batch > 0).sum(dim=1) # num. of non-zero items per batch. [B]
IDCG = torch.tensor([(tp[:min(n, k)]).sum() for n in heldout_nonzero]).to(device)
return DCG / IDCG
def recall_at_k_batch_torch(X_pred, heldout_batch, k=100):
"""
Recall@k for predictions [B, I] and ground-truth [B, I].
"""
batch_users = X_pred.shape[0]
_, topk_indices = torch.topk(X_pred, k, dim=1, sorted=False) # [B, K]
X_pred_binary = torch.zeros_like(X_pred)
if torch.cuda.is_available():
X_pred_binary = X_pred_binary.cuda()
X_pred_binary[torch.arange(batch_users).unsqueeze(1), topk_indices] = 1
    X_true_binary = (heldout_batch > 0).float()  # [B, I]
k_tensor = torch.tensor([k], dtype=torch.float32)
if torch.cuda.is_available():
X_true_binary = X_true_binary.cuda()
k_tensor = k_tensor.cuda()
tmp = (X_true_binary * X_pred_binary).sum(dim=1).float()
recall = tmp / torch.min(k_tensor, X_true_binary.sum(dim=1).float())
return recall
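# Minimal smoke test: a sketch with made-up scores for 2 users over 4 items,
# chosen so every user has at least one relevant item (IDCG would be 0 otherwise).
if __name__ == '__main__':
    X_pred = torch.tensor([[0.9, 0.1, 0.8, 0.2],
                           [0.3, 0.7, 0.2, 0.6]])
    heldout = torch.tensor([[1.0, 0.0, 1.0, 0.0],
                            [0.0, 1.0, 0.0, 0.0]])
    print(ndcg_binary_at_k_batch_torch(X_pred, heldout, k=2))  # per-user NDCG@2 -> tensor([1., 1.])
    print(recall_at_k_batch_torch(X_pred, heldout, k=2))       # per-user Recall@2 -> tensor([1., 1.])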
| [((11, 18, 11, 59), 'torch.topk', 'torch.topk', (), '', False, 'import torch\n'), ((25, 22, 25, 64), 'torch.topk', 'torch.topk', (), '', False, 'import torch\n'), ((26, 20, 26, 44), 'torch.zeros_like', 'torch.zeros_like', ({(26, 37, 26, 43): 'X_pred'}, {}), '(X_pred)', False, 'import torch\n'), ((27, 7, 27, 32), 'torch.cuda.is_available', 'torch.cuda.is_available', ({}, {}), '()', False, 'import torch\n'), ((31, 15, 31, 53), 'torch.tensor', 'torch.tensor', (), '', False, 'import torch\n'), ((32, 7, 32, 32), 'torch.cuda.is_available', 'torch.cuda.is_available', ({}, {}), '()', False, 'import torch\n'), ((12, 25, 12, 62), 'torch.arange', 'torch.arange', (), '', False, 'import torch\n'), ((29, 18, 29, 43), 'torch.arange', 'torch.arange', ({(29, 31, 29, 42): 'batch_users'}, {}), '(batch_users)', False, 'import torch\n'), ((14, 33, 14, 73), 'torch.arange', 'torch.arange', (), '', False, 'import torch\n')] |
justinshenk/simba | simba/run_dash_tkinter.py | a58ccd0ceeda201c1452d186033ce6b25fbab564 | # All credit to https://stackoverflow.com/questions/46571448/tkinter-and-a-html-file - thanks DELICA - https://stackoverflow.com/users/7027346/delica
from cefpython3 import cefpython as cef
import ctypes
try:
import tkinter as tk
from tkinter import messagebox
except ImportError:
    import Tkinter as tk
    import tkMessageBox as messagebox  # Py2 fallback; messagebox is used in on_closing() below
import sys
import platform
import logging as _logging
# Fix for PyCharm hints warnings
WindowUtils = cef.WindowUtils()
# Platforms
WINDOWS = (platform.system() == "Windows")
LINUX = (platform.system() == "Linux")
MAC = (platform.system() == "Darwin")
# Globals
logger = _logging.getLogger("tkinter_.py")
url = "localhost:8050/"
class MainFrame(tk.Frame):
def __init__(self, root):
self.closing = False
self.browser = None
# Root
root.geometry("900x640")
tk.Grid.rowconfigure(root, 0, weight=1)
tk.Grid.columnconfigure(root, 0, weight=1)
# MainFrame
tk.Frame.__init__(self, root)
self.master.title('SimBA Dashboard')
self.master.protocol("WM_DELETE_WINDOW", self.on_close)
self.bind("<Configure>", self.on_configure)
self.bind("<FocusIn>", self.on_focus_in)
self.bind("<FocusOut>", self.on_focus_out)
self.focus_set()
# Pack MainFrame
self.pack(fill=tk.BOTH, expand=tk.YES)
def embed_browser(self):
window_info = cef.WindowInfo()
rect = [0, 0, self.winfo_width(), self.winfo_height()]
window_info.SetAsChild(self.get_window_handle(), rect)
self.browser = cef.CreateBrowserSync(window_info,
url=url) #todo
assert self.browser
self.browser.SetClientHandler(LoadHandler(self))
self.browser.SetClientHandler(FocusHandler(self))
self.message_loop_work()
def get_window_handle(self):
if self.winfo_id() > 0:
return self.winfo_id()
else:
raise Exception("Couldn't obtain window handle")
def message_loop_work(self):
cef.MessageLoopWork()
self.after(10, self.message_loop_work)
def on_configure(self, event):
width = event.width
height = event.height
if self.browser:
if WINDOWS:
ctypes.windll.user32.SetWindowPos(
self.browser.GetWindowHandle(), 0,
0, 0, width, height, 0x0002)
elif LINUX:
self.browser.SetBounds(0, 0, width, height)
self.browser.NotifyMoveOrResizeStarted()
if not self.browser:
self.embed_browser()
def on_focus_in(self, _):
logger.debug("BrowserFrame.on_focus_in")
if self.browser:
self.browser.SetFocus(True)
self.focus_set()
def on_focus_out(self, _):
logger.debug("BrowserFrame.on_focus_out")
if self.browser:
self.browser.SetFocus(False)
def on_close(self):
if self.browser:
self.browser.CloseBrowser(True)
self.clear_browser_references()
self.destroy()
self.master.destroy()
def get_browser(self):
if self.browser:
return self.browser
return None
def clear_browser_references(self):
self.browser = None
class LoadHandler(object):
def __init__(self, browser_frame):
self.browser_frame = browser_frame
class FocusHandler(object):
def __init__(self, browser):
self.browser = browser
def OnTakeFocus(self, next_component, **_):
logger.debug("FocusHandler.OnTakeFocus, next={next}"
.format(next=next_component))
def OnSetFocus(self, source, **_):
logger.debug("FocusHandler.OnSetFocus, source={source}"
.format(source=source))
return False
def OnGotFocus(self, **_):
"""Fix CEF focus issues (#255). Call browser frame's focus_set
to get rid of type cursor in url entry widget."""
logger.debug("FocusHandler.OnGotFocus")
self.browser.focus_set()
# if __name__ == '__main__':
logger.setLevel(_logging.INFO)
stream_handler = _logging.StreamHandler()
formatter = _logging.Formatter("[%(filename)s] %(message)s")
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
logger.info("CEF Python {ver}".format(ver=cef.__version__))
logger.info("Python {ver} {arch}".format(
ver=platform.python_version(), arch=platform.architecture()[0]))
logger.info("Tk {ver}".format(ver=tk.Tcl().eval('info patchlevel')))
assert cef.__version__ >= "55.3", "CEF Python v55.3+ required to run this"
sys.excepthook = cef.ExceptHook # To shutdown all CEF processes on error
root = tk.Tk()
app = MainFrame(root)
def on_closing():
if messagebox.askokcancel("Quit", "Do you want to quit?"):
root.destroy()
root.protocol("WM_DELETE_WINDOW", on_closing)
# Tk must be initialized before CEF otherwise fatal error (Issue #306)
cef.Initialize()
root.mainloop()
# app.mainloop()
cef.Shutdown()
| [((16, 14, 16, 31), 'cefpython3.cefpython.WindowUtils', 'cef.WindowUtils', ({}, {}), '()', True, 'from cefpython3 import cefpython as cef\n'), ((24, 9, 24, 42), 'logging.getLogger', '_logging.getLogger', ({(24, 28, 24, 41): '"""tkinter_.py"""'}, {}), "('tkinter_.py')", True, 'import logging as _logging\n'), ((142, 17, 142, 41), 'logging.StreamHandler', '_logging.StreamHandler', ({}, {}), '()', True, 'import logging as _logging\n'), ((143, 12, 143, 60), 'logging.Formatter', '_logging.Formatter', ({(143, 31, 143, 59): '"""[%(filename)s] %(message)s"""'}, {}), "('[%(filename)s] %(message)s')", True, 'import logging as _logging\n'), ((152, 7, 152, 14), 'Tkinter.Tk', 'tk.Tk', ({}, {}), '()', True, 'import Tkinter as tk\n'), ((161, 0, 161, 16), 'cefpython3.cefpython.Initialize', 'cef.Initialize', ({}, {}), '()', True, 'from cefpython3 import cefpython as cef\n'), ((164, 0, 164, 14), 'cefpython3.cefpython.Shutdown', 'cef.Shutdown', ({}, {}), '()', True, 'from cefpython3 import cefpython as cef\n'), ((19, 11, 19, 28), 'platform.system', 'platform.system', ({}, {}), '()', False, 'import platform\n'), ((20, 9, 20, 26), 'platform.system', 'platform.system', ({}, {}), '()', False, 'import platform\n'), ((21, 7, 21, 24), 'platform.system', 'platform.system', ({}, {}), '()', False, 'import platform\n'), ((155, 7, 155, 61), 'tkinter.messagebox.askokcancel', 'messagebox.askokcancel', ({(155, 30, 155, 36): '"""Quit"""', (155, 38, 155, 60): '"""Do you want to quit?"""'}, {}), "('Quit', 'Do you want to quit?')", False, 'from tkinter import messagebox\n'), ((35, 8, 35, 47), 'Tkinter.Grid.rowconfigure', 'tk.Grid.rowconfigure', (), '', True, 'import Tkinter as tk\n'), ((36, 8, 36, 50), 'Tkinter.Grid.columnconfigure', 'tk.Grid.columnconfigure', (), '', True, 'import Tkinter as tk\n'), ((39, 8, 39, 37), 'Tkinter.Frame.__init__', 'tk.Frame.__init__', ({(39, 26, 39, 30): 'self', (39, 32, 39, 36): 'root'}, {}), '(self, root)', True, 'import Tkinter as tk\n'), ((51, 22, 51, 38), 'cefpython3.cefpython.WindowInfo', 'cef.WindowInfo', ({}, {}), '()', True, 'from cefpython3 import cefpython as cef\n'), ((54, 23, 55, 53), 'cefpython3.cefpython.CreateBrowserSync', 'cef.CreateBrowserSync', (), '', True, 'from cefpython3 import cefpython as cef\n'), ((68, 8, 68, 29), 'cefpython3.cefpython.MessageLoopWork', 'cef.MessageLoopWork', ({}, {}), '()', True, 'from cefpython3 import cefpython as cef\n'), ((148, 12, 148, 37), 'platform.python_version', 'platform.python_version', ({}, {}), '()', False, 'import platform\n'), ((148, 44, 148, 67), 'platform.architecture', 'platform.architecture', ({}, {}), '()', False, 'import platform\n'), ((149, 34, 149, 42), 'Tkinter.Tcl', 'tk.Tcl', ({}, {}), '()', True, 'import Tkinter as tk\n')] |
sfpd/rlreloaded | domain_data/mujoco_worlds/make_xml.py | 650c64ec22ad45996c8c577d85b1a4f20aa1c692 | import re
def do_substitution(in_lines):
lines_iter = iter(in_lines)
defn_lines = []
while True:
try:
            line = next(lines_iter)  # works on both Python 2 and 3
except StopIteration:
raise RuntimeError("didn't find line starting with ---")
if line.startswith('---'):
break
else:
defn_lines.append(line)
d = {}
exec("\n".join(defn_lines), d)
pat = re.compile("\$\((.+?)\)")
out_lines = []
for line in lines_iter:
matches = pat.finditer(line)
for m in matches:
line = line.replace(m.group(0), str(eval(m.group(1),d)))
out_lines.append(line)
return out_lines
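# Sketch of the expected ".xml.in" layout (hypothetical values): Python definitions,
# a line starting with "---", then a template whose $(expr) holes are eval'd against them:
#
#   density = 1000
#   ---
#   <geom density="$(density)" mass="$(density * 2)"/>
#
# which would render as: <geom density="1000" mass="2000"/>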
from glob import glob
import os.path as osp
infiles = glob(osp.join(osp.dirname(__file__),"*.xml.in"))
for fname in infiles:
with open(fname,"r") as fh:
in_lines = fh.readlines()
out_lines = do_substitution(in_lines)
outfname = fname[:-3]
with open(outfname,"w") as fh:
fh.writelines(out_lines)
| [((16, 10, 16, 35), 're.compile', 're.compile', ({(16, 21, 16, 34): '"""\\\\$\\\\((.+?)\\\\)"""'}, {}), "('\\\\$\\\\((.+?)\\\\)')", False, 'import re\n'), ((31, 24, 31, 45), 'os.path.dirname', 'osp.dirname', ({(31, 36, 31, 44): '__file__'}, {}), '(__file__)', True, 'import os.path as osp\n')] |
cahyareza/django_admin_cookbook | myproject/apps/events/migrations/0002_alter_eventhero_options.py | 6c82dbd3aebe455b68feb020d5cad7978b8191b7 | # Generated by Django 3.2.12 on 2022-03-28 11:57
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('events', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='eventhero',
options={'verbose_name_plural': 'Event heroes'},
),
]
| [((13, 8, 16, 9), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', (), '', False, 'from django.db import migrations\n')] |
bmintz/python-snippets | hilton_sign_in.py | 982861c173bf4bcd5d908514a9e8b1914a580a5d | #!/usr/bin/env python3
# encoding: utf-8
import sys
import urllib.parse
import selenium.webdriver
def exit():
driver.quit()
sys.exit(0)
driver = selenium.webdriver.Firefox()
# for some reason, detectportal.firefox.com and connectivitycheck.gstatic.com are not blocked
# therefore, they cannot be used to detect connectivity
# we instead visit another site that is known not to ever have TLS
driver.get('http://neverssl.com')
if 'neverssl.com' in urllib.parse.urlparse(driver.current_url).netloc:
exit()
driver.find_element_by_css_selector('label[for="promo_button"]').click()
driver.find_element_by_css_selector('input[alt="Next"]').click()
driver.find_element_by_css_selector('#PromotionCode').send_keys('lobby18')
driver.find_element_by_css_selector('input[alt="Connect"]').click()
exit()
| [((11, 1, 11, 12), 'sys.exit', 'sys.exit', ({(11, 10, 11, 11): '(0)'}, {}), '(0)', False, 'import sys\n')] |
rhyswhitley/savanna_iav | src/figures/trends/leaf_response.py | 4eadf29a4e9c05d0b14d3b9c973eb8db3ea7edba | #!/usr/bin/env python
import os
from collections import OrderedDict
try:
    import cPickle as pickle  # Python 2
except ImportError:
    import pickle  # Python 3: plain pickle already uses the C implementation
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from matplotlib.cm import get_cmap
from matplotlib import style
from scipy import stats
from scipy import integrate
def plot_monthly_response(norm, pert):
plot_grid = gridspec.GridSpec(4, 1, hspace=0.1)
ax1 = plt.subplot(plot_grid[0])
ax2 = plt.subplot(plot_grid[1])
ax3 = plt.subplot(plot_grid[2])
ax4 = plt.subplot(plot_grid[3])
# Stomatal conductance
ax1.plot(norm["Gtree"].values)
ax1.plot(pert["Gtree"].values)
# Leaf transpiration
ax2.plot(norm["Etree"].values)
ax2.plot(pert["Etree"].values)
# Leaf assimilation
ax3.plot(norm["Atree"].values)
ax3.plot(pert["Atree"].values)
ax4.plot(norm["LAItree"].values)
ax4.plot(pert["LAItree"].values)
ax4.plot(norm["LAIgrass"].values)
ax4.plot(pert["LAIgrass"].values)
plt.show()
return 1
def main():
data_dict = pickle.load(open(PKLPATH, 'rb'))
year_agg = lambda x: x.groupby(level=['month', 'hour']).mean()
data_mean_year = [year_agg(df) \
for df in OrderedDict(data_dict).values()]
# **FOR LOOP WILL GO HERE
plot_monthly_response(data_mean_year[3], data_mean_year[6])
return 1
if __name__ == "__main__":
FILEPATH = "~/Savanna/Data/HowardSprings_IAV/pickled/agg/mean_monthly_leaf.pkl"
PKLPATH = os.path.expanduser(FILEPATH)
main()
| [((17, 16, 17, 51), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', (), '', True, 'import matplotlib.gridspec as gridspec\n'), ((19, 10, 19, 35), 'matplotlib.pyplot.subplot', 'plt.subplot', ({(19, 22, 19, 34): 'plot_grid[0]'}, {}), '(plot_grid[0])', True, 'import matplotlib.pyplot as plt\n'), ((20, 10, 20, 35), 'matplotlib.pyplot.subplot', 'plt.subplot', ({(20, 22, 20, 34): 'plot_grid[1]'}, {}), '(plot_grid[1])', True, 'import matplotlib.pyplot as plt\n'), ((21, 10, 21, 35), 'matplotlib.pyplot.subplot', 'plt.subplot', ({(21, 22, 21, 34): 'plot_grid[2]'}, {}), '(plot_grid[2])', True, 'import matplotlib.pyplot as plt\n'), ((22, 10, 22, 35), 'matplotlib.pyplot.subplot', 'plt.subplot', ({(22, 22, 22, 34): 'plot_grid[3]'}, {}), '(plot_grid[3])', True, 'import matplotlib.pyplot as plt\n'), ((41, 4, 41, 14), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((62, 14, 62, 42), 'os.path.expanduser', 'os.path.expanduser', ({(62, 33, 62, 41): 'FILEPATH'}, {}), '(FILEPATH)', False, 'import os\n'), ((52, 32, 52, 54), 'collections.OrderedDict', 'OrderedDict', ({(52, 44, 52, 53): 'data_dict'}, {}), '(data_dict)', False, 'from collections import OrderedDict\n')] |
vprnet/school-closings | app/index.py | 04c63170ea36cabe0a3486f0e58830952e1fd0a8 | #!/usr/local/bin/python2.7
from flask import Flask
import sys
from flask_frozen import Freezer
from upload_s3 import set_metadata
from config import AWS_DIRECTORY
app = Flask(__name__)
app.config.from_object('config')
from views import *
# Serving from s3 leads to some complications in how static files are served
PROJECT_ROOT = '/'  # default, so the middleware below never sees an undefined name
if len(sys.argv) > 1:
    if sys.argv[1] == 'build':
        PROJECT_ROOT = '/' + AWS_DIRECTORY
    elif sys.argv[1] == 'test':
        PROJECT_ROOT = '/www.vpr.net/' + AWS_DIRECTORY
class WebFactionMiddleware(object):
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
environ['SCRIPT_NAME'] = PROJECT_ROOT
return self.app(environ, start_response)
app.wsgi_app = WebFactionMiddleware(app.wsgi_app)
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == 'build':
app.debug = True
freezer = Freezer(app)
freezer.freeze()
set_metadata()
else:
app.run(debug=True)
| [((9, 6, 9, 21), 'flask.Flask', 'Flask', ({(9, 12, 9, 20): '__name__'}, {}), '(__name__)', False, 'from flask import Flask\n'), ((38, 18, 38, 30), 'flask_frozen.Freezer', 'Freezer', ({(38, 26, 38, 29): 'app'}, {}), '(app)', False, 'from flask_frozen import Freezer\n'), ((40, 8, 40, 22), 'upload_s3.set_metadata', 'set_metadata', ({}, {}), '()', False, 'from upload_s3 import set_metadata\n')] |
modwizcode/m1n1 | proxyclient/linux.py | 96d133e854dfe878ea39f9c994545a2026a446a8 | #!/usr/bin/python
from setup import *
payload = open(sys.argv[1], "rb").read()
dtb = open(sys.argv[2], "rb").read()
if len(sys.argv) > 3:
initramfs = open(sys.argv[3], "rb").read()
initramfs_size = len(initramfs)
else:
initramfs = None
initramfs_size = 0
compressed_size = len(payload)
compressed_addr = u.malloc(compressed_size)
dtb_addr = u.malloc(len(dtb))
print("Loading %d bytes to 0x%x..0x%x..." % (compressed_size, compressed_addr, compressed_addr + compressed_size))
iface.writemem(compressed_addr, payload, True)
print("Loading DTB to 0x%x..." % dtb_addr)
iface.writemem(dtb_addr, dtb)
kernel_size = 32 * 1024 * 1024
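# the arm64 Linux boot protocol expects the kernel image at a 2 MiB aligned base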
kernel_base = u.memalign(2 * 1024 * 1024, kernel_size)
print("Kernel_base: 0x%x" % kernel_base)
assert not (kernel_base & 0xffff)
if initramfs is not None:
initramfs_base = u.memalign(65536, initramfs_size)
print("Loading %d initramfs bytes to 0x%x..." % (initramfs_size, initramfs_base))
iface.writemem(initramfs_base, initramfs, True)
p.kboot_set_initrd(initramfs_base, initramfs_size)
if p.kboot_prepare_dt(dtb_addr):
print("DT prepare failed")
sys.exit(1)
#kernel_size = p.xzdec(compressed_addr, compressed_size)
#if kernel_size < 0:
#raise Exception("Decompression header check error!",)
#print("Uncompressed kernel size: %d bytes" % kernel_size)
print("Uncompressing...")
iface.dev.timeout = 40
kernel_size = p.gzdec(compressed_addr, compressed_size, kernel_base, kernel_size)
print(kernel_size)
if kernel_size < 0:
raise Exception("Decompression error!")
print("Decompress OK...")
p.dc_cvau(kernel_base, kernel_size)
p.ic_ivau(kernel_base, kernel_size)
print("Ready to boot")
daif = u.mrs(DAIF)
daif |= 0x3c0
u.msr(DAIF, daif)
print("DAIF: %x" % daif)
p.kboot_boot(kernel_base)
iface.ttymode()
| [] |
shizhongpwn/ancypwn | src/server.py | 716146e4986c514754492c8503ab196eecb9466d | import json
import os
import multiprocessing
import struct
import importlib
from socketserver import TCPServer, StreamRequestHandler
class PluginNotFoundError(Exception):
    """Raised when the required terminal plugin is not installed."""
def plugin_module_import(name):
    try:
        return importlib.import_module(name)
    except ModuleNotFoundError:
        prompt = 'plugin {} not found, please install it first.\n'.format(name)
        prompt += 'try following:\n\tpip3 install {}'.format(name)
        raise PluginNotFoundError(prompt)
class NotificationHandler(StreamRequestHandler):
def handle(self):
length = struct.unpack('<I', self.request.recv(4))[0]
json_content = self.request.recv(length)
content = json.loads(json_content)
terminal = content['terminal']
if content['exec'] != '':
command = 'ancypwn attach -c \'{}\''.format(content['exec'])
else:
command = 'ancypwn attach'
realname = 'ancypwn_terminal_{}'.format(terminal)
mod = plugin_module_import(realname)
mod.run(command)
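    # Wire format handled above: a 4-byte little-endian length prefix followed by a
    # JSON object such as {"terminal": "gnome", "exec": ""}.
    # Hypothetical client sketch (port and plugin name are illustrative only):
    #   import json, socket, struct
    #   body = json.dumps({'terminal': 'gnome', 'exec': ''}).encode()
    #   with socket.create_connection(('127.0.0.1', 15111)) as s:
    #       s.sendall(struct.pack('<I', len(body)) + body)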
class ServerProcess(multiprocessing.Process):
def __init__(self, port, *args, **kwargs):
super(ServerProcess, self).__init__(*args, **kwargs)
self.port = port
def run(self):
self.server = TCPServer(('', self.port), NotificationHandler)
self.server.serve_forever()
| [((11, 15, 11, 44), 'importlib.import_module', 'importlib.import_module', ({(11, 39, 11, 43): 'name'}, {}), '(name)', False, 'import importlib\n'), ((22, 18, 22, 42), 'json.loads', 'json.loads', ({(22, 29, 22, 41): 'json_content'}, {}), '(json_content)', False, 'import json\n'), ((40, 22, 40, 69), 'socketserver.TCPServer', 'TCPServer', ({(40, 32, 40, 47): "('', self.port)", (40, 49, 40, 68): 'NotificationHandler'}, {}), "(('', self.port), NotificationHandler)", False, 'from socketserver import TCPServer, StreamRequestHandler\n')] |
c-hofer/pytorch_utils | pytorch_utils/collection_utils.py | 55278272690937ff1180c8d549bc866a63a5ac51 | def keychain_value_iter(d, key_chain=None, allowed_values=None):
key_chain = [] if key_chain is None else list(key_chain).copy()
if not isinstance(d, dict):
if allowed_values is not None:
assert isinstance(d, allowed_values), 'Value needs to be of type {}!'.format(
allowed_values)
yield key_chain, d
else:
for k, v in d.items():
yield from keychain_value_iter(
v,
key_chain + [k],
allowed_values=allowed_values) | [] |
zace3d/video_analysis | speech_to_text/views.py | 9001486ae64160ca497f6b9a99df5d9a5c5422cc | from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from . import helpers
# Create your views here.
@csrf_exempt
def convert_video(request, version):
# Get video
video = request.FILES['video']
# Transcribe video and extract audio
response = helpers.transcribe_file(video)
context = response
# return render(request, 'api/v1/result_successful.html', context)
return JsonResponse(context, safe=False) | [((22, 11, 22, 44), 'django.http.JsonResponse', 'JsonResponse', (), '', False, 'from django.http import JsonResponse\n')] |
boladmin/security_monkey | security_monkey/watchers/vpc/vpn.py | c28592ffd518fa399527d26262683fc860c30eef | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module: security_monkey.watchers.vpc.vpn
:platform: Unix
.. version:: $$VERSION$$
.. moduleauthor:: Alex Cline <[email protected]> @alex.cline
"""
from cloudaux.aws.ec2 import describe_vpn_connections
from security_monkey.cloudaux_watcher import CloudAuxWatcher
from security_monkey.watcher import ChangeItem
DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
class VPN(CloudAuxWatcher):
index = 'vpn'
i_am_singular = 'VPN Connection'
i_am_plural = 'VPN Connections'
def __init__(self, *args, **kwargs):
super(VPN, self).__init__(*args, **kwargs)
self.honor_ephemerals = True
self.ephemeral_paths = [
'VgwTelemetry$*$LastStatusChange',
'VgwTelemetry$*$Status',
'VgwTelemetry$*$StatusMessage',
]
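        # path syntax note: '$' separates nesting levels and '*' appears to wildcard the
        # VgwTelemetry list index, so these fields are treated as ephemeral on every tunnel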
def get_name_from_list_output(self, item):
if item.get("Tags"):
for tag in item["Tags"]:
if tag["Key"] == "Name":
return "{} ({})".format(tag["Value"], item["VpnConnectionId"])
return item["VpnConnectionId"]
def list_method(self, **kwargs):
return describe_vpn_connections(**kwargs)
def get_method(self, item, **kwargs):
# Remove the CustomerGatewayConfiguration -- it's not necessary as all the details are present anyway:
item.pop("CustomerGatewayConfiguration", None)
# Set the ARN:
item["Arn"] = "arn:aws:ec2:{region}:{account}:vpn-connection/{id}".format(region=kwargs["region"],
account=kwargs["account_number"],
id=item["VpnConnectionId"])
# Cast the datetimes to something JSON serializable (ISO 8601 string):
for vgw in item.get("VgwTelemetry", []):
if vgw.get("LastStatusChange"):
vgw["LastStatusChange"] = vgw["LastStatusChange"].strftime(DATETIME_FORMAT)
return item
class VPNItem(ChangeItem):
def __init__(self, region=None, account=None, name=None, arn=None, config=None, source_watcher=None):
super(VPNItem, self).__init__(
index=VPN.index,
region=region,
account=account,
name=name,
arn=arn,
new_config=config if config else {},
source_watcher=source_watcher)
| [((51, 15, 51, 49), 'cloudaux.aws.ec2.describe_vpn_connections', 'describe_vpn_connections', ({}, {}), '(**kwargs)', False, 'from cloudaux.aws.ec2 import describe_vpn_connections\n')] |
coush001/Imperial-MSc-Group-Project-2 | particle.py | 9309217895802d11c6fe9d2dca9b21f98fbc1c61 | from itertools import count
import numpy as np
class Particle(object):
"""Object containing all the properties for a single particle"""
_ids = count(0)
def __init__(self, main_data=None, x=np.zeros(2)):
self.id = next(self._ids)
self.main_data = main_data
self.x = np.array(x)
self.v = np.zeros(2)
self.a = np.zeros(2)
self.D = 0
self.rho = main_data.rho0
self.P = 0
self.m = main_data.dx ** 2 * main_data.rho0 # initial mass depends on the initial particle spacing
self.boundary = False # Particle by default is not on the boundary
# For predictor corrector
self.prev_x = np.array(x)
self.prev_v = np.zeros(2)
self.prev_rho = main_data.rho0
def calc_index(self):
"""Calculates the 2D integer index for the particle's location in the search grid"""
# Calculates the bucket coordinates
self.list_num = np.array((self.x - self.main_data.min_x) /
(2.0 * self.main_data.h), int)
def B(self):
return (self.main_data.rho0 * self.main_data.c0 ** 2) / self.main_data.gamma
def update_P(self):
"""
Equation of state
System is assumed slightly compressible
"""
rho0 = self.main_data.rho0
gamma = self.main_data.gamma
self.P = self.B() * ((self.rho / rho0)**gamma - 1)
def set_main_data(self, main_data):
self.main_data = main_data
def set_x(self, x):
self.x = x
self.calc_index()
def set_v(self, v):
self.v = v
def set_a(self, a):
self.a = a
def set_D(self, D):
self.D = D
def set_rho(self, rho):
self.rho = rho
self.update_P()
    def set_m(self, m):
        # renamed from `m`: the instance attribute self.m set in __init__ shadowed the old method name
        self.m = m
def list_attributes(self):
x_s = "position: " + str(self.x) + ", "
v_s = "velocity: " + str(self.v) + ", "
a_s = "acceleration: " + str(self.a) + ", "
D_s = "derivative of density: " + str(self.D) + ", "
rho_s = "density: " + str(self.rho) + ", "
m_s = "mass: " + str(self.m) + ", "
P_s = "pressure: " + str(self.P) + ", "
boundary_s = "is boundary: " + str(self.boundary)
return [x_s + v_s + a_s + D_s + rho_s + m_s + P_s + boundary_s]
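# Hedged usage sketch: `_Cfg` is a stand-in for the solver's main_data object, and the
# constants are generic SPH water values, not taken from this project.
if __name__ == '__main__':
    class _Cfg(object):
        rho0, gamma, c0 = 1000.0, 7.0, 20.0
        dx, h = 0.1, 0.13
        min_x = np.zeros(2)
    p = Particle(_Cfg(), x=np.array([0.5, 0.5]))
    p.calc_index()
    p.set_rho(1010.0)  # also updates pressure via the equation of state
    print(p.list_attributes())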
| [((8, 11, 8, 19), 'itertools.count', 'count', ({(8, 17, 8, 18): '0'}, {}), '(0)', False, 'from itertools import count\n'), ((10, 41, 10, 52), 'numpy.zeros', 'np.zeros', ({(10, 50, 10, 51): '(2)'}, {}), '(2)', True, 'import numpy as np\n'), ((13, 17, 13, 28), 'numpy.array', 'np.array', ({(13, 26, 13, 27): 'x'}, {}), '(x)', True, 'import numpy as np\n'), ((14, 17, 14, 28), 'numpy.zeros', 'np.zeros', ({(14, 26, 14, 27): '2'}, {}), '(2)', True, 'import numpy as np\n'), ((15, 17, 15, 28), 'numpy.zeros', 'np.zeros', ({(15, 26, 15, 27): '2'}, {}), '(2)', True, 'import numpy as np\n'), ((22, 22, 22, 33), 'numpy.array', 'np.array', ({(22, 31, 22, 32): 'x'}, {}), '(x)', True, 'import numpy as np\n'), ((23, 22, 23, 33), 'numpy.zeros', 'np.zeros', ({(23, 31, 23, 32): '2'}, {}), '(2)', True, 'import numpy as np\n'), ((29, 24, 30, 63), 'numpy.array', 'np.array', ({(29, 33, 30, 57): '(self.x - self.main_data.min_x) / (2.0 * self.main_data.h)', (30, 59, 30, 62): 'int'}, {}), '((self.x - self.main_data.min_x) / (2.0 * self.main_data.h), int)', True, 'import numpy as np\n')] |
hussein18149/PITCHBOARD | app/main/form.py | 9aa515f8dd18464830bdf80488a317e8e791bd1b | from flask_wtf import FlaskForm
from wtforms import StringField,TextAreaField,SubmitField
from wtforms.validators import Required
class UpdateProfile(FlaskForm):
about = TextAreaField('Tell us about you.',validators = [Required()])
submit = SubmitField('Submit')
class PitchForm(FlaskForm):
pitch = TextAreaField('Write a pitch')
submit = SubmitField('Submit')
class PitchComForm(FlaskForm):
pitchcom = TextAreaField('comment on your pitch ')
submit = SubmitField('Submit')
| [((7, 13, 7, 34), 'wtforms.SubmitField', 'SubmitField', ({(7, 25, 7, 33): '"""Submit"""'}, {}), "('Submit')", False, 'from wtforms import StringField, TextAreaField, SubmitField\n'), ((10, 12, 10, 42), 'wtforms.TextAreaField', 'TextAreaField', ({(10, 26, 10, 41): '"""Write a pitch"""'}, {}), "('Write a pitch')", False, 'from wtforms import StringField, TextAreaField, SubmitField\n'), ((11, 13, 11, 34), 'wtforms.SubmitField', 'SubmitField', ({(11, 25, 11, 33): '"""Submit"""'}, {}), "('Submit')", False, 'from wtforms import StringField, TextAreaField, SubmitField\n'), ((14, 15, 14, 54), 'wtforms.TextAreaField', 'TextAreaField', ({(14, 29, 14, 53): '"""comment on your pitch """'}, {}), "('comment on your pitch ')", False, 'from wtforms import StringField, TextAreaField, SubmitField\n'), ((15, 13, 15, 34), 'wtforms.SubmitField', 'SubmitField', ({(15, 25, 15, 33): '"""Submit"""'}, {}), "('Submit')", False, 'from wtforms import StringField, TextAreaField, SubmitField\n'), ((6, 61, 6, 71), 'wtforms.validators.Required', 'Required', ({}, {}), '()', False, 'from wtforms.validators import Required\n')] |
soar-telescope/dragons-soar | soar_instruments/sami/adclass.py | a1c600074f532c1af6bd59bc2cc662a1aecd39c4 | import re
import astrodata
from astrodata import (astro_data_tag, TagSet, astro_data_descriptor,
returns_list)
from astrodata.fits import FitsLoader, FitsProvider
from ..soar import AstroDataSOAR
class AstroDataSAMI(AstroDataSOAR):
__keyword_dict = dict(data_section='DATASEC', gain='GAIN')
@staticmethod
def _matches_data(source):
return source[0].header.get('INSTRUME', '').upper() in {'SAMI', 'SAM'}
@astrodata.astro_data_tag
def _tag_instrument(self):
# QUESTIONS:
# 1) is SAMI always used with the SAM AO?
# 2) is SAMI used only at one telescopes or multiple ones?
# ANSWER:
# 1) SAMI is always used withing SAM but not always with AO.
# 2) SAMI and SAM are only used at SOAR Telescope.
return astrodata.TagSet(['SAMI', 'SAM'])
@astrodata.astro_data_tag
def _tag_flat(self):
# Ideally, we would want 'IMAGE' to be set by the 'IMAGE' tag.
# But since OBSTYPE is being used for both, not clear how that
# can be done right now.
obstype = self.phu.get('OBSTYPE', '')
if 'FLAT' in obstype:
return astrodata.TagSet(['FLAT', 'CAL', 'IMAGE'])
@astrodata.astro_data_tag
def _tag_twilight(self):
if self.phu.get('OBSTYPE') == 'SFLAT':
return astrodata.TagSet(['TWILIGHT'])
@astrodata.astro_data_tag
def _tag_domeflat(self):
if self.phu.get('OBSTYPE') == 'DFLAT':
return astrodata.TagSet(['DOME'])
@astrodata.astro_data_tag
def _tag_acquisition(self):
# Ideally, we would want 'IMAGE' to be set by the 'IMAGE' tag.
# But since OBSTYPE is being used for both, not clear how that
# can be done right now.
filename = self.phu.get('FILENAME', '')
notes = self.phu.get('NOTES', '')
        if re.search('acq.[0-9]+', filename) or re.search('acq', notes, re.IGNORECASE):
return astrodata.TagSet(['ACQUISITION', 'IMAGE'])
@astrodata.astro_data_tag
def _tag_image(self):
# this one will need something like "if not FABRY keyword", I think.
if self.phu.get('OBSTYPE') == 'OBJECT':
return astrodata.TagSet(['IMAGE'])
@astrodata.astro_data_tag
def _tag_bias(self):
if self.phu.get('OBSTYPE') == 'ZERO':
return astrodata.TagSet(['BIAS', 'CAL'], blocks=['IMAGE', 'FABRY'])
@astrodata.astro_data_descriptor
def data_section(self, pretty=False):
"""
Returns the rectangular section that includes the pixels that would be
exposed to light. If pretty is False, a tuple of 0-based coordinates
is returned with format (x1, x2, y1, y2). If pretty is True, a keyword
value is returned without parsing as a string. In this format, the
coordinates are generally 1-based.
One tuple or string is return per extension/array, in a list. If the
method is called on a single slice, the section is returned as a tuple
or a string.
Parameters
----------
pretty : bool
If True, return the formatted string found in the header.
Returns
-------
tuple of integers or list of tuples
Location of the pixels exposed to light using Python slice values.
string or list of strings
Location of the pixels exposed to light using an IRAF section
format (1-based).
"""
return self._parse_section(self._keyword_for('data_section'), pretty)
@astrodata.astro_data_descriptor
def filter_name(self):
"""
Returns the name of the filter used according to the summary FILTERS
keyword.
Returns
-------
str
The name of the filter.
"""
return self.phu.get('FILTERS')
@astrodata.astro_data_descriptor
def gain(self):
"""
Gain of the amplifier
Returns
-------
float
The gain for each amplifier
"""
# Bruno: GAIN is set to "unavail" in the headers. Do you have
# the gain for each amp in some lookup table?
gain = []
for ad in self[1:]:
val = ad.hdr['gain']
if val != 'unavail':
gain.append(val)
else:
gain.append(None)
return gain
@classmethod
def load(cls, source):
def sami_parser(hdu):
            m = re.match(r'im(\d)', hdu.header.get('EXTNAME', ''))
if m:
hdu.header['EXTNAME'] = ('SCI', 'Added by AstroData')
hdu.header['EXTVER'] = (int(m.group(1)), 'Added by AstroData')
return cls(FitsLoader(FitsProvider).load(source,
extname_parser=sami_parser)) | [((26, 15, 26, 48), 'astrodata.TagSet', 'astrodata.TagSet', ({(26, 32, 26, 47): "['SAMI', 'SAM']"}, {}), "(['SAMI', 'SAM'])", False, 'import astrodata\n'), ((35, 19, 35, 61), 'astrodata.TagSet', 'astrodata.TagSet', ({(35, 36, 35, 60): "['FLAT', 'CAL', 'IMAGE']"}, {}), "(['FLAT', 'CAL', 'IMAGE'])", False, 'import astrodata\n'), ((40, 19, 40, 49), 'astrodata.TagSet', 'astrodata.TagSet', ({(40, 36, 40, 48): "['TWILIGHT']"}, {}), "(['TWILIGHT'])", False, 'import astrodata\n'), ((45, 19, 45, 45), 'astrodata.TagSet', 'astrodata.TagSet', ({(45, 36, 45, 44): "['DOME']"}, {}), "(['DOME'])", False, 'import astrodata\n'), ((55, 11, 55, 44), 're.search', 're.search', ({(55, 21, 55, 33): '"""acq.[0-9]+"""', (55, 35, 55, 43): 'filename'}, {}), "('acq.[0-9]+', filename)", False, 'import re\n'), ((55, 48, 55, 75), 're.search', 're.search', ({(55, 58, 55, 66): '"""/acq/i"""', (55, 69, 55, 74): 'notes'}, {}), "('/acq/i', notes)", False, 'import re\n'), ((56, 19, 56, 61), 'astrodata.TagSet', 'astrodata.TagSet', ({(56, 36, 56, 60): "['ACQUISITION', 'IMAGE']"}, {}), "(['ACQUISITION', 'IMAGE'])", False, 'import astrodata\n'), ((62, 19, 62, 46), 'astrodata.TagSet', 'astrodata.TagSet', ({(62, 36, 62, 45): "['IMAGE']"}, {}), "(['IMAGE'])", False, 'import astrodata\n'), ((67, 19, 67, 79), 'astrodata.TagSet', 'astrodata.TagSet', (), '', False, 'import astrodata\n'), ((142, 19, 142, 43), 'astrodata.fits.FitsLoader', 'FitsLoader', ({(142, 30, 142, 42): 'FitsProvider'}, {}), '(FitsProvider)', False, 'from astrodata.fits import FitsLoader, FitsProvider\n')] |
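A minimal usage sketch for the AstroDataSAMI class above (assumptions: importing the soar_instruments package registers the class with the astrodata factory, and a SAMI FITS frame exists on disk; the file name is hypothetical):

import astrodata
import soar_instruments  # assumed to register the SOAR/SAMI AstroData classes

ad = astrodata.open('sami_frame.fits')
print(ad.tags)           # e.g. {'SAMI', 'SAM', 'IMAGE'}
print(ad.filter_name())  # the FILTERS keyword, via the descriptor defined above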
t10471/python | practice/src/design_pattern/TemplateMethod.py | 75056454bfb49197eb44f6b4d6a1b0a0b4b408ec | # -*- coding: utf-8 -*-
# just plain inheritance (template method via subclassing)
class Base(object):
    def __init__(self):
        pass
    def meth(self, n):
        # template method: fixed entry point, delegates to the _meth() hook
        return self._meth(n)
    def _meth(self, n):
        # default hook: identity
        return n
class Pow(Base):
    def _meth(self, n):
        # overridden hook: n ** n
        return pow(n, n)
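# demo of the template method: meth() is the fixed entry point, _meth() the
# overridable hook (expected output: 3, then 27)
if __name__ == '__main__':
    print(Base().meth(3))
    print(Pow().meth(3))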
| [] |
yoon28/realsr-noise-injection | yoon/stage1_kernel.py | 402679490bf0972d09aaaadee3b5b9850c2a36e4 | import os, sys
import numpy as np
import cv2
import random
import torch
from configs import Config
from kernelGAN import KernelGAN
from data import DataGenerator
from learner import Learner
import tqdm
DATA_LOC = "/mnt/data/NTIRE2020/realSR/track2" # "/mnt/data/NTIRE2020/realSR/track1"
DATA_X = "DPEDiphone-tr-x" # "Corrupted-tr-x"
DATA_Y = "DPEDiphone-tr-y" # "Corrupted-tr-y"
DATA_VAL = "DPEDiphone-va" # "Corrupted-va-x"
def config_kernelGAN(afile):
img_folder = os.path.dirname(afile)
img_file = os.path.basename(afile)
out_dir = "yoon/kernels/track2"
params = ["--input_image_path", afile,
"--output_dir_path", out_dir,
"--noise_scale", str(1.0),
"--X4"]
conf = Config().parse(params)
conf.input2 = None
return conf
def estimate_kernel(img_file):
conf = config_kernelGAN(img_file)
kgan = KernelGAN(conf)
learner = Learner()
data = DataGenerator(conf, kgan)
for iteration in tqdm.tqdm(range(conf.max_iters), ncols=70):
[g_in, d_in, _] = data.__getitem__(iteration)
kgan.train(g_in, d_in)
learner.update(iteration, kgan)
kgan.finish()
if __name__ == "__main__":
seed_num = 0
torch.manual_seed(seed_num)
torch.cuda.manual_seed(seed_num)
torch.cuda.manual_seed_all(seed_num)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
np.random.seed(seed_num)
random.seed(seed_num)
# exit(0)
data = {"X":[os.path.join(DATA_LOC, DATA_X, f) for f in os.listdir(os.path.join(DATA_LOC, DATA_X)) if f[-4:] == ".png"],
"Y":[os.path.join(DATA_LOC, DATA_Y, f) for f in os.listdir(os.path.join(DATA_LOC, DATA_Y)) if f[-4:] == ".png"],
"val":[os.path.join(DATA_LOC, DATA_VAL, f) for f in os.listdir(os.path.join(DATA_LOC, DATA_VAL)) if f[-4:] == ".png"]}
Kernels = []
Noises = []
for f in data["X"]:
estimate_kernel(f)
print("fin.")
| [((21, 17, 21, 39), 'os.path.dirname', 'os.path.dirname', ({(21, 33, 21, 38): 'afile'}, {}), '(afile)', False, 'import os, sys\n'), ((22, 15, 22, 38), 'os.path.basename', 'os.path.basename', ({(22, 32, 22, 37): 'afile'}, {}), '(afile)', False, 'import os, sys\n'), ((35, 11, 35, 26), 'kernelGAN.KernelGAN', 'KernelGAN', ({(35, 21, 35, 25): 'conf'}, {}), '(conf)', False, 'from kernelGAN import KernelGAN\n'), ((36, 14, 36, 23), 'learner.Learner', 'Learner', ({}, {}), '()', False, 'from learner import Learner\n'), ((37, 11, 37, 36), 'data.DataGenerator', 'DataGenerator', ({(37, 25, 37, 29): 'conf', (37, 31, 37, 35): 'kgan'}, {}), '(conf, kgan)', False, 'from data import DataGenerator\n'), ((46, 4, 46, 31), 'torch.manual_seed', 'torch.manual_seed', ({(46, 22, 46, 30): 'seed_num'}, {}), '(seed_num)', False, 'import torch\n'), ((47, 4, 47, 36), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', ({(47, 27, 47, 35): 'seed_num'}, {}), '(seed_num)', False, 'import torch\n'), ((48, 4, 48, 40), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', ({(48, 31, 48, 39): 'seed_num'}, {}), '(seed_num)', False, 'import torch\n'), ((51, 4, 51, 28), 'numpy.random.seed', 'np.random.seed', ({(51, 19, 51, 27): 'seed_num'}, {}), '(seed_num)', True, 'import numpy as np\n'), ((52, 4, 52, 25), 'random.seed', 'random.seed', ({(52, 16, 52, 24): 'seed_num'}, {}), '(seed_num)', False, 'import random\n'), ((29, 11, 29, 19), 'configs.Config', 'Config', ({}, {}), '()', False, 'from configs import Config\n'), ((56, 17, 56, 50), 'os.path.join', 'os.path.join', ({(56, 30, 56, 38): 'DATA_LOC', (56, 40, 56, 46): 'DATA_X', (56, 48, 56, 49): 'f'}, {}), '(DATA_LOC, DATA_X, f)', False, 'import os, sys\n'), ((57, 17, 57, 50), 'os.path.join', 'os.path.join', ({(57, 30, 57, 38): 'DATA_LOC', (57, 40, 57, 46): 'DATA_Y', (57, 48, 57, 49): 'f'}, {}), '(DATA_LOC, DATA_Y, f)', False, 'import os, sys\n'), ((58, 19, 58, 54), 'os.path.join', 'os.path.join', ({(58, 32, 58, 40): 'DATA_LOC', (58, 42, 58, 50): 'DATA_VAL', (58, 52, 58, 53): 'f'}, {}), '(DATA_LOC, DATA_VAL, f)', False, 'import os, sys\n'), ((56, 71, 56, 101), 'os.path.join', 'os.path.join', ({(56, 84, 56, 92): 'DATA_LOC', (56, 94, 56, 100): 'DATA_X'}, {}), '(DATA_LOC, DATA_X)', False, 'import os, sys\n'), ((57, 71, 57, 101), 'os.path.join', 'os.path.join', ({(57, 84, 57, 92): 'DATA_LOC', (57, 94, 57, 100): 'DATA_Y'}, {}), '(DATA_LOC, DATA_Y)', False, 'import os, sys\n'), ((58, 75, 58, 107), 'os.path.join', 'os.path.join', ({(58, 88, 58, 96): 'DATA_LOC', (58, 98, 58, 106): 'DATA_VAL'}, {}), '(DATA_LOC, DATA_VAL)', False, 'import os, sys\n')] |
RDFLib/PyRDFa | test/rdfa/test_non_xhtml.py | efc24d4940910ca1e65900c25b62047301bbdcc7 | from unittest import TestCase
from pyRdfa import pyRdfa
class NonXhtmlTest(TestCase):
"""
    RDFa that is not in well-formed XHTML is passed through html5lib.
These tests make sure that this RDFa can be processed both from
a file, and from a URL.
"""
target1 = '<og:isbn>9780596516499</og:isbn>'
target2 = '<gr:typeOfGood rdf:resource="urn:x-domain:oreilly.com:product:9780596803391.EBOOK"/>'
def test_url(self):
g = pyRdfa().rdf_from_source('http://oreilly.com/catalog/9780596516499/')
self.assert_(self.target1.encode('utf-8') in g)
def test_file(self):
g = pyRdfa().rdf_from_source('test/rdfa/oreilly.html')
self.assert_(self.target2.encode('utf-8') in g)
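# outside the test harness the same call works directly; rdf_from_source
# accepts a local path or a URL and returns the serialized graph as a string:
if __name__ == '__main__':
    print(pyRdfa().rdf_from_source('test/rdfa/oreilly.html')[:200])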
| [((15, 12, 15, 20), 'pyRdfa.pyRdfa', 'pyRdfa', ({}, {}), '()', False, 'from pyRdfa import pyRdfa\n'), ((19, 12, 19, 20), 'pyRdfa.pyRdfa', 'pyRdfa', ({}, {}), '()', False, 'from pyRdfa import pyRdfa\n')] |
jr3cermak/robs-kitchensink | python/pyoai/setup.py | 74b7eb1b1acd8b700d61c5a9ba0c69be3cc6763a | from setuptools import setup, find_packages
from os.path import join, dirname
setup(
name='pyoai',
version='2.4.6.b',
author='Infrae',
author_email='[email protected]',
url='https://github.com/jr3cermak/robs-kitchensink/tree/master/python/pyoai',
classifiers=["Development Status :: 4 - Beta",
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Topic :: Software Development :: Libraries :: Python Modules",
"Environment :: Web Environment"],
description="""\
The oaipmh module is a Python implementation of an "Open Archives
Initiative Protocol for Metadata Harvesting" (version 2) client and server.
The protocol is described here:
http://www.openarchives.org/OAI/openarchivesprotocol.html
""",
long_description=(open(join(dirname(__file__), 'README.rst')).read()+
'\n\n'+
open(join(dirname(__file__), 'HISTORY.txt')).read()),
packages=find_packages('src'),
package_dir = {'': 'src'},
zip_safe=False,
license='BSD',
keywords='OAI-PMH xml archive',
install_requires=['lxml'],
)
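# a small harvesting sketch against the oaipmh module this package installs
# (the endpoint URL is a placeholder; kept as a comment so setup.py stays inert):
#
#   from oaipmh.client import Client
#   from oaipmh.metadata import MetadataRegistry, oai_dc_reader
#
#   registry = MetadataRegistry()
#   registry.registerReader('oai_dc', oai_dc_reader)
#   client = Client('http://example.org/oai', registry)
#   print(client.identify().repositoryName())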
| [((25, 13, 25, 33), 'setuptools.find_packages', 'find_packages', ({(25, 27, 25, 32): '"""src"""'}, {}), "('src')", False, 'from setuptools import setup, find_packages\n'), ((24, 18, 24, 35), 'os.path.dirname', 'dirname', ({(24, 26, 24, 34): '__file__'}, {}), '(__file__)', False, 'from os.path import join, dirname\n'), ((22, 32, 22, 49), 'os.path.dirname', 'dirname', ({(22, 40, 22, 48): '__file__'}, {}), '(__file__)', False, 'from os.path import join, dirname\n')] |
Roozbeh-Bazargani/CPSC-533R-project | utils/functions.py | 453f093b23d2363f09c61079d1d4fbd878abf3be | import torch
from torch import nn
import math
#0 left hip
#1 left knee
#2 left foot
#3 right hip
#4 right knee
#5 right foot
#6 middle hip
#7 neck
#8 nose
#9 head
#10 left shoulder
#11 left elbow
#12 left wrist
#13 right shoulder
#14 right elbow
#15 right wrist
def random_rotation(J3d):
    J = J3d  # aliases the input; rotation() below allocates a new tensor, so no copy is needed
batch_size = J.shape[0]
theta = torch.rand(batch_size).cuda() * 2*torch.tensor(math.pi).cuda() # random theta
root = J[:,:,8] # joint 8 = nose is root
J3d_R = rotation(J.cuda(), theta.cuda(), root.unsqueeze(-1).cuda(), False)
return J3d_R, theta, root # need these values in the code
def rotation(J, theta, root, is_reversed): # rotation over y axis by theta
D = root[:,2].cuda() # absolute depth of the root joint
batch_size = root.shape[0]
v_t = torch.zeros((batch_size, 3, 1)).cuda()
v_t[:, 2, :] = D.cuda() # translation vector
if is_reversed:
root, v_t = v_t, root # swap
theta = -theta
# R = torch.tensor([[torch.cos(theta), -torch.sin(theta), 0], [torch.sin(theta), torch.cos(theta), 0], [0, 0, 1]]) # rotation matrix over z by theta degrees
    R = torch.zeros((batch_size, 3, 3)).cuda() # rotation matrix over y by theta radians
R[:, 0, 0] = torch.cos(theta)
R[:, 0, 2] = torch.sin(theta)
R[:, 1, 1] = torch.ones(batch_size)
R[:, 2, 0] = -torch.sin(theta)
R[:, 2, 2] = torch.cos(theta)
# R = torch.tensor([[torch.cos(theta), 0, torch.sin(theta)], [0, 1, 0], [-torch.sin(theta), 0, torch.cos(theta)]]) # rotation matrix over y by theta degrees
# R = torch.tensor([[1, 0, 0], [0, torch.cos(theta), -torch.sin(theta)], [0, torch.sin(theta), torch.cos(theta)]]) # rotation matrix over x by theta degrees
J_R = torch.matmul(R, J - root) + v_t # rotation
return J_R
def reverse_rotation(J3d_R, theta, root):
    J = J3d_R  # aliases the input; rotation() allocates a new tensor, so no copy is needed
return rotation(J.cuda(), theta.cuda(), root.unsqueeze(-1).cuda(), True)
def temporal_loss(J, K, J_R, K_R): # J is J3d at time t and K is J3d at time t+k. J_R means the reversed rotation of J
mse_fn = nn.MSELoss()
return mse_fn(J.reshape(J.shape[0], 3, 16) - K.reshape(J.shape[0], 3, 16) - J_R.reshape(J.shape[0], 3, 16) + K_R.reshape(J.shape[0], 3, 16), torch.zeros(J.shape[0], 3, 16).cuda())
#return torch.norm(J.reshape(J.shape[0], 3, 16) - K.reshape(J.shape[0], 3, 16) - J_R.reshape(J.shape[0], 3, 16) + K_R.reshape(J.shape[0], 3, 16), dim=1)**2
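# round-trip sanity check: reverse_rotation undoes random_rotation exactly, since
# R(-theta) @ (R(theta) @ (J - root) + v_t - v_t) + root == J
# (needs a CUDA device; the batch of 2 poses with 16 joints is hypothetical)
if __name__ == '__main__':
    J = torch.rand(2, 3, 16)
    J_R, theta, root = random_rotation(J.clone())
    J_rec = reverse_rotation(J_R, theta, root)
    print(torch.allclose(J.cuda(), J_rec, atol=1e-5))  # expected: True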
'''
def temporal_loss(J, K, J_R, K_R): # J is J3d at time t and K is J3d at time t+k. J_R means the reversed rotation of J
return torch.norm(J - K - J_R + K_R, dim=1)**2
'''
'''
def random_rotation(J3d):
# J = torch.transpose(J3d, 1, 2)
J = J3d
root = torch.zeros(J.shape[0:2])
for i in range(J.shape[0]):
theta = torch.rand(1).cuda() * 2*torch.tensor(math.pi).cuda() # random theta
root[i] = J[i,:,8] # joint 8 = nose is root
temp = rotation(J[i,:,:], theta, root[i].unsqueeze(1), False)
# print(temp.shape)
J[i,:,:] = temp
return J, theta, root # need these values in the code
def rotation(J, theta, root, is_reversed): # rotation over y axis by theta
D = root[2] # absolute depth of the root joint
v_t = torch.tensor([[0], [0], [D]]).cuda() # translation vector
if is_reversed:
root, v_t = v_t, root # swap
theta = -theta
# R = torch.tensor([[torch.cos(theta), -torch.sin(theta), 0], [torch.sin(theta), torch.cos(theta), 0], [0, 0, 1]]) # rotation matrix over z by theta degrees
R = torch.tensor([[torch.cos(theta), 0, torch.sin(theta)], [0, 1, 0], [-torch.sin(theta), 0, torch.cos(theta)]]).cuda() # rotation matrix over y by theta degrees
# R = torch.tensor([[1, 0, 0], [0, torch.cos(theta), -torch.sin(theta)], [0, torch.sin(theta), torch.cos(theta)]]) # rotation matrix over x by theta degrees
J_R = torch.matmul(R, J.cuda() - root.cuda()) + v_t # rotation
return J_R
def reverse_rotation(J3d_R, theta, root):
# J = torch.transpose(J3d_R, 1, 2)
J = J3d_R
for i in range(J.shape[0]):
J[i,:,:] = rotation(J[i,:,:].cuda(), theta.cuda(), root[i].unsqueeze(1).cuda(), True)
return J
''' | [((40, 15, 40, 31), 'torch.cos', 'torch.cos', ({(40, 25, 40, 30): 'theta'}, {}), '(theta)', False, 'import torch\n'), ((41, 15, 41, 31), 'torch.sin', 'torch.sin', ({(41, 25, 41, 30): 'theta'}, {}), '(theta)', False, 'import torch\n'), ((42, 15, 42, 37), 'torch.ones', 'torch.ones', ({(42, 26, 42, 36): 'batch_size'}, {}), '(batch_size)', False, 'import torch\n'), ((44, 15, 44, 31), 'torch.cos', 'torch.cos', ({(44, 25, 44, 30): 'theta'}, {}), '(theta)', False, 'import torch\n'), ((59, 11, 59, 23), 'torch.nn.MSELoss', 'nn.MSELoss', ({}, {}), '()', False, 'from torch import nn\n'), ((43, 16, 43, 32), 'torch.sin', 'torch.sin', ({(43, 26, 43, 31): 'theta'}, {}), '(theta)', False, 'import torch\n'), ((48, 8, 48, 33), 'torch.matmul', 'torch.matmul', ({(48, 21, 48, 22): 'R', (48, 24, 48, 32): '(J - root)'}, {}), '(R, J - root)', False, 'import torch\n'), ((33, 8, 33, 39), 'torch.zeros', 'torch.zeros', ({(33, 20, 33, 38): '(batch_size, 3, 1)'}, {}), '((batch_size, 3, 1))', False, 'import torch\n'), ((39, 6, 39, 37), 'torch.zeros', 'torch.zeros', ({(39, 18, 39, 36): '(batch_size, 3, 3)'}, {}), '((batch_size, 3, 3))', False, 'import torch\n'), ((25, 44, 25, 65), 'torch.tensor', 'torch.tensor', ({(25, 57, 25, 64): 'math.pi'}, {}), '(math.pi)', False, 'import torch\n'), ((60, 143, 60, 173), 'torch.zeros', 'torch.zeros', ({(60, 155, 60, 165): 'J.shape[0]', (60, 167, 60, 168): '(3)', (60, 170, 60, 172): '(16)'}, {}), '(J.shape[0], 3, 16)', False, 'import torch\n'), ((25, 10, 25, 32), 'torch.rand', 'torch.rand', ({(25, 21, 25, 31): 'batch_size'}, {}), '(batch_size)', False, 'import torch\n')] |
ayresmajor/Curso-python | Desafio Python/Aula 22 des109.py | 006229cec38ea365bf43b19e3ce93fbd32e1dca6 | from des109 import moeda
preco = float(input('Enter the target price: €'))
print(f'''Half of the price is {moeda.metade(preco)}
Double the price is {moeda.dobra(preco)}
Increasing the price by 10% gives {moeda.aumentar(preco, 10)}
Decreasing the price by 13% gives {moeda.diminuir(preco, 13)}''')  # diminuir() assumed to exist in moeda, matching the text
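# the des109/moeda module is not shown in this row; a minimal sketch of what it
# presumably provides, inferred from the calls above (all signatures assumed):
#
#   def metade(preco): return preco / 2
#   def dobra(preco): return preco * 2
#   def aumentar(preco, taxa): return preco + preco * taxa / 100
#   def diminuir(preco, taxa): return preco - preco * taxa / 100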
| [((4, 34, 4, 53), 'des109.moeda.metade', 'moeda.metade', ({(4, 47, 4, 52): 'preco'}, {}), '(preco)', False, 'from des109 import moeda\n'), ((5, 30, 5, 48), 'des109.moeda.dobra', 'moeda.dobra', ({(5, 42, 5, 47): 'preco'}, {}), '(preco)', False, 'from des109 import moeda\n'), ((6, 39, 6, 64), 'des109.moeda.aumentar', 'moeda.aumentar', ({(6, 54, 6, 59): 'preco', (6, 61, 6, 63): '(10)'}, {}), '(preco, 10)', False, 'from des109 import moeda\n'), ((7, 39, 7, 64), 'des109.moeda.aumentar', 'moeda.aumentar', ({(7, 54, 7, 59): 'preco', (7, 61, 7, 63): '(13)'}, {}), '(preco, 13)', False, 'from des109 import moeda\n')] |
PacktPublishing/Odoo-Development-Cookbook | Chapter13_code/ch13_r05_using_the_rpc_api/xmlrpc.py | 5553110c0bc352c4541f11904e236cad3c443b8b | #!/usr/bin/env python2
import xmlrpclib
db = 'odoo9'
user = 'admin'
password = 'admin'
uid = xmlrpclib.ServerProxy('http://localhost:8069/xmlrpc/2/common')\
.authenticate(db, user, password, {})
odoo = xmlrpclib.ServerProxy('http://localhost:8069/xmlrpc/2/object')
installed_modules = odoo.execute_kw(
db, uid, password, 'ir.module.module', 'search_read',
[[('state', '=', 'installed')], ['name']], {})
for module in installed_modules:
print module['name']
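# a further write example over the same connection (model and values are
# hypothetical; left commented so the script stays read-only):
# partner_id = odoo.execute_kw(
#     db, uid, password, 'res.partner', 'create',
#     [{'name': 'Test Partner'}])
# print partner_id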
| [] |
emorynlp/character-identification-old | python/zzz/v1-all_feat_cnn/components/features.py | f6519166dd30bd8140f05aa3e43225ab27c2ea6d | from abc import *
import numpy as np
###########################################################
class AbstractFeatureExtractor(object):
@abstractmethod
    def extract(self, obj):
return
###########################################################
class EntityFeatureExtractor(AbstractFeatureExtractor):
def __init__(self, empty_embd_shape=None, empty_feat_shape=None):
self.e_EMPTY = np.zeros(empty_embd_shape) if empty_embd_shape else None
self.f_EMPTY = np.zeros(empty_feat_shape) if empty_feat_shape else None
def extract(self, entity, include_average=True, nb_mentions=5, selection_method='last'):
embedding, feature = ([], [])
if entity and include_average:
nb_mentions -= 1
embedding.append(entity.get_avg_mention_embedding())
feature.append(entity.get_avg_mention_feature())
nb_padding = max(0, nb_mentions - len(entity))
nb_mentions -= nb_padding
        if selection_method == 'last':
mentions = entity[-nb_mentions:]
embedding += map(lambda m: m.embedding, mentions)
feature += map(lambda m: m.feature, mentions)
for i in xrange(nb_padding):
embedding.append(self.e_EMPTY)
feature.append(self.f_EMPTY)
return np.array(embedding), np.array(feature)
###########################################################
class MentionFeatureExtractor(AbstractFeatureExtractor):
def __init__(self, word2vec, word2gender, spks, poss, deps, ners, spk_dim=8, pos_dim=8, dep_dim=8, ner_dim=8):
self.word2vec = word2vec
self.word2vec_dim = len(word2vec.values()[0])
self.word2gender = word2gender
self.word2gender_dim = len(word2gender.values()[0])
self.spk_dim = spk_dim
self.spk2vec = dict()
for spk in spks:
self.spk2vec[spk] = np.random.rand(spk_dim)
self.pos_dim = pos_dim
self.pos2vec = dict()
for pos in poss:
self.pos2vec[pos] = np.random.rand(pos_dim)
self.dep_dim = dep_dim
self.dep2vec = dict()
for dep in deps:
self.dep2vec[dep] = np.random.rand(dep_dim)
self.ner_dim = ner_dim
self.ner2vec = dict()
for ner in ners:
self.ner2vec[ner] = np.random.rand(ner_dim)
def extract(self, mention):
head_token = self.get_head_token(mention)
first_token, last_token = mention.tokens[0], mention.tokens[-1]
utterance = first_token.parent_utterance()
scene = utterance.parent_scene()
episode = scene.parent_episode()
speaker = utterance.speaker
prev_utterance = utterance.previous_utterance()
prev_speaker = prev_utterance.speaker if prev_utterance is not None else None
flatten_utterance_tokens = self.flatten_utterance(utterance)
flatten_sentence_tokens = self.get_mention_sentence_tokens(utterance, mention)
ft_locations = self.get_token_locations(flatten_utterance_tokens, mention)
start_ftid, end_ftid = ft_locations[0], ft_locations[-1]
token_len = end_ftid - start_ftid
embeddings = list()
# Word embeddings of the head word
embeddings.append(self.get_token_word_vector(head_token))
# First word of the mention
embeddings.append(self.get_token_word_vector(first_token))
# Last word of the mention
embeddings.append(self.get_token_word_vector(last_token))
# Avg of all words in the mention
embeddings.append(self.get_tokens_word_vector(mention))
# Two preceding words of the mention
embeddings.append(self.get_tokens_word_vector_wOffset(flatten_utterance_tokens, start_ftid-1, 1))
embeddings.append(self.get_tokens_word_vector_wOffset(flatten_utterance_tokens, start_ftid-2, 1))
# Two following words of the mention
embeddings.append(self.get_tokens_word_vector_wOffset(flatten_utterance_tokens, end_ftid+1, 1))
embeddings.append(self.get_tokens_word_vector_wOffset(flatten_utterance_tokens, end_ftid+2, 1))
# Avg of the +-1 words
embeddings.append(self.get_tokens_word_vector_wOffset(flatten_utterance_tokens, start_ftid-1, token_len+2))
# Avg of the +-2 words
embeddings.append(self.get_tokens_word_vector_wOffset(flatten_utterance_tokens, start_ftid-2, token_len+4))
# Avg of the -5 words
embeddings.append(self.get_tokens_word_vector_wOffset(flatten_utterance_tokens, start_ftid-1, -5))
# Avg of the +5 words
embeddings.append(self.get_tokens_word_vector_wOffset(flatten_utterance_tokens, end_ftid+1, 5))
# Avg of all words in the mention's sentence
embeddings.append(self.get_tokens_word_vector_wOffset(flatten_sentence_tokens, 0, len(flatten_sentence_tokens)))
# Avg of all words in current utterance
embeddings.append(self.get_utterance_vector(utterance))
# Avg of all words in previous utterance
embeddings.append(self.get_utterance_vector(prev_utterance))
# Avg of all words in the scene
embeddings.append(self.get_scene_vector(scene))
# Avg of all words in the episode
embeddings.append(self.get_episode_vector(episode))
features = list()
# Gender information of head token in the mention
features.append(self.get_token_gender_vector(head_token))
# Avg gender information of all tokens in the mention
features.append(self.get_tokens_gender_vector(mention))
# Current speaker information of the utterance
features.append(self.get_speaker_vector(speaker))
# Previous speaker information of the utterance
features.append(self.get_speaker_vector(prev_speaker))
# Pos tag information of head token
features.append(self.get_pos_tag_vector(head_token.pos_tag))
# Ner tag information of head token
features.append(self.get_ner_tag_vector(head_token.ner_tag))
# Dep label information of head token
features.append(self.get_dep_label_vector(head_token.dep_label))
# Dep label information of head token'parent
features.append(np.zeros(self.dep_dim) if head_token.dep_head is None
else self.get_dep_label_vector(head_token.dep_head.dep_label))
# Mention token length/location information within utterance
features.append(self.get_mention_location_information(flatten_utterance_tokens, start_ftid, end_ftid))
return np.array(embeddings), np.concatenate(features)
###### Helper functions #######
def get_head_token(self, mention):
tids = map(lambda t: t.id, mention.tokens)
for token in mention.tokens:
if token.dep_head is not None and token.dep_head.id not in tids:
return token
return mention.tokens[0]
def flatten_utterance(self, utterance):
return [st for statements in utterance.statements for st in statements]
def get_token_locations(self, flatten_tokens, mention):
locations = []
for idx, token in enumerate(flatten_tokens):
if token in mention.tokens:
locations.append(idx)
locations.sort()
return locations
def get_mention_sentence_tokens(self, utterance, mention):
token = mention.tokens[0]
for statement in utterance.statements:
if token in statement:
return statement
return None
###### Mention tokens features #######
def get_token_word_vector(self, token):
word_form = token.word_form.lower()
return self.word2vec[word_form] if word_form in self.word2vec else np.zeros(self.word2vec_dim)
def get_tokens_word_vector(self, mention):
tvector = np.zeros(self.word2vec_dim)
for token in mention.tokens:
tvector += self.get_token_word_vector(token)
return tvector / float(len(mention.tokens))
def get_tokens_word_vector_wOffset(self, flatten_tokens, start, offset):
tvector = np.zeros(self.word2vec_dim)
        if offset > 0:
            for tid in xrange(start, start+offset):
                tvector += self.get_token_word_vector(flatten_tokens[tid]) \
                    if 0 <= tid < len(flatten_tokens) else np.zeros(self.word2vec_dim)
        else:
            for tid in xrange(start, start+offset, -1):
                tvector += self.get_token_word_vector(flatten_tokens[tid]) \
                    if 0 <= tid < len(flatten_tokens) else np.zeros(self.word2vec_dim)
        return tvector / float(abs(offset))
def get_token_gender_vector(self, token):
word_form = token.word_form.lower()
return self.word2gender[word_form] if word_form in self.word2gender else np.zeros(self.word2gender_dim)
def get_tokens_gender_vector(self, mention):
gvector = np.zeros(self.word2gender_dim)
for token in mention.tokens:
gvector += self.get_token_gender_vector(token)
return gvector / float(len(mention.tokens))
def get_speaker_vector(self, speaker):
return self.spk2vec[speaker] if speaker in self.spk2vec else np.zeros(self.spk_dim)
def get_pos_tag_vector(self, tag):
return self.pos2vec[tag] if tag in self.pos2vec else np.zeros(self.pos_dim)
def get_ner_tag_vector(self, tag):
return self.ner2vec[tag] if tag in self.ner2vec else np.zeros(self.ner_dim)
def get_dep_label_vector(self, label):
return self.dep2vec[label] if label in self.dep2vec else np.zeros(self.dep_dim)
    def get_mention_location_information(self, flatten_utterance_tokens, start_idx, end_index):
        length = len(flatten_utterance_tokens)
# Normalized mention word length, start token location, end token location
return np.array([float(end_index-start_idx)/length, float(start_idx)/length, float(end_index)/length])
#### Transcript document features ####
def get_utterance_vector(self, utterance):
tcount = 0
uvector = np.zeros(self.word2vec_dim)
if utterance is not None:
for u in utterance.statements:
for t in u:
word = t.word_form.lower()
if word in self.word2vec:
uvector = uvector + self.word2vec[word]
tcount += len(u)
return uvector / float(tcount) if tcount > 0 else uvector
def get_scene_vector(self, scene):
svector = np.zeros(self.word2vec_dim)
for utterance in scene.utterances:
svector += self.get_utterance_vector(utterance)
return svector / float(len(scene.utterances)) if scene.utterances else svector
def get_episode_vector(self, episode):
evector = np.zeros(self.word2vec_dim)
for scene in episode.scenes:
evector += self.get_scene_vector(scene)
return evector / float(len(episode.scenes)) if episode.scenes else evector
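if __name__ == '__main__':
    # quick illustration of the normalized location feature above: a mention
    # spanning tokens 5..8 of a 20-token utterance (values are hypothetical);
    # __new__ skips __init__ since no lookup tables are needed for this method
    fe = MentionFeatureExtractor.__new__(MentionFeatureExtractor)
    print(fe.get_mention_location_information(range(20), 5, 8))  # [0.15 0.25 0.4]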
| [((176, 18, 176, 45), 'numpy.zeros', 'np.zeros', ({(176, 27, 176, 44): 'self.word2vec_dim'}, {}), '(self.word2vec_dim)', True, 'import numpy as np\n'), ((182, 18, 182, 45), 'numpy.zeros', 'np.zeros', ({(182, 27, 182, 44): 'self.word2vec_dim'}, {}), '(self.word2vec_dim)', True, 'import numpy as np\n'), ((199, 18, 199, 48), 'numpy.zeros', 'np.zeros', ({(199, 27, 199, 47): 'self.word2gender_dim'}, {}), '(self.word2gender_dim)', True, 'import numpy as np\n'), ((224, 18, 224, 45), 'numpy.zeros', 'np.zeros', ({(224, 27, 224, 44): 'self.word2vec_dim'}, {}), '(self.word2vec_dim)', True, 'import numpy as np\n'), ((235, 18, 235, 45), 'numpy.zeros', 'np.zeros', ({(235, 27, 235, 44): 'self.word2vec_dim'}, {}), '(self.word2vec_dim)', True, 'import numpy as np\n'), ((242, 18, 242, 45), 'numpy.zeros', 'np.zeros', ({(242, 27, 242, 44): 'self.word2vec_dim'}, {}), '(self.word2vec_dim)', True, 'import numpy as np\n'), ((15, 23, 15, 49), 'numpy.zeros', 'np.zeros', ({(15, 32, 15, 48): 'empty_embd_shape'}, {}), '(empty_embd_shape)', True, 'import numpy as np\n'), ((16, 23, 16, 49), 'numpy.zeros', 'np.zeros', ({(16, 32, 16, 48): 'empty_feat_shape'}, {}), '(empty_feat_shape)', True, 'import numpy as np\n'), ((37, 15, 37, 34), 'numpy.array', 'np.array', ({(37, 24, 37, 33): 'embedding'}, {}), '(embedding)', True, 'import numpy as np\n'), ((37, 36, 37, 53), 'numpy.array', 'np.array', ({(37, 45, 37, 52): 'feature'}, {}), '(feature)', True, 'import numpy as np\n'), ((52, 32, 52, 55), 'numpy.random.rand', 'np.random.rand', ({(52, 47, 52, 54): 'spk_dim'}, {}), '(spk_dim)', True, 'import numpy as np\n'), ((57, 32, 57, 55), 'numpy.random.rand', 'np.random.rand', ({(57, 47, 57, 54): 'pos_dim'}, {}), '(pos_dim)', True, 'import numpy as np\n'), ((62, 32, 62, 55), 'numpy.random.rand', 'np.random.rand', ({(62, 47, 62, 54): 'dep_dim'}, {}), '(dep_dim)', True, 'import numpy as np\n'), ((67, 32, 67, 55), 'numpy.random.rand', 'np.random.rand', ({(67, 47, 67, 54): 'ner_dim'}, {}), '(ner_dim)', True, 'import numpy as np\n'), ((142, 15, 142, 35), 'numpy.array', 'np.array', ({(142, 24, 142, 34): 'embeddings'}, {}), '(embeddings)', True, 'import numpy as np\n'), ((142, 37, 142, 61), 'numpy.concatenate', 'np.concatenate', ({(142, 52, 142, 60): 'features'}, {}), '(features)', True, 'import numpy as np\n'), ((173, 75, 173, 102), 'numpy.zeros', 'np.zeros', ({(173, 84, 173, 101): 'self.word2vec_dim'}, {}), '(self.word2vec_dim)', True, 'import numpy as np\n'), ((196, 81, 196, 111), 'numpy.zeros', 'np.zeros', ({(196, 90, 196, 110): 'self.word2gender_dim'}, {}), '(self.word2gender_dim)', True, 'import numpy as np\n'), ((205, 69, 205, 91), 'numpy.zeros', 'np.zeros', ({(205, 78, 205, 90): 'self.spk_dim'}, {}), '(self.spk_dim)', True, 'import numpy as np\n'), ((208, 61, 208, 83), 'numpy.zeros', 'np.zeros', ({(208, 70, 208, 82): 'self.pos_dim'}, {}), '(self.pos_dim)', True, 'import numpy as np\n'), ((211, 61, 211, 83), 'numpy.zeros', 'np.zeros', ({(211, 70, 211, 82): 'self.ner_dim'}, {}), '(self.ner_dim)', True, 'import numpy as np\n'), ((214, 65, 214, 87), 'numpy.zeros', 'np.zeros', ({(214, 74, 214, 86): 'self.dep_dim'}, {}), '(self.dep_dim)', True, 'import numpy as np\n'), ((137, 24, 137, 46), 'numpy.zeros', 'np.zeros', ({(137, 33, 137, 45): 'self.dep_dim'}, {}), '(self.dep_dim)', True, 'import numpy as np\n'), ((187, 54, 187, 81), 'numpy.zeros', 'np.zeros', ({(187, 63, 187, 80): 'self.word2vec_dim'}, {}), '(self.word2vec_dim)', True, 'import numpy as np\n'), ((191, 37, 191, 64), 'numpy.zeros', 'np.zeros', ({(191, 46, 191, 63): 
'self.word2vec_dim'}, {}), '(self.word2vec_dim)', True, 'import numpy as np\n')] |
waikato-ufdl/ufdl-backend | ufdl-core-app/src/ufdl/core_app/models/mixins/_UserRestrictedQuerySet.py | 776fc906c61eba6c2f2e6324758e7b8a323e30d7 | from django.db import models
class UserRestrictedQuerySet(models.QuerySet):
"""
Query-set base class for models which apply per-instance permissions
based on the user accessing them.
"""
def for_user(self, user):
"""
Filters the query-set to those instances that the
given user is allowed to access.
:param user: The user.
:return: The filtered query-set.
"""
raise NotImplementedError(UserRestrictedQuerySet.for_user.__qualname__)
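# a hypothetical concrete subclass, showing how for_user() is meant to be
# overridden (the model fields 'is_public' and 'creator' are invented):
class DatasetQuerySet(UserRestrictedQuerySet):
    def for_user(self, user):
        return self.filter(models.Q(is_public=True) | models.Q(creator=user))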
| [] |
sebtelko/pulumi-azure-native | sdk/python/pulumi_azure_native/eventgrid/partner_registration.py | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._enums import *
__all__ = ['PartnerRegistrationArgs', 'PartnerRegistration']
@pulumi.input_type
class PartnerRegistrationArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
authorized_azure_subscription_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
customer_service_uri: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
logo_uri: Optional[pulumi.Input[str]] = None,
long_description: Optional[pulumi.Input[str]] = None,
partner_customer_service_extension: Optional[pulumi.Input[str]] = None,
partner_customer_service_number: Optional[pulumi.Input[str]] = None,
partner_name: Optional[pulumi.Input[str]] = None,
partner_registration_name: Optional[pulumi.Input[str]] = None,
partner_resource_type_description: Optional[pulumi.Input[str]] = None,
partner_resource_type_display_name: Optional[pulumi.Input[str]] = None,
partner_resource_type_name: Optional[pulumi.Input[str]] = None,
setup_uri: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
visibility_state: Optional[pulumi.Input[Union[str, 'PartnerRegistrationVisibilityState']]] = None):
"""
The set of arguments for constructing a PartnerRegistration resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription.
:param pulumi.Input[Sequence[pulumi.Input[str]]] authorized_azure_subscription_ids: List of Azure subscription Ids that are authorized to create a partner namespace
associated with this partner registration. This is an optional property. Creating
partner namespaces is always permitted under the same Azure subscription as the one used
for creating the partner registration.
:param pulumi.Input[str] customer_service_uri: The extension of the customer service URI of the publisher.
:param pulumi.Input[str] location: Location of the resource.
:param pulumi.Input[str] logo_uri: URI of the logo.
:param pulumi.Input[str] long_description: Long description for the custom scenarios and integration to be displayed in the portal if needed.
Length of this description should not exceed 2048 characters.
:param pulumi.Input[str] partner_customer_service_extension: The extension of the customer service number of the publisher. Only digits are allowed and number of digits should not exceed 10.
:param pulumi.Input[str] partner_customer_service_number: The customer service number of the publisher. The expected phone format should start with a '+' sign
followed by the country code. The remaining digits are then followed. Only digits and spaces are allowed and its
length cannot exceed 16 digits including country code. Examples of valid phone numbers are: +1 515 123 4567 and
+966 7 5115 2471. Examples of invalid phone numbers are: +1 (515) 123-4567, 1 515 123 4567 and +966 121 5115 24 7 551 1234 43
        :param pulumi.Input[str] partner_name: Official name of the partner. For example: "Contoso".
:param pulumi.Input[str] partner_registration_name: Name of the partner registration.
:param pulumi.Input[str] partner_resource_type_description: Short description of the partner resource type. The length of this description should not exceed 256 characters.
:param pulumi.Input[str] partner_resource_type_display_name: Display name of the partner resource type.
:param pulumi.Input[str] partner_resource_type_name: Name of the partner resource type.
        :param pulumi.Input[str] setup_uri: URI of the partner website that can be used by Azure customers to set up Event Grid
integration on an event source.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Tags of the resource.
:param pulumi.Input[Union[str, 'PartnerRegistrationVisibilityState']] visibility_state: Visibility state of the partner registration.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
if authorized_azure_subscription_ids is not None:
pulumi.set(__self__, "authorized_azure_subscription_ids", authorized_azure_subscription_ids)
if customer_service_uri is not None:
pulumi.set(__self__, "customer_service_uri", customer_service_uri)
if location is not None:
pulumi.set(__self__, "location", location)
if logo_uri is not None:
pulumi.set(__self__, "logo_uri", logo_uri)
if long_description is not None:
pulumi.set(__self__, "long_description", long_description)
if partner_customer_service_extension is not None:
pulumi.set(__self__, "partner_customer_service_extension", partner_customer_service_extension)
if partner_customer_service_number is not None:
pulumi.set(__self__, "partner_customer_service_number", partner_customer_service_number)
if partner_name is not None:
pulumi.set(__self__, "partner_name", partner_name)
if partner_registration_name is not None:
pulumi.set(__self__, "partner_registration_name", partner_registration_name)
if partner_resource_type_description is not None:
pulumi.set(__self__, "partner_resource_type_description", partner_resource_type_description)
if partner_resource_type_display_name is not None:
pulumi.set(__self__, "partner_resource_type_display_name", partner_resource_type_display_name)
if partner_resource_type_name is not None:
pulumi.set(__self__, "partner_resource_type_name", partner_resource_type_name)
if setup_uri is not None:
pulumi.set(__self__, "setup_uri", setup_uri)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if visibility_state is not None:
pulumi.set(__self__, "visibility_state", visibility_state)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group within the user's subscription.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="authorizedAzureSubscriptionIds")
def authorized_azure_subscription_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of Azure subscription Ids that are authorized to create a partner namespace
associated with this partner registration. This is an optional property. Creating
partner namespaces is always permitted under the same Azure subscription as the one used
for creating the partner registration.
"""
return pulumi.get(self, "authorized_azure_subscription_ids")
@authorized_azure_subscription_ids.setter
def authorized_azure_subscription_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "authorized_azure_subscription_ids", value)
@property
@pulumi.getter(name="customerServiceUri")
def customer_service_uri(self) -> Optional[pulumi.Input[str]]:
"""
The extension of the customer service URI of the publisher.
"""
return pulumi.get(self, "customer_service_uri")
@customer_service_uri.setter
def customer_service_uri(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "customer_service_uri", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Location of the resource.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="logoUri")
def logo_uri(self) -> Optional[pulumi.Input[str]]:
"""
URI of the logo.
"""
return pulumi.get(self, "logo_uri")
@logo_uri.setter
def logo_uri(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "logo_uri", value)
@property
@pulumi.getter(name="longDescription")
def long_description(self) -> Optional[pulumi.Input[str]]:
"""
Long description for the custom scenarios and integration to be displayed in the portal if needed.
Length of this description should not exceed 2048 characters.
"""
return pulumi.get(self, "long_description")
@long_description.setter
def long_description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "long_description", value)
@property
@pulumi.getter(name="partnerCustomerServiceExtension")
def partner_customer_service_extension(self) -> Optional[pulumi.Input[str]]:
"""
The extension of the customer service number of the publisher. Only digits are allowed and number of digits should not exceed 10.
"""
return pulumi.get(self, "partner_customer_service_extension")
@partner_customer_service_extension.setter
def partner_customer_service_extension(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "partner_customer_service_extension", value)
@property
@pulumi.getter(name="partnerCustomerServiceNumber")
def partner_customer_service_number(self) -> Optional[pulumi.Input[str]]:
"""
The customer service number of the publisher. The expected phone format should start with a '+' sign
followed by the country code. The remaining digits are then followed. Only digits and spaces are allowed and its
length cannot exceed 16 digits including country code. Examples of valid phone numbers are: +1 515 123 4567 and
+966 7 5115 2471. Examples of invalid phone numbers are: +1 (515) 123-4567, 1 515 123 4567 and +966 121 5115 24 7 551 1234 43
"""
return pulumi.get(self, "partner_customer_service_number")
@partner_customer_service_number.setter
def partner_customer_service_number(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "partner_customer_service_number", value)
@property
@pulumi.getter(name="partnerName")
def partner_name(self) -> Optional[pulumi.Input[str]]:
"""
        Official name of the partner. For example: "Contoso".
"""
return pulumi.get(self, "partner_name")
@partner_name.setter
def partner_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "partner_name", value)
@property
@pulumi.getter(name="partnerRegistrationName")
def partner_registration_name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the partner registration.
"""
return pulumi.get(self, "partner_registration_name")
@partner_registration_name.setter
def partner_registration_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "partner_registration_name", value)
@property
@pulumi.getter(name="partnerResourceTypeDescription")
def partner_resource_type_description(self) -> Optional[pulumi.Input[str]]:
"""
Short description of the partner resource type. The length of this description should not exceed 256 characters.
"""
return pulumi.get(self, "partner_resource_type_description")
@partner_resource_type_description.setter
def partner_resource_type_description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "partner_resource_type_description", value)
@property
@pulumi.getter(name="partnerResourceTypeDisplayName")
def partner_resource_type_display_name(self) -> Optional[pulumi.Input[str]]:
"""
Display name of the partner resource type.
"""
return pulumi.get(self, "partner_resource_type_display_name")
@partner_resource_type_display_name.setter
def partner_resource_type_display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "partner_resource_type_display_name", value)
@property
@pulumi.getter(name="partnerResourceTypeName")
def partner_resource_type_name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the partner resource type.
"""
return pulumi.get(self, "partner_resource_type_name")
@partner_resource_type_name.setter
def partner_resource_type_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "partner_resource_type_name", value)
@property
@pulumi.getter(name="setupUri")
def setup_uri(self) -> Optional[pulumi.Input[str]]:
"""
        URI of the partner website that can be used by Azure customers to set up Event Grid
integration on an event source.
"""
return pulumi.get(self, "setup_uri")
@setup_uri.setter
def setup_uri(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "setup_uri", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Tags of the resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="visibilityState")
def visibility_state(self) -> Optional[pulumi.Input[Union[str, 'PartnerRegistrationVisibilityState']]]:
"""
Visibility state of the partner registration.
"""
return pulumi.get(self, "visibility_state")
@visibility_state.setter
def visibility_state(self, value: Optional[pulumi.Input[Union[str, 'PartnerRegistrationVisibilityState']]]):
pulumi.set(self, "visibility_state", value)
class PartnerRegistration(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
authorized_azure_subscription_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
customer_service_uri: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
logo_uri: Optional[pulumi.Input[str]] = None,
long_description: Optional[pulumi.Input[str]] = None,
partner_customer_service_extension: Optional[pulumi.Input[str]] = None,
partner_customer_service_number: Optional[pulumi.Input[str]] = None,
partner_name: Optional[pulumi.Input[str]] = None,
partner_registration_name: Optional[pulumi.Input[str]] = None,
partner_resource_type_description: Optional[pulumi.Input[str]] = None,
partner_resource_type_display_name: Optional[pulumi.Input[str]] = None,
partner_resource_type_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
setup_uri: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
visibility_state: Optional[pulumi.Input[Union[str, 'PartnerRegistrationVisibilityState']]] = None,
__props__=None):
"""
Information about a partner registration.
API Version: 2020-04-01-preview.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] authorized_azure_subscription_ids: List of Azure subscription Ids that are authorized to create a partner namespace
associated with this partner registration. This is an optional property. Creating
partner namespaces is always permitted under the same Azure subscription as the one used
for creating the partner registration.
:param pulumi.Input[str] customer_service_uri: The extension of the customer service URI of the publisher.
:param pulumi.Input[str] location: Location of the resource.
:param pulumi.Input[str] logo_uri: URI of the logo.
:param pulumi.Input[str] long_description: Long description for the custom scenarios and integration to be displayed in the portal if needed.
Length of this description should not exceed 2048 characters.
:param pulumi.Input[str] partner_customer_service_extension: The extension of the customer service number of the publisher. Only digits are allowed and number of digits should not exceed 10.
:param pulumi.Input[str] partner_customer_service_number: The customer service number of the publisher. The expected phone format should start with a '+' sign
followed by the country code. The remaining digits are then followed. Only digits and spaces are allowed and its
length cannot exceed 16 digits including country code. Examples of valid phone numbers are: +1 515 123 4567 and
+966 7 5115 2471. Examples of invalid phone numbers are: +1 (515) 123-4567, 1 515 123 4567 and +966 121 5115 24 7 551 1234 43
        :param pulumi.Input[str] partner_name: Official name of the partner. For example: "Contoso".
:param pulumi.Input[str] partner_registration_name: Name of the partner registration.
:param pulumi.Input[str] partner_resource_type_description: Short description of the partner resource type. The length of this description should not exceed 256 characters.
:param pulumi.Input[str] partner_resource_type_display_name: Display name of the partner resource type.
:param pulumi.Input[str] partner_resource_type_name: Name of the partner resource type.
:param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription.
        :param pulumi.Input[str] setup_uri: URI of the partner website that can be used by Azure customers to set up Event Grid
integration on an event source.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Tags of the resource.
:param pulumi.Input[Union[str, 'PartnerRegistrationVisibilityState']] visibility_state: Visibility state of the partner registration.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: PartnerRegistrationArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Information about a partner registration.
API Version: 2020-04-01-preview.
:param str resource_name: The name of the resource.
:param PartnerRegistrationArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PartnerRegistrationArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
authorized_azure_subscription_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
customer_service_uri: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
logo_uri: Optional[pulumi.Input[str]] = None,
long_description: Optional[pulumi.Input[str]] = None,
partner_customer_service_extension: Optional[pulumi.Input[str]] = None,
partner_customer_service_number: Optional[pulumi.Input[str]] = None,
partner_name: Optional[pulumi.Input[str]] = None,
partner_registration_name: Optional[pulumi.Input[str]] = None,
partner_resource_type_description: Optional[pulumi.Input[str]] = None,
partner_resource_type_display_name: Optional[pulumi.Input[str]] = None,
partner_resource_type_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
setup_uri: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
visibility_state: Optional[pulumi.Input[Union[str, 'PartnerRegistrationVisibilityState']]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = PartnerRegistrationArgs.__new__(PartnerRegistrationArgs)
__props__.__dict__["authorized_azure_subscription_ids"] = authorized_azure_subscription_ids
__props__.__dict__["customer_service_uri"] = customer_service_uri
__props__.__dict__["location"] = location
__props__.__dict__["logo_uri"] = logo_uri
__props__.__dict__["long_description"] = long_description
__props__.__dict__["partner_customer_service_extension"] = partner_customer_service_extension
__props__.__dict__["partner_customer_service_number"] = partner_customer_service_number
__props__.__dict__["partner_name"] = partner_name
__props__.__dict__["partner_registration_name"] = partner_registration_name
__props__.__dict__["partner_resource_type_description"] = partner_resource_type_description
__props__.__dict__["partner_resource_type_display_name"] = partner_resource_type_display_name
__props__.__dict__["partner_resource_type_name"] = partner_resource_type_name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["setup_uri"] = setup_uri
__props__.__dict__["tags"] = tags
__props__.__dict__["visibility_state"] = visibility_state
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:eventgrid:PartnerRegistration"), pulumi.Alias(type_="azure-native:eventgrid/v20200401preview:PartnerRegistration"), pulumi.Alias(type_="azure-nextgen:eventgrid/v20200401preview:PartnerRegistration"), pulumi.Alias(type_="azure-native:eventgrid/v20201015preview:PartnerRegistration"), pulumi.Alias(type_="azure-nextgen:eventgrid/v20201015preview:PartnerRegistration")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(PartnerRegistration, __self__).__init__(
'azure-native:eventgrid:PartnerRegistration',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'PartnerRegistration':
"""
Get an existing PartnerRegistration resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = PartnerRegistrationArgs.__new__(PartnerRegistrationArgs)
__props__.__dict__["authorized_azure_subscription_ids"] = None
__props__.__dict__["customer_service_uri"] = None
__props__.__dict__["location"] = None
__props__.__dict__["logo_uri"] = None
__props__.__dict__["long_description"] = None
__props__.__dict__["name"] = None
__props__.__dict__["partner_customer_service_extension"] = None
__props__.__dict__["partner_customer_service_number"] = None
__props__.__dict__["partner_name"] = None
__props__.__dict__["partner_resource_type_description"] = None
__props__.__dict__["partner_resource_type_display_name"] = None
__props__.__dict__["partner_resource_type_name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["setup_uri"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
__props__.__dict__["visibility_state"] = None
return PartnerRegistration(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="authorizedAzureSubscriptionIds")
def authorized_azure_subscription_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
List of Azure subscription Ids that are authorized to create a partner namespace
associated with this partner registration. This is an optional property. Creating
partner namespaces is always permitted under the same Azure subscription as the one used
for creating the partner registration.
"""
return pulumi.get(self, "authorized_azure_subscription_ids")
@property
@pulumi.getter(name="customerServiceUri")
def customer_service_uri(self) -> pulumi.Output[Optional[str]]:
"""
The extension of the customer service URI of the publisher.
"""
return pulumi.get(self, "customer_service_uri")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
Location of the resource.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="logoUri")
def logo_uri(self) -> pulumi.Output[Optional[str]]:
"""
URI of the logo.
"""
return pulumi.get(self, "logo_uri")
@property
@pulumi.getter(name="longDescription")
def long_description(self) -> pulumi.Output[Optional[str]]:
"""
Long description for the custom scenarios and integration to be displayed in the portal if needed.
Length of this description should not exceed 2048 characters.
"""
return pulumi.get(self, "long_description")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="partnerCustomerServiceExtension")
def partner_customer_service_extension(self) -> pulumi.Output[Optional[str]]:
"""
The extension of the customer service number of the publisher. Only digits are allowed and number of digits should not exceed 10.
"""
return pulumi.get(self, "partner_customer_service_extension")
@property
@pulumi.getter(name="partnerCustomerServiceNumber")
def partner_customer_service_number(self) -> pulumi.Output[Optional[str]]:
"""
The customer service number of the publisher. The expected phone format should start with a '+' sign
followed by the country code. The remaining digits are then followed. Only digits and spaces are allowed and its
length cannot exceed 16 digits including country code. Examples of valid phone numbers are: +1 515 123 4567 and
+966 7 5115 2471. Examples of invalid phone numbers are: +1 (515) 123-4567, 1 515 123 4567 and +966 121 5115 24 7 551 1234 43
"""
return pulumi.get(self, "partner_customer_service_number")
@property
@pulumi.getter(name="partnerName")
def partner_name(self) -> pulumi.Output[Optional[str]]:
"""
        Official name of the partner. For example: "Contoso".
"""
return pulumi.get(self, "partner_name")
@property
@pulumi.getter(name="partnerResourceTypeDescription")
def partner_resource_type_description(self) -> pulumi.Output[Optional[str]]:
"""
Short description of the partner resource type. The length of this description should not exceed 256 characters.
"""
return pulumi.get(self, "partner_resource_type_description")
@property
@pulumi.getter(name="partnerResourceTypeDisplayName")
def partner_resource_type_display_name(self) -> pulumi.Output[Optional[str]]:
"""
Display name of the partner resource type.
"""
return pulumi.get(self, "partner_resource_type_display_name")
@property
@pulumi.getter(name="partnerResourceTypeName")
def partner_resource_type_name(self) -> pulumi.Output[Optional[str]]:
"""
Name of the partner resource type.
"""
return pulumi.get(self, "partner_resource_type_name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
Provisioning state of the partner registration.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="setupUri")
def setup_uri(self) -> pulumi.Output[Optional[str]]:
"""
URI of the partner website that can be used by Azure customers to setup Event Grid
integration on an event source.
"""
return pulumi.get(self, "setup_uri")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:
"""
The system metadata relating to Partner Registration resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Tags of the resource.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Type of the resource.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="visibilityState")
def visibility_state(self) -> pulumi.Output[Optional[str]]:
"""
Visibility state of the partner registration.
"""
return pulumi.get(self, "visibility_state")
| [((94, 5, 94, 44), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((106, 5, 106, 57), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((121, 5, 121, 45), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((145, 5, 145, 34), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((157, 5, 157, 42), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((170, 5, 170, 58), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((182, 5, 182, 55), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((197, 5, 197, 38), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((209, 5, 209, 50), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((221, 5, 221, 57), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((233, 5, 233, 57), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((245, 5, 245, 50), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((257, 5, 257, 35), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((282, 5, 282, 42), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((467, 5, 467, 57), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((478, 5, 478, 45), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((494, 5, 494, 34), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((502, 5, 502, 42), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((519, 5, 519, 58), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((527, 5, 527, 55), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((538, 5, 538, 38), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((546, 5, 546, 57), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((554, 5, 554, 57), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((562, 5, 562, 50), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((570, 5, 570, 44), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((578, 5, 578, 35), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((587, 5, 587, 37), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((611, 5, 611, 42), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((61, 8, 61, 72), 'pulumi.set', 'pulumi.set', ({(61, 19, 61, 27): '__self__', (61, 29, 61, 50): '"""resource_group_name"""', (61, 52, 61, 71): 'resource_group_name'}, {}), "(__self__, 'resource_group_name', resource_group_name)", False, 'import pulumi\n'), ((99, 15, 99, 54), 'pulumi.get', 'pulumi.get', ({(99, 26, 99, 30): 'self', (99, 32, 99, 53): '"""resource_group_name"""'}, {}), "(self, 'resource_group_name')", False, 'import pulumi\n'), ((103, 8, 103, 54), 'pulumi.set', 'pulumi.set', ({(103, 19, 103, 23): 'self', (103, 25, 103, 46): '"""resource_group_name"""', (103, 48, 103, 53): 'value'}, {}), "(self, 'resource_group_name', value)", False, 'import pulumi\n'), ((114, 15, 114, 68), 'pulumi.get', 'pulumi.get', ({(114, 26, 114, 30): 'self', (114, 32, 114, 67): '"""authorized_azure_subscription_ids"""'}, {}), "(self, 'authorized_azure_subscription_ids')", False, 'import pulumi\n'), ((118, 8, 118, 68), 'pulumi.set', 'pulumi.set', ({(118, 19, 118, 23): 'self', (118, 25, 118, 60): '"""authorized_azure_subscription_ids"""', (118, 62, 118, 67): 
'value'}, {}), "(self, 'authorized_azure_subscription_ids', value)", False, 'import pulumi\n'), ((126, 15, 126, 55), 'pulumi.get', 'pulumi.get', ({(126, 26, 126, 30): 'self', (126, 32, 126, 54): '"""customer_service_uri"""'}, {}), "(self, 'customer_service_uri')", False, 'import pulumi\n'), ((130, 8, 130, 55), 'pulumi.set', 'pulumi.set', ({(130, 19, 130, 23): 'self', (130, 25, 130, 47): '"""customer_service_uri"""', (130, 49, 130, 54): 'value'}, {}), "(self, 'customer_service_uri', value)", False, 'import pulumi\n'), ((138, 15, 138, 43), 'pulumi.get', 'pulumi.get', ({(138, 26, 138, 30): 'self', (138, 32, 138, 42): '"""location"""'}, {}), "(self, 'location')", False, 'import pulumi\n'), ((142, 8, 142, 43), 'pulumi.set', 'pulumi.set', ({(142, 19, 142, 23): 'self', (142, 25, 142, 35): '"""location"""', (142, 37, 142, 42): 'value'}, {}), "(self, 'location', value)", False, 'import pulumi\n'), ((150, 15, 150, 43), 'pulumi.get', 'pulumi.get', ({(150, 26, 150, 30): 'self', (150, 32, 150, 42): '"""logo_uri"""'}, {}), "(self, 'logo_uri')", False, 'import pulumi\n'), ((154, 8, 154, 43), 'pulumi.set', 'pulumi.set', ({(154, 19, 154, 23): 'self', (154, 25, 154, 35): '"""logo_uri"""', (154, 37, 154, 42): 'value'}, {}), "(self, 'logo_uri', value)", False, 'import pulumi\n'), ((163, 15, 163, 51), 'pulumi.get', 'pulumi.get', ({(163, 26, 163, 30): 'self', (163, 32, 163, 50): '"""long_description"""'}, {}), "(self, 'long_description')", False, 'import pulumi\n'), ((167, 8, 167, 51), 'pulumi.set', 'pulumi.set', ({(167, 19, 167, 23): 'self', (167, 25, 167, 43): '"""long_description"""', (167, 45, 167, 50): 'value'}, {}), "(self, 'long_description', value)", False, 'import pulumi\n'), ((175, 15, 175, 69), 'pulumi.get', 'pulumi.get', ({(175, 26, 175, 30): 'self', (175, 32, 175, 68): '"""partner_customer_service_extension"""'}, {}), "(self, 'partner_customer_service_extension')", False, 'import pulumi\n'), ((179, 8, 179, 69), 'pulumi.set', 'pulumi.set', ({(179, 19, 179, 23): 'self', (179, 25, 179, 61): '"""partner_customer_service_extension"""', (179, 63, 179, 68): 'value'}, {}), "(self, 'partner_customer_service_extension', value)", False, 'import pulumi\n'), ((190, 15, 190, 66), 'pulumi.get', 'pulumi.get', ({(190, 26, 190, 30): 'self', (190, 32, 190, 65): '"""partner_customer_service_number"""'}, {}), "(self, 'partner_customer_service_number')", False, 'import pulumi\n'), ((194, 8, 194, 66), 'pulumi.set', 'pulumi.set', ({(194, 19, 194, 23): 'self', (194, 25, 194, 58): '"""partner_customer_service_number"""', (194, 60, 194, 65): 'value'}, {}), "(self, 'partner_customer_service_number', value)", False, 'import pulumi\n'), ((202, 15, 202, 47), 'pulumi.get', 'pulumi.get', ({(202, 26, 202, 30): 'self', (202, 32, 202, 46): '"""partner_name"""'}, {}), "(self, 'partner_name')", False, 'import pulumi\n'), ((206, 8, 206, 47), 'pulumi.set', 'pulumi.set', ({(206, 19, 206, 23): 'self', (206, 25, 206, 39): '"""partner_name"""', (206, 41, 206, 46): 'value'}, {}), "(self, 'partner_name', value)", False, 'import pulumi\n'), ((214, 15, 214, 60), 'pulumi.get', 'pulumi.get', ({(214, 26, 214, 30): 'self', (214, 32, 214, 59): '"""partner_registration_name"""'}, {}), "(self, 'partner_registration_name')", False, 'import pulumi\n'), ((218, 8, 218, 60), 'pulumi.set', 'pulumi.set', ({(218, 19, 218, 23): 'self', (218, 25, 218, 52): '"""partner_registration_name"""', (218, 54, 218, 59): 'value'}, {}), "(self, 'partner_registration_name', value)", False, 'import pulumi\n'), ((226, 15, 226, 68), 'pulumi.get', 'pulumi.get', ({(226, 26, 226, 
30): 'self', (226, 32, 226, 67): '"""partner_resource_type_description"""'}, {}), "(self, 'partner_resource_type_description')", False, 'import pulumi\n'), ((230, 8, 230, 68), 'pulumi.set', 'pulumi.set', ({(230, 19, 230, 23): 'self', (230, 25, 230, 60): '"""partner_resource_type_description"""', (230, 62, 230, 67): 'value'}, {}), "(self, 'partner_resource_type_description', value)", False, 'import pulumi\n'), ((238, 15, 238, 69), 'pulumi.get', 'pulumi.get', ({(238, 26, 238, 30): 'self', (238, 32, 238, 68): '"""partner_resource_type_display_name"""'}, {}), "(self, 'partner_resource_type_display_name')", False, 'import pulumi\n'), ((242, 8, 242, 69), 'pulumi.set', 'pulumi.set', ({(242, 19, 242, 23): 'self', (242, 25, 242, 61): '"""partner_resource_type_display_name"""', (242, 63, 242, 68): 'value'}, {}), "(self, 'partner_resource_type_display_name', value)", False, 'import pulumi\n'), ((250, 15, 250, 61), 'pulumi.get', 'pulumi.get', ({(250, 26, 250, 30): 'self', (250, 32, 250, 60): '"""partner_resource_type_name"""'}, {}), "(self, 'partner_resource_type_name')", False, 'import pulumi\n'), ((254, 8, 254, 61), 'pulumi.set', 'pulumi.set', ({(254, 19, 254, 23): 'self', (254, 25, 254, 53): '"""partner_resource_type_name"""', (254, 55, 254, 60): 'value'}, {}), "(self, 'partner_resource_type_name', value)", False, 'import pulumi\n'), ((263, 15, 263, 44), 'pulumi.get', 'pulumi.get', ({(263, 26, 263, 30): 'self', (263, 32, 263, 43): '"""setup_uri"""'}, {}), "(self, 'setup_uri')", False, 'import pulumi\n'), ((267, 8, 267, 44), 'pulumi.set', 'pulumi.set', ({(267, 19, 267, 23): 'self', (267, 25, 267, 36): '"""setup_uri"""', (267, 38, 267, 43): 'value'}, {}), "(self, 'setup_uri', value)", False, 'import pulumi\n'), ((275, 15, 275, 39), 'pulumi.get', 'pulumi.get', ({(275, 26, 275, 30): 'self', (275, 32, 275, 38): '"""tags"""'}, {}), "(self, 'tags')", False, 'import pulumi\n'), ((279, 8, 279, 39), 'pulumi.set', 'pulumi.set', ({(279, 19, 279, 23): 'self', (279, 25, 279, 31): '"""tags"""', (279, 33, 279, 38): 'value'}, {}), "(self, 'tags', value)", False, 'import pulumi\n'), ((287, 15, 287, 51), 'pulumi.get', 'pulumi.get', ({(287, 26, 287, 30): 'self', (287, 32, 287, 50): '"""visibility_state"""'}, {}), "(self, 'visibility_state')", False, 'import pulumi\n'), ((291, 8, 291, 51), 'pulumi.set', 'pulumi.set', ({(291, 19, 291, 23): 'self', (291, 25, 291, 43): '"""visibility_state"""', (291, 45, 291, 50): 'value'}, {}), "(self, 'visibility_state', value)", False, 'import pulumi\n'), ((423, 15, 423, 61), 'pulumi.ResourceOptions.merge', 'pulumi.ResourceOptions.merge', ({(423, 44, 423, 48): 'opts', (423, 50, 423, 60): 'alias_opts'}, {}), '(opts, alias_opts)', False, 'import pulumi\n'), ((475, 15, 475, 68), 'pulumi.get', 'pulumi.get', ({(475, 26, 475, 30): 'self', (475, 32, 475, 67): '"""authorized_azure_subscription_ids"""'}, {}), "(self, 'authorized_azure_subscription_ids')", False, 'import pulumi\n'), ((483, 15, 483, 55), 'pulumi.get', 'pulumi.get', ({(483, 26, 483, 30): 'self', (483, 32, 483, 54): '"""customer_service_uri"""'}, {}), "(self, 'customer_service_uri')", False, 'import pulumi\n'), ((491, 15, 491, 43), 'pulumi.get', 'pulumi.get', ({(491, 26, 491, 30): 'self', (491, 32, 491, 42): '"""location"""'}, {}), "(self, 'location')", False, 'import pulumi\n'), ((499, 15, 499, 43), 'pulumi.get', 'pulumi.get', ({(499, 26, 499, 30): 'self', (499, 32, 499, 42): '"""logo_uri"""'}, {}), "(self, 'logo_uri')", False, 'import pulumi\n'), ((508, 15, 508, 51), 'pulumi.get', 'pulumi.get', ({(508, 26, 508, 30): 'self', (508, 
32, 508, 50): '"""long_description"""'}, {}), "(self, 'long_description')", False, 'import pulumi\n'), ((516, 15, 516, 39), 'pulumi.get', 'pulumi.get', ({(516, 26, 516, 30): 'self', (516, 32, 516, 38): '"""name"""'}, {}), "(self, 'name')", False, 'import pulumi\n'), ((524, 15, 524, 69), 'pulumi.get', 'pulumi.get', ({(524, 26, 524, 30): 'self', (524, 32, 524, 68): '"""partner_customer_service_extension"""'}, {}), "(self, 'partner_customer_service_extension')", False, 'import pulumi\n'), ((535, 15, 535, 66), 'pulumi.get', 'pulumi.get', ({(535, 26, 535, 30): 'self', (535, 32, 535, 65): '"""partner_customer_service_number"""'}, {}), "(self, 'partner_customer_service_number')", False, 'import pulumi\n'), ((543, 15, 543, 47), 'pulumi.get', 'pulumi.get', ({(543, 26, 543, 30): 'self', (543, 32, 543, 46): '"""partner_name"""'}, {}), "(self, 'partner_name')", False, 'import pulumi\n'), ((551, 15, 551, 68), 'pulumi.get', 'pulumi.get', ({(551, 26, 551, 30): 'self', (551, 32, 551, 67): '"""partner_resource_type_description"""'}, {}), "(self, 'partner_resource_type_description')", False, 'import pulumi\n'), ((559, 15, 559, 69), 'pulumi.get', 'pulumi.get', ({(559, 26, 559, 30): 'self', (559, 32, 559, 68): '"""partner_resource_type_display_name"""'}, {}), "(self, 'partner_resource_type_display_name')", False, 'import pulumi\n'), ((567, 15, 567, 61), 'pulumi.get', 'pulumi.get', ({(567, 26, 567, 30): 'self', (567, 32, 567, 60): '"""partner_resource_type_name"""'}, {}), "(self, 'partner_resource_type_name')", False, 'import pulumi\n'), ((575, 15, 575, 53), 'pulumi.get', 'pulumi.get', ({(575, 26, 575, 30): 'self', (575, 32, 575, 52): '"""provisioning_state"""'}, {}), "(self, 'provisioning_state')", False, 'import pulumi\n'), ((584, 15, 584, 44), 'pulumi.get', 'pulumi.get', ({(584, 26, 584, 30): 'self', (584, 32, 584, 43): '"""setup_uri"""'}, {}), "(self, 'setup_uri')", False, 'import pulumi\n'), ((592, 15, 592, 46), 'pulumi.get', 'pulumi.get', ({(592, 26, 592, 30): 'self', (592, 32, 592, 45): '"""system_data"""'}, {}), "(self, 'system_data')", False, 'import pulumi\n'), ((600, 15, 600, 39), 'pulumi.get', 'pulumi.get', ({(600, 26, 600, 30): 'self', (600, 32, 600, 38): '"""tags"""'}, {}), "(self, 'tags')", False, 'import pulumi\n'), ((608, 15, 608, 39), 'pulumi.get', 'pulumi.get', ({(608, 26, 608, 30): 'self', (608, 32, 608, 38): '"""type"""'}, {}), "(self, 'type')", False, 'import pulumi\n'), ((616, 15, 616, 51), 'pulumi.get', 'pulumi.get', ({(616, 26, 616, 30): 'self', (616, 32, 616, 50): '"""visibility_state"""'}, {}), "(self, 'visibility_state')", False, 'import pulumi\n'), ((63, 12, 63, 104), 'pulumi.set', 'pulumi.set', ({(63, 23, 63, 31): '__self__', (63, 33, 63, 68): '"""authorized_azure_subscription_ids"""', (63, 70, 63, 103): 'authorized_azure_subscription_ids'}, {}), "(__self__, 'authorized_azure_subscription_ids',\n authorized_azure_subscription_ids)", False, 'import pulumi\n'), ((65, 12, 65, 78), 'pulumi.set', 'pulumi.set', ({(65, 23, 65, 31): '__self__', (65, 33, 65, 55): '"""customer_service_uri"""', (65, 57, 65, 77): 'customer_service_uri'}, {}), "(__self__, 'customer_service_uri', customer_service_uri)", False, 'import pulumi\n'), ((67, 12, 67, 54), 'pulumi.set', 'pulumi.set', ({(67, 23, 67, 31): '__self__', (67, 33, 67, 43): '"""location"""', (67, 45, 67, 53): 'location'}, {}), "(__self__, 'location', location)", False, 'import pulumi\n'), ((69, 12, 69, 54), 'pulumi.set', 'pulumi.set', ({(69, 23, 69, 31): '__self__', (69, 33, 69, 43): '"""logo_uri"""', (69, 45, 69, 53): 'logo_uri'}, {}), 
"(__self__, 'logo_uri', logo_uri)", False, 'import pulumi\n'), ((71, 12, 71, 70), 'pulumi.set', 'pulumi.set', ({(71, 23, 71, 31): '__self__', (71, 33, 71, 51): '"""long_description"""', (71, 53, 71, 69): 'long_description'}, {}), "(__self__, 'long_description', long_description)", False, 'import pulumi\n'), ((73, 12, 73, 106), 'pulumi.set', 'pulumi.set', ({(73, 23, 73, 31): '__self__', (73, 33, 73, 69): '"""partner_customer_service_extension"""', (73, 71, 73, 105): 'partner_customer_service_extension'}, {}), "(__self__, 'partner_customer_service_extension',\n partner_customer_service_extension)", False, 'import pulumi\n'), ((75, 12, 75, 100), 'pulumi.set', 'pulumi.set', ({(75, 23, 75, 31): '__self__', (75, 33, 75, 66): '"""partner_customer_service_number"""', (75, 68, 75, 99): 'partner_customer_service_number'}, {}), "(__self__, 'partner_customer_service_number',\n partner_customer_service_number)", False, 'import pulumi\n'), ((77, 12, 77, 62), 'pulumi.set', 'pulumi.set', ({(77, 23, 77, 31): '__self__', (77, 33, 77, 47): '"""partner_name"""', (77, 49, 77, 61): 'partner_name'}, {}), "(__self__, 'partner_name', partner_name)", False, 'import pulumi\n'), ((79, 12, 79, 88), 'pulumi.set', 'pulumi.set', ({(79, 23, 79, 31): '__self__', (79, 33, 79, 60): '"""partner_registration_name"""', (79, 62, 79, 87): 'partner_registration_name'}, {}), "(__self__, 'partner_registration_name', partner_registration_name)", False, 'import pulumi\n'), ((81, 12, 81, 104), 'pulumi.set', 'pulumi.set', ({(81, 23, 81, 31): '__self__', (81, 33, 81, 68): '"""partner_resource_type_description"""', (81, 70, 81, 103): 'partner_resource_type_description'}, {}), "(__self__, 'partner_resource_type_description',\n partner_resource_type_description)", False, 'import pulumi\n'), ((83, 12, 83, 106), 'pulumi.set', 'pulumi.set', ({(83, 23, 83, 31): '__self__', (83, 33, 83, 69): '"""partner_resource_type_display_name"""', (83, 71, 83, 105): 'partner_resource_type_display_name'}, {}), "(__self__, 'partner_resource_type_display_name',\n partner_resource_type_display_name)", False, 'import pulumi\n'), ((85, 12, 85, 90), 'pulumi.set', 'pulumi.set', ({(85, 23, 85, 31): '__self__', (85, 33, 85, 61): '"""partner_resource_type_name"""', (85, 63, 85, 89): 'partner_resource_type_name'}, {}), "(__self__, 'partner_resource_type_name', partner_resource_type_name)", False, 'import pulumi\n'), ((87, 12, 87, 56), 'pulumi.set', 'pulumi.set', ({(87, 23, 87, 31): '__self__', (87, 33, 87, 44): '"""setup_uri"""', (87, 46, 87, 55): 'setup_uri'}, {}), "(__self__, 'setup_uri', setup_uri)", False, 'import pulumi\n'), ((89, 12, 89, 46), 'pulumi.set', 'pulumi.set', ({(89, 23, 89, 31): '__self__', (89, 33, 89, 39): '"""tags"""', (89, 41, 89, 45): 'tags'}, {}), "(__self__, 'tags', tags)", False, 'import pulumi\n'), ((91, 12, 91, 70), 'pulumi.set', 'pulumi.set', ({(91, 23, 91, 31): '__self__', (91, 33, 91, 51): '"""visibility_state"""', (91, 53, 91, 69): 'visibility_state'}, {}), "(__self__, 'visibility_state', visibility_state)", False, 'import pulumi\n'), ((390, 19, 390, 43), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ({}, {}), '()', False, 'import pulumi\n'), ((442, 50, 442, 79), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', (), '', False, 'import pulumi\n'), ((422, 53, 422, 118), 'pulumi.Alias', 'pulumi.Alias', (), '', False, 'import pulumi\n'), ((422, 120, 422, 201), 'pulumi.Alias', 'pulumi.Alias', (), '', False, 'import pulumi\n'), ((422, 203, 422, 285), 'pulumi.Alias', 'pulumi.Alias', (), '', False, 'import pulumi\n'), ((422, 287, 422, 
368), 'pulumi.Alias', 'pulumi.Alias', (), '', False, 'import pulumi\n'), ((422, 370, 422, 452), 'pulumi.Alias', 'pulumi.Alias', (), '', False, 'import pulumi\n')] |
TomKingsfordUoA/ResidualMaskingNetwork | _ar/masking_provement.py | 6ce5ddf70f8ac8f1e6da2746b0bbeb9e457ceb7d | import os
import glob
import cv2
import numpy as np
import torch
from torchvision.transforms import transforms
from natsort import natsorted
from models import resmasking_dropout1
from utils.datasets.fer2013dataset import EMOTION_DICT
from barez import show
transform = transforms.Compose(
[
transforms.ToPILImage(),
transforms.ToTensor(),
]
)
def activations_mask(tensor):
tensor = torch.squeeze(tensor, 0)
tensor = torch.mean(tensor, 0)
tensor = tensor.detach().cpu().numpy()
tensor = np.maximum(tensor, 0)
tensor = cv2.resize(tensor, (224, 224))
tensor = tensor - np.min(tensor)
tensor = tensor / np.max(tensor)
heatmap = cv2.applyColorMap(np.uint8(255 * tensor), cv2.COLORMAP_JET)
return heatmap
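# Illustrative use of activations_mask (shapes assumed from the layers below,
# added for clarity): a (1, C, 14, 14) activation from layer3 becomes a
# (224, 224, 3) BGR heatmap that can sit next to the 224x224 input image:
#     heat = activations_mask(x)                        # x: (1, C, H, W) tensor
#     side_by_side = np.concatenate((image, heat), axis=1)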
model = resmasking_dropout1(3, 7)
# state = torch.load('./saved/checkpoints/resmasking_dropout1_rot30_2019Nov17_14.33')
state = torch.load("./saved/checkpoints/Z_resmasking_dropout1_rot30_2019Nov30_13.32")
model.load_state_dict(state["net"])
model.cuda()
model.eval()
for image_path in natsorted(
glob.glob("/home/z/research/bkemo/images/**/*.png", recursive=True)
):
image_name = os.path.basename(image_path)
print(image_name)
# image_path = '/home/z/research/bkemo/images/disgust/0.0_dc10a3_1976_0.png'
image = cv2.imread(image_path)
image = cv2.resize(image, (224, 224))
tensor = transform(image)
tensor = torch.unsqueeze(tensor, 0)
tensor = tensor.cuda()
# output = model(tensor)
x = model.conv1(tensor) # 112
x = model.bn1(x)
x = model.relu(x)
x = model.maxpool(x) # 56
x = model.layer1(x) # 56
m = model.mask1(x)
x = x * (1 + m)
x = model.layer2(x) # 28
m = model.mask2(x)
x = x * (1 + m)
x = model.layer3(x) # 14
heat_1 = activations_mask(x)
m = model.mask3(x)
x = x * (1 + m)
# heat_2 = activations_mask(m)
x = model.layer4(x) # 7
m = model.mask4(x)
x = x * (1 + m)
x = model.avgpool(x)
x = torch.flatten(x, 1)
output = model.fc(x)
# print(np.sum(heat_1 - heat_2))
# show(np.concatenate((image, heat_1, heat_2), axis=1))
cv2.imwrite(
"./masking_provements/{}".format(image_name),
np.concatenate((image, heat_1), axis=1),
)
# np.concatenate((image, heat_1, heat_2), axis=1))
# output = output.cpu().numpy()
# print(EMOTION_DICT[torch.argmax(output, 1).item()])
| [((33, 8, 33, 33), 'models.resmasking_dropout1', 'resmasking_dropout1', ({(33, 28, 33, 29): '3', (33, 31, 33, 32): '7'}, {}), '(3, 7)', False, 'from models import resmasking_dropout1\n'), ((35, 8, 35, 85), 'torch.load', 'torch.load', ({(35, 19, 35, 84): '"""./saved/checkpoints/Z_resmasking_dropout1_rot30_2019Nov30_13.32"""'}, {}), "('./saved/checkpoints/Z_resmasking_dropout1_rot30_2019Nov30_13.32')", False, 'import torch\n'), ((21, 13, 21, 37), 'torch.squeeze', 'torch.squeeze', ({(21, 27, 21, 33): 'tensor', (21, 35, 21, 36): '0'}, {}), '(tensor, 0)', False, 'import torch\n'), ((22, 13, 22, 34), 'torch.mean', 'torch.mean', ({(22, 24, 22, 30): 'tensor', (22, 32, 22, 33): '0'}, {}), '(tensor, 0)', False, 'import torch\n'), ((24, 13, 24, 34), 'numpy.maximum', 'np.maximum', ({(24, 24, 24, 30): 'tensor', (24, 32, 24, 33): '0'}, {}), '(tensor, 0)', True, 'import numpy as np\n'), ((25, 13, 25, 43), 'cv2.resize', 'cv2.resize', ({(25, 24, 25, 30): 'tensor', (25, 32, 25, 42): '(224, 224)'}, {}), '(tensor, (224, 224))', False, 'import cv2\n'), ((41, 4, 41, 71), 'glob.glob', 'glob.glob', (), '', False, 'import glob\n'), ((43, 17, 43, 45), 'os.path.basename', 'os.path.basename', ({(43, 34, 43, 44): 'image_path'}, {}), '(image_path)', False, 'import os\n'), ((46, 12, 46, 34), 'cv2.imread', 'cv2.imread', ({(46, 23, 46, 33): 'image_path'}, {}), '(image_path)', False, 'import cv2\n'), ((47, 12, 47, 41), 'cv2.resize', 'cv2.resize', ({(47, 23, 47, 28): 'image', (47, 30, 47, 40): '(224, 224)'}, {}), '(image, (224, 224))', False, 'import cv2\n'), ((49, 13, 49, 39), 'torch.unsqueeze', 'torch.unsqueeze', ({(49, 29, 49, 35): 'tensor', (49, 37, 49, 38): '0'}, {}), '(tensor, 0)', False, 'import torch\n'), ((78, 8, 78, 27), 'torch.flatten', 'torch.flatten', ({(78, 22, 78, 23): 'x', (78, 25, 78, 26): '1'}, {}), '(x, 1)', False, 'import torch\n'), ((14, 8, 14, 31), 'torchvision.transforms.transforms.ToPILImage', 'transforms.ToPILImage', ({}, {}), '()', False, 'from torchvision.transforms import transforms\n'), ((15, 8, 15, 29), 'torchvision.transforms.transforms.ToTensor', 'transforms.ToTensor', ({}, {}), '()', False, 'from torchvision.transforms import transforms\n'), ((26, 22, 26, 36), 'numpy.min', 'np.min', ({(26, 29, 26, 35): 'tensor'}, {}), '(tensor)', True, 'import numpy as np\n'), ((27, 22, 27, 36), 'numpy.max', 'np.max', ({(27, 29, 27, 35): 'tensor'}, {}), '(tensor)', True, 'import numpy as np\n'), ((29, 32, 29, 54), 'numpy.uint8', 'np.uint8', ({(29, 41, 29, 53): '255 * tensor'}, {}), '(255 * tensor)', True, 'import numpy as np\n'), ((87, 8, 87, 47), 'numpy.concatenate', 'np.concatenate', (), '', True, 'import numpy as np\n')] |
Kauan677/Projetos-Python | Python/Gerenciador de pagamentos.py | 62f6b476e6d250d9ff31c95808b31ebd3ab4fdbb | import time
def gerenciador_de_pagamento():
    preço = float(str(input('Purchase total: R$')))
    print('''Payment options:
[ 1 ] Cash/check up front: 10% discount.
[ 2 ] Card, paid in full: 5% discount.
[ 3 ] Card, up to 2 installments: regular price.
[ 4 ] Card, 3 or more installments: 20% interest.''')
    opção = int(input('Payment option: '))
    print('processing...')
    time.sleep(2)
    if opção == 1:
        print('You get a 10% discount!')
        print(f'So the purchase costs R${preço - (preço * 10 / 100):.2f}.')
    elif opção == 2:
        print('You get a 5% discount!')
        print(f'So the purchase costs R${preço - (preço * 5 / 100):.2f}.')
    elif opção == 3:
        print(f'The purchase comes to 2 installments of R${preço / 2:.2f}.')
        print(f'So it stays at the regular price of R${preço:.2f}.')
    elif opção == 4:
        parcelas = int(input('How many installments: '))
        if parcelas >= 3:
            print('Purchase with 20% interest.')
            print(f'The purchase comes to {parcelas} installments of R${(preço + (preço * 20 / 100)) / parcelas:.2f}.')
            print(f'So the purchase costs R${preço + (preço * 20 / 100):.2f} in total.')
        else:
            print('Installment count not understood, TRY AGAIN...')
            return gerenciador_de_pagamento()
    else:
        print('Option not understood, TRY AGAIN...')
        return gerenciador_de_pagamento()
return opção
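# Worked example (numbers assumed, not from the original): for preço = 100.00
# and option 4 with 4 installments, the 20% interest gives a total of
# 100 + 100 * 20 / 100 = 120.00, i.e. 4 installments of 120 / 4 = 30.00.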
while True:
    gerenciador_de_pagamento()
    consulta = str(input('Run another purchase? '))
    if consulta in ['sim', 'Sim', 'SIM', 'yes', 'Yes', 'YES']:
        pass
    else:
        break
| [((12, 4, 12, 17), 'time.sleep', 'time.sleep', ({(12, 15, 12, 16): '(2)'}, {}), '(2)', False, 'import time\n')] |
seoss/scs_core | src/scs_core/osio/data/abstract_topic.py | 0d4323c5697a39eb44a887f179ba5dca3716c1d2 | """
Created on 2 Apr 2017
@author: Bruno Beloff ([email protected])
"""
from collections import OrderedDict
from scs_core.data.json import JSONable
# --------------------------------------------------------------------------------------------------------------------
class AbstractTopic(JSONable):
"""
classdocs
"""
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, path, name, description, is_public, info):
"""
Constructor
"""
self.__path = path # string
self.__name = name # string
self.__description = description # string
self.__is_public = is_public # bool
self.__info = info # TopicInfo
# ----------------------------------------------------------------------------------------------------------------
def as_json(self):
jdict = OrderedDict()
if self.path is not None:
jdict['topic'] = self.path
jdict['name'] = self.name
jdict['description'] = self.description
jdict['public'] = self.is_public
jdict['topic-info'] = self.info
return jdict
# ----------------------------------------------------------------------------------------------------------------
@property
def path(self):
return self.__path
@property
def name(self):
return self.__name
@property
def description(self):
return self.__description
@property
def is_public(self):
return self.__is_public
@property
def info(self):
return self.__info
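# --------------------------------------------------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): AbstractTopic is
# abstract, so a throwaway concrete subclass is assumed here purely to show
# as_json(); a real TopicInfo instance would normally fill 'topic-info'.
if __name__ == '__main__':
    class ExampleTopic(AbstractTopic):
        pass
    topic = ExampleTopic('/orgs/example/topic', 'example', 'an example topic', True, None)
    print(topic.as_json())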
| [((37, 16, 37, 29), 'collections.OrderedDict', 'OrderedDict', ({}, {}), '()', False, 'from collections import OrderedDict\n')] |
pulumi-bot/pulumi-azure-native | sdk/python/pulumi_azure_native/notificationhubs/latest/get_namespace.py | f7b9490b5211544318e455e5cceafe47b628e12c | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetNamespaceResult',
'AwaitableGetNamespaceResult',
'get_namespace',
]
warnings.warn("""The 'latest' version is deprecated. Please migrate to the function in the top-level module: 'azure-native:notificationhubs:getNamespace'.""", DeprecationWarning)
@pulumi.output_type
class GetNamespaceResult:
"""
Description of a Namespace resource.
"""
def __init__(__self__, created_at=None, critical=None, data_center=None, enabled=None, id=None, location=None, metric_id=None, name=None, namespace_type=None, provisioning_state=None, region=None, scale_unit=None, service_bus_endpoint=None, sku=None, status=None, subscription_id=None, tags=None, type=None, updated_at=None):
if created_at and not isinstance(created_at, str):
raise TypeError("Expected argument 'created_at' to be a str")
pulumi.set(__self__, "created_at", created_at)
if critical and not isinstance(critical, bool):
raise TypeError("Expected argument 'critical' to be a bool")
pulumi.set(__self__, "critical", critical)
if data_center and not isinstance(data_center, str):
raise TypeError("Expected argument 'data_center' to be a str")
pulumi.set(__self__, "data_center", data_center)
if enabled and not isinstance(enabled, bool):
raise TypeError("Expected argument 'enabled' to be a bool")
pulumi.set(__self__, "enabled", enabled)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if metric_id and not isinstance(metric_id, str):
raise TypeError("Expected argument 'metric_id' to be a str")
pulumi.set(__self__, "metric_id", metric_id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if namespace_type and not isinstance(namespace_type, str):
raise TypeError("Expected argument 'namespace_type' to be a str")
pulumi.set(__self__, "namespace_type", namespace_type)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if region and not isinstance(region, str):
raise TypeError("Expected argument 'region' to be a str")
pulumi.set(__self__, "region", region)
if scale_unit and not isinstance(scale_unit, str):
raise TypeError("Expected argument 'scale_unit' to be a str")
pulumi.set(__self__, "scale_unit", scale_unit)
if service_bus_endpoint and not isinstance(service_bus_endpoint, str):
raise TypeError("Expected argument 'service_bus_endpoint' to be a str")
pulumi.set(__self__, "service_bus_endpoint", service_bus_endpoint)
if sku and not isinstance(sku, dict):
raise TypeError("Expected argument 'sku' to be a dict")
pulumi.set(__self__, "sku", sku)
if status and not isinstance(status, str):
raise TypeError("Expected argument 'status' to be a str")
pulumi.set(__self__, "status", status)
if subscription_id and not isinstance(subscription_id, str):
raise TypeError("Expected argument 'subscription_id' to be a str")
pulumi.set(__self__, "subscription_id", subscription_id)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if updated_at and not isinstance(updated_at, str):
raise TypeError("Expected argument 'updated_at' to be a str")
pulumi.set(__self__, "updated_at", updated_at)
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> Optional[str]:
"""
The time the namespace was created.
"""
return pulumi.get(self, "created_at")
@property
@pulumi.getter
def critical(self) -> Optional[bool]:
"""
Whether or not the namespace is set as Critical.
"""
return pulumi.get(self, "critical")
@property
@pulumi.getter(name="dataCenter")
def data_center(self) -> Optional[str]:
"""
Data center for the namespace
"""
return pulumi.get(self, "data_center")
@property
@pulumi.getter
def enabled(self) -> Optional[bool]:
"""
Whether or not the namespace is currently enabled.
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
Resource location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="metricId")
def metric_id(self) -> str:
"""
Identifier for Azure Insights metrics
"""
return pulumi.get(self, "metric_id")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="namespaceType")
def namespace_type(self) -> Optional[str]:
"""
The namespace type.
"""
return pulumi.get(self, "namespace_type")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Provisioning state of the Namespace.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def region(self) -> Optional[str]:
"""
Specifies the targeted region in which the namespace should be created. It can be any of the following values: Australia East, Australia Southeast, Central US, East US, East US 2, West US, North Central US, South Central US, East Asia, Southeast Asia, Brazil South, Japan East, Japan West, North Europe, West Europe
"""
return pulumi.get(self, "region")
@property
@pulumi.getter(name="scaleUnit")
def scale_unit(self) -> Optional[str]:
"""
ScaleUnit where the namespace gets created
"""
return pulumi.get(self, "scale_unit")
@property
@pulumi.getter(name="serviceBusEndpoint")
def service_bus_endpoint(self) -> Optional[str]:
"""
Endpoint you can use to perform NotificationHub operations.
"""
return pulumi.get(self, "service_bus_endpoint")
@property
@pulumi.getter
def sku(self) -> Optional['outputs.SkuResponse']:
"""
The sku of the created namespace
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def status(self) -> Optional[str]:
"""
        Status of the namespace. It can be any of these values: 1 = Created/Active, 2 = Creating, 3 = Suspended, 4 = Deleting.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="subscriptionId")
def subscription_id(self) -> Optional[str]:
"""
The Id of the Azure subscription associated with the namespace.
"""
return pulumi.get(self, "subscription_id")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="updatedAt")
def updated_at(self) -> Optional[str]:
"""
The time the namespace was updated.
"""
return pulumi.get(self, "updated_at")
class AwaitableGetNamespaceResult(GetNamespaceResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetNamespaceResult(
created_at=self.created_at,
critical=self.critical,
data_center=self.data_center,
enabled=self.enabled,
id=self.id,
location=self.location,
metric_id=self.metric_id,
name=self.name,
namespace_type=self.namespace_type,
provisioning_state=self.provisioning_state,
region=self.region,
scale_unit=self.scale_unit,
service_bus_endpoint=self.service_bus_endpoint,
sku=self.sku,
status=self.status,
subscription_id=self.subscription_id,
tags=self.tags,
type=self.type,
updated_at=self.updated_at)
def get_namespace(namespace_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNamespaceResult:
"""
Description of a Namespace resource.
Latest API Version: 2017-04-01.
:param str namespace_name: The namespace name.
:param str resource_group_name: The name of the resource group.
"""
pulumi.log.warn("""get_namespace is deprecated: The 'latest' version is deprecated. Please migrate to the function in the top-level module: 'azure-native:notificationhubs:getNamespace'.""")
__args__ = dict()
__args__['namespaceName'] = namespace_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:notificationhubs/latest:getNamespace', __args__, opts=opts, typ=GetNamespaceResult).value
return AwaitableGetNamespaceResult(
created_at=__ret__.created_at,
critical=__ret__.critical,
data_center=__ret__.data_center,
enabled=__ret__.enabled,
id=__ret__.id,
location=__ret__.location,
metric_id=__ret__.metric_id,
name=__ret__.name,
namespace_type=__ret__.namespace_type,
provisioning_state=__ret__.provisioning_state,
region=__ret__.region,
scale_unit=__ret__.scale_unit,
service_bus_endpoint=__ret__.service_bus_endpoint,
sku=__ret__.sku,
status=__ret__.status,
subscription_id=__ret__.subscription_id,
tags=__ret__.tags,
type=__ret__.type,
updated_at=__ret__.updated_at)
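# Hedged usage sketch (resource names assumed, not part of the generated file):
#     ns = get_namespace(namespace_name='my-namespace',
#                        resource_group_name='my-resource-group')
#     pulumi.export('serviceBusEndpoint', ns.service_bus_endpoint)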
| [((18, 0, 18, 178), 'warnings.warn', 'warnings.warn', ({(18, 14, 18, 157): '"""The \'latest\' version is deprecated. Please migrate to the function in the top-level module: \'azure-native:notificationhubs:getNamespace\'."""', (18, 159, 18, 177): 'DeprecationWarning'}, {}), '(\n "The \'latest\' version is deprecated. Please migrate to the function in the top-level module: \'azure-native:notificationhubs:getNamespace\'."\n , DeprecationWarning)', False, 'import warnings\n'), ((85, 5, 85, 36), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((101, 5, 101, 37), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((133, 5, 133, 35), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((149, 5, 149, 40), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((157, 5, 157, 44), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((173, 5, 173, 36), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((181, 5, 181, 45), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((205, 5, 205, 41), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((229, 5, 229, 36), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((275, 4, 275, 193), 'pulumi.log.warn', 'pulumi.log.warn', ({(275, 20, 275, 192): '"""get_namespace is deprecated: The \'latest\' version is deprecated. Please migrate to the function in the top-level module: \'azure-native:notificationhubs:getNamespace\'."""'}, {}), '(\n "get_namespace is deprecated: The \'latest\' version is deprecated. Please migrate to the function in the top-level module: \'azure-native:notificationhubs:getNamespace\'."\n )', False, 'import pulumi\n'), ((28, 8, 28, 54), 'pulumi.set', 'pulumi.set', ({(28, 19, 28, 27): '__self__', (28, 29, 28, 41): '"""created_at"""', (28, 43, 28, 53): 'created_at'}, {}), "(__self__, 'created_at', created_at)", False, 'import pulumi\n'), ((31, 8, 31, 50), 'pulumi.set', 'pulumi.set', ({(31, 19, 31, 27): '__self__', (31, 29, 31, 39): '"""critical"""', (31, 41, 31, 49): 'critical'}, {}), "(__self__, 'critical', critical)", False, 'import pulumi\n'), ((34, 8, 34, 56), 'pulumi.set', 'pulumi.set', ({(34, 19, 34, 27): '__self__', (34, 29, 34, 42): '"""data_center"""', (34, 44, 34, 55): 'data_center'}, {}), "(__self__, 'data_center', data_center)", False, 'import pulumi\n'), ((37, 8, 37, 48), 'pulumi.set', 'pulumi.set', ({(37, 19, 37, 27): '__self__', (37, 29, 37, 38): '"""enabled"""', (37, 40, 37, 47): 'enabled'}, {}), "(__self__, 'enabled', enabled)", False, 'import pulumi\n'), ((40, 8, 40, 38), 'pulumi.set', 'pulumi.set', ({(40, 19, 40, 27): '__self__', (40, 29, 40, 33): '"""id"""', (40, 35, 40, 37): 'id'}, {}), "(__self__, 'id', id)", False, 'import pulumi\n'), ((43, 8, 43, 50), 'pulumi.set', 'pulumi.set', ({(43, 19, 43, 27): '__self__', (43, 29, 43, 39): '"""location"""', (43, 41, 43, 49): 'location'}, {}), "(__self__, 'location', location)", False, 'import pulumi\n'), ((46, 8, 46, 52), 'pulumi.set', 'pulumi.set', ({(46, 19, 46, 27): '__self__', (46, 29, 46, 40): '"""metric_id"""', (46, 42, 46, 51): 'metric_id'}, {}), "(__self__, 'metric_id', metric_id)", False, 'import pulumi\n'), ((49, 8, 49, 42), 'pulumi.set', 'pulumi.set', ({(49, 19, 49, 27): '__self__', (49, 29, 49, 35): '"""name"""', (49, 37, 49, 41): 'name'}, {}), "(__self__, 'name', name)", False, 'import pulumi\n'), ((52, 8, 52, 62), 'pulumi.set', 'pulumi.set', ({(52, 19, 52, 27): '__self__', (52, 29, 52, 45): 
'"""namespace_type"""', (52, 47, 52, 61): 'namespace_type'}, {}), "(__self__, 'namespace_type', namespace_type)", False, 'import pulumi\n'), ((55, 8, 55, 70), 'pulumi.set', 'pulumi.set', ({(55, 19, 55, 27): '__self__', (55, 29, 55, 49): '"""provisioning_state"""', (55, 51, 55, 69): 'provisioning_state'}, {}), "(__self__, 'provisioning_state', provisioning_state)", False, 'import pulumi\n'), ((58, 8, 58, 46), 'pulumi.set', 'pulumi.set', ({(58, 19, 58, 27): '__self__', (58, 29, 58, 37): '"""region"""', (58, 39, 58, 45): 'region'}, {}), "(__self__, 'region', region)", False, 'import pulumi\n'), ((61, 8, 61, 54), 'pulumi.set', 'pulumi.set', ({(61, 19, 61, 27): '__self__', (61, 29, 61, 41): '"""scale_unit"""', (61, 43, 61, 53): 'scale_unit'}, {}), "(__self__, 'scale_unit', scale_unit)", False, 'import pulumi\n'), ((64, 8, 64, 74), 'pulumi.set', 'pulumi.set', ({(64, 19, 64, 27): '__self__', (64, 29, 64, 51): '"""service_bus_endpoint"""', (64, 53, 64, 73): 'service_bus_endpoint'}, {}), "(__self__, 'service_bus_endpoint', service_bus_endpoint)", False, 'import pulumi\n'), ((67, 8, 67, 40), 'pulumi.set', 'pulumi.set', ({(67, 19, 67, 27): '__self__', (67, 29, 67, 34): '"""sku"""', (67, 36, 67, 39): 'sku'}, {}), "(__self__, 'sku', sku)", False, 'import pulumi\n'), ((70, 8, 70, 46), 'pulumi.set', 'pulumi.set', ({(70, 19, 70, 27): '__self__', (70, 29, 70, 37): '"""status"""', (70, 39, 70, 45): 'status'}, {}), "(__self__, 'status', status)", False, 'import pulumi\n'), ((73, 8, 73, 64), 'pulumi.set', 'pulumi.set', ({(73, 19, 73, 27): '__self__', (73, 29, 73, 46): '"""subscription_id"""', (73, 48, 73, 63): 'subscription_id'}, {}), "(__self__, 'subscription_id', subscription_id)", False, 'import pulumi\n'), ((76, 8, 76, 42), 'pulumi.set', 'pulumi.set', ({(76, 19, 76, 27): '__self__', (76, 29, 76, 35): '"""tags"""', (76, 37, 76, 41): 'tags'}, {}), "(__self__, 'tags', tags)", False, 'import pulumi\n'), ((79, 8, 79, 42), 'pulumi.set', 'pulumi.set', ({(79, 19, 79, 27): '__self__', (79, 29, 79, 35): '"""type"""', (79, 37, 79, 41): 'type'}, {}), "(__self__, 'type', type)", False, 'import pulumi\n'), ((82, 8, 82, 54), 'pulumi.set', 'pulumi.set', ({(82, 19, 82, 27): '__self__', (82, 29, 82, 41): '"""updated_at"""', (82, 43, 82, 53): 'updated_at'}, {}), "(__self__, 'updated_at', updated_at)", False, 'import pulumi\n'), ((90, 15, 90, 45), 'pulumi.get', 'pulumi.get', ({(90, 26, 90, 30): 'self', (90, 32, 90, 44): '"""created_at"""'}, {}), "(self, 'created_at')", False, 'import pulumi\n'), ((98, 15, 98, 43), 'pulumi.get', 'pulumi.get', ({(98, 26, 98, 30): 'self', (98, 32, 98, 42): '"""critical"""'}, {}), "(self, 'critical')", False, 'import pulumi\n'), ((106, 15, 106, 46), 'pulumi.get', 'pulumi.get', ({(106, 26, 106, 30): 'self', (106, 32, 106, 45): '"""data_center"""'}, {}), "(self, 'data_center')", False, 'import pulumi\n'), ((114, 15, 114, 42), 'pulumi.get', 'pulumi.get', ({(114, 26, 114, 30): 'self', (114, 32, 114, 41): '"""enabled"""'}, {}), "(self, 'enabled')", False, 'import pulumi\n'), ((122, 15, 122, 37), 'pulumi.get', 'pulumi.get', ({(122, 26, 122, 30): 'self', (122, 32, 122, 36): '"""id"""'}, {}), "(self, 'id')", False, 'import pulumi\n'), ((130, 15, 130, 43), 'pulumi.get', 'pulumi.get', ({(130, 26, 130, 30): 'self', (130, 32, 130, 42): '"""location"""'}, {}), "(self, 'location')", False, 'import pulumi\n'), ((138, 15, 138, 44), 'pulumi.get', 'pulumi.get', ({(138, 26, 138, 30): 'self', (138, 32, 138, 43): '"""metric_id"""'}, {}), "(self, 'metric_id')", False, 'import pulumi\n'), ((146, 15, 146, 39), 
'pulumi.get', 'pulumi.get', ({(146, 26, 146, 30): 'self', (146, 32, 146, 38): '"""name"""'}, {}), "(self, 'name')", False, 'import pulumi\n'), ((154, 15, 154, 49), 'pulumi.get', 'pulumi.get', ({(154, 26, 154, 30): 'self', (154, 32, 154, 48): '"""namespace_type"""'}, {}), "(self, 'namespace_type')", False, 'import pulumi\n'), ((162, 15, 162, 53), 'pulumi.get', 'pulumi.get', ({(162, 26, 162, 30): 'self', (162, 32, 162, 52): '"""provisioning_state"""'}, {}), "(self, 'provisioning_state')", False, 'import pulumi\n'), ((170, 15, 170, 41), 'pulumi.get', 'pulumi.get', ({(170, 26, 170, 30): 'self', (170, 32, 170, 40): '"""region"""'}, {}), "(self, 'region')", False, 'import pulumi\n'), ((178, 15, 178, 45), 'pulumi.get', 'pulumi.get', ({(178, 26, 178, 30): 'self', (178, 32, 178, 44): '"""scale_unit"""'}, {}), "(self, 'scale_unit')", False, 'import pulumi\n'), ((186, 15, 186, 55), 'pulumi.get', 'pulumi.get', ({(186, 26, 186, 30): 'self', (186, 32, 186, 54): '"""service_bus_endpoint"""'}, {}), "(self, 'service_bus_endpoint')", False, 'import pulumi\n'), ((194, 15, 194, 38), 'pulumi.get', 'pulumi.get', ({(194, 26, 194, 30): 'self', (194, 32, 194, 37): '"""sku"""'}, {}), "(self, 'sku')", False, 'import pulumi\n'), ((202, 15, 202, 41), 'pulumi.get', 'pulumi.get', ({(202, 26, 202, 30): 'self', (202, 32, 202, 40): '"""status"""'}, {}), "(self, 'status')", False, 'import pulumi\n'), ((210, 15, 210, 50), 'pulumi.get', 'pulumi.get', ({(210, 26, 210, 30): 'self', (210, 32, 210, 49): '"""subscription_id"""'}, {}), "(self, 'subscription_id')", False, 'import pulumi\n'), ((218, 15, 218, 39), 'pulumi.get', 'pulumi.get', ({(218, 26, 218, 30): 'self', (218, 32, 218, 38): '"""tags"""'}, {}), "(self, 'tags')", False, 'import pulumi\n'), ((226, 15, 226, 39), 'pulumi.get', 'pulumi.get', ({(226, 26, 226, 30): 'self', (226, 32, 226, 38): '"""type"""'}, {}), "(self, 'type')", False, 'import pulumi\n'), ((234, 15, 234, 45), 'pulumi.get', 'pulumi.get', ({(234, 26, 234, 30): 'self', (234, 32, 234, 44): '"""updated_at"""'}, {}), "(self, 'updated_at')", False, 'import pulumi\n'), ((280, 15, 280, 37), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ({}, {}), '()', False, 'import pulumi\n'), ((283, 14, 283, 133), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (), '', False, 'import pulumi\n')] |
naren-m/chue | chue/utils.py | 6f77ad990c911353524c5c99bcf6e30155edaf97 | import json
from pygments import highlight
from pygments.lexers import JsonLexer
from pygments.formatters import TerminalFormatter
def print_json_obj(json_object):
json_str = json.dumps(json_object, indent=4, sort_keys=True)
print(highlight(json_str, JsonLexer(), TerminalFormatter()))
def print_json_str(json_str):
print(highlight(json_str, JsonLexer(), TerminalFormatter()))
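if __name__ == "__main__":
    # Minimal demo (sample payload assumed, not part of the original module):
    # both helpers print the same syntax-highlighted JSON on an ANSI terminal.
    print_json_obj({"name": "hue", "lights": [1, 2, 3]})
    print_json_str('{"name": "hue", "lights": [1, 2, 3]}')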
| [((8, 15, 8, 64), 'json.dumps', 'json.dumps', (), '', False, 'import json\n'), ((9, 30, 9, 41), 'pygments.lexers.JsonLexer', 'JsonLexer', ({}, {}), '()', False, 'from pygments.lexers import JsonLexer\n'), ((9, 43, 9, 62), 'pygments.formatters.TerminalFormatter', 'TerminalFormatter', ({}, {}), '()', False, 'from pygments.formatters import TerminalFormatter\n'), ((13, 30, 13, 41), 'pygments.lexers.JsonLexer', 'JsonLexer', ({}, {}), '()', False, 'from pygments.lexers import JsonLexer\n'), ((13, 43, 13, 62), 'pygments.formatters.TerminalFormatter', 'TerminalFormatter', ({}, {}), '()', False, 'from pygments.formatters import TerminalFormatter\n')] |
919bot/Tessa | selfdrive/car/chrysler/radar_interface.py | 9b48ff9020e8fb6992fc78271f2720fd19e01093 | #!/usr/bin/env python3
import os
from opendbc.can.parser import CANParser
from cereal import car
from selfdrive.car.interfaces import RadarInterfaceBase
RADAR_MSGS_C = list(range(0x2c2, 0x2d4+2, 2)) # c_ messages 706,...,724
RADAR_MSGS_D = list(range(0x2a2, 0x2b4+2, 2)) # d_ messages
LAST_MSG = max(RADAR_MSGS_C + RADAR_MSGS_D)
NUMBER_MSGS = len(RADAR_MSGS_C) + len(RADAR_MSGS_D)
def _create_radar_can_parser():
dbc_f = 'chrysler_pacifica_2017_hybrid_private_fusion.dbc'
msg_n = len(RADAR_MSGS_C)
# list of [(signal name, message name or number, initial values), (...)]
# [('RADAR_STATE', 1024, 0),
# ('LONG_DIST', 1072, 255),
# ('LONG_DIST', 1073, 255),
# ('LONG_DIST', 1074, 255),
# ('LONG_DIST', 1075, 255),
# The factor and offset are applied by the dbc parsing library, so the
# default values should be after the factor/offset are applied.
signals = list(zip(['LONG_DIST'] * msg_n +
['LAT_DIST'] * msg_n +
['REL_SPEED'] * msg_n,
RADAR_MSGS_C * 2 + # LONG_DIST, LAT_DIST
RADAR_MSGS_D, # REL_SPEED
[0] * msg_n + # LONG_DIST
[-1000] * msg_n + # LAT_DIST
[-146.278] * msg_n)) # REL_SPEED set to 0, factor/offset to this
# TODO what are the checks actually used for?
# honda only checks the last message,
# toyota checks all the messages. Which do we want?
checks = list(zip(RADAR_MSGS_C +
RADAR_MSGS_D,
[20]*msg_n + # 20Hz (0.05s)
[20]*msg_n)) # 20Hz (0.05s)
return CANParser(os.path.splitext(dbc_f)[0], signals, checks, 1)
def _address_to_track(address):
if address in RADAR_MSGS_C:
return (address - RADAR_MSGS_C[0]) // 2
if address in RADAR_MSGS_D:
return (address - RADAR_MSGS_D[0]) // 2
raise ValueError("radar received unexpected address %d" % address)
class RadarInterface(RadarInterfaceBase):
def __init__(self, CP):
self.pts = {}
self.delay = 0 # Delay of radar #TUNE
self.rcp = _create_radar_can_parser()
self.updated_messages = set()
self.trigger_msg = LAST_MSG
def update(self, can_strings):
vls = self.rcp.update_strings(can_strings)
self.updated_messages.update(vls)
if self.trigger_msg not in self.updated_messages:
return None
ret = car.RadarData.new_message()
errors = []
if not self.rcp.can_valid:
errors.append("canError")
ret.errors = errors
for ii in self.updated_messages: # ii should be the message ID as a number
cpt = self.rcp.vl[ii]
trackId = _address_to_track(ii)
if trackId not in self.pts:
self.pts[trackId] = car.RadarData.RadarPoint.new_message()
self.pts[trackId].trackId = trackId
self.pts[trackId].aRel = float('nan')
self.pts[trackId].yvRel = float('nan')
self.pts[trackId].measured = True
if 'LONG_DIST' in cpt: # c_* message
self.pts[trackId].dRel = cpt['LONG_DIST'] # from front of car
# our lat_dist is positive to the right in car's frame.
# TODO what does yRel want?
self.pts[trackId].yRel = cpt['LAT_DIST'] # in car frame's y axis, left is positive
else: # d_* message
self.pts[trackId].vRel = cpt['REL_SPEED']
# We want a list, not a dictionary. Filter out LONG_DIST==0 because that means it's not valid.
ret.points = [x for x in self.pts.values() if x.dRel != 0]
self.updated_messages.clear()
return ret
| [((64, 10, 64, 37), 'cereal.car.RadarData.new_message', 'car.RadarData.new_message', ({}, {}), '()', False, 'from cereal import car\n'), ((40, 19, 40, 42), 'os.path.splitext', 'os.path.splitext', ({(40, 36, 40, 41): 'dbc_f'}, {}), '(dbc_f)', False, 'import os\n'), ((75, 28, 75, 66), 'cereal.car.RadarData.RadarPoint.new_message', 'car.RadarData.RadarPoint.new_message', ({}, {}), '()', False, 'from cereal import car\n')] |
mattiasljungstrom/fips | mod/tools/ccmake.py | 8775e299f710ae5b977d49dc0672b607f2a10378 | """
wrapper for ccmake command line tool
"""
import subprocess
name = 'ccmake'
platforms = ['linux', 'osx']
optional = True
not_found = "required for 'fips config' functionality"
#-------------------------------------------------------------------------------
def check_exists(fips_dir) :
"""test if ccmake is in the path
:returns: True if ccmake is in the path
"""
try:
out = subprocess.check_output(['ccmake', '--version'])
return True
except (OSError, subprocess.CalledProcessError):
return False
#-------------------------------------------------------------------------------
def run(build_dir) :
"""run ccmake to configure cmake project
:param build_dir: directory where ccmake should run
:returns: True if ccmake returns successful
"""
res = subprocess.call('ccmake .', cwd=build_dir, shell=True)
return res == 0
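#-------------------------------------------------------------------------------
# Hedged usage sketch (paths assumed, not part of the original module):
#     if check_exists(fips_dir='/path/to/fips'):
#         run(build_dir='/path/to/project/fips-build')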
| [((30, 10, 30, 64), 'subprocess.call', 'subprocess.call', (), '', False, 'import subprocess\n'), ((18, 14, 18, 62), 'subprocess.check_output', 'subprocess.check_output', ({(18, 38, 18, 61): "['ccmake', '--version']"}, {}), "(['ccmake', '--version'])", False, 'import subprocess\n')] |
mbartoli/image-quality-assessment | image_quality/handlers/data_generator.py | b957c781ac8a11f8668f58345524f33503338b3b |
import os
import numpy as np
import tensorflow as tf
from image_quality.utils import utils
class TrainDataGenerator(tf.keras.utils.Sequence):
  '''inherits from the Keras Sequence base object; allows use of multiprocessing in .fit_generator'''
def __init__(self, samples, img_dir, batch_size, n_classes, basenet_preprocess,
img_load_dims=(256, 256), img_crop_dims=(224, 224), shuffle=True):
self.samples = samples
self.img_dir = img_dir
self.batch_size = batch_size
self.n_classes = n_classes
self.basenet_preprocess = basenet_preprocess # Keras basenet specific preprocessing function
self.img_load_dims = img_load_dims # dimensions that images get resized into when loaded
self.img_crop_dims = img_crop_dims # dimensions that images get randomly cropped to
self.shuffle = shuffle
self.on_epoch_end() # call ensures that samples are shuffled in first epoch if shuffle is set to True
def __len__(self):
return int(np.ceil(len(self.samples) / self.batch_size)) # number of batches per epoch
def __getitem__(self, index):
batch_indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size] # get batch indexes
batch_samples = [self.samples[i] for i in batch_indexes] # get batch samples
X, y = self.__data_generator(batch_samples)
return X, y
def on_epoch_end(self):
self.indexes = np.arange(len(self.samples))
if self.shuffle is True:
np.random.shuffle(self.indexes)
def __data_generator(self, batch_samples):
# initialize images and labels tensors for faster processing
X = np.empty((len(batch_samples), *self.img_crop_dims, 3))
y = np.empty((len(batch_samples), self.n_classes))
for i, sample in enumerate(batch_samples):
# load and randomly augment image
img_file = os.path.join(self.img_dir, '{}'.format(sample['image_id']))
img = utils.load_image(img_file, self.img_load_dims)
if img is not None:
img = utils.random_crop(img, self.img_crop_dims)
img = utils.random_horizontal_flip(img)
X[i, ] = img
# normalize labels
y[i, ] = utils.normalize_labels(sample['label'])
# apply basenet specific preprocessing
# input is 4D numpy array of RGB values within [0, 255]
X = self.basenet_preprocess(X)
return X, y
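# Hedged usage sketch (sample layout and basenet assumed, not from this file):
#     from tensorflow.keras.applications.mobilenet import preprocess_input
#     samples = [{'image_id': '42.jpg', 'label': [0, 1, 3, 7, 8, 4, 2, 0, 0, 0]}]
#     train_gen = TrainDataGenerator(samples, 'data/images', batch_size=32,
#                                    n_classes=10, basenet_preprocess=preprocess_input)
#     model.fit(train_gen, epochs=5)  # `model` is an assumed compiled Keras model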
class TestDataGenerator(tf.keras.utils.Sequence):
  '''inherits from the Keras Sequence base object; allows use of multiprocessing in .fit_generator'''
def __init__(self, samples, img_dir, batch_size, n_classes, basenet_preprocess,
img_load_dims=(224, 224)):
self.samples = samples
self.img_dir = img_dir
self.batch_size = batch_size
self.n_classes = n_classes
self.basenet_preprocess = basenet_preprocess # Keras basenet specific preprocessing function
self.img_load_dims = img_load_dims # dimensions that images get resized into when loaded
    self.on_epoch_end()  # initializes the index array before the first epoch (no shuffling in the test generator)
def __len__(self):
return int(np.ceil(len(self.samples) / self.batch_size)) # number of batches per epoch
def __getitem__(self, index):
batch_indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size] # get batch indexes
batch_samples = [self.samples[i] for i in batch_indexes] # get batch samples
X, y = self.__data_generator(batch_samples)
return X, y
def on_epoch_end(self):
self.indexes = np.arange(len(self.samples))
def __data_generator(self, batch_samples):
# initialize images and labels tensors for faster processing
X = np.empty((len(batch_samples), *self.img_load_dims, 3))
y = np.empty((len(batch_samples), self.n_classes))
for i, sample in enumerate(batch_samples):
# load and randomly augment image
img_file = os.path.join(self.img_dir, '{}'.format(sample['image_id']))
img = utils.load_image(img_file, self.img_load_dims)
if img is not None:
X[i, ] = img
# normalize labels
if sample.get('label') is not None:
y[i, ] = utils.normalize_labels(sample['label'])
# apply basenet specific preprocessing
# input is 4D numpy array of RGB values within [0, 255]
X = self.basenet_preprocess(X)
return X, y
| [((34, 6, 34, 37), 'numpy.random.shuffle', 'np.random.shuffle', ({(34, 24, 34, 36): 'self.indexes'}, {}), '(self.indexes)', True, 'import numpy as np\n'), ((44, 12, 44, 58), 'image_quality.utils.utils.load_image', 'utils.load_image', ({(44, 29, 44, 37): 'img_file', (44, 39, 44, 57): 'self.img_load_dims'}, {}), '(img_file, self.img_load_dims)', False, 'from image_quality.utils import utils\n'), ((51, 15, 51, 54), 'image_quality.utils.utils.normalize_labels', 'utils.normalize_labels', ({(51, 38, 51, 53): "sample['label']"}, {}), "(sample['label'])", False, 'from image_quality.utils import utils\n'), ((92, 12, 92, 58), 'image_quality.utils.utils.load_image', 'utils.load_image', ({(92, 29, 92, 37): 'img_file', (92, 39, 92, 57): 'self.img_load_dims'}, {}), '(img_file, self.img_load_dims)', False, 'from image_quality.utils import utils\n'), ((46, 14, 46, 56), 'image_quality.utils.utils.random_crop', 'utils.random_crop', ({(46, 32, 46, 35): 'img', (46, 37, 46, 55): 'self.img_crop_dims'}, {}), '(img, self.img_crop_dims)', False, 'from image_quality.utils import utils\n'), ((47, 14, 47, 47), 'image_quality.utils.utils.random_horizontal_flip', 'utils.random_horizontal_flip', ({(47, 43, 47, 46): 'img'}, {}), '(img)', False, 'from image_quality.utils import utils\n'), ((98, 17, 98, 56), 'image_quality.utils.utils.normalize_labels', 'utils.normalize_labels', ({(98, 40, 98, 55): "sample['label']"}, {}), "(sample['label'])", False, 'from image_quality.utils import utils\n')] |
sirken/coding-practice | codewars/4 kyu/strip-comments.py | 9c5e23b2c24f525a89a5e1d15ce3aec3ad1a01ab | from Test import Test, Test as test
'''
Complete the solution so that it strips all text that follows any of a set of comment markers passed in. Any whitespace at the end of the line should also be stripped out.
Example:
Given an input string of:
apples, pears # and bananas
grapes
bananas !apples
The output expected would be:
apples, pears
grapes
bananas
The code would be called like so:
result = solution("apples, pears # and bananas\ngrapes\nbananas !apples", ["#", "!"])
# result should == "apples, pears\ngrapes\nbananas"
'''
# Split by rows, then find earliest marker and extract string before it
def solution(string,markers):
strings = string.split('\n')
l = []
for line in strings:
pos = len(line)
for m in markers:
if m in line:
if line.index(m) < pos:
pos = line.index(m)
l.append(line[:pos].rstrip())
return '\n'.join(l)
# Top solution: split by \n, then truncate each line at every marker
def solution(string,markers):
parts = string.split('\n')
for s in markers:
parts = [v.split(s)[0].rstrip() for v in parts]
return '\n'.join(parts)
# Top solution expanded
def solution(string,markers):
# split by lines
parts = string.split('\n')
# Loop through markers
for s in markers:
# Loop through all lines, check for any markers
# Split by marker, grab first item, and rstrip whitespace
for num, v in enumerate(parts):
parts[num] = v.split(s)[0].rstrip()
return '\n'.join(parts)
Test.assert_equals(solution("apples, pears # and bananas\ngrapes\nbananas !apples", ["#", "!"]), "apples, pears\ngrapes\nbananas")
Test.assert_equals(solution("a #b\nc\nd $e f g", ["#", "$"]), "a\nc\nd")
Test.assert_equals(solution('= - avocados oranges pears cherries\nlemons apples\n- watermelons strawberries', ['#', '?', '=', ',', '.', '-', '!']), '\nlemons apples\n')
| [] |
myQLM/myqlm-interop | qat/interop/qiskit/quantum_channels.py | 9d77cb7c719f82be05d9f88493522940b8142124 | # -*- coding: utf-8 -*-
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
from qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp
import numpy as np
from qat.comm.quops.ttypes import QuantumChannel, RepresentationType
from qat.comm.datamodel.ttypes import Matrix, ComplexNumber
def array_to_matrix(array):
"""
    Transform a two-dimensional numpy array to a myqlm Matrix.
Args:
        array: (ndarray) a two-dimensional numpy array
Returns:
(Matrix): a myqlm Matrix
"""
    assert len(array.shape) == 2, "The array must be two-dimensional"
data = []
for arr in array:
for elem in arr:
data.append(ComplexNumber(np.real(elem), np.imag(elem)))
matri = Matrix(array.shape[0], array.shape[1], data)
return matri
def qiskit_to_qchannel(representation):
"""
    Create a myqlm representation of a quantum channel from a qiskit
    representation of a quantum channel.
Args:
representation: (Kraus|Choi|Chi|SuperOp|PTM) qiskit representation of a quantum channel.
Returns:
(QuantumChannel): myqlm representation of a quantum channel.
"""
qchannel = None
qiskit_data = representation.data
# Find what representation it is.
# Then create the corresponding matrix (kraus_ops|basis|matrix)from the data
# of the representation.
# Finally, create the QuantumChannel with the RepresentationType, the arity
# (got from the qiskit representation) and the matrix.
if isinstance(representation, Kraus):
kraus_ops = []
for arr in qiskit_data:
kraus_ops.append(array_to_matrix(arr))
qchannel = QuantumChannel(
representation=RepresentationType.KRAUS,
arity=representation.num_qubits,
kraus_ops=kraus_ops)
elif isinstance(representation, Chi):
basis = []
basis.append(array_to_matrix(qiskit_data))
qchannel = QuantumChannel(
representation=RepresentationType.CHI,
arity=representation.num_qubits,
basis=basis)
elif isinstance(representation, SuperOp):
basis = []
basis.append(array_to_matrix(qiskit_data))
qchannel = QuantumChannel(
representation=RepresentationType.SUPEROP,
arity=representation.num_qubits,
basis=basis)
elif isinstance(representation, PTM):
matri = array_to_matrix(qiskit_data)
qchannel = QuantumChannel(
representation=RepresentationType.PTM,
arity=representation.num_qubits,
matrix=matri)
elif isinstance(representation, Choi):
matri = array_to_matrix(qiskit_data)
qchannel = QuantumChannel(
representation=RepresentationType.CHOI,
arity=representation.num_qubits,
matrix=matri)
return qchannel
def qchannel_to_qiskit(representation):
"""
    Create a qiskit representation of a quantum channel from a myqlm
    representation of a quantum channel.
Args:
representation: (QuantumChannel) myqlm representation of a quantum channel.
Returns:
(Kraus|Choi|Chi|SuperOp|PTM): qiskit representation of a quantum channel.
"""
rep = representation.representation
# Find what representation it is.
# Then create the corresponding matrix and shape it like qiskit is expecting it.
# Finally, create the qiskit representation from that matrix.
if rep in (RepresentationType.PTM, RepresentationType.CHOI):
matri = representation.matrix
data_re = []
data_im = []
for i in range(matri.nRows):
for j in range(matri.nCols):
data_re.append(matri.data[i * matri.nRows + j].re + 0.j)
data_im.append(matri.data[i * matri.nRows + j].im)
data = np.array(data_re)
data.imag = np.array(data_im)
data = data.reshape((matri.nRows, matri.nCols))
return PTM(data) if (rep == RepresentationType.PTM) else Choi(data)
if rep in (RepresentationType.CHI, RepresentationType.SUPEROP):
final_data = []
for matri in representation.basis:
data_re = []
data_im = []
for i in range(matri.nRows):
for j in range(matri.nCols):
data_re.append(matri.data[i * matri.nRows + j].re + 0.j)
data_im.append(matri.data[i * matri.nRows + j].im)
data = np.array(data_re)
data.imag = np.array(data_im)
data = data.reshape((matri.nRows, matri.nCols))
final_data.append(data)
if rep == RepresentationType.CHI:
return Chi(final_data) if len(final_data) > 1 else Chi(final_data[0])
return SuperOp(final_data) if len(final_data) > 1 else SuperOp(final_data[0])
if rep == RepresentationType.KRAUS:
final_data = []
for matri in representation.kraus_ops:
data_re = []
data_im = []
for i in range(matri.nRows):
for j in range(matri.nCols):
data_re.append(matri.data[i * matri.nRows + j].re + 0.j)
data_im.append(matri.data[i * matri.nRows + j].im)
data = np.array(data_re)
data.imag = np.array(data_im)
data = data.reshape((matri.nRows, matri.nCols))
final_data.append(data)
return Kraus(final_data)
return None
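
# Minimal round-trip self-check sketch (assumes both Qiskit and myQLM are
# installed; the identity PTM below is an arbitrary example, not part of the
# original module).
if __name__ == '__main__':
    identity_ptm = PTM(np.eye(4))  # single-qubit identity channel in PTM form
    roundtrip = qchannel_to_qiskit(qiskit_to_qchannel(identity_ptm))
    assert isinstance(roundtrip, PTM)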
| [((46, 12, 46, 56), 'qat.comm.datamodel.ttypes.Matrix', 'Matrix', ({(46, 19, 46, 33): 'array.shape[0]', (46, 35, 46, 49): 'array.shape[1]', (46, 51, 46, 55): 'data'}, {}), '(array.shape[0], array.shape[1], data)', False, 'from qat.comm.datamodel.ttypes import Matrix, ComplexNumber\n'), ((73, 19, 76, 32), 'qat.comm.quops.ttypes.QuantumChannel', 'QuantumChannel', (), '', False, 'from qat.comm.quops.ttypes import QuantumChannel, RepresentationType\n'), ((130, 15, 130, 32), 'numpy.array', 'np.array', ({(130, 24, 130, 31): 'data_re'}, {}), '(data_re)', True, 'import numpy as np\n'), ((131, 20, 131, 37), 'numpy.array', 'np.array', ({(131, 29, 131, 36): 'data_im'}, {}), '(data_im)', True, 'import numpy as np\n'), ((163, 15, 163, 32), 'qiskit.quantum_info.operators.channel.Kraus', 'Kraus', ({(163, 21, 163, 31): 'final_data'}, {}), '(final_data)', False, 'from qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp\n'), ((80, 19, 83, 24), 'qat.comm.quops.ttypes.QuantumChannel', 'QuantumChannel', (), '', False, 'from qat.comm.quops.ttypes import QuantumChannel, RepresentationType\n'), ((133, 15, 133, 24), 'qiskit.quantum_info.operators.channel.PTM', 'PTM', ({(133, 19, 133, 23): 'data'}, {}), '(data)', False, 'from qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp\n'), ((133, 65, 133, 75), 'qiskit.quantum_info.operators.channel.Choi', 'Choi', ({(133, 70, 133, 74): 'data'}, {}), '(data)', False, 'from qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp\n'), ((143, 19, 143, 36), 'numpy.array', 'np.array', ({(143, 28, 143, 35): 'data_re'}, {}), '(data_re)', True, 'import numpy as np\n'), ((144, 24, 144, 41), 'numpy.array', 'np.array', ({(144, 33, 144, 40): 'data_im'}, {}), '(data_im)', True, 'import numpy as np\n'), ((149, 15, 149, 34), 'qiskit.quantum_info.operators.channel.SuperOp', 'SuperOp', ({(149, 23, 149, 33): 'final_data'}, {}), '(final_data)', False, 'from qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp\n'), ((149, 63, 149, 85), 'qiskit.quantum_info.operators.channel.SuperOp', 'SuperOp', ({(149, 71, 149, 84): 'final_data[0]'}, {}), '(final_data[0])', False, 'from qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp\n'), ((159, 19, 159, 36), 'numpy.array', 'np.array', ({(159, 28, 159, 35): 'data_re'}, {}), '(data_re)', True, 'import numpy as np\n'), ((160, 24, 160, 41), 'numpy.array', 'np.array', ({(160, 33, 160, 40): 'data_im'}, {}), '(data_im)', True, 'import numpy as np\n'), ((87, 19, 90, 24), 'qat.comm.quops.ttypes.QuantumChannel', 'QuantumChannel', (), '', False, 'from qat.comm.quops.ttypes import QuantumChannel, RepresentationType\n'), ((148, 19, 148, 34), 'qiskit.quantum_info.operators.channel.Chi', 'Chi', ({(148, 23, 148, 33): 'final_data'}, {}), '(final_data)', False, 'from qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp\n'), ((148, 63, 148, 81), 'qiskit.quantum_info.operators.channel.Chi', 'Chi', ({(148, 67, 148, 80): 'final_data[0]'}, {}), '(final_data[0])', False, 'from qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp\n'), ((45, 38, 45, 51), 'numpy.real', 'np.real', ({(45, 46, 45, 50): 'elem'}, {}), '(elem)', True, 'import numpy as np\n'), ((45, 53, 45, 66), 'numpy.imag', 'np.imag', ({(45, 61, 45, 65): 'elem'}, {}), '(elem)', True, 'import numpy as np\n'), ((93, 19, 96, 25), 'qat.comm.quops.ttypes.QuantumChannel', 'QuantumChannel', (), '', False, 'from qat.comm.quops.ttypes import QuantumChannel, 
RepresentationType\n'), ((99, 19, 102, 25), 'qat.comm.quops.ttypes.QuantumChannel', 'QuantumChannel', (), '', False, 'from qat.comm.quops.ttypes import QuantumChannel, RepresentationType\n')] |
mshader/mne-nirs | mne_nirs/simulation/_simulation.py | d59a5436d162108226f31b33b194dfecada40d72 | # Authors: Robert Luke <[email protected]>
#
# License: BSD (3-clause)
import numpy as np
from mne import Annotations, create_info
from mne.io import RawArray
def simulate_nirs_raw(sfreq=3., amplitude=1.,
sig_dur=300., stim_dur=5.,
isi_min=15., isi_max=45.):
"""
Create simulated data.
    .. warning:: Work in progress: I am still thinking about the best API.
Parameters
----------
sfreq : Number
The sample rate.
amplitude : Number
The amplitude of the signal to simulate in uM.
sig_dur : Number
The length of the signal to generate in seconds.
stim_dur : Number
The length of the stimulus to generate in seconds.
    isi_min : Number
        The minimum duration of the inter-stimulus interval in seconds.
    isi_max : Number
        The maximum duration of the inter-stimulus interval in seconds.
Returns
-------
raw : instance of Raw
The generated raw instance.
"""
from nilearn.stats.first_level_model import make_first_level_design_matrix
from pandas import DataFrame
frame_times = np.arange(sig_dur * sfreq) / sfreq
onset = 0.
onsets = []
conditions = []
durations = []
while onset < sig_dur - 60:
onset += np.random.uniform(isi_min, isi_max) + stim_dur
onsets.append(onset)
conditions.append("A")
durations.append(stim_dur)
events = DataFrame({'trial_type': conditions,
'onset': onsets,
'duration': durations})
dm = make_first_level_design_matrix(frame_times, events,
drift_model='polynomial',
drift_order=0)
annotations = Annotations(onsets, durations, conditions)
info = create_info(ch_names=['Simulated'], sfreq=sfreq, ch_types=['hbo'])
raw = RawArray(dm[["A"]].to_numpy().T * amplitude * 1.e-6,
info, verbose=False)
raw.set_annotations(annotations)
return raw
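
# Minimal usage sketch (parameter values are arbitrary examples; requires
# mne, nilearn and pandas to be installed):
if __name__ == '__main__':
    simulated = simulate_nirs_raw(sfreq=3., amplitude=2., sig_dur=300.)
    print(simulated)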
| [((53, 13, 55, 47), 'pandas.DataFrame', 'DataFrame', ({(53, 23, 55, 46): "{'trial_type': conditions, 'onset': onsets, 'duration': durations}"}, {}), "({'trial_type': conditions, 'onset': onsets, 'duration': durations})", False, 'from pandas import DataFrame\n'), ((57, 9, 59, 54), 'nilearn.stats.first_level_model.make_first_level_design_matrix', 'make_first_level_design_matrix', (), '', False, 'from nilearn.stats.first_level_model import make_first_level_design_matrix\n'), ((61, 18, 61, 60), 'mne.Annotations', 'Annotations', ({(61, 30, 61, 36): 'onsets', (61, 38, 61, 47): 'durations', (61, 49, 61, 59): 'conditions'}, {}), '(onsets, durations, conditions)', False, 'from mne import Annotations, create_info\n'), ((63, 11, 63, 77), 'mne.create_info', 'create_info', (), '', False, 'from mne import Annotations, create_info\n'), ((41, 18, 41, 44), 'numpy.arange', 'np.arange', ({(41, 28, 41, 43): '(sig_dur * sfreq)'}, {}), '(sig_dur * sfreq)', True, 'import numpy as np\n'), ((48, 17, 48, 52), 'numpy.random.uniform', 'np.random.uniform', ({(48, 35, 48, 42): 'isi_min', (48, 44, 48, 51): 'isi_max'}, {}), '(isi_min, isi_max)', True, 'import numpy as np\n')] |
athanikos/cryptodataaccess | build/lib/dataaccess/TransactionRepository.py | 6189a44c65a9b03c02822a534e865740ab488809 | from cryptomodel.cryptostore import user_notification, user_channel, user_transaction, operation_type
from mongoengine import Q
from cryptodataaccess import helpers
from cryptodataaccess.helpers import if_none_raise, if_none_raise_with_id
class TransactionRepository:
def __init__(self, config, log_error):
self.configuration = config
self.log_error = log_error
def fetch_transaction(self, id):
return helpers.server_time_out_wrapper(self, self.do_fetch_transaction, id)
def fetch_transactions(self, user_id):
return helpers.server_time_out_wrapper(self, self.do_fetch_transactions, user_id)
def insert_transaction(self, user_id, volume, symbol, value, price, currency, date, source, source_id, operation):
return helpers.server_time_out_wrapper(self, self.do_insert_transaction, user_id, volume, symbol,
value, price, currency, date, source, source_id, operation)
def update_transaction(self, id, user_id, volume, symbol, value, price, currency, date, source, source_id,
operation):
return helpers.server_time_out_wrapper(self, self.do_update_transaction, id,
user_id, volume, symbol, value, price, currency, date, source, source_id,
operation)
def delete_transaction(self, id, throw_if_does_not_exist=True):
helpers.server_time_out_wrapper(self, self.do_delete_transaction, id, throw_if_does_not_exist)
def do_delete_transaction(self, id, throw_if_does_not_exist=True):
helpers.do_local_connect(self.configuration)
trans = user_transaction.objects(id=id).first()
if throw_if_does_not_exist:
if_none_raise_with_id(id, trans)
if trans is not None:
trans.delete()
def do_update_transaction(self, id, user_id, volume, symbol, value, price, currency, date, source, source_id,
operation):
helpers.do_local_connect(self.configuration)
trans = user_transaction.objects(id=id).first()
if_none_raise_with_id(id, trans)
trans.user_id = user_id
trans.volume = volume
trans.symbol = symbol
trans.value = value
trans.price = price
trans.date = date
trans.source = source
trans.currency = currency
trans.source_id = source_id
trans.operation = operation
trans.save()
return user_transaction.objects(id=id).first()
def do_insert_transaction(self, user_id, volume, symbol, value, price, currency, date, source, source_id,
operation):
helpers.do_local_connect(self.configuration)
trans = user_transaction()
trans.user_id = user_id
trans.volume = volume
trans.symbol = symbol
trans.value = value
trans.price = price
trans.date = date
trans.currency = currency
trans.source = source
trans.source_id = source_id
trans.operation = operation
trans.save()
return user_transaction.objects(id=trans.id).first()
    def do_fetch_transactions(self, user_id):
helpers.do_local_connect(self.configuration)
return user_transaction.objects(Q(user_id=user_id))
def do_fetch_transaction(self, id ):
helpers.do_local_connect(self.configuration)
return user_transaction.objects(Q(id=id))[0]
| [((15, 15, 15, 83), 'cryptodataaccess.helpers.server_time_out_wrapper', 'helpers.server_time_out_wrapper', ({(15, 47, 15, 51): 'self', (15, 53, 15, 78): 'self.do_fetch_transaction', (15, 80, 15, 82): 'id'}, {}), '(self, self.do_fetch_transaction, id)', False, 'from cryptodataaccess import helpers\n'), ((18, 15, 18, 89), 'cryptodataaccess.helpers.server_time_out_wrapper', 'helpers.server_time_out_wrapper', ({(18, 47, 18, 51): 'self', (18, 53, 18, 79): 'self.do_fetch_transactions', (18, 81, 18, 88): 'user_id'}, {}), '(self, self.do_fetch_transactions, user_id)', False, 'from cryptodataaccess import helpers\n'), ((21, 15, 21, 89), 'cryptodataaccess.helpers.server_time_out_wrapper', 'helpers.server_time_out_wrapper', ({(21, 47, 21, 51): 'self', (21, 53, 21, 79): 'self.do_fetch_transactions', (21, 81, 21, 88): 'user_id'}, {}), '(self, self.do_fetch_transactions, user_id)', False, 'from cryptodataaccess import helpers\n'), ((24, 15, 25, 106), 'cryptodataaccess.helpers.server_time_out_wrapper', 'helpers.server_time_out_wrapper', ({(24, 47, 24, 51): 'self', (24, 53, 24, 79): 'self.do_insert_transaction', (24, 81, 24, 88): 'user_id', (24, 90, 24, 96): 'volume', (24, 98, 24, 104): 'symbol', (25, 47, 25, 52): 'value', (25, 54, 25, 59): 'price', (25, 61, 25, 69): 'currency', (25, 71, 25, 75): 'date', (25, 77, 25, 83): 'source', (25, 85, 25, 94): 'source_id', (25, 96, 25, 105): 'operation'}, {}), '(self, self.do_insert_transaction, user_id,\n volume, symbol, value, price, currency, date, source, source_id, operation)', False, 'from cryptodataaccess import helpers\n'), ((29, 15, 31, 57), 'cryptodataaccess.helpers.server_time_out_wrapper', 'helpers.server_time_out_wrapper', ({(29, 47, 29, 51): 'self', (29, 53, 29, 79): 'self.do_update_transaction', (29, 81, 29, 83): 'id', (30, 47, 30, 54): 'user_id', (30, 56, 30, 62): 'volume', (30, 64, 30, 70): 'symbol', (30, 72, 30, 77): 'value', (30, 79, 30, 84): 'price', (30, 86, 30, 94): 'currency', (30, 96, 30, 100): 'date', (30, 102, 30, 108): 'source', (30, 110, 30, 119): 'source_id', (31, 47, 31, 56): 'operation'}, {}), '(self, self.do_update_transaction, id,\n user_id, volume, symbol, value, price, currency, date, source,\n source_id, operation)', False, 'from cryptodataaccess import helpers\n'), ((34, 8, 34, 102), 'cryptodataaccess.helpers.server_time_out_wrapper', 'helpers.server_time_out_wrapper', ({(34, 40, 34, 44): 'self', (34, 46, 34, 72): 'self.do_delete_transaction', (34, 74, 34, 76): 'id', (34, 78, 34, 101): 'throw_if_does_not_exist'}, {}), '(self, self.do_delete_transaction, id,\n throw_if_does_not_exist)', False, 'from cryptodataaccess import helpers\n'), ((37, 8, 37, 52), 'cryptodataaccess.helpers.do_local_connect', 'helpers.do_local_connect', ({(37, 33, 37, 51): 'self.configuration'}, {}), '(self.configuration)', False, 'from cryptodataaccess import helpers\n'), ((46, 8, 46, 52), 'cryptodataaccess.helpers.do_local_connect', 'helpers.do_local_connect', ({(46, 33, 46, 51): 'self.configuration'}, {}), '(self.configuration)', False, 'from cryptodataaccess import helpers\n'), ((48, 8, 48, 40), 'cryptodataaccess.helpers.if_none_raise_with_id', 'if_none_raise_with_id', ({(48, 30, 48, 32): 'id', (48, 34, 48, 39): 'trans'}, {}), '(id, trans)', False, 'from cryptodataaccess.helpers import if_none_raise, if_none_raise_with_id\n'), ((64, 8, 64, 52), 'cryptodataaccess.helpers.do_local_connect', 'helpers.do_local_connect', ({(64, 33, 64, 51): 'self.configuration'}, {}), '(self.configuration)', False, 'from cryptodataaccess import helpers\n'), ((65, 16, 65, 
34), 'cryptomodel.cryptostore.user_transaction', 'user_transaction', ({}, {}), '()', False, 'from cryptomodel.cryptostore import user_notification, user_channel, user_transaction, operation_type\n'), ((80, 8, 80, 52), 'cryptodataaccess.helpers.do_local_connect', 'helpers.do_local_connect', ({(80, 33, 80, 51): 'self.configuration'}, {}), '(self.configuration)', False, 'from cryptodataaccess import helpers\n'), ((84, 8, 84, 52), 'cryptodataaccess.helpers.do_local_connect', 'helpers.do_local_connect', ({(84, 33, 84, 51): 'self.configuration'}, {}), '(self.configuration)', False, 'from cryptodataaccess import helpers\n'), ((40, 12, 40, 44), 'cryptodataaccess.helpers.if_none_raise_with_id', 'if_none_raise_with_id', ({(40, 34, 40, 36): 'id', (40, 38, 40, 43): 'trans'}, {}), '(id, trans)', False, 'from cryptodataaccess.helpers import if_none_raise, if_none_raise_with_id\n'), ((81, 40, 81, 58), 'mongoengine.Q', 'Q', (), '', False, 'from mongoengine import Q\n'), ((38, 16, 38, 47), 'cryptomodel.cryptostore.user_transaction.objects', 'user_transaction.objects', (), '', False, 'from cryptomodel.cryptostore import user_notification, user_channel, user_transaction, operation_type\n'), ((47, 16, 47, 47), 'cryptomodel.cryptostore.user_transaction.objects', 'user_transaction.objects', (), '', False, 'from cryptomodel.cryptostore import user_notification, user_channel, user_transaction, operation_type\n'), ((60, 15, 60, 46), 'cryptomodel.cryptostore.user_transaction.objects', 'user_transaction.objects', (), '', False, 'from cryptomodel.cryptostore import user_notification, user_channel, user_transaction, operation_type\n'), ((77, 15, 77, 52), 'cryptomodel.cryptostore.user_transaction.objects', 'user_transaction.objects', (), '', False, 'from cryptomodel.cryptostore import user_notification, user_channel, user_transaction, operation_type\n'), ((85, 40, 85, 48), 'mongoengine.Q', 'Q', (), '', False, 'from mongoengine import Q\n')] |
FSU-ACM/Contest-Server | app/util/auth2.py | 00a71cdcee1a7e4d4e4d8e33b5d6decf27f02313 | """ util.auth2: Authentication tools
This module is based off of util.auth, except with the action
paradigm removed.
"""
from flask import session
from app.models import Account
from app.util import course as course_util
# Session keys
SESSION_EMAIL = 'email'
def create_account(email: str, password: str, first_name: str,
last_name: str, fsuid: str, course_list: list = []):
"""
Creates an account for a single user.
:email: Required, the email address of the user.
:password: Required, user's chosen password.
:first_name: Required, user's first name.
:last_name: Required, user's last name.
:fsuid: Optional, user's FSUID.
:course_list: Optional, courses being taken by user
:return: Account object.
"""
account = Account(
email=email,
first_name=first_name,
last_name=last_name,
fsuid=fsuid,
is_admin=False
)
# Set user's extra credit courses
course_util.set_courses(account, course_list)
account.set_password(password)
account.save()
return account
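
# Usage sketch (all values below are hypothetical; a configured MongoDB
# connection is required for Account.save() to succeed):
#     account = create_account('[email protected]', 's3cret',
#                              'Ada', 'Lovelace', fsuid='')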
def get_account(email: str=None):
"""
    Retrieves the account for the given email (defaulting to the session
    email). Returns None if no account can be found.
:email: Optional email string, if not provided will use session['email']
:return: Account if email is present in session, None otherwise.
"""
try:
email = email or session['email']
return Account.objects.get_or_404(email=email)
    except Exception:
return None
| [((29, 14, 35, 5), 'app.models.Account', 'Account', (), '', False, 'from app.models import Account\n'), ((38, 4, 38, 49), 'app.util.course.set_courses', 'course_util.set_courses', ({(38, 28, 38, 35): 'account', (38, 37, 38, 48): 'course_list'}, {}), '(account, course_list)', True, 'from app.util import course as course_util\n'), ((56, 15, 56, 54), 'app.models.Account.objects.get_or_404', 'Account.objects.get_or_404', (), '', False, 'from app.models import Account\n')] |
motiurce/FeView | FeView/pstaticwidget.py | 8897b37062be88dd5ead2c8524f6b3b73451e25d | from PyQt5.QtWidgets import *
from matplotlib.backends.backend_qt5agg import FigureCanvas
from matplotlib.figure import Figure
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
class PstaticWidget(QWidget):
def __init__(self, parent=None):
QWidget.__init__(self, parent)
self.fig_pstatic = Figure()
self.fig_pstatic.set_facecolor('#ffffff')
self.canvas_pstatic = FigureCanvas(self.fig_pstatic)
vertical_layout = QVBoxLayout()
vertical_layout.addWidget(self.canvas_pstatic)
self.canvas_pstatic.axes_pstatic = self.canvas_pstatic.figure.add_subplot(111)
self.setLayout(vertical_layout)
self.canvas_pstatic.axes_pstatic.set_xticks([])
self.canvas_pstatic.axes_pstatic.set_yticks([])
self.canvas_pstatic.axes_pstatic.axis('off')
self.fig_pstatic.subplots_adjust(left=0.12, bottom=0.15, right=0.985, top=0.95)
self.toolbar = NavigationToolbar(self.canvas_pstatic, self)
self.toolbar.setFixedHeight(25)
vertical_layout.addWidget(self.toolbar) | [((10, 27, 10, 35), 'matplotlib.figure.Figure', 'Figure', ({}, {}), '()', False, 'from matplotlib.figure import Figure\n'), ((12, 30, 12, 60), 'matplotlib.backends.backend_qt5agg.FigureCanvas', 'FigureCanvas', ({(12, 43, 12, 59): 'self.fig_pstatic'}, {}), '(self.fig_pstatic)', False, 'from matplotlib.backends.backend_qt5agg import FigureCanvas\n'), ((22, 23, 22, 67), 'matplotlib.backends.backend_qt5agg.NavigationToolbar2QT', 'NavigationToolbar', ({(22, 41, 22, 60): 'self.canvas_pstatic', (22, 62, 22, 66): 'self'}, {}), '(self.canvas_pstatic, self)', True, 'from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar\n')] |
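

# Minimal embedding sketch (assumes a display/Qt environment is available):
if __name__ == '__main__':
    import sys
    app = QApplication(sys.argv)
    widget = PstaticWidget()
    widget.canvas_pstatic.axes_pstatic.plot([0, 1, 2], [0, 1, 4])
    widget.show()
    sys.exit(app.exec_())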
julesy89/pyallocation | pyallocation/solvers/exhaustive.py | af80a8e2367a006121dd0702b55efa7b954bb039 | import numpy as np
from pymoo.core.algorithm import Algorithm
from pymoo.core.population import Population
from pymoo.util.termination.no_termination import NoTermination
from pyallocation.allocation import FastAllocation
from pyallocation.problem import AllocationProblem
def exhaustively(problem):
alloc = FastAllocation(problem, debug=False)
k = 0
sols = []
rec_exhaustively(problem, alloc, k, sols)
sols.sort(key=lambda x: (x[1], x[2]))
return sols[:100]
def rec_exhaustively(problem, alloc, k, sols):
if not alloc.feas:
return
if k == problem.n_var:
x, cv, f = np.copy(alloc.x), alloc.CV, (alloc.F * problem.w).sum()
sols.append((x, cv, f))
if len(sols) > 1000:
sols.sort(key=lambda x: (x[1], x[2]))
while len(sols) > 100:
sols.pop()
else:
for val in range(problem.xl[k], problem.xu[k] + 1):
alloc.set(k, val)
rec_exhaustively(problem, alloc, k + 1, sols)
alloc.set(k, -1)
class ExhaustiveAlgorithm(Algorithm):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.default_termination = NoTermination()
def setup(self, problem, **kwargs):
super().setup(problem, **kwargs)
assert isinstance(problem, AllocationProblem)
return self
def _initialize(self):
self._next()
def _next(self):
solutions = exhaustively(self.problem)
self.pop = Population.new(X=np.array([x for x, _, _ in solutions]))
self.evaluator.eval(self.problem, self.pop)
for ind in self.pop:
print(ind.F[0], ind.X)
self.termination.force_termination = True
| [((11, 12, 11, 48), 'pyallocation.allocation.FastAllocation', 'FastAllocation', (), '', False, 'from pyallocation.allocation import FastAllocation\n'), ((44, 35, 44, 50), 'pymoo.util.termination.no_termination.NoTermination', 'NoTermination', ({}, {}), '()', False, 'from pymoo.util.termination.no_termination import NoTermination\n'), ((24, 19, 24, 35), 'numpy.copy', 'np.copy', ({(24, 27, 24, 34): 'alloc.x'}, {}), '(alloc.x)', True, 'import numpy as np\n'), ((57, 36, 57, 74), 'numpy.array', 'np.array', ({(57, 45, 57, 73): '[x for x, _, _ in solutions]'}, {}), '([x for x, _, _ in solutions])', True, 'import numpy as np\n')] |
yasminbraga/ufopa-reports | config.py | 6d8b213eb0dfce6775d0bb0fd277e8dc09da041c | import os
class Config:
CSRF_ENABLED = True
SECRET_KEY = 'your-very-very-secret-key'
SQLALCHEMY_DATABASE_URI = 'postgresql:///flask_template_dev'
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = True
class Development(Config):
ENV = 'development'
DEBUG = True
TESTING = False
class Production(Config):
ENV = 'production'
DEBUG = False
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL', 'postgres://firhokdcdnfygz:93231d3f2ae1156cabfc40f7e4ba08587a77f68a5e2072fbcbbdb30150ba4bcb@ec2-107-22-253-158.compute-1.amazonaws.com:5432/df9c5vvl0s21da')
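
# Usage sketch (hypothetical Flask app factory):
#     app.config.from_object('config.Production')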
| [((21, 30, 21, 212), 'os.getenv', 'os.getenv', ({(21, 40, 21, 54): '"""DATABASE_URL"""', (21, 56, 21, 211): '"""postgres://firhokdcdnfygz:93231d3f2ae1156cabfc40f7e4ba08587a77f68a5e2072fbcbbdb30150ba4bcb@ec2-107-22-253-158.compute-1.amazonaws.com:5432/df9c5vvl0s21da"""'}, {}), "('DATABASE_URL',\n 'postgres://firhokdcdnfygz:93231d3f2ae1156cabfc40f7e4ba08587a77f68a5e2072fbcbbdb30150ba4bcb@ec2-107-22-253-158.compute-1.amazonaws.com:5432/df9c5vvl0s21da'\n )", False, 'import os\n')] |
noironetworks/heat | heat/api/openstack/v1/views/stacks_view.py | 7cdadf1155f4d94cf8f967635b98e4012a7acfb7 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
from heat.api.openstack.v1 import util
from heat.api.openstack.v1.views import views_common
from heat.rpc import api as rpc_api
_collection_name = 'stacks'
basic_keys = (
rpc_api.STACK_ID,
rpc_api.STACK_NAME,
rpc_api.STACK_DESCRIPTION,
rpc_api.STACK_STATUS,
rpc_api.STACK_STATUS_DATA,
rpc_api.STACK_CREATION_TIME,
rpc_api.STACK_DELETION_TIME,
rpc_api.STACK_UPDATED_TIME,
rpc_api.STACK_OWNER,
rpc_api.STACK_PARENT,
rpc_api.STACK_USER_PROJECT_ID,
rpc_api.STACK_TAGS,
)
def format_stack(req, stack, keys=None, include_project=False):
def transform(key, value):
if keys and key not in keys:
return
if key == rpc_api.STACK_ID:
yield ('id', value['stack_id'])
yield ('links', [util.make_link(req, value)])
if include_project:
yield ('project', value['tenant'])
elif key == rpc_api.STACK_ACTION:
return
elif (key == rpc_api.STACK_STATUS and
rpc_api.STACK_ACTION in stack):
# To avoid breaking API compatibility, we join RES_ACTION
# and RES_STATUS, so the API format doesn't expose the
# internal split of state into action/status
yield (key, '_'.join((stack[rpc_api.STACK_ACTION], value)))
else:
# TODO(zaneb): ensure parameters can be formatted for XML
# elif key == rpc_api.STACK_PARAMETERS:
# return key, json.dumps(value)
yield (key, value)
return dict(itertools.chain.from_iterable(
transform(k, v) for k, v in stack.items()))
def collection(req, stacks, count=None, include_project=False):
keys = basic_keys
formatted_stacks = [format_stack(req, s, keys, include_project)
for s in stacks]
result = {'stacks': formatted_stacks}
links = views_common.get_collection_links(req, formatted_stacks)
if links:
result['links'] = links
if count is not None:
result['count'] = count
return result
| [((72, 12, 72, 68), 'heat.api.openstack.v1.views.views_common.get_collection_links', 'views_common.get_collection_links', ({(72, 46, 72, 49): 'req', (72, 51, 72, 67): 'formatted_stacks'}, {}), '(req, formatted_stacks)', False, 'from heat.api.openstack.v1.views import views_common\n'), ((45, 29, 45, 55), 'heat.api.openstack.v1.util.make_link', 'util.make_link', ({(45, 44, 45, 47): 'req', (45, 49, 45, 54): 'value'}, {}), '(req, value)', False, 'from heat.api.openstack.v1 import util\n')] |
Kzra/pykrev | pykrev/formula/find_intersections.py | 1a328fccded962f309e951c8509b87a82c3d3ae6 | import itertools
import numpy as np
import pandas as pd
def find_intersections(formula_lists,group_labels,exclusive = True):
"""
Docstring for function pyKrev.find_intersections
====================
This function compares n lists of molecular formula and outputs a dictionary containing the intersections between each list.
Use
----
find_intersections([list_1,..,list_n],['group_1',...,'group_n'])
Returns a dictionary in which each key corresponds to a combination of group labels
and the corresponding value is a set containing the intersections between the groups in that combination.
Parameters
----------
formula_lists: a list containing n lists of molecular formula. Each item in the sub list should be a formula string.
group_labels: a list containing n strings of corresponding group labels.
exclusive: True or False, depending on whether you want the intersections to contain only unique values.
"""
if len(formula_lists) != len(group_labels):
        raise ValueError('formula_lists and group_labels must be of equal length')
combinations = [seq for i in range(0,len(group_labels)+1) for seq in itertools.combinations(group_labels,i) if len(seq) > 0]
combinations = sorted(combinations,key = lambda c : len(c),reverse = True) # sort combinations by length
if exclusive == True:
assigned_formula = set() #create a set that will hold all the formula already assigned to a group
amb = pd.DataFrame(data = formula_lists).T
amb.columns = group_labels
intersections = dict()
for combo in combinations:
queries = []
for c in combo:
formula = list(filter(None,amb[c])) #Remove None entries introduced by dataframe
queries.append(set(formula))
if len(queries) == 1: #if there is only one query find the unique elements in it
q_set = frozenset(queries[0]) #qset is a frozen set, so it will not be mutated by changes to queries[0]
for f_list in formula_lists: #cycle all formula in formula_lists
set_f = frozenset(f_list) #convert f_list to sets, must be frozen so type matches q_set
if set_f == q_set: # ignore the set that corresponds to the query
pass
else:
queries[0] = queries[0] - set_f #delete any repeated elements in fset
intersections[combo] = queries[0]
elif len(queries) > 1:
if exclusive == True:
q_intersect = intersect(queries)
intersections[combo] = q_intersect - assigned_formula #remove any elements from q_intersect that have already been assigned
assigned_formula.update(q_intersect) #update the assigned_set with q_intersect
else:
intersections[combo] = intersect(queries)
return intersections
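
# Usage sketch with made-up formula strings:
#     find_intersections([['C6H12O6', 'C2H4O2'], ['C6H12O6']], ['A', 'B'])
# returns {('A', 'B'): {'C6H12O6'}, ('A',): {'C2H4O2'}, ('B',): set()}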
def intersect(samples,counter=0):
""" This command uses recursion to find the intersections between a variable number of sets given in samples.
Where samples = [set_1,set_2,...,set_n] """
if len(samples) == 1:
return samples[0]
a = samples[counter]
b = samples[counter+1::]
if len(b) == 1: #check to see whether the recursion has reached the final element
return a & b[0]
else:
counter += 1
return a & intersect(samples,counter) | [((34, 10, 34, 44), 'pandas.DataFrame', 'pd.DataFrame', (), '', True, 'import pandas as pd\n'), ((30, 73, 30, 111), 'itertools.combinations', 'itertools.combinations', ({(30, 96, 30, 108): 'group_labels', (30, 109, 30, 110): 'i'}, {}), '(group_labels, i)', False, 'import itertools\n')] |
j4ck64/PlaylistDirectories | Create Playlist.py | 4a7caf0923620a84aea9bb91e643011e7ee118db | import os
import glob
import shutil
from tinytag import TinyTag
""" root = 'C:/'
copy_to = '/copy to/folder'
tag = TinyTag.get('C:/Users/jchap/OneDrive/Pictures/(VERYRAREBOYZ) (feat. $ki Mask The Slump God and Drugz).mp3')
print(tag.artist)
print('song duration: '+str(tag.duration))
"""
f = []
f=glob.glob('C:/Users/jchap/OneDrive/*.mp3')
print(f)
musicDirectory=[]
musicFiles =[]
# tag = TinyTag.get(f[0])
# print(tag.artist)
# for root, dirs, files in os.walk("C:/Users/jchap/OneDrive/"):
for root, dirs, files in os.walk("C:/"):
for file in files:
if file.endswith(".mp3"):
musicFiles.append(file)
musicDirectory.append(os.path.join(root, file))
#print(os.path.join(root, file))
print('files'+str(musicFiles))
tag = TinyTag.get(musicDirectory[0])
print('Artist',tag.artist)
print('Album Artist',tag.albumartist)
print('Title',tag.title)
print('Bitrate',tag.bitrate)
print('music directory'+str(musicDirectory))
print(len(musicDirectory))
currentDirectory =os.path.dirname(__file__)
with open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', "r") as f:
content_list = [word.strip() for word in f]
""" my_file = open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', "r")
content_list = my_file. readlines() """
# print('playlist contents')
# print(content_list)
musicDirectory
musicWithoutDuplicates = []
duplicatesList = []
count =0
# check for tags equal to none
#musicDirectory =[x for x in musicDirectory j = TinyTag.get(x) if x != 'wdg']
#remove tracks without album artist or title
for track in reversed(range(len(musicDirectory))):
try:
trackTag = TinyTag.get(musicDirectory[track])
if str(trackTag.albumartist)== 'None' or str(trackTag.title)=='None':
print('albumArtist = none',musicDirectory[track])
print('removing track and adding to log file')
musicDirectory.remove(musicDirectory[track])
except IndexError:
break
#check for duplicates
for j in range(len(musicDirectory)):
musicDtag = TinyTag.get(musicDirectory[j])
duplicateL=[]
duplicateLBiterate=[]
for duplicate in range(len(musicDirectory)):
duplicateTag = TinyTag.get(musicDirectory[duplicate])
musicWithoutDuplicates.append(musicDirectory[j])
if duplicateTag.albumartist == musicDtag.albumartist or duplicateTag.albumartist in musicDtag.albumartist:
if duplicateTag.title == musicDtag.title or duplicateTag.title in musicDtag.title :
#check if last iteration
if duplicate>=len(musicDirectory)-1:
print("found a duplicate!",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title)
                    if len(duplicateLBiterate)==1:  # NOTE: may need to change this conditional or add another
                        print('bitrate')
                        #[x for x in duplicateL if TinyTag.get(musicDirectory[x]).bitrate > musicDirectory[x]]
                        print("Current duplicate bitrate", duplicateLBiterate)
for x in range(len(duplicateL)):
if TinyTag.get(duplicateL[x]).bitrate == max(duplicateLBiterate):
#REMOVE ONE WITH THE BEST BITERATE
duplicateL.remove(duplicateL[x])
print('duplicate list',duplicateL)
#Add
duplicatesList = duplicatesList + duplicateL
else:
print("found a duplicate!",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title)
duplicateL.append(musicDirectory[duplicate])
duplicateLBiterate.append(duplicateTag.bitrate)
print('dup ',duplicatesList)
#remove duplicates from list
for u in range(len(duplicatesList)):
for i in range(len(musicDirectory)):
if duplicatesList[u]==musicDirectory[i]:
musicDirectory.remove(musicDirectory[i])
print('music ',musicDirectory)
#create playlist
newPlaylist = open("Test.m3u", "w")
#add file path to the respective track in the new playlist
for content in enumerate(content_list):
# split strings into artist and title
trackNumber=content[0]
trackArray =str(content[1]).split('-')
albumArtist= trackArray[0].strip()
title=trackArray[1].strip()
print('title:',title)
print('albumArtist:',albumArtist)
    for trackDirectory in range(len(musicDirectory)):
        trackTag = TinyTag.get(musicDirectory[trackDirectory])
        if trackTag.albumartist == albumArtist or trackTag.albumartist in albumArtist:
            if trackTag.title == title or trackTag.title in title:
                # write the resolved file path followed by the original playlist entry
                newPlaylist.write(musicDirectory[trackDirectory] + " " + content[1] + "\n")
                break
# close the playlist only after every entry has been processed
newPlaylist.close()
Alexis-Kepano/python_challenge | PyBank/main.py | 2d86e0d891c549d5fba99bd48d612be80746e34b | #import modules
import os
import csv
#input
csvpath = os.path.join('Resources', 'budget_data.csv')
#output
outfile = os.path.join('Analysis', 'pybankstatements.txt')
#declare variables
months = []
total_m = 1
net_total = 0
total_change = 0
monthly_changes = []
greatest_inc = ['', 0]
greatest_dec = ['', 0]
#open & read csv
with open(csvpath) as csvfile:
csvreader = csv.reader(csvfile, delimiter=',')
header = next(csvreader)
first_row = next(csvreader)
previous_row = int(first_row[1])
net_total = int(first_row[1])
#loop
for row in csvreader:
net_total += int(row[1])
total_m = total_m+1
current_value = int(row[1])
change_value = int(current_value-previous_row)
monthly_changes.append(change_value)
months.append(row[0])
previous_row = int(row[1])
total_change = total_change + change_value
if change_value > greatest_inc[1]:
greatest_inc[0] = str(row[0])
greatest_inc[1] = change_value
if change_value < greatest_dec[1]:
greatest_dec[0] = str(row[0])
greatest_dec[1] = change_value
avg_change = total_change/len(months)
output = (
f"\n Financial Analysis \n"
f"------------------------------\n"
f"Total Months: {total_m}\n"
f"Total: ${net_total}\n"
f"Average Change: ${avg_change:.2f}\n"
f"Greatest Increase in Profits: {greatest_inc[0]} (${greatest_inc[1]})\n"
f"Greatest Decrease in Profits: {greatest_dec[0]} (${greatest_dec[1]})\n")
with open(outfile, "w") as txt_file:
txt_file.write(output)
aasw0ng/thornode-telegram-bot | bot/constants/messages.py | 5f73b882381548f45fc9e690c6e4845def9600b7 | from enum import Enum
from constants.globals import HEALTH_EMOJIS
NETWORK_ERROR = '😱 There was an error while getting data 😱\nAn API endpoint is down!'
HEALTH_LEGEND = f'\n*Node health*:\n{HEALTH_EMOJIS[True]} - *healthy*\n{HEALTH_EMOJIS[False]} - *unhealthy*\n' \
f'{HEALTH_EMOJIS[None]} - *unknown*\n'
class NetworkHealthStatus(Enum):
INEFFICIENT = "Inefficient"
OVERBONDED = "Overbonded"
OPTIMAL = "Optimal"
    UNDERBONDED = "Underbonded"
INSECURE = "Insecure"
NETWORK_HEALTHY_AGAIN = "The network is safe and efficient again! ✅"
def get_network_health_warning(network_health_status: NetworkHealthStatus) -> str:
severity = "🤒"
if network_health_status is NetworkHealthStatus.INSECURE:
severity = "💀"
elif network_health_status is NetworkHealthStatus.INEFFICIENT:
severity = "🦥"
return f"Network health is not optimal: {network_health_status.value} {severity}"
def get_node_healthy_again_message(node_data) -> str:
return f"⚕️Node is healthy again⚕️\nAddress: {node_data['node_address']}\nIP: {node_data['ip_address']}\n" \
def get_node_health_warning_message(node_data) -> str:
return "⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️ ️⚠ ️⚠ ⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️ \n" \
f"Node is *not responding*!\nAddress: {node_data['node_address']}\nIP: {node_data['ip_address']}\n" \
"\nCheck it's health immediately\n" \
"⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️ ️⚠ ️⚠ ⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️"
| [] |
RanHerOver/cometaai | src/interactive_conditional_samples.py | 02d459da5bbc58536112cfe6343f5ceef4ff2356 | import random
import fire
import json
import os
import numpy as np
import tensorflow as tf
import pytumblr
import mysql.connector
import datetime
from random import seed
import model, sample, encoder
def interact_model(
model_name='1558M',
seed=None,
nsamples=1,
batch_size=1,
length=None,
temperature=.7,
top_k=10,
top_p=1,
models_dir='models',
):
    # Authentication
client = pytumblr.TumblrRestClient(
'',
'',
'',
''
)
    # To keep my account secure, the two key pairs used to connect to Tumblr have been removed from this file.
    # Connect to the DB
mydb = mysql.connector.connect(
host="localhost",
user="root",
password="",
database="cometa"
)
print(mydb)
cursor = mydb.cursor()
    # Build and run the query
    print("before executing the query")
    cursor.execute("SELECT testo FROM prompts ORDER BY RAND() LIMIT 1")
    print("after executing the query")
for (testo) in cursor:
print("{}".format(testo))
    # Format the prompt
testoBuono = "{}".format(testo)
testoBuono=testoBuono.replace("(","")
testoBuono=testoBuono.replace(")","")
testoBuono=testoBuono.replace("'","")
testoBuono=testoBuono.replace(",","")
print(testoBuono)
    client.info() # Fetches and retains the profile information
blogName='unlikelycrownkitty'
models_dir = os.path.expanduser(os.path.expandvars(models_dir))
if batch_size is None:
batch_size = 1
assert nsamples % batch_size == 0
    # Load the model from its directory
enc = encoder.get_encoder(model_name, models_dir)
hparams = model.default_hparams()
with open(os.path.join(models_dir, model_name, 'hparams.json')) as f:
hparams.override_from_dict(json.load(f))
    # Check that the prompt is not excessively long
if length is None:
length = hparams.n_ctx // 2
elif length > hparams.n_ctx:
raise ValueError("Can't get samples longer than window size: %s" % hparams.n_ctx)
    # Start the model with the given parameters
with tf.Session(graph=tf.Graph()) as sess:
context = tf.placeholder(tf.int32, [batch_size, None])
np.random.seed(seed)
tf.set_random_seed(seed)
output = sample.sample_sequence(
hparams=hparams, length=length,
context=context,
batch_size=batch_size,
temperature=temperature, top_k=top_k, top_p=top_p
)
continua=True
        # Start generating text
saver = tf.train.Saver()
ckpt = tf.train.latest_checkpoint(os.path.join(models_dir, model_name))
saver.restore(sess, ckpt)
while continua:
raw_text = testoBuono
# raw_text = f.read()
while not raw_text:
print('The file is empty! Write something yourself.')
raw_text = input("Model prompt >>> ")
context_tokens = enc.encode(raw_text)
generated = 0
for _ in range(nsamples // batch_size):
out = sess.run(output, feed_dict={
context: [context_tokens for _ in range(batch_size)]
})[:, len(context_tokens):]
for i in range(batch_size):
generated += 1
text = enc.decode(out[i])
print("=" * 40 + " SAMPLE " + str(generated) + " " + "=" * 40)
print(text)
print("=" * 80)
                    # Publish the generated text
client.create_text(blogName, state="published", slug="testing-text-posts",title=raw_text, body=text)
print('Continue? y/n')
risposta=input()
if risposta.lower() in ['y', 'yes']:
continua=True
else:
continua=False
exit()
if __name__ == '__main__':
    fire.Fire(interact_model)
| [((25, 13, 30, 5), 'pytumblr.TumblrRestClient', 'pytumblr.TumblrRestClient', ({(26, 6, 26, 8): '""""""', (27, 6, 27, 8): '""""""', (28, 6, 28, 8): '""""""', (29, 6, 29, 8): '""""""'}, {}), "('', '', '', '')", False, 'import pytumblr\n'), ((68, 10, 68, 53), 'encoder.get_encoder', 'encoder.get_encoder', ({(68, 30, 68, 40): 'model_name', (68, 42, 68, 52): 'models_dir'}, {}), '(model_name, models_dir)', False, 'import model, sample, encoder\n'), ((69, 14, 69, 37), 'model.default_hparams', 'model.default_hparams', ({}, {}), '()', False, 'import model, sample, encoder\n'), ((62, 36, 62, 66), 'os.path.expandvars', 'os.path.expandvars', ({(62, 55, 62, 65): 'models_dir'}, {}), '(models_dir)', False, 'import os\n'), ((82, 18, 82, 62), 'tensorflow.placeholder', 'tf.placeholder', ({(82, 33, 82, 41): 'tf.int32', (82, 43, 82, 61): '[batch_size, None]'}, {}), '(tf.int32, [batch_size, None])', True, 'import tensorflow as tf\n'), ((83, 8, 83, 28), 'numpy.random.seed', 'np.random.seed', ({(83, 23, 83, 27): 'seed'}, {}), '(seed)', True, 'import numpy as np\n'), ((84, 8, 84, 32), 'tensorflow.set_random_seed', 'tf.set_random_seed', ({(84, 27, 84, 31): 'seed'}, {}), '(seed)', True, 'import tensorflow as tf\n'), ((85, 17, 90, 9), 'sample.sample_sequence', 'sample.sample_sequence', (), '', False, 'import model, sample, encoder\n'), ((93, 16, 93, 32), 'tensorflow.train.Saver', 'tf.train.Saver', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((70, 14, 70, 66), 'os.path.join', 'os.path.join', ({(70, 27, 70, 37): 'models_dir', (70, 39, 70, 49): 'model_name', (70, 51, 70, 65): '"""hparams.json"""'}, {}), "(models_dir, model_name, 'hparams.json')", False, 'import os\n'), ((71, 35, 71, 47), 'json.load', 'json.load', ({(71, 45, 71, 46): 'f'}, {}), '(f)', False, 'import json\n'), ((94, 42, 94, 78), 'os.path.join', 'os.path.join', ({(94, 55, 94, 65): 'models_dir', (94, 67, 94, 77): 'model_name'}, {}), '(models_dir, model_name)', False, 'import os\n'), ((81, 26, 81, 36), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n')] |
kokosing/hue | desktop/core/ext-py/pyasn1-0.1.8/pyasn1/compat/iterfunc.py | 2307f5379a35aae9be871e836432e6f45138b3d9 | from sys import version_info
if version_info[0] <= 2 and version_info[1] <= 4:
def all(iterable):
for element in iterable:
if not element:
return False
return True
else:
all = all
| [] |
UniversitaDellaCalabria/uniCMS | src/cms/carousels/serializers.py | b0af4e1a767867f0a9b3c135a5c84587e713cb71 | from rest_framework import serializers
from cms.api.serializers import UniCMSContentTypeClass, UniCMSCreateUpdateSerializer
from cms.medias.serializers import MediaSerializer
from . models import Carousel, CarouselItem, CarouselItemLink, CarouselItemLinkLocalization, CarouselItemLocalization
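
# The *ForeignKey fields below restrict the selectable primary keys to
# objects belonging to the carousel/item referenced in the URL kwargs.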
class CarouselForeignKey(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
request = self.context.get('request', None)
if request:
carousel_id = self.context['request'].parser_context['kwargs']['carousel_id']
return Carousel.objects.filter(pk=carousel_id)
return None # pragma: no cover
class CarouselItemForeignKey(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
request = self.context.get('request', None)
if request:
carousel_id = self.context['request'].parser_context['kwargs']['carousel_id']
item_id = self.context['request'].parser_context['kwargs']['carousel_item_id']
return CarouselItem.objects.filter(pk=item_id,
carousel__pk=carousel_id)
return None # pragma: no cover
class CarouselItemLinkForeignKey(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
request = self.context.get('request', None)
if request:
carousel_id = self.context['request'].parser_context['kwargs']['carousel_id']
item_id = self.context['request'].parser_context['kwargs']['carousel_item_id']
link_id = self.context['request'].parser_context['kwargs']['carousel_item_link_id']
return CarouselItemLink.objects.filter(pk=link_id,
carousel_item__pk=item_id,
carousel_item__carousel__pk=carousel_id)
return None # pragma: no cover
class CarouselSerializer(UniCMSCreateUpdateSerializer,
UniCMSContentTypeClass):
class Meta:
model = Carousel
fields = '__all__'
read_only_fields = ('created_by', 'modified_by')
class CarouselItemSerializer(UniCMSCreateUpdateSerializer,
UniCMSContentTypeClass):
carousel = CarouselForeignKey()
def to_representation(self, instance):
data = super().to_representation(instance)
image = MediaSerializer(instance.image)
data['image'] = image.data
return data
class Meta:
model = CarouselItem
fields = '__all__'
read_only_fields = ('created_by', 'modified_by')
class CarouselItemLocalizationSerializer(UniCMSCreateUpdateSerializer,
UniCMSContentTypeClass):
carousel_item = CarouselItemForeignKey()
class Meta:
model = CarouselItemLocalization
fields = '__all__'
read_only_fields = ('created_by', 'modified_by')
class CarouselItemLinkSerializer(UniCMSCreateUpdateSerializer,
UniCMSContentTypeClass):
carousel_item = CarouselItemForeignKey()
class Meta:
model = CarouselItemLink
fields = '__all__'
class CarouselItemLinkLocalizationSerializer(UniCMSCreateUpdateSerializer,
UniCMSContentTypeClass):
carousel_item_link = CarouselItemLinkForeignKey()
class Meta:
model = CarouselItemLinkLocalization
fields = '__all__'
read_only_fields = ('created_by', 'modified_by')
class CarouselSelectOptionsSerializer(serializers.ModelSerializer):
def to_representation(self, instance):
data = super().to_representation(instance)
data['value'] = instance.pk
data['text'] = instance.name
return data
class Meta:
model = Carousel
fields = ()
| [((58, 16, 58, 47), 'cms.medias.serializers.MediaSerializer', 'MediaSerializer', ({(58, 32, 58, 46): 'instance.image'}, {}), '(instance.image)', False, 'from cms.medias.serializers import MediaSerializer\n')] |
mzegla/open_model_zoo | demos/colorization_demo/python/colorization_demo.py | 092576b4c598c1e301ebc38ad74b323972e54f3e | #!/usr/bin/env python3
"""
Copyright (c) 2018-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from openvino.runtime import Core, get_version
import cv2 as cv
import numpy as np
import logging as log
from time import perf_counter
import sys
from argparse import ArgumentParser, SUPPRESS
from pathlib import Path
sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python'))
sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python/openvino/model_zoo'))
import monitors
from images_capture import open_images_capture
from model_api.performance_metrics import PerformanceMetrics
log.basicConfig(format='[ %(levelname)s ] %(message)s', level=log.DEBUG, stream=sys.stdout)
def build_arg():
parser = ArgumentParser(add_help=False)
in_args = parser.add_argument_group('Options')
in_args.add_argument('-h', '--help', action='help', default=SUPPRESS, help='Help with the script.')
in_args.add_argument("-m", "--model", help="Required. Path to .xml file with pre-trained model.",
required=True, type=Path)
in_args.add_argument("-d", "--device",
help="Optional. Specify target device for infer: CPU, GPU, HDDL or MYRIAD. "
"Default: CPU",
default="CPU", type=str)
in_args.add_argument('-i', "--input", required=True,
help='Required. An input to process. The input must be a single image, '
'a folder of images, video file or camera id.')
in_args.add_argument('--loop', default=False, action='store_true',
help='Optional. Enable reading the input in a loop.')
in_args.add_argument('-o', '--output', required=False,
help='Optional. Name of the output file(s) to save.')
in_args.add_argument('-limit', '--output_limit', required=False, default=1000, type=int,
help='Optional. Number of frames to store in output. '
'If 0 is set, all frames are stored.')
in_args.add_argument("--no_show", help="Optional. Don't show output.",
action='store_true', default=False)
in_args.add_argument("-u", "--utilization_monitors", default="", type=str,
help="Optional. List of monitors to show initially.")
return parser
def main(args):
cap = open_images_capture(args.input, args.loop)
log.info('OpenVINO Inference Engine')
log.info('\tbuild: {}'.format(get_version()))
core = Core()
log.info('Reading model {}'.format(args.model))
model = core.read_model(args.model, args.model.with_suffix(".bin"))
input_tensor_name = 'data_l'
input_shape = model.input(input_tensor_name).shape
assert input_shape[1] == 1, "Expected model input shape with 1 channel"
inputs = {}
for input in model.inputs:
inputs[input.get_any_name()] = np.zeros(input.shape)
assert len(model.outputs) == 1, "Expected number of outputs is equal 1"
compiled_model = core.compile_model(model, device_name=args.device)
infer_request = compiled_model.create_infer_request()
log.info('The model {} is loaded to {}'.format(args.model, args.device))
_, _, h_in, w_in = input_shape
frames_processed = 0
imshow_size = (640, 480)
graph_size = (imshow_size[0] // 2, imshow_size[1] // 4)
presenter = monitors.Presenter(args.utilization_monitors, imshow_size[1] * 2 - graph_size[1], graph_size)
metrics = PerformanceMetrics()
video_writer = cv.VideoWriter()
if args.output and not video_writer.open(args.output, cv.VideoWriter_fourcc(*'MJPG'),
cap.fps(), (imshow_size[0] * 2, imshow_size[1] * 2)):
raise RuntimeError("Can't open video writer")
start_time = perf_counter()
original_frame = cap.read()
if original_frame is None:
raise RuntimeError("Can't read an image from the input")
while original_frame is not None:
(h_orig, w_orig) = original_frame.shape[:2]
if original_frame.shape[2] > 1:
frame = cv.cvtColor(cv.cvtColor(original_frame, cv.COLOR_BGR2GRAY), cv.COLOR_GRAY2RGB)
else:
frame = cv.cvtColor(original_frame, cv.COLOR_GRAY2RGB)
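        # The network consumes the resized L (lightness) channel of the LAB image and
        # predicts the a/b chroma planes; those are resized back to the source
        # resolution below and recombined with the original L channel.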
img_rgb = frame.astype(np.float32) / 255
img_lab = cv.cvtColor(img_rgb, cv.COLOR_RGB2Lab)
img_l_rs = cv.resize(img_lab.copy(), (w_in, h_in))[:, :, 0]
inputs[input_tensor_name] = np.expand_dims(img_l_rs, axis=[0, 1])
res = next(iter(infer_request.infer(inputs).values()))
update_res = np.squeeze(res)
out = update_res.transpose((1, 2, 0))
out = cv.resize(out, (w_orig, h_orig))
img_lab_out = np.concatenate((img_lab[:, :, 0][:, :, np.newaxis], out), axis=2)
img_bgr_out = np.clip(cv.cvtColor(img_lab_out, cv.COLOR_Lab2BGR), 0, 1)
original_image = cv.resize(original_frame, imshow_size)
grayscale_image = cv.resize(frame, imshow_size)
colorize_image = (cv.resize(img_bgr_out, imshow_size) * 255).astype(np.uint8)
lab_image = cv.resize(img_lab_out, imshow_size).astype(np.uint8)
original_image = cv.putText(original_image, 'Original', (25, 50),
cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)
grayscale_image = cv.putText(grayscale_image, 'Grayscale', (25, 50),
cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)
colorize_image = cv.putText(colorize_image, 'Colorize', (25, 50),
cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)
lab_image = cv.putText(lab_image, 'LAB interpretation', (25, 50),
cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)
ir_image = [cv.hconcat([original_image, grayscale_image]),
cv.hconcat([lab_image, colorize_image])]
final_image = cv.vconcat(ir_image)
metrics.update(start_time, final_image)
frames_processed += 1
if video_writer.isOpened() and (args.output_limit <= 0 or frames_processed <= args.output_limit):
video_writer.write(final_image)
presenter.drawGraphs(final_image)
if not args.no_show:
cv.imshow('Colorization Demo', final_image)
key = cv.waitKey(1)
if key in {ord("q"), ord("Q"), 27}:
break
presenter.handleKey(key)
start_time = perf_counter()
original_frame = cap.read()
metrics.log_total()
for rep in presenter.reportMeans():
log.info(rep)
if __name__ == "__main__":
args = build_arg().parse_args()
sys.exit(main(args) or 0)
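# Example invocation (model and input paths are illustrative, not shipped with this file):
#   python3 colorization_demo.py -m colorization-v2.xml -i input.mp4 -d CPU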
| [((34, 0, 34, 91), 'logging.basicConfig', 'log.basicConfig', (), '', True, 'import logging as log\n'), ((38, 13, 38, 43), 'argparse.ArgumentParser', 'ArgumentParser', (), '', False, 'from argparse import ArgumentParser, SUPPRESS\n'), ((64, 10, 64, 52), 'images_capture.open_images_capture', 'open_images_capture', ({(64, 30, 64, 40): 'args.input', (64, 42, 64, 51): 'args.loop'}, {}), '(args.input, args.loop)', False, 'from images_capture import open_images_capture\n'), ((66, 4, 66, 41), 'logging.info', 'log.info', ({(66, 13, 66, 40): '"""OpenVINO Inference Engine"""'}, {}), "('OpenVINO Inference Engine')", True, 'import logging as log\n'), ((68, 11, 68, 17), 'openvino.runtime.Core', 'Core', ({}, {}), '()', False, 'from openvino.runtime import Core, get_version\n'), ((92, 16, 92, 109), 'monitors.Presenter', 'monitors.Presenter', ({(92, 35, 92, 60): 'args.utilization_monitors', (92, 62, 92, 96): 'imshow_size[1] * 2 - graph_size[1]', (92, 98, 92, 108): 'graph_size'}, {}), '(args.utilization_monitors, imshow_size[1] * 2 -\n graph_size[1], graph_size)', False, 'import monitors\n'), ((93, 14, 93, 34), 'model_api.performance_metrics.PerformanceMetrics', 'PerformanceMetrics', ({}, {}), '()', False, 'from model_api.performance_metrics import PerformanceMetrics\n'), ((95, 19, 95, 35), 'cv2.VideoWriter', 'cv.VideoWriter', ({}, {}), '()', True, 'import cv2 as cv\n'), ((100, 17, 100, 31), 'time.perf_counter', 'perf_counter', ({}, {}), '()', False, 'from time import perf_counter\n'), ((79, 39, 79, 60), 'numpy.zeros', 'np.zeros', ({(79, 48, 79, 59): 'input.shape'}, {}), '(input.shape)', True, 'import numpy as np\n'), ((114, 18, 114, 56), 'cv2.cvtColor', 'cv.cvtColor', ({(114, 30, 114, 37): 'img_rgb', (114, 39, 114, 55): 'cv.COLOR_RGB2Lab'}, {}), '(img_rgb, cv.COLOR_RGB2Lab)', True, 'import cv2 as cv\n'), ((117, 36, 117, 73), 'numpy.expand_dims', 'np.expand_dims', (), '', True, 'import numpy as np\n'), ((121, 21, 121, 36), 'numpy.squeeze', 'np.squeeze', ({(121, 32, 121, 35): 'res'}, {}), '(res)', True, 'import numpy as np\n'), ((124, 14, 124, 46), 'cv2.resize', 'cv.resize', ({(124, 24, 124, 27): 'out', (124, 29, 124, 45): '(w_orig, h_orig)'}, {}), '(out, (w_orig, h_orig))', True, 'import cv2 as cv\n'), ((125, 22, 125, 87), 'numpy.concatenate', 'np.concatenate', (), '', True, 'import numpy as np\n'), ((128, 25, 128, 63), 'cv2.resize', 'cv.resize', ({(128, 35, 128, 49): 'original_frame', (128, 51, 128, 62): 'imshow_size'}, {}), '(original_frame, imshow_size)', True, 'import cv2 as cv\n'), ((129, 26, 129, 55), 'cv2.resize', 'cv.resize', ({(129, 36, 129, 41): 'frame', (129, 43, 129, 54): 'imshow_size'}, {}), '(frame, imshow_size)', True, 'import cv2 as cv\n'), ((133, 25, 134, 91), 'cv2.putText', 'cv.putText', ({(133, 36, 133, 50): 'original_image', (133, 52, 133, 62): '"""Original"""', (133, 64, 133, 72): '(25, 50)', (134, 36, 134, 59): 'cv.FONT_HERSHEY_SIMPLEX', (134, 61, 134, 62): '1', (134, 64, 134, 75): '(0, 0, 255)', (134, 77, 134, 78): '2', (134, 80, 134, 90): 'cv.LINE_AA'}, {}), "(original_image, 'Original', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1,\n (0, 0, 255), 2, cv.LINE_AA)", True, 'import cv2 as cv\n'), ((135, 26, 136, 92), 'cv2.putText', 'cv.putText', ({(135, 37, 135, 52): 'grayscale_image', (135, 54, 135, 65): '"""Grayscale"""', (135, 67, 135, 75): '(25, 50)', (136, 37, 136, 60): 'cv.FONT_HERSHEY_SIMPLEX', (136, 62, 136, 63): '1', (136, 65, 136, 76): '(0, 0, 255)', (136, 78, 136, 79): '2', (136, 81, 136, 91): 'cv.LINE_AA'}, {}), "(grayscale_image, 'Grayscale', (25, 50), cv.FONT_HERSHEY_SIMPLEX,\n 1, (0, 0, 255), 2, cv.LINE_AA)", True, 'import cv2 as cv\n'), ((137, 25, 138, 91), 'cv2.putText', 'cv.putText', ({(137, 36, 137, 50): 'colorize_image', (137, 52, 137, 62): '"""Colorize"""', (137, 64, 137, 72): '(25, 50)', (138, 36, 138, 59): 'cv.FONT_HERSHEY_SIMPLEX', (138, 61, 138, 62): '1', (138, 64, 138, 75): '(0, 0, 255)', (138, 77, 138, 78): '2', (138, 80, 138, 90): 'cv.LINE_AA'}, {}), "(colorize_image, 'Colorize', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1,\n (0, 0, 255), 2, cv.LINE_AA)", True, 'import cv2 as cv\n'), ((139, 20, 140, 86), 'cv2.putText', 'cv.putText', ({(139, 31, 139, 40): 'lab_image', (139, 42, 139, 62): '"""LAB interpretation"""', (139, 64, 139, 72): '(25, 50)', (140, 31, 140, 54): 'cv.FONT_HERSHEY_SIMPLEX', (140, 56, 140, 57): '1', (140, 59, 140, 70): '(0, 0, 255)', (140, 72, 140, 73): '2', (140, 75, 140, 85): 'cv.LINE_AA'}, {}), "(lab_image, 'LAB interpretation', (25, 50), cv.\n FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)", True, 'import cv2 as cv\n'), ((144, 22, 144, 42), 'cv2.vconcat', 'cv.vconcat', ({(144, 33, 144, 41): 'ir_image'}, {}), '(ir_image)', True, 'import cv2 as cv\n'), ((159, 21, 159, 35), 'time.perf_counter', 'perf_counter', ({}, {}), '()', False, 'from time import perf_counter\n'), ((164, 8, 164, 21), 'logging.info', 'log.info', ({(164, 17, 164, 20): 'rep'}, {}), '(rep)', True, 'import logging as log\n'), ((67, 34, 67, 47), 'openvino.runtime.get_version', 'get_version', ({}, {}), '()', False, 'from openvino.runtime import Core, get_version\n'), ((111, 20, 111, 66), 'cv2.cvtColor', 'cv.cvtColor', ({(111, 32, 111, 46): 'original_frame', (111, 48, 111, 65): 'cv.COLOR_GRAY2RGB'}, {}), '(original_frame, cv.COLOR_GRAY2RGB)', True, 'import cv2 as cv\n'), ((126, 30, 126, 72), 'cv2.cvtColor', 'cv.cvtColor', ({(126, 42, 126, 53): 'img_lab_out', (126, 55, 126, 71): 'cv.COLOR_Lab2BGR'}, {}), '(img_lab_out, cv.COLOR_Lab2BGR)', True, 'import cv2 as cv\n'), ((142, 20, 142, 65), 'cv2.hconcat', 'cv.hconcat', ({(142, 31, 142, 64): '[original_image, grayscale_image]'}, {}), '([original_image, grayscale_image])', True, 'import cv2 as cv\n'), ((143, 20, 143, 59), 'cv2.hconcat', 'cv.hconcat', ({(143, 31, 143, 58): '[lab_image, colorize_image]'}, {}), '([lab_image, colorize_image])', True, 'import cv2 as cv\n'), ((154, 12, 154, 55), 'cv2.imshow', 'cv.imshow', ({(154, 22, 154, 41): '"""Colorization Demo"""', (154, 43, 154, 54): 'final_image'}, {}), "('Colorization Demo', final_image)", True, 'import cv2 as cv\n'), ((155, 18, 155, 31), 'cv2.waitKey', 'cv.waitKey', ({(155, 29, 155, 30): '1'}, {}), '(1)', True, 'import cv2 as cv\n'), ((96, 58, 96, 88), 'cv2.VideoWriter_fourcc', 'cv.VideoWriter_fourcc', ({(96, 80, 96, 87): "*'MJPG'"}, {}), "(*'MJPG')", True, 'import cv2 as cv\n'), ((109, 32, 109, 78), 'cv2.cvtColor', 'cv.cvtColor', ({(109, 44, 109, 58): 'original_frame', (109, 60, 109, 77): 'cv.COLOR_BGR2GRAY'}, {}), '(original_frame, cv.COLOR_BGR2GRAY)', True, 'import cv2 as cv\n'), ((131, 20, 131, 55), 'cv2.resize', 'cv.resize', ({(131, 30, 131, 41): 'img_lab_out', (131, 43, 131, 54): 'imshow_size'}, {}), '(img_lab_out, imshow_size)', True, 'import cv2 as cv\n'), ((130, 26, 130, 61), 'cv2.resize', 'cv.resize', ({(130, 36, 130, 47): 'img_bgr_out', (130, 49, 130, 60): 'imshow_size'}, {}), '(img_bgr_out, imshow_size)', True, 'import cv2 as cv\n'), ((27, 20, 27, 34), 'pathlib.Path', 'Path', ({(27, 25, 27, 33): '__file__'}, {}), '(__file__)', False, 'from pathlib import Path\n'), ((28, 20, 28, 34), 'pathlib.Path', 'Path', ({(28, 25, 28, 33): '__file__'}, {}), '(__file__)', False, 'from pathlib import Path\n')] |
chbndrhnns/ahoi-client | swagger_client/models/transfer.py | 8bd25f541c05af17c82904fa250272514b7971f2 | # coding: utf-8
"""
[AHOI cookbook](/ahoi/docs/cookbook/index.html) [Data Privacy](/sandboxmanager/#/privacy) [Terms of Service](/sandboxmanager/#/terms) [Imprint](https://sparkassen-hub.com/impressum/) © 2016‐2017 Starfinanz - Ein Unternehmen der Finanz Informatik # noqa: E501
OpenAPI spec version: 2.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from swagger_client.models.amount import Amount # noqa: F401,E501
class Transfer(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'iban': 'str',
'bic': 'str',
'name': 'str',
'amount': 'Amount',
'purpose': 'str',
'tan_media_id': 'str',
'tan_scheme': 'str'
}
attribute_map = {
'iban': 'iban',
'bic': 'bic',
'name': 'name',
'amount': 'amount',
'purpose': 'purpose',
'tan_media_id': 'tanMediaId',
'tan_scheme': 'tanScheme'
}
def __init__(self, iban=None, bic=None, name=None, amount=None, purpose=None, tan_media_id=None, tan_scheme=None): # noqa: E501
"""Transfer - a model defined in Swagger""" # noqa: E501
self._iban = None
self._bic = None
self._name = None
self._amount = None
self._purpose = None
self._tan_media_id = None
self._tan_scheme = None
self.discriminator = None
self.iban = iban
if bic is not None:
self.bic = bic
self.name = name
self.amount = amount
if purpose is not None:
self.purpose = purpose
self.tan_media_id = tan_media_id
self.tan_scheme = tan_scheme
@property
def iban(self):
"""Gets the iban of this Transfer. # noqa: E501
IBAN - International Bank Account Number (defined in ISO 13616-1) # noqa: E501
:return: The iban of this Transfer. # noqa: E501
:rtype: str
"""
return self._iban
@iban.setter
def iban(self, iban):
"""Sets the iban of this Transfer.
IBAN - International Bank Account Number (defined in ISO 13616-1) # noqa: E501
:param iban: The iban of this Transfer. # noqa: E501
:type: str
"""
if iban is None:
raise ValueError("Invalid value for `iban`, must not be `None`") # noqa: E501
self._iban = iban
@property
def bic(self):
"""Gets the bic of this Transfer. # noqa: E501
BIC - Business Identifier Code (defined in ISO-9362) # noqa: E501
:return: The bic of this Transfer. # noqa: E501
:rtype: str
"""
return self._bic
@bic.setter
def bic(self, bic):
"""Sets the bic of this Transfer.
BIC - Business Identifier Code (defined in ISO-9362) # noqa: E501
:param bic: The bic of this Transfer. # noqa: E501
:type: str
"""
self._bic = bic
@property
def name(self):
"""Gets the name of this Transfer. # noqa: E501
Name - Name of the creditor # noqa: E501
:return: The name of this Transfer. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this Transfer.
Name - Name of the creditor # noqa: E501
:param name: The name of this Transfer. # noqa: E501
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def amount(self):
"""Gets the amount of this Transfer. # noqa: E501
        Amount to be transferred  # noqa: E501
:return: The amount of this Transfer. # noqa: E501
:rtype: Amount
"""
return self._amount
@amount.setter
def amount(self, amount):
"""Sets the amount of this Transfer.
        Amount to be transferred  # noqa: E501
:param amount: The amount of this Transfer. # noqa: E501
:type: Amount
"""
if amount is None:
raise ValueError("Invalid value for `amount`, must not be `None`") # noqa: E501
self._amount = amount
@property
def purpose(self):
"""Gets the purpose of this Transfer. # noqa: E501
Purpose # noqa: E501
:return: The purpose of this Transfer. # noqa: E501
:rtype: str
"""
return self._purpose
@purpose.setter
def purpose(self, purpose):
"""Sets the purpose of this Transfer.
Purpose # noqa: E501
:param purpose: The purpose of this Transfer. # noqa: E501
:type: str
"""
self._purpose = purpose
@property
def tan_media_id(self):
"""Gets the tan_media_id of this Transfer. # noqa: E501
TANMediaId - The identifying ID of the TANMedia. # noqa: E501
:return: The tan_media_id of this Transfer. # noqa: E501
:rtype: str
"""
return self._tan_media_id
@tan_media_id.setter
def tan_media_id(self, tan_media_id):
"""Sets the tan_media_id of this Transfer.
TANMediaId - The identifying ID of the TANMedia. # noqa: E501
:param tan_media_id: The tan_media_id of this Transfer. # noqa: E501
:type: str
"""
if tan_media_id is None:
raise ValueError("Invalid value for `tan_media_id`, must not be `None`") # noqa: E501
self._tan_media_id = tan_media_id
@property
def tan_scheme(self):
"""Gets the tan_scheme of this Transfer. # noqa: E501
TANScheme - The scheme **id** that is used to verify this payment (e.g. \"901\") # noqa: E501
:return: The tan_scheme of this Transfer. # noqa: E501
:rtype: str
"""
return self._tan_scheme
@tan_scheme.setter
def tan_scheme(self, tan_scheme):
"""Sets the tan_scheme of this Transfer.
TANScheme - The scheme **id** that is used to verify this payment (e.g. \"901\") # noqa: E501
:param tan_scheme: The tan_scheme of this Transfer. # noqa: E501
:type: str
"""
if tan_scheme is None:
raise ValueError("Invalid value for `tan_scheme`, must not be `None`") # noqa: E501
self._tan_scheme = tan_scheme
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Transfer):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
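# Instantiation sketch (field values are illustrative; `Amount` is the generated
# model imported above from swagger_client.models.amount):
#   transfer = Transfer(iban='DE89370400440532013000', name='Jane Doe',
#                       amount=Amount(...), tan_media_id='media-1', tan_scheme='901')
#   payload = transfer.to_dict()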
| [((252, 23, 252, 56), 'six.iteritems', 'six.iteritems', ({(252, 37, 252, 55): 'self.swagger_types'}, {}), '(self.swagger_types)', False, 'import six\n')] |
vdonnefort/lisa | external/trappy/tests/test_caching.py | 38e5f246e6c94201a60a8698e7f29277f11c425e | # Copyright 2015-2017 ARM Limited, Google and contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import print_function
from builtins import chr
import os
import json
import shutil
import sys
import unittest
import utils_tests
import trappy
from trappy.ftrace import GenericFTrace
from trappy.systrace import SysTrace
class TestCaching(utils_tests.SetupDirectory):
def __init__(self, *args, **kwargs):
super(TestCaching, self).__init__(
[("trace_sched.txt", "trace.txt"),
("trace_sched.txt", "trace.raw.txt"),
("trace_systrace.html", "trace.html")],
*args,
**kwargs)
def test_cache_created(self):
"""Test cache creation when enabled"""
GenericFTrace.disable_cache = False
traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html'))
for trace in traces:
trace_path = os.path.abspath(trace.trace_path)
trace_dir = os.path.dirname(trace_path)
trace_file = os.path.basename(trace_path)
cache_dir = '.' + trace_file + '.cache'
self.assertTrue(cache_dir in os.listdir(trace_dir))
def test_cache_not_created(self):
"""Test that cache should not be created when disabled """
GenericFTrace.disable_cache = True
traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html'))
for trace in traces:
trace_path = os.path.abspath(trace.trace_path)
trace_dir = os.path.dirname(trace_path)
trace_file = os.path.basename(trace_path)
cache_dir = '.' + trace_file + '.cache'
self.assertFalse(cache_dir in os.listdir(trace_dir))
def test_compare_cached_vs_uncached(self):
""" Test that the cached and uncached traces are same """
# Build the cache, but the actual trace will be parsed
# fresh since this is a first time parse
GenericFTrace.disable_cache = False
uncached_trace = trappy.FTrace()
uncached_dfr = uncached_trace.sched_wakeup.data_frame
# Now read from previously parsed cache by reusing the path
cached_trace = trappy.FTrace(uncached_trace.trace_path)
cached_dfr = cached_trace.sched_wakeup.data_frame
# By default, the str to float conversion done when reading from csv is
# different from the one used when reading from the trace.txt file.
#
# Here's an example:
# - trace.txt string timestamps:
# [76.402065, 80.402065, 80.001337]
# - parsed dataframe timestamps:
# [76.402065000000007, 80.402065000000007, 82.001337000000007]
#
# - csv string timestamps:
# [76.402065, 80.402065, 80.001337]
# - cached dataframe timestamps:
# [76.402064999999993, 80.402064999999993, 82.001337000000007]
#
# To fix this, the timestamps read from the cache are converted using
# the same conversion method as the trace.txt parser, which results in
# cache-read timestamps being identical to trace-read timestamps.
#
# This test ensures that this stays true.
cached_times = [r[0] for r in cached_dfr.iterrows()]
uncached_times = [r[0] for r in uncached_dfr.iterrows()]
self.assertTrue(cached_times == uncached_times)
# compare other columns as well
self.assertTrue([r[1].pid for r in cached_dfr.iterrows()] ==
[r[1].pid for r in uncached_dfr.iterrows()])
self.assertTrue([r[1].comm for r in cached_dfr.iterrows()] ==
[r[1].comm for r in uncached_dfr.iterrows()])
self.assertTrue([r[1].prio for r in cached_dfr.iterrows()] ==
[r[1].prio for r in uncached_dfr.iterrows()])
def test_invalid_cache_overwritten(self):
"""Test a cache with a bad checksum is overwritten"""
# This is a directory so we can't use the files_to_copy arg of
        # SetupDirectory, just do it ourselves.
cache_path = ".trace.txt.cache"
src = os.path.join(utils_tests.TESTS_DIRECTORY, "trace_sched.txt.cache")
shutil.copytree(src, cache_path)
metadata_path = os.path.join(cache_path, "metadata.json")
def read_metadata():
with open(metadata_path, "r") as f:
return json.load(f)
def write_md5(md5):
metadata = read_metadata()
metadata["md5sum"] = md5
with open(metadata_path, "w") as f:
json.dump(metadata, f)
# Change 1 character of the stored checksum
md5sum = read_metadata()["md5sum"]
md5sum_inc = md5sum[:-1] + chr(ord(md5sum[-1]) + 1)
write_md5(md5sum_inc)
# Parse a trace, this should delete and overwrite the invalidated cache
GenericFTrace.disable_cache = False
trace = trappy.FTrace()
# Check that the modified md5sum was overwritten
self.assertNotEqual(read_metadata()["md5sum"], md5sum_inc,
"The invalid ftrace cache wasn't overwritten")
def test_cache_dynamic_events(self):
"""Test that caching works if new event parsers have been registered"""
# Parse the trace to create a cache
GenericFTrace.disable_cache = False
trace1 = trappy.FTrace()
# Check we're actually testing what we think we are
if hasattr(trace1, 'dynamic_event'):
raise RuntimeError('Test bug: found unexpected event in trace')
# Now register a new event type, call the constructor again, and check
# that the newly added event (which is not present in the cache) is
# parsed.
parse_class = trappy.register_dynamic_ftrace("DynamicEvent", "dynamic_test_key")
trace2 = trappy.FTrace()
self.assertTrue(len(trace2.dynamic_event.data_frame) == 1)
trappy.unregister_dynamic_ftrace(parse_class)
def test_cache_normalize_time(self):
"""Test that caching doesn't break normalize_time"""
GenericFTrace.disable_cache = False
# Times in trace_sched.txt
start_time = 6550.018511
first_freq_event_time = 6550.056870
# Parse without normalizing time
trace1 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'],
normalize_time=False)
self.assertEqual(trace1.cpu_frequency.data_frame.index[0],
first_freq_event_time)
# Parse with normalized time
trace2 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'],
normalize_time=True)
self.assertEqual(trace2.cpu_frequency.data_frame.index[0],
first_freq_event_time - start_time)
def test_cache_window_broad(self):
"""Test that caching doesn't break the 'window' parameter"""
GenericFTrace.disable_cache = False
trace1 = trappy.FTrace(
events=['sched_wakeup'],
window=(0, 1))
# Check that we're testing what we think we're testing The trace
# contains 2 sched_wakeup events; this window should get rid of one of
# them.
if len(trace1.sched_wakeup.data_frame) != 1:
raise RuntimeError('Test bug: bad sched_wakeup event count')
# Parse again without the window
trace1 = trappy.FTrace(
events=['sched_wakeup'],
window=(0, None))
self.assertEqual(len(trace1.sched_wakeup.data_frame), 2)
def test_cache_window_narrow(self):
"""
Test that applying a window to a cached trace returns EXACTLY what is expected
"""
        # As described in test_compare_cached_vs_uncached, reading from cache
# results in slightly different timestamps
#
# This test verifies that applying windows results in identical
# dataframes whether cache is used or not.
GenericFTrace.disable_cache = False
uncached_trace = trappy.FTrace()
trace = trappy.FTrace(uncached_trace.trace_path,
normalize_time=False,
abs_window=(6550.100000, 6552.000002))
        self.assertAlmostEqual(trace.get_duration(), 1.900002)
self.assertEqual(len(trace.sched_wakeup.data_frame), 2)
self.assertEqual(len(trace.sched_wakeup_new.data_frame), 1)
def test_ftrace_metadata(self):
"""Test that caching keeps trace metadata"""
GenericFTrace.disable_cache = False
self.test_cache_created()
trace = trappy.FTrace()
version = int(trace._version)
cpus = int(trace._cpus)
self.assertEqual(version, 6)
self.assertEqual(cpus, 6)
def test_cache_delete_single(self):
GenericFTrace.disable_cache = False
trace = trappy.FTrace()
trace_path = os.path.abspath(trace.trace_path)
trace_dir = os.path.dirname(trace_path)
trace_file = os.path.basename(trace_path)
cache_dir = '.' + trace_file + '.cache'
number_of_trace_categories = 31
self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories)
os.remove(os.path.join(cache_dir, 'SchedWakeup.csv'))
self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories - 1)
# Generate trace again, should regenerate only the missing item
trace = trappy.FTrace()
self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories)
for c in trace.trace_classes:
if isinstance(c, trace.class_definitions['sched_wakeup']):
self.assertEqual(c.cached, False)
continue
self.assertEqual(c.cached, True)
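# These tests are normally driven by the suite's runner; a direct run is roughly
# `python -m unittest test_caching` (assuming utils_tests and the trace fixtures
# are importable from the working directory).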
| [((70, 25, 70, 40), 'trappy.FTrace', 'trappy.FTrace', ({}, {}), '()', False, 'import trappy\n'), ((74, 23, 74, 63), 'trappy.FTrace', 'trappy.FTrace', ({(74, 37, 74, 62): 'uncached_trace.trace_path'}, {}), '(uncached_trace.trace_path)', False, 'import trappy\n'), ((117, 14, 117, 80), 'os.path.join', 'os.path.join', ({(117, 27, 117, 54): 'utils_tests.TESTS_DIRECTORY', (117, 56, 117, 79): '"""trace_sched.txt.cache"""'}, {}), "(utils_tests.TESTS_DIRECTORY, 'trace_sched.txt.cache')", False, 'import os\n'), ((118, 8, 118, 40), 'shutil.copytree', 'shutil.copytree', ({(118, 24, 118, 27): 'src', (118, 29, 118, 39): 'cache_path'}, {}), '(src, cache_path)', False, 'import shutil\n'), ((120, 24, 120, 65), 'os.path.join', 'os.path.join', ({(120, 37, 120, 47): 'cache_path', (120, 49, 120, 64): '"""metadata.json"""'}, {}), "(cache_path, 'metadata.json')", False, 'import os\n'), ((140, 16, 140, 31), 'trappy.FTrace', 'trappy.FTrace', ({}, {}), '()', False, 'import trappy\n'), ((151, 17, 151, 32), 'trappy.FTrace', 'trappy.FTrace', ({}, {}), '()', False, 'import trappy\n'), ((161, 22, 161, 88), 'trappy.register_dynamic_ftrace', 'trappy.register_dynamic_ftrace', ({(161, 53, 161, 67): '"""DynamicEvent"""', (161, 69, 161, 87): '"""dynamic_test_key"""'}, {}), "('DynamicEvent', 'dynamic_test_key')", False, 'import trappy\n'), ((163, 17, 163, 32), 'trappy.FTrace', 'trappy.FTrace', ({}, {}), '()', False, 'import trappy\n'), ((166, 8, 166, 53), 'trappy.unregister_dynamic_ftrace', 'trappy.unregister_dynamic_ftrace', ({(166, 41, 166, 52): 'parse_class'}, {}), '(parse_class)', False, 'import trappy\n'), ((177, 17, 178, 52), 'trappy.FTrace', 'trappy.FTrace', (), '', False, 'import trappy\n'), ((184, 17, 185, 51), 'trappy.FTrace', 'trappy.FTrace', (), '', False, 'import trappy\n'), ((194, 17, 196, 26), 'trappy.FTrace', 'trappy.FTrace', (), '', False, 'import trappy\n'), ((205, 17, 207, 29), 'trappy.FTrace', 'trappy.FTrace', (), '', False, 'import trappy\n'), ((222, 25, 222, 40), 'trappy.FTrace', 'trappy.FTrace', ({}, {}), '()', False, 'import trappy\n'), ((224, 16, 226, 68), 'trappy.FTrace', 'trappy.FTrace', (), '', False, 'import trappy\n'), ((239, 16, 239, 31), 'trappy.FTrace', 'trappy.FTrace', ({}, {}), '()', False, 'import trappy\n'), ((249, 16, 249, 31), 'trappy.FTrace', 'trappy.FTrace', ({}, {}), '()', False, 'import trappy\n'), ((251, 21, 251, 54), 'os.path.abspath', 'os.path.abspath', ({(251, 37, 251, 53): 'trace.trace_path'}, {}), '(trace.trace_path)', False, 'import os\n'), ((252, 20, 252, 47), 'os.path.dirname', 'os.path.dirname', ({(252, 36, 252, 46): 'trace_path'}, {}), '(trace_path)', False, 'import os\n'), ((253, 21, 253, 49), 'os.path.basename', 'os.path.basename', ({(253, 38, 253, 48): 'trace_path'}, {}), '(trace_path)', False, 'import os\n'), ((262, 16, 262, 31), 'trappy.FTrace', 'trappy.FTrace', ({}, {}), '()', False, 'import trappy\n'), ((42, 18, 42, 33), 'trappy.FTrace', 'trappy.FTrace', ({}, {}), '()', False, 'import trappy\n'), ((42, 35, 42, 71), 'trappy.SysTrace', 'trappy.SysTrace', (), '', False, 'import trappy\n'), ((45, 25, 45, 58), 'os.path.abspath', 'os.path.abspath', ({(45, 41, 45, 57): 'trace.trace_path'}, {}), '(trace.trace_path)', False, 'import os\n'), ((46, 24, 46, 51), 'os.path.dirname', 'os.path.dirname', ({(46, 40, 46, 50): 'trace_path'}, {}), '(trace_path)', False, 'import os\n'), ((47, 25, 47, 53), 'os.path.basename', 'os.path.basename', ({(47, 42, 47, 52): 'trace_path'}, {}), '(trace_path)', False, 'import os\n'), ((55, 18, 55, 33), 'trappy.FTrace', 'trappy.FTrace', ({}, {}), '()', False, 'import trappy\n'), ((55, 35, 55, 71), 'trappy.SysTrace', 'trappy.SysTrace', (), '', False, 'import trappy\n'), ((58, 25, 58, 58), 'os.path.abspath', 'os.path.abspath', ({(58, 41, 58, 57): 'trace.trace_path'}, {}), '(trace.trace_path)', False, 'import os\n'), ((59, 24, 59, 51), 'os.path.dirname', 'os.path.dirname', ({(59, 40, 59, 50): 'trace_path'}, {}), '(trace_path)', False, 'import os\n'), ((60, 25, 60, 53), 'os.path.basename', 'os.path.basename', ({(60, 42, 60, 52): 'trace_path'}, {}), '(trace_path)', False, 'import os\n'), ((258, 18, 258, 60), 'os.path.join', 'os.path.join', ({(258, 31, 258, 40): 'cache_dir', (258, 42, 258, 59): '"""SchedWakeup.csv"""'}, {}), "(cache_dir, 'SchedWakeup.csv')", False, 'import os\n'), ((124, 23, 124, 35), 'json.load', 'json.load', ({(124, 33, 124, 34): 'f'}, {}), '(f)', False, 'import json\n'), ((130, 16, 130, 38), 'json.dump', 'json.dump', ({(130, 26, 130, 34): 'metadata', (130, 36, 130, 37): 'f'}, {}), '(metadata, f)', False, 'import json\n'), ((256, 29, 256, 50), 'os.listdir', 'os.listdir', ({(256, 40, 256, 49): 'cache_dir'}, {}), '(cache_dir)', False, 'import os\n'), ((259, 29, 259, 50), 'os.listdir', 'os.listdir', ({(259, 40, 259, 49): 'cache_dir'}, {}), '(cache_dir)', False, 'import os\n'), ((263, 29, 263, 50), 'os.listdir', 'os.listdir', ({(263, 40, 263, 49): 'cache_dir'}, {}), '(cache_dir)', False, 'import os\n'), ((50, 41, 50, 62), 'os.listdir', 'os.listdir', ({(50, 52, 50, 61): 'trace_dir'}, {}), '(trace_dir)', False, 'import os\n'), ((63, 42, 63, 63), 'os.listdir', 'os.listdir', ({(63, 53, 63, 62): 'trace_dir'}, {}), '(trace_dir)', False, 'import os\n')]
MF-HORIZON/mf-horizon-python-client | src/mf_horizon_client/client/pipelines/blueprints.py | 67a4a094767cb8e5f01956f20f5ca7726781614a | from enum import Enum
class BlueprintType(Enum):
"""
A blueprint is a pipeline template in horizon, and must be specified when creating a new pipeline
Nonlinear
===============================================================================================================
A nonlinear pipeline combines nonlinear feature generation and selection with a nonlinear regressor to generate
    forecasts for a specified target at a specific point in the future.
A number of different regressor types are available here:
1. Mondrian Forest. An adaptation of the probabilistic Mondrian Forest algorithm - https://arxiv.org/abs/1406.2673
Provides Bayesian-esque error bounds, and is our recommended nonlinear regressor of choice.
2. XG Boost
3. Random Forest.
The stages of a nonlinear pipeline are as follows:
A. Forecast Specification
B. Stationarization
C. Feature Generation
D. Feature Filtering
E. Feature Refinement
F. Nonlinear Backtesting
G. Nonlinear Prediction
Linear
===============================================================================================================
A nonlinear pipeline combines nonlinear feature generation with a nonlinear regressor to generate
forecasts that are at a specific target in the future.
The regressor used is a Variational Bayesian Linear Regressor
The stages of a linear pipeline are as follows:
A. Forecast Specification
B. Stationarization
C. Nonlinear Feature Generation
D. Feature Filtering
E. Feature Refinement
F. Linear Backtesting
G. Linear Prediction
Fast Forecasting
===============================================================================================================
The fast forecasting pipeline is intended to be used as a quick assessment of a dataset's predictive performance
It is identical to the linear pipeline, but does not include Feature Refinement.
The stages of a linear pipeline are as follows:
A. Forecast Specification
B. Stationarization
C. Nonlinear Feature Generation
D. Feature Filtering
E. Linear Backtesting
F. Linear Prediction
Feature Selection
===============================================================================================================
The feature selection pipeline assumes that the input data set already encodes information about a signal's
past, such that a horizontal observation vector may be used in a traditional regression sense to map to a target
value at a point in the future.
Feat1 | Feat2 | Feat3 | .... | FeatP
Obs1 ------------------------------------- t
Obs2 ------------------------------------- t-1
Obs3 ------------------------------------- t-2
... .....................................
... .....................................
ObsN ------------------------------------- t-N
Two stages of feature selection are then used in order to maximize predictive performance of the feature set
on specified future points for a given target
The stages of a linear pipeline are as follows:
A. Forecast Specification
B. Feature Filtering
E. Feature Refinement
Feature Discovery
===============================================================================================================
The feature discovery pipeline discovers features to maximize performance for a particular forecast target,
at a specified point in the future. Unlike the feature selection pipeline, it does not assume that the signal
set has already encoded historical information about the original data's past.
The stages of a feature discovery pipeline are as follows:
A. Forecast Specification
B. Feature Generation
C. Feature Filtering
D. Feature Refinement
Signal Encoding
===============================================================================================================
One of Horizon's feature generation methods is to encode signals in the frequency domain, extracting historic
lags that will efficiently represent the information contained within them.
The signal encoding pipeline allows for this functionality to be isolated, where the output is a feature
set that has encoded past information about a signal that can be exported from the platform
The stages of a signal encoding pipeline are as follows:
A. Forecast Specification
B. Feature Generation
C. Feature Filtering
Stationarization
===============================================================================================================
Stationarize a signal set and specified target using Augmented Dicky Fuller analysis, and a detrending method
for the specified target.
The stages of a stationarization pipeline are as follows:
A. Forecast Specification
B. Stationarization
Time-Series Regression
===============================================================================================================
Run Horizon's regression algorithms on a pre-encoded signal set.
Small Data Forecasting
===============================================================================================================
Time-series pipeline for small data. Does not contain any backtesting, and uses all the data for model training.
A. Forecast Specification
B. Stationarization
C. Linear Feature Generation
D. Feature Filtering
E. Feature Refinement
G. Linear Prediction
Variational Forecasting
===============================================================================================================
Creates a stacked lag-embedding matrix by combining a two-stage feature generation and selection process, with
lag-only feature generation.
A. Forecast Specification
B. Stationarization
C. Linear Feature Generation
D. Feature Filtering
E. Linear Feature Generation
F. Feature Filtering
G. Linear Backtesting
H. Linear Prediction
Custom
===============================================================================================================
Advanced: Contains only a forecast specification stage for adding stages manually.
N.B. There is no validation on stage addition.
"""
nonlinear = "nonlinear"
linear = "linear"
fast_forecasting = "fast_forecast"
feature_selection = "feature_selection"
feature_discovery = "feature_discovery"
signal_encoding = "signal_encoding"
stationarisation = "stationarisation"
time_series_regression = "regression"
variational_forecasting = "variational_forecasting"
custom = "custom"
small_data = "small_data"
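    # Usage sketch (the client call below is hypothetical; only the enum
    # mapping is taken from this module):
    #   blueprint = BlueprintType.fast_forecasting   # .value == "fast_forecast"
    #   client.create_pipeline(name="demo", blueprint=blueprint.value)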
| [] |
An-Alone-Cow/pyChess | pyChess/olaf/views.py | 2729a3a89e4d7d79659488ecb1b0bff9cac281a3 | from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.shortcuts import render
from django.urls import reverse
from django.http import HttpResponseRedirect, HttpResponse
from django.utils import timezone
from olaf.models import *
from olaf.forms import *
from olaf.utility import usertools
from olaf.chess.controller import proccess_move
def index ( request ):
args = {}
message = request.session.pop ( 'message', default = None )
if ( message is not None ):
args [ 'message' ] = message
if ( request.user.is_authenticated ):
if ( request.method == 'POST' ):
if ( request.POST.get ( 'game_id' ) is not None ):
game_id = request.POST.get ( 'game_id' )
if ( game_id == '-1' ):
game_id = usertools.new_game ( request )
request.session [ 'game_id' ] = game_id
else:
request.session.pop ( 'game_id', default = None )
f = lambda a : str ( a.date () ) + " - " + str ( a.hour ) + ":" + str ( a.minute ) + ":" + str ( a.second )
args [ 'game_list' ] = list ([str ( game.id ), f ( game.creation_time )] for game in request.user.userdata.game_history.filter ( result = 0 ).order_by ( '-creation_time' ) )
if ( request.session.get ( 'game_id' ) is not None ):
args [ 'game_board' ] = usertools.get_translated_game_board ( request )
else:
args [ 'game_board' ] = None
return render ( request, 'olaf/index_logged_in.html', args )
else:
args [ 'login_form' ] = LoginForm ()
args [ 'register_form' ] = RegisterForm ()
args [ 'score' ] = list ( [user.master.username, user.wins, user.loses, user.ties] for user in UserData.objects.filter ( is_active = True ) )
return render ( request, 'olaf/index_not_logged_in.html', args )
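# Each entry below maps an operation name to:
# (handler, FormClass, failure template, failure context, success url name, session args flashed on success)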
form_operation_dict = {
'login' : (
usertools.login_user,
LoginForm,
'olaf/login.html',
{},
'index',
{ 'message' : "You're logged in. :)"}
),
'register' : (
usertools.register_user,
RegisterForm,
'olaf/register.html',
{},
'index',
{ 'message' : "An activation email has been sent to you" }
),
'password_reset_request' : (
usertools.init_pass_reset_token,
ForgotPasswordUsernameOrEmailForm,
'olaf/password_reset_request.html',
{},
'index',
{ 'message' : "An email containing the password reset link will be sent to your email"}
),
'reset_password' : (
usertools.reset_password_action,
PasswordChangeForm,
'olaf/reset_password.html',
{},
'olaf:login',
{ 'message' : "Password successfully changed, you can login now" }
),
'resend_activation_email' : (
usertools.resend_activation_email,
ResendActivationUsernameOrEmailForm,
'olaf/resend_activation_email.html',
{},
'index',
{ 'message' : "Activation email successfully sent to your email" }
),
}
def form_operation ( request, oper, *args ):
func, FORM, fail_template, fail_args, success_url, success_args = form_operation_dict [ oper ]
if ( request.method == 'POST' ):
form = FORM ( request.POST )
if ( form.is_valid () ):
func ( request, form, *args )
for key in success_args:
request.session [ key ] = success_args [ key ]
return HttpResponseRedirect ( reverse ( success_url ) )
else:
form = FORM ()
message = request.session.pop ( 'message', default = None )
if ( message is not None ):
fail_args [ 'message' ] = message
fail_args [ 'form' ] = form
return render ( request, fail_template, fail_args )
#view functions
def login_user ( request ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
return form_operation ( request, 'login' )
def register_user ( request ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
return form_operation ( request, 'register' )
def password_reset_request ( request ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
return form_operation ( request, 'password_reset_request' )
def reset_password_action ( request, token ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
tk = ExpirableTokenField.objects.filter ( token = token ).first ()
if ( tk is None ):
request.session [ 'message' ] = "Broken link"
return HttpResponseRedirect ( reverse ( 'index' ) )
else:
if ( timezone.now () <= tk.expiration_time ):
return form_operation ( request, 'reset_password', token )
else:
request.session [ 'message' ] = "Link expired, try getting a new one"
return HttpResponseRedirect ( reverse ( 'olaf:reset_password' ) )
def activate_account ( request, token ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
tk = ExpirableTokenField.objects.filter ( token = token ).first ()
if ( tk is None ):
request.session [ 'message' ] = "Broken link"
return HttpResponseRedirect ( reverse ( 'index' ) )
else:
if ( timezone.now () <= tk.expiration_time ):
if ( tk.user.is_active ):
request.session [ 'message' ] = "Account already active"
return HttpResponseRedirect ( reverse ( 'index' ) )
else:
userdata = tk.user
userdata.is_active = True
userdata.save ()
request.session [ 'message' ] = "Your account has been activated successfully"
return HttpResponseRedirect ( reverse ( 'olaf:login' ) )
else:
request.session [ 'message' ] = "Link expired, try getting a new one"
return HttpResponseRedirect ( reverse ( 'olaf:resend_activation_email' ) )
def resend_activation_email ( request ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
return form_operation ( request, 'resend_activation_email' )
def logout_user ( request ):
usertools.logout_user ( request )
request.session [ 'message' ] = "Goodbye :)"
return HttpResponseRedirect ( reverse ( 'index' ) )
def scoreboard ( request ):
if ( request.method == 'POST' ):
username = request.POST.get ( 'username' )
user = User.objects.filter ( username = username ).first ()
if ( user is None ):
request.session [ 'message' ] = "User not found"
return HttpResponseRedirect ( reverse ( 'olaf:scoreboard' ) )
else:
return HttpResponseRedirect ( reverse ( 'olaf:user_profile', args = (username, ) ) )
else:
args = {}
message = request.session.pop ( 'message', default = None )
if ( message is not None ):
args [ 'message' ] = message
lst = [ (user.master.username, user.wins, user.loses, user.ties) for user in UserData.objects.filter ( is_active = True ) ]
args [ 'lst' ] = lst
if ( request.user.is_authenticated ):
args [ 'logged_in' ] = True
return render ( request, 'olaf/scoreboard.html', args )
def move ( request ):
proccess_move ( request )
	return HttpResponseRedirect ( reverse ( 'index' ) ) | [((111, 8, 111, 52), 'django.shortcuts.render', 'render', ({(111, 17, 111, 24): 'request', (111, 26, 111, 39): 'fail_template', (111, 41, 111, 50): 'fail_args'}, {}), '(request, fail_template, fail_args)', False, 'from django.shortcuts import render\n'), ((179, 1, 179, 34), 'olaf.utility.usertools.logout_user', 'usertools.logout_user', ({(179, 25, 179, 32): 'request'}, {}), '(request)', False, 'from olaf.utility import usertools\n'), ((210, 1, 210, 26), 'olaf.chess.controller.proccess_move', 'proccess_move', ({(210, 17, 210, 24): 'request'}, {}), '(request)', False, 'from olaf.chess.controller import proccess_move\n'), ((41, 9, 41, 62), 'django.shortcuts.render', 'render', ({(41, 18, 41, 25): 'request', (41, 27, 41, 54): '"""olaf/index_logged_in.html"""', (41, 56, 41, 60): 'args'}, {}), "(request, 'olaf/index_logged_in.html', args)", False, 'from django.shortcuts import render\n'), ((47, 9, 47, 66), 'django.shortcuts.render', 'render', ({(47, 18, 47, 25): 'request', (47, 27, 47, 58): '"""olaf/index_not_logged_in.html"""', (47, 60, 47, 64): 'args'}, {}), "(request, 'olaf/index_not_logged_in.html', args)", False, 'from django.shortcuts import render\n'), ((182, 31, 182, 50), 'django.urls.reverse', 'reverse', ({(182, 41, 182, 48): '"""index"""'}, {}), "('index')", False, 'from django.urls import reverse\n'), ((207, 9, 207, 57), 'django.shortcuts.render', 'render', ({(207, 18, 207, 25): 'request', (207, 27, 207, 49): '"""olaf/scoreboard.html"""', (207, 51, 207, 55): 'args'}, {}), "(request, 'olaf/scoreboard.html', args)", False, 'from django.shortcuts import render\n'), ((212, 31, 212, 50), 'django.urls.reverse', 'reverse', ({(212, 41, 212, 48): '"""index"""'}, {}), "('index')", False, 'from django.urls import reverse\n'), ((37, 27, 37, 74), 'olaf.utility.usertools.get_translated_game_board', 'usertools.get_translated_game_board', ({(37, 65, 37, 72): 'request'}, {}), '(request)', False, 'from olaf.utility import usertools\n'), ((117, 32, 117, 51), 'django.urls.reverse', 'reverse', ({(117, 42, 117, 49): '"""index"""'}, {}), "('index')", False, 'from django.urls import reverse\n'), ((123, 32, 123, 51), 'django.urls.reverse', 'reverse', ({(123, 42, 123, 49): '"""index"""'}, {}), "('index')", False, 'from django.urls import reverse\n'), ((129, 32, 129, 51), 'django.urls.reverse', 'reverse', ({(129, 42, 129, 49): '"""index"""'}, {}), "('index')", False, 'from django.urls import reverse\n'), ((135, 32, 135, 51), 'django.urls.reverse', 'reverse', ({(135, 42, 135, 49): '"""index"""'}, {}), "('index')", False, 'from django.urls import reverse\n'), ((140, 32, 140, 51), 'django.urls.reverse', 'reverse', ({(140, 42, 140, 49): '"""index"""'}, {}), "('index')", False, 'from django.urls import reverse\n'), ((142, 7, 142, 22), 'django.utils.timezone.now', 'timezone.now', ({}, {}), '()', False, 'from django.utils import timezone\n'), ((150, 32, 150, 51), 'django.urls.reverse', 'reverse', ({(150, 42, 150, 49): '"""index"""'}, {}), "('index')", False, 'from django.urls import reverse\n'), ((155, 32, 155, 51), 'django.urls.reverse', 'reverse', ({(155, 42, 155, 49): '"""index"""'}, {}), "('index')", False, 'from django.urls import reverse\n'), ((157, 7, 157, 22), 'django.utils.timezone.now', 'timezone.now', ({}, {}), '()', False, 'from django.utils import timezone\n'), ((174, 32, 174, 51), 'django.urls.reverse', 'reverse', ({(174, 42, 174, 49): '"""index"""'}, {}), "('index')", False, 'from django.urls import reverse\n'), ((102, 33, 102, 56), 'django.urls.reverse', 'reverse', ({(102, 43, 102, 54): 'success_url'}, {}), '(success_url)', False, 'from django.urls import reverse\n'), ((146, 33, 146, 66), 'django.urls.reverse', 'reverse', ({(146, 43, 146, 64): '"""olaf:reset_password"""'}, {}), "('olaf:reset_password')", False, 'from django.urls import reverse\n'), ((170, 33, 170, 75), 'django.urls.reverse', 'reverse', ({(170, 43, 170, 73): '"""olaf:resend_activation_email"""'}, {}), "('olaf:resend_activation_email')", False, 'from django.urls import reverse\n'), ((187, 9, 187, 52), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', (), '', False, 'from django.contrib.auth.models import User\n'), ((191, 33, 191, 62), 'django.urls.reverse', 'reverse', ({(191, 43, 191, 60): '"""olaf:scoreboard"""'}, {}), "('olaf:scoreboard')", False, 'from django.urls import reverse\n'), ((193, 33, 193, 85), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((27, 15, 27, 45), 'olaf.utility.usertools.new_game', 'usertools.new_game', ({(27, 36, 27, 43): 'request'}, {}), '(request)', False, 'from olaf.utility import usertools\n'), ((160, 34, 160, 53), 'django.urls.reverse', 'reverse', ({(160, 44, 160, 51): '"""index"""'}, {}), "('index')", False, 'from django.urls import reverse\n'), ((167, 34, 167, 58), 'django.urls.reverse', 'reverse', ({(167, 44, 167, 56): '"""olaf:login"""'}, {}), "('olaf:login')", False, 'from django.urls import reverse\n')]
fgitmichael/SelfSupevisedSkillDiscovery | ce_vae_test/main_cetrainer.py | 60eee11cfd67046190dd2784bf40e97bdbed9d40 | from __future__ import print_function
import argparse
import torch
import torch.utils.data
import matplotlib.pyplot as plt
from torch import nn, optim
from torch.nn import functional as F
from torchvision import datasets, transforms
from torchvision.utils import save_image
from torch.utils.tensorboard import SummaryWriter
from ce_vae_test.networks.min_vae import MinVae
from ce_vae_test.trainer.ce_trainer import CeVaeTrainer
from ce_vae_test.sampler.dataset_sampler import SamplerDatasetWithReplacement
parser = argparse.ArgumentParser(description='VAE MNIST Example')
parser.add_argument('--batch-size', type=int, default=128, metavar='N',
help='input batch size for training (default: 128)')
parser.add_argument('--epochs', type=int, default=10, metavar='N',
help='number of epochs to train (default: 10)')
parser.add_argument('--no-cuda', action='store_true', default=False,
help='enables CUDA training')
parser.add_argument('--seed', type=int, default=1, metavar='S',
help='random seed (default: 1)')
parser.add_argument('--log-interval', type=int, default=10, metavar='N',
help='how many batches to wait before logging training status')
args = parser.parse_args()
args.cuda = not args.no_cuda and torch.cuda.is_available()
torch.manual_seed(args.seed)
device = torch.device("cuda" if args.cuda else "cpu")
writer = SummaryWriter()
kwargs = {'num_workers': 1, 'pin_memory': True} if args.cuda else {}
train_sampler = SamplerDatasetWithReplacement(
dataset=datasets.MNIST('../data',
train=True,
download=True,
transform=transforms.ToTensor()),
batch_size=args.batch_size
)
test_sampler = SamplerDatasetWithReplacement(
dataset=datasets.MNIST('../data',
train=False,
transform=transforms.ToTensor()),
batch_size=args.batch_size * 10
)
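# Both loaders wrap MNIST in SamplerDatasetWithReplacement (batches drawn with
# replacement, per the class name); the test sampler uses 10x larger batches.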
cevae = MinVae(
input_size=28 * 28,
output_size=10,
latent_dim=2,
hidden_sizes_dec=[5],
device=device
).to(device)
trainer = CeVaeTrainer(
vae=cevae,
num_epochs=300,
train_loader=train_sampler,
test_loader=test_sampler,
writer=writer,
device=device,
alpha=0.90,
lamda=0.22
)
trainer.run()
| [((17, 9, 17, 65), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((31, 0, 31, 28), 'torch.manual_seed', 'torch.manual_seed', ({(31, 18, 31, 27): 'args.seed'}, {}), '(args.seed)', False, 'import torch\n'), ((33, 9, 33, 53), 'torch.device', 'torch.device', ({(33, 22, 33, 52): "'cuda' if args.cuda else 'cpu'"}, {}), "('cuda' if args.cuda else 'cpu')", False, 'import torch\n'), ((35, 9, 35, 24), 'torch.utils.tensorboard.SummaryWriter', 'SummaryWriter', ({}, {}), '()', False, 'from torch.utils.tensorboard import SummaryWriter\n'), ((60, 10, 69, 1), 'ce_vae_test.trainer.ce_trainer.CeVaeTrainer', 'CeVaeTrainer', (), '', False, 'from ce_vae_test.trainer.ce_trainer import CeVaeTrainer\n'), ((29, 33, 29, 58), 'torch.cuda.is_available', 'torch.cuda.is_available', ({}, {}), '()', False, 'import torch\n'), ((52, 8, 58, 1), 'ce_vae_test.networks.min_vae.MinVae', 'MinVae', (), '', False, 'from ce_vae_test.networks.min_vae import MinVae\n'), ((42, 37, 42, 58), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ({}, {}), '()', False, 'from torchvision import datasets, transforms\n'), ((48, 37, 48, 58), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ({}, {}), '()', False, 'from torchvision import datasets, transforms\n')] |
sergeyberezansky/appr | appr/commands/logout.py | 03168addf05c3efd779dad5168fb0a80d0512100 | from __future__ import absolute_import, division, print_function
from appr.auth import ApprAuth
from appr.commands.command_base import CommandBase, PackageSplit
class LogoutCmd(CommandBase):
name = 'logout'
help_message = "logout"
def __init__(self, options):
super(LogoutCmd, self).__init__(options)
self.status = None
self.registry_host = options.registry_host
self.package_parts = options.package_parts
pname = self.package_parts.get('package', None)
namespace = self.package_parts.get('namespace', None)
self.package = None
if pname:
self.package = "%s/%s" % (namespace, pname)
elif namespace:
self.package = namespace
@classmethod
def _add_arguments(cls, parser):
cls._add_registryhost_option(parser)
parser.add_argument('registry', nargs='?', default=None, action=PackageSplit,
help="registry url: quay.io[/namespace][/repo]\n" +
"If namespace and/or repo are passed, creds only logout for them")
def _call(self):
client = self.RegistryClient(self.registry_host)
ApprAuth().delete_token(client.host, scope=self.package)
self.status = "Logout complete"
if self.registry_host != '*':
self.status += " from %s" % self.registry_host
def _render_dict(self):
return {"status": self.status, 'host': self.registry_host, "scope": self.package}
def _render_console(self):
return " >>> %s" % self.status
| [((33, 8, 33, 18), 'appr.auth.ApprAuth', 'ApprAuth', ({}, {}), '()', False, 'from appr.auth import ApprAuth\n')] |
webnowone/albumMusical | musica/apps.py | b9532ff0ef47b610f0f2b565f0dd77e54d638772 | from django.apps import AppConfig
class MusicaConfig(AppConfig):
name = 'musica'
| [] |
tuxiqae/pytzwhere | tzwhere/tzwhere.py | 32d2bef9ff2d784741471fddb35fbb6732f556d5 | #!/usr/bin/env python
'''tzwhere.py - time zone computation from latitude/longitude.
Ordinarily this is loaded as a module and instances of the tzwhere
class are instantiated and queried directly
'''
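# Usage sketch (the expected value assumes the bundled tz_world data files):
#   >>> tz = tzwhere()
#   >>> tz.tzNameAt(35.29, -89.66)
#   'America/Chicago'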
import collections
try:
import ujson as json # loads 2 seconds faster than normal json
except:
try:
import json
except ImportError:
import simplejson as json
import math
import gzip
import os
import shapely.geometry as geometry
import shapely.prepared as prepared
# We can save about 222MB of RAM by turning our polygon lists into
# numpy arrays rather than tuples, if numpy is installed.
try:
import numpy
WRAP = numpy.asarray
COLLECTION_TYPE = numpy.ndarray
except ImportError:
WRAP = tuple
COLLECTION_TYPE = tuple
# for navigation and pulling values/files
this_dir, this_filename = os.path.split(__file__)
BASE_DIR = os.path.dirname(this_dir)
class tzwhere(object):
SHORTCUT_DEGREES_LATITUDE = 1.0
SHORTCUT_DEGREES_LONGITUDE = 1.0
# By default, use the data file in our package directory
DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__),
'tz_world_shortcuts.json')
DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__),
'tz_world.json.gz')
def __init__(self, forceTZ=False):
'''
Initializes the tzwhere class.
        @forceTZ: If you want to force the lookup method to return a
        timezone even if the point you are looking up is slightly outside its
        bounds, you need to specify this during initialization already
'''
featureCollection = read_tzworld(tzwhere.DEFAULT_POLYGONS)
pgen = feature_collection_polygons(featureCollection)
self.timezoneNamesToPolygons = collections.defaultdict(list)
self.unprepTimezoneNamesToPolygons = collections.defaultdict(list)
for tzname, poly in pgen:
self.timezoneNamesToPolygons[tzname].append(poly)
for tzname, polys in self.timezoneNamesToPolygons.items():
self.timezoneNamesToPolygons[tzname] = WRAP(polys)
if forceTZ:
self.unprepTimezoneNamesToPolygons[tzname] = WRAP(polys)
with open(tzwhere.DEFAULT_SHORTCUTS, 'r') as f:
self.timezoneLongitudeShortcuts, self.timezoneLatitudeShortcuts = json.load(f)
self.forceTZ = forceTZ
for tzname in self.timezoneNamesToPolygons:
# Convert things to tuples to save memory
for degree in self.timezoneLatitudeShortcuts:
for tzname in self.timezoneLatitudeShortcuts[degree].keys():
self.timezoneLatitudeShortcuts[degree][tzname] = \
tuple(self.timezoneLatitudeShortcuts[degree][tzname])
for degree in self.timezoneLongitudeShortcuts.keys():
for tzname in self.timezoneLongitudeShortcuts[degree].keys():
self.timezoneLongitudeShortcuts[degree][tzname] = \
tuple(self.timezoneLongitudeShortcuts[degree][tzname])
def tzNameAt(self, latitude, longitude, forceTZ=False):
'''
        Lets you look up the appropriate timezone for a given
        latitude and longitude.
@latitude: latitude
@longitude: longitude
        @forceTZ: If forceTZ is true and no valid timezone is found, return
        the closest timezone you can find instead. Only works if the point has
        the same integer degree value as the timezone
'''
if forceTZ:
assert self.forceTZ, 'You need to initialize tzwhere with forceTZ'
latTzOptions = self.timezoneLatitudeShortcuts[str(
(math.floor(latitude / self.SHORTCUT_DEGREES_LATITUDE) *
self.SHORTCUT_DEGREES_LATITUDE)
)]
latSet = set(latTzOptions.keys())
lngTzOptions = self.timezoneLongitudeShortcuts[str(
(math.floor(longitude / self.SHORTCUT_DEGREES_LONGITUDE) *
self.SHORTCUT_DEGREES_LONGITUDE)
)]
lngSet = set(lngTzOptions.keys())
possibleTimezones = lngSet.intersection(latSet)
queryPoint = geometry.Point(longitude, latitude)
if possibleTimezones:
for tzname in possibleTimezones:
if isinstance(self.timezoneNamesToPolygons[tzname], COLLECTION_TYPE):
self.timezoneNamesToPolygons[tzname] = list(
map(lambda p: prepared.prep(
geometry.Polygon(p[0], p[1])
), self.timezoneNamesToPolygons[tzname]))
polyIndices = set(latTzOptions[tzname]).intersection(set(
lngTzOptions[tzname]
))
for polyIndex in polyIndices:
poly = self.timezoneNamesToPolygons[tzname][polyIndex]
if poly.contains_properly(queryPoint):
return tzname
if forceTZ:
return self.__forceTZ__(possibleTimezones, latTzOptions,
lngTzOptions, queryPoint)
def __forceTZ__(self, possibleTimezones, latTzOptions,
lngTzOptions, queryPoint):
distances = []
if possibleTimezones:
if len(possibleTimezones) == 1:
return possibleTimezones.pop()
else:
for tzname in possibleTimezones:
if isinstance(self.unprepTimezoneNamesToPolygons[tzname],
COLLECTION_TYPE):
self.unprepTimezoneNamesToPolygons[tzname] = list(
map(lambda p: p.context if isinstance(p, prepared.PreparedGeometry) else geometry.Polygon(p[0], p[1]),
self.timezoneNamesToPolygons[tzname]))
polyIndices = set(latTzOptions[tzname]).intersection(
set(lngTzOptions[tzname]))
for polyIndex in polyIndices:
poly = self.unprepTimezoneNamesToPolygons[
tzname][polyIndex]
d = poly.distance(queryPoint)
distances.append((d, tzname))
if len(distances) > 0:
return sorted(distances, key=lambda x: x[0])[0][1]
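# Minimal usage sketch (assumes the bundled tz_world data files are present;
# the coordinates below are an illustrative point near Memphis, TN):
#
#   w = tzwhere()
#   w.tzNameAt(35.29, -89.66)                 # e.g. 'America/Chicago'
#   w = tzwhere(forceTZ=True)
#   w.tzNameAt(35.29, -89.66, forceTZ=True)   # nearest zone if just outside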
class prepareMap(object):
def __init__(self):
DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__),
'tz_world_shortcuts.json')
DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__),
'tz_world.json.gz')
featureCollection = read_tzworld(DEFAULT_POLYGONS)
pgen = feature_collection_polygons(featureCollection)
tzNamesToPolygons = collections.defaultdict(list)
for tzname, poly in pgen:
tzNamesToPolygons[tzname].append(poly)
for tzname, polys in tzNamesToPolygons.items():
tzNamesToPolygons[tzname] = \
WRAP(tzNamesToPolygons[tzname])
timezoneLongitudeShortcuts,\
timezoneLatitudeShortcuts = self.construct_shortcuts(
tzNamesToPolygons, tzwhere.SHORTCUT_DEGREES_LONGITUDE,
tzwhere.SHORTCUT_DEGREES_LATITUDE)
with open(DEFAULT_SHORTCUTS, 'w') as f:
json.dump(
(timezoneLongitudeShortcuts, timezoneLatitudeShortcuts), f)
@staticmethod
def construct_shortcuts(timezoneNamesToPolygons,
shortcut_long, shortcut_lat):
''' Construct our shortcuts for looking up polygons. Much faster
than using an r-tree '''
def find_min_max(ls, gridSize):
minLs = (math.floor(min(ls) / gridSize) *
gridSize)
maxLs = (math.floor(max(ls) / gridSize) *
gridSize)
return minLs, maxLs
timezoneLongitudeShortcuts = {}
timezoneLatitudeShortcuts = {}
for tzname in timezoneNamesToPolygons:
tzLngs = []
tzLats = []
for polyIndex, poly in enumerate(timezoneNamesToPolygons[tzname]):
lngs = [x[0] for x in poly[0]]
lats = [x[1] for x in poly[0]]
tzLngs.extend(lngs)
tzLats.extend(lats)
minLng, maxLng = find_min_max(
lngs, shortcut_long)
minLat, maxLat = find_min_max(
lats, shortcut_lat)
degree = minLng
while degree <= maxLng:
if degree not in timezoneLongitudeShortcuts:
timezoneLongitudeShortcuts[degree] =\
collections.defaultdict(list)
timezoneLongitudeShortcuts[degree][tzname].append(polyIndex)
degree = degree + shortcut_long
degree = minLat
while degree <= maxLat:
if degree not in timezoneLatitudeShortcuts:
timezoneLatitudeShortcuts[degree] =\
collections.defaultdict(list)
timezoneLatitudeShortcuts[degree][tzname].append(polyIndex)
degree = degree + shortcut_lat
return timezoneLongitudeShortcuts, timezoneLatitudeShortcuts
def read_tzworld(path):
reader = read_json
return reader(path)
def read_json(path):
with gzip.open(path, "rb") as f:
featureCollection = json.loads(f.read().decode("utf-8"))
return featureCollection
def feature_collection_polygons(featureCollection):
"""Turn a feature collection
into an iterator over polygons.
Given a featureCollection of the kind loaded from the json
input, unpack it to an iterator which produces a series of
(tzname, polygon) pairs, one for every polygon in the
featureCollection. Here tzname is a string and polygon is a
list of floats.
"""
for feature in featureCollection['features']:
tzname = feature['properties']['TZID']
if feature['geometry']['type'] == 'Polygon':
exterior = feature['geometry']['coordinates'][0]
interior = feature['geometry']['coordinates'][1:]
yield (tzname, (exterior, interior))
if __name__ == "__main__":
prepareMap()
| [((34, 26, 34, 49), 'os.path.split', 'os.path.split', ({(34, 40, 34, 48): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((35, 11, 35, 36), 'os.path.dirname', 'os.path.dirname', ({(35, 27, 35, 35): 'this_dir'}, {}), '(this_dir)', False, 'import os\n'), ((42, 37, 42, 62), 'os.path.dirname', 'os.path.dirname', ({(42, 53, 42, 61): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((44, 36, 44, 61), 'os.path.dirname', 'os.path.dirname', ({(44, 52, 44, 60): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((57, 39, 57, 68), 'collections.defaultdict', 'collections.defaultdict', ({(57, 63, 57, 67): 'list'}, {}), '(list)', False, 'import collections\n'), ((58, 45, 58, 74), 'collections.defaultdict', 'collections.defaultdict', ({(58, 69, 58, 73): 'list'}, {}), '(list)', False, 'import collections\n'), ((109, 21, 109, 56), 'shapely.geometry.Point', 'geometry.Point', ({(109, 36, 109, 45): 'longitude', (109, 47, 109, 55): 'latitude'}, {}), '(longitude, latitude)', True, 'import shapely.geometry as geometry\n'), ((165, 28, 165, 57), 'collections.defaultdict', 'collections.defaultdict', ({(165, 52, 165, 56): 'list'}, {}), '(list)', False, 'import collections\n'), ((235, 9, 235, 30), 'gzip.open', 'gzip.open', ({(235, 19, 235, 23): 'path', (235, 25, 235, 29): '"""rb"""'}, {}), "(path, 'rb')", False, 'import gzip\n'), ((68, 78, 68, 90), 'simplejson.load', 'json.load', ({(68, 88, 68, 89): 'f'}, {}), '(f)', True, 'import simplejson as json\n'), ((159, 41, 159, 66), 'os.path.dirname', 'os.path.dirname', ({(159, 57, 159, 65): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((161, 40, 161, 65), 'os.path.dirname', 'os.path.dirname', ({(161, 56, 161, 64): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((179, 12, 180, 75), 'simplejson.dump', 'json.dump', ({(180, 16, 180, 71): '(timezoneLongitudeShortcuts, timezoneLatitudeShortcuts)', (180, 73, 180, 74): 'f'}, {}), '((timezoneLongitudeShortcuts, timezoneLatitudeShortcuts), f)', True, 'import simplejson as json\n'), ((98, 13, 98, 66), 'math.floor', 'math.floor', ({(98, 24, 98, 65): '(latitude / self.SHORTCUT_DEGREES_LATITUDE)'}, {}), '(latitude / self.SHORTCUT_DEGREES_LATITUDE)', False, 'import math\n'), ((103, 13, 103, 68), 'math.floor', 'math.floor', ({(103, 24, 103, 67): '(longitude / self.SHORTCUT_DEGREES_LONGITUDE)'}, {}), '(longitude / self.SHORTCUT_DEGREES_LONGITUDE)', False, 'import math\n'), ((215, 28, 215, 57), 'collections.defaultdict', 'collections.defaultdict', ({(215, 52, 215, 56): 'list'}, {}), '(list)', False, 'import collections\n'), ((223, 28, 223, 57), 'collections.defaultdict', 'collections.defaultdict', ({(223, 52, 223, 56): 'list'}, {}), '(list)', False, 'import collections\n'), ((116, 32, 116, 60), 'shapely.geometry.Polygon', 'geometry.Polygon', ({(116, 49, 116, 53): 'p[0]', (116, 55, 116, 59): 'p[1]'}, {}), '(p[0], p[1])', True, 'import shapely.geometry as geometry\n'), ((143, 101, 143, 129), 'shapely.geometry.Polygon', 'geometry.Polygon', ({(143, 118, 143, 122): 'p[0]', (143, 124, 143, 128): 'p[1]'}, {}), '(p[0], p[1])', True, 'import shapely.geometry as geometry\n')] |
jre21/mindmeld | tests/home_assistant/custom_features.py | 6a88e4b0dfc7971f6bf9ae406b89dbc76f68af81 | from mindmeld.models.helpers import register_query_feature
@register_query_feature(feature_name='average-token-length')
def extract_average_token_length(**args):
"""
    Example query feature that gets the average length of normalized tokens in the query.
Returns:
(function) A feature extraction function that takes a query and
returns the average normalized token length
"""
# pylint: disable=locally-disabled,unused-argument
def _extractor(query, resources):
tokens = query.normalized_tokens
average_token_length = sum([len(t) for t in tokens]) / len(tokens)
return {'average_token_length': average_token_length}
return _extractor
| [((4, 1, 4, 60), 'mindmeld.models.helpers.register_query_feature', 'register_query_feature', (), '', False, 'from mindmeld.models.helpers import register_query_feature\n')] |
woody2371/fishbowl-api | source/statuscodes.py | f34ff9267436b1278985870fbf19863febdb391b | #!/usr/bin/python
# -*- coding: utf-8 -*-
def getstatus(code):
if code == "1000":
value = "Success!"
elif code == "1001":
value = "Unknown Message Received"
elif code == "1002":
value = "Connection to Fishbowl Server was lost"
elif code == "1003":
value = "Some Requests had errors -- now isn't that helpful..."
elif code == "1004":
value = "There was an error with the database."
elif code == "1009":
value = "Fishbowl Server has been shut down."
elif code == "1010":
value = "You have been logged off the server by an administrator."
elif code == "1012":
value = "Unknown request function."
elif code == "1100":
value = "Unknown login error occurred."
elif code == "1110":
value = "A new Integrated Application has been added to Fishbowl Inventory. Please contact your Fishbowl Inventory Administrator to approve this Integrated Application."
elif code == "1111":
value = "This Integrated Application registration key does not match."
elif code == "1112":
value = "This Integrated Application has not been approved by the Fishbowl Inventory Administrator."
elif code == "1120":
value = "Invalid Username or Password."
elif code == "1130":
value = "Invalid Ticket passed to Fishbowl Inventory Server."
elif code == "1131":
value = "Invalid Key value."
elif code == "1140":
value = "Initialization token is not correct type."
elif code == "1150":
value = "Request was invalid"
elif code == "1160":
value = "Response was invalid."
elif code == "1162":
value = "The login limit has been reached for the server's key."
elif code == "1200":
value = "Custom Field is invalid."
elif code == "1500":
value = "The import was not properly formed."
elif code == "1501":
value = "That import type is not supported"
elif code == "1502":
value = "File not found."
elif code == "1503":
value = "That export type is not supported."
elif code == "1504":
value = "File could not be written to."
elif code == "1505":
value = "The import data was of the wrong type."
elif code == "2000":
value = "Was not able to find the Part {0}."
elif code == "2001":
value = "The part was invalid."
elif code == "2100":
value = "Was not able to find the Product {0}."
elif code == "2101":
value = "The product was invalid."
elif code == "2200":
value = "The yield failed."
elif code == "2201":
value = "Commit failed."
elif code == "2202":
value = "Add initial inventory failed."
elif code == "2203":
value = "Can not adjust committed inventory."
elif code == "2300":
value = "Was not able to find the Tag number {0}."
elif code == "2301":
value = "The tag is invalid."
elif code == "2302":
value = "The tag move failed."
elif code == "2303":
value = "Was not able to save Tag number {0}."
elif code == "2304":
value = "Not enough available inventory in Tagnumber {0}."
elif code == "2305":
value = "Tag number {0} is a location."
elif code == "2400":
value = "Invalid UOM."
elif code == "2401":
value = "UOM {0} not found."
elif code == "2402":
value = "Integer UOM {0} cannot have non-integer quantity."
elif code == "2500":
value = "The Tracking is not valid."
elif code == "2510":
value = "Serial number is missing."
elif code == "2511":
value = "Serial number is null."
elif code == "2512":
value = "Serial number is duplicate."
elif code == "2513":
value = "Serial number is not valid."
elif code == "2600":
value = "Location not found."
elif code == "2601":
value = "Invalid location."
elif code == "2602":
value = "Location Group {0} not found."
elif code == "3000":
value = "Customer {0} not found."
elif code == "3001":
value = "Customer is invalid."
elif code == "3100":
value = "Vendor {0} not found."
elif code == "3101":
value = "Vendor is invalid."
elif code == "4000":
value = "There was an error load PO {0}."
elif code == "4001":
value = "Unknow status {0}."
elif code == "4002":
value = "Unknown carrier {0}."
elif code == "4003":
value = "Unknown QuickBooks class {0}."
elif code == "4004":
value = "PO does not have a PO number. Please turn on the auto-assign PO number option in the purchase order module options."
else:
value = 'Unknown status'
return value
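# Example: getstatus("1000") -> "Success!"; any unrecognized code -> 'Unknown status'.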
| [] |
jacob327/docker-flask-nginx-uwsgi-mysql | app/src/server/hoge/hoge_api.py | 4b0731f746d6fda7bfecd082ddef53a9c5ec8f75 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# [Import start]
from flask import Blueprint, jsonify
# [Import end]
app = Blueprint(
'hoge',
__name__,
url_prefix='/hoge'
)
@app.route('/test')
def hoge():
return "\nhogehoge"
| [((8, 6, 12, 1), 'flask.Blueprint', 'Blueprint', (), '', False, 'from flask import Blueprint, jsonify\n')] |
chipbuster/Energy-Languages-Setup | preinstall_setup/makedeb-11.0.1-1-stable/src/makedeb/utils/missing_apt_dependencies.py | 5b6192e1cc73f701a2310ac72520ed540d86c1ae | #!/usr/bin/env python3
import apt_pkg
import sys
from apt_pkg import CURSTATE_INSTALLED, version_compare
from operator import lt, le, eq, ge, gt
# Function mappings for relationship operators.
relation_operators = {"<<": lt, "<=": le, "=": eq, ">=": ge, ">>": gt}
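# e.g. the argument "foo>=1.2" (hypothetical package) splits into name "foo",
# operator ">=" and version "1.2"; bare "<"/">" are treated as apt's strict
# "<<"/">>" operators below.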
# Set up APT cache.
apt_pkg.init()
cache = apt_pkg.Cache(None)
missing_packages = []
for i in sys.argv[1:]:
# Build the package relationship string for use by 'apt-get satisfy'.
relationship_operator = None
for j in ["<=", ">=", "<", ">", "="]:
if j in i:
relationship_operator = j
break
if relationship_operator is not None:
        if relationship_operator in ["<", ">"]:
            relationship_operator_formatted = relationship_operator + relationship_operator
        else:
            relationship_operator_formatted = relationship_operator
package = i.split(relationship_operator)
pkgname = package[0]
pkgver = package[1]
package_string = f"{pkgname} ({relationship_operator_formatted} {pkgver})"
else:
pkgname = i
pkgver = None
package_string = pkgname
# Check if the package is in the cache.
try:
pkg = cache[pkgname]
except KeyError:
missing_packages += [package_string]
continue
# Get the list of installed and provided packages that are currently installed.
installed_pkg_versions = []
if pkg.current_state == CURSTATE_INSTALLED:
installed_pkg_versions += [pkg]
for i in pkg.provides_list:
parent_pkg = i[2].parent_pkg
if parent_pkg.current_state == CURSTATE_INSTALLED:
installed_pkg_versions += [parent_pkg]
# If an installed package was found and no relationship operators were used, the dependency has been satisfied.
if (len(installed_pkg_versions) != 0) and (relationship_operator is None):
continue
# Otherwise, check all matching installed packages and see if any of them fit the specified relationship operator.
matched_pkg = False
for i in installed_pkg_versions:
installed_version = i.current_ver.ver_str
version_result = version_compare(installed_version, pkgver)
if relation_operators[relationship_operator_formatted](version_result, 0):
matched_pkg = True
if not matched_pkg:
missing_packages += [package_string]
for i in missing_packages:
print(i)
exit(0)
| [((12, 0, 12, 14), 'apt_pkg.init', 'apt_pkg.init', ({}, {}), '()', False, 'import apt_pkg\n'), ((13, 8, 13, 27), 'apt_pkg.Cache', 'apt_pkg.Cache', ({(13, 22, 13, 26): 'None'}, {}), '(None)', False, 'import apt_pkg\n'), ((69, 25, 69, 67), 'apt_pkg.version_compare', 'version_compare', ({(69, 41, 69, 58): 'installed_version', (69, 60, 69, 66): 'pkgver'}, {}), '(installed_version, pkgver)', False, 'from apt_pkg import CURSTATE_INSTALLED, version_compare\n')] |
zferic/harmonization-website | cohorts_proj/datasets/migrations/0009_auto_20200824_0617.py | f6a081481df3a3a62cb075fbb63ad0470b0d4e06 | # Generated by Django 3.0.7 on 2020-08-24 06:17
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('datasets', '0008_auto_20200821_1427'),
]
operations = [
migrations.AddField(
model_name='rawdar',
name='AsB',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='AsB_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='AsB_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Ba',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Ba_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='Ba_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Cs',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Cs_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='Cs_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='DMA',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='DMA_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='DMA_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='MMA',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='MMA_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='MMA_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Sr',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Sr_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='Sr_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='iAs',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='iAs_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='iAs_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Ag',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Ag_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Al',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Al_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='As',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='As_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Be',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Be_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cd',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cd_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Co',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Co_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cr',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cr_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cu',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cu_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Fe',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Fe_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Hg',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Hg_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Mn',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Mn_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Mo',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Mo_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Ni',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Ni_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Pb',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Pb_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Sb',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Sb_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Se',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Se_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Sn',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Sn_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Tl',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Tl_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='U',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='U_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='V',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='V_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='W',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='W_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Zn',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Zn_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='urine_specific_gravity',
field=models.FloatField(blank=True, null=True),
),
]
| [((16, 18, 16, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((21, 18, 21, 153), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((27, 18, 27, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((32, 18, 32, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((37, 18, 37, 153), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((43, 18, 43, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((48, 18, 48, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((53, 18, 53, 153), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((59, 18, 59, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((64, 18, 64, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((69, 18, 69, 153), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((75, 18, 75, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((80, 18, 80, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((85, 18, 85, 153), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((91, 18, 91, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((96, 18, 96, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((101, 18, 101, 153), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((107, 18, 107, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((112, 18, 112, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((117, 18, 117, 153), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((123, 18, 123, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((128, 18, 128, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((133, 18, 133, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((138, 18, 138, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((143, 18, 143, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((148, 18, 148, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((153, 18, 153, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', 
False, 'from django.db import migrations, models\n'), ((158, 18, 158, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((163, 18, 163, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((168, 18, 168, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((173, 18, 173, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((178, 18, 178, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((183, 18, 183, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((188, 18, 188, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((193, 18, 193, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((198, 18, 198, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((203, 18, 203, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((208, 18, 208, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((213, 18, 213, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((218, 18, 218, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((223, 18, 223, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((228, 18, 228, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((233, 18, 233, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((238, 18, 238, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((243, 18, 243, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((248, 18, 248, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((253, 18, 253, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((258, 18, 258, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((263, 18, 263, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((268, 18, 268, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((273, 18, 273, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((278, 18, 278, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((283, 18, 283, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, 
models\n'), ((288, 18, 288, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((293, 18, 293, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((298, 18, 298, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((303, 18, 303, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((308, 18, 308, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((313, 18, 313, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((318, 18, 318, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((323, 18, 323, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((328, 18, 328, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((333, 18, 333, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((338, 18, 338, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((343, 18, 343, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n'), ((348, 18, 348, 58), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import migrations, models\n')] |
skvel/pynet_testx | test_hello.py | 46566e059e076cb763f8a10ed7f6ff9eac5b63b1 | print "Hello World!"
print "Trying my hand at Git!"
print "Something else"
for i in range(10):
print i
| [] |
TheDim0n/ProjectManager | tasks/views.py | 50d36e7e3fc71655aa5a82bb19eacc07172ba5e4 | from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.views.generic import DetailView, ListView
from projects.models import Project
from status.models import Status
from .models import Task
from .forms import TaskForm, FilterForm
def _get_projects(user):
projects = [("All", "All"), ('---', '---')]
for item in Project.objects.filter(created_by=user):
projects.append((item.name, item.name))
return projects
def _get_statuses():
statuses = [("All", "All")]
for item in Status.objects.all():
statuses.append((item.text, item.text))
return statuses
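# Both helpers build Django "choices" lists for the FilterForm selects, e.g.
# _get_projects(user) -> [("All", "All"), ("---", "---"), ("SomeProject", "SomeProject")]
# (the project name shown is hypothetical).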
class TaskListView(LoginRequiredMixin, ListView):
login_url = '/users/register'
model = Task
context_object_name = 'tasks'
template_name = 'tasks/index.html'
ordering = ['finish_date']
def get_queryset(self):
queryset = super().get_queryset()
for obj in queryset:
obj.check_expired()
return queryset
def get_context_data(self, *args, **kwargs):
        project_name = self.request.GET.get('project', '')
        status_name = self.request.GET.get('status', '')
if self.request.user.is_authenticated:
tasks = Task.objects.filter(created_by=self.request.user)
if project_name and project_name != "All":
if project_name == '---':
tasks = tasks.filter(level=None)
else:
tasks = tasks.filter(level__project__name=project_name)
if status_name and status_name != "All":
tasks = tasks.filter(status__text=status_name)
status_list = Status.objects.all()
last_initial = {
'status': status_name,
'project': project_name,
}
form = FilterForm(initial=last_initial)
form.fields['project'].choices = _get_projects(user=self.request.user)
form.fields['status'].choices = _get_statuses()
context = super(TaskListView, self).get_context_data(*args, **kwargs)
context['status_list'] = status_list
context['tasks'] = tasks
context['filter_form'] = form
context['task_form'] = TaskForm
return context
class TaskDetailView(DetailView):
model = Task
template_name = 'tasks/details.html'
def get_object(self):
obj = super().get_object()
obj.check_expired()
return obj
def get_context_data(self, *args, **kwargs):
initial_content = {
'name': self.object.name,
'start_date': self.object.start_date,
'finish_date': self.object.finish_date,
'status': self.object.status,
'description': self.object.description,
}
context = super(TaskDetailView, self).get_context_data(*args, **kwargs)
context['task_form'] = TaskForm(initial=initial_content)
return context
class TaskCreateView(LoginRequiredMixin, CreateView):
login_url = '/users/register'
model = Task
form_class = TaskForm
template_name = 'tasks/index.html'
def form_valid(self, form):
form.instance.created_by = self.request.user
return super().form_valid(form)
class TaskUpdateView(LoginRequiredMixin, UpdateView):
login_url = '/users/register'
model = Task
form_class = TaskForm
template_name = "tasks/update_task.html"
def form_valid(self, form):
self.object.check_expired()
return super().form_valid(form)
class TaskDeleteView(DeleteView):
model = Task
template_name = "tasks/delete_task.html"
| [((13, 16, 13, 55), 'projects.models.Project.objects.filter', 'Project.objects.filter', (), '', False, 'from projects.models import Project\n'), ((19, 16, 19, 36), 'status.models.Status.objects.all', 'Status.objects.all', ({}, {}), '()', False, 'from status.models import Status\n'), ((57, 22, 57, 42), 'status.models.Status.objects.all', 'Status.objects.all', ({}, {}), '()', False, 'from status.models import Status\n')] |
minnieteng/smoke_project | smoke/noaa/get_smokeplume_counts.py | cc3c8f16f7759fe29e46d3cec32a3ed6ca86bd5f | import os
import math
import time
import geohash
import geojson
from geojson import MultiLineString
from shapely import geometry
import shapefile
import numpy
import datetime as dt
import pandas as pd
import logging
logger = logging.getLogger(__name__)
source_shape_file_path = "C:/temp/2018/"
threshold = 60*60
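# timestamps are rounded to the nearest hour (threshold = 3600 s),
# e.g. 13:40 -> 14:00 and 13:10 -> 13:00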
cols = ['start', 'end','start_epoch_round','end_epoch_round','start_epoch_round_dt','end_epoch_round_dt']
times = []
for root,dirs,files in os.walk(source_shape_file_path):
    for file in files:
        with open(os.path.join(root,file),"r") as auto:
            if file.endswith(".shp"):
                try:
                    filename = file.replace(".shp","")
                    shape = shapefile.Reader(source_shape_file_path+filename+"/"+file)
                    for r in shape.iterRecords():
                        start_time = dt.datetime.strptime(r[1], '%Y%j %H%M')
                        end_time = dt.datetime.strptime(r[2], '%Y%j %H%M')
                        epoch_s = dt.datetime.timestamp(start_time)
                        epoch_e = dt.datetime.timestamp(end_time)
                        # sometimes start is later than end time, we'll assume the earlier time is start
                        epoch_end_round = round(max(epoch_s,epoch_e) / threshold) * threshold
                        epoch_start_round = round(min(epoch_s,epoch_e) / threshold) * threshold
                        epoch_end_round_dt = dt.datetime.utcfromtimestamp(3600 * ((max(epoch_s,epoch_e) + 1800) // 3600))
                        epoch_start_round_dt = dt.datetime.utcfromtimestamp(3600 * ((min(epoch_s,epoch_e) + 1800) // 3600))
                        times.append([start_time,end_time,epoch_start_round,epoch_end_round,epoch_start_round_dt,epoch_end_round_dt])
                        break
                except Exception:
                    logger.error('failed to parse file:'+source_shape_file_path+filename+"/")
                    continue
df = pd.DataFrame(times, columns=cols)
df.to_csv('noaa_times.csv')
| [((14, 9, 14, 36), 'logging.getLogger', 'logging.getLogger', ({(14, 27, 14, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((20, 23, 20, 54), 'os.walk', 'os.walk', ({(20, 31, 20, 53): 'source_shape_file_path'}, {}), '(source_shape_file_path)', False, 'import os\n'), ((44, 5, 44, 38), 'pandas.DataFrame', 'pd.DataFrame', (), '', True, 'import pandas as pd\n'), ((22, 18, 22, 41), 'os.path.join', 'os.path.join', ({(22, 31, 22, 35): 'root', (22, 36, 22, 40): 'file'}, {}), '(root, file)', False, 'import os\n'), ((26, 26, 26, 84), 'shapefile.Reader', 'shapefile.Reader', ({(26, 43, 26, 83): "source_shape_file_path + filename + '/' + file"}, {}), "(source_shape_file_path + filename + '/' + file)", False, 'import shapefile\n'), ((28, 37, 28, 76), 'datetime.datetime.strptime', 'dt.datetime.strptime', ({(28, 58, 28, 62): 'r[1]', (28, 64, 28, 75): '"""%Y%j %H%M"""'}, {}), "(r[1], '%Y%j %H%M')", True, 'import datetime as dt\n'), ((29, 35, 29, 74), 'datetime.datetime.strptime', 'dt.datetime.strptime', ({(29, 56, 29, 60): 'r[2]', (29, 62, 29, 73): '"""%Y%j %H%M"""'}, {}), "(r[2], '%Y%j %H%M')", True, 'import datetime as dt\n'), ((30, 56, 30, 95), 'datetime.datetime.strptime', 'dt.datetime.strptime', ({(30, 77, 30, 81): 'r[1]', (30, 83, 30, 94): '"""%Y%j %H%M"""'}, {}), "(r[1], '%Y%j %H%M')", True, 'import datetime as dt\n'), ((31, 56, 31, 95), 'datetime.datetime.strptime', 'dt.datetime.strptime', ({(31, 77, 31, 81): 'r[2]', (31, 83, 31, 94): '"""%Y%j %H%M"""'}, {}), "(r[2], '%Y%j %H%M')", True, 'import datetime as dt\n')] |
KRHS-GameProgramming-2015/Manpac | notes/OOBall/OOBall/main-demo.py | 959bf7f5195a4edb528fbbf25b8896fcb28d5327 | import pygame_sdl2
pygame_sdl2.import_as_pygame()
import pygame
import os
import random
import math
from Ball import Ball
def save_state(balls):
"""
Saves the game state.
"""
stateString = ""
with open("state.txt", "w") as f:
for ball in balls:
stateString += "{} {} {} {} {}".format(ball.imageFile,
ball.speedx,
ball.speedy,
ball.rect.centerx,
ball.rect.centery)
stateString += '\n'
f.write(stateString)
def load_state():
try:
objects = []
with open("state.txt", "r") as f:
            for line in f.read().splitlines():
                img, sx, sy, x, y = line.split()
                objects += [Ball(img, [int(sx), int(sy)], [int(x), int(y)])]
return objects
except:
return None
def delete_state():
if os.path.exists("state.txt"):
os.unlink("state.txt")
def main():
pygame.init()
clock = pygame.time.Clock()
infoObject = pygame.display.Info()
#print infoObject.current_w
width = infoObject.current_w
height = infoObject.current_h
size = width, height
bgColor = r,g,b = 0, 0, 0
screen = pygame.display.set_mode(size)
pygame.display.set_mode()
balls = load_state()
delete_state()
    if balls is None:
balls = []
ballTimer = 0
ballTimerMax = .75 * 60
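    # spawn a new ball roughly every 0.75 s (the main loop ticks at 60 FPS below)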
done = False
sleeping = False
font = pygame.font.Font("DejaVuSans.ttf", 124)
text = font.render("Start", True, (255, 255, 255, 255))
textRect = text.get_rect(center = (width/2, height/2))
while not done:
for event in pygame.event.get():
text = font.render(str(event.type), True, (255, 255, 255, 255))
if event.type == pygame.QUIT:
done = True
elif event.type == pygame.KEYDOWN and event.key == pygame.K_AC_BACK:
done = True
elif event.type == pygame.APP_WILLENTERBACKGROUND:
# The app is about to go to sleep. It should save state, cancel
# any timers, and stop drawing the screen until an APP_DIDENTERFOREGROUND
# event shows up.
save_state(balls)
sleeping = True
elif event.type == pygame.APP_DIDENTERFOREGROUND:
# The app woke back up. Delete the saved state (we don't need it),
# restore any times, and start drawing the screen again.
delete_state()
sleeping = False
# For now, we have to re-open the window when entering the
# foreground.
screen = pygame.display.set_mode((1280, 720))
if not sleeping:
ballTimer += 1
if ballTimer >= ballTimerMax:
ballTimer = 0
ballSpeed = [random.randint(-5, 5),
random.randint(-5, 5)]
ballPos = [random.randint(100, width-100),
random.randint(100, height-100)]
balls += [Ball("ball.png",ballSpeed,ballPos)]
save_state(balls)
for ball in balls:
ball.move()
ball.collideScreen(size)
for first in balls:
for second in balls:
if first != second:
first.collideBall(second)
bgColor = r,g,b
screen.fill(bgColor)
for ball in balls:
screen.blit(ball.image, ball.rect)
screen.blit(text, textRect)
pygame.display.flip()
clock.tick(60)
if done:
break
if __name__ == "__main__":
main()
| [((2, 0, 2, 30), 'pygame_sdl2.import_as_pygame', 'pygame_sdl2.import_as_pygame', ({}, {}), '()', False, 'import pygame_sdl2\n'), ((37, 7, 37, 34), 'os.path.exists', 'os.path.exists', ({(37, 22, 37, 33): '"""state.txt"""'}, {}), "('state.txt')", False, 'import os\n'), ((41, 4, 41, 17), 'pygame.init', 'pygame.init', ({}, {}), '()', False, 'import pygame\n'), ((43, 12, 43, 31), 'pygame.time.Clock', 'pygame.time.Clock', ({}, {}), '()', False, 'import pygame\n'), ((44, 17, 44, 38), 'pygame.display.Info', 'pygame.display.Info', ({}, {}), '()', False, 'import pygame\n'), ((55, 13, 55, 42), 'pygame.display.set_mode', 'pygame.display.set_mode', ({(55, 37, 55, 41): 'size'}, {}), '(size)', False, 'import pygame\n'), ((56, 4, 56, 29), 'pygame.display.set_mode', 'pygame.display.set_mode', ({}, {}), '()', False, 'import pygame\n'), ((70, 11, 70, 50), 'pygame.font.Font', 'pygame.font.Font', ({(70, 28, 70, 44): '"""DejaVuSans.ttf"""', (70, 46, 70, 49): '124'}, {}), "('DejaVuSans.ttf', 124)", False, 'import pygame\n'), ((38, 8, 38, 30), 'os.unlink', 'os.unlink', ({(38, 18, 38, 29): '"""state.txt"""'}, {}), "('state.txt')", False, 'import os\n'), ((75, 21, 75, 39), 'pygame.event.get', 'pygame.event.get', ({}, {}), '()', False, 'import pygame\n'), ((121, 12, 121, 33), 'pygame.display.flip', 'pygame.display.flip', ({}, {}), '()', False, 'import pygame\n'), ((100, 29, 100, 50), 'random.randint', 'random.randint', ({(100, 44, 100, 46): '(-5)', (100, 48, 100, 49): '(5)'}, {}), '(-5, 5)', False, 'import random\n'), ((101, 29, 101, 50), 'random.randint', 'random.randint', ({(101, 44, 101, 46): '(-5)', (101, 48, 101, 49): '(5)'}, {}), '(-5, 5)', False, 'import random\n'), ((102, 27, 102, 57), 'random.randint', 'random.randint', ({(102, 42, 102, 45): '(100)', (102, 47, 102, 56): '(width - 100)'}, {}), '(100, width - 100)', False, 'import random\n'), ((103, 29, 103, 60), 'random.randint', 'random.randint', ({(103, 44, 103, 47): '(100)', (103, 49, 103, 59): '(height - 100)'}, {}), '(100, height - 100)', False, 'import random\n'), ((104, 26, 104, 60), 'Ball.Ball', 'Ball', ({(104, 31, 104, 41): '"""ball.png"""', (104, 42, 104, 51): 'ballSpeed', (104, 52, 104, 59): 'ballPos'}, {}), "('ball.png', ballSpeed, ballPos)", False, 'from Ball import Ball\n'), ((94, 25, 94, 61), 'pygame.display.set_mode', 'pygame.display.set_mode', ({(94, 49, 94, 60): '(1280, 720)'}, {}), '((1280, 720))', False, 'import pygame\n')] |
vdbergh/pentanomial | sprt.py | d046e74acde3f961c7afd22fc4f82fa5aeb4c0fd | from __future__ import division
import math, copy
import argparse
from brownian import Brownian
import scipy
import LLRcalc
class sprt:
def __init__(self, alpha=0.05, beta=0.05, elo0=0, elo1=5, elo_model="logistic"):
assert elo_model in ("logistic", "normalized")
self.elo_model = elo_model
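        # Wald's SPRT acceptance bounds on the log-likelihood ratio:
        # H0 is accepted once LLR <= a, H1 once LLR >= b.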
self.a = math.log(beta / (1 - alpha))
self.b = math.log((1 - beta) / alpha)
self.elo0 = elo0
self.elo1 = elo1
self.clamped = False
self.LLR_drift_variance = LLRcalc.LLR_drift_variance_alt2
def elo_to_score(self, elo):
"""
"elo" is expressed in our current elo_model.
"""
if self.elo_model == "normalized":
nt = elo / LLRcalc.nelo_divided_by_nt
return nt * self.sigma_pg + 0.5
else:
return LLRcalc.L_(elo)
def lelo_to_elo(self, lelo):
"""
For external use. "elo" is expressed in our current elo_model.
"lelo" is logistic.
"""
if self.elo_model == "logistic":
return lelo
score = LLRcalc.L_(lelo)
nt = (score - 0.5) / self.sigma_pg
return nt * LLRcalc.nelo_divided_by_nt
def set_state(self, results):
N, self.pdf = LLRcalc.results_to_pdf(results)
if self.elo_model == "normalized":
mu, var = LLRcalc.stats(self.pdf) # code duplication with LLRcalc
if len(results) == 5:
self.sigma_pg = (2 * var) ** 0.5
elif len(results) == 3:
self.sigma_pg = var ** 0.5
else:
assert False
self.s0, self.s1 = [self.elo_to_score(elo) for elo in (self.elo0, self.elo1)]
mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, None)
# llr estimate
self.llr = N * mu_LLR
self.T = N
# now normalize llr (if llr is not legal then the implications
# of this are unclear)
slope = self.llr / N
if self.llr > 1.03 * self.b or self.llr < 1.03 * self.a:
self.clamped = True
if self.llr < self.a:
self.T = self.a / slope
self.llr = self.a
elif self.llr > self.b:
self.T = self.b / slope
self.llr = self.b
def outcome_prob(self, elo):
"""
        The probability that a test with the given elo has a worse outcome
(faster fail, slower pass or a pass changed into a fail).
"""
s = LLRcalc.L_(elo)
mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, s)
sigma_LLR = math.sqrt(var_LLR)
return Brownian(a=self.a, b=self.b, mu=mu_LLR, sigma=sigma_LLR).outcome_cdf(
T=self.T, y=self.llr
)
def lower_cb(self, p):
"""
Maximal elo value such that the observed outcome of the test has probability
less than p.
"""
avg_elo = (self.elo0 + self.elo1) / 2
delta = self.elo1 - self.elo0
N = 30
# Various error conditions must be handled better here!
while True:
elo0 = max(avg_elo - N * delta, -1000)
elo1 = min(avg_elo + N * delta, 1000)
try:
sol, res = scipy.optimize.brentq(
lambda elo: self.outcome_prob(elo) - (1 - p),
elo0,
elo1,
full_output=True,
disp=False,
)
except ValueError:
if elo0 > -1000 or elo1 < 1000:
N *= 2
continue
else:
if self.outcome_prob(elo0) - (1 - p) > 0:
return elo1
else:
return elo0
assert res.converged
break
return sol
def analytics(self, p=0.05):
ret = {}
ret["clamped"] = self.clamped
ret["a"] = self.a
ret["b"] = self.b
ret["elo"] = self.lower_cb(0.5)
ret["ci"] = [self.lower_cb(p / 2), self.lower_cb(1 - p / 2)]
ret["LOS"] = self.outcome_prob(0)
ret["LLR"] = self.llr
return ret
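# Minimal usage sketch (hypothetical pentanomial counts, ordered low to high):
#
#   s = sprt(alpha=0.05, beta=0.05, elo0=0, elo1=5)
#   s.set_state([10, 25, 40, 25, 10])
#   s.analytics()   # -> dict with 'elo', 'ci', 'LOS', 'LLR', ...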
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--alpha", help="probability of a false positve", type=float, default=0.05
)
parser.add_argument(
"--beta", help="probability of a false negative", type=float, default=0.05
)
parser.add_argument(
"--elo0", help="H0 (expressed in LogisticElo)", type=float, default=0.0
)
parser.add_argument(
"--elo1", help="H1 (expressed in LogisticElo)", type=float, default=5.0
)
parser.add_argument("--level", help="confidence level", type=float, default=0.95)
parser.add_argument(
"--elo-model",
help="logistic or normalized",
choices=['logistic', 'normalized'],
default='logistic',
)
parser.add_argument(
"--results",
help="trinomial of pentanomial frequencies, low to high",
nargs="*",
type=int,
required=True,
)
args = parser.parse_args()
results = args.results
if len(results) != 3 and len(results) != 5:
parser.error("argument --results: expected 3 or 5 arguments")
alpha = args.alpha
beta = args.beta
elo0 = args.elo0
elo1 = args.elo1
elo_model = args.elo_model
p = 1 - args.level
s = sprt(alpha=alpha, beta=beta, elo0=elo0, elo1=elo1, elo_model=elo_model)
s.set_state(results)
a = s.analytics(p)
print("Design parameters")
print("=================")
print("False positives : %4.2f%%" % (100 * alpha,))
print("False negatives : %4.2f%%" % (100 * beta,))
print("[Elo0,Elo1] : [%.2f,%.2f]" % (elo0, elo1))
print("Confidence level : %4.2f%%" % (100 * (1 - p),))
print("Elo model : %s" % elo_model)
print("Estimates")
print("=========")
print("Elo : %.2f" % a["elo"])
print(
"Confidence interval : [%.2f,%.2f] (%4.2f%%)"
% (a["ci"][0], a["ci"][1], 100 * (1 - p))
)
print("LOS : %4.2f%%" % (100 * a["LOS"],))
print("Context")
print("=======")
print(
"LLR [u,l] : %.2f %s [%.2f,%.2f]"
% (a["LLR"], "(clamped)" if a["clamped"] else "", a["a"], a["b"])
)
| [((129, 13, 129, 38), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((13, 17, 13, 45), 'math.log', 'math.log', ({(13, 26, 13, 44): 'beta / (1 - alpha)'}, {}), '(beta / (1 - alpha))', False, 'import math, copy\n'), ((14, 17, 14, 45), 'math.log', 'math.log', ({(14, 26, 14, 44): '(1 - beta) / alpha'}, {}), '((1 - beta) / alpha)', False, 'import math, copy\n'), ((37, 16, 37, 32), 'LLRcalc.L_', 'LLRcalc.L_', ({(37, 27, 37, 31): 'lelo'}, {}), '(lelo)', False, 'import LLRcalc\n'), ((42, 22, 42, 53), 'LLRcalc.results_to_pdf', 'LLRcalc.results_to_pdf', ({(42, 45, 42, 52): 'results'}, {}), '(results)', False, 'import LLRcalc\n'), ((76, 12, 76, 27), 'LLRcalc.L_', 'LLRcalc.L_', ({(76, 23, 76, 26): 'elo'}, {}), '(elo)', False, 'import LLRcalc\n'), ((78, 20, 78, 38), 'math.sqrt', 'math.sqrt', ({(78, 30, 78, 37): 'var_LLR'}, {}), '(var_LLR)', False, 'import math, copy\n'), ((28, 19, 28, 34), 'LLRcalc.L_', 'LLRcalc.L_', ({(28, 30, 28, 33): 'elo'}, {}), '(elo)', False, 'import LLRcalc\n'), ((44, 22, 44, 45), 'LLRcalc.stats', 'LLRcalc.stats', ({(44, 36, 44, 44): 'self.pdf'}, {}), '(self.pdf)', False, 'import LLRcalc\n'), ((79, 15, 79, 71), 'brownian.Brownian', 'Brownian', (), '', False, 'from brownian import Brownian\n')] |
t3zeng/mynewt-nimble | tools/hci_throughput/hci.py | e910132947d6b3cd61ef4732867382634178aa08 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from dataclasses import dataclass
import struct
from binascii import unhexlify
import random
############
# DEFINES
############
AF_BLUETOOTH = 31
HCI_CHANNEL_USER = 1
HCI_COMMAND_PACKET = 0x01
HCI_ACL_DATA_PACKET = 0x02
HCI_EVENT_PACKET = 0x04
HCI_EV_CODE_DISCONN_CMP = 0x05
HCI_EV_CODE_CMD_CMP = 0x0e
HCI_EV_CODE_CMD_STATUS = 0x0f
HCI_EV_CODE_LE_META_EVENT = 0x3e
HCI_SUBEV_CODE_LE_ENHANCED_CONN_CMP = 0x0a
HCI_SUBEV_CODE_LE_DATA_LEN_CHANGE = 0x07
HCI_SUBEV_CODE_LE_PHY_UPDATE_CMP = 0x0c
HCI_SUBEV_CODE_LE_CHAN_SEL_ALG = 0x14
HCI_EV_NUM_COMP_PKTS = 0x13
CONN_FAILED_TO_BE_ESTABLISHED = 0x3e
CONN_TIMEOUT = 0x08
OGF_HOST_CTL = 0x03
OCF_SET_EVENT_MASK = 0x0001
OCF_RESET = 0x0003
OGF_INFO_PARAM = 0x04
OCF_READ_LOCAL_COMMANDS = 0x0002
OCF_READ_BD_ADDR = 0x0009
OGF_LE_CTL = 0x08
OCF_LE_SET_EVENT_MASK = 0x0001
OCF_LE_READ_BUFFER_SIZE_V1 = 0x0002
OCF_LE_READ_BUFFER_SIZE_V2 = 0x0060
OCF_LE_SET_RANDOM_ADDRESS = 0x0005
OCF_LE_SET_ADVERTISING_PARAMETERS = 0x0006
OCF_LE_SET_ADVERTISE_ENABLE = 0x000a
OCF_LE_SET_SCAN_PARAMETERS = 0x000b
OCF_LE_SET_SCAN_ENABLE = 0x000c
OCF_LE_CREATE_CONN = 0x000d
OCF_LE_SET_DATA_LEN = 0x0022
OCF_LE_READ_SUGGESTED_DFLT_DATA_LEN = 0x0023
OCF_LE_READ_MAX_DATA_LEN = 0x002f
OCF_LE_READ_PHY = 0x0030
OCF_LE_SET_DFLT_PHY = 0x0031
OCF_LE_SET_PHY = 0x0032
OGF_VENDOR_SPECIFIC = 0x003f
BLE_HCI_OCF_VS_RD_STATIC_ADDR = 0x0001
PUBLIC_ADDRESS_TYPE = 0
STATIC_RANDOM_ADDRESS_TYPE = 1
WAIT_FOR_EVENT_TIMEOUT = 5
WAIT_FOR_EVENT_CONN_TIMEOUT = 25
############
# GLOBAL VAR
############
num_of_bytes_to_send = None # based on supported_max_tx_octets
num_of_packets_to_send = None
events_list = []
bdaddr = '00:00:00:00:00:00'
static_addr = '00:00:00:00:00:00'
le_read_buffer_size = None
conn_handle = 0
requested_tx_octets = 1
requested_tx_time = 1
suggested_dflt_data_len = None
max_data_len = None
phy = None
ev_num_comp_pkts = None
num_of_completed_packets_cnt = 0
num_of_completed_packets_time = 0
############
# FUNCTIONS
############
def get_opcode(ogf: int, ocf: int):
return ((ocf & 0x03ff)|(ogf << 10))
def get_ogf_ocf(opcode: int):
ogf = opcode >> 10
ocf = opcode & 0x03ff
return ogf, ocf
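# HCI carries device addresses in little-endian byte order; the two helpers
# below convert between "xx:xx:xx:xx:xx:xx" strings and raw bytes, e.g.
# cmd_addr_to_ba('00:11:22:33:44:55') -> bytes 55 44 33 22 11 00.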
def cmd_addr_to_ba(addr_str: str):
return unhexlify("".join(addr_str.split(':')))[::-1]
def ba_addr_to_str(addr_ba: bytearray):
addr_str = addr_ba.hex().upper()
return ':'.join(addr_str[i:i+2] for i in range(len(addr_str), -2, -2))[1:]
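# A BLE static random address must have its two most significant bits set
# to 1, and the random part must not be all zeros or all ones; the loop
# below redraws random bits until the pattern contains both 0s and 1s.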
def gen_static_rand_addr():
while True:
x = [random.randint(0,1) for _ in range(0,48)]
if 0 in x[:-2] and 1 in x[:-2]:
x[0] = 1
x[1] = 1
break
addr_int = int("".join([str(x[i]) for i in range(0,len(x))]), 2)
addr_hex = "{0:0{1}x}".format(addr_int, 12)
addr = ":".join(addr_hex[i:i+2] for i in range(0, len(addr_hex), 2))
return addr.upper()
############
# GLOBAL VAR CLASSES
############
@dataclass
class Suggested_Dflt_Data_Length():
status: int
suggested_max_tx_octets: int
suggested_max_tx_time: int
def __init__(self):
self.set()
def set(self, status=0, suggested_max_tx_octets=0, suggested_max_tx_time=0):
self.status = status
self.suggested_max_tx_octets = suggested_max_tx_octets
self.suggested_max_tx_time = suggested_max_tx_time
@dataclass
class Max_Data_Length():
status: int
supported_max_tx_octets: int
supported_max_tx_time: int
supported_max_rx_octets: int
supported_max_rx_time: int
def __init__(self):
self.set()
def set(self, status=0, supported_max_tx_octets=0, supported_max_tx_time=0,
supported_max_rx_octets=0, supported_max_rx_time=0):
self.status = status
self.supported_max_tx_octets = supported_max_tx_octets
self.supported_max_tx_time = supported_max_tx_time
self.supported_max_rx_octets = supported_max_rx_octets
self.supported_max_rx_time = supported_max_rx_time
@dataclass
class LE_Read_Buffer_Size:
status: int
le_acl_data_packet_length: int
total_num_le_acl_data_packets: int
iso_data_packet_len: int
total_num_iso_data_packets: int
def __init__(self):
self.set()
def set(self, status=0, le_acl_data_packet_length=0,
total_num_le_acl_data_packets=0, iso_data_packet_len=0,
total_num_iso_data_packets=0):
self.status = status
self.le_acl_data_packet_length = le_acl_data_packet_length
self.total_num_le_acl_data_packets = total_num_le_acl_data_packets
self.iso_data_packet_len = iso_data_packet_len
self.total_num_iso_data_packets = total_num_iso_data_packets
@dataclass
class LE_Read_PHY:
status: int
connection_handle: int
tx_phy: int
rx_phy: int
def __init__(self):
self.set()
def set(self, status=0, connection_handle=0, tx_phy=0, rx_phy=0):
self.status = status
self.connection_handle = connection_handle
self.tx_phy = tx_phy
self.rx_phy = rx_phy
############
# EVENTS
############
@dataclass
class HCI_Ev_Disconn_Complete:
status: int
connection_handle: int
reason: int
def __init__(self):
self.set()
def set(self, status=0, connection_handle=0, reason=0):
self.status = status
self.connection_handle = connection_handle
self.reason = reason
@dataclass
class HCI_Ev_Cmd_Complete:
num_hci_command_packets: int
opcode: int
return_parameters: int
def __init__(self):
self.set()
def set(self, num_hci_cmd_packets=0, opcode=0, return_parameters=b''):
self.num_hci_command_packets = num_hci_cmd_packets
self.opcode = opcode
self.return_parameters = return_parameters
@dataclass
class HCI_Ev_Cmd_Status:
status: int
num_hci_command_packets: int
opcode: int
def __init__(self):
self.set()
def set(self, status = 0, num_hci_cmd_packets=0, opcode=0):
self.status = status
self.num_hci_command_packets = num_hci_cmd_packets
self.opcode = opcode
@dataclass
class HCI_Ev_LE_Meta:
subevent_code: int
def __init__(self):
self.set()
def set(self, subevent_code=0):
self.subevent_code = subevent_code
@dataclass
class HCI_Ev_LE_Enhanced_Connection_Complete(HCI_Ev_LE_Meta):
status: int
connection_handle: int
role: int
peer_address_type: int
peer_address: str
local_resolvable_private_address: int
peer_resolvable_private_address: int
connection_interval: int
peripheral_latency: int
supervision_timeout: int
central_clock_accuracy: int
def __init__(self):
self.set()
def set(self, subevent_code=0, status=0, connection_handle=0, role=0,
peer_address_type=0, peer_address='00:00:00:00:00:00',
local_resolvable_private_address='00:00:00:00:00:00',
peer_resolvable_private_address='00:00:00:00:00:00',
connection_interval=0, peripheral_latency=0, supervision_timeout=0,
central_clock_accuracy=0):
super().set(subevent_code)
self.status = status
self.connection_handle = connection_handle
self.role = role
self.peer_address_type = peer_address_type
self.peer_address = peer_address
self.local_resolvable_private_address = local_resolvable_private_address
self.peer_resolvable_private_address = peer_resolvable_private_address
self.connection_interval = connection_interval
self.peripheral_latency = peripheral_latency
self.supervision_timeout = supervision_timeout
self.central_clock_accuracy = central_clock_accuracy
@dataclass
class HCI_Ev_LE_Data_Length_Change(HCI_Ev_LE_Meta):
conn_handle: int
max_tx_octets: int
max_tx_time: int
max_rx_octets: int
max_rx_time: int
triggered: int
def __init__(self):
self.set()
def set(self, subevent_code=0, conn_handle=0, max_tx_octets=0,
max_tx_time=0, max_rx_octets=0, max_rx_time=0, triggered=0):
super().set(subevent_code)
self.conn_handle = conn_handle
self.max_tx_octets = max_tx_octets
self.max_tx_time = max_tx_time
self.max_rx_octets = max_rx_octets
self.max_rx_time = max_rx_time
self.triggered = triggered
@dataclass
class HCI_Ev_LE_PHY_Update_Complete(HCI_Ev_LE_Meta):
status: int
connection_handle: int
tx_phy: int
rx_phy: int
def __init__(self):
self.set()
def set(self, subevent_code=0, status=0, connection_handle=0,
tx_phy=0, rx_phy=0):
super().set(subevent_code)
self.status = status
self.connection_handle = connection_handle
self.tx_phy = tx_phy
self.rx_phy = rx_phy
@dataclass
class HCI_Number_Of_Completed_Packets:
num_handles: int
connection_handle: int
num_completed_packets: int
def __init__(self):
self.set()
def set(self, num_handles=0, connection_handle=0, num_completed_packets=0):
self.num_handles = num_handles
self.connection_handle = connection_handle
self.num_completed_packets = num_completed_packets
class HCI_Ev_LE_Chan_Sel_Alg(HCI_Ev_LE_Meta):
connection_handle: int
algorithm: int
def __init__(self):
self.set()
def set(self, subevent_code=0, connection_handle=0, algorithm=0):
super().set(subevent_code)
self.connection_handle = connection_handle
self.algorithm = algorithm
############
# PARAMETERS
############
@dataclass
class HCI_Advertising:
advertising_interval_min: int
advertising_interval_max: int
advertising_type: int
own_address_type: int
peer_address_type: int
peer_address: str
advertising_channel_map: int
advertising_filter_policy: int
ba_full_message: bytearray
def __init__(self):
self.set()
def set(self, advertising_interval_min=0, advertising_interval_max=0, \
advertising_type=0, own_address_type=0, peer_address_type=0, \
peer_address='00:00:00:00:00:00', advertising_channel_map=0, \
advertising_filter_policy=0):
self.advertising_interval_min = advertising_interval_min
self.advertising_interval_max = advertising_interval_max
self.advertising_type = advertising_type
self.own_address_type = own_address_type
self.peer_address_type = peer_address_type
self.peer_address = peer_address
self.advertising_channel_map = advertising_channel_map
self.advertising_filter_policy = advertising_filter_policy
self.ba_full_message = bytearray(struct.pack('<HHBBBBB',
advertising_interval_min, advertising_interval_max,
advertising_type, own_address_type, peer_address_type,
advertising_channel_map, advertising_filter_policy))
peer_addr_ba = cmd_addr_to_ba(peer_address)
self.ba_full_message[7:7] = peer_addr_ba
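        # struct.pack() above leaves no slot for the 6-byte peer address, so
        # the little-endian address bytes are spliced in after the first
        # 7 packed bytes (interval min/max + the three type fields), which
        # places them right after peer_address_type.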
@dataclass
class HCI_Scan:
le_scan_type: int
le_scan_interval: int
le_scan_window: int
own_address_type: int
scanning_filter_policy: int
ba_full_message: bytearray
def __init__(self):
self.set()
def set(self, le_scan_type=0, le_scan_interval=0, le_scan_window=0,
own_address_type=0, scanning_filter_policy=0):
self.le_scan_type = le_scan_type
self.le_scan_interval = le_scan_interval
self.le_scan_window = le_scan_window
self.own_address_type = own_address_type
self.scanning_filter_policy = scanning_filter_policy
self.ba_full_message = bytearray(struct.pack('<BHHBB',le_scan_type,
le_scan_interval, le_scan_window, own_address_type,
scanning_filter_policy))
@dataclass
class HCI_Connect:
le_scan_interval: int
le_scan_window: int
initiator_filter_policy: int
peer_address_type: int
peer_address: str
own_address_type: int
connection_interval_min: int
connection_interval_max: int
max_latency: int
supervision_timeout: int
min_ce_length: int
max_ce_length: int
ba_full_message: bytearray
def __init__(self):
self.set()
def set(self, le_scan_interval=0, le_scan_window=0, \
initiator_filter_policy=0, peer_address_type=0, \
peer_address='00:00:00:00:00:00', own_address_type=0, \
connection_interval_min=0, connection_interval_max=0, \
max_latency=0, supervision_timeout=0, min_ce_length=0, \
max_ce_length=0):
self.le_scan_interval = le_scan_interval
self.le_scan_window = le_scan_window
self.initiator_filter_policy = initiator_filter_policy
self.peer_address_type = peer_address_type
self.peer_address = peer_address
self.own_address_type = own_address_type
self.connection_interval_min = connection_interval_min
self.connection_interval_max = connection_interval_max
self.max_latency = max_latency
self.supervision_timeout = supervision_timeout
self.min_ce_length = min_ce_length
self.max_ce_length = max_ce_length
self.ba_full_message = bytearray(struct.pack('<HHBBBHHHHHH',
le_scan_interval, le_scan_window, initiator_filter_policy,
peer_address_type, own_address_type, connection_interval_min,
connection_interval_max, max_latency,supervision_timeout,
min_ce_length, max_ce_length))
peer_addr_ba = cmd_addr_to_ba(peer_address)
self.ba_full_message[6:6] = peer_addr_ba
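        # same splice trick as HCI_Advertising: the 6 address bytes go after
        # the first 6 packed bytes (scan interval/window + filter policy +
        # peer address type), right before own_address_type.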
############
# RX / TX
############
@dataclass
class HCI_Receive:
packet_type: int
def __init__(self):
self.set()
def set(self,packet_type=0):
self.packet_type = packet_type
@dataclass
class HCI_Recv_Event_Packet(HCI_Receive):
ev_code: int
packet_len: int
recv_data: bytearray
current_event: None
def __init__(self):
self.set()
    def set(self, packet_type=0, ev_code=0, packet_len=0,
            recv_data=bytearray(256)):
        super().set(packet_type)
        self.ev_code = ev_code
        self.packet_len = packet_len
        self.recv_data = recv_data[:packet_len]  # keep only the valid bytes
@dataclass
class HCI_Recv_ACL_Data_Packet(HCI_Receive):
connection_handle: int
pb_flag: int
bc_flag: int
data_total_len: int
data: bytearray
def __init__(self):
self.set()
def set(self, packet_type=0, connection_handle=0,
pb_flag=0, bc_flag=0, total_data_len=0, data=b''):
super().set(packet_type)
self.connection_handle = connection_handle
self.pb_flag = pb_flag
self.bc_flag = bc_flag
self.data_total_len = total_data_len
self.data = data
@dataclass
class HCI_Recv_L2CAP_Data:
pdu_length: int
channel_id: int
data: bytearray
def __init__(self):
self.set()
def set(self, pdu_length=0, channel_id=0, data=b''):
self.pdu_length = pdu_length
self.channel_id = channel_id
self.data = data
@dataclass
class HCI_Cmd_Send:
packet_type: int
ogf: int
ocf: int
packet_len: int
data: bytearray
ba_full_message: bytearray
def __init__(self):
self.set()
def set(self, ogf=0, ocf=0, data=b''):
self.packet_type = HCI_COMMAND_PACKET
self.ogf = ogf
self.ocf = ocf
self.opcode = get_opcode(ogf, ocf)
self.packet_len = len(data)
self.data = data
self.ba_full_message = bytearray(struct.pack('<BHB',
self.packet_type, self.opcode, self.packet_len))
self.ba_full_message.extend(self.data)
@dataclass
class HCI_ACL_Data_Send:
packet_type: int
connection_handle: int
pb_flag: int
bc_flag: int
data_total_length: int
data: bytearray
ba_full_message: bytearray
def __init__(self):
self.set()
def set(self, connection_handle=0, pb_flag=0b00, bc_flag=0b00, data=b''):
self.packet_type = HCI_ACL_DATA_PACKET
self.connection_handle = connection_handle
self.pb_flag = pb_flag
self.bc_flag = bc_flag
self.data_total_length = len(data)
self.data = data
self.ba_full_message = bytearray(struct.pack('<BHH',
self.packet_type,
            ((self.connection_handle & 0x0fff) |  # the handle occupies the low 12 bits
(self.pb_flag << 12) |
(self.bc_flag << 14)),
self.data_total_length))
self.ba_full_message.extend(self.data)
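        # ACL header layout: bits 0-11 carry the connection handle, bits 12-13
        # the PB flag and bits 14-15 the BC flag, followed by the 16-bit
        # total data length.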
@dataclass
class L2CAP_Data_Send:
pdu_length: int
channel_id: int
data: bytearray
ba_full_message: bytearray
def __init__(self):
self.set()
def set(self, pdu_length=0, channel_id=0, data=b''):
if not pdu_length:
self.pdu_length = len(data)
else:
self.pdu_length = pdu_length
self.channel_id = channel_id
self.data = data
fmt_conf = "<HH"
self.ba_full_message = bytearray(struct.pack(fmt_conf,
self.pdu_length, self.channel_id))
self.ba_full_message.extend(data)
| [((121, 13, 121, 32), 'random.randint', 'random.randint', ({(121, 28, 121, 29): '(0)', (121, 30, 121, 31): '(1)'}, {}), '(0, 1)', False, 'import random\n'), ((394, 41, 397, 63), 'struct.pack', 'struct.pack', ({(394, 53, 394, 63): '"""<HHBBBBB"""', (395, 12, 395, 36): 'advertising_interval_min', (395, 38, 395, 62): 'advertising_interval_max', (396, 12, 396, 28): 'advertising_type', (396, 30, 396, 46): 'own_address_type', (396, 48, 396, 65): 'peer_address_type', (397, 12, 397, 35): 'advertising_channel_map', (397, 37, 397, 62): 'advertising_filter_policy'}, {}), "('<HHBBBBB', advertising_interval_min, advertising_interval_max,\n advertising_type, own_address_type, peer_address_type,\n advertising_channel_map, advertising_filter_policy)", False, 'import struct\n'), ((420, 41, 422, 35), 'struct.pack', 'struct.pack', ({(420, 53, 420, 61): '"""<BHHBB"""', (420, 62, 420, 74): 'le_scan_type', (421, 12, 421, 28): 'le_scan_interval', (421, 30, 421, 44): 'le_scan_window', (421, 46, 421, 62): 'own_address_type', (422, 12, 422, 34): 'scanning_filter_policy'}, {}), "('<BHHBB', le_scan_type, le_scan_interval, le_scan_window,\n own_address_type, scanning_filter_policy)", False, 'import struct\n'), ((461, 41, 465, 41), 'struct.pack', 'struct.pack', ({(461, 53, 461, 67): '"""<HHBBBHHHHHH"""', (462, 12, 462, 28): 'le_scan_interval', (462, 30, 462, 44): 'le_scan_window', (462, 46, 462, 69): 'initiator_filter_policy', (463, 12, 463, 29): 'peer_address_type', (463, 31, 463, 47): 'own_address_type', (463, 49, 463, 72): 'connection_interval_min', (464, 12, 464, 35): 'connection_interval_max', (464, 37, 464, 48): 'max_latency', (464, 49, 464, 68): 'supervision_timeout', (465, 12, 465, 25): 'min_ce_length', (465, 27, 465, 40): 'max_ce_length'}, {}), "('<HHBBBHHHHHH', le_scan_interval, le_scan_window,\n initiator_filter_policy, peer_address_type, own_address_type,\n connection_interval_min, connection_interval_max, max_latency,\n supervision_timeout, min_ce_length, max_ce_length)", False, 'import struct\n'), ((553, 41, 554, 75), 'struct.pack', 'struct.pack', ({(553, 53, 553, 59): '"""<BHB"""', (554, 28, 554, 44): 'self.packet_type', (554, 46, 554, 57): 'self.opcode', (554, 59, 554, 74): 'self.packet_len'}, {}), "('<BHB', self.packet_type, self.opcode, self.packet_len)", False, 'import struct\n'), ((577, 41, 582, 35), 'struct.pack', 'struct.pack', ({(577, 53, 577, 59): '"""<BHH"""', (578, 12, 578, 28): 'self.packet_type', (579, 13, 581, 36): 'self.connection_handle & 3839 | self.pb_flag << 12 | self.bc_flag << 14', (582, 12, 582, 34): 'self.data_total_length'}, {}), "('<BHH', self.packet_type, self.connection_handle & 3839 | self.\n pb_flag << 12 | self.bc_flag << 14, self.data_total_length)", False, 'import struct\n'), ((603, 41, 604, 73), 'struct.pack', 'struct.pack', ({(603, 53, 603, 61): 'fmt_conf', (604, 40, 604, 55): 'self.pdu_length', (604, 57, 604, 72): 'self.channel_id'}, {}), '(fmt_conf, self.pdu_length, self.channel_id)', False, 'import struct\n')] |
populationgenomics/analysis-runner | examples/dataproc/query.py | f42bedb1dc430a813350fb4b5514bcc7b845f0fc | """Simple Hail query example."""
import click
import hail as hl
from bokeh.io.export import get_screenshot_as_png
from analysis_runner import output_path
GNOMAD_HGDP_1KG_MT = (
'gs://gcp-public-data--gnomad/release/3.1/mt/genomes/'
'gnomad.genomes.v3.1.hgdp_1kg_subset_dense.mt'
)
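# Public gnomAD v3.1 HGDP + 1000 Genomes dense subset, read directly from the
# gnomAD public GCS bucket.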
@click.command()
@click.option('--rerun', help='Whether to overwrite cached files', default=False)
def query(rerun):
"""Query script entry point."""
hl.init(default_reference='GRCh38')
sample_qc_path = output_path('sample_qc.mt')
if rerun or not hl.hadoop_exists(sample_qc_path):
mt = hl.read_matrix_table(GNOMAD_HGDP_1KG_MT)
mt = mt.head(100, n_cols=100)
mt_qc = hl.sample_qc(mt)
mt_qc.write(sample_qc_path)
mt_qc = hl.read_matrix_table(sample_qc_path)
plot_filename = output_path('call_rate_plot.png', 'web')
if rerun or not hl.hadoop_exists(plot_filename):
call_rate_plot = hl.plot.histogram(
mt_qc.sample_qc.call_rate, range=(0, 1), legend='Call rate'
)
with hl.hadoop_open(plot_filename, 'wb') as f:
get_screenshot_as_png(call_rate_plot).save(f, format='PNG')
if __name__ == '__main__':
query() # pylint: disable=no-value-for-parameter
| [((15, 1, 15, 16), 'click.command', 'click.command', ({}, {}), '()', False, 'import click\n'), ((16, 1, 16, 81), 'click.option', 'click.option', (), '', False, 'import click\n'), ((20, 4, 20, 39), 'hail.init', 'hl.init', (), '', True, 'import hail as hl\n'), ((22, 21, 22, 48), 'analysis_runner.output_path', 'output_path', ({(22, 33, 22, 47): '"""sample_qc.mt"""'}, {}), "('sample_qc.mt')", False, 'from analysis_runner import output_path\n'), ((28, 12, 28, 48), 'hail.read_matrix_table', 'hl.read_matrix_table', ({(28, 33, 28, 47): 'sample_qc_path'}, {}), '(sample_qc_path)', True, 'import hail as hl\n'), ((30, 20, 30, 60), 'analysis_runner.output_path', 'output_path', ({(30, 32, 30, 52): '"""call_rate_plot.png"""', (30, 54, 30, 59): '"""web"""'}, {}), "('call_rate_plot.png', 'web')", False, 'from analysis_runner import output_path\n'), ((24, 13, 24, 53), 'hail.read_matrix_table', 'hl.read_matrix_table', ({(24, 34, 24, 52): 'GNOMAD_HGDP_1KG_MT'}, {}), '(GNOMAD_HGDP_1KG_MT)', True, 'import hail as hl\n'), ((26, 16, 26, 32), 'hail.sample_qc', 'hl.sample_qc', ({(26, 29, 26, 31): 'mt'}, {}), '(mt)', True, 'import hail as hl\n'), ((32, 25, 34, 9), 'hail.plot.histogram', 'hl.plot.histogram', (), '', True, 'import hail as hl\n'), ((23, 20, 23, 52), 'hail.hadoop_exists', 'hl.hadoop_exists', ({(23, 37, 23, 51): 'sample_qc_path'}, {}), '(sample_qc_path)', True, 'import hail as hl\n'), ((31, 20, 31, 51), 'hail.hadoop_exists', 'hl.hadoop_exists', ({(31, 37, 31, 50): 'plot_filename'}, {}), '(plot_filename)', True, 'import hail as hl\n'), ((35, 13, 35, 48), 'hail.hadoop_open', 'hl.hadoop_open', ({(35, 28, 35, 41): 'plot_filename', (35, 43, 35, 47): '"""wb"""'}, {}), "(plot_filename, 'wb')", True, 'import hail as hl\n'), ((36, 12, 36, 49), 'bokeh.io.export.get_screenshot_as_png', 'get_screenshot_as_png', ({(36, 34, 36, 48): 'call_rate_plot'}, {}), '(call_rate_plot)', False, 'from bokeh.io.export import get_screenshot_as_png\n')] |
darkarnium/ptpip | ptpip/ptpip.py | c54eed4d7509ecfc6973a00496a9e80fb7473fa2 | import uuid
import time
import socket
import struct
class PtpIpConnection(object):
"""docstring for PtpIP"""
def __init__(self):
super(PtpIpConnection, self).__init__()
self.session = None
self.session_events = None
self.session_id = None
self.cmd_queue = []
self.event_queue = []
self.object_queue = []
def open(self, host='192.168.1.1', port=15740):
        # Open both sessions: the first for commands, the second for events
self.session = self.connect(host=host, port=port)
self.send_recieve_ptpip_packet(PtpIpInitCmdReq(), self.session)
self.session_events = self.connect(host=host, port=port)
self.send_recieve_ptpip_packet(PtpIpEventReq(), self.session_events)
# 0x1002 OpenSession
        ptip_cmd = PtpIpCmdRequest(cmd=0x1002, param1=struct.unpack('<I', self.session_id)[0])  # '<I' = fixed 4-byte little-endian
self.send_recieve_ptpip_packet(ptip_cmd, self.session)
def communication_thread(self):
while True:
if len(self.cmd_queue) == 0:
                # do a ping/pong exchange to keep the connection alive.
                # A plain PtpIpPing packet never got a proper reply here, so
                # the device status (DeviceReady, 0x90C8) is queried instead.
ptpip_packet_reply = self.send_recieve_ptpip_packet(PtpIpCmdRequest(cmd=0x90C8),
self.session)
if isinstance(ptpip_packet_reply, PtpIpCmdResponse):
time.sleep(1)
continue
else:
                # get the next command from the command queue
ptip_cmd = self.cmd_queue.pop()
ptpip_packet_reply = self.send_recieve_ptpip_packet(ptip_cmd, self.session)
                if (ptpip_packet_reply.ptp_response_code == 0x2001 or
                        ptpip_packet_reply.ptp_response_code == 0x2019):
                    # 0x2001 = OK, 0x2019 = Device Busy (accepted, retry later)
                    print("Cmd sent successfully")
                else:
                    print(f"cmd reply is: {ptpip_packet_reply.ptp_response_code}")
# wait 1 second before new packets are processed/send to the camera
time.sleep(1)
pass
def send_ptpip_cmd(self, ptpip_packet):
self.cmd_queue.append(ptpip_packet)
def connect(self, host='192.168.1.1', port=15740):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
s.connect((host, port))
except socket.error as message:
if s:
s.close()
print(f"Could not open socket: {message}")
return s
def send_recieve_ptpip_packet(self, ptpip_packet, session):
if isinstance(ptpip_packet, PtpIpInitCmdReq):
self.send_data(ptpip_packet.data(), session)
# set the session id of the object if the reply is of type PtpIpInitCmdAck
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
if isinstance(ptpip_packet_reply, PtpIpInitCmdAck):
self.session_id = ptpip_packet_reply.session_id
elif isinstance(ptpip_packet, PtpIpEventReq):
self.send_ptpip_event_req(ptpip_packet, session)
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
elif isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x90C7:
self.send_data(ptpip_packet.data(), session)
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
if isinstance(ptpip_packet_reply, PtpIpStartDataPacket):
data_length = struct.unpack('I', ptpip_packet_reply.length)[0]
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
data = ptpip_packet_reply.data
while isinstance(ptpip_packet_reply, PtpIpDataPacket):
data = data + ptpip_packet_reply.data
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
if data_length == len(data):
events = PtpIpEventFactory(data).get_events()
for event in events:
self.event_queue.append(event)
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
elif isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x1009:
self.send_data(ptpip_packet.data(), session)
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
if isinstance(ptpip_packet_reply, PtpIpStartDataPacket):
data_length = struct.unpack('I', ptpip_packet_reply.length)[0]
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
data = ptpip_packet_reply.data
while isinstance(ptpip_packet_reply, PtpIpDataPacket):
data = data + ptpip_packet_reply.data
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
if data_length == len(data):
self.object_queue.append(PtpIpDataObject(ptpip_packet.param1, data))
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
else:
self.send_data(ptpip_packet.data(), session)
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
return ptpip_packet_reply
def send_ptpip_event_req(self, ptpip_packet, session):
        # use the connection's session id if the packet does not specify one
if ptpip_packet.session_id is None:
ptpip_packet.session_id = self.session_id
self.send_data(ptpip_packet.data(), session)
def send_data(self, data, session):
session.send(struct.pack('I', len(data) + 4) + data)
def recieve_data(self, session):
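        # every PTP/IP packet starts with a 4-byte little-endian total length
        # that counts the length field itself; keep reading until the whole
        # packet is buffered, then strip those 4 bytes before returning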
data = session.recv(4)
(data_length,) = struct.unpack('I', data)
print(f"Packet length: {data_length}")
        while data_length > len(data):
data += session.recv(data_length - len(data))
return data[4:]
class PtpIpPacket(object):
"""docstring for PtpIpCmd"""
def __init__(self):
super(PtpIpPacket, self).__init__()
def factory(self, data=None):
if data is None:
self.cmdtype = None
else:
print(f"Cmd Type: {struct.unpack('I', data[0:4])[0]}")
self.cmdtype = struct.unpack('I', data[0:4])[0]
if self.cmdtype == 1:
return PtpIpInitCmdReq(data[4:])
elif self.cmdtype == 2:
return PtpIpInitCmdAck(data[4:])
elif self.cmdtype == 3:
return PtpIpEventReq(data[4:])
elif self.cmdtype == 4:
return PtpIpEventAck(data[4:])
elif self.cmdtype == 5:
return PtpIpInitFail(data[4:])
elif self.cmdtype == 6:
return PtpIpCmdRequest(data[4:])
elif self.cmdtype == 7:
return PtpIpCmdResponse(data[4:])
elif self.cmdtype == 9:
return PtpIpStartDataPacket(data[4:])
elif self.cmdtype == 10:
return PtpIpDataPacket(data[4:])
elif self.cmdtype == 12:
return PtpIpEndDataPacket(data[4:])
elif self.cmdtype == 13:
return PtpIpPing(data[4:])
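            # any other packet type is left unhandled and factory() returns None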
def data(self):
pass
class PtpIpInitCmdReq(PtpIpPacket):
"""docstring for PtpIpInitCmd"""
def __init__(self, data=None):
super(PtpIpInitCmdReq, self).__init__()
self.cmdtype = struct.pack('I', 0x01)
self.version = struct.pack('>I', 0x0100)
if data is None:
guid = uuid.uuid4()
self.guid = guid.bytes
self.hostname = socket.gethostname() + '\x00'
self.hostname = self.hostname.encode('utf-16-le')
else:
self.guid = data[0:16]
            self.hostname = data[16:]  # hostname follows the 16-byte GUID
def data(self):
return self.cmdtype + self.guid + self.hostname + self.version
class PtpIpInitCmdAck(PtpIpPacket):
"""docstring for PtpIpInitCmd"""
def __init__(self, data=None):
super(PtpIpInitCmdAck, self).__init__()
self.cmdtype = struct.pack('I', 0x02)
if data is not None:
self.session_id = data[0:4]
self.guid = data[4:20]
self.hostname = data[20:]
class PtpIpEventReq(PtpIpPacket):
"""docstring for PtpIpInitCmd"""
def __init__(self, data=None, session_id=None):
super(PtpIpEventReq, self).__init__()
self.cmdtype = struct.pack('I', 0x03)
self.session_id = None
if data is not None:
self.session_id = data[0:4]
elif session_id is not None:
self.session_id = session_id
def data(self):
if self.session_id:
return self.cmdtype + self.session_id
return self.cmdtype
class PtpIpEventAck(PtpIpPacket):
"""docstring for PtpIpInitCmd"""
def __init__(self, data=None):
super(PtpIpEventAck, self).__init__()
self.cmdtype = struct.pack('I', 0x04)
class PtpIpInitFail(PtpIpPacket):
"""docstring for PtpIpInitCmd"""
def __init__(self, data=None):
super(PtpIpInitFail, self).__init__()
self.cmdtype = struct.pack('I', 0x05)
class PtpIpCmdRequest(PtpIpPacket):
"""
Operation Code Description
0x1001 GetDeviceInfo
0x1002 OpenSession
0x1003 CloseSession
0x1004 GetStorageIDs
0x1005 GetStorageInfo
0x1006 GetNumObjects
0x1007 GetObjectHandles
0x1008 GetObjectInfo
0x1009 GetObject
0x100A GetThumb
0x100B DeleteObject
0x100C SendObjectInfo
0x100D SendObject
0x100E InitiateCapture
0x100F FormatStore
0x1014 GetDevicePropDesc
0x1015 GetDevicePropValue
0x1016 SetDevicePropValue
0x101B GetPartialObject
0x90C0 InitiateCaptureRecInSdram
0x90C1 AfDrive
0x90C2 ChangeCameraMode
0x90C3 DeleteImagesInSdram
0x90C4 GetLargeThumb
0x90C7 GetEvent
0x90C8 DeviceReady
0x90C9 SetPreWbData
0x90CA GetVendorPropCodes
0x90CB AfAndCaptureRecInSdram
0x90CC GetPicCtrlData
0x90CD SetPicCtrlData
0x90CE DeleteCustomPicCtrl
0x90CF GetPicCtrlCapability
0x9201 StartLiveView
0x9202 EndLiveView
0x9203 GetLiveViewImage
0x9204 MfDrive
0x9205 ChangeAfArea
0x9206 AfDriveCancel
0x9207 InitiateCaptureRecInMedia
0x9209 GetVendorStorageIDs
0x920A StartMovieRecInCard
0x920B EndMovieRec
0x920C TerminateCapture
0x9400 GetPartialObjectHighSpeed
0x9407 SetTransferListLock
0x9408 GetTransferList
0x9409 NotifyFileAcquisitionStart
0x940A NotifyFileAcquisitionEnd
0x940B GetSpecificSizeObject
0x9801 GetObjectPropsSupported
0x9802 GetObjectPropDesc
0x9803 GetObjectPropValue
0x9805 GetObjectPropList
"""
def __init__(self, data=None, cmd=None, param1=None, param2=None, param3=None, param4=None,
param5=None):
super(PtpIpCmdRequest, self).__init__()
self.cmdtype = struct.pack('I', 0x06)
self.unkown = struct.pack('I', 0x01)
self.ptp_cmd = cmd
self.param1 = param1
self.param2 = param2
self.param3 = param3
self.param4 = param4
self.param5 = param5
# Todo: Transaction ID generieren
self.transaction_id = struct.pack('I', 0x06)
        # parameters are packed as explicit 32-bit little-endian values
        # ('<I'), so the size does not depend on the platform's native long
        self.args = b''
        if self.param1 is not None:
            self.args = self.args + struct.pack('<I', self.param1)
        if self.param2 is not None:
            self.args = self.args + struct.pack('<I', self.param2)
        if self.param3 is not None:
            self.args = self.args + struct.pack('<I', self.param3)
        if self.param4 is not None:
            self.args = self.args + struct.pack('<I', self.param4)
        if self.param5 is not None:
            self.args = self.args + struct.pack('<I', self.param5)
def data(self):
return self.cmdtype + self.unkown + struct.pack('H', self.ptp_cmd) + \
self.transaction_id + self.args
class PtpIpCmdResponse(PtpIpPacket):
"""
ResponseCode Description
0x2000 Undefined
0x2001 OK
0x2002 General Error
0x2003 Session Not Open
0x2004 Invalid TransactionID
0x2005 Operation Not Supported
0x2006 Parameter Not Supported
0x2007 Incomplete Transfer
0x2008 Invalid StorageID
0x2009 Invalid ObjectHandle
0x200A DeviceProp Not Supported
0x200B Invalid ObjectFormatCode
0x200C Store Full
0x200D Object WriteProtected
0x200E Store Read-Only
0x200F Access Denied
0x2010 No Thumbnail Present
0x2011 SelfTest Failed
0x2012 Partial Deletion
0x2013 Store Not Available
0x2014 Specification By Format Unsupported
0x2015 No Valid ObjectInfo
0x2016 Invalid Code Format
0x2017 Unknown Vendor Code
0x2018 Capture Already Terminated
0x2019 Device Busy
0x201A Invalid ParentObject
0x201B Invalid DeviceProp Format
0x201C Invalid DeviceProp Value
0x201D Invalid Parameter
0x201E Session Already Open
0x201F Transaction Cancelled
0x2020 Specification of Destination Unsupported
"""
def __init__(self, data=None):
super(PtpIpCmdResponse, self).__init__()
self.cmdtype = struct.pack('I', 0x07)
if data is not None:
self.ptp_response_code = struct.unpack('H', data[0:2])[0]
self.transaction_id = data[2:6]
self.args = data[6:]
class PtpIpStartDataPacket(PtpIpPacket):
"""docstring for Start_Data_Packet"""
def __init__(self, data=None):
self.cmdtype = struct.pack('I', 0x09)
super(PtpIpStartDataPacket, self).__init__()
if data is not None:
self.transaction_id = data[0:4]
self.length = data[4:8]
class PtpIpDataPacket(PtpIpPacket):
"""docstring for Start_Data_Packet"""
def __init__(self, data=None):
self.cmdtype = struct.pack('I', 0x10)
super(PtpIpDataPacket, self).__init__()
if data is not None:
self.transaction_id = data[0:4]
self.data = data[4:]
class PtpIpCancelTransaction(PtpIpPacket):
"""docstring for Start_Data_Packet"""
def __init__(self, data=None):
self.cmdtype = struct.pack('I', 0x11)
super(PtpIpCancelTransaction, self).__init__()
if data is not None:
self.transaction_id = data[0:4]
class PtpIpEndDataPacket(PtpIpPacket):
"""docstring for Start_Data_Packet"""
def __init__(self, data=None):
self.cmdtype = struct.pack('I', 0x12)
super(PtpIpEndDataPacket, self).__init__()
if data is not None:
self.transaction_id = data[0:4]
print(f"transaction_id: {struct.unpack('I', self.transaction_id)[0]}")
self.data = data[4:]
class PtpIpPing(PtpIpPacket):
"""docstring for Start_Data_Packet"""
def __init__(self, data=None):
self.cmdtype = struct.pack('I', 0x13)
super(PtpIpPing, self).__init__()
if data is not None:
self.data = ''
def data(self):
return self.cmdtype
class PtpIpEvent(object):
"""
EventCode Description
0x4001 CancelTransaction
0x4002 ObjectAdded
0x4003 ObjectRemoved
0x4004 StoreAdded
0x4005 StoreRemoved
0x4006 DevicePropChanged
0x4007 ObjectInfoChanged
0x4008 DeviceInfoChanged
0x4009 RequestObjectTransfer
0x400A StoreFull
0x400C StorageInfoChanged
0x400D CaptureComplete
0xC101 ObjectAddedInSdram
0xC102 CaptureCompleteRecInSdram
0xC105 RecordingInterrupted
"""
def __init__(self, event_code, event_parameter):
super(PtpIpEvent, self).__init__()
self.event_code = int(event_code)
self.event_parameter = int(event_parameter)
class PtpIpEventFactory(object):
"""
    Factory that parses the data reply of a GetEvent request (0x90C7)
    into a list of PtpIpEvent objects.
"""
def __init__(self, data):
super(PtpIpEventFactory, self).__init__()
# create an empty array for the PtpIpEvent object which will be replied
self.events = []
# get the amount of events passed from the data passed to the factory
amount_of_events = struct.unpack('H', data[0:2])[0]
# set an counter and an offset of 2 as the first two bytes are already processed
counter = 1
offset = 2
while counter <= amount_of_events:
# get the event_code which consists of two bytes
event_code = str(struct.unpack('H', data[offset:offset+2])[0])
# get the event_parameter which consists of 4 bytes
event_parameter = str(struct.unpack('I', data[offset+2:offset+6])[0])
self.events.append(PtpIpEvent(event_code, event_parameter))
# increase the offset by 6 to get to the next event_code and event_parameter pair
offset = offset + 6
counter = counter + 1
def get_events(self):
return self.events
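# Example with hypothetical data: a GetEvent reply carrying one ObjectAdded
# (0x4002) event for object handle 1 would parse as
#   PtpIpEventFactory(struct.pack('<HHI', 1, 0x4002, 1)).get_events()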
class PtpIpDataObject(object):
"""docstring for PtpIpDataObject"""
def __init__(self, object_handle, data):
super(PtpIpDataObject, self).__init__()
self.object_handle = object_handle
self.data = data
| [((141, 25, 141, 49), 'struct.unpack', 'struct.unpack', ({(141, 39, 141, 42): '"""I"""', (141, 44, 141, 48): 'data'}, {}), "('I', data)", False, 'import struct\n'), ((191, 23, 191, 45), 'struct.pack', 'struct.pack', ({(191, 35, 191, 38): '"""I"""', (191, 40, 191, 44): '1'}, {}), "('I', 1)", False, 'import struct\n'), ((192, 23, 192, 48), 'struct.pack', 'struct.pack', ({(192, 35, 192, 39): '""">I"""', (192, 41, 192, 47): '256'}, {}), "('>I', 256)", False, 'import struct\n'), ((210, 23, 210, 45), 'struct.pack', 'struct.pack', ({(210, 35, 210, 38): '"""I"""', (210, 40, 210, 44): '2'}, {}), "('I', 2)", False, 'import struct\n'), ((221, 23, 221, 45), 'struct.pack', 'struct.pack', ({(221, 35, 221, 38): '"""I"""', (221, 40, 221, 44): '3'}, {}), "('I', 3)", False, 'import struct\n'), ((238, 23, 238, 45), 'struct.pack', 'struct.pack', ({(238, 35, 238, 38): '"""I"""', (238, 40, 238, 44): '4'}, {}), "('I', 4)", False, 'import struct\n'), ((245, 23, 245, 45), 'struct.pack', 'struct.pack', ({(245, 35, 245, 38): '"""I"""', (245, 40, 245, 44): '5'}, {}), "('I', 5)", False, 'import struct\n'), ((309, 23, 309, 45), 'struct.pack', 'struct.pack', ({(309, 35, 309, 38): '"""I"""', (309, 40, 309, 44): '6'}, {}), "('I', 6)", False, 'import struct\n'), ((310, 22, 310, 44), 'struct.pack', 'struct.pack', ({(310, 34, 310, 37): '"""I"""', (310, 39, 310, 43): '1'}, {}), "('I', 1)", False, 'import struct\n'), ((318, 30, 318, 52), 'struct.pack', 'struct.pack', ({(318, 42, 318, 45): '"""I"""', (318, 47, 318, 51): '6'}, {}), "('I', 6)", False, 'import struct\n'), ((379, 23, 379, 45), 'struct.pack', 'struct.pack', ({(379, 35, 379, 38): '"""I"""', (379, 40, 379, 44): '7'}, {}), "('I', 7)", False, 'import struct\n'), ((389, 23, 389, 45), 'struct.pack', 'struct.pack', ({(389, 35, 389, 38): '"""I"""', (389, 40, 389, 44): '9'}, {}), "('I', 9)", False, 'import struct\n'), ((399, 23, 399, 45), 'struct.pack', 'struct.pack', ({(399, 35, 399, 38): '"""I"""', (399, 40, 399, 44): '16'}, {}), "('I', 16)", False, 'import struct\n'), ((409, 23, 409, 45), 'struct.pack', 'struct.pack', ({(409, 35, 409, 38): '"""I"""', (409, 40, 409, 44): '17'}, {}), "('I', 17)", False, 'import struct\n'), ((418, 23, 418, 45), 'struct.pack', 'struct.pack', ({(418, 35, 418, 38): '"""I"""', (418, 40, 418, 44): '18'}, {}), "('I', 18)", False, 'import struct\n'), ((429, 23, 429, 45), 'struct.pack', 'struct.pack', ({(429, 35, 429, 38): '"""I"""', (429, 40, 429, 44): '19'}, {}), "('I', 19)", False, 'import struct\n'), ((53, 12, 53, 25), 'time.sleep', 'time.sleep', ({(53, 23, 53, 24): '(1)'}, {}), '(1)', False, 'import time\n'), ((61, 16, 61, 65), 'socket.socket', 'socket.socket', ({(61, 30, 61, 44): 'socket.AF_INET', (61, 46, 61, 64): 'socket.SOCK_STREAM'}, {}), '(socket.AF_INET, socket.SOCK_STREAM)', False, 'import socket\n'), ((194, 19, 194, 31), 'uuid.uuid4', 'uuid.uuid4', ({}, {}), '()', False, 'import uuid\n'), ((475, 27, 475, 56), 'struct.unpack', 'struct.unpack', ({(475, 41, 475, 44): '"""H"""', (475, 46, 475, 55): 'data[0:2]'}, {}), "('H', data[0:2])", False, 'import struct\n'), ((158, 27, 158, 56), 'struct.unpack', 'struct.unpack', ({(158, 41, 158, 44): '"""I"""', (158, 46, 158, 55): 'data[0:4]'}, {}), "('I', data[0:4])", False, 'import struct\n'), ((196, 28, 196, 48), 'socket.gethostname', 'socket.gethostname', ({}, {}), '()', False, 'import socket\n'), ((321, 36, 321, 65), 'struct.pack', 'struct.pack', ({(321, 48, 321, 51): '"""L"""', (321, 53, 321, 64): 'self.param1'}, {}), "('L', self.param1)", False, 'import struct\n'), ((324, 36, 324, 65), 
'struct.pack', 'struct.pack', ({(324, 48, 324, 51): '"""L"""', (324, 53, 324, 64): 'self.param2'}, {}), "('L', self.param2)", False, 'import struct\n'), ((327, 36, 327, 65), 'struct.pack', 'struct.pack', ({(327, 48, 327, 51): '"""L"""', (327, 53, 327, 64): 'self.param3'}, {}), "('L', self.param3)", False, 'import struct\n'), ((330, 36, 330, 65), 'struct.pack', 'struct.pack', ({(330, 48, 330, 51): '"""L"""', (330, 53, 330, 64): 'self.param4'}, {}), "('L', self.param4)", False, 'import struct\n'), ((333, 36, 333, 65), 'struct.pack', 'struct.pack', ({(333, 48, 333, 51): '"""L"""', (333, 53, 333, 64): 'self.param5'}, {}), "('L', self.param5)", False, 'import struct\n'), ((381, 37, 381, 66), 'struct.unpack', 'struct.unpack', ({(381, 51, 381, 54): '"""H"""', (381, 56, 381, 65): 'data[0:2]'}, {}), "('H', data[0:2])", False, 'import struct\n'), ((27, 54, 27, 89), 'struct.unpack', 'struct.unpack', ({(27, 68, 27, 71): '"""L"""', (27, 73, 27, 88): 'self.session_id'}, {}), "('L', self.session_id)", False, 'import struct\n'), ((40, 20, 40, 33), 'time.sleep', 'time.sleep', ({(40, 31, 40, 32): '(1)'}, {}), '(1)', False, 'import time\n'), ((336, 44, 336, 74), 'struct.pack', 'struct.pack', ({(336, 56, 336, 59): '"""H"""', (336, 61, 336, 73): 'self.ptp_cmd'}, {}), "('H', self.ptp_cmd)", False, 'import struct\n'), ((482, 29, 482, 70), 'struct.unpack', 'struct.unpack', ({(482, 43, 482, 46): '"""H"""', (482, 48, 482, 69): 'data[offset:offset + 2]'}, {}), "('H', data[offset:offset + 2])", False, 'import struct\n'), ((485, 34, 485, 77), 'struct.unpack', 'struct.unpack', ({(485, 48, 485, 51): '"""I"""', (485, 53, 485, 76): 'data[offset + 2:offset + 6]'}, {}), "('I', data[offset + 2:offset + 6])", False, 'import struct\n'), ((91, 30, 91, 75), 'struct.unpack', 'struct.unpack', ({(91, 44, 91, 47): '"""I"""', (91, 49, 91, 74): 'ptpip_packet_reply.length'}, {}), "('I', ptpip_packet_reply.length)", False, 'import struct\n'), ((157, 31, 157, 60), 'struct.unpack', 'struct.unpack', ({(157, 45, 157, 48): '"""I"""', (157, 50, 157, 59): 'data[0:4]'}, {}), "('I', data[0:4])", False, 'import struct\n'), ((422, 37, 422, 76), 'struct.unpack', 'struct.unpack', ({(422, 51, 422, 54): '"""I"""', (422, 56, 422, 75): 'self.transaction_id'}, {}), "('I', self.transaction_id)", False, 'import struct\n'), ((111, 30, 111, 75), 'struct.unpack', 'struct.unpack', ({(111, 44, 111, 47): '"""I"""', (111, 49, 111, 74): 'ptpip_packet_reply.length'}, {}), "('I', ptpip_packet_reply.length)", False, 'import struct\n')] |
jaideep-seth/PyOpenWorm | examples/morpho.py | c36baeda9590334ba810296934973da34f0eab78 | """
How to load morphologies of certain cells from the database.
"""
#this is an expected failure right now, as morphology is not implemented
from __future__ import absolute_import
from __future__ import print_function
import PyOpenWorm as P
from PyOpenWorm.context import Context
from PyOpenWorm.worm import Worm
from six import StringIO
#Connect to database.
with P.connect('default.conf') as conn:
ctx = Context(ident="http://openworm.org/data", conf=conn.conf).stored
#Create a new Cell object to work with.
aval = ctx(Worm)().get_neuron_network().aneuron('AVAL')
#Get the morphology associated with the Cell. Returns a neuroml.Morphology object.
morph = aval._morphology()
out = StringIO()
morph.export(out, 0) # we're printing it here, but we would normally do something else with the morphology object.
    print(out.getvalue())  # getvalue() returns the whole buffer; read() would start at the current (end) position
| [((14, 5, 14, 30), 'PyOpenWorm.connect', 'P.connect', ({(14, 15, 14, 29): '"""default.conf"""'}, {}), "('default.conf')", True, 'import PyOpenWorm as P\n'), ((22, 10, 22, 20), 'six.StringIO', 'StringIO', ({}, {}), '()', False, 'from six import StringIO\n'), ((15, 10, 15, 67), 'PyOpenWorm.context.Context', 'Context', (), '', False, 'from PyOpenWorm.context import Context\n')] |
kkrampa/commcare-hq | corehq/apps/app_manager/tests/test_form_workflow.py | d64d7cad98b240325ad669ccc7effb07721b4d44 | from __future__ import absolute_import
from __future__ import unicode_literals
from django.test import SimpleTestCase
from corehq.apps.app_manager.const import (
AUTO_SELECT_RAW,
AUTO_SELECT_CASE,
WORKFLOW_FORM,
WORKFLOW_MODULE,
WORKFLOW_PREVIOUS,
WORKFLOW_ROOT,
WORKFLOW_PARENT_MODULE,
)
from corehq.apps.app_manager.models import FormDatum, FormLink
from corehq.apps.app_manager.suite_xml.post_process.workflow import _replace_session_references_in_stack, CommandId
from corehq.apps.app_manager.suite_xml.xml_models import StackDatum
from corehq.apps.app_manager.tests.app_factory import AppFactory
from corehq.apps.app_manager.tests.util import TestXmlMixin
from corehq.apps.app_manager.xpath import session_var
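# These tests build apps with AppFactory, set a post-form workflow on each
# form, and compare the generated suite.xml <entry>/<stack> output against
# the XML fixtures under data/form_workflow.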
class TestFormWorkflow(SimpleTestCase, TestXmlMixin):
file_path = ('data', 'form_workflow')
def test_basic(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('m0', 'frog')
m1, m1f0 = factory.new_basic_module('m1', 'frog')
m0f0.post_form_workflow = WORKFLOW_FORM
m0f0.form_links = [
FormLink(xpath="(today() - dob) < 7", form_id=m1f0.unique_id)
]
self.assertXmlPartialEqual(self.get_xml('form_link_basic'), factory.app.create_suite(), "./entry[1]")
def test_with_case_management_both_update(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('m0', 'frog')
factory.form_requires_case(m0f0)
m1, m1f0 = factory.new_basic_module('m1', 'frog')
factory.form_requires_case(m1f0)
m0f0.post_form_workflow = WORKFLOW_FORM
m0f0.form_links = [
FormLink(xpath="(today() - dob) > 7", form_id=m1f0.unique_id)
]
self.assertXmlPartialEqual(self.get_xml('form_link_update_case'), factory.app.create_suite(), "./entry[1]")
def test_with_case_management_create_update(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('m0', 'frog')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('m1', 'frog')
factory.form_requires_case(m1f0)
m0f0.post_form_workflow = WORKFLOW_FORM
m0f0.form_links = [
FormLink(xpath='true()', form_id=m1f0.unique_id)
]
self.assertXmlPartialEqual(self.get_xml('form_link_create_update_case'), factory.app.create_suite(), "./entry[1]")
def test_with_case_management_multiple_links(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('m0', 'frog')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('m1', 'frog')
factory.form_requires_case(m1f0)
m1f1 = factory.new_form(m1)
factory.form_opens_case(m1f1)
m0f0.post_form_workflow = WORKFLOW_FORM
m0f0.form_links = [
FormLink(xpath="a = 1", form_id=m1f0.unique_id),
FormLink(xpath="a = 2", form_id=m1f1.unique_id)
]
self.assertXmlPartialEqual(self.get_xml('form_link_multiple'), factory.app.create_suite(), "./entry[1]")
def test_link_to_child_module(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('enroll child', 'child')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('child visit', 'child')
factory.form_requires_case(m1f0)
factory.form_opens_case(m1f0, case_type='visit', is_subcase=True)
m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1)
factory.form_requires_case(m2f0, 'child')
factory.form_requires_case(m2f0, 'visit', parent_case_type='child')
m0f0.post_form_workflow = WORKFLOW_FORM
m0f0.form_links = [
FormLink(xpath="true()", form_id=m1f0.unique_id),
]
m1f0.post_form_workflow = WORKFLOW_FORM
m1f0.form_links = [
FormLink(xpath="true()", form_id=m2f0.unique_id),
]
self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), "./entry")
def test_manual_form_link(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('enroll child', 'child')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('child visit', 'child')
factory.form_requires_case(m1f0)
factory.form_opens_case(m1f0, case_type='visit', is_subcase=True)
m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1)
factory.form_requires_case(m2f0, 'child')
factory.form_requires_case(m2f0, 'visit', parent_case_type='child')
m0f0.post_form_workflow = WORKFLOW_FORM
m0f0.form_links = [
FormLink(xpath="true()", form_id=m1f0.unique_id, datums=[
FormDatum(name='case_id', xpath="instance('commcaresession')/session/data/case_id_new_child_0")
]),
]
m1f0.post_form_workflow = WORKFLOW_FORM
m1f0.form_links = [
FormLink(xpath="true()", form_id=m2f0.unique_id, datums=[
FormDatum(name='case_id', xpath="instance('commcaresession')/session/data/case_id"),
FormDatum(name='case_id_load_visit_0', xpath="instance('commcaresession')/session/data/case_id_new_visit_0"),
]),
]
self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), "./entry")
def test_manual_form_link_with_fallback(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('enroll child', 'child')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('child visit', 'child')
factory.form_requires_case(m1f0)
factory.form_opens_case(m1f0, case_type='visit', is_subcase=True)
m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1)
factory.form_requires_case(m2f0, 'child')
factory.form_requires_case(m2f0, 'visit', parent_case_type='child')
m0f0.post_form_workflow = WORKFLOW_FORM
m0f0.form_links = [
FormLink(xpath="true()", form_id=m1f0.unique_id, datums=[
FormDatum(name='case_id', xpath="instance('commcaresession')/session/data/case_id_new_child_0")
]),
]
m1f0.post_form_workflow = WORKFLOW_FORM
condition_for_xpath = "instance('casedb')/casedb/case[@case_id = " \
"instance('commcaresession')/session/data/case_id]/prop = 'value'"
m1f0.form_links = [
FormLink(xpath="true()", form_id=m2f0.unique_id, datums=[
FormDatum(name='case_id', xpath="instance('commcaresession')/session/data/case_id"),
FormDatum(name='case_id_load_visit_0',
xpath="instance('commcaresession')/session/data/case_id_new_visit_0"),
]),
FormLink(xpath=condition_for_xpath, form_id=m2f0.unique_id, datums=[
FormDatum(name='case_id', xpath="instance('commcaresession')/session/data/case_id"),
FormDatum(name='case_id_load_visit_0',
xpath="instance('commcaresession')/session/data/case_id_new_visit_0"),
]),
]
m1f0.post_form_workflow_fallback = WORKFLOW_PREVIOUS
self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_previous'),
factory.app.create_suite(), "./entry")
m1f0.post_form_workflow_fallback = WORKFLOW_MODULE
self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_module'),
factory.app.create_suite(), "./entry")
m1f0.post_form_workflow_fallback = WORKFLOW_ROOT
self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_root'),
factory.app.create_suite(), "./entry")
def test_reference_to_missing_session_variable_in_stack(self):
# http://manage.dimagi.com/default.asp?236750
#
# Stack create blocks do not update the session after each datum
# so items put into the session in one step aren't available later steps
#
# <datum id="case_id_A" value="instance('commcaresession')/session/data/case_id_new_A"/>
# - <datum id="case_id_B" value="instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_A]/index/host"/>
# + <datum id="case_id_B" value="instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_new_A]/index/host"/>
#
# in the above example ``case_id_A`` is being added to the session and then
# later referenced. However since the session doesn't get updated
# the value isn't available in the session.
#
# To fix this we need to replace any references to previous variables with the full xpath which
# that session variable references.
#
# See corehq.apps.app_manager.suite_xml.post_process.workflow._replace_session_references_in_stack
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('person registration', 'person')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_advanced_module('episode registration', 'episode')
factory.form_requires_case(m1f0, case_type='person')
factory.form_opens_case(m1f0, case_type='episode', is_subcase=True, is_extension=True)
m2, m2f0 = factory.new_advanced_module('tests', 'episode')
factory.form_requires_case(m2f0, 'episode')
factory.advanced_form_autoloads(m2f0, AUTO_SELECT_CASE, 'host', 'load_episode_0')
m1f0.post_form_workflow = WORKFLOW_FORM
m1f0.form_links = [
FormLink(xpath="true()", form_id=m2f0.unique_id, datums=[
FormDatum(name='case_id_load_episode_0', xpath="instance('commcaresession')/session/data/case_id_new_episode_0")
]),
]
self.assertXmlPartialEqual(self.get_xml('form_link_enikshay'), factory.app.create_suite(), "./entry")
def test_return_to_parent_module(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('enroll child', 'child')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('child visit', 'child')
factory.form_requires_case(m1f0)
factory.form_opens_case(m1f0, case_type='visit', is_subcase=True)
m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1)
factory.form_requires_case(m2f0, 'child')
factory.form_requires_case(m2f0, 'visit', parent_case_type='child')
m2f0.post_form_workflow = WORKFLOW_PARENT_MODULE
expected = """
<partial>
<stack>
<create>
<command value="'m1'"/>
<datum id="case_id" value="instance('commcaresession')/session/data/case_id"/>
<datum id="case_id_new_visit_0" value="uuid()"/>
</create>
</stack>
</partial>
"""
self.assertXmlPartialEqual(expected, factory.app.create_suite(), "./entry[3]/stack")
def test_return_to_child_module(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('enroll child', 'child')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('child visit', 'child')
factory.form_requires_case(m1f0)
factory.form_opens_case(m1f0, case_type='visit', is_subcase=True)
m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1)
factory.form_requires_case(m2f0, 'child')
factory.form_requires_case(m2f0, 'visit', parent_case_type='child')
m2f0.post_form_workflow = WORKFLOW_MODULE
expected = """
<partial>
<stack>
<create>
<command value="'m1'"/>
<datum id="case_id" value="instance('commcaresession')/session/data/case_id"/>
<datum id="case_id_new_visit_0" value="uuid()"/>
<command value="'m2'"/>
</create>
</stack>
</partial>
"""
self.assertXmlPartialEqual(expected, factory.app.create_suite(), "./entry[3]/stack")
def test_link_to_form_in_parent_module(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('enroll child', 'child')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('child visit', 'child')
factory.form_requires_case(m1f0)
m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1)
factory.form_requires_case(m2f0, 'child')
# link to child -> edit child
m2f0.post_form_workflow = WORKFLOW_FORM
m2f0.form_links = [
FormLink(xpath="true()", form_id=m1f0.unique_id),
]
self.assertXmlPartialEqual(self.get_xml('form_link_child_modules'), factory.app.create_suite(), "./entry[3]")
def test_form_links_submodule(self):
# Test that when linking between two forms in a submodule we match up the
# session variables between the source and target form correctly
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('child visit', 'child')
factory.form_requires_case(m0f0)
factory.form_opens_case(m0f0, 'visit', is_subcase=True)
m1, m1f0 = factory.new_advanced_module('visit histroy', 'visit', parent_module=m0)
factory.form_requires_case(m1f0, 'child')
factory.form_requires_case(m1f0, 'visit', parent_case_type='child')
m1f1 = factory.new_form(m1)
factory.form_requires_case(m1f1, 'child')
factory.form_requires_case(m1f1, 'visit', parent_case_type='child')
m1f0.post_form_workflow = WORKFLOW_FORM
m1f0.form_links = [
FormLink(xpath="true()", form_id=m1f1.unique_id),
]
self.assertXmlPartialEqual(self.get_xml('form_link_submodule'), factory.app.create_suite(), "./entry")
def _build_workflow_app(self, mode):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('m0', '')
factory.new_form(m0)
m1, m1f0 = factory.new_basic_module('m1', 'patient')
m1f1 = factory.new_form(m1)
factory.form_opens_case(m1f0)
factory.form_requires_case(m1f1)
m2, m2f0 = factory.new_basic_module('m2', 'patient')
m2f1 = factory.new_form(m2)
factory.form_requires_case(m2f0)
factory.form_requires_case(m2f1)
m3, m3f0 = factory.new_basic_module('m3', 'child')
m3f1 = factory.new_form(m3)
factory.form_requires_case(m3f0, parent_case_type='patient')
factory.form_requires_case(m3f1)
m4, m4f0 = factory.new_advanced_module('m4', 'patient')
factory.form_requires_case(m4f0, case_type='patient')
factory.form_requires_case(m4f0, case_type='patient')
m4f1 = factory.new_form(m4)
factory.form_requires_case(m4f1, case_type='patient')
factory.form_requires_case(m4f1, case_type='patient')
factory.form_requires_case(m4f1, case_type='patient')
m4f2 = factory.new_form(m4)
factory.form_requires_case(m4f2, case_type='patient')
factory.form_requires_case(m4f2, case_type='patient')
factory.advanced_form_autoloads(m4f2, AUTO_SELECT_RAW, 'case_id')
m5, m5f0 = factory.new_basic_module('m5', 'patient', parent_module=m1)
factory.form_requires_case(m5f0)
for module in factory.app.get_modules():
for form in module.get_forms():
form.post_form_workflow = mode
return factory.app
def test_form_workflow_previous(self):
app = self._build_workflow_app(WORKFLOW_PREVIOUS)
self.assertXmlPartialEqual(self.get_xml('suite-workflow-previous'), app.create_suite(), "./entry")
def test_form_workflow_module(self):
app = self._build_workflow_app(WORKFLOW_MODULE)
self.assertXmlPartialEqual(self.get_xml('suite-workflow-module'), app.create_suite(), "./entry")
def test_form_workflow_module_in_root(self):
app = self._build_workflow_app(WORKFLOW_PREVIOUS)
for m in [1, 2]:
module = app.get_module(m)
module.put_in_root = True
self.assertXmlPartialEqual(self.get_xml('suite-workflow-module-in-root'), app.create_suite(), "./entry")
def test_form_workflow_root(self):
app = self._build_workflow_app(WORKFLOW_ROOT)
self.assertXmlPartialEqual(self.get_xml('suite-workflow-root'), app.create_suite(), "./entry")
class TestReplaceSessionRefs(SimpleTestCase):
def test_replace_session_references_in_stack(self):
children = [
CommandId('m0'),
StackDatum(id='a', value=session_var('new_a')),
StackDatum(id='b', value=session_var('new_b')),
StackDatum(id='c', value="instance('casedb')/case/[@case_id = {a}]/index/parent".format(a=session_var('a'))),
StackDatum(id='d', value="if({c}, {c}, {a}]".format(a=session_var('a'), c=session_var('c')))
]
clean = _replace_session_references_in_stack(children)
clean_raw = []
for child in clean:
if isinstance(child, CommandId):
clean_raw.append(child.id)
else:
clean_raw.append((child.id, child.value))
new_c = "instance('casedb')/case/[@case_id = {a}]/index/parent".format(a=session_var('new_a'))
self.assertEqual(clean_raw, [
'm0',
('a', session_var('new_a')),
('b', session_var('new_b')),
('c', new_c),
('d', "if({c}, {c}, {a}]".format(a=session_var('new_a'), c=new_c))
])
sboshin/tensorflow | tensorflow/python/compiler/tensorrt/model_tests/model_handler.py | 77689016fb4c1373abeca36360f7b2dd9434c547 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Loads, converts, and runs sample models."""
import abc
import collections
import functools
import tempfile
import time
from typing import Callable, Iterable, List, Mapping, Optional, Sequence, Union
from absl import logging
import numpy as np
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import tensor_shape_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.python.client import session
from tensorflow.python.compiler.tensorrt import trt_convert as trt
from tensorflow.python.framework import convert_to_constants
from tensorflow.python.framework import dtypes as tf_dtypes
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops as framework_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.saved_model import load as saved_model_load
from tensorflow.python.saved_model import loader as saved_model_loader
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import tag_constants
# pylint: disable=bad-whitespace
### Helper Functions
def _get_concrete_tensor_shape(
tensor_shape: tensor_shape_pb2.TensorShapeProto,
batch_size: Optional[int] = None) -> Sequence[int]:
"""Gets a concrete tensor shape without dynamic dimensions."""
if tensor_shape.unknown_rank:
raise ValueError("Cannot generates random tensors for unknown rank!")
shape = [dim.size for dim in tensor_shape.dim]
if not shape:
raise ValueError("The tensor cannot have a rank of 0!")
if shape[0] < 0:
if batch_size is None or batch_size <= 0:
raise ValueError("Must provide a valid batch size "
"as the tensor has a dynamic batch size!")
shape[0] = batch_size
if any(filter(lambda x: x < 0, shape)):
raise ValueError("Cannot have dynamic dimensions except for batch size!")
return shape
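# Example (hypothetical values): a proto shape of [-1, 224, 224, 3] with
# batch_size=8 resolves to [8, 224, 224, 3]; an unknown rank or a negative
# non-batch dimension raises ValueError.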
def _generate_random_tensor_v1(tensor_info: meta_graph_pb2.TensorInfo,
batch_size: Optional[int] = None) -> np.ndarray:
"""Generates a random tensor based on the data type and tensor shape."""
dtype = tf_dtypes.as_dtype(tensor_info.dtype)
shape = _get_concrete_tensor_shape(tensor_info.tensor_shape, batch_size)
with session.Session():
return random_ops.random_uniform(
shape=shape, dtype=dtype, name=tensor_info.name.split(":")[0]).eval()
def _generate_random_tensor_v2(
tensor: framework_ops.Tensor,
batch_size: Optional[int] = None) -> framework_ops.Tensor:
"""Generates a random tensor based on the data type and tensor shape."""
shape = _get_concrete_tensor_shape(tensor.shape.as_proto(), batch_size)
return random_ops.random_uniform(
shape=shape, dtype=tensor.dtype, name=tensor.name)
# Models are repeatedly loaded for different TensorRT conversion settings.
# Using cache can reduce I/O.
@functools.lru_cache()
def load_meta_graph(
saved_model_dir: str, saved_model_tags: str,
saved_model_signature_key: str) -> meta_graph_pb2.MetaGraphDef:
"""Loads a `tf.MetaGraphDef` in TF1."""
with session.Session() as sess:
meta_graph = saved_model_loader.load(
sess=sess,
export_dir=saved_model_dir,
tags=saved_model_tags,
)
output_node_names = [
tensor.name.split(":")[0] for tensor in
meta_graph.signature_def[saved_model_signature_key].outputs.values()
]
graph_def = (
convert_to_constants.convert_variables_to_constants_from_session_graph(
sess, meta_graph.graph_def, output_node_names))
meta_graph.graph_def.CopyFrom(graph_def)
return meta_graph
@functools.lru_cache()
def load_graph_func(saved_model_dir: str, saved_model_tags: str,
saved_model_signature_key: str):
"""Loads a graph function in TF2."""
imported = saved_model_load.load(
export_dir=saved_model_dir, tags=saved_model_tags)
graph_func = imported.signatures[saved_model_signature_key]
return convert_to_constants.convert_variables_to_constants_v2(graph_func)
### Test Classes
class TestResult(
collections.namedtuple("TestResult",
["outputs", "latency", "trt_convert_params"])):
def __new__(cls,
outputs: Mapping[str, np.ndarray],
latency: List[float],
trt_convert_params: trt.TrtConversionParams = None):
return super(TestResult, cls).__new__(cls, outputs, latency,
trt_convert_params)
class ModelConfig(
collections.namedtuple("ModelConfig", [
"saved_model_dir", "saved_model_tags", "saved_model_signature_key",
"default_batch_size"
])):
"""Configurations for test models."""
def __new__(cls,
saved_model_dir: str,
saved_model_tags: Sequence[str] = (tag_constants.SERVING,),
saved_model_signature_key: str = (
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY),
default_batch_size: int = 1):
return super(ModelConfig,
cls).__new__(cls, saved_model_dir, saved_model_tags,
saved_model_signature_key, default_batch_size)
class TestResultCollection(
collections.namedtuple("TestResultCollection", ["results", "config"])):
def __new__(cls, config: ModelConfig,
results: Sequence[TestResult] = tuple()):
return super(TestResultCollection, cls).__new__(cls, config, results)
class _ModelHandlerBase(metaclass=abc.ABCMeta):
"""Base class for running a model."""
def __init__(self, model_config: ModelConfig):
self._model_config = model_config
def __str__(self) -> str:
return str(self._model_config)
def __repr__(self) -> str:
return "{}({})".format(self.__class__.__name__, str(self))
@property
def model_config(self) -> ModelConfig:
return self._model_config
@property
  def input_tensor_names(self) -> Sequence[str]:
"""Names of input tensors."""
@property
def output_tensor_names(self) -> Sequence[str]:
"""Names of output tensors."""
@abc.abstractmethod
def generate_random_inputs(
self,
batch_size: Optional[int] = None
) -> Mapping[str, Union[np.ndarray, framework_ops.Tensor]]:
"""Generates mapping from names to input tensors."""
@abc.abstractmethod
def run(self,
inputs=None,
warmup_iterations: int = 10,
benchmark_iterations: int = 100,
allow_to_use_gpu: bool = False) -> TestResult:
"""Runs the model with provided or randomly generated input tensors.
Args:
inputs: Mapping from names to input ndarrays in TF1, or a sequence of
        tensors in TF2. If `None`, randomly generated inputs will be used
instead.
warmup_iterations: Number of inferences to warm up the runtime.
benchmark_iterations: Number of inferences to measure the latency.
allow_to_use_gpu: Whether it is allowed to use GPU or not.
Returns:
`TestResult` summarizing timing and numerics information.
"""
class ModelHandlerV1(_ModelHandlerBase):
"""Runs a model in TF1."""
@property
def meta_graph(self) -> meta_graph_pb2.MetaGraphDef:
return load_meta_graph(
saved_model_dir=self.model_config.saved_model_dir,
saved_model_tags=self.model_config.saved_model_tags,
saved_model_signature_key=self.model_config.saved_model_signature_key)
@property
def input_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]:
return self.meta_graph.signature_def[
self.model_config.saved_model_signature_key].inputs
@property
def output_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]:
return self.meta_graph.signature_def[
self.model_config.saved_model_signature_key].outputs
@property
  def input_tensor_names(self) -> Sequence[str]:
return [info.name for info in self.input_tensor_info.values()]
@property
def output_tensor_names(self) -> Sequence[str]:
return [info.name for info in self.output_tensor_info.values()]
def generate_random_inputs(self,
batch_size: Optional[int] = None
) -> Mapping[str, np.ndarray]:
batch_size = batch_size or self.model_config.default_batch_size
return {
tensor_info.name: _generate_random_tensor_v1(tensor_info, batch_size)
for tensor_info in self.input_tensor_info.values()
}
def run(self,
inputs: Optional[Mapping[str, np.ndarray]] = None,
warmup_iterations=10,
benchmark_iterations=100,
allow_to_use_gpu=False) -> TestResult:
inputs = inputs or self.generate_random_inputs()
config_proto = None
if not allow_to_use_gpu:
config_proto = config_pb2.ConfigProto(device_count={"CPU": 1, "GPU": 0})
with session.Session(config=config_proto) as sess:
importer.import_graph_def(self.meta_graph.graph_def)
try:
for _ in range(warmup_iterations):
sess.run(fetches=self.output_tensor_names, feed_dict=inputs)
latency = []
for _ in range(benchmark_iterations):
before = time.time()
outputs = sess.run(fetches=self.output_tensor_names, feed_dict=inputs)
latency.append(time.time() - before)
except Exception as exc:
raise RuntimeError("Failed to run model inference! "
"Model information: {}".format(str(self))) from exc
outputs = dict(zip(self.output_tensor_names, outputs))
return TestResult(latency=latency, outputs=outputs if inputs else None)
class ModelHandlerV2(_ModelHandlerBase):
"""Runs a model in TF2."""
@property
def graph_func(self):
graph_func = load_graph_func(
saved_model_dir=self.model_config.saved_model_dir,
saved_model_tags=self.model_config.saved_model_tags,
saved_model_signature_key=self.model_config.saved_model_signature_key)
return convert_to_constants.convert_variables_to_constants_v2(graph_func)
@property
def input_tensor_names(self):
return [tensor.name for tensor in self.graph_func.inputs]
@property
def output_tensor_names(self):
return [tensor.name for tensor in self.graph_func.outputs]
def generate_random_inputs(self,
batch_size: Optional[int] = None
) -> Sequence[framework_ops.Tensor]:
batch_size = batch_size or self.model_config.default_batch_size
return [
_generate_random_tensor_v2(tensor, batch_size)
for tensor in self.graph_func.inputs
]
def run(self,
inputs: Optional[Sequence[framework_ops.Tensor]] = None,
warmup_iterations=10,
benchmark_iterations=100,
allow_to_use_gpu=False) -> TestResult:
inputs = inputs or self.generate_random_inputs()
try:
device = "/device:gpu:0" if allow_to_use_gpu else "/device:cpu:0"
with framework_ops.device(device):
for _ in range(warmup_iterations):
self.graph_func(*inputs)
latency = []
for _ in range(benchmark_iterations):
before = time.time()
outputs = self.graph_func(*inputs)
latency.append(time.time() - before)
except Exception as exc:
raise RuntimeError("Failed to run model inference! "
"Model information: {}".format(str(self))) from exc
outputs = dict(zip(self.output_tensor_names, outputs))
return TestResult(latency=latency, outputs=outputs if inputs else None)
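# Minimal usage sketch for the plain handlers (the SavedModel path below is
# hypothetical):
#   config = ModelConfig(saved_model_dir="/tmp/my_saved_model")
#   handler = ModelHandlerV2(config)
#   result = handler.run(warmup_iterations=5, benchmark_iterations=50)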
class _TrtModelHandlerBase(_ModelHandlerBase):
"""Base class for converting and running a model."""
def __init__(
self,
model_config: ModelConfig,
trt_convert_params: trt.TrtConversionParams,
):
super(_TrtModelHandlerBase, self).__init__(model_config)
self._trt_convert_params = trt_convert_params
self._converter = self._create_converter(trt_convert_params)
logging.info("Converting to TensorRT!")
self._check_conversion(self._converter.convert())
self._conversion_is_saved = False
@abc.abstractmethod
def _create_converter(self, trt_convert_params: trt.TrtConversionParams):
"""Creates a converter for the corresponding TF version."""
@abc.abstractmethod
def _check_conversion(self, conversion_output):
"""Checks if conversion output has any TensorRT engines."""
def _check_contains_trt_engine(self, graph_def: graph_pb2.GraphDef):
if "TRTEngineOp" not in [node.op for node in graph_def.node]:
raise RuntimeError("Failed to convert to TensorRT! "
"Model Information: {}".format(str(self)))
def __str__(self) -> str:
base = super(_TrtModelHandlerBase, self).__str__()
return "{}, TrtConversionParams: {}".format(base,
str(self._trt_convert_params))
@property
def trt_convert_params(self) -> trt.TrtConversionParams:
return self._trt_convert_params
def save(self,
output_saved_model_dir: Optional[str] = None,
overwrite=True) -> None:
"""Saves a TensorRT converted model."""
if self._conversion_is_saved and not overwrite:
return
output_saved_model_dir = output_saved_model_dir or tempfile.mkdtemp()
logging.info("Saving TensorRT model to %s!", output_saved_model_dir)
self._converter.save(output_saved_model_dir)
self._model_config = self.model_config._replace(
saved_model_dir=output_saved_model_dir)
self._conversion_is_saved = True
class TrtModelHandlerV1(_TrtModelHandlerBase, ModelHandlerV1):
"""Converts a TF1 model with TensorRT and runs the converted model."""
def _create_converter(self, trt_convert_params: trt.TrtConversionParams):
conversion_nodes_denylist = self.output_tensor_names
return trt.TrtGraphConverter(
input_saved_model_dir=self.model_config.saved_model_dir,
input_saved_model_tags=self.model_config.saved_model_tags,
input_saved_model_signature_key=(
self.model_config.saved_model_signature_key),
nodes_denylist=conversion_nodes_denylist,
max_batch_size=trt_convert_params.max_batch_size,
max_workspace_size_bytes=trt_convert_params.max_workspace_size_bytes,
precision_mode=trt_convert_params.precision_mode,
minimum_segment_size=trt_convert_params.minimum_segment_size,
is_dynamic_op=trt_convert_params.is_dynamic_op,
maximum_cached_engines=trt_convert_params.maximum_cached_engines,
use_calibration=trt_convert_params.use_calibration,
)
_check_conversion = _TrtModelHandlerBase._check_contains_trt_engine
def run(self,
inputs: Optional[Mapping[str, np.ndarray]] = None,
warmup_iterations=10,
benchmark_iterations=100) -> TestResult:
self.save(overwrite=False)
logging.info("Running with TensorRT!")
test_result = ModelHandlerV1.run(
self,
inputs,
warmup_iterations,
benchmark_iterations,
allow_to_use_gpu=True)
return test_result._replace(trt_convert_params=self._trt_convert_params)
class TrtModelHandlerV2(_TrtModelHandlerBase, ModelHandlerV2):
"""Converts a TF2 model with TensorRT and runs the converted model."""
def _create_converter(self, trt_convert_params: trt.TrtConversionParams):
return trt.TrtGraphConverterV2(
input_saved_model_dir=self.model_config.saved_model_dir,
input_saved_model_tags=self.model_config.saved_model_tags,
input_saved_model_signature_key=(
self.model_config.saved_model_signature_key),
conversion_params=trt_convert_params)
def _check_conversion(self, graph_func):
graph_def = graph_func.graph.as_graph_def()
self._check_contains_trt_engine(graph_def)
def run(self,
inputs: Optional[Sequence[framework_ops.Tensor]] = None,
warmup_iterations=10,
benchmark_iterations=100) -> TestResult:
self.save(overwrite=False)
logging.info("Running with TensorRT!")
test_result = ModelHandlerV2.run(
self,
inputs,
warmup_iterations,
benchmark_iterations,
allow_to_use_gpu=True)
return test_result._replace(trt_convert_params=self._trt_convert_params)
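# Note: the TRT handlers convert the model eagerly in __init__ and always run
# with allow_to_use_gpu=True, since TensorRT engines require a GPU. Callers
# supply a trt.TrtConversionParams instance at construction (for example,
# starting from trt.DEFAULT_TRT_CONVERSION_PARAMS).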
class _ModelHandlerManagerBase(metaclass=abc.ABCMeta):
"""Manages a series of ModelHandlers for aggregrated testing/benchmarking."""
def __init__(
self, model_config: ModelConfig,
default_trt_convert_params: trt.TrtConversionParams,
trt_convert_params_updater: Callable[[trt.TrtConversionParams],
Iterable[trt.TrtConversionParams]]):
self._ori_model = self.model_handler_cls(model_config)
self._trt_models = []
for trt_convert_params in trt_convert_params_updater(
default_trt_convert_params):
trt_model = self.trt_model_handler_cls(
model_config, trt_convert_params=trt_convert_params)
self._trt_models.append(trt_model)
self._result_collection = TestResultCollection(
results=[], config=model_config)
def __str__(self) -> str:
return "Input Model: {}".format(str(self._ori_model))
def __repr__(self) -> str:
return "{}({})".format(self.__class__.__name__, str(self))
@property
@classmethod
@abc.abstractmethod
def model_handler_cls(cls):
"""The modle handler class. ModelHandleV1/ModelHandlerV2."""
@property
@classmethod
@abc.abstractmethod
def trt_model_handler_cls(cls):
"""The TensorRTmodle handler class. TrtModelHandleV1/TrtModelHandlerV2."""
@property
def model_config(self):
return self._ori_model.model_config
def generate_random_inputs(self, batch_size: Optional[int] = None):
return self._ori_model.generate_random_inputs(batch_size)
def run(self,
inputs=None,
warmup_iterations: int = 10,
benchmark_iterations: int = 100) -> TestResultCollection:
"""Runs model inference with provided or randomly generated input tensors.
Args:
      inputs: Mapping from names to input ndarrays in TF1, or a sequence of
        tensors in TF2. If `None`, randomly generated input tensors will be used
        instead.
warmup_iterations: Number of inferences to warm up the runtime.
benchmark_iterations: Number of inferences to measure the latency.
Returns:
`TestResultCollection` summarizing timing and numerics information for
different TensorRT conversion settings.
"""
inputs = inputs or self.generate_random_inputs()
results = [
model.run(inputs, warmup_iterations, benchmark_iterations)
for model in [self._ori_model] + self._trt_models
]
return self._result_collection._replace(results=results)
class ModelHandlerManagerV1(_ModelHandlerManagerBase):
"""Manages a series of ModelHandlers for aggregrated testing/benchmarking in TF1."""
model_handler_cls = ModelHandlerV1
trt_model_handler_cls = TrtModelHandlerV1
class ModelHandlerManagerV2(_ModelHandlerManagerBase):
"""Manages a series of ModelHandlers for aggregrated testing/benchmarking in TF2."""
model_handler_cls = ModelHandlerV2
trt_model_handler_cls = TrtModelHandlerV2
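# Manager usage sketch (hypothetical path; the identity updater yields a single
# TRT configuration):
#   manager = ModelHandlerManagerV2(
#       ModelConfig(saved_model_dir="/tmp/my_saved_model"),
#       default_trt_convert_params=trt.DEFAULT_TRT_CONVERSION_PARAMS,
#       trt_convert_params_updater=lambda params: [params])
#   collection = manager.run(warmup_iterations=5, benchmark_iterations=50)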
arpitran/HackerRank_solutions | Python/Python Evaluation/solution.py | a3a77c858edd3955ea38530916db9051b1aa93f9 | eval(input("Enter an expression "))
syamkakarla98/Kernel-PCA-Using-Different-Kernels-With-Classification | kpca_iris.py | 03302843bff9b0d87e2983bed1f37bc329e716c1 | import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# load dataset into Pandas DataFrame
df = pd.read_csv("D:\Python_programs\ML\Iris Data\KPCA\iris.csv")
#df.to_csv('iris.csv')
from sklearn.preprocessing import StandardScaler
features = ['sepal length', 'sepal width', 'petal length', 'petal width']
# Separating out the features
x = df.loc[:, features].values
# Separating out the target
y = df.loc[:,['target']].values
# Standardizing the features
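# (Scaling to zero mean / unit variance keeps any single feature's scale from
# dominating the kernel PCA components.)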
x = StandardScaler().fit_transform(x)
from sklearn.decomposition import KernelPCA
## Finding the principle components
# KERNELS : linear,rbf,poly
#
def Kernel_Pca(ker):
    kpca = KernelPCA(n_components=4, kernel=ker, gamma=15)
    # Fit once and keep the transformed data for the variance analysis.
    kpca_transform = kpca.fit_transform(x)
explained_variance = np.var(kpca_transform, axis=0)
ev = explained_variance / np.sum(explained_variance)
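    # Each entry of `ev` is a component's variance in the transformed space
    # divided by the total variance, so the four ratios sum to 1 and are
    # plotted below as percentages.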
#--------- Bar Graph for Explained Variance Ratio ------------
plt.bar([1,2,3,4],list(ev*100),label='Principal Components',color='b')
plt.legend()
plt.xlabel('Principal Components ')
#----------------------
n=list(ev*100)
pc=[]
for i in range(len(n)):
n[i]=round(n[i],4)
pc.append('PC-'+str(i+1)+'('+str(n[i])+')')
#----------------------
plt.xticks([1,2,3,4],pc, fontsize=7, rotation=30)
plt.ylabel('Variance Ratio')
plt.title('Variance Ratio of IRIS Dataset using kernel:'+str(ker))
plt.show()
#---------------------------------------------------
    # Since the first 2 principal components carry most of the variance,
    # we select PC-1 and PC-2.
#---------------------------------------------------
    kpca = KernelPCA(n_components=2, kernel=ker, gamma=15)
    principalComponents = kpca.fit_transform(x)
principalDf = pd.DataFrame(data = principalComponents
, columns = ['PC-1', 'PC-2'])
# Adding lables
finalDf = pd.concat([principalDf, df[['target']]], axis = 1)
# Plotting pc1 & pc2
fig = plt.figure(figsize = (8,8))
ax = fig.add_subplot(1,1,1)
ax.set_xlabel('PC-1', fontsize = 15)
ax.set_ylabel('PC-2', fontsize = 15)
ax.set_title('KPCA on IRIS Dataset using kernel:'+str(ker), fontsize = 20)
targets = ['Iris-setosa', 'Iris-versicolor', 'Iris-virginica']
colors = ['r', 'g', 'b']
for target, color in zip(targets,colors):
indicesToKeep = finalDf['target'] == target
ax.scatter(finalDf.loc[indicesToKeep, 'PC-1']
, finalDf.loc[indicesToKeep, 'PC-2']
, c = color
, s = 30)
ax.legend(targets)
ax.grid()
plt.show() # FOR SHOWING THE PLOT
#------------------- SAVING DATA INTO CSV FILE ------------
finalDf.to_csv('iris_after_KPCA_using_'+str(ker)+'.csv')
#------------------------------------------------------
k=['linear','rbf','poly']
for i in k:
Kernel_Pca(i)
felaray/Recognizers-Text | Python/libraries/recognizers-date-time/recognizers_date_time/date_time/italian/dateperiod_extractor_config.py | f514fd61c8d472ed92565261162712409f655312 | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import List, Pattern
from recognizers_text.utilities import RegExpUtility
from recognizers_number.number import BaseNumberParser
from recognizers_number.number.italian.extractors import ItalianIntegerExtractor, ItalianCardinalExtractor
from recognizers_number.number.italian.parsers import ItalianNumberParserConfiguration
from ...resources.base_date_time import BaseDateTime
from ...resources.italian_date_time import ItalianDateTime
from ..extractors import DateTimeExtractor
from ..base_duration import BaseDurationExtractor
from ..base_date import BaseDateExtractor
from ..base_dateperiod import DatePeriodExtractorConfiguration, MatchedIndex
from .duration_extractor_config import ItalianDurationExtractorConfiguration
from .date_extractor_config import ItalianDateExtractorConfiguration
from recognizers_text.extractor import Extractor
from recognizers_number import ItalianOrdinalExtractor, BaseNumberExtractor, ItalianCardinalExtractor
class ItalianDatePeriodExtractorConfiguration(DatePeriodExtractorConfiguration):
@property
def previous_prefix_regex(self) -> Pattern:
return self._previous_prefix_regex
@property
def check_both_before_after(self) -> bool:
return self._check_both_before_after
@property
def simple_cases_regexes(self) -> List[Pattern]:
return self._simple_cases_regexes
@property
def illegal_year_regex(self) -> Pattern:
return self._illegal_year_regex
@property
def year_regex(self) -> Pattern:
return self._year_regex
@property
def till_regex(self) -> Pattern:
return self._till_regex
@property
def followed_unit(self) -> Pattern:
return self._followed_unit
@property
def number_combined_with_unit(self) -> Pattern:
return self._number_combined_with_unit
@property
def past_regex(self) -> Pattern:
return self._past_regex
@property
def decade_with_century_regex(self) -> Pattern:
return self._decade_with_century_regex
@property
def future_regex(self) -> Pattern:
return self._future_regex
@property
def week_of_regex(self) -> Pattern:
return self._week_of_regex
@property
def month_of_regex(self) -> Pattern:
return self._month_of_regex
@property
def date_unit_regex(self) -> Pattern:
return self._date_unit_regex
@property
def in_connector_regex(self) -> Pattern:
return self._in_connector_regex
@property
def range_unit_regex(self) -> Pattern:
return self._range_unit_regex
@property
def date_point_extractor(self) -> DateTimeExtractor:
return self._date_point_extractor
@property
def integer_extractor(self) -> BaseNumberExtractor:
return self._integer_extractor
@property
def number_parser(self) -> BaseNumberParser:
return self._number_parser
@property
def duration_extractor(self) -> DateTimeExtractor:
return self._duration_extractor
@property
def now_regex(self) -> Pattern:
return self._now_regex
@property
def future_suffix_regex(self) -> Pattern:
return self._future_suffix_regex
@property
def ago_regex(self) -> Pattern:
return self._ago_regex
@property
def later_regex(self) -> Pattern:
return self._later_regex
@property
def less_than_regex(self) -> Pattern:
return self._less_than_regex
@property
def more_than_regex(self) -> Pattern:
return self._more_than_regex
@property
def duration_date_restrictions(self) -> [str]:
return self._duration_date_restrictions
@property
def year_period_regex(self) -> Pattern:
return self._year_period_regex
@property
def month_num_regex(self) -> Pattern:
return self._month_num_regex
@property
def century_suffix_regex(self) -> Pattern:
return self._century_suffix_regex
@property
def ordinal_extractor(self) -> BaseNumberExtractor:
return self._ordinal_extractor
@property
def cardinal_extractor(self) -> Extractor:
return self._cardinal_extractor
@property
def time_unit_regex(self) -> Pattern:
return self._time_unit_regex
@property
def within_next_prefix_regex(self) -> Pattern:
return self._within_next_prefix_regex
@property
def range_connector_regex(self) -> Pattern:
return self._range_connector_regex
@property
def day_regex(self) -> Pattern:
return self._day_regex
@property
def week_day_regex(self) -> Pattern:
return self._week_day_regex
@property
def relative_month_regex(self) -> Pattern:
return self._relative_month_regex
@property
def month_suffix_regex(self) -> Pattern:
return self._month_suffix_regex
@property
def past_prefix_regex(self) -> Pattern:
return self._past_prefix_regex
@property
def next_prefix_regex(self) -> Pattern:
return self._next_prefix_regex
@property
def this_prefix_regex(self) -> Pattern:
return self._this_prefix_regex
@property
def which_week_regex(self) -> Pattern:
return self._which_week_regex
@property
def rest_of_date_regex(self) -> Pattern:
return self._rest_of_date_regex
@property
def complex_date_period_regex(self) -> Pattern:
return self._complex_date_period_regex
@property
def week_day_of_month_regex(self) -> Pattern:
return self._week_day_of_month_regex
@property
def all_half_year_regex(self) -> Pattern:
return self._all_half_year_regex
def __init__(self):
self._all_half_year_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.AllHalfYearRegex)
self._week_day_of_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex)
self._complex_date_period_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ComplexDatePeriodRegex)
self._rest_of_date_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RestOfDateRegex)
self._which_week_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WhichWeekRegex)
self._this_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ThisPrefixRegex)
self._next_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.NextSuffixRegex)
self._past_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.PastSuffixRegex)
self._month_suffix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthSuffixRegex)
self._relative_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeMonthRegex)
self._week_day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayRegex)
self._day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.DayRegex)
self._range_connector_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RangeConnectorRegex)
self._time_unit_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.TimeUnitRegex)
self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.PastSuffixRegex)
self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter
self._simple_cases_regexes = [
RegExpUtility.get_safe_reg_exp(ItalianDateTime.SimpleCasesRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.BetweenRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.OneWordPeriodRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthWithYear),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthNumWithYear),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPeriodRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekOfYearRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex),
RegExpUtility.get_safe_reg_exp(
ItalianDateTime.MonthFrontBetweenRegex),
RegExpUtility.get_safe_reg_exp(
ItalianDateTime.MonthFrontSimpleCasesRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.QuarterRegex),
RegExpUtility.get_safe_reg_exp(
ItalianDateTime.QuarterRegexYearFront),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.SeasonRegex),
RegExpUtility.get_safe_reg_exp(
ItalianDateTime.LaterEarlyPeriodRegex),
RegExpUtility.get_safe_reg_exp(
ItalianDateTime.WeekWithWeekDayRangeRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPlusNumberRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.DecadeWithCenturyRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeDecadeRegex)
]
self._illegal_year_regex = RegExpUtility.get_safe_reg_exp(
BaseDateTime.IllegalYearRegex)
self._year_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.YearRegex)
self._till_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.TillRegex)
self._followed_unit = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.FollowedDateUnit)
self._number_combined_with_unit = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.NumberCombinedWithDateUnit)
self._past_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.PastSuffixRegex)
self._future_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.NextSuffixRegex)
self._week_of_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.WeekOfRegex)
self._month_of_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.MonthOfRegex)
self._date_unit_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.DateUnitRegex)
self._within_next_prefix_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.WithinNextPrefixRegex)
self._in_connector_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.InConnectorRegex)
self._range_unit_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.RangeUnitRegex)
self.from_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.FromRegex)
self.connector_and_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.ConnectorAndRegex)
self.before_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.BeforeRegex2)
self._date_point_extractor = BaseDateExtractor(
ItalianDateExtractorConfiguration())
self._integer_extractor = ItalianIntegerExtractor()
self._number_parser = BaseNumberParser(
ItalianNumberParserConfiguration())
self._duration_extractor = BaseDurationExtractor(
ItalianDurationExtractorConfiguration())
self._now_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.NowRegex)
self._future_suffix_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.FutureSuffixRegex
)
self._ago_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.AgoRegex
)
self._later_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.LaterRegex
)
self._less_than_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.LessThanRegex
)
self._more_than_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.MoreThanRegex
)
self._duration_date_restrictions = ItalianDateTime.DurationDateRestrictions
self._year_period_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.YearPeriodRegex
)
self._month_num_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.MonthNumRegex
)
self._century_suffix_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.CenturySuffixRegex
)
self._ordinal_extractor = ItalianOrdinalExtractor()
self._cardinal_extractor = ItalianCardinalExtractor()
self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.PreviousPrefixRegex
)
def get_from_token_index(self, source: str) -> MatchedIndex:
match = self.from_regex.search(source)
if match:
return MatchedIndex(True, match.start())
return MatchedIndex(False, -1)
def get_between_token_index(self, source: str) -> MatchedIndex:
match = self.before_regex.search(source)
if match:
return MatchedIndex(True, match.start())
return MatchedIndex(False, -1)
def has_connector_token(self, source: str) -> bool:
        return self.connector_and_regex.search(source) is not None
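# Minimal usage sketch (assuming the BaseDatePeriodExtractor wiring used
# elsewhere in this library; `text` is any input string):
#   config = ItalianDatePeriodExtractorConfiguration()
#   config.get_from_token_index(text)   # -> MatchedIndex(matched, index)
#   config.has_connector_token(text)    # -> bool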
danteay/pydbrepo | pydbrepo/drivers/sqlite.py | 665ad5fe64a00697128f9943e0fc831ae485f136 | """SQLite Driver implementation."""
# pylint: disable=R0201
import os
import sqlite3
from typing import Any, AnyStr, List, NoReturn, Optional, Tuple
from pydbrepo.drivers.driver import Driver
class SQLite(Driver):
"""SQLite Driver connection class.
Environment variables:
    DATABASE_URL: Database file url on the system. If it's an in-memory database, the url should
be None or `:memory:` string
DATABASE_COMMIT: default('false') Auto commit transaction flag
:type url:
:param url: Database connection url
:param autocommit: Auto commit transactions
"""
def __init__(
self,
url: Optional[AnyStr] = None,
autocommit: Optional[bool] = None,
):
super().__init__()
self.__build_connection(url, autocommit)
def __build_connection(
self,
url: Optional[AnyStr] = None,
autocommit: Optional[bool] = None,
) -> NoReturn:
"""Start real driver connection from parameters.
:param url: Database connection url
:param autocommit: Auto commit transactions
"""
if url is None:
url = ':memory:'
if autocommit is None:
autocommit = False
if os.getenv('DATABASE_URL', None) is not None:
url = os.getenv('DATABASE_URL')
if os.getenv('DATABASE_COMMIT', None) is not None:
autocommit = os.getenv('DATABASE_COMMIT').lower() == "true"
self.__url = url
self.__conn = sqlite3.connect(url)
self.__commit = autocommit
@staticmethod
def __execute(cursor, sql: AnyStr, *args) -> Any:
"""Execute query and attempt to replace with arguments.
:param cursor: Connection cursor statement
:param sql: Raw query to be executed
:param args: List of arguments passed to be replaced in query
"""
if not args:
return cursor.execute(sql)
return cursor.execute(sql, tuple(args))
def query(self, **kwargs) -> List[Tuple]:
"""Execute a query and return all values.
:param kwargs: Parameters to execute query statement.
sql: AnyStr -> SQL query statement
args: Optional[Iterable[Any]] -> Object with query replacement values
:return List[Tuple]: List of tuple records found by query
"""
self._validate_params({'sql'}, set(kwargs.keys()))
cursor = self.__conn.cursor()
_ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args', []))
self.__commit_transaction()
res = cursor.fetchall()
cursor.close()
return res
def query_one(self, **kwargs) -> Tuple[Any, ...]:
"""Execute a query and do not return any result value.
:param kwargs: Parameters to execute query statement.
sql: AnyStr -> SQL query statement
args: Optional[Iterable[Any]] -> Object with query replacement values
        :return Tuple: Found record, or None if the query returns no rows
"""
self._validate_params({'sql'}, set(kwargs.keys()))
cursor = self.__conn.cursor()
_ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args', []))
self.__commit_transaction()
res = cursor.fetchone()
cursor.close()
return res
def query_none(self, **kwargs) -> NoReturn:
"""Execute a query and do not return any result value.
:param kwargs: Parameters to execute query statement.
sql: AnyStr -> SQL query statement
args: Optional[Iterable[Any]] -> Object with query replacement values
"""
self._validate_params({'sql'}, set(kwargs.keys()))
cursor = self.__conn.cursor()
_ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args', []))
self.__commit_transaction()
cursor.close()
def commit(self) -> NoReturn:
"""Commit transaction."""
self.__conn.commit()
    def rollback(self) -> NoReturn:
        """Roll back the current transaction."""
        self.__conn.rollback()
def close(self) -> NoReturn:
"""Close current connection."""
self.__conn.close()
def get_real_driver(self) -> Any:
"""Return real mysql driver connection."""
return self.__conn
def placeholder(self, **kwargs) -> AnyStr:
"""Return query place holder."""
return '?'
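    # placeholder() example (illustrative): f"SELECT * FROM t WHERE id = {self.placeholder()}"
    # renders as "SELECT * FROM t WHERE id = ?", since sqlite3 uses qmark-style parameters.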
def reset_placeholder(self) -> NoReturn:
"""Reset place holder status (do nothing)"""
def __repr__(self):
"""Mysql driver representation."""
return f"SQLite({self.__url})"
def __commit_transaction(self):
"""Execute commit operation if the __commit flag is True."""
if self.__commit:
self.commit()
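# Minimal usage sketch (illustrative, not part of the original module). It
# assumes no DATABASE_URL/DATABASE_COMMIT overrides are set in the environment.
if __name__ == '__main__':
    db = SQLite(':memory:', autocommit=True)
    db.query_none(sql='CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)')
    db.query_none(
        sql=f'INSERT INTO users (name) VALUES ({db.placeholder()})',
        args=['ada'],
    )
    print(db.query_one(sql='SELECT id, name FROM users'))  # (1, 'ada')
    db.close()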
| [((58, 22, 58, 42), 'sqlite3.connect', 'sqlite3.connect', ({(58, 38, 58, 41): 'url'}, {}), '(url)', False, 'import sqlite3\n'), ((51, 11, 51, 42), 'os.getenv', 'os.getenv', ({(51, 21, 51, 35): '"""DATABASE_URL"""', (51, 37, 51, 41): 'None'}, {}), "('DATABASE_URL', None)", False, 'import os\n'), ((52, 18, 52, 43), 'os.getenv', 'os.getenv', ({(52, 28, 52, 42): '"""DATABASE_URL"""'}, {}), "('DATABASE_URL')", False, 'import os\n'), ((54, 11, 54, 45), 'os.getenv', 'os.getenv', ({(54, 21, 54, 38): '"""DATABASE_COMMIT"""', (54, 40, 54, 44): 'None'}, {}), "('DATABASE_COMMIT', None)", False, 'import os\n'), ((55, 25, 55, 53), 'os.getenv', 'os.getenv', ({(55, 35, 55, 52): '"""DATABASE_COMMIT"""'}, {}), "('DATABASE_COMMIT')", False, 'import os\n')] |
EmilPi/PuzzleLib | Modules/BatchNormND.py | 31aa0fab3b5e9472b9b9871ca52e4d94ea683fa9 | import numpy as np
from PuzzleLib import Config
from PuzzleLib.Backend import gpuarray, Blas
from PuzzleLib.Backend.Dnn import batchNormNd, batchNormNdBackward
from PuzzleLib.Variable import Variable
from PuzzleLib.Modules.Module import ModuleError, Module
class BatchNormND(Module):
def __init__(self, nd, maps, epsilon=1e-5, initFactor=1.0, minFactor=0.1, sscale=0.01, affine=True, name=None,
empty=False, inplace=False):
super().__init__(name)
self.inplace = inplace
if inplace and Config.showWarnings:
Config.getLogger().info("Warning: %s is using inplace flag", self)
self.maps = maps
self.epsilon = epsilon
self.initFactor = initFactor
self.minFactor = minFactor
self.numOfProps = 0
self.affine = affine
self.scale, self.bias, self.mean, self.var = None, None, None, None
self.savemean, self.saveinvvar, self.scalegrad, self.biasgrad = None, None, None, None
if empty:
return
shape = (1, maps) + self.repeat(1, nd)
scale = np.random.normal(1.0, sscale if affine else 0.0, shape).astype(self.calctype)
var = np.ones(shape, dtype=self.calctype)
self.setVar("scale", Variable(gpuarray.to_gpu(scale)))
self.setVar("bias", Variable(gpuarray.zeros(shape, dtype=self.calctype)))
self.setAttr("mean", gpuarray.zeros(shape, dtype=self.calctype))
self.setAttr("var", gpuarray.to_gpu(var))
def updateData(self, data):
if self.train:
if self.inplace:
raise ModuleError("%s: using inplace flag in train mode is prohibited" % self)
self.numOfProps += 1
factor = max(self.initFactor / self.numOfProps, self.minFactor)
self.data, self.savemean, self.saveinvvar = batchNormNd(
data, self.scale, self.bias, self.mean, self.var, self.epsilon, factor, False
)
else:
self.data = batchNormNd(
data, self.scale, self.bias, self.mean, self.var, self.epsilon, 0, True,
out=data if self.inplace else None
)
def updateGrad(self, grad):
tup = batchNormNdBackward(self.inData, grad, self.scale, self.savemean, self.saveinvvar, self.epsilon)
if self.affine:
self.grad, self.scalegrad, self.biasgrad = tup
else:
self.grad, _, _ = tup
def accGradParams(self, grad, scale=1.0, momentum=0.0):
if self.affine:
Blas.addVectorToVector(
self.scalegrad.ravel(), self.vars["scale"].grad.ravel(), out=self.vars["scale"].grad.ravel(),
alpha=scale, beta=momentum
)
Blas.addVectorToVector(
self.biasgrad.ravel(), self.vars["bias"].grad.ravel(), out=self.vars["bias"].grad.ravel(),
alpha=scale, beta=momentum
)
def dataShapeFrom(self, shape):
return shape
def gradShapeFrom(self, shape):
return shape
def reset(self):
super().reset()
self.savemean, self.saveinvvar = None, None
if self.affine:
self.scalegrad, self.biasgrad = None, None
def calcMode(self, T):
if Config.backend == Config.Backend.cuda:
if T not in {np.float16, np.float32}:
raise ModuleError("Unsupported dtype %s" % T)
elif T != np.float32:
raise ModuleError("Unsupported dtype %s" % T)
self.calctype = T
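# Shape contract sketch (illustrative; constructing the module assumes a
# configured PuzzleLib backend for the GPU arrays used in __init__):
#
#     bn = BatchNormND(nd=2, maps=16)
#     assert bn.dataShapeFrom((8, 16, 32, 32)) == (8, 16, 32, 32)
#     assert bn.gradShapeFrom((8, 16, 32, 32)) == (8, 16, 32, 32)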
| [((38, 8, 38, 43), 'numpy.ones', 'np.ones', (), '', True, 'import numpy as np\n'), ((66, 8, 66, 104), 'PuzzleLib.Backend.Dnn.batchNormNdBackward', 'batchNormNdBackward', ({(66, 28, 66, 39): 'self.inData', (66, 41, 66, 45): 'grad', (66, 47, 66, 57): 'self.scale', (66, 59, 66, 72): 'self.savemean', (66, 74, 66, 89): 'self.saveinvvar', (66, 91, 66, 103): 'self.epsilon'}, {}), '(self.inData, grad, self.scale, self.savemean, self.\n saveinvvar, self.epsilon)', False, 'from PuzzleLib.Backend.Dnn import batchNormNd, batchNormNdBackward\n'), ((43, 23, 43, 65), 'PuzzleLib.Backend.gpuarray.zeros', 'gpuarray.zeros', (), '', False, 'from PuzzleLib.Backend import gpuarray, Blas\n'), ((44, 22, 44, 42), 'PuzzleLib.Backend.gpuarray.to_gpu', 'gpuarray.to_gpu', ({(44, 38, 44, 41): 'var'}, {}), '(var)', False, 'from PuzzleLib.Backend import gpuarray, Blas\n'), ((55, 47, 57, 4), 'PuzzleLib.Backend.Dnn.batchNormNd', 'batchNormNd', ({(56, 4, 56, 8): 'data', (56, 10, 56, 20): 'self.scale', (56, 22, 56, 31): 'self.bias', (56, 33, 56, 42): 'self.mean', (56, 44, 56, 52): 'self.var', (56, 54, 56, 66): 'self.epsilon', (56, 68, 56, 74): 'factor', (56, 76, 56, 81): 'False'}, {}), '(data, self.scale, self.bias, self.mean, self.var, self.epsilon,\n factor, False)', False, 'from PuzzleLib.Backend.Dnn import batchNormNd, batchNormNdBackward\n'), ((59, 15, 62, 4), 'PuzzleLib.Backend.Dnn.batchNormNd', 'batchNormNd', (), '', False, 'from PuzzleLib.Backend.Dnn import batchNormNd, batchNormNdBackward\n'), ((37, 10, 37, 65), 'numpy.random.normal', 'np.random.normal', ({(37, 27, 37, 30): '1.0', (37, 32, 37, 57): 'sscale if affine else 0.0', (37, 59, 37, 64): 'shape'}, {}), '(1.0, sscale if affine else 0.0, shape)', True, 'import numpy as np\n'), ((40, 32, 40, 54), 'PuzzleLib.Backend.gpuarray.to_gpu', 'gpuarray.to_gpu', ({(40, 48, 40, 53): 'scale'}, {}), '(scale)', False, 'from PuzzleLib.Backend import gpuarray, Blas\n'), ((41, 31, 41, 73), 'PuzzleLib.Backend.gpuarray.zeros', 'gpuarray.zeros', (), '', False, 'from PuzzleLib.Backend import gpuarray, Blas\n'), ((50, 10, 50, 82), 'PuzzleLib.Modules.Module.ModuleError', 'ModuleError', ({(50, 22, 50, 81): "('%s: using inplace flag in train mode is prohibited' % self)"}, {}), "('%s: using inplace flag in train mode is prohibited' % self)", False, 'from PuzzleLib.Modules.Module import ModuleError, Module\n'), ((105, 10, 105, 49), 'PuzzleLib.Modules.Module.ModuleError', 'ModuleError', ({(105, 22, 105, 48): "('Unsupported dtype %s' % T)"}, {}), "('Unsupported dtype %s' % T)", False, 'from PuzzleLib.Modules.Module import ModuleError, Module\n'), ((108, 9, 108, 48), 'PuzzleLib.Modules.Module.ModuleError', 'ModuleError', ({(108, 21, 108, 47): "('Unsupported dtype %s' % T)"}, {}), "('Unsupported dtype %s' % T)", False, 'from PuzzleLib.Modules.Module import ModuleError, Module\n'), ((19, 3, 19, 21), 'PuzzleLib.Config.getLogger', 'Config.getLogger', ({}, {}), '()', False, 'from PuzzleLib import Config\n')] |
jnthn/intellij-community | python/testData/editing/enterInIncompleteTupleLiteral.after.py | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | xs = ('foo', 'bar',
'baz'<caret> | [] |
waltzofpearls/reckon | model/server/server.py | 533e47fd05f685024083ce7a823e9c26c35dd824 | from concurrent import futures
from forecaster.prophet import Forecaster as ProphetForecaster
from multiprocessing import Event, Process, cpu_count
from pythonjsonlogger import jsonlogger
import contextlib
import grpc
import logging
import model.api.forecast_pb2_grpc as grpc_lib
import os
import signal
import socket
import sys
import time
class ForecastServicer(ProphetForecaster):
def __init__(self, logger):
self.logger = logger
def pretty_timedelta(self, seconds):
seconds = int(seconds)
days, seconds = divmod(seconds, 86400)
hours, seconds = divmod(seconds, 3600)
minutes, seconds = divmod(seconds, 60)
if days > 0:
return '{:d}d{:d}h{:d}m{:d}s'.format(days, hours, minutes, seconds)
elif hours > 0:
return '{:d}h{:d}m{:d}s'.format(hours, minutes, seconds)
elif minutes > 0:
return '{:d}m{:d}s'.format(minutes, seconds)
else:
return '{:d}s'.format(seconds)
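    # For example (illustrative): pretty_timedelta(90061) == '1d1h1m1s',
    # pretty_timedelta(3723) == '1h2m3s' and pretty_timedelta(59) == '59s'.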
class GracefulShutdown:
def __init__(self, logger):
self.logger = logger
self.event = Event()
signal.signal(signal.SIGINT, self.handler('SIGINT'))
signal.signal(signal.SIGTERM, self.handler('SIGTERM'))
signal.signal(signal.SIGHUP, self.handler('SIGHUP'))
def handler(self, signal_name):
def fn(signal_received, frame):
self.logger.info('signal received', extra={'signal': signal_name})
self.event.set()
return fn
class Config(object):
def __init__(self):
self.grpc_server_address = os.getenv('GRPC_SERVER_ADDRESS', '')
self.grpc_server_key = str.encode(os.getenv('GRPC_SERVER_KEY', ''))
self.grpc_server_cert = str.encode(os.getenv('GRPC_SERVER_CERT', ''))
self.grpc_root_ca = str.encode(os.getenv('GRPC_ROOT_CA', ''))
        self.grpc_server_process_num = int(os.getenv('GRPC_SERVER_PROCESS_NUM', cpu_count()))
self.grpc_server_thread_num = int(os.getenv('GRPC_SERVER_THREAD_NUM', 1))
self.grpc_server_grace_period_in_secs = int(os.getenv('GRPC_SERVER_GRACE_PERIOD_IN_SECS', 2))
self.grpc_server_kill_period_in_secs = int(os.getenv('GRPC_SERVER_KILL_PERIOD_IN_SECS', 5))
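    # Illustrative local configuration (values are examples only); note that
    # GRPC_SERVER_ADDRESS must contain exactly one ':' because
    # Server._reserve_port splits on it to recover the port:
    #   GRPC_SERVER_ADDRESS=0.0.0.0:50051
    #   GRPC_SERVER_THREAD_NUM=4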
class Server(object):
def __init__(self, config, logger):
self.config = config
self.logger = logger
@contextlib.contextmanager
def _reserve_port(self):
"""Find and reserve a port for all subprocesses to use"""
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0:
raise RuntimeError('failed to set SO_REUSEPORT.')
_, port = self.config.grpc_server_address.split(':')
sock.bind(('', int(port)))
try:
yield sock.getsockname()[1]
finally:
sock.close()
def _run_server(self, shutdown_event):
server_credentials = grpc.ssl_server_credentials(
[(self.config.grpc_server_key, self.config.grpc_server_cert)],
root_certificates=self.config.grpc_root_ca,
require_client_auth=True
)
server = grpc.server(
futures.ThreadPoolExecutor(max_workers=self.config.grpc_server_thread_num),
options=[
("grpc.so_reuseport", 1),
("grpc.use_local_subchannel_pool", 1),
],
)
grpc_lib.add_ForecastServicer_to_server(ForecastServicer(self.logger), server)
server.add_secure_port(self.config.grpc_server_address, server_credentials)
self.logger.info('starting python gRPC server...')
server.start()
while not shutdown_event.is_set():
time.sleep(1)
server.stop(5).wait()
self.logger.info('python gRPC server stopped')
def serve(self):
with self._reserve_port():
procs = []
shutdown = GracefulShutdown(self.logger)
            for _ in range(self.config.grpc_server_process_num):
proc = Process(target=self._run_server, args=(shutdown.event,))
procs.append(proc)
proc.start()
while not shutdown.event.is_set():
time.sleep(1)
t = time.time()
grace_period = self.config.grpc_server_grace_period_in_secs
kill_period = self.config.grpc_server_kill_period_in_secs
while True:
# Send SIGINT if process doesn't exit quickly enough, and kill it as last resort
# .is_alive() also implicitly joins the process (good practice in linux)
alive_procs = [proc for proc in procs if proc.is_alive()]
if len(alive_procs) == 0:
break
elapsed = time.time() - t
if elapsed >= grace_period and elapsed < kill_period:
for proc in alive_procs:
proc.terminate()
self.logger.info("sending SIGTERM to subprocess", extra={'proc': proc})
elif elapsed >= kill_period:
for proc in alive_procs:
self.logger.warning("sending SIGKILL to subprocess", extra={'proc': proc})
# Queues and other inter-process communication primitives can break when
# process is killed, but we don't care here
proc.kill()
time.sleep(1)
time.sleep(1)
for proc in procs:
self.logger.info("subprocess terminated", extra={'proc': proc})
def json_logger():
logger = logging.getLogger()
log_handler = logging.StreamHandler(sys.stdout)
formatter = jsonlogger.JsonFormatter(fmt='%(asctime)s %(name)s %(levelname)s %(message)s')
log_handler.setFormatter(formatter)
log_handler.flush = sys.stdout.flush
logger.setLevel(logging.INFO)
logger.addHandler(log_handler)
return logger
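# Illustrative entrypoint (not part of the original module): wire the JSON
# logger into the multiprocess gRPC server and start serving.
if __name__ == '__main__':
    Server(Config(), json_logger()).serve()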
| [((139, 13, 139, 32), 'logging.getLogger', 'logging.getLogger', ({}, {}), '()', False, 'import logging\n'), ((140, 18, 140, 51), 'logging.StreamHandler', 'logging.StreamHandler', ({(140, 40, 140, 50): 'sys.stdout'}, {}), '(sys.stdout)', False, 'import logging\n'), ((141, 16, 141, 94), 'pythonjsonlogger.jsonlogger.JsonFormatter', 'jsonlogger.JsonFormatter', (), '', False, 'from pythonjsonlogger import jsonlogger\n'), ((36, 21, 36, 28), 'multiprocessing.Event', 'Event', ({}, {}), '()', False, 'from multiprocessing import Event, Process, cpu_count\n'), ((49, 35, 49, 71), 'os.getenv', 'os.getenv', ({(49, 45, 49, 66): '"""GRPC_SERVER_ADDRESS"""', (49, 68, 49, 70): '""""""'}, {}), "('GRPC_SERVER_ADDRESS', '')", False, 'import os\n'), ((66, 15, 66, 65), 'socket.socket', 'socket.socket', ({(66, 29, 66, 44): 'socket.AF_INET6', (66, 46, 66, 64): 'socket.SOCK_STREAM'}, {}), '(socket.AF_INET6, socket.SOCK_STREAM)', False, 'import socket\n'), ((78, 29, 82, 9), 'grpc.ssl_server_credentials', 'grpc.ssl_server_credentials', (), '', False, 'import grpc\n'), ((50, 42, 50, 74), 'os.getenv', 'os.getenv', ({(50, 52, 50, 69): '"""GRPC_SERVER_KEY"""', (50, 71, 50, 73): '""""""'}, {}), "('GRPC_SERVER_KEY', '')", False, 'import os\n'), ((51, 43, 51, 76), 'os.getenv', 'os.getenv', ({(51, 53, 51, 71): '"""GRPC_SERVER_CERT"""', (51, 73, 51, 75): '""""""'}, {}), "('GRPC_SERVER_CERT', '')", False, 'import os\n'), ((52, 39, 52, 68), 'os.getenv', 'os.getenv', ({(52, 49, 52, 63): '"""GRPC_ROOT_CA"""', (52, 65, 52, 67): '""""""'}, {}), "('GRPC_ROOT_CA', '')", False, 'import os\n'), ((54, 42, 54, 80), 'os.getenv', 'os.getenv', ({(54, 52, 54, 76): '"""GRPC_SERVER_THREAD_NUM"""', (54, 78, 54, 79): '1'}, {}), "('GRPC_SERVER_THREAD_NUM', 1)", False, 'import os\n'), ((55, 52, 55, 100), 'os.getenv', 'os.getenv', ({(55, 62, 55, 96): '"""GRPC_SERVER_GRACE_PERIOD_IN_SECS"""', (55, 98, 55, 99): '2'}, {}), "('GRPC_SERVER_GRACE_PERIOD_IN_SECS', 2)", False, 'import os\n'), ((56, 51, 56, 98), 'os.getenv', 'os.getenv', ({(56, 61, 56, 94): '"""GRPC_SERVER_KILL_PERIOD_IN_SECS"""', (56, 96, 56, 97): '5'}, {}), "('GRPC_SERVER_KILL_PERIOD_IN_SECS', 5)", False, 'import os\n'), ((84, 12, 84, 86), 'concurrent.futures.ThreadPoolExecutor', 'futures.ThreadPoolExecutor', (), '', False, 'from concurrent import futures\n'), ((96, 12, 96, 25), 'time.sleep', 'time.sleep', ({(96, 23, 96, 24): '(1)'}, {}), '(1)', False, 'import time\n'), ((112, 16, 112, 27), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((134, 12, 134, 25), 'time.sleep', 'time.sleep', ({(134, 23, 134, 24): '(1)'}, {}), '(1)', False, 'import time\n'), ((53, 80, 53, 91), 'multiprocessing.cpu_count', 'cpu_count', ({}, {}), '()', False, 'from multiprocessing import Event, Process, cpu_count\n'), ((106, 23, 106, 79), 'multiprocessing.Process', 'Process', (), '', False, 'from multiprocessing import Event, Process, cpu_count\n'), ((110, 16, 110, 29), 'time.sleep', 'time.sleep', ({(110, 27, 110, 28): '(1)'}, {}), '(1)', False, 'import time\n'), ((132, 16, 132, 29), 'time.sleep', 'time.sleep', ({(132, 27, 132, 28): '(1)'}, {}), '(1)', False, 'import time\n'), ((121, 26, 121, 37), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n')] |
jhgoebbert/jupyter-libertem-proxy | test/test_setupcall.py | 2f966744c08c14c534030c2623fe4a3a8590dabe | def test_setupcall():
"""
Test the call of the setup function
"""
import jupyter_libertem_proxy as jx
print("\nRunning test_setupcall...")
print(jx.setup_libertem())
| [((8, 10, 8, 29), 'jupyter_libertem_proxy.setup_libertem', 'jx.setup_libertem', ({}, {}), '()', True, 'import jupyter_libertem_proxy as jx\n')] |
LaudateCorpus1/launchpad | launchpad/launch/worker_manager.py | 6068bbaff9da6d9d520c01314ef920d0d4978afc | # Copyright 2020 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""WorkerManager handles thread and process-based runtimes."""
import atexit
import collections
from concurrent import futures
import ctypes
import os
import signal
import subprocess
import threading
import time
from typing import Optional, Sequence, Text
from absl import flags
from absl import logging
from absl.testing import absltest
from launchpad import flags as lp_flags
import psutil
import termcolor
FLAGS = flags.FLAGS
ThreadWorker = collections.namedtuple('ThreadWorker', ['thread', 'future'])
_WORKER_MANAGERS = threading.local()
_HAS_MAIN_MANAGER = False
def get_worker_manager():
manager = getattr(_WORKER_MANAGERS, 'manager', None)
assert manager, 'Worker manager is not available in the current thread'
return manager
def register_signal_handler(sig, handler):
"""Registers a signal handler."""
return signal.signal(sig, handler)
def remove_signal_handler(sig, handler):
return signal.signal(sig, handler)
def wait_for_stop():
"""Blocks until termination of the node's program is requested.
Can be used to perform cleanup at the end of the run, for example:
start_server()
lp.wait_for_stop()
stop_server()
checkpoint()
"""
get_worker_manager().wait_for_stop()
class WorkerManager:
"""Encapsulates running threads and processes of a Launchpad Program."""
def __init__(
self,
stop_main_thread=False,
kill_main_thread=True,
register_in_thread=False,
register_signals=True):
"""Initializes a WorkerManager.
Args:
stop_main_thread: Should main thread be notified about termination.
kill_main_thread: When set to false try not to kill the launcher while
killing workers. This is not possible when thread workers run in the
same process.
      register_in_thread: Whether to register this manager in the current
        thread, so that it is returned by get_worker_manager().
register_signals: Whether or not to register signal handlers.
"""
self._mutex = threading.Lock()
self._termination_notice_secs = -1
handle_user_stop = False
global _HAS_MAIN_MANAGER
# Make the first created worker manager the main manager, which handles
# signals.
if not _HAS_MAIN_MANAGER:
self._termination_notice_secs = FLAGS.lp_termination_notice_secs
handle_user_stop = True
_HAS_MAIN_MANAGER = True
self._active_workers = collections.defaultdict(list)
self._workers_count = collections.defaultdict(lambda: 0)
self._first_failure = None
self._stop_counter = 0
self._alarm_enabled = False
self._kill_main_thread = kill_main_thread
self._stop_event = threading.Event()
self._main_thread = threading.current_thread().ident
self._sigterm_handler = None
self._sigquit_handler = None
self._sigalrm_handler = None
if register_signals:
self._sigterm_handler = register_signal_handler(signal.SIGTERM,
self._sigterm)
self._sigquit_handler = register_signal_handler(signal.SIGQUIT,
self._sigquit)
if handle_user_stop:
register_signal_handler(
signal.SIGINT, lambda sig=None, frame=None: self._stop_by_user())
self._stop_main_thread = stop_main_thread
if register_in_thread:
_WORKER_MANAGERS.manager = self
def _disable_signals(self):
self._disable_alarm()
if self._sigterm_handler is not None:
remove_signal_handler(signal.SIGTERM, self._sigterm_handler)
self._sigterm_handler = None
if self._sigquit_handler is not None:
remove_signal_handler(signal.SIGQUIT, self._sigquit_handler)
self._sigquit_handler = None
def _sigterm(self, sig=None, frame=None):
"""Handles SIGTERM by stopping the workers."""
if callable(self._sigterm_handler):
self._sigterm_handler(sig, frame)
self._stop()
def _sigquit(self, sig=None, frame=None):
if callable(self._sigquit_handler):
self._sigquit_handler(sig, frame)
self._kill()
def wait_for_stop(self):
"""Blocks until managed runtime is being terminated."""
self._stop_event.wait()
def thread_worker(self, name, function):
"""Registers and start a new thread worker.
Args:
name: Name of the worker group.
function: Entrypoint function to execute in a worker.
"""
with self._mutex:
future = futures.Future()
def run_inner(f=function, future=future, manager=self):
_WORKER_MANAGERS.manager = manager
try:
future.set_result(f())
except BaseException as e:
future.set_exception(e)
builder = lambda t, n: threading.Thread(target=t, name=n)
thread = builder(run_inner, name)
      thread.daemon = True
thread.start()
self._workers_count[name] += 1
worker = ThreadWorker(thread=thread, future=future)
self._active_workers[name].append(worker)
if self._stop_event.is_set():
# Runtime is terminating, so notify the worker.
self._send_exception(worker)
def process_worker(self, name, command, env=None, **kwargs):
"""Adds process worker to the runtime.
Args:
name: Name of the worker's group.
command: Command to execute in the worker.
env: Environment variables to set for the worker.
**kwargs: Other parameters to be passed to `subprocess.Popen`.
"""
with self._mutex:
process = subprocess.Popen(command, env=env or {}, **kwargs)
self._workers_count[name] += 1
self._active_workers[name].append(process)
def register_existing_process(self, name: str, pid: int):
"""Registers already started worker process.
Args:
name: Name of the workers' group.
pid: Pid of the process to monitor.
"""
with self._mutex:
self._workers_count[name] += 1
self._active_workers[name].append(psutil.Process(pid))
def _stop_by_user(self):
"""Handles stopping of the runtime by a user."""
if self._termination_notice_secs != 0:
print(
termcolor.colored(
'User-requested termination. Asking workers to stop.', 'blue'))
print(termcolor.colored('Press CTRL+C to terminate immediately.', 'blue'))
signal.signal(signal.SIGINT, lambda sig, frame: self._kill())
self._stop()
def _kill_process_tree(self, pid):
"""Kills all child processes of the current process."""
parent = psutil.Process(pid)
for process in parent.children(recursive=True):
try:
process.send_signal(signal.SIGKILL)
except psutil.NoSuchProcess:
pass
parent.send_signal(signal.SIGKILL)
def _kill(self):
"""Kills all workers (and main thread/process if needed)."""
print(termcolor.colored('\nKilling entire runtime.', 'blue'))
kill_self = self._kill_main_thread
for workers in self._active_workers.values():
for worker in workers:
if isinstance(worker, ThreadWorker):
# Not possible to kill a thread without killing the process.
kill_self = True
else:
self._kill_process_tree(worker.pid)
if kill_self:
self._kill_process_tree(os.getpid())
def _send_exception(self, worker):
res = ctypes.pythonapi.PyThreadState_SetAsyncExc(
ctypes.c_long(worker.thread.ident),
ctypes.py_object(SystemExit))
assert res < 2, 'Exception raise failure'
def _stop_or_kill(self):
"""Stops all workers; kills them if they don't stop on time."""
pending_secs = self._termination_notice_secs - self._stop_counter
if pending_secs == 0:
if self._termination_notice_secs > 0:
still_running = [
label for label in self._active_workers
if self._active_workers[label]
]
print(
termcolor.colored(
f'Worker groups that did not terminate in time: {still_running}',
'red'))
self._kill()
return
if pending_secs >= 0:
print(
termcolor.colored(f'Waiting for workers to stop for {pending_secs}s.',
'blue'),
end='\r')
self._stop_counter += 1
for workers in self._active_workers.values():
for worker in workers:
if isinstance(worker, ThreadWorker):
if self._stop_counter == 1:
self._send_exception(worker)
elif isinstance(worker, subprocess.Popen):
worker.send_signal(signal.SIGTERM)
else:
# Notify all workers running under a proxy process.
children = worker.children(recursive=True)
worker_found = False
for process in children:
if process.name() != 'bash' and 'envelope_' not in process.name():
try:
worker_found = True
process.send_signal(signal.SIGTERM)
except psutil.NoSuchProcess:
pass
if not worker_found:
# No more workers running, so we can kill the proxy itself.
try:
worker.send_signal(signal.SIGKILL)
except psutil.NoSuchProcess:
pass
if self._stop_main_thread:
res = ctypes.pythonapi.PyThreadState_SetAsyncExc(
ctypes.c_long(threading.main_thread().ident),
ctypes.py_object(SystemExit))
assert res < 2, 'Exception raise failure'
if pending_secs >= 0:
signal.alarm(1)
def _stop(self):
"""Requests all workers to stop and schedule delayed termination."""
if not self._stop_event.is_set():
self._stop_event.set()
try:
if self._termination_notice_secs > 0:
self._alarm_enabled = True
self._sigalrm_handler = register_signal_handler(
signal.SIGALRM, lambda sig=None, frame=None: self._stop_or_kill())
except ValueError:
# This happens when we attempt to register a signal handler but not in the
# main thread. Send a SIGTERM to redirect to the main thread.
psutil.Process(os.getpid()).send_signal(signal.SIGTERM)
return
self._stop_or_kill()
def _disable_alarm(self):
if self._alarm_enabled:
self._alarm_enabled = False
signal.alarm(0)
remove_signal_handler(signal.SIGALRM, self._sigalrm_handler)
def stop_and_wait(self):
"""Requests stopping all workers and wait for termination."""
with self._mutex:
self._stop()
self.wait(raise_error=False)
def join(self):
self.wait()
def wait(self,
labels_to_wait_for: Optional[Sequence[Text]] = None,
raise_error=True,
return_on_first_completed=False):
"""Waits for workers to finish.
Args:
labels_to_wait_for: If supplied, only wait for these groups' workers to
finish. Wait for all workers otherwise.
raise_error: Raise an exception upon any worker failure.
return_on_first_completed: Whether to return upon the first completed (or
failed) worker.
Raises:
RuntimeError: if any worker raises an exception.
"""
while True:
try:
active_workers = True
while active_workers:
with self._mutex:
self._check_workers()
active_workers = False
if self._first_failure and raise_error:
failure = self._first_failure
self._first_failure = None
raise failure
for label in labels_to_wait_for or self._active_workers.keys():
if self._active_workers[label]:
active_workers = True
if (return_on_first_completed and len(self._active_workers[label])
< self._workers_count[label]):
return
time.sleep(0.1)
return
except SystemExit:
self._stop()
def cleanup_after_test(self, test_case: absltest.TestCase):
"""Cleanups runtime after a test."""
with self._mutex:
self._check_workers()
self._stop()
self._disable_signals()
self.wait(raise_error=False)
with self._mutex:
if self._first_failure:
raise self._first_failure
def _check_workers(self):
"""Checks status of running workers, terminate runtime in case of errors."""
has_workers = False
for label in self._active_workers:
still_active = []
for worker in self._active_workers[label]:
active = True
if isinstance(worker, ThreadWorker):
if not worker.thread.is_alive():
worker.thread.join()
if not self._stop_counter:
try:
worker.future.result()
except BaseException as e:
if not self._first_failure and not self._stop_counter:
self._first_failure = e
active = False
elif isinstance(worker, subprocess.Popen):
try:
res = worker.wait(0)
active = False
if res and not self._first_failure and not self._stop_counter:
self._first_failure = RuntimeError('One of the workers failed.')
except subprocess.TimeoutExpired:
pass
else:
try:
# We can't obtain return code of external process, so clean
# termination is assumed.
res = worker.wait(0)
active = False
except psutil.TimeoutExpired:
pass
if active:
has_workers = True
still_active.append(worker)
self._active_workers[label] = still_active
if has_workers and self._first_failure and not self._stop_counter:
self._stop()
elif not has_workers:
self._disable_alarm()
def __del__(self):
self._disable_signals()
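# Minimal usage sketch (illustrative only): run a single thread worker under a
# manager and block until it completes. absl flags must be parsed first, hence
# the app.run wrapper.
if __name__ == '__main__':
  from absl import app
  def _main(_):
    manager = WorkerManager(register_in_thread=True)
    manager.thread_worker('echo', lambda: print('hello from a managed thread'))
    manager.wait(labels_to_wait_for=['echo'])
  app.run(_main)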
| [apis: collections.namedtuple; threading.local/Lock/Event/Thread/current_thread/main_thread; concurrent.futures.Future; signal.signal and signal.alarm; subprocess.Popen; psutil.Process; ctypes.pythonapi.PyThreadState_SetAsyncExc with ctypes.c_long/ctypes.py_object; termcolor.colored; os.getpid and time.sleep calls, with source positions as extracted]