Dataset schema (each record below lists its fields in this column order):

    max_stars_repo_path   string   (length 3 to 269)
    max_stars_repo_name   string   (length 4 to 119)
    max_stars_count       int64    (0 to 191k)
    id                    string   (length 1 to 7)
    content               string   (length 6 to 1.05M)
    score                 float64  (0.23 to 5.13)
    int_score             int64    (0 to 5)
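A minimal sketch of loading and filtering a dump with this schema, assuming the records are stored as a parquet file (the file name below is hypothetical, not part of the dump):

    import pandas as pd

    # Hypothetical path; point this at the actual dump file.
    df = pd.read_parquet("data.parquet")

    # Keep well-scored, reasonably small files, using the score/int_score
    # columns described in the schema above.
    sample = df[(df["int_score"] >= 2) & (df["content"].str.len() < 100_000)]
    print(sample[["max_stars_repo_path", "score"]].head())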
GearBot/Util/Pages.py
JohnyTheCarrot/GearBot
0
4600
import discord

from Util import Utils, Emoji, Translator

page_handlers = dict()

known_messages = dict()


def on_ready(bot):
    load_from_disc()


def register(type, init, update, sender_only=False):
    page_handlers[type] = {
        "init": init,
        "update": update,
        "sender_only": sender_only
    }


def unregister(type_handler):
    if type_handler in page_handlers.keys():
        del page_handlers[type_handler]


async def create_new(type, ctx, **kwargs):
    text, embed, has_pages, emoji = await page_handlers[type]["init"](ctx, **kwargs)
    message: discord.Message = await ctx.channel.send(text, embed=embed)
    if has_pages or len(emoji) > 0:
        data = {
            "type": type,
            "page": 0,
            "trigger": ctx.message.id,
            "sender": ctx.author.id
        }
        for k, v in kwargs.items():
            data[k] = v
        known_messages[str(message.id)] = data
        try:
            if has_pages:
                await message.add_reaction(Emoji.get_emoji('LEFT'))
            for e in emoji:
                await message.add_reaction(e)
            if has_pages:
                await message.add_reaction(Emoji.get_emoji('RIGHT'))
        except discord.Forbidden:
            await ctx.send(
                f"{Emoji.get_chat_emoji('WARNING')} {Translator.translate('paginator_missing_perms', ctx, prev=Emoji.get_chat_emoji('LEFT'), next=Emoji.get_chat_emoji('RIGHT'))} {Emoji.get_chat_emoji('WARNING')}")
        if len(known_messages.keys()) > 500:
            del known_messages[list(known_messages.keys())[0]]
        save_to_disc()


async def update(bot, message, action, user):
    message_id = str(message.id)
    if message_id in known_messages.keys():
        type = known_messages[message_id]["type"]
        if type in page_handlers.keys():
            data = known_messages[message_id]
            if data["sender"] == user or page_handlers[type]["sender_only"] is False:
                page_num = data["page"]
                try:
                    trigger_message = await message.channel.get_message(data["trigger"])
                except discord.NotFound:
                    trigger_message = None
                ctx = await bot.get_context(trigger_message) if trigger_message is not None else None
                text, embed, page = await page_handlers[type]["update"](ctx, message, page_num, action, data)
                await message.edit(content=text, embed=embed)
                known_messages[message_id]["page"] = page
                save_to_disc()
                return True
    return False


def basic_pages(pages, page_num, action):
    if action == "PREV":
        page_num -= 1
    elif action == "NEXT":
        page_num += 1
    if page_num < 0:
        page_num = len(pages) - 1
    if page_num >= len(pages):
        page_num = 0
    page = pages[page_num]
    return page, page_num


def paginate(input, max_lines=20, max_chars=1900, prefix="", suffix=""):
    max_chars -= len(prefix) + len(suffix)
    lines = str(input).splitlines(keepends=True)
    pages = []
    page = ""
    count = 0
    for line in lines:
        if len(page) + len(line) > max_chars or count == max_lines:
            if page == "":
                # single 2k line, split smaller
                words = line.split(" ")
                for word in words:
                    if len(page) + len(word) > max_chars:
                        pages.append(f"{prefix}{page}{suffix}")
                        page = f"{word} "
                    else:
                        page += f"{word} "
            else:
                pages.append(f"{prefix}{page}{suffix}")
                page = line
                count = 1
        else:
            page += line
            count += 1
    pages.append(f"{prefix}{page}{suffix}")
    return pages


def paginate_fields(input):
    pages = []
    for page in input:
        page_fields = dict()
        for name, content in page.items():
            page_fields[name] = paginate(content, max_chars=1024)
        pages.append(page_fields)
    real_pages = []
    for page in pages:
        page_count = 0
        page_fields = dict()
        for name, parts in page.items():
            base_name = name
            if len(parts) == 1:
                if page_count + len(name) + len(parts[0]) > 4000:
                    real_pages.append(page_fields)
                    page_fields = dict()
                    page_count = 0
                page_fields[name] = parts[0]
                page_count += len(name) + len(parts[0])
            else:
                for i in range(len(parts)):
                    part = parts[i]
                    name = f"{base_name} ({i+1}/{len(parts)})"
                    if page_count + len(name) + len(part) > 3000:
                        real_pages.append(page_fields)
                        page_fields = dict()
                        page_count = 0
                    page_fields[name] = part
                    page_count += len(name) + len(part)
        real_pages.append(page_fields)
    return real_pages


def save_to_disc():
    Utils.saveToDisk("known_messages", known_messages)


def load_from_disc():
    global known_messages
    known_messages = Utils.fetch_from_disk("known_messages")
2.1875
2
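The paginate helper in the record above depends only on builtins, so it can be exercised standalone; a small sketch, assuming the function has been copied out of the module:

    text = "\n".join(f"line {i}" for i in range(100))
    pages = paginate(text, max_lines=20, max_chars=1900, prefix="```", suffix="```")
    print(len(pages))  # 5: the 100 lines are flushed in groups of max_lines=20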
software/Opal/spud/diamond/build/lib.linux-x86_64-2.7/diamond/dialogs.py
msc-acse/acse-9-independent-research-project-Wade003
2
4601
#!/usr/bin/env python

# This file is part of Diamond.
#
# Diamond is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Diamond is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Diamond. If not, see <http://www.gnu.org/licenses/>.

import os
import sys
import traceback

import gtk

import pygtkconsole


def prompt(parent, message, type=gtk.MESSAGE_QUESTION, has_cancel=False):
    """
    Display a simple Yes / No dialog. Returns one of gtk.RESPONSE_{YES,NO,CANCEL}.
    """
    prompt_dialog = gtk.MessageDialog(parent, 0, type, gtk.BUTTONS_NONE, message)
    prompt_dialog.add_buttons(gtk.STOCK_YES, gtk.RESPONSE_YES, gtk.STOCK_NO, gtk.RESPONSE_NO)
    if has_cancel:
        prompt_dialog.add_buttons(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
    prompt_dialog.connect("response", prompt_response)
    prompt_dialog.run()

    return prompt_response.response


def long_message(parent, message):
    """
    Display a message prompt, with the message contained within a scrolled window.
    """
    message_dialog = gtk.Dialog(parent=parent, buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT))
    message_dialog.set_default_size(400, 300)
    message_dialog.connect("response", close_dialog)

    scrolled_window = gtk.ScrolledWindow()
    message_dialog.vbox.add(scrolled_window)
    scrolled_window.show()
    scrolled_window.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_ALWAYS)

    text_view = gtk.TextView()
    scrolled_window.add(text_view)
    text_view.show()
    text_view.get_buffer().set_text(message)
    text_view.set_cursor_visible(False)
    text_view.set_property("editable", False)
    text_view.set_property("height-request", 180)
    text_view.set_property("width-request", 240)

    message_dialog.run()

    return


def error(parent, message):
    """
    Display an error message.
    """
    error_dialog = gtk.MessageDialog(parent, 0, gtk.MESSAGE_WARNING, gtk.BUTTONS_OK, message)
    error_dialog.connect("response", close_dialog)
    error_dialog.run()

    return


def error_tb(parent, message):
    """
    Display an error message, together with the last traceback.
    """
    tb = traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])
    tb_msg = ""
    for tbline in tb:
        tb_msg += tbline
    long_message(parent, tb_msg + "\n" + message)

    return


def get_filename(title, action, filter_names_and_patterns={}, folder_uri=None):
    """
    Utility function to get a filename.
    """
    if action == gtk.FILE_CHOOSER_ACTION_SAVE:
        buttons = (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_SAVE, gtk.RESPONSE_OK)
    elif action == gtk.FILE_CHOOSER_ACTION_CREATE_FOLDER:
        buttons = (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_NEW, gtk.RESPONSE_OK)
    else:
        buttons = (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK)

    filew = gtk.FileChooserDialog(title=title, action=action, buttons=buttons)
    filew.set_default_response(gtk.RESPONSE_OK)
    if folder_uri is not None:
        filew.set_current_folder_uri("file://" + os.path.abspath(folder_uri))

    for filtername in filter_names_and_patterns:
        filter = gtk.FileFilter()
        filter.set_name(filtername)
        filter.add_pattern(filter_names_and_patterns[filtername])
        filew.add_filter(filter)

    allfilter = gtk.FileFilter()
    allfilter.set_name("All known files")
    for filtername in filter_names_and_patterns:
        allfilter.add_pattern(filter_names_and_patterns[filtername])
    filew.add_filter(allfilter)

    filter = gtk.FileFilter()
    filter.set_name("All files")
    filter.add_pattern("*")
    filew.add_filter(filter)

    result = filew.run()
    if result == gtk.RESPONSE_OK:
        filename = filew.get_filename()
        filtername = filew.get_filter().get_name()
        filew.destroy()
        return filename
    else:
        filew.destroy()
        return None


def console(parent, locals=None):
    """
    Launch a python console.
    """
    console_dialog = gtk.Dialog(parent=parent, buttons=(gtk.STOCK_QUIT, gtk.RESPONSE_ACCEPT))
    console_dialog.set_default_size(400, 300)
    console_dialog.connect("response", close_dialog)

    stdout = sys.stdout
    stderr = sys.stderr

    console_widget = pygtkconsole.GTKInterpreterConsole(locals)
    console_dialog.vbox.add(console_widget)
    console_widget.show()

    console_dialog.run()

    sys.stdout = stdout
    sys.stderr = stderr

    return


def prompt_response(dialog, response_id):
    """
    Signal handler for dialog response signals. Stores the dialog response
    in the function namespace, to allow response return in other functions.
    """
    if response_id == gtk.RESPONSE_DELETE_EVENT:
        response_id = gtk.RESPONSE_CANCEL
    prompt_response.response = response_id
    close_dialog(dialog, response_id)

    return


def close_dialog(dialog, response_id=None):
    """
    Signal handler for dialog response or destroy signals. Closes the dialog.
    """
    dialog.destroy()

    return


def radio_dialog(title, message, choices, logo):
    r = RadioDialog(title, message, choices, logo)
    return r.data


def message_box(window, title, message):
    dialog = gtk.MessageDialog(window, 0, gtk.MESSAGE_INFO, gtk.BUTTONS_OK, message)
    dialog.set_title(title)
    dialog.connect("response", close_dialog)
    dialog.run()


class RadioDialog:
    def __init__(self, title, message, choices, logo):
        self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        self.window.connect("delete_event", self.cleanup)
        self.window.connect("key_press_event", self.key_press)
        self.window.set_title(title)
        self.window.set_position(gtk.WIN_POS_CENTER)
        if logo is not None:
            self.window.set_icon_from_file(logo)
        self.window.show()

        #swindow = gtk.ScrolledWindow()
        #swindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
        #self.window.add(swindow)
        #swindow.show()

        main_box = gtk.VBox(False, 0)
        self.window.add(main_box)
        main_box.show()

        if logo is not None:
            image = gtk.Image()
            image.set_from_file(logo)
            main_box.pack_start(image, True, True, 0)
            image.show()

        label = gtk.Label(message)
        main_box.add(label)
        label.show()

        radio_box = gtk.VBox(False, 10)
        main_box.pack_start(radio_box, True, True, 0)
        radio_box.show()

        separator = gtk.HSeparator()
        main_box.pack_start(separator, False, True, 0)
        separator.show()

        close = gtk.Button(stock=gtk.STOCK_OK)
        close.connect("clicked", self.cleanup)
        main_box.pack_start(close, False, False, 0)
        close.show()

        prev_radio = None
        for choice in choices:
            radio = gtk.RadioButton(prev_radio, choice)
            radio.connect("toggled", self.radio_callback, choice)
            radio_box.pack_start(radio, False, False, 0)
            radio.show()
            if prev_radio is None:
                radio.set_active(True)
            prev_radio = radio

        self.data = choices[0]

        gtk.main()

    def cleanup(self, widget, data=None):
        self.window.destroy()
        gtk.main_quit()

    def key_press(self, widget, event):
        if event.keyval == gtk.keysyms.Return:
            self.cleanup(None)

    def radio_callback(self, widget, data):
        self.data = data


class GoToDialog:
    def __init__(self, parent):
        self.goto_gui = gtk.glade.XML(parent.gladefile, root="GoToDialog")
        self.dialog_box = self.goto_gui.get_widget("GoToDialog")
        self.dialog_box.set_modal(True)

    def run(self):
        signals = {"goto_activate": self.on_goto_activate,
                   "cancel_activate": self.on_cancel_activate}
        self.goto_gui.signal_autoconnect(signals)
        self.dialog_box.show()
        return ""

    def on_goto_activate(self, widget=None):
        print "goto"

    def on_cancel_activate(self, widget=None):
        print "cancel"
2.609375
3
src/mazes.py
tim-fi/pyxel_games
2
4602
from __future__ import annotations

from dataclasses import dataclass, field, InitVar
from typing import List, Tuple, Iterator, Iterable, Optional
from random import choice

import pyxel

# -------------------------------------------------------
# Types
# -------------------------------------------------------
Maze = Tuple[int, ...]

# -------------------------------------------------------
# Constants
# -------------------------------------------------------
SCALE = 3
BOARD_WIDTH = 32
BOARD_HEIGHT = 32
CELL_SIZE = 6
CELL_COLOR = 15
WALL_SIZE = 1
WALL_COLOR = 5

# Flags
UP = 1 << 0
LEFT = 1 << 1
DOWN = 1 << 2
RIGHT = 1 << 3
VISTED = 1 << 4

# Calculated
N_CELLS = BOARD_WIDTH * BOARD_HEIGHT
BLOCK_SIZE = CELL_SIZE + WALL_SIZE * 2
WINDOW_WIDTH = BOARD_WIDTH * BLOCK_SIZE
WINDOW_HEIGHT = BOARD_HEIGHT * BLOCK_SIZE
NEIGHBORS = ((0, -1), (-1, 0), (0, 1), (1, 0))


# -------------------------------------------------------
# Maze
# -------------------------------------------------------
@dataclass
class Generator:
    width: int
    height: int
    start_pos: InitVar[Optional[Tuple[int, int]]] = None

    _visited_cells: int = field(init=False, default=0)
    _stack: List[Tuple[int, int]] = field(init=False, default_factory=list)
    _maze: List[int] = field(init=False)

    def __post_init__(self, start_pos: Optional[Tuple[int, int]]):
        x, y = start_pos = start_pos or (0, 0)
        self._stack.append(start_pos)
        self._visited_cells = 1
        self._maze = [0 for _ in range(self.width * self.height)]
        self._maze[y * self.width + x] |= VISTED

    def _get_neighbors(self, x: int, y: int) -> List[int]:
        return [
            (i, dx, dy)
            for i, (dx, dy) in enumerate(NEIGHBORS)
            if (
                0 <= x + dx < self.width
                and 0 <= y + dy < self.height
                and self._maze[(y + dy) * self.width + (x + dx)] & VISTED == 0
            )
        ]

    def step(self) -> Tuple[Maze, Tuple[int, int], bool]:
        if self._visited_cells < self.width * self.height:
            x, y = self._stack[-1]
            neighbors = self._get_neighbors(x, y)
            if neighbors:
                d, dx, dy = choice(neighbors)
                self._maze[y * self.width + x] |= 1 << d
                x_, y_ = x + dx, y + dy
                self._maze[y_ * self.width + x_] |= 1 << ((d + 2) % 4) | VISTED
                self._stack.append((x_, y_))
                self._visited_cells += 1
            else:
                del self._stack[-1]
            return tuple(self._maze), self._stack[-1], False
        else:
            return tuple(self._maze), (0, 0), True


# -------------------------------------------------------
# Application
# -------------------------------------------------------
@dataclass
class App:
    maze: Maze = field(init=False, default=tuple(0 for _ in range(N_CELLS)))
    generator: Optional[Generator] = field(init=False, default=None)
    running: bool = field(init=False, default=False)
    pos: Tuple[int, int] = field(init=False, default=(0, 0))

    def run(self):
        pyxel.init(
            WINDOW_WIDTH, WINDOW_HEIGHT,
            scale=SCALE,
            caption="Mazes",
            border_width=SCALE,
            border_color=pyxel.DEFAULT_PALETTE[5],
            fps=100
        )
        pyxel.mouse(True)
        pyxel.run(self.update, self.draw)

    def draw(self):
        pyxel.cls(0)
        for i, cell in enumerate(self.maze):
            x, y = i % BOARD_WIDTH, i // BOARD_WIDTH
            scr_x, scr_y = x * BLOCK_SIZE, y * BLOCK_SIZE
            pyxel.rect(scr_x, scr_y, BLOCK_SIZE, BLOCK_SIZE, WALL_COLOR)
            if cell & VISTED:
                pyxel.rect(scr_x + WALL_SIZE, scr_y + WALL_SIZE, CELL_SIZE, CELL_SIZE, CELL_COLOR)
            if cell & UP:
                pyxel.rect(scr_x + WALL_SIZE, scr_y, CELL_SIZE, WALL_SIZE, CELL_COLOR)
            if cell & LEFT:
                pyxel.rect(scr_x, scr_y + WALL_SIZE, WALL_SIZE, CELL_SIZE, CELL_COLOR)
            if cell & DOWN:
                pyxel.rect(scr_x + WALL_SIZE, scr_y + WALL_SIZE + CELL_SIZE, CELL_SIZE, WALL_SIZE, CELL_COLOR)
            if cell & RIGHT:
                pyxel.rect(scr_x + WALL_SIZE + CELL_SIZE, scr_y + WALL_SIZE, WALL_SIZE, CELL_SIZE, CELL_COLOR)
        x, y = self.pos
        pyxel.rectb(
            x * BLOCK_SIZE + WALL_SIZE,
            y * BLOCK_SIZE + WALL_SIZE,
            CELL_SIZE, CELL_SIZE,
            2 if self.running else 1
        )

    def update(self):
        if pyxel.btnp(pyxel.KEY_SPACE) or pyxel.btnp(pyxel.MOUSE_LEFT_BUTTON):
            self.running = not self.running
            if self.running and self.generator is None:
                self.generator = Generator(BOARD_WIDTH, BOARD_HEIGHT, self.pos)
        if self.running:
            next_maze, pos, done = self.generator.step()
            if done:
                self.running = False
                self.generator = None
            self.maze = next_maze
            self.pos = pos
        else:
            self.pos = (
                max(0, min(BOARD_WIDTH - 1, pyxel.mouse_x // BLOCK_SIZE)),
                max(0, min(BOARD_HEIGHT - 1, pyxel.mouse_y // BLOCK_SIZE))
            )


if __name__ == '__main__':
    App().run()
2.703125
3
bobjiang/settings.py
bobjiangps/django-blog
3
4603
<reponame>bobjiang/django-blog<filename>bobjiang/settings.py
"""
Django settings for bobjiang project.

Generated by 'django-admin startproject' using Django 2.0.6.

For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""

import os
import json

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

with open(os.path.join(BASE_DIR, "store.json"), "r") as store_file:
    STORED = json.load(store_file)

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = STORED['secret_key']

# SECURITY WARNING: don't run with debug turned on in production!
# DEBUG = True
DEBUG = False

RECORD_VISITOR = True
# RECORD_VISITOR = False

ALLOWED_HOSTS = ['*',]

APPEND_SLASH = True

# Application definition

INSTALLED_APPS = [
    'haystack',
    'blog.apps.BlogConfig',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'main',
    'comments',
    'ckeditor',
    'ckeditor_uploader',
    'tool',
    'accounting',
    #'xadmin',
    #'crispy_forms',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'bobjiang.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'bobjiang.context_processors.device'
            ],
        },
    },
]

WSGI_APPLICATION = 'bobjiang.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': STORED['db_name'],
        'USER': STORED['db_user'],
        'PASSWORD': STORED['<PASSWORD>'],
        'HOST': '127.0.0.1',
        'PORT': 3306,
        'OPTIONS': {
            'autocommit': True,
        },
    }
}

# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/

#LANGUAGE_CODE = 'en-us'
LANGUAGE_CODE = 'zh-hans'

TIME_ZONE = 'Asia/Shanghai'

USE_I18N = True

USE_L10N = True

USE_TZ = False

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/

STATIC_URL = '/static/'
#STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, "static"),
]
#STATIC_ROOT = '/home/bob/djproject/bobjiang/blog/static/'

MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'

CKEDITOR_UPLOAD_PATH = 'upload/'
CKEDITOR_IMAGE_BACKEND = 'pillow'
CKEDITOR_BROWSE_SHOW_DIRS = True
CKEDITOR_RESTRICT_BY_USER = True
CKEDITOR_CONFIGS = {
    'default': {
        'toolbar': (
            ['div', 'Source', '-', 'Save', 'NewPage', 'Preview', '-', 'Templates'],
            ['Cut', 'Copy', 'Paste', 'PasteText', 'PasteFromWord', '-', 'Print', 'SpellChecker', 'Scayt'],
            ['Undo', 'Redo', '-', 'Find', 'Replace', '-', 'SelectAll', 'RemoveFormat', '-', 'Maximize', 'ShowBlocks', '-', "CodeSnippet", 'Subscript', 'Superscript'],
            ['Form', 'Checkbox', 'Radio', 'TextField', 'Textarea', 'Select', 'Button', 'ImageButton', 'HiddenField'],
            ['Bold', 'Italic', 'Underline', 'Strike', '-'],
            ['NumberedList', 'BulletedList', '-', 'Outdent', 'Indent', 'Blockquote'],
            ['JustifyLeft', 'JustifyCenter', 'JustifyRight', 'JustifyBlock'],
            ['Link', 'Unlink', 'Anchor'],
            ['Image', 'Flash', 'Table', 'HorizontalRule', 'Smiley', 'SpecialChar', 'PageBreak'],
            ['Styles', 'Format', 'Font', 'FontSize'],
            ['TextColor', 'BGColor'],
        ),
        'extraPlugins': 'codesnippet',
    }
}

# haystack
HAYSTACK_CONNECTIONS = {
    'default': {
        'ENGINE': 'blog.whoosh_cn_backend.WhooshEngine',
        'PATH': os.path.join(BASE_DIR, 'whoosh_index'),
    },
}
HAYSTACK_SEARCH_RESULTS_PER_PAGE = 5
HAYSTACK_SIGNAL_PROCESSOR = 'haystack.signals.RealtimeSignalProcessor'
1.890625
2
Users/models.py
titusnjuguna/FreeDom
1
4604
from django.db import models
from django.contrib.auth.models import User
from PIL import Image


class Profile(models.Model):
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    image = models.ImageField(default='default.jpg', upload_to='profile_pic/')

    def __str__(self):
        return f'{self.user.username} Profile'

    def save(self, *args, **kwargs):
        super().save(*args, **kwargs)
        # Downscale oversized profile pictures in place. The field is named
        # `image`, and PIL images are opened with Image.open() (the original
        # called Image(self.prof_pic.path), which referenced a nonexistent
        # field and treated the PIL module as a callable).
        img = Image.open(self.image.path)
        if img.height > 300 and img.width > 300:
            output_size = (300, 300)
            img.thumbnail(output_size)
            img.save(self.image.path)
2.46875
2
dvc/__init__.py
zjj2wry/dvc
0
4605
<reponame>zjj2wry/dvc<filename>dvc/__init__.py
"""
DVC
----
Make your data science projects reproducible and shareable.
"""
import os
import warnings

VERSION_BASE = '0.23.2'
__version__ = VERSION_BASE

PACKAGEPATH = os.path.abspath(os.path.dirname(__file__))
HOMEPATH = os.path.dirname(PACKAGEPATH)
VERSIONPATH = os.path.join(PACKAGEPATH, 'version.py')


def _update_version_file():
    """Dynamically update version file."""
    from git import Repo
    from git.exc import InvalidGitRepositoryError

    try:
        repo = Repo(HOMEPATH)
    except InvalidGitRepositoryError:
        return __version__

    sha = repo.head.object.hexsha
    short_sha = repo.git.rev_parse(sha, short=6)
    dirty = '.mod' if repo.is_dirty() else ''
    ver = '{}+{}{}'.format(__version__, short_sha, dirty)

    # Write a helper file, that will be installed with the package
    # and will provide a true version of the installed dvc
    with open(VERSIONPATH, 'w+') as fobj:
        fobj.write('# AUTOGENERATED by dvc/__init__.py\n')
        fobj.write('version = "{}"\n'.format(ver))

    return ver


def _remove_version_file():
    """Remove version.py so that it doesn't get into the release."""
    if os.path.exists(VERSIONPATH):
        os.unlink(VERSIONPATH)


if os.path.exists(os.path.join(HOMEPATH, 'setup.py')):
    # dvc is run directly from source without installation or
    # __version__ is called from setup.py
    if os.getenv('APPVEYOR_REPO_TAG', '').lower() != 'true' \
            and os.getenv('TRAVIS_TAG', '') == '':
        __version__ = _update_version_file()
    else:  # pragma: no cover
        _remove_version_file()
else:  # pragma: no cover
    # dvc was installed with pip or something. Hopefully we have our
    # auto-generated version.py to help us provide a true version
    from dvc.version import version
    __version__ = version

VERSION = __version__

# Ignore numpy's runtime warnings: https://github.com/numpy/numpy/pull/432.
# We don't directly import numpy, but our dependency networkx does, causing
# these warnings in some environments. Luckily these warnings are benign and
# we can simply ignore them so that they don't show up when you are using dvc.
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
2.078125
2
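For reference, the version.py helper that _update_version_file() writes has this shape (the short SHA and .mod dirty suffix below are illustrative values, not real ones):

    # AUTOGENERATED by dvc/__init__.py
    version = "0.23.2+1a2b3c.mod"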
pkg_dir/src/utils/notion_utils.py
robperch/robase_datalysis
2
4606
## MODULE WITH UTIL FUNCTIONS - NOTION

"----------------------------------------------------------------------------------------------------------------------"
####################################################### Imports ########################################################
"----------------------------------------------------------------------------------------------------------------------"

## Standard library imports
import requests

## Third party imports
import pandas as pd

## Local application imports
from pkg_dir.config.config import (
    creds_file_path as crds_loc,
)
from pkg_dir.src.utils.general_utils import (
    read_yaml,
)

"----------------------------------------------------------------------------------------------------------------------"
####################################################### Functions ######################################################
"----------------------------------------------------------------------------------------------------------------------"

## Read notion database with api
def notion_api_call(db_api_url, db_id, headers):
    """
    Read notion database with api

    :param db_api_url (string): base url provided by Notion to make api calls
    :param db_id (string): unique id of the database that will be read
    :param headers (dictionary): dict with authorization and version info
    :return req (requests.Response): response after calling notion's api
    """

    ## Configuring reading URL
    read_url = db_api_url + db_id + "/query"

    ## Requesting info via the API
    req = requests.request(
        "POST",
        read_url,
        headers=headers
    )

    ## Verifying API call status
    print("API interaction status code: ", req.status_code)

    return req


## Calling a Notion database as a json via Notion's API
def get_notion_db_json(db_id):
    """
    Calling a Notion database as a json via Notion's API

    :param db_id (string): unique id of the database that will be called
    :return db_json (json): json with the notion's db contents
    """

    ## Reading credentials from yaml file
    yaml_file = read_yaml(crds_loc)
    notion_version = yaml_file["notion_api"]["notion_version"]
    db_api_url = yaml_file["notion_api"]["db_api_url"]
    api_key = yaml_file["notion_api"]["api_key"]

    ## Building headers for the API call
    headers = {
        "Authorization": "Bearer " + api_key,
        "Notion-Version": notion_version
    }

    ## Calling notion's api
    req = notion_api_call(db_api_url, db_id, headers)

    ## Converting the api response to a json
    db_json = req.json()

    return db_json


## Creating a schema of the notion database that was read
def create_notion_db_schema(db_json, relevant_properties):
    """
    Creating a schema of the notion database that was read

    :param db_json (json): json object obtained by calling notion's api
    :param relevant_properties (list): list of strings with the names of the relevant properties
    :return db_schema (dictionary): schema of the table that includes the properties' data type
    """

    ## Selecting a sample entry to go over all of its properties
    sample_entry = db_json["results"][0]["properties"]

    ## Building dictionary (schema) of the relevant properties and their datatypes
    db_schema = {
        prop: {
            "data_type": sample_entry[prop]["type"]
        }
        for prop in sample_entry
        if prop in relevant_properties
    }

    # print(db_schema)

    return db_schema


## Building the blueprint dictionary for the dataframe (orient=index)
def notion_db_blueprint_df(db_json, db_schema, index_prop):
    """
    Building the blueprint dictionary for the dataframe (orient=index)

    :param db_json (json): json object obtained by calling notion's api
    :param db_schema (dictionary): schema of the table that includes the properties' data type
    :param index_prop (string): name of the property that will serve as the df's index
    :return df_dict (dict): dictionary that will be used to create a dataframe with the json contents
    """

    ## Empty dictionary that will store all the results
    df_dict = {}

    ## Iterating over every row in the dataframe
    for row in db_json["results"]:

        ## Defining the table's base attributes
        #### All properties contained in the notion db
        row_props = row["properties"]
        #### Name of the index; key attribute in the notion db
        row_name = row_props[index_prop]["title"][0]["plain_text"]
        #### Empty list to store all the row contents
        row_contents = []

        ## Iterating over every relevant property in the table
        for col in db_schema:

            ## Identifying the datatype of the property
            data_type = db_schema[col]["data_type"]

            ## Set of conditions to determine how the row will be treated
            #### Skipping the index row
            if data_type == "title":
                continue

            #### Searching for data in specific locations for special data types (1)
            elif data_type in ["select", "person", "created_by"]:
                try:
                    row_contents.append(row_props[col][data_type]["name"])
                except:
                    row_contents.append("No_data")

            #### Searching for data in specific locations for special data types (2)
            elif data_type in ["rich_text"]:
                try:
                    row_contents.append(row_props[col][data_type][0]["text"]["content"])
                except:
                    row_contents.append("No_data")

            #### Searching for data in specific locations for special data types (3)
            elif data_type in ["formula"]:
                try:
                    #### Applying conditions based on the type of formula result
                    if row_props[col][data_type]["type"] == "string":
                        row_contents.append(row_props[col][data_type]["string"])
                    elif row_props[col][data_type]["type"] == "number":
                        row_contents.append(row_props[col][data_type]["number"])
                except:
                    row_contents.append("No_data")

            #### General procedure to find data
            else:
                row_contents.append(row_props[col][db_schema[col]["data_type"]])

        ## Saving the row contents gathered
        df_dict[row_name] = row_contents

    return df_dict


## Obtaining a dataframe from a notion database
def notion_json_to_df(db_json, relevant_properties):
    """
    Obtaining a dataframe from a notion database

    :param db_json (json): json object obtained by calling notion's api
    :param relevant_properties (list): list of strings with the names of the relevant properties
    :return df_n (dataframe): resulting dataframe created based on the blueprint generated
    """

    ## General parameters needed to build the dataframe

    #### Database schema
    db_schema = create_notion_db_schema(db_json, relevant_properties)

    #### Property that will be used as the dataframe's index
    index_prop = [prop for prop in db_schema if db_schema[prop]["data_type"] == "title"][0]

    ## Building the blueprint dictionary for the dataframe (orient=index)
    df_dict = notion_db_blueprint_df(db_json, db_schema, index_prop)

    ## Creating dataframe with the resulting blueprint dictionary

    #### Creating dataframe
    df_n = pd.DataFrame.from_dict(df_dict, orient="index")

    #### Inserting the table's index as a column at the end of the df
    df_n.insert(
        df_n.shape[1],
        index_prop,
        df_n.index
    )

    #### Resetting index
    df_n.reset_index(inplace=True, drop=True)

    #### Adjusting column names
    df_n.columns = [col_n for col_n in db_schema]

    return df_n


## Obtaining a Notion database as dataframe with the selected columns
def notion_db_to_df(db_id, relevant_properties):
    """
    Obtaining a Notion database as dataframe with the selected columns

    :param db_id (string): unique id to identify the notion database
    :param relevant_properties (list): list of strings with the names of the relevant properties
    :return df_n (dataframe): resulting dataframe created based on the blueprint generated
    """

    ## Calling a Notion database as a json via Notion's API
    db_json = get_notion_db_json(db_id)

    ## Obtaining a dataframe from a notion database
    df_n = notion_json_to_df(db_json, relevant_properties)

    return df_n

"----------------------------------------------------------------------------------------------------------------------"
"----------------------------------------------------------------------------------------------------------------------"
## END OF FILE ##
"----------------------------------------------------------------------------------------------------------------------"
"----------------------------------------------------------------------------------------------------------------------"
2.015625
2
libpermian/issueanalyzer/test_baseissue.py
velezd/permian
0
4607
<filename>libpermian/issueanalyzer/test_baseissue.py
import unittest
import logging
import contextlib

from libpermian.settings import Settings

from .proxy import IssueAnalyzerProxy
from .base import BaseAnalyzer, BaseIssue
from .issueset import IssueSet

LOGGER = logging.getLogger('test')


class NewIssue(BaseIssue):
    def submit(self):
        LOGGER.info('submit was called')
        return super().submit()

    def make(self):
        LOGGER.info('make was called')
        return 'http://issuetracker.example.com/new_issue'

    def update(self):
        LOGGER.info('update was called')

    def _lookup(self):
        LOGGER.info('lookup was called')
        return None

    @property
    def resolved(self):
        return False

    @property
    def report_url(self):
        return 'http://issuetracker.example.com/new/foo'


class TrackedUnresolvedIssue(NewIssue):
    def _lookup(self):
        LOGGER.info('lookup was called')
        return 'http://issuetracker.example.com/123'

    @property
    def resolved(self):
        return False

    @property
    def report_url(self):
        return 'http://issuetracker.example.com/new/bar'


class TrackedResolvedIssue(TrackedUnresolvedIssue):
    @property
    def resolved(self):
        return True


class TestNewIssue(unittest.TestCase):
    def setUp(self):
        self.settings = Settings({}, {}, [])
        self.issue = NewIssue(self.settings)

    def test_properties(self):
        self.assertTrue(self.issue.new)
        self.assertFalse(self.issue.tracked)
        self.assertEqual(self.issue.uri, None)

    def test_sync(self):
        # test lookup was called
        with self.assertLogs('test', level='INFO') as cm:
            self.issue.sync()
        self.assertEqual(cm.output, ['INFO:test:lookup was called'])
        self.test_properties()

    def test_str(self):
        self.assertEqual(str(self.issue), self.issue.report_url)


class TestTrackedUnresolvedIssue(TestNewIssue):
    def setUp(self):
        self.settings = Settings({}, {}, [])
        self.issue = TrackedUnresolvedIssue(self.settings)

    def test_properties(self):
        self.assertFalse(self.issue.new)
        self.assertTrue(self.issue.tracked)
        self.assertEqual(self.issue.uri, 'http://issuetracker.example.com/123')

    def test_str(self):
        self.assertEqual(str(self.issue), self.issue.uri)


# TrackedResolvedIssue should behave the same way as TrackedUnresolvedIssue
# so just inherit the whole test case to run the very same test
class TestTrackedResolvedIssue(TestTrackedUnresolvedIssue):
    def setUp(self):
        self.settings = Settings({}, {}, [])
        self.issue = TrackedResolvedIssue(self.settings)


class TestSubmitDisabled(unittest.TestCase):
    settings = Settings(
        {
            'issueAnalyzer': {
                'create_issues': False,
                'update_issues': False,
                'create_issues_instead_of_update': False,
            }
        },
        {}, []
    )

    def setUp(self):
        self.new = NewIssue(self.settings)
        self.unresolved = TrackedUnresolvedIssue(self.settings)
        self.resolved = TrackedResolvedIssue(self.settings)
        # sync the issues so that lookup is not called => logged during submit
        self.new.sync()
        self.unresolved.sync()
        self.resolved.sync()

    @contextlib.contextmanager
    def assertUnchanged(self, issue):
        old_uri = issue.uri
        old_new = issue.new
        old_tracked = issue.tracked
        yield issue
        self.assertEqual(issue.uri, old_uri)
        self.assertEqual(issue.new, old_new)
        self.assertEqual(issue.tracked, old_tracked)

    def assertSubmitNoop(self, issue):
        with self.assertUnchanged(issue):
            with self.assertLogs('test', level='INFO') as cm:
                issue.submit()
                issue.submit()
        self.assertEqual(cm.output, [
            "INFO:test:submit was called",
            "INFO:test:submit was called",
        ])

    def assertSubmitCreate(self, issue):
        with self.assertLogs('test', level='INFO') as cm:
            result1 = issue.submit()
            result2 = issue.submit()
        self.assertEqual(cm.output, [
            "INFO:test:submit was called",
            "INFO:test:make was called",
            "INFO:test:submit was called",
        ])
        self.assertEqual(result1, result2)
        return result1

    def assertSubmitUpdate(self, issue):
        with self.assertUnchanged(issue):
            with self.assertLogs('test', level='INFO') as cm:
                result1 = issue.submit()
                result2 = issue.submit()
        self.assertEqual(cm.output, [
            "INFO:test:submit was called",
            "INFO:test:update was called",
            "INFO:test:submit was called",
        ])
        self.assertEqual(result1, result2)
        return result1

    def testNew(self):
        self.assertSubmitNoop(self.new)

    def testUnresolved(self):
        self.assertSubmitNoop(self.unresolved)

    def testResolved(self):
        self.assertSubmitNoop(self.resolved)


class TestSubmitCreateUpdate(TestSubmitDisabled):
    settings = Settings(
        {
            'issueAnalyzer': {
                'create_issues': True,
                'update_issues': True,
                'create_issues_instead_of_update': False,
            }
        },
        {}, []
    )

    def testNew(self):
        result = self.assertSubmitCreate(self.new)
        self.assertTrue(self.new.new)
        self.assertTrue(self.new.tracked)
        self.assertEqual(result, 'http://issuetracker.example.com/new_issue')
        self.assertEqual(result, self.new.uri)
        # repeated submit doesn't do anything
        with self.assertUnchanged(self.new):
            with self.assertLogs('test', level='INFO') as cm:
                result = self.new.submit()
        self.assertEqual(cm.output, [
            "INFO:test:submit was called",
        ])

    def testUnresolved(self):
        self.assertSubmitUpdate(self.unresolved)

    def testResolved(self):
        self.assertSubmitUpdate(self.resolved)


class TestSubmitCreateOnlyNew(TestSubmitCreateUpdate):
    settings = Settings(
        {
            'issueAnalyzer': {
                'create_issues': True,
                'update_issues': False,
                'create_issues_instead_of_update': False,
            }
        },
        {}, []
    )

    def testUnresolved(self):
        self.assertSubmitNoop(self.unresolved)

    def testResolved(self):
        self.assertSubmitNoop(self.resolved)


class TestSubmitUpdateOnlyTracked(TestSubmitCreateUpdate):
    settings = Settings(
        {
            'issueAnalyzer': {
                'create_issues': False,
                'update_issues': True,
                'create_issues_instead_of_update': False,
            }
        },
        {}, []
    )

    def testNew(self):
        self.assertSubmitNoop(self.new)


class TestSubmitCreateAlwaysWithUpdateOff(TestSubmitCreateUpdate):
    settings = Settings(
        {
            'issueAnalyzer': {
                'create_issues': True,
                'update_issues': False,  # This should have no effect
                'create_issues_instead_of_update': True,
            }
        },
        {}, []
    )

    def testUnresolved(self):
        old_uri = self.unresolved.uri
        result = self.assertSubmitCreate(self.unresolved)
        self.assertEqual(result, 'http://issuetracker.example.com/new_issue')
        self.assertEqual(self.unresolved.uri, result)
        self.assertNotEqual(result, old_uri)

    def testResolved(self):
        old_uri = self.resolved.uri
        result = self.assertSubmitCreate(self.resolved)
        self.assertEqual(result, 'http://issuetracker.example.com/new_issue')
        self.assertEqual(self.resolved.uri, result)
        self.assertNotEqual(result, old_uri)


# The update_issues setting should have no effect when
# create_issues_instead_of_update is set to True.
class TestSubmitCreateAlwaysWithUpdateOn(TestSubmitCreateAlwaysWithUpdateOff):
    settings = Settings(
        {
            'issueAnalyzer': {
                'create_issues': True,
                'update_issues': True,  # This should have no effect
                'create_issues_instead_of_update': True,
            }
        },
        {}, []
    )
2.390625
2
naplib/alignment/prosodylab_aligner/__main__.py
gavinmischler/naplib-python
1
4608
<filename>naplib/alignment/prosodylab_aligner/__main__.py
# Copyright (c) 2011-2014 <NAME> and <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

"""
Command-line driver for the module
"""

import logging
import os
import sys
import yaml

from bisect import bisect
from shutil import copyfile

from textgrid import MLF

from corpus import Corpus
from aligner import Aligner
from archive import Archive
from utilities import splitname, resolve_opts, \
    ALIGNED, CONFIG, HMMDEFS, MACROS, SCORES

from argparse import ArgumentParser

DICTIONARY = "eng.dict"
MODEL = "eng.zip"

LOGGING_FMT = "%(message)s"

# parse arguments
argparser = ArgumentParser(prog="{} -m aligner".format(sys.executable),
                           description="Prosodylab-Aligner")
argparser.add_argument("-c", "--configuration",
                       help="config file")
argparser.add_argument("-d", "--dictionary", metavar="DICT", action="append",
                       help="dictionary file (default: {}) (can specify multiple)".format(DICTIONARY))
argparser.add_argument("-s", "--samplerate", type=int,
                       help="analysis samplerate (in Hz)")
argparser.add_argument("-e", "--epochs", type=int,
                       help="# of epochs of training per round")
input_group = argparser.add_argument_group()
input_group.add_argument("-r", "--read",
                         help="source for a precomputed acoustic model")
input_group.add_argument("-t", "--train",
                         help="directory containing data for training")
output_group = argparser.add_mutually_exclusive_group(required=True)
output_group.add_argument("-a", "--align",
                          help="directory containing data to align")
output_group.add_argument("-w", "--write",
                          help="destination for computed acoustic model")
verbosity_group = argparser.add_mutually_exclusive_group()
verbosity_group.add_argument("-v", "--verbose", action="store_true",
                             help="Verbose output")
verbosity_group.add_argument("-V", "--extra-verbose", action="store_true",
                             help="Even more verbose output")
args = argparser.parse_args()

# hack to allow proper override of default dictionary
if not args.dictionary:
    args.dictionary = [DICTIONARY]

# set up logging
loglevel = logging.WARNING
if args.extra_verbose:
    loglevel = logging.DEBUG
elif args.verbose:
    loglevel = logging.INFO
logging.basicConfig(format=LOGGING_FMT, level=loglevel)

# input: pick one
if args.train:
    if args.read:
        logging.error("Cannot train on persistent model.")
        exit(1)
    logging.info("Preparing corpus '{}'.".format(args.train))
    opts = resolve_opts(args)
    corpus = Corpus(args.train, opts)
    logging.info("Preparing aligner.")
    aligner = Aligner(opts)
    logging.info("Training aligner on corpus '{}'.".format(args.train))
    aligner.HTKbook_training_regime(corpus, opts["epochs"],
                                    flatstart=(args.read is None))
else:
    if not args.read:
        args.read = MODEL
    logging.info("Reading aligner from '{}'.".format(args.read))
    # warn about irrelevant flags
    if args.configuration:
        logging.warning("Ignoring config flag (-c/--configuration).")
        args.configuration = None
    if args.epochs:
        logging.warning("Ignoring epochs flag (-e/--epochs).")
    if args.samplerate:
        logging.warning("Ignoring samplerate flag (-s/--samplerate).")
        args.samplerate = None
    # create archive from -r argument
    archive = Archive(args.read)
    # read configuration file therefrom, and resolve options with it
    args.configuration = os.path.join(archive.dirname, CONFIG)
    opts = resolve_opts(args)
    # initialize aligner and set it to point to the archive data
    aligner = Aligner(opts)
    aligner.curdir = archive.dirname

# output: pick one
if args.align:
    # check to make sure we're not aligning on the training data
    if (not args.train) or (os.path.realpath(args.train) != os.path.realpath(args.align)):
        logging.info("Preparing corpus '{}'.".format(args.align))
        corpus = Corpus(args.align, opts)
    logging.info("Aligning corpus '{}'.".format(args.align))
    aligned = os.path.join(args.align, ALIGNED)
    scores = os.path.join(args.align, SCORES)
    aligner.align_and_score(corpus, aligned, scores)
    logging.debug("Wrote MLF file to '{}'.".format(aligned))
    logging.debug("Wrote likelihood scores to '{}'.".format(scores))
    logging.info("Writing TextGrids.")
    size = MLF(aligned).write(args.align)
    if not size:
        logging.error("No paths found!")
        exit(1)
    logging.debug("Wrote {} TextGrids.".format(size))
elif args.write:
    # create and populate archive
    (_, basename, _) = splitname(args.write)
    archive = Archive.empty(basename)
    archive.add(os.path.join(aligner.curdir, HMMDEFS))
    archive.add(os.path.join(aligner.curdir, MACROS))
    # whatever this is, it's not going to work once you move the data
    if "dictionary" in opts:
        del opts["dictionary"]
    with open(os.path.join(archive.dirname, CONFIG), "w") as sink:
        yaml.dump(opts, sink)
    (basename, _) = os.path.splitext(args.write)
    archive_path = os.path.relpath(archive.dump(basename))
    logging.info("Wrote aligner to '{}'.".format(archive_path))
# else unreachable

logging.info("Success!")
2.28125
2
init/build_statements.py
andgein/sis-2017-winter-olymp
0
4609
<reponame>andgein/sis-2017-winter-olymp<filename>init/build_statements.py
#!/usr/bin/env python3
import codecs
import os
import os.path
import shutil
import subprocess
import logging
import glob
import json

CONTEST_DIR = 'polygon-contest'
INIT_FILE = 'init.txt'
BUILD_DIR = 'build'
LANGUAGE = 'russian'
FILES_DIR = 'files-' + LANGUAGE


def time_limit_from_int(tl):
    tl //= 1000
    return str(tl) + ' секунд' + ('a' if tl == 1 else 'ы')


def memory_limit_from_int(ml):
    return str(ml // (1024 ** 2)) + ' мегабайт'


def build_with_text(text, replace_data, result, section='', problem_name=''):
    text = text.replace('%TEXT%', section + '\n' + replace_data)
    with codecs.open(os.path.join(BUILD_DIR, 'data.tex'), 'w', 'utf-8') as data_file:
        data_file.write(text)
    cwd = os.getcwd()
    os.chdir(BUILD_DIR)
    logging.info('Compile problem %s' % problem_name)
    for _ in range(2):
        subprocess.check_output(['pdflatex', 'compile.tex'])
    os.chdir(cwd)
    shutil.copy(os.path.join(BUILD_DIR, 'compile.pdf'), os.path.join(FILES_DIR, result))


def main():
    id_by_name = {}
    with open(INIT_FILE, 'r', encoding='utf-8') as init:
        for line in init:
            if not line.strip():
                continue
            line = line.strip().split('\t')
            id_by_name[line[11]] = line[2]

    logging.basicConfig(level=logging.DEBUG, format='%(asctime)s [%(levelname)s] %(message)s')
    if not os.path.exists(FILES_DIR):
        logging.info('Create folder for output files: %s' % FILES_DIR)
        os.mkdir(FILES_DIR)
    if not os.path.exists(BUILD_DIR):
        logging.info('Create folder for build files: %s' % BUILD_DIR)
        os.mkdir(BUILD_DIR)

    problems_dir = os.path.join(CONTEST_DIR, 'problems')
    for problem_counter, problem_dir in enumerate(glob.glob(os.path.join(problems_dir, '*')), start=1):
        statement_dir = os.path.join(problem_dir, 'statements', LANGUAGE)
        properties_file_name = os.path.join(statement_dir, 'problem-properties.json')
        logging.info('Read problem properties file %s' % properties_file_name)
        with codecs.open(properties_file_name, 'r', 'utf-8') as properties_file:
            properties = json.load(properties_file)

        name = properties['name']
        legend = properties['legend']
        input_file = properties['inputFile']
        output_file = properties['outputFile']
        time_limit = time_limit_from_int(properties['timeLimit'])
        memory_limit = memory_limit_from_int(properties['memoryLimit'])
        input_format = properties['input']
        output_format = properties['output']
        samples = "".join(["\exmp{%s}{%s}%%\n" % (sample['input'], sample['output'])
                           for sample in properties['sampleTests']])
        notes = ''
        if len(properties.get('notes', '')) > 0:
            notes = '\\Note\n' + properties['notes']

        shutil.copy('template.tex', os.path.join(BUILD_DIR, 'compile.tex'))
        shutil.copy('olymp.sty', os.path.join(BUILD_DIR, 'olymp.sty'))
        with codecs.open('data.tex', 'r', 'utf-8') as data_file:
            data = data_file.read()

        problem_name = os.path.basename(problem_dir)
        problem_id = id_by_name[problem_name]
        data = data.replace('%NAME%', name).replace('%INPUT_FILE%', input_file).replace('%OUTPUT_FILE%', output_file).\
            replace('%TIME_LIMIT%', time_limit).replace('%MEMORY_LIMIT%', memory_limit).\
            replace('%ID%', problem_id).\
            replace('%PROBLEM_COUNTER%', str(problem_counter)).\
            replace('%STATEMENT_DIR%', os.path.join('..', statement_dir).replace('\\', '/') + '/')

        build_with_text(data,
                        legend + '\n\\InputFile\n' + input_format + '\n\\OutputFile\n' + output_format +
                        "\\begin{example}" + samples + "\\end{example}\n" + notes,
                        problem_name + '.pdf', problem_name=problem_name)


if __name__ == '__main__':
    main()
2.078125
2
conanfile.py
mmurooka/mc_rtc_data
1
4610
# -*- coding: utf-8 -*-
#

from conans import python_requires
import conans.tools as tools
import os

base = python_requires("Eigen3ToPython/latest@multi-contact/dev")


class MCRTCDataConan(base.Eigen3ToPythonConan):
    name = "mc_rtc_data"
    version = "1.0.4"
    description = "Environments/Robots description for mc_rtc"
    topics = ("robotics", "data")
    url = "https://github.com/jrl-umi3218/mc_rtc_data"
    homepage = "https://github.com/jrl-umi3218/mc_rtc_data"
    author = "<NAME> <<EMAIL>>"
    license = "BSD-2-Clause"
    exports = ["LICENSE"]
    exports_sources = ["CMakeLists.txt", "conan/CMakeLists.txt", "cmake/*",
                       "jvrc_description/*", "mc_env_description/*",
                       "mc_int_obj_description/*", "mc_rtc_data/*"]
    generators = "cmake"
    settings = "os", "arch"
    requires = ()

    def config_options(self):
        del self.options.python2_version
        del self.options.python3_version

    def package_id(self):
        pass

    def package(self):
        cmake = self._configure_cmake()
        cmake.install()
        tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig"))
        for f in [".catkin", "_setup_util.py", "env.sh", "setup.bash",
                  "local_setup.bash", "setup.sh", "local_setup.sh",
                  "setup.zsh", "local_setup.zsh", ".rosinstall"]:
            p = os.path.join(self.package_folder, f)
            if os.path.exists(p):
                os.remove(p)
1.757813
2
hyperion/migrations/0006_auto_20190218_2251.py
ExiaSR/hyperion
3
4611
<filename>hyperion/migrations/0006_auto_20190218_2251.py
# Generated by Django 2.1.5 on 2019-02-18 22:51

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('hyperion', '0005_auto_20190212_2116'),
    ]

    operations = [
        migrations.RenameField(
            model_name='post',
            old_name='visibleTo',
            new_name='visible_to',
        ),
        migrations.AddField(
            model_name='post',
            name='content_type',
            field=models.CharField(
                choices=[('1', 'text/plain'), ('2', 'text/markdown'),
                         ('3', 'image/png;base64'), ('4', 'image/jpeg;base64'),
                         ('5', 'application/base64')],
                default='1', max_length=1),
        ),
        migrations.AddField(
            model_name='post',
            name='visibility',
            field=models.CharField(
                choices=[('1', 'PUBLIC'), ('2', 'FOAF'), ('3', 'FRIENDS'),
                         ('4', 'PRIVATE'), ('5', 'SERVERONLY')],
                default='1', max_length=1),
        ),
        migrations.AlterField(
            model_name='comment',
            name='author',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name='comments', to='hyperion.UserProfile'),
        ),
    ]
1.890625
2
pottan_ocr/utils.py
nithyadurai87/pottan-ocr-tamil
5
4612
import torch
import json
import numpy as np
from torch.autograd import Variable
import gzip
import yaml
from re import split
from matplotlib import pyplot


def showImg(im):
    pyplot.imshow(im)
    pyplot.show()


def myOpen(fname, mode):
    return open(fname, mode, encoding="utf-8")


def readFile(fname):
    opener, mode = (gzip.open, 'rt') if fname[-3:] == '.gz' else (open, 'r')
    with opener(fname, mode) as f:
        return f.read()


def readLines(fname):
    return split('[\r\n]', readFile(fname))


def readJson(fname):
    with myOpen(fname, 'r') as f:
        return json.load(f)


def writeFile(fname, contents):
    with myOpen(fname, 'w') as f:
        f.write(contents)


def writeJson(fname, data):
    with myOpen(fname, 'w') as outfile:
        json.dump(data, outfile)


def readYaml(fname):
    with myOpen(fname, 'r') as fp:
        return yaml.load(fp)


config = readYaml('./config.yaml')


class averager(object):
    """Compute average for `torch.Variable` and `torch.Tensor`."""

    def __init__(self):
        self.reset()

    def add(self, v):
        if isinstance(v, Variable):
            count = v.data.numel()
            v = v.data.sum()
        elif isinstance(v, torch.Tensor):
            count = v.numel()
            v = v.sum()
        self.n_count += count
        self.sum += v

    def reset(self):
        self.n_count = 0
        self.sum = 0

    def val(self):
        res = 0
        if self.n_count != 0:
            res = self.sum / float(self.n_count)
        return res


def loadTrainedModel(model, opt):
    """Load a pretrained model into given model"""
    print('loading pretrained model from %s' % opt.crnn)
    if opt.cuda:
        stateDict = torch.load(opt.crnn)
    else:
        stateDict = torch.load(opt.crnn, map_location={'cuda:0': 'cpu'})
    # Handle the case of some old torch version. It will save the data as module.<xyz>. Handle it
    if list(stateDict.keys())[0][:7] == 'module.':
        for key in list(stateDict.keys()):
            stateDict[key[7:]] = stateDict[key]
            del stateDict[key]
    model.load_state_dict(stateDict)
    print('Completed loading pre trained model')
2.34375
2
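The averager class above depends only on torch; a quick sketch of its accumulation behavior, assuming the class has been copied out of the module (importing the module itself reads ./config.yaml at import time):

    import torch

    acc = averager()
    acc.add(torch.tensor([1.0, 2.0, 3.0]))  # count 3, running sum 6.0
    acc.add(torch.tensor([4.0]))            # count 4, running sum 10.0
    print(acc.val())                        # tensor(2.5000), i.e. 10 / 4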
pyvips/error.py
kleisauke/pyvips
0
4613
<gh_stars>0
# errors from libvips

import sys
import logging

from pyvips import ffi, vips_lib

logger = logging.getLogger(__name__)

_is_PY3 = sys.version_info[0] == 3

if _is_PY3:
    text_type = str
else:
    text_type = unicode

ffi.cdef('''
    const char* vips_error_buffer (void);
    void vips_error_clear (void);
''')


def _to_bytes(x):
    """Convert to a byte string.

    Convert a Python unicode string to a utf-8-encoded byte string. You must
    call this on strings you pass to libvips.

    """
    if isinstance(x, text_type):
        x = x.encode()
    return x


def _to_string(x):
    """Convert to a unicode string.

    If x is a byte string, assume it is utf-8 and decode to a Python unicode
    string. You must call this on text strings you get back from libvips.

    """
    if _is_PY3 and isinstance(x, bytes):
        x = x.decode('utf-8')
    return x


class Error(Exception):
    """An error from vips.

    Attributes:
        message (str): a high-level description of the error
        detail (str): a string with some detailed diagnostics

    """

    def __init__(self, message, detail=None):
        self.message = message
        if detail is None or detail == "":
            detail = _to_string(ffi.string(vips_lib.vips_error_buffer()))
            vips_lib.vips_error_clear()
        self.detail = detail

        logger.debug('Error %s %s', self.message, self.detail)

    def __str__(self):
        return '{0}\n  {1}'.format(self.message, self.detail)


__all__ = [
    '_to_bytes', '_to_string', 'Error',
]
2.46875
2
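The _to_bytes/_to_string helpers above are pure Python (only the Error class touches libvips through ffi); a round-trip sketch, assuming the two functions are available:

    b = _to_bytes('café')   # b'caf\xc3\xa9', utf-8 encoded for libvips
    s = _to_string(b)       # 'café' again on Python 3
    assert s == 'café'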
population_estimator/curses_io.py
cruzanta/population-estimator
1
4614
<filename>population_estimator/curses_io.py
#!/usr/bin/env python
"""
Module for painting output on and obtaining input from a text-based terminal
window using the curses library.
"""

import curses
import textwrap


def display_string(screen, a_string, output_line):
    # Paints a string on a text-based terminal window.
    _, width = screen.getmaxyx()
    try:
        screen.addstr(output_line, 0, textwrap.fill(a_string, width - 1))
    except curses.error:
        screen.addstr(0, 0, textwrap.fill(
            'Terminal window too small for output! Please resize. ', width - 1))

    return output_line


def display_list_items(screen, a_list, output_line):
    # Paints each item of a list on a text-based terminal window.
    for item in a_list:
        output_line = display_string(screen, '%s' % (item), output_line)
        output_line += 1

    return output_line


def display_formatted_dict(screen, dct, output_line):
    # Paints each key, value pair of a dict on a text-based terminal window.
    for key, value in dct.items():
        if isinstance(value, int):
            value = '{:,}'.format(value)
        formatted_dict = '%s: %s' % (key, value)
        output_line = display_string(screen, formatted_dict, output_line)
        output_line += 1

    return output_line


def display_string_with_prompt(screen, first_line_num, a_string, prompt):
    """Paints two strings and accepts input.

    Paints two strings on a text-based terminal window. The latter of the two
    strings serves as the prompt for the user to enter input.

    Args:
        screen: A window object that represents the text-based terminal window.
        first_line_num: An integer that represents the location along the
            y-axis of the terminal window where the first character of the
            first string is painted.
        a_string: The first string that is painted on the terminal window.
        prompt: A string that serves as a prompt for the user to enter input.

    Returns:
        A string that the user enters in as input.
    """
    screen.clear()
    output_line = first_line_num
    output_line = display_string(screen, a_string, output_line)
    output_line += 3
    output_line = display_string(screen, prompt, output_line)
    screen.refresh()

    return screen.getstr(output_line, len(prompt) + 1)


def display_list_items_with_prompt(screen, first_line_num, a_string, a_list,
                                   prompt):
    """Paints a string, each item of a list, and accepts input.

    Paints a string, each item of a list, and another string on a text-based
    terminal window. Each item of the list is painted on its own line. The
    second string serves as a prompt for the user to enter input.

    Args:
        screen: A window object that represents the text-based terminal window.
        first_line_num: An integer that represents the location along the
            y-axis of the terminal window where the first character of the
            first string is painted.
        a_string: The first string that is painted on the terminal window.
        a_list: A list whose items are painted on each line of the terminal
            window.
        prompt: A string that serves as a prompt for the user to enter input.

    Returns:
        A string that the user enters in as input.
    """
    screen.clear()
    output_line = first_line_num
    output_line = display_string(screen, a_string, output_line)
    output_line += 2
    output_line = display_list_items(screen, a_list, output_line)
    output_line += 1
    output_line = display_string(screen, prompt, output_line)
    screen.refresh()

    return screen.getstr(output_line, len(prompt) + 1)


def display_formatted_dicts_with_prompt(screen, first_line_num, a_string,
                                        list_of_dicts, prompt):
    """Paints a string, each item of each dict in a list, and accepts input.

    Paints a string, each item of each dict in a list, and another string on a
    text-based terminal window. Each key, value pair of each dict is painted
    on its own line with the key and value separated by a colon. The second
    string serves as a prompt for the user to enter input.

    Args:
        screen: A window object that represents the text-based terminal window.
        first_line_num: An integer that represents the location along the
            y-axis of the terminal window where the first character of the
            first string is painted.
        a_string: The first string that is painted on the terminal window.
        list_of_dicts: A list of dictionaries whose key, value pairs are
            painted on their own line of the terminal window.
        prompt: A string that serves as a prompt for the user to enter input.

    Returns:
        A string that the user enters in as input.
    """
    screen.clear()
    output_line = first_line_num
    output_line = display_string(screen, a_string, output_line)
    output_line += 2
    for dct in list_of_dicts:
        output_line = display_formatted_dict(screen, dct, output_line)
        output_line += 1
    output_line += 1
    output_line = display_string(screen, prompt, output_line)
    screen.refresh()

    return screen.getstr(output_line, len(prompt) + 1)


def get_user_menu_selection(screen, first_line_num, a_string, menu_items,
                            prompt):
    """Paints a string, a menu, and accepts input.

    Paints a string, a menu, and another string on a text-based terminal
    window. The menu is composed of the items in a list, and each item is
    assigned its own number that represents the order in which the item
    appears in the menu. The second string serves as a prompt for the user to
    enter a number from the menu.

    Args:
        screen: A window object that represents the text-based terminal window.
        first_line_num: An integer that represents the location along the
            y-axis of the terminal window where the first character of the
            first string is painted.
        a_string: The first string that is painted on the terminal window.
        menu_items: A list whose items are painted on each line of the
            terminal window as menu options.
        prompt: A string that serves as a prompt for the user to enter a
            number from the menu.

    Returns:
        A string representation of the item in 'menu_items' that the user
        selects.
    """
    # Create a dictionary that contains the items in 'menu_items'. Each item
    # is added as a value with an integer key that represents the order in
    # which the item will appear in the menu.
    item_key = 1
    selection_items = {}
    for item in menu_items:
        selection_items['%s' % (item_key)] = item
        item_key += 1

    # Display the menu and prompt the user for a selection.
    while True:
        screen.clear()
        output_line = first_line_num
        output_line = display_string(screen, a_string, output_line)
        output_line += 3
        for menu_num in sorted(selection_items.iterkeys()):
            item_line = '%s) %s' % (menu_num, selection_items[menu_num])
            output_line = display_string(screen, item_line, output_line)
            output_line += 1
        output_line += 1
        output_line = display_string(screen, prompt, output_line)
        screen.refresh()
        input = screen.getstr(output_line, len(prompt) + 1)
        if input not in selection_items.keys():
            continue  # Force the user to enter a valid selection.
        else:
            return selection_items[input]
3.875
4
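A minimal usage sketch for the module above, not part of the original file: it assumes the import path implied by the <filename> header (population_estimator.curses_io), and the menu items and prompt text are illustrative only. curses.wrapper handles terminal setup and restores it on exit.

import curses

from population_estimator import curses_io


def main(screen):
    # Ask the user to pick from a small illustrative menu, then echo it back.
    choice = curses_io.get_user_menu_selection(
        screen, 0, 'Population Estimator', ['Country', 'State', 'City'],
        'Enter a number:')
    curses_io.display_string_with_prompt(
        screen, 0, 'You selected: %s' % choice, 'Press Enter to quit.')


if __name__ == '__main__':
    curses.wrapper(main)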
tools/micropython-mockup/urandom.py
hwinther/lanot
0
4615
def randrange(n, y): pass
0.902344
1
SAP/released_tr_email_sender/ui.py
botisko/personal_programs
0
4616
<gh_stars>0
import json
from tkinter import *
from tkinter import ttk
from tkinter import messagebox

from tr_data import TRData, NO_DATA_MEETS_CRITERIA
from email_text import email_body_template
from helpers import send_email

RECIPIENT = <email_address>
EXCEPTION_FILE = "tr_number_exceptions.json"


class TrEmailSender:
    def __init__(self, transport_requests: TRData):
        self.transport_requests = transport_requests
        self.exceptions = self.load_exceptions()

        # WINDOW CREATION
        self.window = Tk()
        self.window.title("Send email with import requests to TST")
        self.window.config(padx=20, pady=20)

        # TITLE LABEL
        self.title_lbl = Label(
            text="Please select TRs to be included into email: ",
        )

        # BUTTONS
        self.refresh_btn = Button(text="REFRESH", command=self.refresh)
        self.exceptions_btn = Button(text="Add to exceptions", command=self.add_to_exceptions)
        self.select_all_btn = Button(text="Select All", command=self.select_all)
        self.send_btn = Button(text="SEND", command=self.send_email)

        # list of TRs
        columns_labels = {
            'tr_number': ("TR Number", 100),
            'description': ("Description", 350),
            'tkt_type': ("Ticket Type", 80),
            'ticket_num': ("Ticket Number", 80),
            'module': ("SAP Module", 80),
            'export_datetime': ("Export Timestamp", 150),
            'owner': ("Owner", 80)
        }

        # TREE VIEW for list display
        self.tr_tree_view = ttk.Treeview(columns=tuple(columns_labels.keys()), show='headings')

        # Update columns
        for column, (label, field_length) in columns_labels.items():
            self.tr_tree_view.column(column, minwidth=80, width=field_length, anchor='w', stretch=False)
            self.tr_tree_view.heading(column, text=label)

        # insert data
        self.populate_tree_view_lines()

        # LAYOUT PLACEMENT
        self.title_lbl.grid(row=0, column=0, sticky=W)
        self.tr_tree_view.grid(row=1, column=0, rowspan=4)
        self.refresh_btn.grid(row=1, column=1, sticky=N+S+E+W, padx=2, pady=2)
        self.exceptions_btn.grid(row=2, column=1, sticky=E+W+S, padx=1, pady=2)
        self.select_all_btn.grid(row=3, column=1, sticky=E+W+N, padx=1, pady=2)
        self.send_btn.grid(row=4, column=1, sticky=N+S+E+W, padx=1, pady=2)

        # DISPLAY WINDOW
        self.window.mainloop()

    def refresh(self):
        # delete all rows in tree view
        for item in self.tr_tree_view.get_children():
            self.tr_tree_view.delete(item)

        # update with new data
        self.transport_requests.refresh()
        self.exceptions = self.load_exceptions()
        self.populate_tree_view_lines()

    def populate_tree_view_lines(self):
        all_are_in_exceptions = True
        for (tr_number, export_timestamp, owner, description,
             ticket_number, sap_module, ticket_type) in self.transport_requests.data:
            # check if not in exception
            if tr_number not in self.exceptions:
                year = export_timestamp[:4]
                month = export_timestamp[4:6]
                day = export_timestamp[6:8]
                time = f"{export_timestamp[8:10]}:{export_timestamp[10:12]}:{export_timestamp[12:]}"
                export_date_time = f"{day}/{month}/{year} - {time}"
                line_values = (tr_number, description, ticket_type, ticket_number,
                               sap_module, export_date_time, owner)
                self.tr_tree_view.insert('', 'end', values=line_values)
                all_are_in_exceptions = False

        # if all TRs are in exceptions, insert only pre-defined information
        if all_are_in_exceptions:
            tr_number = NO_DATA_MEETS_CRITERIA[0][0]
            description = NO_DATA_MEETS_CRITERIA[0][3]
            no_data_information = (tr_number, description, "", "", "", "", "")
            self.tr_tree_view.insert('', 'end', values=no_data_information)

    def select_all(self):
        items = self.tr_tree_view.get_children()
        self.tr_tree_view.selection_add(items)

    def get_selected_item_ids(self):
        return self.tr_tree_view.selection()

    def send_email(self):
        # get selected lines
        selected_ids = self.get_selected_item_ids()

        # get data of each id
        if not selected_ids:
            messagebox.showinfo(
                title="Status Info",
                message="There is nothing to send.\n\nPlease refresh the page."
            )
            return None

        email_details = self.prepare_email_details(selected_ids)

        # send email
        if send_email(**email_details):
            messagebox.showinfo(title="Status Info", message="Email has been sent!")
            # add trs into exceptions
            return self.add_to_exceptions()
        else:
            return None

    def prepare_email_details(self, selected_ids):
        transport_data = [self.tr_tree_view.item(id_tag, 'values') for id_tag in selected_ids]

        # prepare list of transports for email body
        html_list_of_trs = ""
        ticket_numbers = set()
        for (tr_number, description, ticket_type, ticket_number,
             sap_module, export_timestamp, owner) in transport_data:
            html_list_of_trs += f"<li>{tr_number} - {owner} - {description}</li>"
            ticket_numbers.add(ticket_number)

        # prepare email details
        email_details = {
            'recipient': RECIPIENT,
            'subject': f"Transport requests for: {', '.join(sorted(ticket_numbers))}",
            'html_body': email_body_template.format(html_list_of_trs)
        }
        return email_details

    def load_exceptions(self):
        try:
            with open(file=EXCEPTION_FILE, mode='r') as file:
                exception_list = set(json.load(file)['tr_numbers'])
        except FileNotFoundError:
            with open(file=EXCEPTION_FILE, mode='w') as file:
                exception_dict = {'tr_numbers': []}
                json.dump(exception_dict, file, indent=4)
            return set()
        else:
            return exception_list

    def add_to_exceptions(self):
        selected_ids = self.get_selected_item_ids()
        if not selected_ids:
            messagebox.showinfo(
                title="Status Info",
                message="Nothing has been selected.\n\nPlease refresh the page."
            )
            return None

        transport_numbers = [self.tr_tree_view.item(id_tag, 'values')[0] for id_tag in selected_ids]

        # add TR number of selected items to exception json file
        for number in transport_numbers:
            self.exceptions.add(number)
        updated_data = {'tr_numbers': list(self.exceptions)}
        with open(file=EXCEPTION_FILE, mode='w') as file:
            json.dump(updated_data, file, indent=4)

        return self.refresh()
2.71875
3
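A hedged launch sketch for the class above. TRData's constructor arguments are not shown in this file, so the zero-argument call below is an assumption, as is importing the class from a module named ui (taken from the file path):

from tr_data import TRData
from ui import TrEmailSender  # module name inferred from the file path above

if __name__ == '__main__':
    transport_requests = TRData()        # assumption: construction details not shown here
    TrEmailSender(transport_requests)    # blocks in Tk's mainloop until the window closes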
AI-Practice-Tensorflow-Notes-master/opt/opt4_8_backward.py
foochane/Tensorflow-Learning
2
4617
# coding:utf-8
# 0. Import modules and generate the simulated dataset
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import opt4_8_generateds
import opt4_8_forward

STEPS = 40000
BATCH_SIZE = 30
LEARNING_RATE_BASE = 0.001
LEARNING_RATE_DECAY = 0.999
REGULARIZER = 0.01


def backward():
    x = tf.placeholder(tf.float32, shape=(None, 2))
    y_ = tf.placeholder(tf.float32, shape=(None, 1))

    X, Y_, Y_c = opt4_8_generateds.generateds()

    y = opt4_8_forward.forward(x, REGULARIZER)

    global_step = tf.Variable(0, trainable=False)

    learning_rate = tf.train.exponential_decay(
        LEARNING_RATE_BASE,
        global_step,
        300 / BATCH_SIZE,
        LEARNING_RATE_DECAY,
        staircase=True)

    # Define the loss function: MSE plus the collected L2 regularization losses
    loss_mse = tf.reduce_mean(tf.square(y - y_))
    loss_total = loss_mse + tf.add_n(tf.get_collection('losses'))

    # Define the backpropagation method: includes regularization.
    # Passing global_step lets the optimizer advance it so the decay schedule
    # above actually takes effect.
    train_step = tf.train.AdamOptimizer(learning_rate).minimize(
        loss_total, global_step=global_step)

    with tf.Session() as sess:
        init_op = tf.global_variables_initializer()
        sess.run(init_op)
        for i in range(STEPS):
            start = (i * BATCH_SIZE) % 300
            end = start + BATCH_SIZE
            sess.run(train_step, feed_dict={x: X[start:end], y_: Y_[start:end]})
            if i % 2000 == 0:
                loss_v = sess.run(loss_total, feed_dict={x: X, y_: Y_})
                print("After %d steps, loss is: %f" % (i, loss_v))

        xx, yy = np.mgrid[-3:3:.01, -3:3:.01]
        grid = np.c_[xx.ravel(), yy.ravel()]
        probs = sess.run(y, feed_dict={x: grid})
        probs = probs.reshape(xx.shape)

    plt.scatter(X[:, 0], X[:, 1], c=np.squeeze(Y_c))
    plt.contour(xx, yy, probs, levels=[.5])
    plt.show()


if __name__ == '__main__':
    backward()
3.109375
3
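The staircase schedule above decays the learning rate once every 300/BATCH_SIZE = 10 global steps (provided global_step is advanced by the optimizer). A plain-Python check of the formula lr = base * decay ** (step // decay_steps), with the constants copied from the script:

LEARNING_RATE_BASE = 0.001
LEARNING_RATE_DECAY = 0.999
DECAY_STEPS = 300 // 30  # dataset size / BATCH_SIZE = 10


def decayed_lr(step):
    # staircase=True floors the exponent, so the rate drops in discrete jumps
    return LEARNING_RATE_BASE * LEARNING_RATE_DECAY ** (step // DECAY_STEPS)


print(decayed_lr(0))      # 0.001
print(decayed_lr(2000))   # ~0.000819 after 200 decay periods (0.999 ** 200)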
test/examples/integrated/codec/vip/vip_agent.py
rodrigomelo9/uvm-python
140
4618
<gh_stars>100-1000
#//
#// -------------------------------------------------------------
#//    Copyright 2011 Synopsys, Inc.
#//    Copyright 2019-2020 <NAME> (tpoikela)
#//    All Rights Reserved Worldwide
#//
#//    Licensed under the Apache License, Version 2.0 (the
#//    "License"); you may not use this file except in
#//    compliance with the License.  You may obtain a copy of
#//    the License at
#//
#//        http://www.apache.org/licenses/LICENSE-2.0
#//
#//    Unless required by applicable law or agreed to in
#//    writing, software distributed under the License is
#//    distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
#//    CONDITIONS OF ANY KIND, either express or implied.  See
#//    the License for the specific language governing
#//    permissions and limitations under the License.
#// -------------------------------------------------------------
#//

import cocotb  # needed for cocotb.fork used below

from uvm import *

from .vip_sequencer import vip_sequencer
from .vip_driver import vip_driver
from .vip_monitor import vip_monitor


class vip_agent(UVMAgent):

    def __init__(self, name, parent=None):
        super().__init__(name, parent)
        self.hier_objection = False

    def build_phase(self, phase):
        self.sqr = vip_sequencer.type_id.create("sqr", self)
        self.drv = vip_driver.type_id.create("drv", self)
        self.tx_mon = vip_monitor.type_id.create("tx_mon", self)
        self.rx_mon = vip_monitor.type_id.create("rx_mon", self)

        self.rx_mon.hier_objection = self.hier_objection
        self.tx_mon.hier_objection = self.hier_objection
        self.drv.hier_objection = self.hier_objection

        vif = []
        if not UVMConfigDb.get(self, "", "vif", vif):
            uvm_fatal("VIP/AGT/NOVIF", "No virtual interface specified for this agent instance")
        self.vif = vif[0]
        UVMConfigDb.set(self, "tx_mon", "vif", self.vif.tx_mon)
        UVMConfigDb.set(self, "rx_mon", "vif", self.vif.rx)

    def connect_phase(self, phase):
        self.drv.seq_item_port.connect(self.sqr.seq_item_export)

    async def pre_reset_phase(self, phase):
        if self.hier_objection:
            phase.raise_objection(self, "Resetting agent")
        await self.reset_and_suspend()
        if self.hier_objection:
            print("vip_agent dropping objection")
            phase.drop_objection(self)

    async def reset_and_suspend(self):
        # fork
        await sv.fork_join([
            cocotb.fork(self.drv.reset_and_suspend()),
            cocotb.fork(self.tx_mon.reset_and_suspend()),
            cocotb.fork(self.rx_mon.reset_and_suspend())
        ])
        # join
        self.sqr.stop_sequences()

    async def suspend(self):
        # fork
        await sv.fork_join([
            cocotb.fork(self.drv.suspend()),
            cocotb.fork(self.tx_mon.suspend()),
            cocotb.fork(self.rx_mon.suspend()),
        ])
        # join

    async def resume(self):
        # fork
        await sv.fork_join([
            cocotb.fork(self.drv.resume()),
            cocotb.fork(self.tx_mon.resume()),
            cocotb.fork(self.rx_mon.resume()),
        ])
        # join


uvm_component_utils(vip_agent)
1.914063
2
tests/test_transliterate.py
abosoar/camel_tools
1
4619
# -*- coding: utf-8 -*-

# MIT License
#
# Copyright 2018-2020 New York University A<NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

"""
Tests for camel_tools.transliterate.
"""

from __future__ import absolute_import

import pytest

from camel_tools.utils.charmap import CharMapper
from camel_tools.utils.transliterate import Transliterator


# A mapper that translates lower-case English characters to a lower-case x and
# upper-case English characters to an upper-case X. This makes it easy to
# predict what the transliteration should be.
TEST_MAP = {
    u'A-Z': u'X',
    u'a-z': u'x',
}
TEST_MAPPER = CharMapper(TEST_MAP, None)


class TestTransliteratorInit(object):
    """Test class for Transliterator.__init__.
    """

    def test_init_none_mapper(self):
        """Test that init raises a TypeError when given a mapper that is None.
        """
        with pytest.raises(TypeError):
            Transliterator(None)

    def test_init_invalid_type_mapper(self):
        """Test that init raises a TypeError when given a mapper that is not
        a CharMapper instance.
        """
        with pytest.raises(TypeError):
            Transliterator({})

    def test_init_valid_mapper(self):
        """Test that init doesn't raise an error when given a valid mapper.
        """
        assert Transliterator(TEST_MAPPER)

    def test_init_none_marker(self):
        """Test that init raises a TypeError when given a marker that is None.
        """
        with pytest.raises(TypeError):
            Transliterator(TEST_MAPPER, None)

    def test_init_invalid_type_marker(self):
        """Test that init raises a TypeError when given a marker that is not
        a string.
        """
        with pytest.raises(TypeError):
            Transliterator(TEST_MAPPER, [])

    def test_init_empty_marker(self):
        """Test that init raises a ValueError when given a marker that is an
        empty string.
        """
        with pytest.raises(ValueError):
            Transliterator(TEST_MAPPER, '')

    def test_init_invalid_marker1(self):
        """Test that init raises a ValueError when given an invalid marker
        (whitespace in the middle).
        """
        with pytest.raises(ValueError):
            Transliterator(TEST_MAPPER, '@@LAT @@')

    def test_init_invalid_marker2(self):
        """Test that init raises a ValueError when given an invalid marker
        (whitespace at the end).
        """
        with pytest.raises(ValueError):
            Transliterator(TEST_MAPPER, '@@LAT@@ ')

    def test_init_invalid_marker3(self):
        """Test that init raises a ValueError when given an invalid marker
        (whitespace at the beginning).
        """
        with pytest.raises(ValueError):
            Transliterator(TEST_MAPPER, ' @@LAT@@')

    def test_init_valid_marker1(self):
        """Test that init doesn't raise an error when given a valid marker.
        """
        assert Transliterator(TEST_MAPPER, '@@LAT@@')

    def test_init_valid_marker2(self):
        """Test that init doesn't raise an error when given a valid marker.
        """
        assert Transliterator(TEST_MAPPER, u'@@LAT@@')


class TestTransliteratorTranslate(object):
    """Test class for Transliterator.translate.
    """

    def test_trans_empty(self):
        """Test that transliterating an empty string returns an empty string.
        """
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(u'') == u''

    def test_trans_single_no_markers(self):
        """Test that a single word with no markers gets transliterated.
        """
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(u'Hello') == u'Xxxxx'

    def test_trans_single_with_markers(self):
        """Test that a single word with markers does not get transliterated.
        """
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(u'@@Hello') == u'@@Hello'

    def test_trans_single_strip(self):
        """Test that a single word with markers does not get transliterated
        but markers do get stripped when strip_markers is set to True.
        """
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(u'@@Hello', True) == u'Hello'

    def test_trans_single_ignore(self):
        """Test that a single word with markers gets transliterated when
        ignore markers is set to True.
        """
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(u'@@Hello', False, True) == u'@@Xxxxx'

    def test_trans_single_ignore_strip(self):
        """Test that a single word with markers gets transliterated with
        markers stripped when both strip_markers and ignore_markers are set
        to True.
        """
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(u'@@Hello', True, True) == u'Xxxxx'

    def test_trans_sent_no_markers(self):
        """Test that a sentence with no markers gets transliterated.
        """
        sent_orig = u'Hello World, this is a sentence!'
        sent_out = u'Xxxxx Xxxxx, xxxx xx x xxxxxxxx!'
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(sent_orig) == sent_out

    def test_trans_sent_with_markers(self):
        """Test that tokens with markers in a sentence do not get
        transliterated.
        """
        sent_orig = u'Hello @@World, this is a @@sentence!'
        sent_out = u'Xxxxx @@World, xxxx xx x @@sentence!'
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(sent_orig) == sent_out

    def test_trans_sent_strip(self):
        """Test that tokens with markers in a sentence do not get
        transliterated but markers do get stripped when strip_markers is set
        to True.
        """
        sent_orig = u'Hello @@World, this is a @@sentence!'
        sent_out = u'Xxxxx World, xxxx xx x sentence!'
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(sent_orig, True) == sent_out

    def test_trans_sent_ignore(self):
        """Test that tokens with markers in a sentence get transliterated
        when ignore markers is set to True.
        """
        sent_orig = u'Hello @@World, this is a @@sentence!'
        sent_out = u'Xxxxx @@Xxxxx, xxxx xx x @@xxxxxxxx!'
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(sent_orig, False, True) == sent_out

    def test_trans_sent_ignore_strip(self):
        """Test that tokens with markers in a sentence get transliterated
        with markers stripped when both strip_markers and ignore_markers are
        set to True.
        """
        sent_orig = u'Hello @@World, this is a @@sentence!'
        sent_out = u'Xxxxx Xxxxx, xxxx xx x xxxxxxxx!'
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(sent_orig, True, True) == sent_out
2.078125
2
source/code/build-instance-scheduler-template.py
liangruibupt/aws-instance-scheduler
0
4620
######################################################################################################################
#  Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.                                           #
#                                                                                                                    #
#  Licensed under the Apache License Version 2.0 (the "License"). You may not use this file except in compliance    #
#  with the License. A copy of the License is located at                                                            #
#                                                                                                                    #
#      http://www.apache.org/licenses/                                                                              #
#                                                                                                                    #
#  or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES#
#  OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions   #
#  and limitations under the License.                                                                               #
######################################################################################################################

import json
import sys
from collections import OrderedDict


def get_versioned_template(template_filename, bucket, solution, version, region):
    with open(template_filename, "rt") as f:
        template_text = "".join(f.readlines())
        template_text = template_text.replace("%bucket%", bucket)
        template_text = template_text.replace("%solution%", solution)
        template_text = template_text.replace("%version%", version)
        if region == 'cn-north-1' or region == 'cn-northwest-1':
            arn_prefix = "arn:aws-cn"
        else:
            arn_prefix = "arn:aws"
        template_text = template_text.replace("%arn_prefix%", arn_prefix)
        return json.loads(template_text, object_pairs_hook=OrderedDict)


def main(template_file, bucket, solution, version, region):
    template = get_versioned_template(template_file, bucket, solution, version, region)
    print(json.dumps(template, indent=4))


main(template_file=sys.argv[1], bucket=sys.argv[2], solution=sys.argv[3], version=sys.argv[4], region=sys.argv[5])
exit(0)
1.976563
2
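In miniature, what get_versioned_template does to the template text before parsing it as JSON; the bucket name, role ARN, and region choice below are illustrative placeholders:

import json
from collections import OrderedDict

text = '{"Bucket": "%bucket%", "RoleArn": "%arn_prefix%:iam::111111111111:role/example"}'
text = text.replace("%bucket%", "my-bucket")       # placeholder bucket name
text = text.replace("%arn_prefix%", "arn:aws-cn")  # the cn-north-1 / cn-northwest-1 branch
print(json.loads(text, object_pairs_hook=OrderedDict))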
src/parse.py
StanfordAHA/Configuration
0
4621
###############################################################################
# file -- parse.py --
# Top contributors (to current version):
#   <NAME>
# This file is part of the configuration finder for the Stanford AHA project.
# Copyright (c) 2021 by the authors listed in the file AUTHORS
# in the top-level source directory) and their institutional affiliations.
# All rights reserved.  See the file LICENSE in the top-level source
# directory for licensing information.
#
# Handles parsing of all input files.
###############################################################################

import smt_switch as ss
import smt_switch.primops as po
import smt_switch.sortkinds as sk
import argparse
import pono as c
import sys
import re
import time
import copy
import io
#import timeit


class stream:
    set0 = []
    set1 = []
    seq_in = []
    seq_out = []
    vars = {}
    var_array_inds = {}
    constr2terms = []
    data_in_size = []
    data_out_size = []
    clk_name = None
    rst_n_name = None
    config_names = []

    def read_stream(self, args, fout, agg_set, tb_set, sram_set, symbols, solver):
        global dim_names

        # open an annotation file
        if args.annotation is None:
            if agg_set:
                annot_file = args.hwpath + "agg_lake_top_annotation.txt"
            elif tb_set:
                annot_file = args.hwpath + "tb_lake_top_annotation.txt"
            elif sram_set:
                annot_file = args.hwpath + "sram_lake_top_annotation.txt"
            else:
                annot_file = args.hwpath + "lake_top_annotation.txt"
        else:
            annot_file = args.annotation
        cfo = open(annot_file, "r+")
        clines = cfo.readlines()
        # Close opened file
        cfo.close()

        # Collect the Set0, Set1, I/O sequence, and config_name variables as
        # they appear in btor2
        for cln in clines:
            cln = cln.strip()
            cln = cln.replace(',', '')
            cvars = cln.split()
            if 'var' == cvars[0]:
                self.vars[cvars[1]] = cvars[3]
            elif 'input' != cvars[0] and 'output' != cvars[0] and 'var' != cvars[0] and 'if' != cvars[0] and 'SOLVE' != cvars[1]:
                self.constr2terms.append(cvars)
            elif 'if' == cvars[0]:
                self.constr2terms.append(cvars)
            elif 'SOLVE' == cvars[1]:
                # specific bits are set only to be solved. Others can be anything, e.g. 0
                signal = cvars[0]
                if ':' in signal:
                    signal_name = signal[:signal.find('[')]
                    ind_start = signal[signal.find('[')+1:signal.find(':')]
                    symb_start = False
                    if ind_start in self.vars:
                        ind_start = int(self.vars[ind_start], 0)
                    elif ind_start.isdigit():
                        ind_start = int(ind_start, 0)
                    else:
                        symb_start = True
                    ind_end = signal[signal.find(':')+1:signal.find(']')]
                    symb_end = False
                    if ind_end in self.vars:
                        ind_end = int(self.vars[ind_end], 0)
                    elif ind_end.isdigit():
                        ind_end = int(ind_end, 0)
                    else:
                        # case of symbolic
                        symb_end = True
                    if not symb_start and not symb_end:
                        if signal[:signal.find('[')] not in self.var_array_inds:
                            self.var_array_inds[signal[:signal.find('[')]] = []
                        for i in range(ind_start, ind_end+1):
                            self.var_array_inds[signal_name].append(i)
                    else:
                        # implement later when support for universal quantifiers is added
                        self.constr2terms.append(cvars)
                else:
                    if signal[:signal.find('[')] not in self.var_array_inds:
                        self.var_array_inds[signal[:signal.find('[')]] = []
                    self.var_array_inds[signal[:signal.find('[')]].append(signal[signal.find('[')+1:signal.find(']')])
            elif 'SET' == cvars[-1][:-1]:
                if len(cvars) == 6:
                    rem_dims = cvars[2]
                    dims = []
                    while (rem_dims != ''):
                        dims.append(int(rem_dims[1:rem_dims.find(':')], 0))
                        rem_dims = rem_dims[rem_dims.find(']')+1:]
                    gen = [0]*len(dims)
                    j = len(dims)-1
                    while j >= 0:
                        if gen[j] <= dims[j]:
                            build_dims = cvars[-2]
                            for i in gen:
                                build_dims = build_dims + '['+str(i)+']'
                            if cvars[-1][-1:] == '0':
                                self.set0.append(build_dims)
                            else:
                                self.set1.append(build_dims)
                            while (j < len(dims)-1 and gen[j+1] == 0):
                                j += 1
                        else:
                            gen[j] = 0
                            j -= 1
                        gen[j] += 1
                else:
                    if cvars[-1][-1:] == '0':
                        self.set0.append(cvars[-2])
                    else:
                        self.set1.append(cvars[-2])
            elif 'SEQUENCE' == cvars[-1]:
                if len(cvars) == 6:
                    rem_dims = cvars[2]
                    dims = []
                    while (rem_dims != ''):
                        dims.append(int(rem_dims[1:rem_dims.find(':')], 0))
                        rem_dims = rem_dims[rem_dims.find(']')+1:]
                    if cvars[0] == 'input':
                        self.data_in_size = dims
                    else:
                        self.data_out_size = dims
                    assert len(self.data_in_size) <= 3
                    assert len(self.data_out_size) <= 3
                    gen = [0]*len(dims)
                    j = len(dims)-1
                    while j >= 0:
                        if gen[j] <= dims[j]:
                            build_dims = cvars[-2]
                            for i in gen:
                                build_dims = build_dims + '['+str(i)+']'
                            if cvars[0] == 'input':
                                self.seq_in.append(build_dims)
                            else:
                                self.seq_out.append(build_dims)
                            while (j < len(dims)-1 and gen[j+1] == 0):
                                j += 1
                        else:
                            gen[j] = 0
                            j -= 1
                        gen[j] += 1
                else:
                    if cvars[0] == 'input':
                        self.seq_in.append(cvars[-2])
                    else:
                        self.seq_out.append(cvars[-2])
            elif 'SOLVE' == cvars[-1] and ('input' == cvars[0] or 'output' == cvars[0]):
                # if cvars[3] == 'strg_ub_pre_fetch_0_input_latency':
                #     continue
                if len(cvars) == 6:
                    dim = int(cvars[2][1:cvars[2].find(':')], 0)
                    for i in range(dim+1):
                        self.config_names.append(cvars[-2]+'['+str(i)+']')
                else:
                    self.config_names.append(cvars[-2])
            elif 'CLK' == cvars[-1]:
                self.clk_name = cvars[-2]
            elif 'RSTN' == cvars[-1]:
                self.rst_n_name = cvars[-2]
            else:
                assert 'X' == cvars[-1]
1.851563
2
neyesem/main.py
omerfarukbaysal/neyesem
0
4622
<reponame>omerfarukbaysal/neyesem<gh_stars>0
from flask import Blueprint, render_template, redirect, url_for, request, flash, make_response
from werkzeug.security import generate_password_hash
from flask_login import login_required, current_user
from . import db
import datetime
from .models import Visitor, User

main = Blueprint('main', __name__)


@main.route('/')
def index():
    # control visitor with cookies
    cookie = request.cookies.get('isvisited')
    if cookie:
        # visit = True
        pass
    else:
        resp = make_response(render_template('index.html'))
        resp.set_cookie('isvisited', 'yess')
        return resp

    visitor_ip = request.remote_addr
    visited_time = datetime.datetime.now()
    visitors = Visitor.query.all()
    visited = Visitor.query.filter_by(ip=visitor_ip).first()
    visit = False

    if visited:
        difference = abs((visited_time - visited.last_visit).seconds)
        if difference > 60:
            visit = True
            visited.last_visit = visited_time
            db.session.commit()
    else:
        new_visitor = Visitor(ip=visitor_ip, last_visit=visited_time)
        db.session.add(new_visitor)
        db.session.commit()

    return render_template('index.html', visitors=visitors, visit=visit)


@main.route('/', methods=['POST'])
def index_post():
    email = request.form.get('email')
    name = request.form.get('name')
    password = request.form.get('password')

    user = User.query.filter_by(email=email).first()
    # if this returns a user, then the email already exists in database

    if user:
        # if a user is found, we want to redirect back to signup page so user can try again
        flash('Email address already exists')
        return redirect(url_for('auth.signup'))

    # create a new user with the form data. Hash the password so the plaintext version isn't saved.
    new_user = User(email=email, name=name, password=generate_password_hash(password, method='sha256'))

    # add the new user to the database
    db.session.add(new_user)
    db.session.commit()

    return redirect(url_for('auth.login'))


@main.route('/profile')
@login_required
def profile():
    return render_template('profile.html', name=current_user.name)
2.71875
3
00-Aulas/Aula007_2.py
AmandaRH07/Python_Entra21
0
4623
# Functions
cabecalho = "SISTEMA DE CADASTRO DE FUNCIONARIO\n\n\n"
rodape = "\n\n\n Obrigada pela preferencia"


def imprimir_tela(conteudo):
    print(cabecalho)
    # print(opcao_menu)
    print(conteudo)
    print(rodape)


def ler_opcoes():
    opcao = int(input("Insira a opção: "))
    return opcao


def carregar_opcoes(opcao):
    if opcao == 1:
        imprimir_tela("A opção escolhida foi 'Cadastrar funcionário'")
    elif opcao == 2:
        imprimir_tela("A opção escolhida foi 'Listar funcionários'")
    elif opcao == 3:
        imprimir_tela("A opção escolhida foi 'Editar funcionário'")
    elif opcao == 4:
        imprimir_tela("A opção escolhida foi 'Deletar funcionário'")
    elif opcao == 5:
        imprimir_tela("A opção escolhida foi 'Sair'")
    else:
        pass
3.578125
4
ast_version/src/binop.py
lucassa3/CCompiler
1
4624
<reponame>lucassa3/CCompiler<filename>ast_version/src/binop.py
from node import Node


class BinOp(Node):
    def eval(self, st):
        a = self.children[0].eval(st)
        b = self.children[1].eval(st)

        if self.value == "MINUS":
            return a - b
        elif self.value == "PLUS":
            return a + b
        elif self.value == "MULT":
            return a * b
        elif self.value == "DIV":
            return a // b
        elif self.value == "GREATER":
            return a > b
        elif self.value == "LESS":
            return a < b
        elif self.value == "GE":
            return a >= b
        elif self.value == "LE":
            return a <= b
        elif self.value == "EQUALS":
            return a == b
        elif self.value == "AND":
            return a and b
        elif self.value == "OR":
            return a or b
3.171875
3
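A sketch of evaluating (2 + 3) * 4 with the class above. The real Node base class lives in node.py and is not shown here, so the stand-in below (storing a value and a list of children) is an assumption; IntVal is likewise a hypothetical leaf node added so the example is self-contained:

class Node:  # stand-in for node.Node; assumed to hold value and children
    def __init__(self, value, children):
        self.value = value
        self.children = children


class IntVal(Node):  # hypothetical leaf node that returns its literal value
    def eval(self, st):
        return self.value


# (2 + 3) * 4  ==>  20; st is the symbol table, unused by literals here
tree = BinOp("MULT", [BinOp("PLUS", [IntVal(2, []), IntVal(3, [])]),
                      IntVal(4, [])])
print(tree.eval(st={}))  # 20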
aqt/installer.py
pylipp/aqtinstall
0
4625
#!/usr/bin/env python3
#
# Copyright (C) 2018 <NAME> <<EMAIL>>
# Copyright (C) 2019,2020 <NAME> <<EMAIL>>
# Copyright (C) 2020, <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

import concurrent.futures
import os
import pathlib
import subprocess
import sys
import time
from logging import getLogger

import py7zr
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

from aqt.archives import QtPackage
from aqt.helper import altlink, versiontuple
from aqt.qtpatch import Updater
from aqt.settings import Settings


class ExtractionError(Exception):
    pass


class QtInstaller:
    """
    Installer class to download packages and extract it.
    """

    def __init__(self, qt_archives, logging=None, command=None, target_dir=None):
        self.qt_archives = qt_archives
        if logging:
            self.logger = logging
        else:
            self.logger = getLogger('aqt')
        self.command = command
        if target_dir is None:
            self.base_dir = os.getcwd()
        else:
            self.base_dir = target_dir
        self.settings = Settings()

    def retrieve_archive(self, package: QtPackage):
        archive = package.archive
        url = package.url
        name = package.name
        start_time = time.perf_counter()
        self.logger.info("Downloading {}...".format(name))
        self.logger.debug("Download URL: {}".format(url))
        session = requests.Session()
        retry = Retry(connect=5, backoff_factor=0.5)
        adapter = HTTPAdapter(max_retries=retry)
        session.mount('http://', adapter)
        session.mount('https://', adapter)
        try:
            r = session.get(url, allow_redirects=False, stream=True)
            if r.status_code == 302:
                newurl = altlink(r.url, r.headers['Location'], logger=self.logger)
                self.logger.info('Redirected URL: {}'.format(newurl))
                r = session.get(newurl, stream=True)
        except requests.exceptions.ConnectionError as e:
            self.logger.error("Connection error: %s" % e.args)
            raise e
        else:
            try:
                with open(archive, 'wb') as fd:
                    for chunk in r.iter_content(chunk_size=8196):
                        fd.write(chunk)
                    fd.flush()
                if self.command is None:
                    with open(archive, 'rb') as fd:
                        self.extract_archive(fd)
            except Exception as e:
                exc = sys.exc_info()
                self.logger.error("Download error: %s" % exc[1])
                raise e
            else:
                if self.command is not None:
                    self.extract_archive_ext(archive)
                os.unlink(archive)
        self.logger.info("Finished installation of {} in {:.2f} seconds".format(
            archive, time.perf_counter() - start_time))

    def extract_archive(self, archive):
        szf = py7zr.SevenZipFile(archive)
        szf.extractall(path=self.base_dir)
        szf.close()

    def extract_archive_ext(self, archive):
        if self.base_dir is not None:
            command_args = [self.command, 'x', '-aoa', '-bd', '-y',
                            '-o{}'.format(self.base_dir), archive]
        else:
            command_args = [self.command, 'x', '-aoa', '-bd', '-y', archive]
        try:
            proc = subprocess.run(command_args, stdout=subprocess.PIPE, check=True)
            self.logger.debug(proc.stdout)
        except subprocess.CalledProcessError as cpe:
            self.logger.error("Extraction error: %d" % cpe.returncode)
            if cpe.stdout is not None:
                self.logger.error(cpe.stdout)
            if cpe.stderr is not None:
                self.logger.error(cpe.stderr)
            raise cpe

    def get_arch_dir(self, arch):
        if arch.startswith('win64_mingw'):
            arch_dir = arch[6:] + '_64'
        elif arch.startswith('win32_mingw'):
            arch_dir = arch[6:] + '_32'
        elif arch.startswith('win'):
            arch_dir = arch[6:]
        else:
            arch_dir = arch
        return arch_dir

    def make_conf_files(self, qt_version, arch):
        """Make Qt configuration files, qt.conf and qtconfig.pri"""
        arch_dir = self.get_arch_dir(arch)
        try:
            # prepare qt.conf
            with open(os.path.join(self.base_dir, qt_version, arch_dir, 'bin', 'qt.conf'), 'w') as f:
                f.write("[Paths]\n")
                f.write("Prefix=..\n")
            # update qtconfig.pri only as OpenSource
            with open(os.path.join(self.base_dir, qt_version, arch_dir, 'mkspecs', 'qconfig.pri'), 'r+') as f:
                lines = f.readlines()
                f.seek(0)
                f.truncate()
                for line in lines:
                    if line.startswith('QT_EDITION ='):
                        line = 'QT_EDITION = OpenSource\n'
                    if line.startswith('QT_LICHECK ='):
                        line = 'QT_LICHECK =\n'
                    f.write(line)
        except IOError as e:
            self.logger.error("Configuration file generation error: %s\n", e.args, exc_info=True)
            raise e

    def install(self):
        with concurrent.futures.ThreadPoolExecutor(self.settings.concurrency) as executor:
            futures = [executor.submit(self.retrieve_archive, ar) for ar in self.qt_archives.get_archives()]
            done, not_done = concurrent.futures.wait(futures, return_when=concurrent.futures.FIRST_EXCEPTION)
            if len(not_done) > 0:
                self.logger.error("Installation error detected.")
                exit(1)
            try:
                for feature in done:
                    feature.result()
            except Exception:
                exit(1)

    def finalize(self):
        target = self.qt_archives.get_target_config()
        self.make_conf_files(target.version, target.arch)
        prefix = pathlib.Path(self.base_dir) / target.version / target.arch
        updater = Updater(prefix, self.logger)
        if versiontuple(target.version) < (5, 14, 2):
            updater.patch_qt(target)
1.765625
2
lib/django-0.96/django/views/generic/list_detail.py
MiCHiLU/google_appengine_sdk
790
4626
from django.template import loader, RequestContext
from django.http import Http404, HttpResponse
from django.core.xheaders import populate_xheaders
from django.core.paginator import ObjectPaginator, InvalidPage
from django.core.exceptions import ObjectDoesNotExist

def object_list(request, queryset, paginate_by=None, page=None,
        allow_empty=False, template_name=None, template_loader=loader,
        extra_context=None, context_processors=None,
        template_object_name='object', mimetype=None):
    """
    Generic list of objects.

    Templates: ``<app_label>/<model_name>_list.html``
    Context:
        object_list
            list of objects
        is_paginated
            are the results paginated?
        results_per_page
            number of objects per page (if paginated)
        has_next
            is there a next page?
        has_previous
            is there a prev page?
        page
            the current page
        next
            the next page
        previous
            the previous page
        pages
            number of pages, total
        hits
            number of objects, total
        last_on_page
            the result number of the last of object in the
            object_list (1-indexed)
        first_on_page
            the result number of the first object in the
            object_list (1-indexed)
    """
    if extra_context is None: extra_context = {}
    queryset = queryset._clone()
    if paginate_by:
        paginator = ObjectPaginator(queryset, paginate_by)
        if not page:
            page = request.GET.get('page', 1)
        try:
            page = int(page)
            object_list = paginator.get_page(page - 1)
        except (InvalidPage, ValueError):
            if page == 1 and allow_empty:
                object_list = []
            else:
                raise Http404
        c = RequestContext(request, {
            '%s_list' % template_object_name: object_list,
            'is_paginated': paginator.pages > 1,
            'results_per_page': paginate_by,
            'has_next': paginator.has_next_page(page - 1),
            'has_previous': paginator.has_previous_page(page - 1),
            'page': page,
            'next': page + 1,
            'previous': page - 1,
            'last_on_page': paginator.last_on_page(page - 1),
            'first_on_page': paginator.first_on_page(page - 1),
            'pages': paginator.pages,
            'hits' : paginator.hits,
        }, context_processors)
    else:
        c = RequestContext(request, {
            '%s_list' % template_object_name: queryset,
            'is_paginated': False
        }, context_processors)
        if not allow_empty and len(queryset) == 0:
            raise Http404
    for key, value in extra_context.items():
        if callable(value):
            c[key] = value()
        else:
            c[key] = value
    if not template_name:
        model = queryset.model
        template_name = "%s/%s_list.html" % (model._meta.app_label, model._meta.object_name.lower())
    t = template_loader.get_template(template_name)
    return HttpResponse(t.render(c), mimetype=mimetype)

def object_detail(request, queryset, object_id=None, slug=None,
        slug_field=None, template_name=None, template_name_field=None,
        template_loader=loader, extra_context=None, context_processors=None,
        template_object_name='object', mimetype=None):
    """
    Generic detail of an object.

    Templates: ``<app_label>/<model_name>_detail.html``
    Context:
        object
            the object
    """
    if extra_context is None: extra_context = {}
    model = queryset.model
    if object_id:
        queryset = queryset.filter(pk=object_id)
    elif slug and slug_field:
        queryset = queryset.filter(**{slug_field: slug})
    else:
        raise AttributeError, "Generic detail view must be called with either an object_id or a slug/slug_field."
    try:
        obj = queryset.get()
    except ObjectDoesNotExist:
        raise Http404, "No %s found matching the query" % (model._meta.verbose_name)
    if not template_name:
        template_name = "%s/%s_detail.html" % (model._meta.app_label, model._meta.object_name.lower())
    if template_name_field:
        template_name_list = [getattr(obj, template_name_field), template_name]
        t = template_loader.select_template(template_name_list)
    else:
        t = template_loader.get_template(template_name)
    c = RequestContext(request, {
        template_object_name: obj,
    }, context_processors)
    for key, value in extra_context.items():
        if callable(value):
            c[key] = value()
        else:
            c[key] = value
    response = HttpResponse(t.render(c), mimetype=mimetype)
    populate_xheaders(request, response, model, getattr(obj, obj._meta.pk.name))
    return response
2.3125
2
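Typical Django 0.96-era urlconf wiring for these generic views; the app and model names below are illustrative, not from the source:

from django.conf.urls.defaults import *
from myapp.models import Article  # hypothetical model

info_dict = {'queryset': Article.objects.all(), 'paginate_by': 10}

# The string prefix plus view-name strings was the idiom of that era.
urlpatterns = patterns('django.views.generic.list_detail',
    (r'^articles/$', 'object_list', info_dict),
    (r'^articles/(?P<object_id>\d+)/$', 'object_detail', {'queryset': Article.objects.all()}),
)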
pax/_src/core/utility_modules.py
NTT123/pax
11
4627
"""Utility Modules.""" from typing import Any, Callable, Dict, List, Optional, Sequence, TypeVar, Union import jax import jax.numpy as jnp from .module import Module, parameters_method T = TypeVar("T", bound=Module) O = TypeVar("O") class ParameterModule(Module): """A PAX module that registers attributes as parameters by default.""" def parameters(self): return self.apply_submodules(lambda x: x.parameters()) class StateModule(Module): """A PAX module that registers attributes as states by default.""" parameters = parameters_method() class LazyModule(Module): """A lazy module is a module that only creates submodules when needed. Example: >>> from dataclasses import dataclass >>> @dataclass ... class MLP(pax.experimental.LazyModule): ... features: list ... ... def __call__(self, x): ... sizes = zip(self.features[:-1], self.features[1:]) ... for i, (in_dim, out_dim) in enumerate(sizes): ... fc = self.get_or_create(f"fc_{i}", lambda: pax.Linear(in_dim, out_dim)) ... x = jax.nn.relu(fc(x)) ... return x ... ... >>> mlp, _ = MLP([1, 2, 3]) % jnp.ones((1, 1)) >>> print(mlp.summary()) MLP(features=[1, 2, 3]) ├── Linear(in_dim=1, out_dim=2, with_bias=True) └── Linear(in_dim=2, out_dim=3, with_bias=True) """ def get_or_create(self, name, create_fn: Callable[[], T]) -> T: """Create and register a new attribute when it is not exist. Return the attribute. """ if hasattr(self, name): value = getattr(self, name) else: assert callable(create_fn), "Expect a callable function" value = create_fn() setattr(self, name, value) return value class Lambda(Module): """Convert a function to a module. Example: >>> net = pax.Lambda(jax.nn.relu) >>> print(net.summary()) x => relu(x) >>> y = net(jnp.array(-1)) >>> y DeviceArray(0, dtype=int32, weak_type=True) """ func: Callable def __init__(self, func: Callable, name: Optional[str] = None): super().__init__(name=name) self.func = func def __call__(self, *args, **kwargs): return self.func(*args, **kwargs) def __repr__(self) -> str: if self.name is not None: return super().__repr__() else: return f"{self.__class__.__qualname__}({self.func.__name__})" def summary(self, return_list: bool = False) -> Union[str, List[str]]: if self.name is not None: name = self.name elif isinstance(self.func, jax.custom_jvp) and hasattr(self.func, "fun"): if hasattr(self.func.fun, "__name__"): name = self.func.fun.__name__ else: name = f"{self.func.fun}" elif hasattr(self.func, "__name__"): name = self.func.__name__ else: name = f"{self.func}" output = f"x => {name}(x)" return [output] if return_list else output class Flattener(Module): """Flatten PAX modules for better performance. 
Example: >>> net = pax.Linear(3, 3) >>> opt = opax.adam(1e-3)(net.parameters()) >>> flat_mods = pax.experimental.Flattener(model=net, optimizer=opt) >>> net, opt = flat_mods.model, flat_mods.optimizer >>> print(net.summary()) Linear(in_dim=3, out_dim=3, with_bias=True) >>> print(opt.summary()) chain.<locals>.Chain ├── scale_by_adam.<locals>.ScaleByAdam │ ├── Linear(in_dim=3, out_dim=3, with_bias=True) │ └── Linear(in_dim=3, out_dim=3, with_bias=True) └── scale.<locals>.Scale """ treedef_dict: Dict[str, Any] leaves_dict: Dict[str, Sequence[jnp.ndarray]] def __init__(self, **kwargs): """Create a new flattener.""" super().__init__() self.treedef_dict = {} self.leaves_dict = {} for name, value in kwargs.items(): leaves, treedef = jax.tree_flatten(value) self.treedef_dict[name] = treedef self.leaves_dict[name] = leaves def __getattr__(self, name: str) -> Any: if name in self.treedef_dict: treedef = self.treedef_dict[name] leaves = self.leaves_dict[name] value = jax.tree_unflatten(treedef, leaves) return value else: raise AttributeError() def update(self: T, **kwargs) -> T: """Update the flattener. Example: >>> net = pax.Linear(3, 3) >>> flats = pax.experimental.Flattener(net=net) >>> flats = flats.update(net=pax.Linear(4, 4)) >>> print(flats.net.summary()) Linear(in_dim=4, out_dim=4, with_bias=True) """ new_self = self.copy() for name, value in kwargs.items(): leaves, treedef = jax.tree_flatten(value) new_self.treedef_dict[name] = treedef new_self.leaves_dict[name] = leaves return new_self def parameters(self: T) -> T: """Raise an error. Need to reconstruct the original module before getting parameters. """ raise ValueError( "A flattener only stores ndarray leaves as non-trainable states.\n" "Reconstruct the original module before getting parameters." )
3.109375
3
src/richie/apps/courses/lms/edx.py
kernicPanel/richie
0
4628
<reponame>kernicPanel/richie
"""
Backend to connect richie with an Open edX LMS
"""
import logging
import re

import requests
from requests.auth import AuthBase

from ..serializers import SyncCourseRunSerializer
from .base import BaseLMSBackend

logger = logging.getLogger(__name__)


def split_course_key(key):
    """Split an OpenEdX course key by organization, course and course run codes.

    We first try splitting the key as a version 1 key (course-v1:org+course+run)
    and fall back to the old version (org/course/run).
    """
    if key.startswith("course-v1:"):
        organization, course, run = key[10:].split("+")
    else:
        organization, course, run = key.split("/")

    return organization, course, run


class EdXTokenAuth(AuthBase):
    """Attach HTTP token authentication to the given Request object."""

    def __init__(self, token):
        """Set-up token value in the instance."""
        self.token = token

    def __call__(self, request):
        """Modify and return the request."""
        request.headers.update(
            {"X-Edx-Api-Key": self.token, "Content-Type": "application/json"}
        )
        return request


class TokenAPIClient(requests.Session):
    """
    A :class:`requests.Session` that automatically authenticates against edX's
    preferred authentication method up to Dogwood, given a secret token.
    For more usage details, see documentation of the :class:`requests.Session`
    object: https://requests.readthedocs.io/en/master/user/advanced/#session-objects
    """

    def __init__(self, token, *args, **kwargs):
        """Extending the session object by setting the authentication token."""
        super().__init__(*args, **kwargs)
        self.auth = EdXTokenAuth(token)


class EdXLMSBackend(BaseLMSBackend):
    """LMS backend for Richie tested with Open EdX Dogwood to Hawthorn."""

    @property
    def api_client(self):
        """Instantiate and return an edx token API client."""
        return TokenAPIClient(self.configuration["API_TOKEN"])

    def extract_course_id(self, url):
        """Extract the LMS course id from the course run url."""
        return re.match(self.configuration["COURSE_REGEX"], url).group("course_id")

    def extract_course_number(self, data):
        """Extract the LMS course number from data dictionary."""
        course_id = self.extract_course_id(data.get("resource_link"))
        return split_course_key(course_id)[1]

    @staticmethod
    def get_course_run_serializer(data, partial=False):
        """Prepare data and return a bound serializer."""
        return SyncCourseRunSerializer(data=data, partial=partial)
2.5
2
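Both key formats accepted by split_course_key, shown on an illustrative demo course key:

assert split_course_key("course-v1:edX+DemoX+Demo_Course") == ("edX", "DemoX", "Demo_Course")
assert split_course_key("edX/DemoX/Demo_Course") == ("edX", "DemoX", "Demo_Course")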
BitTorrent-5.2.2/BTL/brpclib.py
jpabb7/p2pScrapper
4
4629
# by <NAME>

import xmlrpclib
from xmlrpclib2 import *

from BTL import brpc

old_PyCurlTransport = PyCurlTransport


class PyCurlTransport(old_PyCurlTransport):

    def set_connection_params(self, h):
        h.add_header('User-Agent', "brpclib.py/1.0")
        h.add_header('Connection', "Keep-Alive")
        h.add_header('Content-Type', "application/octet-stream")

    def _parse_response(self, response):
        # read response from input file/socket, and parse it
        return brpc.loads(response.getvalue())[0]


# --------------------------------------------------------------------
# request dispatcher

class _Method:
    # some magic to bind an B-RPC method to an RPC server.
    # supports "nested" methods (e.g. examples.getStateName)
    def __init__(self, send, name):
        self.__send = send
        self.__name = name

    def __getattr__(self, name):
        return _Method(self.__send, "%s.%s" % (self.__name, name))

    def __call__(self, *args, **kwargs):
        args = (args, kwargs)
        return self.__send(self.__name, args)

    # ARG! prevent repr(_Method()) from submiting an RPC call!
    def __repr__(self):
        return "<%s instance at 0x%08X>" % (self.__class__, id(self))


# Double underscore is BAD!
class BRPC_ServerProxy(xmlrpclib.ServerProxy):
    """uri [,options] -> a logical connection to an B-RPC server

    uri is the connection point on the server, given as
    scheme://host/target.

    The standard implementation always supports the "http" scheme. If
    SSL socket support is available (Python 2.0), it also supports
    "https".

    If the target part and the slash preceding it are both omitted,
    "/RPC2" is assumed.

    The following options can be given as keyword arguments:

        transport: a transport factory
        encoding: the request encoding (default is UTF-8)

    All 8-bit strings passed to the server proxy are assumed to use
    the given encoding.
    """

    def __init__(self, uri, transport=None, encoding=None, verbose=0,
                 allow_none=0):
        # establish a "logical" server connection

        # get the url
        import urllib
        type, uri = urllib.splittype(uri)
        if type not in ("http", "https"):
            raise IOError, "unsupported B-RPC protocol"
        self.__host, self.__handler = urllib.splithost(uri)
        if not self.__handler:
            self.__handler = "/RPC2"

        if transport is None:
            if type == "https":
                transport = xmlrpclib.SafeTransport()
            else:
                transport = xmlrpclib.Transport()
        self.__transport = transport

        self.__encoding = encoding
        self.__verbose = verbose
        self.__allow_none = allow_none

    def __request(self, methodname, params):
        # call a method on the remote server
        request = brpc.dumps(params, methodname, encoding=self.__encoding,
                             allow_none=self.__allow_none)

        response = self.__transport.request(
            self.__host,
            self.__handler,
            request,
            verbose=self.__verbose
            )

        if len(response) == 1:
            response = response[0]

        return response

    def __repr__(self):
        return (
            "<ServerProxy for %s%s>" %
            (self.__host, self.__handler)
            )

    __str__ = __repr__

    def __getattr__(self, name):
        # magic method dispatcher
        return _Method(self.__request, name)


def new_server_proxy(url):
    c = cache_set.get_cache(PyCURL_Cache, url)
    t = PyCurlTransport(c)
    return BRPC_ServerProxy(url, transport=t)

ServerProxy = new_server_proxy

if __name__ == '__main__':
    s = ServerProxy('https://greg.mitte.bittorrent.com:7080/')

    def ping(*a, **kw):
        (a2, kw2) = s.ping(*a, **kw)
        assert a2 == list(a), '%s list is not %s' % (a2, list(a))
        assert kw2 == dict(kw), '%s dict is not %s' % (kw2, dict(kw))

    ping(0, 1, 1, name="potato")
    ping(0, 1, 1, name="anime")
    ping("phish", 0, 1, 1)
    ping("games", 0, 1, 1)
2.53125
3
database_files/views.py
den-gts/django-database-files-3000
8
4630
import base64
import mimetypes
import os

from django.conf import settings
from django.http import Http404, HttpResponse
from django.shortcuts import get_object_or_404
from django.views.decorators.cache import cache_control
from django.views.static import serve as django_serve

from database_files.models import File


@cache_control(max_age=86400)
def serve(request, name):
    """
    Retrieves the file from the database.
    """
    f = get_object_or_404(File, name=name)
    f.dump()
    mimetype = mimetypes.guess_type(name)[0] or 'application/octet-stream'
    response = HttpResponse(f.content, content_type=mimetype)
    response['Content-Length'] = f.size
    return response


def serve_mixed(request, *args, **kwargs):
    """
    First attempts to serve the file from the filesystem,
    then tries the database.
    """
    name = kwargs.get('name') or kwargs.get('path')
    document_root = kwargs.get('document_root')
    document_root = document_root or settings.MEDIA_ROOT
    try:
        # First attempt to serve from filesystem.
        return django_serve(request, name, document_root)
    except Http404:
        # Then try serving from database.
        return serve(request, name)
2.0625
2
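A hedged urlconf sketch routing media requests through serve_mixed; the URL prefix and the modern re_path import are assumptions (an older project would use patterns()/url() instead):

from django.urls import re_path

from database_files.views import serve_mixed

urlpatterns = [
    # Try MEDIA_ROOT on disk first, then fall back to the database copy.
    re_path(r'^media/(?P<name>.+)$', serve_mixed),
]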
NoiseFiltersPy/Injector.py
TVect/NoiseFiltersPy
6
4631
<reponame>TVect/NoiseFiltersPy<gh_stars>1-10
import numpy as np
import pandas as pd

from abc import ABC


class Injector(ABC):
    """Base class for the injectors of artificial noise.

    Attributes
    ----------
    rem_indx : :obj:`List`
        Removed indexes (rows) from the dataset after the filtering.
    parameters : :obj:`Dict`
        Parameters used to define the behaviour of the filter.
    clean_data : :obj:`Sequence`
        Filtered independent attributes(X) of the dataset.
    clean_classes : :obj:`Sequence`
        Filtered target attributes(y) of the dataset.
    """

    def __init__(self, attributes, labels, rate: float = 0.1) -> None:
        self._new_noise = []
        if not isinstance(attributes, pd.DataFrame):
            self._attrs = pd.DataFrame(attributes)
        else:
            self._attrs = attributes
        if not isinstance(labels, pd.DataFrame):
            self._labels = pd.DataFrame(labels)
        else:
            self._labels = labels
        self._rate = rate
        self.verify()
        self._num_noise = int(self._rate * self._attrs.shape[0])
        self._label_types = set(self.labels[0].unique())

    @property
    def labels(self):
        return self._labels

    @property
    def noise_indx(self):
        return self._new_noise

    def verify(self) -> None:
        if min(self._labels.value_counts()) < 2:
            raise ValueError("Number of examples in the minority class must be >= 2.")
        if self._attrs.shape[0] != self.labels.shape[0]:
            raise ValueError("Attributes and classes must have the same size.")
        if self._rate < 0 or self._rate > 1:
            raise ValueError("Noise rate must be between 0 and 1.")

    def _gen_random(self, seed: int = None):
        """Flip each label indexed by `_new_noise` to a randomly chosen other class.

        Args:
            seed (int, optional): Seed for the random generator. Defaults to None.
        """
        rng = np.random.default_rng(seed)
        for example in self._new_noise:
            self._labels.iloc[example] = rng.choice(
                list(self._label_types - set(self._labels.iloc[example])))
3.0625
3
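A minimal sketch of the label-flipping step, using the base class directly (it declares no abstract methods, so it is instantiable); in the library a concrete injector subclass would normally choose the _new_noise indexes itself:

import pandas as pd

X = pd.DataFrame({"x": range(10)})
y = pd.DataFrame([0] * 5 + [1] * 5)

inj = Injector(X, y, rate=0.2)
inj._new_noise = [0, 7]   # rows whose labels get corrupted (usually set by a subclass)
inj._gen_random(seed=1)   # each chosen label flips to a different class
print(inj.labels.loc[[0, 7]])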
application/mod_user/forms.py
hackBCA/hackbcafour
2
4632
<filename>application/mod_user/forms.py
from wtforms import Form, TextField, PasswordField, SelectField, TextAreaField, BooleanField, validators, ValidationError, RadioField
import re

# Raw string avoids invalid-escape warnings on the regex backslashes.
phone_regex = r"(\+\d+-?)?((\(?\d{3}\)?)|(\d{3}))-?\d{3}-?\d{4}$"

gender_choices = [
    ("", "Gender"),
    ("male", "Male"),
    ("female", "Female"),
    ("other", "Other"),
    ("rns", "Rather Not Say")]

beginner_choices = [
    ("", "Are you a beginner?"),
    ("yes", "Yes"),
    ("no", "No")]

ethnicity_choices = [
    ("", "Ethnicity"),
    ("white", "White"),
    ("african_american", "African American"),
    ("asian_pacific", "Asian or Pacific Islander"),
    ("american_indian_alaskan_native", "American Indian or Alaskan Native"),
    ("multiracial", "Multiracial"),
    ("hispanic", "Hispanic origin"),
    ("other", "Other"),
    ("rns", "Rather Not Say")]

num_hackathons_choices = [
    ("", "How many hackathons have you been to?"),
    ("0", "0"), ("1", "1"), ("2", "2"), ("3", "3"), ("4", "4"), ("5", "5+")]

num_hackathons_choices_mentor = [
    ("", "How many hackathons have you mentored at?"),
    ("0", "0"), ("1", "1"), ("2", "2"), ("3", "3"), ("4", "4"), ("5", "5+")]

grade_choices = [
    ("", "What grade are you in?"),
    ("9", "9th"), ("10", "10th"), ("11", "11th"), ("12", "12th")]

shirt_sizes = [
    ("", "What is your shirt size?"),
    ("XS", "Extra Small"), ("S", "Small"), ("M", "Medium"), ("L", "Large"), ("XL", "Extra Large")]

type_account_choices = [
    ("hacker", "Hacker"),
    ("mentor", "Mentor")]

free_response1_prompt = "Why do you want to come to hackBCA?"
free_response1_prompt_mentor = "Please list languages/frameworks/technologies that you would like to mentor students in."
free_response2_prompt_mentor = "Would you like to run a workshop? If so, please briefly describe your ideas."


class HackerRegistrationForm(Form):
    email = TextField("Email", [
        validators.Required(message = "Enter an email."),
        validators.Email(message = "Invalid email address.")
    ], render_kw={"class": 'text'}, description = "Email")
    first_name = TextField("First Name", [
        validators.Required(message = "You must enter a first name.")
    ], render_kw={"class": 'text'}, description = "First Name")
    last_name = TextField("Last Name", [
        validators.Required(message = "You must enter a last name.")
    ], render_kw={"class": 'text'}, description = "Last Name")
    school = TextField("School Name", [
        validators.Required(message = "Enter your school's name.")
    ], render_kw={"class": 'text'}, description = "School Name")
    gender = SelectField("Gender", [validators.Required(message = "You must select an option.")], choices = gender_choices, render_kw={"class": 'text'}, description = "Gender")
    beginner = SelectField("Are you a beginner?", [validators.Required(message = "You must select an option.")], choices = beginner_choices, render_kw={"class": 'text'}, description = "Are you a beginner?")
    ethnicity = SelectField("Ethnicity", [validators.Required(message = "You must select an option.")], choices = ethnicity_choices, render_kw={"class": 'text'}, description = "Ethnicity")
    grade = SelectField("Grade", [validators.Required(message = "You must select an option.")], choices = grade_choices, render_kw={"class": 'text'}, description = "Grade")
    age = TextField("Age", [
        validators.Required(message = "Enter your age")
    ], render_kw={"class": 'text'}, description = "Age")
    num_hackathons = SelectField("How many hackathons have you attended?", [validators.Required(message = "You must select an option.")], choices = num_hackathons_choices, render_kw={"class": 'text'}, description = "How many hackathons have you attended?")
    free_response1 = TextAreaField(free_response1_prompt, [
        validators.Required(message = "You must answer this question."),
        validators.Length(max = 1500, message = "Response must be less than 1500 characters long.")
    ], render_kw={"class": 'text'}, description = "1500 characters maximum.")
    link1 = TextField("Link #1", [
        validators.optional(),
        validators.URL(message = "Invalid URL.")
    ], render_kw={"class": 'text'}, description = "Link #1 (Optional)")
    link2 = TextField("Link #2", [
        validators.optional(),
        validators.URL(message = "Invalid URL.")
    ], render_kw={"class": 'text'}, description = "Link #2 (Optional)")
    link3 = TextField("Link #3", [
        validators.optional(),
        validators.URL(message = "Invalid URL.")
    ], render_kw={"class": 'text'}, description = "Link #3 (Optional)")
    password = PasswordField("Password", [
        validators.Required(message = "You must enter a password."),
        validators.Length(min = 8, message = "Password must be at least 8 characters.")
    ], render_kw={"class": 'text'}, description = "Password")
    confirm_password = PasswordField("Confirm Password", render_kw={"class": 'text'}, description = "Confirm Password")
    mlh_coc = BooleanField("I agree", [
        validators.Required(message = "Please read and agree to the MLH Code of Conduct.")
    ], description = "I have read & agree to the MLH Code of Conduct.", default = False)
    mlh_terms = BooleanField("I agree", [
        validators.Required(message = "Please read and agree to the MLH Terms and Conditions.")
    ], description = "I agree to the MLH Contest Terms and Conditions and the MLH Privacy Policy.", default = False)

    def validate_confirm_password(form, field):
        password = form['password'].data
        if len(password) >= 8 and password != field.data:
            raise ValidationError("Passwords must match.")

    def validate(self):
        # Man I love validators.URL
        links = ["link1", "link2", "link3"]
        originalValues = {}
        for link in links:
            # Temporarily prefix all links with http:// if they are missing it
            attr = getattr(self, link)
            val = attr.data
            originalValues[link] = val
            if re.match("^(http|https)://", val) is None:
                val = "http://" + val
            attr.data = val
            setattr(self, link, attr)
        rv = Form.validate(self)
        for link in links:
            # Revert link values back to actual values
            attr = getattr(self, link)
            attr.data = originalValues[link]
            setattr(self, link, attr)
        if not rv:
            return False
        return True

    def validate_other_gender(form, field):
        if form['gender'].data == 'other' and field.data == "":
            raise ValidationError("Enter your gender.")


class MentorRegistrationForm(Form):
    email = TextField("Email", [
        validators.Required(message = "Enter an email."),
        validators.Email(message = "Invalid email address.")
    ], render_kw={"class": 'text'}, description = "Email")
    first_name = TextField("First Name", [
        validators.Required(message = "You must enter a first name.")
    ], render_kw={"class": 'text'}, description = "First Name")
    last_name = TextField("Last Name", [
        validators.Required(message = "You must enter a last name.")
    ], render_kw={"class": 'text'}, description = "Last Name")
    school = TextField("Company/School Name", [
        validators.Required(message = "Enter your company/school's name.")
    ], render_kw={"class": 'text'}, description = "Company/School Name")
    phone = TextField("Phone Number", [
        validators.Required(message = "Enter your preferred contact number."),
        validators.Regexp(phone_regex, message = "Please enter a valid phone number.")
    ], render_kw={"class": 'text'}, description = "Phone Number")
    num_hackathons = SelectField("How many hackathons have you mentored at?", [validators.Required(message = "You must select an option.")], choices = num_hackathons_choices_mentor, render_kw={"class": 'text'}, description = "How many hackathons have you mentored at?")
    mentor_free_response1 = TextAreaField(free_response1_prompt_mentor, [
        validators.Length(max = 1500, message = "Response must be less than 1500 characters long.")
    ], render_kw={"class": 'text'}, description = "1500 characters maximum.")
    mentor_free_response2 = TextAreaField(free_response2_prompt_mentor, [
        validators.Length(max = 1500, message = "Response must be less than 1500 characters long.")
    ], render_kw={"class": 'text'}, description = "1500 characters maximum.")
    github_link = TextField("Github Link", [
        validators.optional(),
        validators.URL(message = "Invalid URL.")
    ], render_kw={"class": 'text'}, description = "Github Link (Optional)")
    linkedin_link = TextField("LinkedIn", [
        validators.optional(),
        validators.URL(message = "Invalid URL.")
    ], render_kw={"class": 'text'}, description = "LinkedIn Link (Optional)")
    site_link = TextField("Personal Site", [
        validators.optional(),
        validators.URL(message = "Invalid URL.")
    ], render_kw={"class": 'text'}, description = "Personal Site Link (Optional)")
    other_link = TextField("other", [
        validators.optional(),
        validators.URL(message = "Invalid URL.")
    ], render_kw={"class": 'text'}, description = "Other Link (Optional)")
    password = PasswordField("Password", [
        validators.Required(message = "You must enter a password."),
        validators.Length(min = 8, message = "Password must be at least 8 characters.")
    ], render_kw={"class": 'text'}, description = "Password")
    confirm_password = PasswordField("<PASSWORD> Password", render_kw={"class": 'text'}, description = "Confirm Password")
    mlh_coc = BooleanField("I agree", [
        validators.Required(message = "Please read and agree to the MLH Code of Conduct.")
    ], description = "I have read & agree to the MLH Code of Conduct.", default = False)
    mlh_terms = BooleanField("I agree", [
        validators.Required(message = "Please read and agree to the MLH Terms and Conditions.")
    ], description = "I agree to the MLH Contest Terms and Conditions and the MLH Privacy Policy.", default = False)

    def validate(self):
        links = ["github_link", "linkedin_link", "site_link", "other_link"]
        originalValues = {}
        for link in links:
            # Temporarily prefix all links with http:// if they are missing it
            attr = getattr(self, link)
            val = attr.data
            originalValues[link] = val
            if re.match("^(http|https)://", val) is None:
                val = "http://" + val
            attr.data = val
            setattr(self, link, attr)
        rv = Form.validate(self)
        for link in links:
            # Revert link values back to actual values
            attr = getattr(self, link)
            attr.data = originalValues[link]
            setattr(self, link, attr)
        if not rv:
            return False
        return True


class LoginForm(Form):
    email = TextField("Email", [
        validators.Required(message = "Enter an email."),
        validators.Email(message = "Invalid email address.")
    ], render_kw={"class": 'text'}, description = "Email")
    password = PasswordField("Password", [], render_kw={"class": 'text'}, description = "Password")


class EmailForm(Form):
    email = TextField("Email", [
        validators.Required(message = "Enter an email."),
        validators.Email(message = "Invalid email address.")
    ], render_kw={"class": 'text'}, description = "Email")


class RecoverForm(Form):
    password = PasswordField("Password", [
        validators.Required(message = "You must enter a password."),
        validators.Length(min = 8, message = "Password must be at least 8 characters.")
    ], render_kw={"class": 'text'}, description = "Password")
    confirm_password = PasswordField("Confirm Password", render_kw={"class": 'text'}, description = "Confirm Password")

    def validate_confirm_password(form, field):
        password = form['password'].data
        if len(password) >= 8 and password != field.data:
            raise ValidationError("Passwords must match.")


class ChangeNameForm(Form):
    first_name = TextField("First Name", [
        validators.Required(message = "You must enter a first name.")
    ], render_kw={"class": 'text'}, description = "First Name")
    last_name = TextField("Last Name", [
        validators.Required(message = "You must enter a last name.")
    ], render_kw={"class": 'text'}, description = "Last Name")


class ChangePasswordForm(Form):
    password = PasswordField("Password", [
        validators.Required(message = "You must enter your current password."),
        validators.Length(min = 8, message = "Password must be at least 8 characters.")
    ], render_kw={"class": 'text'}, description = "Current Password")
    new_password = PasswordField("New Password", [
        validators.Required(message = "You must choose a new password."),
        validators.Length(min = 8, message = "Password must be at least 8 characters.")
    ], render_kw={"class": 'text'}, description = "New Password")
    confirm_password = PasswordField("<PASSWORD> Password", render_kw={"class": 'text'}, description = "Confirm New Password")

    def validate_confirm_password(form, field):
        password = form['new_password'].data
        if len(password) >= 8 and password != field.data:
            raise ValidationError("Passwords must match.")


attending_choices = [
    ("Attending", "Yes, I will!"),
    ("Not Attending", "No, I won't.")]


class RsvpForm(Form):
    # attending = RadioField("Are you attending hackBCA III?", [validators.Required(message = "Please tell us if you are attending hackBCA III.")], render_kw={"class": 'text'}, choices = attending_choices, description = "Will you be at hackBCA?")
    # t_shirt_size = SelectField("What is your shirt size?", [validators.Required(message = "You must select an option.")], choices = shirt_sizes, description = "What is your shirt size?")
    dietary_restrictions = TextField("Dietary Restrictions", [
        validators.optional(),
    ], render_kw={"class": 'text'}, description = "Do you have any dietary restrictions?")
    guardian_name = TextField("Guardian Full Name", [
        validators.Required(message = "You must enter a name.")
    ], render_kw={"class": 'text'}, description = "Guardian Name")
    guardian_home_num = TextField("Guardian Home Number", [
        validators.Required(message = "Enter your guardian's home number."),
        validators.Regexp(phone_regex, message = "Please enter a valid phone number.")
    ], render_kw={"class": 'text'}, description = "Guardian Home Number")
    guardian_cell_num = TextField("Guardian Cellphone", [
        validators.Required(message = "Enter your guardian's cellphone number."),
        validators.Regexp(phone_regex, message = "Please enter a valid phone number.")
    ], render_kw={"class": 'text'}, description = "Guardian Cellphone")
    guardian_email = TextField("Guardian Email", [
        validators.Required(message = "Enter an email."),
        validators.Email(message = "Invalid email address.")
    ], render_kw={"class": 'text'}, description = "Guardian Email")
    emergency_name = TextField("Emergency Contact Full Name", [
        validators.Required(message = "You must enter a name.")
    ], render_kw={"class": 'text'}, description = "Emergency Contact Name")
    emergency_home_num = TextField("Emergency Contact Home Number", [
        validators.Required(message = "Enter your emergency contact's home number."),
        validators.Regexp(phone_regex, message = "Please enter a valid phone number.")
    ], render_kw={"class": 'text'}, description = "Emergency Contact Home Number")
    emergency_cell_num = TextField("Emergency Contact Cellphone", [
        validators.Required(message = "Enter your emergency contact's cellphone."),
        validators.Regexp(phone_regex, message = "Please enter a valid phone number.")
    ], render_kw={"class": 'text'}, description = "Emergency Contact Cellphone")
    emergency_email = TextField("Emergency Contact Email", [
        validators.Required(message = "Enter an email."),
        validators.Email(message = "Invalid email address.")
    ], render_kw={"class": 'text'}, description = "Emergency Contact Email")
    school_address = TextField("School Address", [
        validators.Required(message = "Enter your school address."),
    ], render_kw={"class": 'text'}, description = "School Address")
    school_town = TextField("School Town", [
        validators.Required(message = "Enter your school town."),
    ], render_kw={"class": 'text'}, description = "School Town")
    school_state = TextField("School State", [
        validators.Required(message = "Enter your school state."),
    ], render_kw={"class": 'text'}, description = "School State")
    school_phone_num = TextField("School Phone Number", [
        validators.Required(message = "Enter school's home number."),
        validators.Regexp(phone_regex, message = "Please enter a valid phone number.")
    ], render_kw={"class": 'text'}, description = "School Phone Number")
    school_principal_name = TextField("Principal Name", [
        validators.Required(message = "You must enter a name."),
    ], render_kw={"class": 'text'}, description = "Principal Name")
    school_principal_email = TextField("Principal Email", [
        validators.Required(message = "Enter an email."),
        validators.Email(message = "Invalid email address.")
    ], render_kw={"class": 'text'}, description = "Principal Email")
    cs_teacher_name = TextField("CS Teacher Name", [
        validators.optional(),
    ], render_kw={"class": 'text'}, description = "CS Teacher Name (if applicable)")
    cs_teacher_email = TextField("CS Teacher Email", [
        validators.optional(),
        validators.Email(message = "Invalid email address.")
    ], render_kw={"class": 'text'}, description = "CS Teacher Email (if applicable)")

# class MentorRsvpForm(Form):
#     attending = RadioField("Are you attending hackBCA III?", [validators.Required(message = "Please tell us if you are attending hackBCA III.")], choices = attending_choices)
#     phone = TextField("Phone Number", [
#         validators.Required(message = "Confirm your preferred contact number."),
#         validators.Regexp(phone_regex, message = "Please enter a valid phone number.")
#     ], description = "Phone Number Confirmation")
#     t_shirt_size = SelectField("What is your shirt size?", [validators.Required(message = "You must select an option.")], choices = shirt_sizes, description = "What is your shirt size?")
#     food_allergies = TextAreaField("Allergies", [
#         validators.optional(),
#     ], description = "Do you have any allergies?")
#     medical_information = TextAreaField("Medical Information", [
#         validators.optional(),
#     ], description = "Are there any other medical issues that we should know about? (ex. Other allergies, illnesses, etc.)")
#     hackbca_rules = BooleanField("I agree", [
#         validators.Required(message = "Please read and agree to our rules.")
#     ], description = "I agree to the rules set forth by hackBCA.", default = False)
#     mlh_terms = BooleanField("I agree", [
#         validators.Required(message = "Please read and agree to the MLH Code of Conduct.")
#     ], description = "I agree to the MLH Code of Conduct.", default = False)
2.671875
3
src/app.py
gh640/coding-challenge
0
4633
# coding: utf-8
'''Provides the front controller.'''
from math import ceil
import os

from flask import json
from flask import Flask
from flask import request
from flask import send_from_directory
from flask import render_template

# from json_loader import load_locations
# from json_loader import prepare_locations
from models import Location

# Number of filming locations displayed per page
LOCATION_ITEMS_PER_PAGE = 20

app = Flask(__name__)
app.config['GOOGLE_API_KEY'] = os.environ['GOOGLE_API_KEY']
app.config['ROOT'] = (app.config['APPLICATION_ROOT']
                      if app.config['APPLICATION_ROOT'] else '')


@app.route('/static/<path:path>')
def send_js(path):
    return send_from_directory('static', path)


@app.route('/')
def index():
    return render_template('index.html')


@app.route('/location')
def location():
    req_title = request.args.get('title', None)
    try:
        req_page = int(request.args.get('page', 1))
    except ValueError:
        req_page = 1

    query = Location.selectbase()
    if req_title:
        query = query.where(Location.title ** '%{}%'.format(req_title))

    total_items = query.count()
    total_pages = ceil(total_items / LOCATION_ITEMS_PER_PAGE)
    current_page = req_page if req_page <= total_pages else total_pages
    query = query.paginate(current_page, LOCATION_ITEMS_PER_PAGE)
    locations = [l.as_dict() for l in query]

    return json.jsonify({
        'meta': {
            'pager_data': {
                'totalItems': total_items,
                'totalPages': total_pages,
                'currentItems': len(locations),
                'currentPage': current_page,
                'itemsPerPage': LOCATION_ITEMS_PER_PAGE,
            },
        },
        'entities': {
            'locations': locations,
        },
    })


@app.route('/movie')
def movie():
    req_title = request.args.get('title', None)
    if not req_title:
        return json.jsonify([])

    query = (Location.select(Location.title)
             .distinct()
             .where(Location.title ** '%{}%'.format(req_title)))
    movies = [{'id': index, 'title': l.title} for index, l in enumerate(query)]
    return json.jsonify(movies)
2.515625
3
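The /location endpoint above can be exercised without a running server through Flask's built-in test client. A minimal sketch, assuming the module is importable as `app`, that GOOGLE_API_KEY is set (the module-level config reads it from the environment), and that the peewee-backed `Location` table contains data — none of which the record itself provides:

import os
os.environ.setdefault('GOOGLE_API_KEY', 'dummy-key')  # required before importing the module

from app import app  # assumed module name, matching src/app.py

client = app.test_client()
resp = client.get('/location?title=tokyo&page=1')
data = resp.get_json()
print(data['meta']['pager_data'])            # totalItems, totalPages, currentPage, ...
print(len(data['entities']['locations']))    # at most LOCATION_ITEMS_PER_PAGE entries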
corkus/objects/dungeon.py
MrBartusek/corkus.py
5
4634
<filename>corkus/objects/dungeon.py
from __future__ import annotations
from .base import CorkusBase
from enum import Enum

class DungeonType(Enum):
    REMOVED = "REMOVED"
    """Dungeons that were removed from the game in version ``1.14.1`` like ``Skeleton`` or ``Spider``"""

    REMOVED_MINI = "REMOVED_MINI"
    """Minidungeons that were reworked in version ``1.17`` like ``Ice`` or ``Ocean``"""

    STANDARD = "STANDARD"
    """Generic dungeons like ``Galleon's Graveyard`` or ``Fallen Factory``"""

    CORRUPTED = "CORRUPTED"
    """Harder variant of standard dungeons like ``Corrupted Decrepit Sewers`` or ``Corrupted Sand-Swept Tomb``"""

class Dungeon(CorkusBase):
    """Represents a `Dungeon <https://wynncraft.fandom.com/wiki/Dungeons>`_ completed by a :py:class:`Player`"""

    @property
    def name(self) -> str:
        """Name of the dungeon like ``Decrepit Sewers``, ``Galleon's Graveyard`` or ``Fallen Factory``."""
        return self._attributes.get("name", "")

    @property
    def type(self) -> DungeonType:
        """Type of the dungeon."""
        if self.name.startswith("Corrupted"):
            return DungeonType.CORRUPTED
        elif self.name in (
                "Zombie",
                "Animal",
                "Skeleton",
                "Spider",
                "Silverfish"):
            return DungeonType.REMOVED
        elif self.name in (
                "Jungle",
                "Ice",
                "Ocean"):
            return DungeonType.REMOVED_MINI
        elif self.name in (
                "Decrepit Sewers",
                "Infested Pit",
                "Ice Barrows",
                "Lost Sanctuary",
                "Sand-Swept Tomb",
                "Underworld Crypt",
                "Undergrowth Ruins",
                "Eldritch Outlook",
                "Galleon's Graveyard",
                "Fallen Factory"):
            return DungeonType.STANDARD
        else:
            raise ValueError(f"Invalid dungeon: {self.name}")

    @property
    def completed(self) -> int:
        """Total runs completed by the player. Failed runs are not counted."""
        return self._attributes.get("completed", 0)

    def __repr__(self) -> str:
        return f"<Dungeon name={self.name!r} completed={self.completed}>"
2.953125
3
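Since `Dungeon.type` is pure name-based classification, the decision logic can be exercised without the Wynncraft API. A standalone re-statement for illustration only (the real property lives on the class and also raises `ValueError` on unknown names):

def classify_dungeon(name: str) -> str:
    # Mirrors Dungeon.type: prefix check first, then the fixed name sets.
    if name.startswith("Corrupted"):
        return "CORRUPTED"
    if name in ("Zombie", "Animal", "Skeleton", "Spider", "Silverfish"):
        return "REMOVED"
    if name in ("Jungle", "Ice", "Ocean"):
        return "REMOVED_MINI"
    return "STANDARD"  # simplified: the class only accepts its known standard names

assert classify_dungeon("Corrupted Decrepit Sewers") == "CORRUPTED"
assert classify_dungeon("Ice") == "REMOVED_MINI"
assert classify_dungeon("Galleon's Graveyard") == "STANDARD"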
src/brisk.py
chaoer/brisk-descriptor
18
4635
<filename>src/brisk.py
import pybrisk


class Brisk:
    def __init__(self, thresh=60, octaves=4):
        self.thresh = thresh
        self.octaves = octaves
        self.descriptor_extractor = pybrisk.create()

    def __del__(self):
        pybrisk.destroy(self.descriptor_extractor)

    def detect(self, img):
        return pybrisk.detect(self.descriptor_extractor, img,
                              self.thresh, self.octaves)

    def compute(self, img, keypoints):
        return pybrisk.compute(self.descriptor_extractor, img, keypoints)
2.6875
3
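The record only defines the wrapper; a usage sketch might look like the following. It assumes the compiled `pybrisk` extension is importable and that OpenCV (`cv2`) is available to load a grayscale image as a numpy array — both assumptions, since neither appears in the record.

import cv2  # assumed available for image I/O
from brisk import Brisk  # the wrapper defined above

# Load a grayscale test image; pybrisk is assumed to accept a numpy array.
img = cv2.imread('test.jpg', cv2.IMREAD_GRAYSCALE)

brisk = Brisk(thresh=60, octaves=4)
keypoints = brisk.detect(img)                 # keypoint detection at the configured threshold
descriptors = brisk.compute(img, keypoints)   # binary BRISK descriptors for those keypoints
print(len(keypoints))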
hale_hub/outlet_interface.py
tantinlala/hale-hub
0
4636
import serial
import serial.tools.list_ports

from hale_hub.constants import STARTING_OUTLET_COMMAND, SERIAL_BAUD_RATE, SERIAL_TIMEOUT
from hale_hub.ifttt_logger import send_ifttt_log


class _Outlet:
    def __init__(self, name):
        self.state = 0
        self.name = name


class _OutletInterface:
    def __init__(self):
        self.outlets = [_Outlet('Outlet 0'), _Outlet('Outlet 1'), _Outlet('Outlet 2')]
        self.serial_interface = None
        self.serial_interface_string = None

    def set_outlet_name(self, name, outlet_id):
        if outlet_id < len(self.outlets):
            self.outlets[outlet_id].name = name

    def set_serial_interface(self, serial_interface_string):
        try:
            print('Setting serial interface with description: {}'.format(serial_interface_string))
            self.serial_interface_string = serial_interface_string
            ports = [p.device for p in serial.tools.list_ports.comports()
                     if self.serial_interface_string in p.description]
            self.serial_interface = serial.Serial(ports[0], SERIAL_BAUD_RATE, timeout=SERIAL_TIMEOUT)
        except IndexError:
            send_ifttt_log(__name__, 'No serial ports could be opened!')

    def _send_outlet_command(self, outlet_id, outlet_state):
        try:
            print('Changing outlet {0} to {1} state'.format(outlet_id, outlet_state))
            command = bytearray([STARTING_OUTLET_COMMAND + (outlet_id << 1) + outlet_state])
            print('Writing {0} to serial'.format(command))
            self.serial_interface.write(command)
        except (serial.SerialException, AttributeError):
            send_ifttt_log(__name__, 'No serial bytes could be written')
            # pyserial exposes is_open as a property, not a method; also guard
            # against the interface never having been opened before closing it.
            if self.serial_interface is not None and self.serial_interface.is_open:
                self.serial_interface.close()
            self.set_serial_interface(self.serial_interface_string)

    def toggle_outlet(self, outlet_id):
        if outlet_id < len(self.outlets):
            self.outlets[outlet_id].state ^= 1
            self._send_outlet_command(outlet_id, self.outlets[outlet_id].state)

    def turn_on_outlet(self, outlet_id):
        if outlet_id < len(self.outlets):
            self.outlets[outlet_id].state = 1
            self._send_outlet_command(outlet_id, self.outlets[outlet_id].state)

    def turn_off_outlet(self, outlet_id):
        if outlet_id < len(self.outlets):
            self.outlets[outlet_id].state = 0
            self._send_outlet_command(outlet_id, self.outlets[outlet_id].state)

    def get_outlets(self):
        return self.outlets


_outlet_interface = _OutletInterface()
set_outlet_serial_interface = _outlet_interface.set_serial_interface
toggle_outlet = _outlet_interface.toggle_outlet
turn_on_outlet = _outlet_interface.turn_on_outlet
turn_off_outlet = _outlet_interface.turn_off_outlet
get_outlets = _outlet_interface.get_outlets
set_outlet_name = _outlet_interface.set_outlet_name
2.421875
2
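The wire protocol above packs the outlet id and the desired state into a single command byte. A minimal sketch of that encoding, assuming a hypothetical STARTING_OUTLET_COMMAND of 0x40 (the real constant lives in hale_hub.constants and is not shown in the record):

STARTING_OUTLET_COMMAND = 0x40  # hypothetical value; not part of the record

def encode(outlet_id, outlet_state):
    # Bit 0 carries the on/off state, bits 1+ carry the outlet id,
    # offset by the base command byte.
    return bytearray([STARTING_OUTLET_COMMAND + (outlet_id << 1) + outlet_state])

print(encode(2, 1))  # outlet 2 on  -> bytearray(b'E'): 0x40 + 4 + 1 = 0x45
print(encode(0, 0))  # outlet 0 off -> bytearray(b'@'): 0x40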
helix/core.py
carbonscott/helix
0
4637
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import numpy as np

def remove_nan(xyzs):
    return xyzs[~np.isnan(xyzs).any(axis = 1)]

def measure_twocores(core_xyz_ref, core_xyz_tar):
    ''' Measure the following aspects of two helical cores.
    - Interhelical distance vector between the centers.
    - Interhelical angle (0-90 degree)
    '''
    # Obtain the centers...
    center_ref = np.nanmean(core_xyz_ref, axis = 0)
    center_tar = np.nanmean(core_xyz_tar, axis = 0)

    # Construct the interhelical distance vector...
    ih_dvec = center_tar - center_ref

    # Calculate the length of interhelical distance vector...
    norm_ih_dvec = np.linalg.norm(ih_dvec)

    # Obtain the helical core vectors...
    core_xyz_ref_nonan = remove_nan(core_xyz_ref)
    core_xyz_tar_nonan = remove_nan(core_xyz_tar)
    core_vec_ref = core_xyz_ref_nonan[-1] - core_xyz_ref_nonan[0]
    core_vec_tar = core_xyz_tar_nonan[-1] - core_xyz_tar_nonan[0]

    # Calculate the interhelical angle...
    core_vec_ref_unit = core_vec_ref / np.linalg.norm(core_vec_ref)
    core_vec_tar_unit = core_vec_tar / np.linalg.norm(core_vec_tar)
    ih_ang = np.arccos( np.dot(core_vec_ref_unit, core_vec_tar_unit) )

    return ih_dvec, norm_ih_dvec, core_vec_ref_unit, core_vec_tar_unit, ih_ang

def calc_interangle(core_xyz_ref, core_xyz_tar):
    ''' Measure the following aspects of two helical cores.
    - Interhelical angle (0-90 degree)
    '''
    # Obtain the helical core vectors...
    core_xyz_ref_nonan = remove_nan(core_xyz_ref)
    core_xyz_tar_nonan = remove_nan(core_xyz_tar)
    core_vec_ref = core_xyz_ref_nonan[-1] - core_xyz_ref_nonan[0]
    core_vec_tar = core_xyz_tar_nonan[-1] - core_xyz_tar_nonan[0]

    # Calculate the interhelical angle...
    core_vec_ref_unit = core_vec_ref / np.linalg.norm(core_vec_ref)
    core_vec_tar_unit = core_vec_tar / np.linalg.norm(core_vec_tar)
    inter_angle = np.arccos( np.dot(core_vec_ref_unit, core_vec_tar_unit) )

    # Fold the angle into the 0-90 degree range.
    if inter_angle > np.pi / 2.0:
        inter_angle = np.pi - inter_angle

    return inter_angle

def calc_interdist(core_xyz_ref, core_xyz_tar):
    ''' Measure the following aspects of two helical cores.
    - Interhelical distance vector between the centers.

    Refers to http://geomalgorithms.com/a07-_distance.html for the method.
    Q is ref, P is tar.
    '''
    # Obtain the helical core vectors...
    core_xyz_ref_nonan = remove_nan(core_xyz_ref)
    core_xyz_tar_nonan = remove_nan(core_xyz_tar)
    core_vec_ref = core_xyz_ref_nonan[-1] - core_xyz_ref_nonan[0]
    core_vec_tar = core_xyz_tar_nonan[-1] - core_xyz_tar_nonan[0]

    # Obtain the starting point...
    q0 = core_xyz_ref_nonan[0]
    p0 = core_xyz_tar_nonan[0]
    w0 = p0 - q0

    # Obtain the directional vector with magnitude...
    v = core_vec_ref
    u = core_vec_tar

    # Math part...
    a = np.dot(u, u)
    b = np.dot(u, v)
    c = np.dot(v, v)
    d = np.dot(u, w0)
    e = np.dot(v, w0)
    de = a * c - b * b  # Denominator
    if de == 0:
        sc, tc = 0, d / b
    else:
        sc, tc = (b * e - c * d) / de, (a * e - b * d) / de

    # Calculate distance...
    wc = w0 + sc * u - tc * v
    inter_dist = np.linalg.norm(wc)

    return inter_dist
2.6875
3
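A self-contained smoke test for the geometry helpers above, using two short synthetic "cores" (straight segments, which is enough to exercise the math; the coordinates and the `helix.core` import path are assumptions made for illustration):

import numpy as np
from helix.core import calc_interangle, calc_interdist  # assumed import path

# Two 4-point cores: one along x at z=0, one along y at z=5.
core_ref = np.array([[0.0, 0.0, 0.0], [1, 0, 0], [2, 0, 0], [3, 0, 0]])
core_tar = np.array([[0.0, 0.0, 5.0], [0, 1, 5], [0, 2, 5], [0, 3, 5]])

print(np.degrees(calc_interangle(core_ref, core_tar)))  # 90.0: perpendicular axes
print(calc_interdist(core_ref, core_tar))               # 5.0: closest approach along z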
matury/2011/6.py
bartekpacia/informatyka-frycz
2
4638
<reponame>bartekpacia/informatyka-frycz<gh_stars>1-10
from typing import List

with open("dane/liczby.txt") as f:
    nums: List[int] = []
    nums_9_chars: List[int] = []
    for line in f:
        sline = line.strip()
        num = int(sline, 2)
        if len(sline) == 9:
            nums_9_chars.append(num)
        nums.append(num)

count_even = 0
max_num = 0
for num in nums:
    if num % 2 == 0:
        count_even += 1
    if num > max_num:
        max_num = num

print(f"{count_even=}")
print(f"max_num(10): {max_num}, max_num(2): {bin(max_num)[2:]}")

sum_9_chars = 0
for num in nums_9_chars:
    sum_9_chars += num

print(f"count of numbers with 9 digits: {len(nums_9_chars)}, their sum: {bin(sum_9_chars)[2:]}")
3.265625
3
Python/fibs.py
familug/FAMILUG
5
4639
# Python 2 script (print statements).

def fib(n):
    if n < 2:
        return n
    else:
        return fib(n-1) + fib(n-2)

def fib_fast(n):
    # Closed form (Binet's formula): fib(n) = (phi**n - psi**n) / sqrt(5),
    # with phi = (1 + sqrt(5)) / 2; the 2**n divisor folds the halving in.
    from math import sqrt
    s5 = sqrt(5)
    x = (1 + s5) ** n
    y = (1 - s5) ** n
    return int((x - y)/(s5 * 2**n))

def print_fib(n):
    for i in range(n):
        print fib(i),
    print
    for i in range(n):
        print fib_fast(i),

def print_fib2(n):
    fibs = [0, 1]
    a, b = 0, 1
    if n == 0:
        print a
    elif n == 1:
        print a, b
    else:
        print 0, 1,
        for i in range(2, n):
            a, b = b, a + b
            print b,

if __name__ == "__main__":
    print_fib(10)
    print
    print_fib2(10)
3.75
4
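A quick cross-check of the closed form against the recursion, written as Python 3 (unlike the Python 2 record above) and using round() instead of the record's int() truncation, since rounding to the nearest integer is the safer way to absorb floating-point error:

from math import sqrt

def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

def fib_fast(n):
    # Binet's formula: fib(n) = (phi**n - psi**n) / sqrt(5)
    s5 = sqrt(5)
    return round(((1 + s5) ** n - (1 - s5) ** n) / (s5 * 2 ** n))

# The float error is far below 0.5 on this range, so both agree exactly.
assert all(fib(n) == fib_fast(n) for n in range(20))
print([fib_fast(n) for n in range(10)])  # [0, 1, 1, 2, 3, 5, 8, 13, 21, 34]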
src/model/ParseInput.py
slavi010/polyhash-2020
0
4640
<filename>src/model/ParseInput.py
import os
from typing import List

from src.model.Etape import Etape
from src.model.Grille import Grille
from src.model.ItemCase import ItemCase
from src.model.PointMontage import PointMontage
from src.model.Robot import Robot
from src.model.Tache import Tache


class ParseInput:
    """Parser that reads the input text file supplied by Google.

    Turns that file into data and classes we can work with.
    """

    grille: Grille

    def __init__(self):
        pass

    def parse(self, file_path: str) -> Grille:
        """Parse the Google file and return the corresponding Grille.

        :rtype: Grille
        """
        # Check that file_path is a file
        assert os.path.isfile(file_path)

        with open(file_path, 'r') as file:
            index: int = 0

            # Read all the lines of the file
            lines: List = file.readlines()

            # Turn the lines into lists of integers
            for index_line in range(len(lines)):
                lines[index_line] = lines[index_line].split(' ')
                for index_val in range(len(lines[index_line])):
                    lines[index_line][index_val] = int(lines[index_line][index_val])

            # Create a Grille instance
            grille = Grille(lines[0][0], lines[0][1])

            # Instantiate the matching number of robots in the grid
            for idx_robot in range(lines[0][2]):
                grille.robots.append(Robot())

            # Create the mount points and place them in the grid
            for idx_point_montage in range(lines[0][3]):
                index += 1
                grille.add_point_montage(PointMontage(lines[index][0], lines[index][1]))

            # Read the number of allowed clock steps
            grille.step_simulation = lines[0][5]

            # Read the data of every task and instantiate the matching Tache
            # objects in the grid. If a step (assembly point) does not exist
            # yet at the given coordinates, instantiate it, put it in the grid
            # and associate it with the task.
            for index_tache in range(lines[0][4]):
                index += 1
                tache_tampon: Tache = Tache(lines[index][0], index_tache)
                index += 1

                g_x = 0
                g_y = 0
                for index_etape in range(lines[index-1][1]):
                    # Add the steps
                    etape = Etape(lines[index][index_etape*2+0], lines[index][index_etape*2+1])
                    tache_tampon.add_etape(etape)
                    g_x += (etape.x - g_x)/len(tache_tampon.etapes)
                    g_y += (etape.y - g_y)/len(tache_tampon.etapes)

                # Store the derived parameters on the task
                tache_tampon.centre_gravite = ItemCase(int(g_x), int(g_y))
                tache_tampon.distance_centre_gravite = max(
                    tache_tampon.etapes,
                    key=lambda etape: tache_tampon.centre_gravite.distance(etape)) \
                    .distance(tache_tampon.centre_gravite)

                grille.add_tache(tache_tampon)

                # Accumulate the approximate distance and surface between steps
                for etape_from, etape_to in zip(tache_tampon.etapes[0::1], tache_tampon.etapes[1::1]):
                    tache_tampon.distance += etape_from.distance(etape_to)
                    tache_tampon.surface += etape_from.distance(etape_to)

        return grille
3.015625
3
test.py
UnKafkaesque/Sentiment-Analysis
0
4641
<filename>test.py<gh_stars>0
import os
import sys
import time
import traceback

import numpy as np

import project1_Copy as p1

verbose = False

def green(s):
    return '\033[1;32m%s\033[m' % s

def yellow(s):
    return '\033[1;33m%s\033[m' % s

def red(s):
    return '\033[1;31m%s\033[m' % s

def log(*m):
    print(" ".join(map(str, m)))

def log_exit(*m):
    log(red("ERROR:"), *m)
    exit(1)

def check_real(ex_name, f, exp_res, *args):
    try:
        res = f(*args)
    except NotImplementedError:
        log(red("FAIL"), ex_name, ": not implemented")
        return True
    if not np.isreal(res):
        log(red("FAIL"), ex_name, ": does not return a real number, type: ", type(res))
        return True
    if res != exp_res:
        log(red("FAIL"), ex_name, ": incorrect answer. Expected", exp_res, ", got: ", res)
        return True

def equals(x, y):
    if type(y) == np.ndarray:
        return (x == y).all()
    return x == y

def check_tuple(ex_name, f, exp_res, *args, **kwargs):
    try:
        res = f(*args, **kwargs)
    except NotImplementedError:
        log(red("FAIL"), ex_name, ": not implemented")
        return True
    if not type(res) == tuple:
        log(red("FAIL"), ex_name, ": does not return a tuple, type: ", type(res))
        return True
    if not len(res) == len(exp_res):
        log(red("FAIL"), ex_name, ": expected a tuple of size ", len(exp_res), " but got tuple of size", len(res))
        return True
    if not all(equals(x, y) for x, y in zip(res, exp_res)):
        log(red("FAIL"), ex_name, ": incorrect answer. Expected", exp_res, ", got: ", res)
        return True

def check_array(ex_name, f, exp_res, *args):
    try:
        res = f(*args)
    except NotImplementedError:
        log(red("FAIL"), ex_name, ": not implemented")
        return True
    if not type(res) == np.ndarray:
        log(red("FAIL"), ex_name, ": does not return a numpy array, type: ", type(res))
        return True
    if not len(res) == len(exp_res):
        log(red("FAIL"), ex_name, ": expected an array of shape ", exp_res.shape, " but got array of shape", res.shape)
        return True
    if not all(equals(x, y) for x, y in zip(res, exp_res)):
        log(red("FAIL"), ex_name, ": incorrect answer. Expected", exp_res, ", got: ", res)
        return True

def check_list(ex_name, f, exp_res, *args):
    try:
        res = f(*args)
    except NotImplementedError:
        log(red("FAIL"), ex_name, ": not implemented")
        return True
    if not type(res) == list:
        log(red("FAIL"), ex_name, ": does not return a list, type: ", type(res))
        return True
    if not len(res) == len(exp_res):
        log(red("FAIL"), ex_name, ": expected a list of size ", len(exp_res), " but got list of size", len(res))
        return True
    if not all(equals(x, y) for x, y in zip(res, exp_res)):
        log(red("FAIL"), ex_name, ": incorrect answer. Expected", exp_res, ", got: ", res)
        return True

def check_get_order():
    ex_name = "Get order"
    if check_list(ex_name, p1.get_order, [0], 1):
        log("You should revert `get_order` to its original implementation for this test to pass")
        return
    if check_list(ex_name, p1.get_order, [1, 0], 2):
        log("You should revert `get_order` to its original implementation for this test to pass")
        return
    log(green("PASS"), ex_name, "")

def check_hinge_loss_single():
    ex_name = "Hinge loss single"
    feature_vector = np.array([1, 2])
    label, theta, theta_0 = 1, np.array([-1, 1]), -0.2
    exp_res = 1 - 0.8
    if check_real(ex_name, p1.hinge_loss_single, exp_res, feature_vector, label, theta, theta_0):
        return
    log(green("PASS"), ex_name, "")

def check_hinge_loss_full():
    ex_name = "Hinge loss full"
    feature_vector = np.array([[1, 2], [1, 2]])
    label, theta, theta_0 = np.array([1, 1]), np.array([-1, 1]), -0.2
    exp_res = 1 - 0.8
    if check_real(ex_name, p1.hinge_loss_full, exp_res, feature_vector, label, theta, theta_0):
        return
    log(green("PASS"), ex_name, "")

def check_perceptron_single_update():
    ex_name = "Perceptron single update"
    feature_vector = np.array([1, 2])
    label, theta, theta_0 = 1, np.array([-1, 1]), -1.5
    exp_res = (np.array([0, 3]), -0.5)
    if check_tuple(ex_name, p1.perceptron_single_step_update, exp_res,
                   feature_vector, label, theta, theta_0):
        return
    feature_vector = np.array([1, 2])
    label, theta, theta_0 = 1, np.array([-1, 1]), -1
    exp_res = (np.array([0, 3]), 0)
    if check_tuple(ex_name + " (boundary case)", p1.perceptron_single_step_update, exp_res,
                   feature_vector, label, theta, theta_0):
        return
    log(green("PASS"), ex_name, "")

def check_perceptron():
    ex_name = "Perceptron"
    feature_matrix = np.array([[1, 2]])
    labels = np.array([1])
    T = 1
    exp_res = (np.array([1, 2]), 1)
    if check_tuple(ex_name, p1.perceptron, exp_res, feature_matrix, labels, T):
        return

    feature_matrix = np.array([[1, 2], [-1, 0]])
    labels = np.array([1, 1])
    T = 1
    exp_res = (np.array([0, 2]), 2)
    if check_tuple(ex_name, p1.perceptron, exp_res, feature_matrix, labels, T):
        return

    feature_matrix = np.array([[1, 2]])
    labels = np.array([1])
    T = 2
    exp_res = (np.array([1, 2]), 1)
    if check_tuple(ex_name, p1.perceptron, exp_res, feature_matrix, labels, T):
        return

    feature_matrix = np.array([[1, 2], [-1, 0]])
    labels = np.array([1, 1])
    T = 2
    exp_res = (np.array([0, 2]), 2)
    if check_tuple(ex_name, p1.perceptron, exp_res, feature_matrix, labels, T):
        return

    log(green("PASS"), ex_name, "")

def check_average_perceptron():
    ex_name = "Average perceptron"
    feature_matrix = np.array([[1, 2]])
    labels = np.array([1])
    T = 1
    exp_res = (np.array([1, 2]), 1)
    if check_tuple(ex_name, p1.average_perceptron, exp_res, feature_matrix, labels, T):
        return

    feature_matrix = np.array([[1, 2], [-1, 0]])
    labels = np.array([1, 1])
    T = 1
    exp_res = (np.array([-0.5, 1]), 1.5)
    if check_tuple(ex_name, p1.average_perceptron, exp_res, feature_matrix, labels, T):
        return

    feature_matrix = np.array([[1, 2]])
    labels = np.array([1])
    T = 2
    exp_res = (np.array([1, 2]), 1)
    if check_tuple(ex_name, p1.average_perceptron, exp_res, feature_matrix, labels, T):
        return

    feature_matrix = np.array([[1, 2], [-1, 0]])
    labels = np.array([1, 1])
    T = 2
    exp_res = (np.array([-0.25, 1.5]), 1.75)
    if check_tuple(ex_name, p1.average_perceptron, exp_res, feature_matrix, labels, T):
        return

    log(green("PASS"), ex_name, "")

def check_pegasos_single_update():
    ex_name = "Pegasos single update"
    feature_vector = np.array([1, 2])
    label, theta, theta_0 = 1, np.array([-1, 1]), -1.5
    L = 0.2
    eta = 0.1
    exp_res = (np.array([-0.88, 1.18]), -1.4)
    if check_tuple(ex_name, p1.pegasos_single_step_update, exp_res,
                   feature_vector, label, L, eta, theta, theta_0):
        return

    feature_vector = np.array([1, 1])
    label, theta, theta_0 = 1, np.array([-1, 1]), 1
    L = 0.2
    eta = 0.1
    exp_res = (np.array([-0.88, 1.08]), 1.1)
    if check_tuple(ex_name + " (boundary case)", p1.pegasos_single_step_update, exp_res,
                   feature_vector, label, L, eta, theta, theta_0):
        return

    feature_vector = np.array([1, 2])
    label, theta, theta_0 = 1, np.array([-1, 1]), -2
    L = 0.2
    eta = 0.1
    exp_res = (np.array([-0.88, 1.18]), -1.9)
    if check_tuple(ex_name, p1.pegasos_single_step_update, exp_res,
                   feature_vector, label, L, eta, theta, theta_0):
        return

    log(green("PASS"), ex_name, "")

def check_pegasos():
    ex_name = "Pegasos"
    feature_matrix = np.array([[1, 2]])
    labels = np.array([1])
    T = 1
    L = 0.2
    exp_res = (np.array([1, 2]), 1)
    if check_tuple(ex_name, p1.pegasos, exp_res, feature_matrix, labels, T, L):
        return

    feature_matrix = np.array([[1, 1], [1, 1]])
    labels = np.array([1, 1])
    T = 1
    L = 1
    exp_res = (np.array([1-1/np.sqrt(2), 1-1/np.sqrt(2)]), 1)
    if check_tuple(ex_name, p1.pegasos, exp_res, feature_matrix, labels, T, L):
        return

    log(green("PASS"), ex_name, "")

def check_classify():
    ex_name = "Classify"
    feature_matrix = np.array([[1, 1], [1, 1], [1, 1]])
    theta = np.array([1, 1])
    theta_0 = 0
    exp_res = np.array([1, 1, 1])
    if check_array(ex_name, p1.classify, exp_res, feature_matrix, theta, theta_0):
        return

    feature_matrix = np.array([[-1, 1]])
    theta = np.array([1, 1])
    theta_0 = 0
    exp_res = np.array([-1])
    if check_array(ex_name + " (boundary case)", p1.classify, exp_res, feature_matrix, theta, theta_0):
        return

    log(green("PASS"), ex_name, "")

def check_classifier_accuracy():
    ex_name = "Classifier accuracy"
    train_feature_matrix = np.array([[1, 0], [1, -1], [2, 3]])
    val_feature_matrix = np.array([[1, 1], [2, -1]])
    train_labels = np.array([1, -1, 1])
    val_labels = np.array([-1, 1])
    exp_res = 1, 0
    T = 1
    if check_tuple(ex_name, p1.classifier_accuracy, exp_res, p1.perceptron,
                   train_feature_matrix, val_feature_matrix, train_labels, val_labels, T=T):
        return

    train_feature_matrix = np.array([[1, 0], [1, -1], [2, 3]])
    val_feature_matrix = np.array([[1, 1], [2, -1]])
    train_labels = np.array([1, -1, 1])
    val_labels = np.array([-1, 1])
    exp_res = 1, 0
    T = 1
    L = 0.2
    if check_tuple(ex_name, p1.classifier_accuracy, exp_res, p1.pegasos,
                   train_feature_matrix, val_feature_matrix, train_labels, val_labels, T=T, L=L):
        return

    log(green("PASS"), ex_name, "")

def check_bag_of_words():
    ex_name = "Bag of words"
    texts = [
        "He loves to walk on the beach",
        "There is nothing better"]
    try:
        res = p1.bag_of_words(texts)
    except NotImplementedError:
        log(red("FAIL"), ex_name, ": not implemented")
        return
    if not type(res) == dict:
        log(red("FAIL"), ex_name, ": does not return a tuple, type: ", type(res))
        return

    vals = sorted(res.values())
    exp_vals = list(range(len(res.keys())))
    if not vals == exp_vals:
        log(red("FAIL"), ex_name, ": wrong set of indices. Expected: ", exp_vals, " got ", vals)
        return
    log(green("PASS"), ex_name, "")

    keys = sorted(res.keys())
    exp_keys = ['beach', 'better', 'he', 'is', 'loves', 'nothing', 'on', 'the', 'there', 'to', 'walk']
    stop_keys = ['beach', 'better', 'loves', 'nothing', 'walk']
    if keys == exp_keys:
        log(yellow("WARN"), ex_name, ": does not remove stopwords:", [k for k in keys if k not in stop_keys])
    elif keys == stop_keys:
        log(green("PASS"), ex_name, " stopwords removed")
    else:
        log(red("FAIL"), ex_name, ": keys are missing:", [k for k in stop_keys if k not in keys],
            " or are not unexpected:", [k for k in keys if k not in stop_keys])

def check_extract_bow_feature_vectors():
    ex_name = "Extract bow feature vectors"
    texts = [
        "He loves her ",
        "He really really loves her"]
    keys = ["he", "loves", "her", "really"]
    dictionary = {k: i for i, k in enumerate(keys)}
    exp_res = np.array(
        [[1, 1, 1, 0],
         [1, 1, 1, 1]])
    non_bin_res = np.array(
        [[1, 1, 1, 0],
         [1, 1, 1, 2]])
    try:
        res = p1.extract_bow_feature_vectors(texts, dictionary)
    except NotImplementedError:
        log(red("FAIL"), ex_name, ": not implemented")
        return
    if not type(res) == np.ndarray:
        log(red("FAIL"), ex_name, ": does not return a numpy array, type: ", type(res))
        return
    if not len(res) == len(exp_res):
        log(red("FAIL"), ex_name, ": expected an array of shape ", exp_res.shape, " but got array of shape", res.shape)
        return

    log(green("PASS"), ex_name)
    if (res == exp_res).all():
        log(yellow("WARN"), ex_name, ": uses binary indicators as features")
    elif (res == non_bin_res).all():
        log(green("PASS"), ex_name, ": correct non binary features")
    else:
        log(red("FAIL"), ex_name, ": unexpected feature matrix")
        return

def main():
    log(green("PASS"), "Import project1")
    try:
        check_get_order()
        check_hinge_loss_single()
        check_hinge_loss_full()
        check_perceptron_single_update()
        check_perceptron()
        check_average_perceptron()
        check_pegasos_single_update()
        check_pegasos()
        check_classify()
        check_classifier_accuracy()
        check_bag_of_words()
        check_extract_bow_feature_vectors()
    except Exception:
        log_exit(traceback.format_exc())

if __name__ == "__main__":
    main()
2.453125
2
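The graded module `project1_Copy` is not part of the record, but the expected values above pin down the interface. A minimal hinge-loss pair consistent with `check_hinge_loss_single`/`check_hinge_loss_full` (an illustration, not the course's reference solution):

import numpy as np

def hinge_loss_single(feature_vector, label, theta, theta_0):
    # Hinge loss on one point: max(0, 1 - y * (theta . x + theta_0))
    agreement = label * (np.dot(theta, feature_vector) + theta_0)
    return max(0.0, 1 - agreement)

def hinge_loss_full(feature_matrix, labels, theta, theta_0):
    # Average hinge loss over the whole dataset.
    agreements = labels * (feature_matrix @ theta + theta_0)
    return np.mean(np.maximum(0.0, 1 - agreements))

# Matches the test's expected value of 1 - 0.8 (= 0.2, up to float rounding):
print(hinge_loss_single(np.array([1, 2]), 1, np.array([-1, 1]), -0.2))
print(hinge_loss_full(np.array([[1, 2], [1, 2]]), np.array([1, 1]), np.array([-1, 1]), -0.2))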
homework_1/tests/test_3.py
mag-id/epam_python_autumn_2020
0
4642
""" Unit tests for module `homework_1.tasks.task_3`. """ from tempfile import NamedTemporaryFile from typing import Tuple import pytest from homework_1.tasks.task_3 import find_maximum_and_minimum @pytest.mark.parametrize( ["file_content", "expected_result"], [ pytest.param( "0\n", (0, 0), id="'0\n', result is (0, 0).", ), pytest.param( "1\n2\n3\n4\n5\n", (1, 5), id="'1\n2\n3\n4\n5\n', result is (1, 5).", ), pytest.param( "1\n-2\n3\n-4\n5\n-6\n7\n-8\n9\n-10\n11\n-12\n", (-12, 11), id="'1\n-2\n3\n-4\n5\n-6\n7\n-8\n9\n-10\n11\n-12\n', result: (11,-12).", ), pytest.param( "11\n-12\n3\n-4\n5\n-6\n7\n-8\n9\n-10\n1\n-2\n", (-12, 11), id="'11\n-12\n3\n-4\n5\n-6\n7\n-8\n9\n-10\n1\n-2\n', result: (11,-12).", ), pytest.param( "\n".join(str(num) for num in range(0, 667000)), (0, 666999), id="Integers from 0 to 666999 delimited by '\n'.", ), ], ) def test_find_maximum_and_minimum(file_content: str, expected_result: Tuple[int, int]): """ Mocks file using `NamedTemporaryFile` instance with writed `file_content` inside, where `file_name` == `file.name`. Passes test if `find_maximum_and_minimum`(`file.name`) is equal to `expected_result`. """ with NamedTemporaryFile(mode="wt") as file: file.write(file_content) file.seek(0) assert find_maximum_and_minimum(file.name) == expected_result
2.6875
3
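The module under test is not included in the record; an implementation consistent with these parametrized cases (one integer per line, returning a (minimum, maximum) tuple) might look like this sketch — the function name and path mirror the import above, everything else is an assumption:

from typing import Tuple

def find_maximum_and_minimum(file_name: str) -> Tuple[int, int]:
    """Return (minimum, maximum) of the integers stored one per line."""
    minimum = maximum = None
    with open(file_name) as file:
        for line in file:
            number = int(line)
            if minimum is None or number < minimum:
                minimum = number
            if maximum is None or number > maximum:
                maximum = number
    return minimum, maximum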
app/admin/views/__init__.py
CAUCHY2932/Northern_Hemisphere
0
4643
# coding:utf-8
import app.admin.views.start
import app.admin.views.book
import app.admin.views.user
import app.admin.views.site
1.023438
1
test_mnist.py
aidiary/chainer-siamese
0
4644
<reponame>aidiary/chainer-siamese
import os

import chainer
import chainer.links as L
import numpy as np
import matplotlib.pyplot as plt

from net import SiameseNetwork

# Load the trained model
model = SiameseNetwork()
chainer.serializers.load_npz(os.path.join('result', 'model.npz'), model)

# Load the test data
_, test = chainer.datasets.get_mnist(ndim=3)
test_data, test_label = test._datasets

# Map the test data into the learned low-dimensional (2D) space
y = model.forward_once(test_data)
feat = y.data

# One color per label
c = ['#ff0000', '#ffff00', '#00ff00', '#00ffff', '#0000ff',
     '#ff00ff', '#990000', '#999900', '#009900', '#009999']

# Plot each label in a different color; training succeeded if instances of
# the same class cluster together and different classes end up far apart.
for i in range(10):
    f = feat[np.where(test_label == i)]
    plt.plot(f[:, 0], f[:, 1], '.', c=c[i])

plt.legend(['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'])
plt.savefig(os.path.join('result', 'result.png'))
2.171875
2
TwoFeetTempoMove.py
b0nz0/TwisterTempo
0
4645
<reponame>b0nz0/TwisterTempo<gh_stars>0
from random import randrange, random
from time import time
import logging

from TwisterTempoGUI import TwisterTempoGUI


class TwoFeetTempoMove(object):
    COLORS_ALPHA = {0: 'RED', 1: 'BLUE', 2: 'YELLOW', 3: 'GREEN'}
    COLORS_RGB = {0: (255, 0, 0), 1: (0, 0, 255), 2: (255, 255, 0), 3: (0, 255, 0)}

    FOOT_CHANGE_PERC = 0.3
    FOOT_ON_AIR_PERC = 0.08
    FEET_ON_SAME_CIRCLE_PERC = 0.05

    def __init__(self, min_delay=0, max_delay=100):
        assert min_delay >= 0
        assert max_delay > 0
        self.min_delay = min_delay
        self.max_delay = max_delay
        self._last_beat_millis = 0
        self._left_color = randrange(0, len(TwoFeetTempoMove.COLORS_ALPHA))
        self._right_color = randrange(0, len(TwoFeetTempoMove.COLORS_ALPHA))
        self._left_direction = "FW"
        self._right_direction = "FW"
        self._next_foot = 'RIGHT'
        logging.info("Starting with LEFT: %s, RIGHT: %s"
                     % (TwoFeetTempoMove.COLORS_ALPHA[self._left_color],
                        TwoFeetTempoMove.COLORS_ALPHA[self._right_color]))
        self.tt_gui = TwisterTempoGUI()
        self.tt_gui.set_left_color(TwoFeetTempoMove.COLORS_ALPHA[self._left_color])
        self.tt_gui.set_right_color(TwoFeetTempoMove.COLORS_ALPHA[self._right_color])
        self._starting_millis = time() * 1000

    def get_colors_alpha(self):
        return {'RIGHT': TwoFeetTempoMove.COLORS_ALPHA[self._right_color],
                'LEFT': TwoFeetTempoMove.COLORS_ALPHA[self._left_color]}

    def get_colors_rgb(self):
        return {'RIGHT': TwoFeetTempoMove.COLORS_RGB[self._right_color],
                'LEFT': TwoFeetTempoMove.COLORS_RGB[self._left_color]}

    def increase_speed(self):
        self.min_delay = self.min_delay - 10

    def decrease_speed(self):
        self.min_delay = self.min_delay + 10

    def tempo_found_callback(self, seconds, millis, confidence):
        act_delay = millis - self._last_beat_millis + randrange(0, self.max_delay)
        if act_delay >= self.min_delay:
            self._last_beat_millis = millis
            self.beat_found()

    def beat_found(self):
        millis = self._last_beat_millis
        logging.debug("Randomized beat found at: %d:%d.%d"
                      % (millis / 60000, millis / 1000, millis % 1000))
        act_millis = time() * 1000 - self._starting_millis
        logging.debug("\tActual: %d:%d.%d"
                      % (act_millis / 60000, act_millis / 1000, act_millis % 1000))
        # special moves
        if random() < TwoFeetTempoMove.FOOT_ON_AIR_PERC:
            # randomized next foot on air move
            if self._next_foot == 'RIGHT':
                self.tt_gui.set_right_color(TwoFeetTempoMove.COLORS_ALPHA[self._right_color], on_air=True)
            else:
                self.tt_gui.set_left_color(TwoFeetTempoMove.COLORS_ALPHA[self._left_color], on_air=True)
            logging.debug("\tmove next foot On Air")
        elif random() < TwoFeetTempoMove.FEET_ON_SAME_CIRCLE_PERC:
            # randomized both feet on same circle
            if self._next_foot == 'RIGHT':
                self._right_color = self._left_color
                self.tt_gui.set_large_color(TwoFeetTempoMove.COLORS_ALPHA[self._right_color])
            else:
                self._left_color = self._right_color
                self.tt_gui.set_large_color(TwoFeetTempoMove.COLORS_ALPHA[self._left_color])
            logging.debug("\tmove both feet on same circle")
        # end special moves
        else:
            if random() < TwoFeetTempoMove.FOOT_CHANGE_PERC:
                # randomize at 30% the switch on foot
                if self._next_foot == 'RIGHT':
                    self._next_foot = 'LEFT'
                else:
                    self._next_foot = 'RIGHT'
            if self._next_foot == 'RIGHT':
                if self._right_direction == "FW":
                    if self._right_color == len(TwoFeetTempoMove.COLORS_ALPHA) - 1:
                        self._right_color = self._right_color - 1
                        self._right_direction = "BW"
                    else:
                        self._right_color = self._right_color + 1
                else:
                    if self._right_color == 0:
                        self._right_color = self._right_color + 1
                        self._right_direction = "FW"
                    else:
                        self._right_color = self._right_color - 1
                self.tt_gui.set_right_color(TwoFeetTempoMove.COLORS_ALPHA[self._right_color])
                logging.debug("\tmove RIGHT foot to " + TwoFeetTempoMove.COLORS_ALPHA[self._right_color])
                self._next_foot = 'LEFT'
            else:
                if self._left_direction == "FW":
                    if self._left_color == len(TwoFeetTempoMove.COLORS_ALPHA) - 1:
                        self._left_color = self._left_color - 1
                        self._left_direction = "BW"
                    else:
                        self._left_color = self._left_color + 1
                else:
                    if self._left_color == 0:
                        self._left_color = self._left_color + 1
                        self._left_direction = "FW"
                    else:
                        self._left_color = self._left_color - 1
                self.tt_gui.set_left_color(TwoFeetTempoMove.COLORS_ALPHA[self._left_color])
                logging.debug("\tmove LEFT foot to " + TwoFeetTempoMove.COLORS_ALPHA[self._left_color])
                self._next_foot = 'RIGHT'
2.671875
3
plugins/panorama/panorama/__init__.py
mohnjahoney/website_source
13
4646
<filename>plugins/panorama/panorama/__init__.py
# -*- coding: utf-8 -*-
"""
Panorama is a Pelican plugin to generate statistics from blog posts
(number of posts per month, categories and so on) and display them as
beautiful charts.

Project location: https://github.com/romainx/panorama
"""

__version__ = "0.2.0"
__author__ = "romainx"

from .panorama import *
1.460938
1
transitfeed/transfer.py
cclauss/transitfeed
9
4647
#!/usr/bin/python2.5

# Copyright (C) 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from gtfsobjectbase import GtfsObjectBase
import problems as problems_module
import util

class Transfer(GtfsObjectBase):
  """Represents a transfer in a schedule"""
  _REQUIRED_FIELD_NAMES = ['from_stop_id', 'to_stop_id', 'transfer_type']
  _FIELD_NAMES = _REQUIRED_FIELD_NAMES + ['min_transfer_time']
  _TABLE_NAME = 'transfers'
  _ID_COLUMNS = ['from_stop_id', 'to_stop_id']

  def __init__(self, schedule=None, from_stop_id=None, to_stop_id=None,
               transfer_type=None, min_transfer_time=None, field_dict=None):
    self._schedule = None
    if field_dict:
      self.__dict__.update(field_dict)
    else:
      self.from_stop_id = from_stop_id
      self.to_stop_id = to_stop_id
      self.transfer_type = transfer_type
      self.min_transfer_time = min_transfer_time

    if getattr(self, 'transfer_type', None) in ("", None):
      # Use the default, recommended transfer, if attribute is not set or blank
      self.transfer_type = 0
    else:
      try:
        self.transfer_type = util.NonNegIntStringToInt(self.transfer_type)
      except (TypeError, ValueError):
        pass

    if hasattr(self, 'min_transfer_time'):
      try:
        self.min_transfer_time = util.NonNegIntStringToInt(self.min_transfer_time)
      except (TypeError, ValueError):
        pass
    else:
      self.min_transfer_time = None

    if schedule is not None:
      # Note from Tom, Nov 25, 2009: Maybe calling __init__ with a schedule
      # should output a DeprecationWarning. A schedule factory probably won't
      # use it and other GenericGTFSObject subclasses don't support it.
      schedule.AddTransferObject(self)

  def ValidateFromStopIdIsPresent(self, problems):
    if util.IsEmpty(self.from_stop_id):
      problems.MissingValue('from_stop_id')
      return False
    return True

  def ValidateToStopIdIsPresent(self, problems):
    if util.IsEmpty(self.to_stop_id):
      problems.MissingValue('to_stop_id')
      return False
    return True

  def ValidateTransferType(self, problems):
    if not util.IsEmpty(self.transfer_type):
      if (not isinstance(self.transfer_type, int)) or \
          (self.transfer_type not in range(0, 4)):
        problems.InvalidValue('transfer_type', self.transfer_type)
        return False
    return True

  def ValidateMinimumTransferTime(self, problems):
    if not util.IsEmpty(self.min_transfer_time):
      if self.transfer_type != 2:
        problems.MinimumTransferTimeSetWithInvalidTransferType(
            self.transfer_type)

      # If min_transfer_time is negative, equal to or bigger than 24h, issue
      # an error. If smaller than 24h but bigger than 3h issue a warning.
      # These errors are not blocking, and should not prevent the transfer
      # from being added to the schedule.
      if (isinstance(self.min_transfer_time, int)):
        if self.min_transfer_time < 0:
          problems.InvalidValue('min_transfer_time', self.min_transfer_time,
                                reason="This field cannot contain a negative " \
                                       "value.")
        elif self.min_transfer_time >= 24*3600:
          problems.InvalidValue('min_transfer_time', self.min_transfer_time,
                                reason="The value is very large for a " \
                                       "transfer time and most likely " \
                                       "indicates an error.")
        elif self.min_transfer_time >= 3*3600:
          problems.InvalidValue('min_transfer_time', self.min_transfer_time,
                                type=problems_module.TYPE_WARNING,
                                reason="The value is large for a transfer " \
                                       "time and most likely indicates " \
                                       "an error.")
      else:
        # It has a value, but it is not an integer
        problems.InvalidValue('min_transfer_time', self.min_transfer_time,
                              reason="If present, this field should contain " \
                                     "an integer value.")
        return False
    return True

  def GetTransferDistance(self):
    from_stop = self._schedule.stops[self.from_stop_id]
    to_stop = self._schedule.stops[self.to_stop_id]
    distance = util.ApproximateDistanceBetweenStops(from_stop, to_stop)
    return distance

  def ValidateFromStopIdIsValid(self, problems):
    if self.from_stop_id not in self._schedule.stops.keys():
      problems.InvalidValue('from_stop_id', self.from_stop_id)
      return False
    return True

  def ValidateToStopIdIsValid(self, problems):
    if self.to_stop_id not in self._schedule.stops.keys():
      problems.InvalidValue('to_stop_id', self.to_stop_id)
      return False
    return True

  def ValidateTransferDistance(self, problems):
    distance = self.GetTransferDistance()
    if distance > 10000:
      problems.TransferDistanceTooBig(self.from_stop_id,
                                      self.to_stop_id,
                                      distance)
    elif distance > 2000:
      problems.TransferDistanceTooBig(self.from_stop_id,
                                      self.to_stop_id,
                                      distance,
                                      type=problems_module.TYPE_WARNING)

  def ValidateTransferWalkingTime(self, problems):
    if util.IsEmpty(self.min_transfer_time):
      return

    if self.min_transfer_time < 0:
      # Error has already been reported, and it does not make sense
      # to calculate walking speed with negative times.
      return

    distance = self.GetTransferDistance()
    # If min_transfer_time + 120s isn't enough for someone walking very fast
    # (2m/s) then issue a warning.
    #
    # Stops that are close together (less than 240m apart) never trigger this
    # warning, regardless of min_transfer_time.
    FAST_WALKING_SPEED = 2  # 2m/s
    if self.min_transfer_time + 120 < distance / FAST_WALKING_SPEED:
      problems.TransferWalkingSpeedTooFast(from_stop_id=self.from_stop_id,
                                           to_stop_id=self.to_stop_id,
                                           transfer_time=self.min_transfer_time,
                                           distance=distance)

  def ValidateBeforeAdd(self, problems):
    result = True
    result = self.ValidateFromStopIdIsPresent(problems) and result
    result = self.ValidateToStopIdIsPresent(problems) and result
    result = self.ValidateTransferType(problems) and result
    result = self.ValidateMinimumTransferTime(problems) and result
    return result

  def ValidateAfterAdd(self, problems):
    valid_stop_ids = True
    valid_stop_ids = self.ValidateFromStopIdIsValid(problems) and valid_stop_ids
    valid_stop_ids = self.ValidateToStopIdIsValid(problems) and valid_stop_ids
    # We need both stop IDs to be valid to able to validate their distance and
    # the walking time between them
    if valid_stop_ids:
      self.ValidateTransferDistance(problems)
      self.ValidateTransferWalkingTime(problems)

  def Validate(self, problems=problems_module.default_problem_reporter):
    if self.ValidateBeforeAdd(problems) and self._schedule:
      self.ValidateAfterAdd(problems)

  def _ID(self):
    return tuple(self[i] for i in self._ID_COLUMNS)

  def AddToSchedule(self, schedule, problems):
    schedule.AddTransferObject(self, problems)
2.25
2
social/urls.py
Kizito-Alberrt/insta-social
0
4648
from django.urls import path
from . import views
from .views import UserPostListView, PostDetailView, PostDeleteview, PostCreateView, PostUpdateView, CommentUpdateView, VideoCreateView, video_update

urlpatterns = [
    path('', views.base, name='base'),
    path('login', views.login, name='login'),
    path('register', views.register, name='register'),
    path('index', views.index, name='index'),
    path('logout', views.logout, name='logout'),
    path('like_post', views.like_post, name='like_post'),
    path('find_friends', views.find_friends, name='find_friends'),
    path('profile', views.profile, name='profile'),
    path('profile_update', views.profile_update, name='profile_update'),
    path('user/<str:username>', UserPostListView.as_view(), name='user_posts'),
    path('post/<int:pk>/', PostDetailView.as_view(), name='post_details'),
    path('post/<int:pk>/delete/', PostDeleteview.as_view(), name='post_delete'),
    path('profile_posts', views.profile_posts, name='profile_posts'),
    path('results', views.results, name='results'),
    path('post/new/', PostCreateView.as_view(), name='post-create'),
    path('post_update', views.post_update, name='post_update'),
    path('post/<int:pk>/update', PostUpdateView.as_view(), name='post-update'),
    path('profile_photos', views.profile_photos, name='profile_photos'),
    path('comment_update/<int:id>', views.comment_update, name='comment_update'),
    path('comment/<int:pk>/update', CommentUpdateView.as_view(), name='comment-update'),
    path('delete/<int:id>', views.delete, name='delete'),
    path('favourite', views.favourite, name='favourite'),
    path('favourite_posts', views.favourite_posts, name='favourite_posts'),
    path('video/new/', VideoCreateView.as_view(), name='video-create'),
    path('post/<int:pk>/video', video_update.as_view(), name='video_update'),
    # path('<str:username>', views.userprofile, name='userprofile'),
    path('video_posts', views.video_posts, name='video_posts'),
    path('user_videos', views.user_videos, name='user_videos'),
]
1.867188
2
vtkplotter_examples/other/dolfin/collisions.py
ismarou/vtkplotter-examples
4
4649
<reponame>ismarou/vtkplotter-examples<filename>vtkplotter_examples/other/dolfin/collisions.py<gh_stars>1-10
'''
compute_collisions() will compute the collision of all the entities with
a Point while compute_first_collision() will always return its first entry.
Especially if a point is on an element edge this can be tricky.
You may also want to compare with the Cell.contains(Point) tool.
'''
# Script by Rudy at https://fenicsproject.discourse.group/t/
# any-function-to-determine-if-the-point-is-in-the-mesh/275/3
import dolfin
from vtkplotter.dolfin import shapes, plot, printc

n = 4
Px = 0.5
Py = 0.5

mesh = dolfin.UnitSquareMesh(n, n)
bbt = mesh.bounding_box_tree()
collisions = bbt.compute_collisions(dolfin.Point(Px, Py))
collisions1st = bbt.compute_first_entity_collision(dolfin.Point(Px, Py))

printc("collisions    : ", collisions)
printc("collisions 1st: ", collisions1st)

for cell in dolfin.cells(mesh):
    contains = cell.contains(dolfin.Point(Px, Py))
    printc("Cell", cell.index(), "contains P:", contains, c=contains)

###########################################
pt = shapes.Point([Px, Py], c='blue')
plot(mesh, pt, text=__doc__)
2.75
3
alice_check_train/__main__.py
AsciiShell/Alice-Check-Train
0
4650
<reponame>AsciiShell/Alice-Check-Train
import datetime
import os

from alice_check_train.main import rasp_to_text
from alice_check_train.rasp_api import get_rasp, filter_rasp


def main():
    key = os.getenv('RASP_KEY')
    station_from = os.getenv('STATION_FROM')
    station_to = os.getenv('STATION_TO')
    date = datetime.date.today().strftime('%Y-%m-%d')

    js = get_rasp(key, station_from, station_to, date)
    filtered = filter_rasp(js['segments'], 300)
    message = rasp_to_text(filtered, 1000)
    if len(message) > 1024:
        print('Too long message: {} > 1024'.format(len(message)))
    print(message)


if __name__ == '__main__':
    main()
2.609375
3
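The script only warns when rasp_to_text produces more than 1024 characters (Alice's response limit). A hypothetical helper showing one way to trim schedule lines to such a budget instead of just warning (fit_to_budget and its arguments are invented for illustration):

def fit_to_budget(lines, budget=1024, sep='\n'):
    # keep whole lines until adding the next one would exceed the character budget
    out, used = [], 0
    for line in lines:
        extra = len(line) + (len(sep) if out else 0)
        if used + extra > budget:
            break
        out.append(line)
        used += extra
    return sep.join(out)

print(fit_to_budget(['train 09:15', 'train 09:42', 'train 10:05'], budget=25))
# 'train 09:15\ntrain 09:42'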
aws-KNN-RESTful.py
cakebytheoceanLuo/k-NN
1
4651
# https://medium.com/@kumon/how-to-realize-similarity-search-with-elasticsearch-3dd5641b9adb # https://docs.aws.amazon.com/opensearch-service/latest/developerguide/knn.html import sys import requests import h5py import numpy as np import json import aiohttp import asyncio import time import httpx from requests.auth import HTTPBasicAuth from statistics import mean # if len(sys.argv) != 2: # print("Type in the efSearch!") # sys.exit() # path = '/tmp/sift-128-euclidean.hdf5.1M' # float dataset # path = '/tmp/sift-128-euclidean.hdf5' # float dataset path = '/home/ubuntu/sift-128-euclidean.hdf5' # float dataset output_csv = '/tmp/sift-es.csv' # url = 'http://127.0.0.1:9200/sift-index/' host = 'https://vpc-....ap-southeast-1.es.amazonaws.com/' # single node # host = 'https://vpc-....ap-southeast-1.es.amazonaws.com/' # two nodes url = host + 'sift-index/' requestHeaders = {'content-type': 'application/json'} # https://stackoverflow.com/questions/51378099/content-type-header-not-supported auth = HTTPBasicAuth('admin', '<PASSWORD>') # Build an index #https://stackoverflow.com/questions/17301938/making-a-request-to-a-restful-api-using-python # PUT sift-index data = '''{ "settings": { "index": { "knn": true, "knn.space_type": "l2", "knn.algo_param.m": 6, "knn.algo_param.ef_construction": 50, "knn.algo_param.ef_search": 50, "refresh_interval": -1, "translog.flush_threshold_size": "10gb", "number_of_replicas": 0 } }, "mappings": { "properties": { "sift_vector": { "type": "knn_vector", "dimension": 128 } } } }''' # https://medium.com/@kumon/how-to-realize-similarity-search-with-elasticsearch-3dd5641b9adb response = requests.put(url, data=data, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB')) # response = requests.put(url, data=data, verify=False, headers=requestHeaders, auth=auth) assert response.status_code==requests.codes.ok # cluster_url = 'http://127.0.0.1:9200/_cluster/settings' cluster_url = host + '_cluster/settings' cluster_data = '''{ "persistent" : { "knn.algo_param.index_thread_qty": 16 } } ''' response = requests.put(cluster_url, data=cluster_data, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'), headers=requestHeaders) assert response.status_code==requests.codes.ok # Bulkload into index bulk_template = '{ "index": { "_index": "sift-index", "_id": "%s" } }\n{ "sift_vector": [%s] }\n' hf = h5py.File(path, 'r') for key in hf.keys(): print("A key of hf is %s" % key) #Names of the groups in HDF5 file. 
vectors = np.array(hf["train"][:]) num_vectors, dim = vectors.shape print("num_vectors: %d" % num_vectors) print("dim: %d" % dim) bulk_data = "" start = time.time() for (id,vector) in enumerate(vectors): assert len(vector)==dim vector_str = "" for num in vector: vector_str += str(num) + ',' vector_str = vector_str[:-1] id_str = str(id) single_bulk_done = bulk_template % (id_str, vector_str) bulk_data += single_bulk_done if (id+1) % 100000 == 0: print(str(id+1)) # POST _bulk response = requests.put(url + '_bulk', data=bulk_data, auth=HTTPBasicAuth('admin', 'I#<PASSWORD>TAHB'), headers=requestHeaders) assert response.status_code==requests.codes.ok bulk_data = "" end = time.time() print("Insert Time: %d mins" % ((end - start) / 60.0)) # Unit: min # refresh_url = 'http://127.0.0.1:9200/sift-index/_settings' refresh_url = host + 'sift-index/_settings' refresh_data = '''{ "index" : { "refresh_interval": "1s" } } ''' response = requests.put(refresh_url, data=refresh_data, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB')) assert response.status_code==requests.codes.ok # response = requests.post('http://127.0.0.1:9200/sift-index/_refresh', verify=False, headers=requestHeaders) # assert response.status_code==requests.codes.ok # merge_url = 'http://127.0.0.1:9200/sift-index/_forcemerge?max_num_segments=1' merge_url = host + 'sift-index/_forcemerge?max_num_segments=1' merge_response = requests.post(merge_url, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I#<PASSWORD>'), timeout=600) assert merge_response.status_code==requests.codes.ok # warmup_url = 'http://127.0.0.1:9200/_opendistro/_knn/warmup/sift-index' warmup_url = host + '_opendistro/_knn/warmup/sift-index' warmup_response = requests.get(warmup_url, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I<PASSWORD>')) assert warmup_response.status_code==requests.codes.ok # Send queries total_time = 0 # in ms hits = 0 # for recall calculation query_template = ''' { "size": 50, "query": {"knn": {"sift_vector": {"vector": [%s],"k": 50}}} } ''' queries = np.array(hf["test"][:]) nq = len(queries) neighbors = np.array(hf["neighbors"][:]) # distances = np.array(hf["distances"][:]) num_queries, q_dim = queries.shape print("num_queries: %d" % num_queries) print("q_dim: %d" % q_dim) assert q_dim==dim ef_search_list = [50, 100, 150, 200, 250, 300] for ef_search in ef_search_list: ef_data = '''{ "index": { "knn.algo_param.ef_search": %d } }''' ef_data = ef_data % ef_search ### Update Index Setting: efSearch response = requests.put(url + '_settings', data=ef_data, headers=requestHeaders, auth=HTTPBasicAuth('admin', '<PASSWORD>')) assert response.status_code==requests.codes.ok total_time_list = [] hits_list = [] for count in range(5): total_time = 0 # in ms hits = 0 # for recall calculation query_template = ''' ''' single_query = '''{}\n{"size": 50, "query": {"knn": {"sift_vector": {"vector": [%s],"k": 50}}}}\n''' for (id,query) in enumerate(queries): assert len(query)==dim query_str = "" for num in query: query_str += str(num) + ',' query_str = query_str[:-1] # GET sift-index/_search single_query_done = single_query % (query_str) query_template += single_query_done query_data = query_template # print(query_data) response = requests.get(url + '_msearch', data=query_data, headers=requestHeaders, auth=HTTPBasicAuth('admin', '<PASSWORD>'), stream=True) assert response.status_code==requests.codes.ok # print(response.text) result = json.loads(response.text) # QPS total_time = result['took'] # tooks = [] # for i in range(len(queries)): # for 
ele in result['responses']: # tooks.append(int(ele['took'])) for id in range(len(queries)): # Recall neighbor_id_from_result = [] for ele in result['responses'][id]['hits']['hits']: neighbor_id_from_result.append(int(ele['_id'])) assert len(neighbor_id_from_result)==50 # print("neighbor_id_from_result: ") # print(neighbor_id_from_result) neighbor_id_gt = neighbors[id][0:50] # topK=50 # print("neighbor_id_gt") # print(neighbor_id_gt) hits_q = len(list(set(neighbor_id_from_result) & set(neighbor_id_gt))) # print("# hits of this query with topk=50: %d" % hits_q) hits += hits_q total_time_list.append(total_time) hits_list.append(hits) print(total_time_list) total_time_avg = mean(total_time_list[2:-1]) hits_avg = mean(hits_list) QPS = 1.0 * nq / (total_time_avg / 1000.0) recall = 1.0 * hits_avg / (nq * 50) print(ef_search, QPS, recall)
2.328125
2
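The benchmark's recall loop intersects the 50 ids returned per query with the first 50 ground-truth neighbors. The same computation, reduced to a minimal standalone sketch:

def recall_at_k(results, ground_truth, k=50):
    # fraction of true top-k neighbors recovered, averaged over all queries
    hits = sum(len(set(r[:k]) & set(gt[:k])) for r, gt in zip(results, ground_truth))
    return hits / (len(results) * k)

print(recall_at_k([[1, 2, 3, 4]], [[1, 2, 9, 4]], k=4))  # 0.75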
test/test_docker_images.py
bauerj/cibuildwheel
0
4652
<reponame>bauerj/cibuildwheel import platform import textwrap import pytest from . import test_projects, utils dockcross_only_project = test_projects.new_c_project( setup_py_add=textwrap.dedent(r''' import os, sys # check that we're running in the correct docker image as specified in the # environment options CIBW_MANYLINUX1_*_IMAGE if "linux" in sys.platform and not os.path.exists("/dockcross"): raise Exception( "/dockcross directory not found. Is this test running in the correct docker image?" ) ''') ) def test(tmp_path): if utils.platform != 'linux': pytest.skip('the test is only relevant to the linux build') if platform.machine() not in ['x86_64', 'i686']: pytest.skip('this test is currently only possible on x86_64/i686 due to availability of alternative images') project_dir = tmp_path / 'project' dockcross_only_project.generate(project_dir) actual_wheels = utils.cibuildwheel_run(project_dir, add_env={ 'CIBW_MANYLINUX_X86_64_IMAGE': 'dockcross/manylinux2010-x64', 'CIBW_MANYLINUX_I686_IMAGE': 'dockcross/manylinux2010-x86', 'CIBW_SKIP': 'pp* cp39-*', }) # also check that we got the right wheels built expected_wheels = [w for w in utils.expected_wheels('spam', '0.1.0') if '-pp' not in w and '-cp39-' not in w] assert set(actual_wheels) == set(expected_wheels)
2.25
2
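The test skips PyPy and CPython 3.9 wheels via CIBW_SKIP='pp* cp39-*' and filters expected_wheels to match. A rough sketch of that glob-style filtering with fnmatch (cibuildwheel's real selector logic is richer; this only approximates the idea):

from fnmatch import fnmatch

def skipped(tag, skip='pp* cp39-*'):
    # a build tag is skipped when it matches any whitespace-separated glob pattern
    return any(fnmatch(tag, pat) for pat in skip.split())

print(skipped('cp39-manylinux_x86_64'))  # True
print(skipped('cp38-manylinux_x86_64'))  # False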
real_trade/MoveAverageTradePosition.py
taka-mochi/cryptocurrency-autotrading
3
4653
# coding: utf-8 import math import dateutil import dateutil.parser import json from ChartBars import Chart from ChartUpdaterByCCWebsocket import ChartUpdaterByCoincheckWS from Util import BitcoinUtil def adjust_price_to_tick(price, tick): return price - math.fmod(price, tick) def adjust_amount_to_tick(amount, tick): return amount - math.fmod(amount, tick) # a class for one position class OnePositionTrader(object): def __init__(self, price_decide_algorithm, api, pair="btc_jpy", use_leverage = True): self.max_total_position_price_base = 0 # total maximum position size in base currency self.positioned_price_base = 0 # total position price in base currency (actually paired currency) self.positioned_value_in_qty = 0 # used only for spot (non-leverage) trading self.max_free_margin_of_base_currency = 0 # max free margin. we cannot use orders that exceed this margin self.positions = [] self.position_id_to_sellids = {} self.got_all_order_ids = [] self.got_close_order_ids = [] self.exist_order_info_list = None self.exist_close_order_info_list = None self.last_checked_transaction_id = 0 self.api = api # api: e.g. instance of CoinCheck self.use_leverage = use_leverage self.timelimit_to_grouping_transaction = 2 # fills whose execution times differ by at most this many seconds are treated as the same position (only when use_leverage == False) self.__pair = pair self.price_decide_algorithm = price_decide_algorithm print("PositionTrader: inst=" + str(self) + ", pair=" + str(pair)) @property def pair(self): return self.__pair def get_base_currency(self): return self.pair.split("_")[1].lower() def get_qty_currency(self): return self.pair.split("_")[0].lower() # set usable jpy (available_margin + reserved_margin + (positioned)) def set_max_total_position_price_base(self, p): self.set_max_total_position_price_of_base_currency(p) def set_max_total_position_price_of_base_currency(self, p): self.max_total_position_price_base = p def set_max_free_margin_of_base_currency(self, p): self.max_free_margin_of_base_currency = p def get_max_total_position_price_base(self): return self.get_max_total_position_price_of_base_currency() def get_max_total_position_price_of_base_currency(self): return self.max_total_position_price_base def get_positioned_price_base(self): return self.positioned_price_base def set_timelimit_to_grouping_transaction(self, timelimit_to_grouping_transaction): self.timelimit_to_grouping_transaction = timelimit_to_grouping_transaction # check current status and make new positions according to algorithm # notice: this method should be called after update_status def update_new_orders(self, chart, do_not_create_new_order=False): assert (self.price_decide_algorithm is not None) position_type = None target_value = None stoploss_rate = None decide_make_ret = self.price_decide_algorithm.decide_make_position_order(chart) if len(decide_make_ret) == 3: (position_type, target_value, stoploss_rate) = decide_make_ret else: (position_type, target_value) = decide_make_ret if target_value is None or position_type is None: # algorithm says this instance should not make order. 
cancel all if self.exist_order_info_list is not None: for exist_order_info in self.exist_order_info_list: self._cancel_order(exist_order_info["id"]) self.exist_order_info_list = None return False # round to possible price tick = self.api.order.tick_price(self.pair) target_value = adjust_price_to_tick(target_value, tick) if stoploss_rate is not None: stoploss_rate = adjust_price_to_tick(stoploss_rate, tick) # !!round to possible amount possible_make_total_price_base_cur = self.get_max_total_position_price_of_base_currency() - self.positioned_price_base possible_make_total_price_base_cur = min(possible_make_total_price_base_cur, self.max_free_margin_of_base_currency) amount_tick = self.api.order.tick_amount(self.pair) possible_amount = 1.0 * possible_make_total_price_base_cur / target_value possible_amount = adjust_amount_to_tick(possible_amount,amount_tick) print("possible_create_in_base = %f, want to make amount in base = %f, possible amount = %f" % (self.get_max_total_position_price_of_base_currency() - self.positioned_price_base, possible_make_total_price_base_cur, possible_amount)) #print("base_cur = %f, positioned = %f, others = %f" % (self.get_max_total_position_price_of_base_currency(), self.positioned_price_base, self.other_reserved_base,)) #print("target_value = %f, possible_base = %f" % (target_value, possible_make_total_price_base_cur,)) if possible_amount <= 0.000001: # too few btc print("want to make (price,amount) = (%f,%f) but too few amount" % (target_value, possible_amount)) return False if not do_not_create_new_order: success, new_order_created = self._update_or_create_order(position_type, target_value, possible_amount, stop_loss_rate=stoploss_rate) return new_order_created else: self._cancel_exist_all_buy_orders() print("algorithm wants to create a new order but DO_NOT_CREATE_NEW flag = true") return False # update close orders according to current positions # this class should be called after update_status def update_close_orders(self, chart, current_time_timezone_aware): for position in self.positions: open_rate = float(position["open_rate"]) amount = float(position["amount"]) created_time = position["created_at_datetime"] target_value = None if self.price_decide_algorithm.market_sell_decide_algorithm(chart, open_rate, created_time, current_time_timezone_aware) is True: # market order close pass else: target_value = self.price_decide_algorithm.sell_price_decide_algorithm(open_rate) target_value = adjust_price_to_tick(target_value, self.api.order.tick_price(self.pair)) self._update_or_create_close_order(position, target_value) # interface to update internal position & order status def update_status(self, valid_position_info, valid_transaction_info, valid_order_info): # update position/order status (assume: paginations are already cleared) self._update_order_id_status(valid_order_info) if self.use_leverage: self._update_position_status(valid_position_info) else: self._update_transaction_status(valid_transaction_info) def _update_position_status(self, valid_position_info): # apply real positions status to this instance # for leverage accounts only if not self.use_leverage: return """ position example (array of "data" will be passed) { "data": [ { "id": 10, "pair": "btc_jpy", "status": "open", "created_at": "2015-12-02T05:27:53.000Z", "closed_at": null, "open_rate": "43553.0", "closed_rate": null, "amount": "1.51347797", "all_amount": "1.51045705", "side": "sell", "pl": "-8490.81029287", "new_order": { "id": 23104033, "side": "sell", "rate": null, "amount": null, "pending_amount": "0", 
"status": "complete", "created_at": "2015-12-02T05:27:52.000Z" }, "close_orders": [ { "id": 23755132, "side": "buy", "rate": "10000.0", "amount": "1.0", "pending_amount": "0.0", "status": "cancel", "created_at": "2015-12-05T05:03:56.000Z" } ] } ] } """ #### # parse positions #### self.positions = [] self.position_id_to_sellids = {} all_positions = valid_position_info positioned_value_in_base = 0 for position in all_positions: status = position["status"] if status != "open": continue pair = position["pair"] if pair != self.pair: continue position_id = position["id"] # check position that is created by the new_order that is self.order_id: new_order = position["new_order"] if new_order["status"] == "cancel": print("new order: " + str(new_order["id"]) + " state is 'cancel'. probably partially contracted and remain is canceled. this position is not ignored") #continue new_order_id = new_order["id"] if new_order_id in self.got_all_order_ids: # this position is created by this class's order created_time = dateutil.parser.parse(position["created_at"]) position["created_at_datetime"] = created_time amount = position["amount"] all_amount = position["all_amount"] if all_amount is not None and all_amount < amount: amount = all_amount position["amount"] = position["all_amount"] = amount self.positions.append(position) open_rate = position["open_rate"] positioned_value_in_base += float(amount) * float(open_rate) # check close orders self.position_id_to_sellids[position_id] = \ list(map(lambda x:x["id"], filter(lambda x:x["status"] != "cancel", position["close_orders"]))) self.positioned_price_base = positioned_value_in_base def _update_transaction_status(self, valid_transaction_info): if self.use_leverage: return # 現物用。transactionの結果からポジションの状態を解析. 基本的にupdate_position_statusと挙動は同じ。parseするjsonが異なる # * ただし、前フレームからの情報を引き継ぐところがupdate_position_statusと違う (現物にはpositionという概念が無い) positions = self.positions position_id_to_sellids = self.position_id_to_sellids close_transactions = [] all_transactions = valid_transaction_info positioned_value_in_qty = self.positioned_value_in_qty qty_cur = self.get_qty_currency() base_cur = self.get_base_currency() last_transaction_id_in_this_frame = self.last_checked_transaction_id for transaction in all_transactions: transaction_id = int(transaction["id"]) # transaction_id means position_id transaction["id"] = transaction_id # check only new id if self.last_checked_transaction_id >= transaction_id: continue last_transaction_id_in_this_frame = max(last_transaction_id_in_this_frame, transaction_id) # check pair this_pair = transaction["pair"] if this_pair != self.pair: continue # check position that is created by the new_order that is self.order_id: new_order_id = int(transaction["order_id"]) transaction["order_id"] = new_order_id is_position_transaction = new_order_id in self.got_all_order_ids is_close_transaction = new_order_id in self.got_close_order_ids if not is_position_transaction and not is_close_transaction: continue # other pair if qty_cur not in transaction["funds"] or base_cur not in transaction["funds"]: continue # this position is created by this class's order qty_amount = float(transaction["funds"][qty_cur]) transaction["amount"] = transaction["amount"] = qty_amount transaction["open_rate"] = float(transaction["rate"]) open_rate = float(transaction["open_rate"]) positioned_value_in_qty += float(qty_amount) created_time = dateutil.parser.parse(transaction["created_at"]) transaction["created_at_datetime"] = created_time if is_position_transaction: # check close orders # 
missing one would be risky (it would never get canceled), so for now link every close order we have position_id_to_sellids[transaction_id] = [] transaction["close_orders"] = [] positions.append(transaction) else: close_transactions.append(transaction) # in next frame, only transaction_id > self.last_checked_transaction_id will be checked self.last_checked_transaction_id = last_transaction_id_in_this_frame print("last_checked_transaction_id = ", self.last_checked_transaction_id) print("self.exist_close_order_info_list", self.exist_close_order_info_list) if self.exist_close_order_info_list is not None: for pos_i, position in enumerate(positions): transaction_id = position["id"] position_id_to_sellids[transaction_id] = list(map(lambda x:x["id"], self.exist_close_order_info_list)) position["close_orders"] = self.exist_close_order_info_list for i, order in enumerate(position["close_orders"]): order["status"] = "open" order["side"] = order["order_type"] if "amount" not in order: order["amount"] = float(order["pending_amount"]) position["close_orders"][i] = order positions[pos_i] = position # round very small value if abs(positioned_value_in_qty) < self.api.order.min_create_amount(self.pair)*0.1: positioned_value_in_qty = 0 positions = sorted(positions, key=lambda x:-x["id"]) # order by desc # concat very near created_at transactions grouped_positions = self._group_near_transactions(positions) # remove closed position & update positioned_value_in_jpy valid_positions, positioned_value_in_base = self._remain_non_closed_transactions(grouped_positions, positioned_value_in_qty) if abs(positioned_value_in_base) < self.api.order.tick_price(self.pair) * self.api.order.min_create_amount(self.pair) * 0.1: positioned_value_in_base = 0 # merge position_id_to_sellids self.position_id_to_sellids = {} for position in valid_positions: pos_id = position["id"] self.position_id_to_sellids[pos_id] = position_id_to_sellids[pos_id] self.positioned_price_base = positioned_value_in_base self.positioned_value_in_qty = positioned_value_in_qty self.position_id_to_sellids = position_id_to_sellids self.positions = valid_positions print("position_count=%d, positioned_%s=%f, positioned_%s=%f" % (len(self.positions), base_cur, self.positioned_price_base, qty_cur, self.positioned_value_in_qty,)) # walking the entire history just to work out what was closed and how much position remains is not practical # instead, use the position state that can already be resolved at this stage (a close order id being present means the opposite trade was filled) # and, from the positions accumulated above, remove the ones already closed out (by amount, including positions carried over from the previous frame); treat only what remains as live positions and merge them into one (this is for spot trading, so separate ids carry no meaning) # keep the remaining position IDs and the consumed close-order IDs, and from the next frame on only apply IDs newer than those # however, rounding errors accumulate over time, so once the total (in jpy or btc terms) becomes extremely small, round it off and treat it as no position # hmm... spot and leverage need quite different bookkeeping, so maybe sharing one class was a mistake? it has gotten messy # group positions whose execution times are close together def _group_near_transactions(self, target_transactions): grouped_positions = [] positions = target_transactions if len(positions) > 0: def grouping(desced_position_array): ret_pos = dict(desced_position_array[0]) total_amount = 0 total_jpy = 0 for p in desced_position_array: total_amount += p["amount"] total_jpy += p["amount"] * p["open_rate"] ret_pos["amount"] = total_amount ret_pos["open_rate"] = total_jpy / total_amount return ret_pos concat_start_index = 0 prev_created_at = positions[0]["created_at_datetime"] for idx, pos in enumerate(positions): cur_created_at = pos["created_at_datetime"] if abs((cur_created_at - prev_created_at).total_seconds()) <= self.timelimit_to_grouping_transaction: # can group prev_created_at = cur_created_at continue # this position cannot be grouped. 
make a new group from pos[start_index] - pos[idx-1] grouped_positions.append(grouping(positions[concat_start_index:idx])) #print(grouped_positions[-1]) concat_start_index = idx prev_created_at = cur_created_at # group whatever positions remain ungrouped grouped_positions.append(grouping(positions[concat_start_index:])) return grouped_positions # keep only the transactions that have not been closed yet def _remain_non_closed_transactions(self, target_transactions, positioned_value_in_qty): valid_positions = [] remain_qty = positioned_value_in_qty total_base = 0 for position in target_transactions: if remain_qty <= 0: break amount = position["amount"] if remain_qty >= amount: remain_qty -= amount else: position["amount"] = remain_qty remain_qty = 0 valid_positions.append(position) total_base += position["amount"] * position["open_rate"] return valid_positions, total_base def _update_order_id_status(self, valid_order_info): #### # parse orders #### """ orders example (array of "orders" will be passed) { "success": true, "orders": [ { "id": 202835, "order_type": "buy", "rate": 26890, "pair": "btc_jpy", "pending_amount": "0.5527", "pending_market_buy_amount": null, "stop_loss_rate": null, "created_at": "2015-01-10T05:55:38.000Z" }, { "id": 202836, "order_type": "sell", "rate": 26990, "pair": "btc_jpy", "pending_amount": "0.77", "pending_market_buy_amount": null, "stop_loss_rate": null, "created_at": "2015-01-10T05:55:38.000Z" }, { "id": 38632107, "order_type": "buy", "rate": null, "pair": "btc_jpy", "pending_amount": null, "pending_market_buy_amount": "10000.0", "stop_loss_rate": "50000.0", "created_at": "2016-02-23T12:14:50.000Z" } ] } """ #exist_order_ids = list(map(lambda x:x["id"], valid_order_info)) exist_orders = [] exist_close_orders = [] other_orders = [] for idx, order in enumerate(valid_order_info): order_id = order["id"] order_pair = order["pair"] is_added = False if order_pair == self.pair: if order_id in self.got_all_order_ids: is_added = True exist_orders.append(order) elif order_id in self.got_close_order_ids: is_added = True exist_close_orders.append(order) if not is_added: other_orders.append(order) print("exist_create_orders", exist_orders) print("exist_close_orders", exist_close_orders) self.exist_order_info_list = exist_orders if len(exist_orders) > 0 else None self.exist_close_order_info_list = exist_close_orders if len(exist_close_orders) > 0 else None #self.other_reserved_base = 0 #if not self.use_leverage: # for o in other_orders: # if o["order_type"] == "buy": # self.other_reserved_base += float(o["pending_amount"]) * float(o["rate"]) # returns: (is_success, is_new_order_created) def _update_or_create_order(self, position_type, target_value, possible_qty, stop_loss_rate = None): assert (self.api is not None) # the order list has the same format for spot and leverage accounts if self.exist_order_info_list is not None: # check the same value or not if len(self.exist_order_info_list) == 1: exist_order_info = self.exist_order_info_list[0] cur_rate = exist_order_info["rate"] if "rate" in exist_order_info else None # get current stoploss cur_stoploss = exist_order_info["stop_loss_rate"] if "stop_loss_rate" in exist_order_info else None cur_stoploss_float_or_none = None if cur_stoploss is not None: cur_stoploss_float_or_none = float(cur_stoploss) target_stoploss_float_or_none = None if stop_loss_rate is not None: target_stoploss_float_or_none = float(stop_loss_rate) cur_amount = None if "amount" in exist_order_info: cur_amount = exist_order_info["amount"] elif "pending_amount" in exist_order_info: cur_amount = exist_order_info["pending_amount"] 
order_type = None if "order_type" in exist_order_info: if exist_order_info["order_type"] == "buy" or\ exist_order_info["order_type"] == "leverage_buy": order_type = "long" if exist_order_info["order_type"] == "sell" or \ exist_order_info["order_type"] == "leverage_sell": order_type = "short" if cur_rate is not None and cur_amount is not None and order_type is not None: if abs(float(cur_rate)-float(target_value)) < 0.00001 and \ abs(float(cur_amount)-float(possible_qty)) < 0.00001 and \ cur_stoploss_float_or_none == target_stoploss_float_or_none and \ order_type == position_type: # same order. do nothing print("You already ordered this order: rate=%.1f, amount=%f, stoploss_rate=%s, position_type=%s" % (target_value, possible_qty, str(stop_loss_rate), position_type,)) return True, False # cancel all exist orders if not self._cancel_exist_all_buy_orders(): return False, False # check minimum btc min_qty = self.api.order.min_create_amount(self.pair) if possible_qty < min_qty: print("Minimum order btc = %f, you requested = %f" % (min_qty, possible_qty,)) return False, False # make new order """ ret val example "success": true, "id": 12345, "rate": "30010.0", "amount": "1.3", "order_type": "sell", "stop_loss_rate": null, "pair": "btc_jpy", "created_at": "2015-01-10T05:55:38.000Z" """ is_long = position_type == "long" order_type = 'leverage_buy' if is_long else 'leverage_sell' if not self.use_leverage: order_type = 'buy' if is_long else 'sell' order = { 'rate': "%.8f" % target_value, 'amount': "%.8f" % possible_qty, 'order_type': order_type, 'pair': self.pair } # not correct # this "stop_loss_rate" means: if a value >= stop_loss_rate, sashine will be placed at "rate" if stop_loss_rate is not None: order["stop_loss_rate"] = stop_loss_rate ret_str = self.api.order.create(order) ret = None if ret_str is not None: try: ret = json.loads(ret_str) except: print("failed to parse api.order.create result") try: print(ret_str) except Exception as e: print("failed to show returned json str") print(e) if ret is None or ret["success"] is not True or "id" not in ret: print("Failed to create order!!") try: print(ret_str) except Exception as e: print("failed to show returned json str") print(e) return False, False self.exist_order_info_list = [ret] self.got_all_order_ids.append(ret["id"]) # remove very old orders if len(self.got_all_order_ids) > 500: self.got_all_order_ids = self.got_all_order_ids[-500:] print("order success!", ret_str) return True, True def _cancel_exist_all_buy_orders(self): failed_to_cancel = False exist_order_i = 0 while exist_order_i < len(self.exist_order_info_list): exist_order_info = self.exist_order_info_list[exist_order_i] if self._cancel_order(exist_order_info["id"]) is False: # something error happened!! print("order cancel failed %d even if there is a valid order in internal state" % (exist_order_info["id"],)) failed_to_cancel = True del self.exist_order_info_list[exist_order_i] else: exist_order_i += 1 if len(self.exist_order_info_list) == 0: self.exist_order_info_list = None if failed_to_cancel: return False return True # target_value: sashine value. 
if None, market-make def _update_or_create_close_order(self, position, target_value): position_id = position["id"] if position_id not in self.position_id_to_sellids: return False sell_qty = float(position["amount"]) sell_ids = self.position_id_to_sellids[position_id] position_type = position["side"] # convert position type name if position_type == "buy": position_type = "long" if position_type == "sell": position_type = "short" is_close_long = True if position_type == "long": is_close_long = True if position_type == "short": is_close_long = False # check exist sell-orders. if target value and amount are completely same, do not pass new order valid_close_orders = list(filter(lambda x:x["status"] != "cancel" and x["id"] in sell_ids, position["close_orders"])) print("valid_close_order count = %d" % len(valid_close_orders)) if len(valid_close_orders) == 1 and target_value is not None: # check the order is already created on exchanger valid_close_order = valid_close_orders[0] print("your order: rate=%f, amount=%f" % (target_value, sell_qty,)) print("valid_close_order[0]:") print(valid_close_order) rate = None if "rate" in valid_close_order: rate = float(valid_close_order["rate"]) amount = valid_close_order["amount"] is_cur_close_long = False if "side" in valid_close_order: is_cur_close_long = valid_close_order["side"] == "sell" elif "order_type" in valid_close_order: is_cur_close_long = valid_close_order["order_type"] == "sell" if abs(float(rate)-float(target_value)) < 0.00001 and \ abs(float(amount)-float(sell_qty)) < 0.00001 and \ is_close_long == is_cur_close_long: # completely same!! print("requested close order is already ordered on server:") print(" position id:%s, target_value:%s, amount:%s, close_long:%s" % (str(position_id), str(target_value), str(amount), str(is_cur_close_long),)) return True min_qty = self.api.order.min_create_amount(self.pair) if sell_qty < min_qty: qty_cur = self.get_qty_currency() print("Minimum order %s = %f, you requested = %f" % (qty_cur, min_qty, sell_qty,)) return False # cancel all for sell_id in sell_ids: self._cancel_order(sell_id) self.position_id_to_sellids[position_id] = [] # make new order order = {} if self.use_leverage: order = { 'amount': '%.8f' % BitcoinUtil.roundBTCby1satoshi(sell_qty), 'position_id': position_id, 'order_type': 'close_long' if is_close_long else 'close_short', 'pair': 'btc_jpy', } if target_value is not None: order['rate'] = target_value else: # if not leverage order, close order is always "sell" if not is_close_long: print("normal order cannot make short position!") print("you passed close 'short' for normal order") return False order = { 'amount': '%.8f' % BitcoinUtil.roundBTCby1satoshi(sell_qty), 'order_type': 'sell', 'pair': self.pair, } if target_value is None: # market_sell order['order_type'] = "market_sell" else: order['rate'] = target_value ret = self.api.order.create(order) ret_str = ret if ret is not None: try: ret = json.loads(ret) except: print("failed to parse close_long order result") try: print(ret_str) except Exception as e: print("failed to print error") print(e) if ret is None or ret["success"] is not True or "id" not in ret or ret["id"] is None: print("sell order canceled but failed to create new sell order!!: position id: %s" % (str(position_id),)) try: print(ret_str) except Exception as e: print("failed to print error") print(e) return False sell_ids = [ret["id"]] self.position_id_to_sellids[position_id] = sell_ids self.got_close_order_ids.append(ret["id"]) if len(self.got_close_order_ids) > 500: 
self.got_close_order_ids = self.got_close_order_ids[-500:] return True def _cancel_order(self, order_id): # call apis for current orders if order_id is None: print("order is already canceled") return True # do something ret_str = self.api.order.cancel({"id": order_id, "pair": self.pair}) ret = None if ret_str is not None: try: ret = json.loads(ret_str) except: print("failed to parse cancel order ret str") try: print(ret_str) except Exception as e: print("failed to print returned error json") print(e) if ret is None or ret["success"] is not True or "id" not in ret: print("Failed to cancel order %s: %s" % (str(order_id), str(ret_str),)) return False return True
2.65625
3
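_group_near_transactions above merges spot fills whose execution times fall within timelimit_to_grouping_transaction seconds of each other, volume-weighting the open rate. A stripped-down sketch of that rule on plain tuples (the field layout here is an assumption for illustration):

def group_fills(fills, limit=2.0):
    # fills: list of (epoch_seconds, amount, rate), ordered by time;
    # consecutive fills within `limit` seconds merge into one position
    groups, current = [], [fills[0]]
    for f in fills[1:]:
        if abs(f[0] - current[-1][0]) <= limit:
            current.append(f)
        else:
            groups.append(current)
            current = [f]
    groups.append(current)
    merged = []
    for g in groups:
        amount = sum(a for _, a, _ in g)
        rate = sum(a * r for _, a, r in g) / amount  # volume-weighted open rate
        merged.append((g[0][0], amount, rate))
    return merged

print(group_fills([(0.0, 1.0, 100.0), (1.5, 1.0, 110.0), (10.0, 2.0, 90.0)]))
# [(0.0, 2.0, 105.0), (10.0, 2.0, 90.0)]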
test.py
sbcshop/PiRelay-8
2
4654
<reponame>sbcshop/PiRelay-8<filename>test.py<gh_stars>1-10 from PiRelay8 import Relay import time r1 = Relay("RELAY1") r2 = Relay("RELAY2") r3 = Relay("RELAY3") r4 = Relay("RELAY4") r5 = Relay("RELAY5") r6 = Relay("RELAY6") r7 = Relay("RELAY7") r8 = Relay("RELAY8") r1.off() r2.off() r3.off() r4.off() r5.off() r6.off() r7.off() r8.off() r1.on() time.sleep(0.5) r1.off() time.sleep(0.5) r2.on() time.sleep(0.5) r2.off() time.sleep(0.5) r3.on() time.sleep(0.5) r3.off() time.sleep(0.5) r4.on() time.sleep(0.5) r4.off() time.sleep(0.5) r5.on() time.sleep(0.5) r5.off() time.sleep(0.5) r6.on() time.sleep(0.5) r6.off() time.sleep(0.5) r7.on() time.sleep(0.5) r7.off() time.sleep(0.5) r8.on() time.sleep(0.5) r8.off() time.sleep(0.5)
1.84375
2
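The relay walk above is fully regular, so a loop expresses the same sequence more compactly (a sketch assuming the same PiRelay8 Relay API and naming scheme; it needs the actual hardware to run):

from PiRelay8 import Relay
import time

relays = [Relay("RELAY%d" % i) for i in range(1, 9)]
for r in relays:
    r.off()  # start from a known state
for r in relays:
    r.on()
    time.sleep(0.5)
    r.off()
    time.sleep(0.5)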
bot/cogs/clan.py
johnvictorfs/atlantisbot-rewrite
0
4655
<gh_stars>0 import rs3clans import discord from discord.ext import commands from bot.bot_client import Bot from bot.utils.tools import separator from bot.utils.context import Context class Clan(commands.Cog): def __init__(self, bot: Bot): self.bot = bot @commands.cooldown(1, 5, commands.BucketType.user) @commands.bot_has_permissions(embed_links=True) @commands.command(aliases=['clan']) async def clan_detail_info(self, ctx: Context, *, clan_name: str): try: clan = rs3clans.Clan(name=clan_name, set_exp=True) except ConnectionError: return await ctx.send(f"Houve um erro ao tentar conectar a API da Jagex. Tente novamente mais tarde.") except rs3clans.ClanNotFoundError: return await ctx.send(f"O clã '{clan_name}' não existe.") clan_leader = None for member in clan: if member.rank == 'Owner': clan_leader = member.name clan_url = clan.name.replace(' ', '%20') clan_embed = discord.Embed( title=clan.name, color=discord.Color.green(), url=f'http://services.runescape.com/m=clan-home/clan/{clan_url}' ) clan_embed.set_author(name='RuneClan', url=f'https://runeclan.com/clan/{clan_url}') clan_embed.set_thumbnail(url=f'http://services.runescape.com/m=avatar-rs/{clan_url}/clanmotif.png') clan_embed.add_field(name="Exp Total", value=f'{clan.exp:,}') clan_embed.add_field(name="Membros", value=str(clan.count)) clan_embed.add_field(name="Líder", value=clan_leader) clan_embed.add_field(name="Exp Média por Membro", value=f'{clan.avg_exp:,.0f}') return await ctx.send(embed=clan_embed) @commands.cooldown(1, 5, commands.BucketType.user) @commands.bot_has_permissions(embed_links=True) @commands.command(aliases=['claninfo', 'clanexp', 'claexp', 'clainfo', 'clãexp', 'clãinfo']) async def clan_user_info(self, ctx: Context, *, username: str): try: player = rs3clans.Player(name=username, runemetrics=True) except ConnectionError: return await ctx.send(f"Houve um erro ao tentar conectar a API da Jagex. 
Tente novamente mais tarde.") if not player.exists: return await ctx.send(f"Jogador '{player.name}' não existe.") if not player.clan: return await ctx.send(f"Jogador '{player.name}' não está em um clã.") user_clan = rs3clans.Clan(name=player.clan) member = user_clan.get_member(username) user_clan_exp = member.exp user_rank = member.rank display_username = player.name if self.bot.setting.show_titles: if player.suffix: display_username = f"{player.name} {player.title}" else: display_username = f"{player.title} {player.name}" user_url_name = player.name.replace(" ", "%20") user_url_clan = player.clan.replace(" ", "%20") icon_url = f"https://secure.runescape.com/m=avatar-rs/{user_url_name}/chat.png" runeclan_url = f"https://runeclan.com/user/{user_url_name}" clan_banner_url = f"http://services.runescape.com/m=avatar-rs/l=3/a=869/{user_url_clan}/clanmotif.png" embed_title = "RuneClan" rank_header = "__Rank__" clan_header = "__Clã__" exp_header = "__Exp no Clã__" total_exp_header = "__Exp Total__" private_profile_header = "Indisponível - Perfil Privado" rank_emoji = self.bot.setting.clan_settings[user_rank]['Emoji'] user_rank = self.bot.setting.clan_settings[user_rank]['Translation'] clan_info_embed = discord.Embed( title=embed_title, description="", color=discord.Colour.dark_blue(), url=runeclan_url, ) clan_info_embed.set_author( icon_url=icon_url, name=display_username ) clan_info_embed.set_thumbnail( url=clan_banner_url ) clan_info_embed.add_field( name=clan_header, value=player.clan ) clan_info_embed.add_field( name=rank_header, value=f"{user_rank} {rank_emoji}" ) clan_info_embed.add_field( name=exp_header, value=f"{user_clan_exp:,}" ) if player.private_profile: clan_info_embed.add_field( name=total_exp_header, value=private_profile_header, inline=False ) else: clan_info_embed.add_field( name=total_exp_header, value=f"{player.exp:,}" ) return await ctx.send(content=None, embed=clan_info_embed) @commands.cooldown(1, 5, commands.BucketType.user) @commands.bot_has_permissions(embed_links=True) @commands.command(aliases=['ranksupdate', 'upranks', 'rank']) async def ranks(self, ctx: Context, *, clan: str = 'Atlantis'): if clan.lower() == 'atlantis argus': return await ctx.send('`!rank argus` irmão') elif clan.lower() == 'atlantis': exp_general = 2_000_000_000 exp_captain = 1_000_000_000 exp_lieutenant = 500_000_000 exp_seargent = 250_000_000 exp_corporal = 125_000_000 elif clan.lower() == 'argus': exp_general = 500_000_000 exp_captain = 250_000_000 exp_lieutenant = 125_000_000 exp_seargent = 60_000_000 exp_corporal = 30_000_000 clan = 'Atlantis Argus' else: return await ctx.send('Clã não reconhecido.') rank_emoji = { 'Recruit': self.bot.setting.clan_settings['Recruit']['Emoji'], 'Corporal': self.bot.setting.clan_settings['Corporal']['Emoji'], 'Sergeant': self.bot.setting.clan_settings['Sergeant']['Emoji'], 'Lieutenant': self.bot.setting.clan_settings['Lieutenant']['Emoji'], 'Captain': self.bot.setting.clan_settings['Captain']['Emoji'], 'General': self.bot.setting.clan_settings['General']['Emoji'], } ranks_embed = discord.Embed( title="__Ranks a Atualizar__", description=" ", ) found = False clan = rs3clans.Clan(clan, set_exp=False) clan_members = reversed([member for member in clan]) member: rs3clans.ClanMember for member in clan_members: if len(ranks_embed.fields) >= 20: await ctx.send('Muitos ranks a serem atualizados, enviando apenas os 20 primeiros.') break if member.exp >= exp_corporal and member.rank == 'Recruit': ranks_embed.add_field( name=member.name, value=f"Recruta 
{rank_emoji['Recruit']} ❯ Cabo {rank_emoji['Corporal']}\n" f"**__Exp:__** {member.exp:,}\n{separator}", inline=False) found = True elif member.exp >= exp_general and member.rank == 'Captain': ranks_embed.add_field( name=member.name, value=f"Capitão {rank_emoji['Captain']} ❯ General {rank_emoji['General']}\n" f"**__Exp:__** {member.exp:,}\n{separator}", inline=False) found = True elif member.exp >= exp_captain and member.rank == 'Lieutenant': ranks_embed.add_field( name=member.name, value=f"Tenente {rank_emoji['Lieutenant']} ❯ Capitão {rank_emoji['Captain']}\n" f"**__Exp:__** {member.exp:,}\n{separator}", inline=False) found = True elif member.exp >= exp_lieutenant and member.rank == 'Sergeant': ranks_embed.add_field( name=member.name, value=f"Sargento {rank_emoji['Sergeant']} ❯ Tenente {rank_emoji['Lieutenant']}\n" f"**__Exp:__** {member.exp:,}\n{separator}", inline=False) found = True elif member.exp >= exp_seargent and member.rank == 'Corporal': ranks_embed.add_field( name=member.name, value=f"Cabo {rank_emoji['Corporal']} ❯ Sargento {rank_emoji['Sergeant']}\n" f"**__Exp:__** {member.exp:,}\n{separator}", inline=False) found = True if not found: ranks_embed.add_field( name="Nenhum Rank a ser atualizado no momento :)", value=separator, inline=False ) return await ctx.send(embed=ranks_embed) def setup(bot): bot.add_cog(Clan(bot))
2.28125
2
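The chained elifs in the ranks command promote one step at a time (a Recruit past the Corporal threshold is flagged Recruit ❯ Corporal, nothing further). A table-driven sketch of the same Atlantis thresholds that instead reports the highest rank an exp total has earned (deserved_rank is a made-up name, not part of the cog):

THRESHOLDS = [
    ('Corporal', 125_000_000),
    ('Sergeant', 250_000_000),
    ('Lieutenant', 500_000_000),
    ('Captain', 1_000_000_000),
    ('General', 2_000_000_000),
]

def deserved_rank(exp):
    # walk the ordered thresholds and keep the last one the exp total meets
    rank = 'Recruit'
    for name, needed in THRESHOLDS:
        if exp >= needed:
            rank = name
    return rank

print(deserved_rank(600_000_000))  # Lieutenant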
otcextensions/tests/functional/osclient/vpc/v2/common.py
zsoltn/python-otcextensions
10
4656
<filename>otcextensions/tests/functional/osclient/vpc/v2/common.py # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # import json import uuid from datetime import datetime from openstackclient.tests.functional import base class VpcTestCase(base.TestCase): """Common functional test bits for VPC commands""" CURR_TIME = datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f") def setUp(self): super(VpcTestCase, self).setUp() UUID = uuid.uuid4().hex[:8] self.LOCAL_ROUTER_NAME = 'test-local-router-otce-cli' + UUID self.PEER_ROUTER_NAME = 'test-peer-router-otce-cli' + UUID self.PEERING_NAME = 'test-peering-otce-cli-' + UUID self.LOCAL_ROUTER_ID = None self.PEER_ROUTER_ID = None self.PEERING_ID = None def create_vpc_peering(self, name=None): self._create_routers() name = name or self.PEERING_NAME json_output = json.loads(self.openstack( 'vpc peering create ' '{name} ' '--local-router-id "{local_router_id}" ' '--peer-router-id "{peer_router_id}" ' '-f json'.format( name=name, local_router_id=self.LOCAL_ROUTER_ID, peer_router_id=self.PEER_ROUTER_ID) )) self.assertIsNotNone(json_output) self.PEERING_ID = json_output['id'] return json_output def delete_vpc_peering(self): self.addCleanup(self._delete_routers) self.openstack('vpc peering delete {}'.format(self.PEERING_ID)) def _create_routers(self): local_router = json.loads(self.openstack( 'router create -f json ' + self.LOCAL_ROUTER_NAME )) self.LOCAL_ROUTER_ID = local_router['id'] peer_router = json.loads(self.openstack( 'router create -f json ' + self.PEER_ROUTER_NAME )) self.PEER_ROUTER_ID = peer_router['id'] def _delete_routers(self): self.openstack( 'router delete {} {}'.format( self.LOCAL_ROUTER_ID, self.PEER_ROUTER_ID ))
1.921875
2
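The helpers above all follow one pattern: run an openstack command with '-f json' and json.loads its stdout. A generic sketch of that pattern (cli_json is an invented name; the real base.TestCase provides self.openstack for this):

import json
import subprocess

def cli_json(cmd):
    # run a CLI that was asked for JSON output and parse the result
    out = subprocess.check_output(cmd, shell=True, text=True)
    return json.loads(out)

# e.g. router = cli_json('openstack router create -f json demo-router')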
tests/test_nested_structures_inside_structure_values.py
Robinson04/StructNoSQL
3
4657
<reponame>Robinson04/StructNoSQL import unittest from typing import Set, Optional, Dict, List from uuid import uuid4 from StructNoSQL import BaseField, MapModel, TableDataModel from tests.components.playground_table_clients import PlaygroundDynamoDBBasicTable, TEST_ACCOUNT_ID class TableModel(TableDataModel): accountId = BaseField(field_type=str, required=True) nestedDictDictStructure = BaseField(field_type=Dict[str, Dict[str, bool]], required=False, key_name='itemKey') # nestedDictListStructure = BaseField(field_type=Dict[str, List[str]], required=False) # nestedDictSetStructure = BaseField(field_type=Dict[str, Set[str]], required=False) class TestsNestedStructuresInsideStructureValues(unittest.TestCase): def __init__(self, method_name: str): super().__init__(methodName=method_name) self.users_table = PlaygroundDynamoDBBasicTable(data_model=TableModel) def test_nested_dict_dict_structure(self): random_parent_key = f"parentKey_{uuid4()}" random_child_key = f"childKey_{uuid4()}" keys_fields_switch = list(self.users_table.fields_switch.keys()) self.assertIn('nestedDictDictStructure.{{itemKey}}.{{itemKeyChild}}', keys_fields_switch) update_success = self.users_table.update_field( key_value=TEST_ACCOUNT_ID, field_path='nestedDictDictStructure.{{itemKey}}.{{itemKeyChild}}', query_kwargs={'itemKey': random_parent_key, 'itemKeyChild': random_child_key}, value_to_set=True ) self.assertTrue(update_success) retrieved_item = self.users_table.get_field( key_value=TEST_ACCOUNT_ID, field_path='nestedDictDictStructure.{{itemKey}}', query_kwargs={'itemKey': random_parent_key} ) self.assertEqual(retrieved_item, {'itemKeyChild': True}) removed_item = self.users_table.remove_field( key_value=TEST_ACCOUNT_ID, field_path='nestedDictDictStructure.{{itemKey}}', query_kwargs={'itemKey': random_parent_key} ) self.assertEqual(removed_item, {'itemKeyChild': True}) retrieved_expected_none_item = self.users_table.get_field( TEST_ACCOUNT_ID, field_path='nestedDictDictStructure.{{itemKey}}', query_kwargs={'itemKey': random_parent_key} ) self.assertIsNone(retrieved_expected_none_item) def test_nested_dict_list_structure(self): # todo: implement pass def test_nested_dict_set_structure(self): # todo: implement pass if __name__ == '__main__': unittest.main()
2.5
2
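The tests address nested records through field paths whose '{{itemKey}}' placeholders are filled in from query_kwargs. A toy substitution sketch of what such a path resolves to (illustration only, not StructNoSQL's actual resolver):

def resolve_field_path(path, **kwargs):
    # substitute each {{name}} placeholder with its query_kwargs value
    for key, value in kwargs.items():
        path = path.replace('{{%s}}' % key, str(value))
    return path

print(resolve_field_path(
    'nestedDictDictStructure.{{itemKey}}.{{itemKeyChild}}',
    itemKey='parentKey_1', itemKeyChild='childKey_2',
))  # nestedDictDictStructure.parentKey_1.childKey_2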
test/cpython/test_base64.py
aisk/pyston
1
4658
import unittest from test import test_support import base64 class LegacyBase64TestCase(unittest.TestCase): def test_encodestring(self): eq = self.assertEqual eq(base64.encodestring("www.python.org"), "d3d3LnB5dGhvbi5vcmc=\n") eq(base64.encodestring("a"), "YQ==\n") eq(base64.encodestring("ab"), "YWI=\n") eq(base64.encodestring("abc"), "YWJj\n") eq(base64.encodestring(""), "") eq(base64.encodestring("abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. []{}"), "<KEY>" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n") # Non-bytes eq(base64.encodestring(bytearray('abc')), 'YWJj\n') def test_decodestring(self): eq = self.assertEqual eq(base64.decodestring("d3d3LnB5dGhvbi5vcmc=\n"), "www.python.org") eq(base64.decodestring("YQ==\n"), "a") eq(base64.decodestring("YWI=\n"), "ab") eq(base64.decodestring("YWJj\n"), "abc") eq(base64.decodestring("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n"), "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. []{}") eq(base64.decodestring(''), '') # Non-bytes eq(base64.decodestring(bytearray("YWJj\n")), "abc") def test_encode(self): eq = self.assertEqual from cStringIO import StringIO infp = StringIO('abcdefghijklmnopqrstuvwxyz' 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' '0123456789!@#0^&*();:<>,. []{}') outfp = StringIO() base64.encode(infp, outfp) eq(outfp.getvalue(), 'YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE' 'RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT' 'Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n') def test_decode(self): from cStringIO import StringIO infp = StringIO('d3d3LnB5dGhvbi5vcmc=') outfp = StringIO() base64.decode(infp, outfp) self.assertEqual(outfp.getvalue(), 'www.python.org') class BaseXYTestCase(unittest.TestCase): def test_b64encode(self): eq = self.assertEqual # Test default alphabet eq(base64.b64encode("www.python.org"), "d3d3LnB5dGhvbi5vcmc=") eq(base64.b64encode('\x00'), 'AA==') eq(base64.b64encode("a"), "YQ==") eq(base64.b64encode("ab"), "YWI=") eq(base64.b64encode("abc"), "YWJj") eq(base64.b64encode(""), "") eq(base64.b64encode("abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. []{}"), "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") # Test with arbitrary alternative characters eq(base64.b64encode('\xd3V\xbeo\xf7\x1d', altchars='*$'), '01a*b$cd') # Non-bytes eq(base64.b64encode(bytearray('abcd')), 'YWJjZA==') self.assertRaises(TypeError, base64.b64encode, '\xd3V\xbeo\xf7\x1d', altchars=bytearray('*$')) # Test standard alphabet eq(base64.standard_b64encode("www.python.org"), "d3d3LnB5dGhvbi5vcmc=") eq(base64.standard_b64encode("a"), "YQ==") eq(base64.standard_b64encode("ab"), "YWI=") eq(base64.standard_b64encode("abc"), "YWJj") eq(base64.standard_b64encode(""), "") eq(base64.standard_b64encode("abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. 
[]{}"), "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") # Non-bytes eq(base64.standard_b64encode(bytearray('abcd')), 'YWJjZA==') # Test with 'URL safe' alternative characters eq(base64.urlsafe_b64encode('\xd3V\xbeo\xf7\x1d'), '01a-b_cd') # Non-bytes eq(base64.urlsafe_b64encode(bytearray('\xd3V\xbeo\xf7\x1d')), '01a-b_cd') def test_b64decode(self): eq = self.assertEqual eq(base64.b64decode("d3d3LnB5dGhvbi5vcmc="), "www.python.org") eq(base64.b64decode('AA=='), '\x00') eq(base64.b64decode("YQ=="), "a") eq(base64.b64decode("YWI="), "ab") eq(base64.b64decode("YWJj"), "abc") eq(base64.b64decode("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="), "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. []{}") eq(base64.b64decode(''), '') # Test with arbitrary alternative characters eq(base64.b64decode('01a*b$cd', altchars='*$'), '\xd3V\xbeo\xf7\x1d') # Non-bytes eq(base64.b64decode(bytearray("YWJj")), "abc") # Test standard alphabet eq(base64.standard_b64decode("d3d3LnB5dGhvbi5vcmc="), "www.python.org") eq(base64.standard_b64decode("YQ=="), "a") eq(base64.standard_b64decode("YWI="), "ab") eq(base64.standard_b64decode("YWJj"), "abc") eq(base64.standard_b64decode(""), "") eq(base64.standard_b64decode("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="), "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. []{}") # Non-bytes eq(base64.standard_b64decode(bytearray("YWJj")), "abc") # Test with 'URL safe' alternative characters eq(base64.urlsafe_b64decode('01a-b_cd'), '\xd3V\xbeo\xf7\x1d') # Non-bytes eq(base64.urlsafe_b64decode(bytearray('01a-b_cd')), '\xd3V\xbeo\xf7\x1d') def test_b64decode_error(self): self.assertRaises(TypeError, base64.b64decode, 'abc') def test_b32encode(self): eq = self.assertEqual eq(base64.b32encode(''), '') eq(base64.b32encode('\x00'), 'AA======') eq(base64.b32encode('a'), 'ME======') eq(base64.b32encode('ab'), 'MFRA====') eq(base64.b32encode('abc'), 'MFRGG===') eq(base64.b32encode('abcd'), 'MFRGGZA=') eq(base64.b32encode('abcde'), 'MFRGGZDF') # Non-bytes eq(base64.b32encode(bytearray('abcd')), 'MFRGGZA=') def test_b32decode(self): eq = self.assertEqual eq(base64.b32decode(''), '') eq(base64.b32decode('AA======'), '\x00') eq(base64.b32decode('ME======'), 'a') eq(base64.b32decode('MFRA===='), 'ab') eq(base64.b32decode('MFRGG==='), 'abc') eq(base64.b32decode('MFRGGZA='), 'abcd') eq(base64.b32decode('MFRGGZDF'), 'abcde') # Non-bytes self.assertRaises(TypeError, base64.b32decode, bytearray('MFRGG===')) def test_b32decode_casefold(self): eq = self.assertEqual eq(base64.b32decode('', True), '') eq(base64.b32decode('ME======', True), 'a') eq(base64.b32decode('MFRA====', True), 'ab') eq(base64.b32decode('MFRGG===', True), 'abc') eq(base64.b32decode('MFRGGZA=', True), 'abcd') eq(base64.b32decode('MFRGGZDF', True), 'abcde') # Lower cases eq(base64.b32decode('me======', True), 'a') eq(base64.b32decode('mfra====', True), 'ab') eq(base64.b32decode('mfrgg===', True), 'abc') eq(base64.b32decode('mfrggza=', True), 'abcd') eq(base64.b32decode('mfrggzdf', True), 'abcde') # Expected exceptions self.assertRaises(TypeError, base64.b32decode, 'me======') # Mapping zero and one eq(base64.b32decode('MLO23456'), 'b\xdd\xad\xf3\xbe') eq(base64.b32decode('M1023456', map01='L'), 'b\xdd\xad\xf3\xbe') eq(base64.b32decode('M1023456', 
map01='I'), 'b\x1d\xad\xf3\xbe') def test_b32decode_error(self): self.assertRaises(TypeError, base64.b32decode, 'abc') self.assertRaises(TypeError, base64.b32decode, 'ABCDEF==') def test_b16encode(self): eq = self.assertEqual eq(base64.b16encode('\x01\x02\xab\xcd\xef'), '0102ABCDEF') eq(base64.b16encode('\x00'), '00') # Non-bytes eq(base64.b16encode(bytearray('\x01\x02\xab\xcd\xef')), '0102ABCDEF') def test_b16decode(self): eq = self.assertEqual eq(base64.b16decode('0102ABCDEF'), '\x01\x02\xab\xcd\xef') eq(base64.b16decode('00'), '\x00') # Lower case is not allowed without a flag self.assertRaises(TypeError, base64.b16decode, '0102abcdef') # Case fold eq(base64.b16decode('0102abcdef', True), '\x01\x02\xab\xcd\xef') # Non-bytes eq(base64.b16decode(bytearray("0102ABCDEF")), '\x01\x02\xab\xcd\xef') def test_main(): test_support.run_unittest(__name__) if __name__ == '__main__': test_main()
3.078125
3
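These are Python 2 tests (str-based API, cStringIO). For reference, the b32 casefold and map01 behaviours they exercise look like this on Python 3, where the functions take and return bytes (a quick sketch):

import base64

print(base64.b32decode(b'MFRGGZDF'))                 # b'abcde'
print(base64.b32decode(b'mfrggzdf', casefold=True))  # lower case accepted with casefold=True
print(base64.b32decode(b'M1023456', map01=b'L'))     # digit 0 read as O, digit 1 as L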
src/Path.py
gabbonj/Workbench
2
4659
import numpy as np from ctypes import c_void_p from .Shader import Shader from .transforms import * from OpenGL.GL import * class Path: # position=[x1, y1, z1, ..., xn, yn, zn] ; rotation = [[Rx1, Ry1, Rz1], ..., [Rxn, Ryn, Rzn]] def __init__(self, position, rotation=None): self.loadPath(position) if rotation: assert len(position) == len(rotation) * 3 self.loadRotation(rotation) else: self.rotation = 'Pio è un figo' def loadPath(self, position): # compiling shader self.path_shader = Shader('src\\shaders\\path\\pathvert.glsl', 'src\\shaders\\path\\pathfrag.glsl').shaderProgram # setting path buffer self.vertices = position self.patharray = glGenVertexArrays(1) glBindVertexArray(self.patharray) self.lineBuffer = glGenBuffers(1) glBindBuffer(GL_ARRAY_BUFFER, self.lineBuffer) glBufferData(GL_ARRAY_BUFFER, np.array(self.vertices, dtype='float32'), GL_STATIC_DRAW) glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * 4, c_void_p(0)) def loadRotation(self, rotation): self.rotation = rotation # compiling shader self.xpath_shader = Shader('src\\shaders\\path\\pathvert.glsl', 'src\\shaders\\path\\xpathfrag.glsl').shaderProgram self.ypath_shader = Shader('src\\shaders\\path\\pathvert.glsl', 'src\\shaders\\path\\ypathfrag.glsl').shaderProgram self.zpath_shader = Shader('src\\shaders\\path\\pathvert.glsl', 'src\\shaders\\path\\zpathfrag.glsl').shaderProgram # setting versors self.xvertices = [] self.yvertices = [] self.zvertices = [] for pos in range(len(rotation)): xversor = self.getVersorAtTime(np.array([1, 0, 0, 1], dtype='float32'), pos) yversor = self.getVersorAtTime(np.array([0, 1, 0, 1], dtype='float32'), pos) zversor = self.getVersorAtTime(np.array([0, 0, 1, 1], dtype='float32'), pos) pos = [self.vertices[pos*3], self.vertices[pos*3 + 1], self.vertices[pos*3 + 2]] self.xvertices.extend(pos) self.xvertices.extend([xversor[0], xversor[1], xversor[2]]) self.yvertices.extend(pos) self.yvertices.extend([yversor[0], yversor[1], yversor[2]]) self.zvertices.extend(pos) self.zvertices.extend([zversor[0], zversor[1], zversor[2]]) #setting xline bufer self.xpatharray = glGenVertexArrays(1) glBindVertexArray(self.xpatharray) self.xlineBuffer = glGenBuffers(1) glBindBuffer(GL_ARRAY_BUFFER, self.xlineBuffer) glBufferData(GL_ARRAY_BUFFER, np.array(self.xvertices, dtype='float32'), GL_STATIC_DRAW) glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * 4, c_void_p(0)) # setting yline buffer self.ypatharray = glGenVertexArrays(1) glBindVertexArray(self.ypatharray) self.ylineBuffer = glGenBuffers(1) glBindBuffer(GL_ARRAY_BUFFER, self.ylineBuffer) glBufferData(GL_ARRAY_BUFFER, np.array(self.yvertices, dtype='float32'), GL_STATIC_DRAW) glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * 4, c_void_p(0)) #setting xline bufer self.zpatharray = glGenVertexArrays(1) glBindVertexArray(self.zpatharray) self.zlineBuffer = glGenBuffers(1) glBindBuffer(GL_ARRAY_BUFFER, self.zlineBuffer) glBufferData(GL_ARRAY_BUFFER, np.array(self.zvertices, dtype='float32'), GL_STATIC_DRAW) glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * 4, c_void_p(0)) def getVersorAtTime(self, versor, index): r_versor = np.dot(get_rot(self.rotation[index][0], 0), versor) r_versor = np.dot(get_rot(self.rotation[index][1], 1), r_versor) r_versor = np.dot(get_rot(self.rotation[index][2], 2), r_versor) t_versor = np.dot(get_traslation(self.vertices[index*3], self.vertices[index*3 + 1], self.vertices[index*3 + 2]), r_versor) return t_versor def renderPath(self, camera): model = np.identity(4) view = camera.view proj = camera.proj # rendering the path 
glBindVertexArray(self.patharray) glUseProgram(self.path_shader) modelLocation = glGetUniformLocation(self.path_shader, 'model') viewLocation = glGetUniformLocation(self.path_shader, 'view') projectionLocation = glGetUniformLocation(self.path_shader, 'projection') glUniformMatrix4fv(modelLocation, 1, GL_TRUE, model) glUniformMatrix4fv(viewLocation, 1, GL_TRUE, view) glUniformMatrix4fv(projectionLocation, 1, GL_FALSE, proj) glEnableVertexAttribArray(0) glDrawArrays(GL_LINE_STRIP, 0, int(len(self.vertices)/3)) glDisableVertexAttribArray(0) # rendering the xlines if self.rotation != 'Pio è un figo': glBindVertexArray(self.xpatharray) glUseProgram(self.xpath_shader) modelLocation = glGetUniformLocation(self.xpath_shader, 'model') viewLocation = glGetUniformLocation(self.xpath_shader, 'view') projectionLocation = glGetUniformLocation(self.xpath_shader, 'projection') glUniformMatrix4fv(modelLocation, 1, GL_TRUE, model) glUniformMatrix4fv(viewLocation, 1, GL_TRUE, view) glUniformMatrix4fv(projectionLocation, 1, GL_FALSE, proj) glEnableVertexAttribArray(0) glDrawArrays(GL_LINES, 0, int(len(self.xvertices)/3)) glDisableVertexAttribArray(0) # rendering the ylines glBindVertexArray(self.ypatharray) glUseProgram(self.ypath_shader) modelLocation = glGetUniformLocation(self.ypath_shader, 'model') viewLocation = glGetUniformLocation(self.ypath_shader, 'view') projectionLocation = glGetUniformLocation(self.ypath_shader, 'projection') glUniformMatrix4fv(modelLocation, 1, GL_TRUE, model) glUniformMatrix4fv(viewLocation, 1, GL_TRUE, view) glUniformMatrix4fv(projectionLocation, 1, GL_FALSE, proj) glEnableVertexAttribArray(0) glDrawArrays(GL_LINES, 0, int(len(self.xvertices)/3)) glDisableVertexAttribArray(0) # rendering the zlines glBindVertexArray(self.zpatharray) glUseProgram(self.zpath_shader) modelLocation = glGetUniformLocation(self.zpath_shader, 'model') viewLocation = glGetUniformLocation(self.zpath_shader, 'view') projectionLocation = glGetUniformLocation(self.zpath_shader, 'projection') glUniformMatrix4fv(modelLocation, 1, GL_TRUE, model) glUniformMatrix4fv(viewLocation, 1, GL_TRUE, view) glUniformMatrix4fv(projectionLocation, 1, GL_FALSE, proj) glEnableVertexAttribArray(0) glDrawArrays(GL_LINES, 0, int(len(self.xvertices)/3)) glDisableVertexAttribArray(0)
2.390625
2
icfree/echo_instructor/args.py
brsynth/icfree-ml
1
4660
from argparse import ArgumentParser
from os import getcwd as os_getcwd

DEFAULT_OUTPUT_FOLDER = os_getcwd()
DEFAULT_SAMPLE_VOLUME = 10000


def build_args_parser(program, description):
    # pass the arguments by keyword: passed positionally, `description`
    # would be consumed as ArgumentParser's `usage` parameter
    parser = ArgumentParser(
        prog=program,
        description=description,
    )
    parser = add_arguments(parser)
    return parser


def add_arguments(parser):
    parser.add_argument(
        'cfps',
        type=str,
        help='Path to a .tsv file containing CFPS parameters and features',
    )
    parser.add_argument(
        'init_tset',
        type=str,
        help='Path to a .tsv file containing initial training set',
    )
    parser.add_argument(
        'norm_set',
        type=str,
        help='Path to a .tsv file containing normalizer set',
    )
    parser.add_argument(
        'autofluo_set',
        type=str,
        help='Path to a .tsv file containing autofluorescence set',
    )
    parser.add_argument(
        '-v', '--sample_volume',
        type=int,
        default=DEFAULT_SAMPLE_VOLUME,
        help=('Final sample volume in each well in nL'
              f' (default: {DEFAULT_SAMPLE_VOLUME})')
    )
    parser.add_argument(
        '-of', '--output-folder',
        type=str,
        default=DEFAULT_OUTPUT_FOLDER,
        help=('Output folder to write output files'
              f' (default: {DEFAULT_OUTPUT_FOLDER})')
    )
    return parser
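# Minimal usage sketch (the file names below are made up for the demo and
# are not part of the module):
if __name__ == '__main__':
    parser = build_args_parser('echo_instructor', 'Generate Echo instructions')
    args = parser.parse_args(['cfps.tsv', 'init.tsv', 'norm.tsv', 'autofluo.tsv',
                              '-v', '5000'])
    print(args.sample_volume)  # -> 5000
    print(args.output_folder)  # -> current working directory by default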
2.890625
3
ex1_01.py
sitdh/59.com-prog
1
4661
import math

x = float(input())

prop_2 = -(x**2) / math.factorial(2)
prop_3 = (x**4) / math.factorial(4)
prop_4 = -(x**6) / math.factorial(6)
cos_x = float(1 + prop_2 + prop_3 + prop_4)

print(prop_2)
print(prop_3)
print(prop_4)
print(cos_x)
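# Illustrative check (not part of the original exercise): the three terms
# above are the start of the Taylor series
#   cos(x) = 1 - x^2/2! + x^4/4! - x^6/6! + ...
# so the truncation error can be measured directly against math.cos.
print(abs(cos_x - math.cos(x)))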
3.609375
4
test/datagateway_api/icat/filters/test_where_filter.py
MRichards99/datagateway-api
2
4662
<filename>test/datagateway_api/icat/filters/test_where_filter.py
import pytest

from datagateway_api.src.common.exceptions import BadRequestError, FilterError
from datagateway_api.src.datagateway_api.filter_order_handler import FilterOrderHandler
from datagateway_api.src.datagateway_api.icat.filters import PythonICATWhereFilter


class TestICATWhereFilter:
    @pytest.mark.parametrize(
        "operation, value, expected_condition_value",
        [
            pytest.param("eq", 5, ["%s = '5'"], id="equal"),
            pytest.param("ne", 5, ["%s != 5"], id="not equal"),
            pytest.param("like", 5, ["%s like '%%5%%'"], id="like"),
            pytest.param("ilike", 5, ["UPPER(%s) like UPPER('%%5%%')"], id="ilike"),
            pytest.param("nlike", 5, ["%s not like '%%5%%'"], id="not like"),
            pytest.param(
                "nilike",
                5,
                ["UPPER(%s) not like UPPER('%%5%%')"],
                id="not ilike",
            ),
            pytest.param("lt", 5, ["%s < '5'"], id="less than"),
            pytest.param("lte", 5, ["%s <= '5'"], id="less than or equal"),
            pytest.param("gt", 5, ["%s > '5'"], id="greater than"),
            pytest.param("gte", 5, ["%s >= '5'"], id="greater than or equal"),
            pytest.param("in", [1, 2, 3, 4], ["%s in (1, 2, 3, 4)"], id="in a list"),
            pytest.param("in", [], ["%s in (NULL)"], id="empty list"),
        ],
    )
    def test_valid_operations(
        self, icat_query, operation, value, expected_condition_value,
    ):
        test_filter = PythonICATWhereFilter("id", value, operation)
        test_filter.apply_filter(icat_query)

        assert icat_query.conditions == {"id": expected_condition_value}

    def test_invalid_in_operation(self, icat_query):
        with pytest.raises(BadRequestError):
            PythonICATWhereFilter("id", "1, 2, 3, 4, 5", "in")

    def test_invalid_operation(self, icat_query):
        test_filter = PythonICATWhereFilter("id", 10, "non")

        with pytest.raises(FilterError):
            test_filter.apply_filter(icat_query)

    def test_valid_internal_icat_value(self, icat_query):
        """Check that values that point to other values in the schema are applied"""
        test_filter = PythonICATWhereFilter("startDate", "o.endDate", "lt")
        test_filter.apply_filter(icat_query)

        assert icat_query.conditions == {"startDate": ["%s < o.endDate"]}

    def test_valid_field(self, icat_query):
        test_filter = PythonICATWhereFilter("title", "Investigation Title", "eq")
        test_filter.apply_filter(icat_query)

        assert icat_query.conditions == {"title": ["%s = 'Investigation Title'"]}

    def test_invalid_field(self, icat_query):
        test_filter = PythonICATWhereFilter("random_field", "my_value", "eq")

        with pytest.raises(FilterError):
            test_filter.apply_filter(icat_query)

    def test_multiple_conditions_per_field(self, icat_query):
        lt_filter = PythonICATWhereFilter("id", 10, "lt")
        gt_filter = PythonICATWhereFilter("id", 5, "gt")

        filter_handler = FilterOrderHandler()
        filter_handler.add_filters([lt_filter, gt_filter])
        filter_handler.apply_filters(icat_query)

        assert icat_query.conditions == {"id": ["%s < '10'", "%s > '5'"]}
2.265625
2
scrapy/spider/spider/items.py
huobingli/splider
0
4663
# -*- coding: utf-8 -*-

# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html

import scrapy
from scrapy.loader import ItemLoader
from scrapy.loader.processors import TakeFirst

# class SpiderItem(scrapy.Item):
#     # define the fields for your item here like:
#     # name = scrapy.Field()
#     pass
#
#
# class TorrentItem(scrapy.Item):
#     url = scrapy.Field()
#     name = scrapy.Field()
#     description = scrapy.Field()
#     size = scrapy.Field()


class StockstarItemLoader(ItemLoader):
    # Custom ItemLoader used to store the fields scraped by the spider
    default_output_processor = TakeFirst()


class StockstarItem(scrapy.Item):
    # Define the corresponding fields
    # define the fields for your item here like:
    # name = scrapy.Field()
    code = scrapy.Field()            # stock code
    abbr = scrapy.Field()            # stock abbreviation
    last_trade = scrapy.Field()      # latest price
    chg_ratio = scrapy.Field()       # change percentage
    chg_amt = scrapy.Field()         # change amount
    chg_ratio_5min = scrapy.Field()  # 5-minute change percentage
    volumn = scrapy.Field()          # trading volume
    turn_over = scrapy.Field()       # turnover
2.8125
3
codetools/contexts/multi_context.py
enthought/codetools
29
4664
#
# (C) Copyright 2013 Enthought, Inc., Austin, TX
# All right reserved.
#
# This file is open source software distributed according to the terms in
# LICENSE.txt
#

""" Context holding multiple subcontexts.
"""

from __future__ import absolute_import

from itertools import chain
from collections import MutableMapping as DictMixin

from traits.api import (Bool, List, Str, Undefined, Supports, adapt,
                        provides, on_trait_change)

from .data_context import DataContext, ListenableMixin, PersistableMixin
from .i_context import ICheckpointable, IDataContext, IRestrictedContext
from .utils import safe_repr


@provides(IDataContext)
class MultiContext(ListenableMixin, PersistableMixin, DictMixin):
    """ Wrap several subcontexts.
    """

    #: The name of the context.
    name = Str("multidummy")

    #: The underlying dictionary.
    subcontexts = List(Supports(IRestrictedContext, factory=DataContext))

    #: Suppress subcontext modified events
    veto_subcontext_modified = Bool(True)

    def __init__(self, *subcontexts, **traits):
        subcontexts = list(subcontexts)
        super(MultiContext, self).__init__(subcontexts=subcontexts, **traits)

    #### IContext interface ####################################################

    def __iter__(self):
        return iter(self.keys())

    def __len__(self):
        return len(list(self.keys()))

    def __contains__(self, key):
        for c in self.subcontexts:
            if key in c:
                return True
        return False

    def __delitem__(self, key):
        """ Remove the given key with [] access.

        Only deletes the first instance of the key.

        Parameters
        ----------
        key : str

        Raises
        ------
        KeyError if the key is not available in the context.
        """
        for c in self.subcontexts:
            try:
                del c[key]
                return
            except KeyError:
                continue
        raise KeyError(key)

    def __getitem__(self, key):
        for c in self.subcontexts:
            try:
                return c[key]
            except KeyError:
                continue
        raise KeyError(key)

    def __setitem__(self, key, value):
        """ Set item with [] access.

        The first subcontext which allows the key/value pair will get it. If
        an earlier subcontext has the key, but does not allow the assignment,
        then that key will be deleted. Later contexts with the key will be
        untouched.

        If the key/value pair cannot be assigned to anything, no deletion will
        take place.

        Parameters
        ----------
        key : str
        value : object

        Raises
        ------
        ValueError if the key is not permitted to be assigned that value.
        """

        # Let subtypes dictate compatibility independently of contained contexts
        if not self.allows(value, key):
            raise ValueError('Disallowed mapping: %s = %s' %
                             (key, safe_repr(value)))

        set = False
        blocking_contexts = []
        for c in self.subcontexts:
            if not set:
                if c.allows(value, key):
                    if key in c:
                        added = []
                        current_value = c[key]
                        try:
                            is_modified = bool(current_value != value)
                        except Exception:
                            is_modified = current_value is not value
                        if is_modified:
                            modified = [key]
                            c[key] = value
                        else:
                            modified = []
                    else:
                        added = [key]
                        modified = []
                        c[key] = value
                    set = True
                    break
                elif key in c:
                    # Record this context as blocking access to the final
                    # location of the value.
                    blocking_contexts.append(c)

        # Remove all blocking instances.
        for c in blocking_contexts:
            del c[key]

        if not set:
            raise ValueError('Disallowed mapping: %s = %s' %
                             (key, safe_repr(value)))

    def keys(self):
        return list(set(chain(*[list(c.keys()) for c in self.subcontexts])))

    # Expose DictMixin's get method over HasTraits'.
    get = DictMixin.get

    def __str__(self):
        # Maybe a good default string
        subcontext_str = '[%s]' % ', '.join([str(x) for x in self.subcontexts])
        return '%s(name=%r, subcontexts=%s)' % (type(self).__name__, self.name,
                                                subcontext_str)

    def __repr__(self):
        # Maybe a good default representation
        return '%s(name=%r)' % (type(self).__name__, self.name)

    #### IRestrictedContext interface ##########################################

    def allows(self, value, name=None):
        for c in self.subcontexts:
            if c.allows(value, name=name):
                return True
        return False

    #### Trait Event Handlers ##################################################

    @on_trait_change('subcontexts:items_modified')
    def subcontexts_items_modified(self, event):
        """ Pass events up.
        """
        if event is Undefined:
            # Nothing to do.
            return

        event.veto = self.veto_subcontext_modified
        self._fire_event(added=event.added, removed=event.removed,
                         modified=event.modified, context=event.context)

    def _subcontexts_items_changed(self, event):
        """ Trait listener for items of subcontexts list.
        """
        added = []
        removed = []

        # Add to the list of items added
        if len(event.added):
            for context in event.added:
                added.extend(list(context.keys()))

        # Add to the list of items removed
        if len(event.removed):
            for context in event.removed:
                removed.extend(list(context.keys()))

        self._fire_event(added=added, removed=removed)

    #### ICheckpointable interface ############################################

    def checkpoint(self):
        """ Make a shallow copy of the context.

        Technically, this is actually a fairly deep copy. All of the object
        structure should be replicated, but the actual dictionary storage will
        be shallowly copied::

            copy = context.shallow_copy()
            copy[key] is context[key] for key in context.keys()

        These semantics are useful for saving out checkpointed versions of the
        context for implementing an undo/redo stack. They may not be useful
        for other purposes.

        Returns
        -------
        copy : IContext
        """
        copy = self.clone_traits()
        new_subcontexts = []
        for context in self.subcontexts:
            checkpointable_subcontext = adapt(context, ICheckpointable)
            new_subcontexts.append(checkpointable_subcontext.checkpoint())
        copy.subcontexts = new_subcontexts
        return copy
2.109375
2
line_counter/TestCodes/python_test.py
FMoller/coding-auxiliary-tools
0
4665
"""A simple file to test the line_counter performance in python This is a multiline doctest """ __author__ = "<NAME>" __copyright__ = "" __credits__ = ["<NAME>"] __license__ = "MIT" __version__ = "1.0.1" __maintainer__ = "<NAME>" __email__ = "" __status__ = "" #import things import math #define things def some_function(var_one, var_two, var_three): """This is a function that do things""" if var_one > var_two: if var_two*var_three > var_one: return "blab" #this happens else: return "blob" else: return "fish"
2.984375
3
causal_attribution/data.py
VaniW/deconfounded-lexicon-induction
25
4666
"""Data pipelines.""" from collections import defaultdict, OrderedDict from tqdm import tqdm from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler import torch def get_info(examples, vocab=None, max_seq_len=256): """Gathers info on and creats a featurized example generator for a list of raw examples. Args: examples: list(list, float, or string). Examples to create generator for. vocab: list(str). A vocabulary for discrete datatypes (e.g. text or categorical). max_seq_len: int. maximum sequence length for text examples. Returns: A dict of info about this variable as well as a generator over featurized examples. """ assert isinstance(examples, list), 'examples must be list; got ' + str(type(examples)) assert len(examples) > 0, 'Empty example list!' # Text if isinstance(examples[0], list): assert vocab is not None, 'ERROR: must provide a vocab.' example_type = 'input' vocab = ['UNK', 'PAD'] + vocab tok2id = {tok: i for i, tok in enumerate(vocab)} ngrams = max(len(x.split()) for x in vocab) unk_id = 0 def featurizer(example): ids = [] for n in range(1, ngrams + 1): toks = [' '.join(example[i: i + n]) for i in range(len(example) - n + 1)] ids += [tok2id.get(x, 0) for x in toks] ids = ids[:max_seq_len] padded_ids = ids + ([1] * (max_seq_len - len(ids))) # pad idx = 1 return padded_ids # Continuous elif isinstance(examples[0], float) or isinstance(examples[0], int): example_type = 'continuous' vocab = ['N/A'] if isinstance(examples[0], int): featurizer = lambda ex: float(ex) else: featurizer = lambda ex: ex # Categorical elif isinstance(examples[0], str): example_type = 'categorical' if not vocab: vocab = ['UNK'] + sorted(list(set(examples))) tok2id = {tok: i for i, tok in enumerate(vocab)} featurizer = lambda ex: tok2id.get(ex, 0) # 0 is the unk id. else: print("ERROR: unrecognized example type: ", examples[0]) quit() return featurizer, example_type, vocab def get_iterator(vocab, df, name_to_type, batch_size=32, max_seq_len=256): """Builds a data iterator for text, confounds, and outcomes. Args: vocab: list(str). The vocabulary to use. df: pandas.df. The data we want to iterate over. The columns of these data should be a superset of the keys in name_to_type. name_to_type: dict. A mapping from variable names to whether they are "input", "predict", or "control" variables. batch_size: int. The batch size to use. max_seq_len: int. Maximum length of text sequences. Returns: A generator which yields dictionaries where variable names are mapped to tensors of batched data. 
""" def featurize(featurizer): return [featurizer(ex) for ex in examples] var_info = defaultdict(lambda: OrderedDict()) featurized_data = defaultdict(list) for var_name, var_type in name_to_type.items(): examples = list(df[var_name]) if var_type == 'input': examples = [x.split() for x in examples] featurizer, _, vocab = get_info(examples, vocab, max_seq_len) var_info[var_name] = { 'control': False, 'name': var_name, 'type': var_type, 'vocab': vocab } else: featurizer, varType, vocab = get_info(examples) var_info[var_name] = { 'control': var_type == 'control', 'name': var_name, 'type': varType, 'vocab': vocab } featurized_data[var_name] = [featurizer(ex) for ex in examples] def to_tensor(var_name): dtype = torch.float if var_info[var_name]['type'] in {'categorical', 'input'}: dtype = torch.long return torch.tensor(featurized_data[var_name], dtype=dtype) feature_names = sorted(featurized_data.keys()) data = TensorDataset(*[to_tensor(name) for name in feature_names]) dataloader = DataLoader( dataset=data, sampler=RandomSampler(data), collate_fn=lambda batch: [torch.stack(x) for x in zip(*batch)], # group by datatype. batch_size=batch_size) def iterator(): for batch in dataloader: yield dict(zip(feature_names, batch)) return iterator, var_info
2.78125
3
fopp/Chapter 12. Functions/get_num_digits.py
H2u-Hwng/EVC
0
4667
<reponame>H2u-Hwng/EVC
# Take number, and convert integer to string
# Calculate and return number of digits
def get_num_digits(num):
    # Convert int to str
    num_str = str(num)

    # Calculate number of digits
    digits = len(num_str)

    return digits


# Define main function
def main():
    # Prompt user for an integer
    number = int(input('Enter an integer: '))

    # Obtain number of digits
    num_digits = get_num_digits(number)

    # Display result
    print(f'The number of digits in number {number} is {num_digits}.')


# Call main function
main()
4.15625
4
app/dialog/avatar_picture_dialog.py
tirinox/alphavatarbot
1
4668
<filename>app/dialog/avatar_picture_dialog.py<gh_stars>1-10
import asyncio
from contextlib import AsyncExitStack

from aiogram.dispatcher.filters.state import StatesGroup, State
from aiogram.dispatcher.storage import FSMContextProxy
from aiogram.types import Message, PhotoSize, ReplyKeyboardRemove, ContentTypes
from aiogram.utils.helper import HelperMode

from dialog.avatar_image_work import download_tg_photo, get_userpic, combine_frame_and_photo_v2, img_to_bio
from dialog.base import BaseDialog, message_handler
from localization import BaseLocalization
from lib.depcont import DepContainer
from lib.texts import kbd


# todo: accept documents!

class AvatarStates(StatesGroup):
    mode = HelperMode.snake_case
    # fixme: no state handle
    MAIN = State()


class AvatarDialog(BaseDialog):
    def __init__(self, loc: BaseLocalization, data: FSMContextProxy, d: DepContainer):
        super().__init__(loc, data, d)
        self._work_lock = asyncio.Lock()

    def menu_kbd(self):
        return kbd([
            self.loc.BUTTON_AVA_FROM_MY_USERPIC,
        ], vert=True)

    @message_handler(state=None)
    async def on_no_state(self, message: Message):
        await self.on_enter(message)

    @message_handler(state=AvatarStates.MAIN)
    async def on_enter(self, message: Message):
        if message.text == self.loc.BUTTON_AVA_FROM_MY_USERPIC:
            await self.handle_avatar_picture(message, self.loc)
        else:
            await AvatarStates.MAIN.set()
            await message.answer(self.loc.TEXT_AVA_WELCOME, reply_markup=self.menu_kbd())

    @message_handler(state=AvatarStates.MAIN, content_types=ContentTypes.PHOTO)
    async def on_picture(self, message: Message):
        await self.handle_avatar_picture(message, self.loc, explicit_picture=message.photo[0])

    async def handle_avatar_picture(self, message: Message, loc: BaseLocalization, explicit_picture: PhotoSize = None):
        async with AsyncExitStack() as stack:
            # enter_async_context returns a coroutine, so it must be awaited
            await stack.enter_async_context(self._work_lock)

            # POST A LOADING STICKER
            sticker = await message.answer_sticker(self.loc.LOADING_STICKER,
                                                   disable_notification=True,
                                                   reply_markup=ReplyKeyboardRemove())
            # CLEAN UP IN THE END
            stack.push_async_callback(sticker.delete)

            if explicit_picture is not None:
                user_pic = await download_tg_photo(explicit_picture)
            else:
                user_pic = await get_userpic(message.from_user)

            w, h = user_pic.size
            if not w or not h:
                await message.answer(loc.TEXT_AVA_ERR_INVALID, reply_markup=self.menu_kbd())
                return

            if not ((64 <= w <= 4096) and (64 <= h <= 4096)):
                await message.answer(loc.TEXT_AVA_ERR_SIZE, reply_markup=self.menu_kbd())
                return

            # pic = await combine_frame_and_photo(self.deps.cfg, user_pic)
            pic = await combine_frame_and_photo_v2(self.deps.cfg, user_pic)

            user_id = message.from_user.id
            pic = img_to_bio(pic, name=f'alpha_avatar_{user_id}.png')
            await message.answer_document(pic, caption=loc.TEXT_AVA_READY, reply_markup=self.menu_kbd())
2.140625
2
image_store_processing.py
olubiyiontheweb/digid_websearch_flask
1
4669
<reponame>olubiyiontheweb/digid_websearch_flask
from skimage.metrics import structural_similarity as ssim
from glob import glob
from PIL import Image
import numpy as np
import ntpath
import dhash
import cv2

from database_structure import database_migrations

IMAGE_FOLDER = "./image_store"
ALLOWED_EXTENSIONS = ['png', 'jpg', 'jpeg']

image_store_hash = dict()

db_ops = database_migrations()


class preprocess:
    def __init__(self):
        # image table values to insert in database
        self.images_list = dict()
        self.image_store = list()

    def load_images_into_to_db(self):
        for img_type in ALLOWED_EXTENSIONS:
            images = glob(IMAGE_FOLDER + "/*" + img_type)
            for img in images:
                imgname = ntpath.basename(img)
                values = imgname, IMAGE_FOLDER, "local"
                print(values)
                db_ops.image_store_migrations()
                # TODO Abstract requests and insert from database
                db_ops.insert_operations("image_store", values)

    def request_list_of_images_in_db(self):
        # images = glob(IMAGE_FOLDER + "/*" + img_type)
        images = db_ops.request_matches("image_store")
        print("list from database" + str(images))
        self.image_store.clear()
        self.images_list.clear()

        for img in images:
            # get image name
            print("current list" + str(self.image_store))
            self.images_list["image_id"] = img[0]
            self.images_list["image_name"] = img[1]
            print("Check the values" + str(self.images_list))
            self.image_store.append(self.images_list.copy())
            print("Check the images" + str(self.image_store))

        print("We have" + str(len(self.image_store)) + "images in the store")
        print(self.image_store)
        return self.image_store

    def generate_hash(self):
        images_in_db = self.request_list_of_images_in_db()
        print(images_in_db)
        for img in images_in_db:
            image = Image.open(IMAGE_FOLDER + "\\" + img["image_name"])
            row, col = dhash.dhash_row_col(image)
            img_hash = dhash.format_hex(row, col)
            values = img_hash, img["image_id"]
            db_ops.image_store_migrations()
            print(values)
            db_ops.insert_operations("image_store_hash", values)


class compare_files:
    def __init__(self):
        # image table values to insert in database
        self.images_list = dict()
        self.image_store = list()

    def request_image_hashes(self):
        images = db_ops.request_matches("image_store_hash")
        print("list from database" + str(images))
        self.image_store.clear()
        self.images_list.clear()

        for img in images:
            # get image name
            print("current list" + str(img))
            self.images_list["image_hash"] = img[1]

            # request image name from image store database
            img_name = db_ops.conditional_request_matches(
                "image_store", img[2], "image_name", "image_id")
            self.images_list["image_name"] = img_name[0][0]
            print("Check the values" + str(self.images_list))
            self.image_store.append(self.images_list.copy())
            print("Check the images" + str(self.image_store))

        print("We have" + str(len(self.image_store)) + "images in the store")
        print(self.image_store)
        return self.image_store

    def calculate_hamming_dist(self, uploaded_hash, db_store_hash):
        i = 0
        count = 0
        while i < len(uploaded_hash):
            if uploaded_hash[i] != db_store_hash[i]:
                count += 1
            i += 1
        return count

    def mean_squared_error(self, uploaded_image, db_store_image):
        # the 'Mean Squared Error' between the two images is the
        # sum of the squared difference between the two images;
        # NOTE: the two images must have the same dimension
        err = np.sum((uploaded_image.astype("float") -
                      db_store_image.astype("float"))**2)
        err /= float(uploaded_image.shape[0] * uploaded_image.shape[1])

        # return the MSE, the lower the error, the more "similar"
        # the two images are
        return err

    def structural_similarity_index(self, uploaded_image, db_store_image):
        ssim_index = ssim(uploaded_image, db_store_image)
        return ssim_index

    def convert_and_resize_compare(self, uploaded_image, db_store_image):
        # TODO: make structural similarity and mean squared error functional
        uploaded_image = cv2.imread(uploaded_image)
        db_store_image = cv2.imread(db_store_image)

        # resize both images to a common size before comparison; 500x500 is an
        # assumed placeholder (the original left the cv2.resize() calls empty)
        uploaded_image = cv2.resize(uploaded_image, (500, 500))
        db_store_image = cv2.resize(db_store_image, (500, 500))

        uploaded_image = cv2.cvtColor(uploaded_image, cv2.COLOR_BGR2GRAY)
        db_store_image = cv2.cvtColor(db_store_image, cv2.COLOR_BGR2GRAY)

        mean_sq_error = self.mean_squared_error(uploaded_image, db_store_image)
        ssim_index = self.structural_similarity_index(uploaded_image, db_store_image)

        return ssim_index, mean_sq_error
2.53125
3
cogs/jpserv.py
elthorito/Rai
0
4670
import discord
from discord.ext import commands
import os
import json
from datetime import date, datetime, timedelta
from .utils import helper_functions as hf
from copy import deepcopy

dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))).replace('\\', '/')


class Jpserv(commands.Cog):
    """Modules unique for the Japanese server"""

    def __init__(self, bot):
        self.bot = bot

    async def cog_check(self, ctx):
        if not ctx.guild:
            return
        return ctx.guild.id == 189571157446492161 or ctx.guild.id == 275146036178059265
        # these commands are only useable on Japanese server or my testing server

    @commands.command()
    @hf.is_admin()
    async def swap(self, ctx):
        """Swaps JHO/JHO2's names and positions in the lists, for if we temporarily
        want welcome messages to go to JHO2"""
        jpJHO = self.bot.get_channel(189571157446492161)
        jpJHO2 = self.bot.get_channel(326263874770829313)
        if jpJHO.position == 4:
            await jpJHO.edit(position=5, name='just_hanging_out_2')
            await jpJHO2.edit(position=4, name='just_hanging_out')
        else:
            await jpJHO.edit(position=4, name='just_hanging_out')
            await jpJHO2.edit(position=5, name='just_hanging_out_2')

    @commands.group(invoke_without_command=True, aliases=['uhc'])
    async def ultrahardcore(self, ctx, *, member=None):
        """Irreversible hardcore mode. Must talk to an admin to have this undone."""
        # if ctx.guild.id != 189571157446492161:
        #     return
        role = ctx.guild.get_role(486851965121331200)
        config = self.bot.db['ultraHardcore']['users']
        if member:  # if you specified someone else's ID, then remove UHC from them
            member = await hf.member_converter(ctx, member)
            if not member:
                return
            if hf.admin_check(ctx) and ctx.author.id != member.id:
                if str(member.id) in config:
                    if config[str(member.id)][0]:
                        config[str(member.id)][0] = False
                    else:
                        await ctx.send("That user is not in UHC")
                        return
                else:
                    await ctx.send("That user is not in UHC mode.")
                    return
                await hf.dump_json()
                try:
                    await member.remove_roles(role)
                except discord.errors.Forbidden:
                    await ctx.send("I couldn't remove the ultra hardcore role")
                await ctx.send(f'Undid ultra hardcore mode for {member.name}')
            else:
                await ctx.send("You can not remove UHC. Ask a mod/admin to help you.")
        else:
            if str(ctx.author.id) in config:
                if config[str(ctx.author.id)][0]:
                    await ctx.invoke(self.explanation)
                    return
            await ctx.send(f"This is ultra hardcore mode. It means you must speak in the language you are learning"
                           f" (for example, if you are learning Japanese, any messages in English will be deleted)."
                           f" This can not be undone unless you ask a mod to remove it for you. \n\n"
                           f"To enable ultra hardcore mode, type `;uhc on` or `;uhc enable`. ")

    @ultrahardcore.command(aliases=['enable'])
    async def on(self, ctx):
        """Enables UHC"""
        if ctx.guild.id != 189571157446492161:
            return
        role = ctx.guild.get_role(486851965121331200)
        config = self.bot.db['ultraHardcore']['users']

        if str(ctx.author.id) in config:  # if not enabled
            user = config[str(ctx.author.id)]
            if user[0]:
                await ctx.send("You're already in ultra hardcore mode.")
                return
            else:
                user[0] = True
        else:
            config[str(ctx.author.id)] = [True, date.today().strftime("%Y/%m/%d"), 0]
        await hf.dump_json()

        try:
            await ctx.author.add_roles(role)
        except discord.errors.Forbidden:
            await ctx.send("I couldn't add the ultra hardcore role")

        await ctx.send(f"{ctx.author.name} has chosen to enable ultra hardcore mode. It works the same as "
                       "normal hardcore mode except that you can't undo it and asterisks don't change "
                       "anything. Talk to a mod to undo this.")

    @ultrahardcore.command()
    async def list(self, ctx):
        """Lists the people currently in ultra hardcore mode"""
        if ctx.guild.id != 189571157446492161:
            return
        string = 'The members in ultra hardcore mode right now are '
        guild = self.bot.get_guild(189571157446492161)
        members = []
        config = self.bot.db['ultraHardcore']['users']
        for member_id in config.copy():
            if config[member_id][0]:
                member = guild.get_member(int(member_id))
                if member is not None:  # in case a member leaves
                    members.append(member.name)
                else:
                    del config[member_id]
                    await ctx.send(f'Removed <@{member_id}> from the list, as they seem to have left the server')
        await ctx.send(string + ', '.join(members))

    @ultrahardcore.command()
    async def explanation(self, ctx):
        """Explains ultra hardcore mode for those who are using it and can't explain it"""
        if ctx.guild.id != 189571157446492161:
            return
        if str(ctx.author.id) in self.bot.db['ultraHardcore']['users']:
            if self.bot.db['ultraHardcore']['users'][str(ctx.author.id)][0]:
                await ctx.send(f"{ctx.author.mention} is currently using ultra hardcore mode. In this mode, they can't"
                               f" speak their native language, and they also cannot undo this mode themselves.")
                return
        await ctx.send(f"{ctx.author.mention} is currently NOT using hardcore mode, so I don't know why "
                       f"they're trying to use this command. But, ultra hardcore mode means a user can't speak "
                       f"any English, and can't undo this mode themselves no matter what.")

    @ultrahardcore.command(aliases=['lb'])
    async def leaderboard(self, ctx):
        """Shows a leaderboard of who has had UHC on for the longest"""
        if ctx.guild.id != 189571157446492161:
            return
        time_dict = deepcopy(self.bot.db['ultraHardcore']['users'])
        for i in time_dict:
            if time_dict[i][0]:
                time_dict[i][2] += (datetime.today() - datetime.strptime(time_dict[i][1], "%Y/%m/%d")).days
        # {('243703909166612480', [True, '2019/02/14', 124]),
        #  ('219617844973797376', [False, '2018/11/30', 122]), ...}
        to_sort = [[i[0], i[1][0], i[1][2]] for i in list(time_dict.items())]
        # to_sort: [['243703909166612480', True, 162], ['219617844973797376', False, 122], ...]
        sorted_dict = sorted(to_sort, key=lambda x: x[2], reverse=True)
        leaderboard = f"The number of days each user has had UHC enabled " \
                      f"(Bold = This user currently has UHC enabled)\n\n"
        for i in sorted_dict:
            user = ctx.guild.get_member(int(i[0]))
            if (i[2] < 10 and not i[1]) or (not user):
                continue
            if user.nick:
                name_str = f"{user.mention} ({user.name})"
            else:
                name_str = f"{user.name}"
            if i[1]:
                leaderboard += f"**{i[2]}: {name_str}**\n"
            else:
                leaderboard += f"{i[2]}: {name_str}\n"
        emb = discord.Embed(title="UHC Leaderboard", description=leaderboard,
                            color=discord.Color(int('ff5500', 16)))
        await ctx.send(embed=emb)

    @ultrahardcore.command()
    @hf.is_admin()
    async def ignore(self, ctx):
        """Ignores a channel for UHC"""
        if ctx.guild.id != 189571157446492161:
            return
        config = self.bot.db['ultraHardcore']
        try:
            if ctx.channel.id not in config['ignore']:
                config['ignore'].append(ctx.channel.id)
                await ctx.send(f"Added {ctx.channel.name} to list of ignored channels for UHC")
            else:
                config['ignore'].remove(ctx.channel.id)
                await ctx.send(f"Removed {ctx.channel.name} from list of ignored channels for UHC")
        except KeyError:
            config['ignore'] = [ctx.channel.id]
            await ctx.send(f"Added {ctx.channel.name} to list of ignored channels for UHC")
        await hf.dump_json()


def setup(bot):
    bot.add_cog(Jpserv(bot))
2.375
2
nehebn2.py
psifertex/nehebn2
0
4671
<gh_stars>0
#!/usr/bin/env python3
from components import ProgramState
import binaryninja as binja
import argparse
import os.path
import curses

# TODO...implement live-refreshing the settings.json during run (add the keybinding and check for it here in the global input loop)
# TODO...support multi-key presses? Not sure if this already works or not
# TODO...make sure to support small terminals (I think it does right now, but I should add some more checks so nothing goes out of bounds)


def main(stdscr):
    # Setup
    parser = argparse.ArgumentParser(description='Nearly Headless BinaryNinja.')
    parser.add_argument('filename', nargs='?', default="")
    args = parser.parse_args()

    program = ''
    if not args.filename == "":
        if os.path.isfile(args.filename):
            bv = binja.BinaryViewType.get_view_of_file(''.join(args.filename), False)
            bv.update_analysis()

            while not str(bv.analysis_progress) == "Idle":
                prog = bv.analysis_progress
                stdscr.erase()
                stdscr.border()

                state = ''
                if prog.state == binja.AnalysisState.DisassembleState:
                    state = "Disassembling"
                else:
                    state = "Analyzing"

                loadingText = "Loading File: "
                prog = int((prog.count / (prog.total + 1)) * 34.0)
                stdscr.addstr(2, 4, loadingText)
                stdscr.addstr(2, 4 + len(loadingText), state)
                stdscr.addstr(4, 4, '[' + '#' * prog + ' ' * (34 - prog) + ']')
                stdscr.refresh()

            program = ProgramState(stdscr, bv)
        else:
            raise IOError("File does not exist.")
    else:
        program = ProgramState(stdscr)

    key = ""
    while program.is_running:
        # Input Filtering
        try:
            key = stdscr.getkey()
        except curses.error as err:
            if not str(err) == "no input":
                raise curses.error(str(err))
            else:
                key = ""  # Clear Key Buffer

        # Rendering and input
        program.parseInput(key)
        program.render()
        curses.doupdate()


if __name__ == "__main__":
    background = "2a2a2a"
    text = "e0e0e0"

    curses.wrapper(main)
2.5625
3
keras/layers/pooling/base_pooling3d.py
itsraina/keras
0
4672
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Private base class for pooling 3D layers."""

import tensorflow.compat.v2 as tf

from keras import backend
from keras.engine.base_layer import Layer
from keras.engine.input_spec import InputSpec
from keras.utils import conv_utils


class Pooling3D(Layer):
    """Pooling layer for arbitrary pooling functions, for 3D inputs.

    This class only exists for code reuse. It will never be an exposed API.

    Args:
      pool_function: The pooling function to apply, e.g. `tf.nn.max_pool2d`.
      pool_size: An integer or tuple/list of 3 integers:
        (pool_depth, pool_height, pool_width)
        specifying the size of the pooling window.
        Can be a single integer to specify the same value for
        all spatial dimensions.
      strides: An integer or tuple/list of 3 integers,
        specifying the strides of the pooling operation.
        Can be a single integer to specify the same value for
        all spatial dimensions.
      padding: A string. The padding method, either 'valid' or 'same'.
        Case-insensitive.
      data_format: A string, one of `channels_last` (default) or
        `channels_first`. The ordering of the dimensions in the inputs.
        `channels_last` corresponds to inputs with shape
        `(batch, depth, height, width, channels)`
        while `channels_first` corresponds to
        inputs with shape `(batch, channels, depth, height, width)`.
      name: A string, the name of the layer.
    """

    def __init__(
        self,
        pool_function,
        pool_size,
        strides,
        padding="valid",
        data_format="channels_last",
        name=None,
        **kwargs
    ):
        super().__init__(name=name, **kwargs)
        if data_format is None:
            data_format = backend.image_data_format()
        if strides is None:
            strides = pool_size
        self.pool_function = pool_function
        self.pool_size = conv_utils.normalize_tuple(pool_size, 3, "pool_size")
        self.strides = conv_utils.normalize_tuple(
            strides, 3, "strides", allow_zero=True
        )
        self.padding = conv_utils.normalize_padding(padding)
        self.data_format = conv_utils.normalize_data_format(data_format)
        self.input_spec = InputSpec(ndim=5)

    def call(self, inputs):
        pool_shape = (1,) + self.pool_size + (1,)
        strides = (1,) + self.strides + (1,)

        if self.data_format == "channels_first":
            # TF does not support `channels_first` with 3D pooling operations,
            # so we must handle this case manually.
            # TODO(fchollet): remove this when TF pooling is feature-complete.
            inputs = tf.transpose(inputs, (0, 2, 3, 4, 1))

        outputs = self.pool_function(
            inputs,
            ksize=pool_shape,
            strides=strides,
            padding=self.padding.upper(),
        )

        if self.data_format == "channels_first":
            outputs = tf.transpose(outputs, (0, 4, 1, 2, 3))
        return outputs

    def compute_output_shape(self, input_shape):
        input_shape = tf.TensorShape(input_shape).as_list()
        if self.data_format == "channels_first":
            len_dim1 = input_shape[2]
            len_dim2 = input_shape[3]
            len_dim3 = input_shape[4]
        else:
            len_dim1 = input_shape[1]
            len_dim2 = input_shape[2]
            len_dim3 = input_shape[3]
        len_dim1 = conv_utils.conv_output_length(
            len_dim1, self.pool_size[0], self.padding, self.strides[0]
        )
        len_dim2 = conv_utils.conv_output_length(
            len_dim2, self.pool_size[1], self.padding, self.strides[1]
        )
        len_dim3 = conv_utils.conv_output_length(
            len_dim3, self.pool_size[2], self.padding, self.strides[2]
        )
        if self.data_format == "channels_first":
            return tf.TensorShape(
                [input_shape[0], input_shape[1], len_dim1, len_dim2, len_dim3]
            )
        else:
            return tf.TensorShape(
                [input_shape[0], len_dim1, len_dim2, len_dim3, input_shape[4]]
            )

    def get_config(self):
        config = {
            "pool_size": self.pool_size,
            "padding": self.padding,
            "strides": self.strides,
            "data_format": self.data_format,
        }
        base_config = super().get_config()
        return dict(list(base_config.items()) + list(config.items()))
2.46875
2
main.py
lucastan96/video2bitmap
1
4673
<reponame>lucastan96/video2bitmap<gh_stars>1-10
import moviepy.editor as mpy
import moviepy.video.fx.all as vfx
import subprocess as sp
from PIL import Image  # needed for the bitmap conversion below

# Crop and resize video
clip = mpy.VideoFileClip("smoke.mp4")
(w, h) = clip.size
cropped_clip = vfx.crop(clip, width=(h/128)*64, height=h, x1=w/4*3-100, y1=0).resize((64, 128))
cropped_clip.write_videofile('smoke-cropped.mp4')

# Convert video to frames
# Make sure to install ffmpeg on machine
cmd = 'ffmpeg -i /path/to/smoke-cropped.mp4 /path/to/frames_temp/%d.bmp'
sp.call(cmd, shell=True)

# Convert image to black and white bitmap
for i in range(202):
    col = Image.open("frames_temp/" + str(i + 1) + ".bmp")
    gray = col.convert('L')
    bw = gray.point(lambda x: 0 if x < 128 else 255, '1')
    bw.save("frames/" + str(i) + ".bmp")
2.75
3
ghostwriter/rolodex/apps.py
bbhunter/Ghostwriter
601
4674
"""This contains the configuration of the Rolodex application.""" # Django Imports from django.apps import AppConfig class RolodexConfig(AppConfig): name = "ghostwriter.rolodex" def ready(self): try: import ghostwriter.rolodex.signals # noqa F401 isort:skip except ImportError: pass
1.59375
2
boards/migrations/0024_boardpreferences_moderators.py
oscarsiles/jotlet
0
4675
<filename>boards/migrations/0024_boardpreferences_moderators.py
# Generated by Django 4.0.3 on 2022-03-01 14:42

from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('boards', '0023_alter_image_type'),
    ]

    operations = [
        migrations.AddField(
            model_name='boardpreferences',
            name='moderators',
            field=models.ManyToManyField(blank=True, related_name='moderated_boards', to=settings.AUTH_USER_MODEL),
        ),
    ]
1.539063
2
models/minimize_model.py
MichalBusta/OpenCitiesAIC
7
4676
<filename>models/minimize_model.py
'''
Created on Mar 22, 2020

@author: Michal.Busta at gmail.com
'''
# get rid of the optimizer state ...
import torch

MODEL_PATH = '/models/model-b2-2.pth'
state = torch.load(MODEL_PATH, map_location=lambda storage, loc: storage)
state_out = {
    "state_dict": state["state_dict"],
}
torch.save(state_out, 'model-b2-2.pth')
2.34375
2
setup.py
thomas-kloeber/braumeister
6
4677
import os
import re

from setuptools import setup

# use a raw string so the \s escapes are not mangled by Python
version = re.search(
    r'^__version__\s*=\s*"(.*)"',
    open('braumeister/braumeister.py').read(),
    re.M
).group(1)


def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()


setup(
    name="braumeister",
    packages=["braumeister", "braumeister.actions"],
    version=version,
    author="<NAME>",
    author_email="<EMAIL>",
    description="Easy release building, combining JIRA and git",
    long_description=read('README.md'),
    license="MIT",
    keywords="git jira release",
    url="https://www.talentsconnect.com",
    include_package_data=True,
    install_requires=['requests', 'colorama'],
    entry_points={
        'console_scripts': ["braumeister = braumeister.braumeister:main"]
    },
    python_requires='!=2.7, !=3.4, >=3.5',
    zip_safe=False,
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Topic :: Utilities",
        "Topic :: Software Development :: Version Control :: Git"
    ],
)
1.734375
2
modules/inference.py
rubelchowdhury20/wuton-with-densepose
12
4678
# standard library imports
import os

# third party imports
import numpy as np
from PIL import Image
import torch.nn as nn
from torchvision import transforms

# local imports
import config
from . import utils
from . import geometric_transformer


class GeoTransformationInfer(nn.Module):
    def __init__(self, output_dir="./output/results"):
        super(GeoTransformationInfer, self).__init__()
        self.output_dir = output_dir
        utils.ensure_folder(self.output_dir)

    def forward(self, model_apparel, warped_image, model_image, warped_model_image,
                random_product_image, random_product_image_warped, output_on_random_product,
                batch_index, epoch):
        batch_size = warped_image.shape[0]
        model_apparel = model_apparel.cpu().numpy()
        warped_image = warped_image.cpu().numpy()
        model_image = model_image.cpu().numpy()
        warped_model_image = warped_model_image.cpu().numpy()
        random_product_image = random_product_image.cpu().numpy()
        random_product_image_warped = random_product_image_warped.cpu().numpy()
        output_on_random_product = output_on_random_product.cpu().numpy()
        for i in range(batch_size):
            self._save_image_sheet(
                batch_index * config.PARAMS["batch_size"] + i,
                model_apparel[i],
                warped_image[i],
                model_image[i],
                warped_model_image[i],
                random_product_image[i],
                random_product_image_warped[i],
                output_on_random_product[i],
                epoch)

    def _save_image_sheet(self, idx, model_apparel, warped_image, model_image,
                          warped_model_image, random_product_image,
                          random_product_image_warped, output_on_random_product, epoch):
        # inverse normalization of the images along with channel first to channel last
        # steps and finally converting np array to pillow format for saving
        model_apparel = np.moveaxis(model_apparel, 0, 2) * [0.229, 0.224, 0.225] + [0.485, 0.456, 0.406]
        model_apparel = Image.fromarray(np.uint8(model_apparel * 255))
        warped_image = np.moveaxis(warped_image, 0, 2) * [0.229, 0.224, 0.225] + [0.485, 0.456, 0.406]
        warped_image = Image.fromarray(np.uint8(warped_image * 255))
        model_image = np.moveaxis(model_image, 0, 2) * [0.229, 0.224, 0.225] + [0.485, 0.456, 0.406]
        model_image = Image.fromarray(np.uint8(model_image * 255))
        warped_model_image = np.moveaxis(warped_model_image, 0, 2) * [0.229, 0.224, 0.225] + [0.485, 0.456, 0.406]
        warped_model_image = Image.fromarray(np.uint8(warped_model_image * 255))
        random_product_image = np.moveaxis(random_product_image, 0, 2) * [0.229, 0.224, 0.225] + [0.485, 0.456, 0.406]
        random_product_image = Image.fromarray(np.uint8(random_product_image * 255))
        random_product_image_warped = np.moveaxis(random_product_image_warped, 0, 2) * [0.229, 0.224, 0.225] + [0.485, 0.456, 0.406]
        random_product_image_warped = Image.fromarray(np.uint8(random_product_image_warped * 255))
        output_on_random_product = np.moveaxis(output_on_random_product, 0, 2) * [0.229, 0.224, 0.225] + [0.485, 0.456, 0.406]
        output_on_random_product = Image.fromarray(np.uint8(output_on_random_product * 255))

        sheet = Image.new('RGB', (1568, 224), 'white')
        sheet.paste(model_apparel, (0, 0))
        sheet.paste(warped_image, (224, 0))
        sheet.paste(model_image, (448, 0))
        sheet.paste(warped_model_image, (672, 0))
        sheet.paste(random_product_image, (896, 0))
        sheet.paste(random_product_image_warped, (1120, 0))
        sheet.paste(output_on_random_product, (1344, 0))
        sheet.save(os.path.join(self.output_dir, "image_sheet_{}-epoch{}".format(idx, str(epoch).zfill(3)) + ".jpg"))
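# Sketch of the de-normalization used in _save_image_sheet above
# (illustrative, standalone): the tensors were normalized with the ImageNet
# mean/std, so channel-last arrays are recovered as x * std + mean.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    mean = np.array([0.485, 0.456, 0.406])
    std = np.array([0.229, 0.224, 0.225])
    x = rng.random((224, 224, 3))        # stand-in for an original image in [0, 1]
    normalized = (x - mean) / std        # what torchvision's Normalize produces
    recovered = normalized * std + mean  # the inversion applied above
    print(np.allclose(recovered, x))     # -> True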
2.3125
2
imggen/fonts.py
p-lambda/unlabeled_outputs
4
4679
from pathlib import Path

import h5py
import numpy as np
from torchvision.datasets.vision import VisionDataset
from PIL import Image
import requests
import zipfile
from tqdm import tqdm


def download_file_from_google_drive(id, destination):
    URL = "https://docs.google.com/uc?export=download"
    session = requests.Session()
    response = session.get(URL, params={'id': id}, stream=True)
    token = get_confirm_token(response)
    if token:
        params = {'id': id, 'confirm': token}
        response = session.get(URL, params=params, stream=True)
    save_response_content(response, destination)


def get_confirm_token(response):
    for key, value in response.cookies.items():
        if key.startswith('download_warning'):
            return value
    return None


def save_response_content(response, destination):
    CHUNK_SIZE = 32768
    with open(destination, "wb") as f:
        for chunk in tqdm(response.iter_content(CHUNK_SIZE)):
            if chunk:  # filter out keep-alive new chunks
                f.write(chunk)


class Fonts(VisionDataset):
    url_id = '0B0GtwTQ6IF9AU3NOdzFzUWZ0aDQ'
    base_folder = 'fonts'

    def __init__(self, root, split='train', transform=None, target_transform=None,
                 download=True, denoise=False, denoise_transform=None,
                 num_fonts_pi=None, num_examples=2500):
        '''
        Args:
            root (str): path
            num_train_domains (int): number of train domains up to 41443
            test_mean_chars (bool): Use the mean characters as test set
            split (str): 'train', 'val', 'test'
            transform: input transformation
            target_transform: target transformation
            download (bool): download or not
        '''
        super().__init__(root, transform=transform, target_transform=target_transform)
        self.split = split
        self.transform = transform
        self.target_transform = target_transform
        self.denoise = denoise
        self.denoise_transform = denoise_transform

        self.path = Path(self.root) / self.base_folder
        self.path.mkdir(parents=True, exist_ok=True)
        self.download_path = self.path / 'fonts.hdf5'
        if download:
            self.download()

        with h5py.File(str(self.download_path), 'r') as f:
            data_by_domain = f['fonts'][()]

        np.random.seed(484347)

        # limit the number of fonts
        num_fonts = 100
        font_idxs = np.arange(len(data_by_domain))
        np.random.shuffle(font_idxs)
        if not denoise:
            data_by_domain = data_by_domain[font_idxs[:num_fonts]]
            print(f"NUM FONTS: {num_fonts}")
            print(f"NUM CHARS: {data_by_domain.shape[1]}")

            num_classes = data_by_domain.shape[1]
            self.all_targets = np.concatenate(
                [np.arange(num_classes)] * num_fonts, axis=0)
            self.all_domain_labels = np.repeat(np.arange(num_fonts), num_classes)
            self.all_data = data_by_domain.reshape(
                data_by_domain.shape[0] * data_by_domain.shape[1],
                data_by_domain.shape[2], data_by_domain.shape[3])

            idxs = np.arange(len(self.all_data))
            np.random.shuffle(idxs)
            train_val_max = 2600
            if num_examples > train_val_max:
                # to be able to heuristically test what happens if we have more training data
                train_val_max = 5000

            if split == 'train':
                idxs = idxs[:num_examples]
            elif split == 'val':
                idxs = idxs[num_examples: train_val_max]
            else:
                idxs = idxs[train_val_max:]

            self.targets = self.all_targets[idxs]
            self.domain_labels = self.all_domain_labels[idxs]
            self.data = self.all_data[idxs]
        else:
            # get the train data
            train_dbd = data_by_domain[font_idxs[:num_fonts]]
            all_data = train_dbd.reshape(
                train_dbd.shape[0] * train_dbd.shape[1],
                train_dbd.shape[2], train_dbd.shape[3])
            idxs = np.arange(len(all_data))
            np.random.shuffle(idxs)
            idxs = idxs[:num_examples]
            train_data = all_data[idxs]
            if num_fonts_pi is not None:
                data_by_domain = data_by_domain[font_idxs[num_fonts:num_fonts + num_fonts_pi]]
            else:
                data_by_domain = data_by_domain[font_idxs[num_fonts:]]
            self.data = data_by_domain.reshape(
                data_by_domain.shape[0] * data_by_domain.shape[1],
                data_by_domain.shape[2], data_by_domain.shape[3])
            self.data = np.concatenate([train_data, self.data], axis=0)

    def get_nearest_neighbor(self, all_imgs, x):
        idx = np.argmin(np.sum(np.square(all_imgs - x), axis=(1, 2)))
        return self[idx]

    def download(self):
        if not self.download_path.exists():
            download_file_from_google_drive(self.url_id, str(self.download_path))

    def __getitem__(self, index):
        """
        Args:
            index (int): Index

        Returns:
            tuple: (image, target) where target is index of the target class.
        """
        if self.denoise:
            img = self.data[index]
            img = Image.fromarray(img)
            if self.transform is not None:
                tgt_img = self.transform(img)
            if self.denoise_transform is not None:
                src_img = self.denoise_transform(img)
            return src_img, tgt_img
        else:
            img, target = self.data[index], self.targets[index]
            domain_label = self.domain_labels[index]

            # doing this so that it is consistent with all other datasets
            # to return a PIL Image
            img = Image.fromarray(img)

            if self.transform is not None:
                img = self.transform(img)
            if self.target_transform is not None:
                target = self.target_transform(target)
            return img, target, domain_label

    def get_item_from_all(self, index):
        img, target = self.all_data[index], self.all_targets[index]
        domain_label = self.all_domain_labels[index]

        # doing this so that it is consistent with all other datasets
        # to return a PIL Image
        img = Image.fromarray(img)

        if self.transform is not None:
            img = self.transform(img)
        if self.target_transform is not None:
            target = self.target_transform(target)
        return img, target, domain_label

    def __len__(self):
        return len(self.data)
2.453125
2
leetcode/47.py
windniw/just-for-fun
1
4680
""" link: https://leetcode.com/problems/permutations-ii problem: 求全排列,nums中存在重复数 solution: 同46,加上排序即可 """ class Solution: def permuteUnique(self, nums: List[int]) -> List[List[int]]: if len(nums) == 1: return [nums] new_nums = nums.copy() new_nums.sort() res = [] for i in range(0, len(new_nums)): if i + 1 < len(new_nums) and new_nums[i] == new_nums[i + 1]: continue new_nums[i], new_nums[0] = new_nums[0], new_nums[i] sub_result = self.permuteUnique(new_nums[1:]) for r in sub_result: res.append([new_nums[0]] + r.copy()) new_nums[i], new_nums[0] = new_nums[0], new_nums[i] return res
3.1875
3
adminmgr/media/code/A3/task3/T1_ocefXVJ.py
IamMayankThakur/test-bigdata
9
4681
import findspark
findspark.init()

from pyspark import SparkConf, SparkContext
from pyspark.streaming import StreamingContext
from pyspark.sql import Row, SQLContext
import sys
import requests


def tmp(x):
    y = (x.split(';')[7]).split(',')
    return (y)


def forf(x):
    for i in x:
        yield (i, 1)


def topprint(time, rdd):
    res1 = rdd.take(5)
    count = 0
    for i in res1:
        if count == 4:
            print("%s" % i)
        else:
            print("%s" % i, end=',')
        count = count + 1


conf = SparkConf()
conf.setAppName("BigData")
sc = SparkContext(conf=conf)

ssc = StreamingContext(sc, int(sys.argv[1]))
ssc.checkpoint("/checkpoint_BIGDATA")

'''
# Selecting a window:
# outpu3:
inputStream = ssc.socketTextStream("localhost", 9009)
dataStream = inputStream.window(int(sys.argv[1]), int(sys.argv[2]))
tweet = dataStream.map(tmp)
septweet = tweet.flatMap(forf)
count = septweet.reduceByKey(lambda x, y: x + y)
sortcount = count.transform(lambda rdd: rdd.sortBy(lambda a: a[1], ascending=False))
tweet1 = sortcount.filter(lambda w: w[0] != '')
tweet1.pprint()
res = tweet1.map(lambda a: a[0])
res.foreachRDD(topprint)
# res.pprint(3)
'''

'''
# Selecting a datastream and then reducing by window:
# outpu2
dataStream = ssc.socketTextStream("localhost", 9009)
tweet = dataStream.map(tmp)
septweet = tweet.flatMap(forf)
# septweet.pprint()
count = septweet.reduceByKeyAndWindow(lambda x, y: x + y, int(sys.argv[1]), int(sys.argv[2]))
sortcount = count.transform(lambda rdd: rdd.sortBy(lambda a: a[0], ascending=True))
sortcount = count.transform(lambda rdd: rdd.sortBy(lambda a: a[1], ascending=False))
tweet1 = sortcount.filter(lambda w: w[0] != '')
# tweet1.pprint()
res = tweet1.map(lambda a: a[0])
res.foreachRDD(topprint)
'''

# Try in outpu1
inputStream = ssc.socketTextStream("localhost", 9009)
dataStream = inputStream.window(int(sys.argv[2]), int(sys.argv[1]))
tweet = dataStream.map(tmp)
septweet = tweet.flatMap(forf)
count = septweet.reduceByKey(lambda x, y: x + y)
sortcount = count.transform(lambda rdd: rdd.sortBy(lambda a: a[0], ascending=True))
sortcount = sortcount.transform(lambda rdd: rdd.sortBy(lambda a: a[1], ascending=False))
# comparing with != '' instead of `is not ''` (identity), which is unreliable for strings
tweet1 = sortcount.filter(lambda w: w[0] != '')
# tweet1.pprint()
res = tweet1.map(lambda a: a[0])
res.foreachRDD(topprint)

# To maintain state
# totalcount = tweet.updateStateByKey(aggregate_tweets_count)
# totalcount.pprint()

# To perform an operation on each RDD
# totalcount.foreachRDD(process_rdd)

ssc.start()
ssc.awaitTermination(25)
ssc.stop()
2.625
3
courses/migrations/0003_alter_content_options_alter_module_options_and_more.py
antonnifo/E-Soma
1
4682
# Generated by Django 4.0.1 on 2022-01-20 13:10

import courses.fields
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('courses', '0002_video_text_image_file_content'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='content',
            options={'ordering': ['order']},
        ),
        migrations.AlterModelOptions(
            name='module',
            options={'ordering': ['order']},
        ),
        migrations.AddField(
            model_name='content',
            name='order',
            field=courses.fields.OrderField(blank=True, default=0),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='module',
            name='order',
            field=courses.fields.OrderField(blank=True, default=0),
            preserve_default=False,
        ),
    ]
1.664063
2
pkg/maths/maths.py
prateekdegaons1991/experiment-loadtest
8
4683
# Atoi stands for ASCII to Integer Conversion
def atoi(string):
    res = 0

    # Iterate through all characters of
    # input and update result
    for i in range(len(string)):
        res = res * 10 + (ord(string[i]) - ord('0'))

    return res


# Adjustment contains the rule of three for calculating an integer
# given another integer representing a percentage
def Adjustment(a, b):
    return (a * b) / 100
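# Illustrative checks (not part of the package):
if __name__ == "__main__":
    print(atoi("123"))          # -> 123 (digits only; no sign/whitespace handling)
    print(Adjustment(200, 50))  # -> 100.0, i.e. 50% of 200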
3.90625
4
google_compute_engine/config_manager.py
redoxdrh/GCP-Flask
2
4684
#!/usr/bin/python
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""A library for retrieving and modifying configuration settings."""

import os
import textwrap

from google_compute_engine import file_utils
from google_compute_engine.compat import parser

CONFIG = '/etc/default/instance_configs.cfg'


class ConfigManager(object):
  """Process the configuration defaults."""

  def __init__(self, config_file=None, config_header=None):
    """Constructor.

    Args:
      config_file: string, the location of the config file.
      config_header: string, the message to write at the top of the config.
    """
    self.config_file = config_file or CONFIG
    self.config_header = config_header
    self.config = parser.SafeConfigParser()
    self.config.read(self.config_file)

  def _AddHeader(self, fp):
    """Create a file header in the config.

    Args:
      fp: int, a file pointer for writing the header.
    """
    text = textwrap.wrap(
        textwrap.dedent(self.config_header), break_on_hyphens=False)
    fp.write('\n'.join(['# ' + line for line in text]))
    fp.write('\n\n')

  def GetOptionString(self, section, option):
    """Get the value of an option in the config file.

    Args:
      section: string, the section of the config file to check.
      option: string, the option to retrieve the value of.

    Returns:
      string, the value of the option or None if the option doesn't exist.
    """
    if self.config.has_option(section, option):
      return self.config.get(section, option)
    else:
      return None

  def GetOptionBool(self, section, option):
    """Get the value of an option in the config file.

    Args:
      section: string, the section of the config file to check.
      option: string, the option to retrieve the value of.

    Returns:
      bool, True if the option is enabled or not set.
    """
    return (not self.config.has_option(section, option) or
            self.config.getboolean(section, option))

  def SetOption(self, section, option, value, overwrite=True):
    """Set the value of an option in the config file.

    Args:
      section: string, the section of the config file to check.
      option: string, the option to set the value of.
      value: string, the value to set the option.
      overwrite: bool, True to overwrite an existing value in the config file.
    """
    if not overwrite and self.config.has_option(section, option):
      return
    if not self.config.has_section(section):
      self.config.add_section(section)
    self.config.set(section, option, str(value))

  def WriteConfig(self, config_file=None):
    """Write the config values to a given file.

    Args:
      config_file: string, the file location of the config file to write.
    """
    config_file = config_file or self.config_file
    config_name = os.path.splitext(os.path.basename(config_file))[0]
    config_lock = '/var/lock/google_%s.lock' % config_name
    with file_utils.LockFile(config_lock):
      with open(config_file, 'w') as config_fp:
        if self.config_header:
          self._AddHeader(config_fp)
        self.config.write(config_fp)
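# Minimal usage sketch (illustrative; the section/option names and the temp
# path are made up for the demo and not part of the library):
if __name__ == '__main__':
  import tempfile
  demo_path = os.path.join(tempfile.gettempdir(), 'demo_instance_configs.cfg')
  cm = ConfigManager(config_file=demo_path, config_header='Demo header.')
  cm.SetOption('Daemons', 'accounts_daemon', 'true', overwrite=False)
  print(cm.GetOptionBool('Daemons', 'accounts_daemon'))    # -> True
  print(cm.GetOptionString('Daemons', 'accounts_daemon'))  # -> 'true'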
2.5625
3
mscreen/autodocktools_prepare_py3k/mglutil/web/services/AppService_services.py
e-mayo/mscreen
9
4685
<reponame>e-mayo/mscreen
##################################################
# ./AppService_services.py
# generated by ZSI.wsdl2python
#
#
##################################################

from .AppService_services_types import *
from .AppService_services_types import \
    nbcr_sdsc_edu_opal_types as ns1

import urllib.parse, types
from ZSI.TCcompound import Struct
from ZSI import client
import ZSI


class AppServiceInterface:
    def getAppServicePortType(self, portAddress=None, **kw):
        raise NonImplementationError("method not implemented")


class AppServiceLocator(AppServiceInterface):
    AppServicePortType_address = "https://rocks-106.sdsc.edu:8443/axis/services/AutogridServicePort"

    def getAppServicePortTypeAddress(self):
        return AppServiceLocator.AppServicePortType_address

    def getAppServicePortType(self, portAddress=None, **kw):
        return AppServicePortSoapBindingSOAP(
            portAddress or AppServiceLocator.AppServicePortType_address, **kw)


class AppServicePortSoapBindingSOAP:
    def __init__(self, addr, **kw):
        netloc = (urllib.parse.urlparse(addr)[1]).split(":") + [80, ]
        if "host" not in kw:
            kw["host"] = netloc[0]
        if "port" not in kw:
            kw["port"] = int(netloc[1])
        if "url" not in kw:
            kw["url"] = urllib.parse.urlparse(addr)[2]
        self.binding = client.Binding(**kw)

    def destroy(self, request):
        """
        @param: request is str

        @return: response from destroyResponse::
            _destroyOutput: ns1.StatusOutputType_Def
                _baseURL: str
                _code: int
                _message: str
        """
        if not isinstance(request, str):
            raise TypeError("%s incorrect request type" % (request.__class__))
        kw = {'requestclass': destroyRequestWrapper}
        response = self.binding.Send(None, None, request,
                                     soapaction="http://nbcr.sdsc.edu/opal/destroy", **kw)
        response = self.binding.Receive(destroyResponseWrapper())
        if not isinstance(response, destroyResponse) and\
                not issubclass(destroyResponse, response.__class__):
            raise TypeError("%s incorrect response type" % (response.__class__))
        return response

    def getAppConfig(self, request):
        """
        @param: request to getAppConfigRequest

        @return: response from getAppConfigResponse::
            _getAppConfigOutput: ns1.AppConfigType_Def
                _binaryLocation: str
                _defaultArgs: str, optional
                _metadata: ns1.AppMetadataType_Def
                    _info: str, optional
                    _types: ns1.ArgumentsType_Def, optional
                        _flags: ns1.FlagsArrayType_Def, optional
                            _flag: ns1.FlagsType_Def, optional
                                _id: str
                                _tag: str
                                _textDesc: str, optional
                        _implicitParams: ns1.ImplicitParamsArrayType_Def, optional
                            _param: ns1.ImplicitParamsType_Def, optional
                                _extension: str, optional
                                _id: str
                                _ioType: ns1.IOType_Def
                                    _IOType: str, optional
                                _max: int, optional
                                _min: int, optional
                                _name: str, optional
                                _required: boolean, optional
                                _semanticType: str, optional
                                _textDesc: str, optional
                        _taggedParams: ns1.ParamsArrayType_Def, optional
                            _param: ns1.ParamsType_Def, optional
                                _id: str
                                _ioType: ns1.IOType_Def, optional
                                _paramType: ns1.ParamType_Def
                                    _ParamType: str, optional
                                _required: boolean, optional
                                _semanticType: str, optional
                                _tag: str, optional
                                _textDesc: str, optional
                                _value: str, optional
                            _separator: str, optional
                        _untaggedParams: ns1.ParamsArrayType_Def, optional
                    _usage: str
                _parallel: boolean
        """
        if not isinstance(request, getAppConfigRequest) and\
                not issubclass(getAppConfigRequest, request.__class__):
            raise TypeError("%s incorrect request type" % (request.__class__))
        kw = {}
        response = self.binding.Send(None, None, request,
                                     soapaction="http://nbcr.sdsc.edu/opal/getAppConfig", **kw)
        response = self.binding.Receive(getAppConfigResponseWrapper())
        if not isinstance(response, getAppConfigResponse) and\
                not issubclass(getAppConfigResponse, response.__class__):
            raise TypeError("%s incorrect response type" % (response.__class__))
        return response

    def getAppMetadata(self, request):
        """
        @param: request to getAppMetadataRequest

        @return: response from getAppMetadataResponse::
            _getAppMetadataOutput: ns1.AppMetadataType_Def
                _info: str, optional
                _types: ns1.ArgumentsType_Def, optional
                    _flags: ns1.FlagsArrayType_Def, optional
                        _flag: ns1.FlagsType_Def, optional
                            _id: str
                            _tag: str
                            _textDesc: str, optional
                    _implicitParams: ns1.ImplicitParamsArrayType_Def, optional
                        _param: ns1.ImplicitParamsType_Def, optional
                            _extension: str, optional
                            _id: str
                            _ioType: ns1.IOType_Def
                                _IOType: str, optional
                            _max: int, optional
                            _min: int, optional
                            _name: str, optional
                            _required: boolean, optional
                            _semanticType: str, optional
                            _textDesc: str, optional
                    _taggedParams: ns1.ParamsArrayType_Def, optional
                        _param: ns1.ParamsType_Def, optional
                            _id: str
                            _ioType: ns1.IOType_Def, optional
                            _paramType: ns1.ParamType_Def
                                _ParamType: str, optional
                            _required: boolean, optional
                            _semanticType: str, optional
                            _tag: str, optional
                            _textDesc: str, optional
                            _value: str, optional
                        _separator: str, optional
                    _untaggedParams: ns1.ParamsArrayType_Def, optional
                _usage: str
        """
        if not isinstance(request, getAppMetadataRequest) and\
                not issubclass(getAppMetadataRequest, request.__class__):
            raise TypeError("%s incorrect request type" % (request.__class__))
        kw = {}
        response = self.binding.Send(None, None, request,
                                     soapaction="http://nbcr.sdsc.edu/opal/getAppMetadata", **kw)
        response = self.binding.Receive(getAppMetadataResponseWrapper())
        if not isinstance(response, getAppMetadataResponse) and\
                not issubclass(getAppMetadataResponse, response.__class__):
            raise TypeError("%s incorrect response type" % (response.__class__))
        return response

    def getOutputAsBase64ByName(self, request):
        """
        @param: request to getOutputAsBase64ByNameRequest::
            _getOutputAsBase64ByNameInput: ns1.OutputsByNameInputType_Def
                _fileName: str
                _jobID: str

        @return: response from getOutputAsBase64ByNameResponse::
            _item: str, optional
        """
        if not isinstance(request, getOutputAsBase64ByNameRequest) and\
                not issubclass(getOutputAsBase64ByNameRequest, request.__class__):
            raise TypeError("%s incorrect request type" % (request.__class__))
        kw = {}
        response = self.binding.Send(None, None, request,
                                     soapaction="http://nbcr.sdsc.edu/opal/getOutputAsBase64ByName", **kw)
        response = self.binding.Receive(getOutputAsBase64ByNameResponseWrapper())
        if not isinstance(response, getOutputAsBase64ByNameResponse) and\
                not issubclass(getOutputAsBase64ByNameResponse, response.__class__):
            raise TypeError("%s incorrect response type" % (response.__class__))
        return response

    def getOutputs(self, request):
        """
        @param: request is str

        @return: response from getOutputsResponse::
            _getOutputsOutput: ns1.JobOutputType_Def
                _outputFile: ns1.OutputFileType_Def, optional
                    _name: str
                    _url: str
                _stdErr: str, optional
                _stdOut: str, optional
        """
        if not isinstance(request, str):
            raise TypeError("%s incorrect request type" % (request.__class__))
        kw = {'requestclass': getOutputsRequestWrapper}
        response = self.binding.Send(None, None, request,
                                     soapaction="http://nbcr.sdsc.edu/opal/getOutputs", **kw)
        response = self.binding.Receive(getOutputsResponseWrapper())
        if not isinstance(response, getOutputsResponse) and\
                not issubclass(getOutputsResponse, response.__class__):
            raise TypeError("%s incorrect response type" % (response.__class__))
        return response

    def launchJob(self, request):
        """
        @param: request to launchJobRequest::
            _launchJobInput: ns1.JobInputType_Def
                _argList: str, optional
                _inputFile: ns1.InputFileType_Def, optional
                    _contents: str
                    _name: str
                _numProcs: int, optional

        @return: response from launchJobResponse::
            _launchJobOutput: ns1.JobSubOutputType_Def
                _jobID: str
                _status: ns1.StatusOutputType_Def
                    _baseURL: str
                    _code: int
                    _message: str
        """
        if not isinstance(request, launchJobRequest) and\
                not issubclass(launchJobRequest, request.__class__):
            raise TypeError("%s incorrect request type" % (request.__class__))
        kw = {}
        response = self.binding.Send(None, None, request,
                                     soapaction="http://nbcr.sdsc.edu/opal/launchJob", **kw)
        response = self.binding.Receive(launchJobResponseWrapper())
        if not isinstance(response, launchJobResponse) and\
                not issubclass(launchJobResponse, response.__class__):
            raise TypeError("%s incorrect response type" % (response.__class__))
        return response

    def launchJobBlocking(self, request):
        """
        @param: request to launchJobBlockingRequest::
            _launchJobBlockingInput: ns1.JobInputType_Def
                _argList: str, optional
                _inputFile: ns1.InputFileType_Def, optional
                    _contents: str
                    _name: str
                _numProcs: int, optional

        @return: response from launchJobBlockingResponse::
            _launchJobBlockingOutput: ns1.BlockingOutputType_Def
                _jobOut: ns1.JobOutputType_Def
                    _outputFile: ns1.OutputFileType_Def, optional
                        _name: str
                        _url: str
                    _stdErr: str, optional
                    _stdOut: str, optional
                _status: ns1.StatusOutputType_Def
                    _baseURL: str
                    _code: int
                    _message: str
        """
        if not isinstance(request, launchJobBlockingRequest) and\
                not issubclass(launchJobBlockingRequest, request.__class__):
            raise TypeError("%s incorrect request type" % (request.__class__))
        kw = {}
        response = self.binding.Send(None, None, request,
                                     soapaction="http://nbcr.sdsc.edu/opal/launchJobBlocking", **kw)
        response = self.binding.Receive(launchJobBlockingResponseWrapper())
        if not isinstance(response, launchJobBlockingResponse) and\
                not issubclass(launchJobBlockingResponse, response.__class__):
            raise TypeError("%s incorrect response type" % (response.__class__))
        return response

    def queryStatus(self, request):
        """
        @param: request is str

        @return: response from queryStatusResponse::
            _queryStatusOutput: ns1.StatusOutputType_Def
                _baseURL: str
                _code: int
                _message: str
        """
        if not isinstance(request, str):
            raise TypeError("%s incorrect request type" % (request.__class__))
        kw = {'requestclass': queryStatusRequestWrapper}
        response = self.binding.Send(None, None, request,
                                     soapaction="http://nbcr.sdsc.edu/opal/queryStatus", **kw)
        response = self.binding.Receive(queryStatusResponseWrapper())
        if not isinstance(response, queryStatusResponse) and\
                not issubclass(queryStatusResponse, response.__class__):
            raise TypeError("%s incorrect response type" % (response.__class__))
        return response


class destroyRequest(ns1.destroyInput_Dec):
    if not hasattr(ns1.destroyInput_Dec(), "typecode"):
        typecode = ns1.destroyInput_Dec()

    def __init__(self, name=None, ns=None):
        ns1.destroyInput_Dec.__init__(self, name=None, ns=None)


class destroyRequestWrapper(destroyRequest):
    """wrapper for document:literal message"""
    typecode = destroyRequest(name=None, ns=None).typecode

    def __init__(self, name=None, ns=None, **kw):
        destroyRequest.__init__(self, name=None, ns=None)


class destroyResponse(ns1.destroyOutput_Dec):
    if not hasattr(ns1.destroyOutput_Dec(), "typecode"):
        typecode = ns1.destroyOutput_Dec()

    def __init__(self, name=None, ns=None):
        ns1.destroyOutput_Dec.__init__(self, name=None, ns=None)


class destroyResponseWrapper(destroyResponse):
    """wrapper for document:literal message"""
    typecode = destroyResponse( name=None,
ns=None ).typecode def __init__( self, name=None, ns=None, **kw ): destroyResponse.__init__( self, name=None, ns=None ) class getAppConfigRequest: def __init__(self, name=None, ns=None): getAppConfigRequest.typecode = Struct(getAppConfigRequest,[], pname=name, aname="%s" % name, oname="%s xmlns=\"\"" % name ) class getAppConfigRequestWrapper(getAppConfigRequest): """wrapper for document:literal message""" typecode = getAppConfigRequest( name=None, ns=None ).typecode def __init__( self, name=None, ns=None, **kw ): getAppConfigRequest.__init__( self, name=None, ns=None ) class getAppConfigResponse(ns1.getAppConfigOutput_Dec): if not hasattr( ns1.getAppConfigOutput_Dec(), "typecode" ): typecode = ns1.getAppConfigOutput_Dec() def __init__(self, name=None, ns=None): ns1.getAppConfigOutput_Dec.__init__(self, name=None, ns=None) class getAppConfigResponseWrapper(getAppConfigResponse): """wrapper for document:literal message""" typecode = getAppConfigResponse( name=None, ns=None ).typecode def __init__( self, name=None, ns=None, **kw ): getAppConfigResponse.__init__( self, name=None, ns=None ) class getAppMetadataRequest: def __init__(self, name=None, ns=None): getAppMetadataRequest.typecode = Struct(getAppMetadataRequest,[], pname=name, aname="%s" % name, oname="%s xmlns=\"\"" % name ) class getAppMetadataRequestWrapper(getAppMetadataRequest): """wrapper for document:literal message""" typecode = getAppMetadataRequest( name=None, ns=None ).typecode def __init__( self, name=None, ns=None, **kw ): getAppMetadataRequest.__init__( self, name=None, ns=None ) class getAppMetadataResponse(ns1.getAppMetadataOutput_Dec): if not hasattr( ns1.getAppMetadataOutput_Dec(), "typecode" ): typecode = ns1.getAppMetadataOutput_Dec() def __init__(self, name=None, ns=None): ns1.getAppMetadataOutput_Dec.__init__(self, name=None, ns=None) class getAppMetadataResponseWrapper(getAppMetadataResponse): """wrapper for document:literal message""" typecode = getAppMetadataResponse( name=None, ns=None ).typecode def __init__( self, name=None, ns=None, **kw ): getAppMetadataResponse.__init__( self, name=None, ns=None ) class getOutputAsBase64ByNameRequest(ns1.getOutputAsBase64ByNameInput_Dec): if not hasattr( ns1.getOutputAsBase64ByNameInput_Dec(), "typecode" ): typecode = ns1.getOutputAsBase64ByNameInput_Dec() def __init__(self, name=None, ns=None): ns1.getOutputAsBase64ByNameInput_Dec.__init__(self, name=None, ns=None) class getOutputAsBase64ByNameRequestWrapper(getOutputAsBase64ByNameRequest): """wrapper for document:literal message""" typecode = getOutputAsBase64ByNameRequest( name=None, ns=None ).typecode def __init__( self, name=None, ns=None, **kw ): getOutputAsBase64ByNameRequest.__init__( self, name=None, ns=None ) class getOutputAsBase64ByNameResponse(ns1.getOutputAsBase64ByNameOutput_Dec): if not hasattr( ns1.getOutputAsBase64ByNameOutput_Dec(), "typecode" ): typecode = ns1.getOutputAsBase64ByNameOutput_Dec() def __init__(self, name=None, ns=None): ns1.getOutputAsBase64ByNameOutput_Dec.__init__(self, name=None, ns=None) class getOutputAsBase64ByNameResponseWrapper(getOutputAsBase64ByNameResponse): """wrapper for document:literal message""" typecode = getOutputAsBase64ByNameResponse( name=None, ns=None ).typecode def __init__( self, name=None, ns=None, **kw ): getOutputAsBase64ByNameResponse.__init__( self, name=None, ns=None ) class getOutputsRequest(ns1.getOutputsInput_Dec): if not hasattr( ns1.getOutputsInput_Dec(), "typecode" ): typecode = ns1.getOutputsInput_Dec() def __init__(self, name=None, ns=None): 
ns1.getOutputsInput_Dec.__init__(self, name=None, ns=None) class getOutputsRequestWrapper(getOutputsRequest): """wrapper for document:literal message""" typecode = getOutputsRequest( name=None, ns=None ).typecode def __init__( self, name=None, ns=None, **kw ): getOutputsRequest.__init__( self, name=None, ns=None ) class getOutputsResponse(ns1.getOutputsOutput_Dec): if not hasattr( ns1.getOutputsOutput_Dec(), "typecode" ): typecode = ns1.getOutputsOutput_Dec() def __init__(self, name=None, ns=None): ns1.getOutputsOutput_Dec.__init__(self, name=None, ns=None) class getOutputsResponseWrapper(getOutputsResponse): """wrapper for document:literal message""" typecode = getOutputsResponse( name=None, ns=None ).typecode def __init__( self, name=None, ns=None, **kw ): getOutputsResponse.__init__( self, name=None, ns=None ) class launchJobBlockingRequest(ns1.launchJobBlockingInput_Dec): if not hasattr( ns1.launchJobBlockingInput_Dec(), "typecode" ): typecode = ns1.launchJobBlockingInput_Dec() def __init__(self, name=None, ns=None): ns1.launchJobBlockingInput_Dec.__init__(self, name=None, ns=None) class launchJobBlockingRequestWrapper(launchJobBlockingRequest): """wrapper for document:literal message""" typecode = launchJobBlockingRequest( name=None, ns=None ).typecode def __init__( self, name=None, ns=None, **kw ): launchJobBlockingRequest.__init__( self, name=None, ns=None ) class launchJobBlockingResponse(ns1.launchJobBlockingOutput_Dec): if not hasattr( ns1.launchJobBlockingOutput_Dec(), "typecode" ): typecode = ns1.launchJobBlockingOutput_Dec() def __init__(self, name=None, ns=None): ns1.launchJobBlockingOutput_Dec.__init__(self, name=None, ns=None) class launchJobBlockingResponseWrapper(launchJobBlockingResponse): """wrapper for document:literal message""" typecode = launchJobBlockingResponse( name=None, ns=None ).typecode def __init__( self, name=None, ns=None, **kw ): launchJobBlockingResponse.__init__( self, name=None, ns=None ) class launchJobRequest(ns1.launchJobInput_Dec): if not hasattr( ns1.launchJobInput_Dec(), "typecode" ): typecode = ns1.launchJobInput_Dec() def __init__(self, name=None, ns=None): ns1.launchJobInput_Dec.__init__(self, name=None, ns=None) class launchJobRequestWrapper(launchJobRequest): """wrapper for document:literal message""" typecode = launchJobRequest( name=None, ns=None ).typecode def __init__( self, name=None, ns=None, **kw ): launchJobRequest.__init__( self, name=None, ns=None ) class launchJobResponse(ns1.launchJobOutput_Dec): if not hasattr( ns1.launchJobOutput_Dec(), "typecode" ): typecode = ns1.launchJobOutput_Dec() def __init__(self, name=None, ns=None): ns1.launchJobOutput_Dec.__init__(self, name=None, ns=None) class launchJobResponseWrapper(launchJobResponse): """wrapper for document:literal message""" typecode = launchJobResponse( name=None, ns=None ).typecode def __init__( self, name=None, ns=None, **kw ): launchJobResponse.__init__( self, name=None, ns=None ) class queryStatusRequest(ns1.queryStatusInput_Dec): if not hasattr( ns1.queryStatusInput_Dec(), "typecode" ): typecode = ns1.queryStatusInput_Dec() def __init__(self, name=None, ns=None): ns1.queryStatusInput_Dec.__init__(self, name=None, ns=None) class queryStatusRequestWrapper(queryStatusRequest): """wrapper for document:literal message""" typecode = queryStatusRequest( name=None, ns=None ).typecode def __init__( self, name=None, ns=None, **kw ): queryStatusRequest.__init__( self, name=None, ns=None ) class queryStatusResponse(ns1.queryStatusOutput_Dec): if not hasattr( 
ns1.queryStatusOutput_Dec(), "typecode" ): typecode = ns1.queryStatusOutput_Dec() def __init__(self, name=None, ns=None): ns1.queryStatusOutput_Dec.__init__(self, name=None, ns=None) class queryStatusResponseWrapper(queryStatusResponse): """wrapper for document:literal message""" typecode = queryStatusResponse( name=None, ns=None ).typecode def __init__( self, name=None, ns=None, **kw ): queryStatusResponse.__init__( self, name=None, ns=None )
2.03125
2
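A minimal usage sketch for the generated ZSI stub above; the endpoint URL is a placeholder and error handling is omitted. queryStatus and getOutputs take a plain job-ID string per the generated docstrings, so they are the simplest operations to drive.

# Sketch only: the service URL below is an assumption, not the real deployment.
from AppService_services import AppServiceLocator

locator = AppServiceLocator()
# Override the baked-in address with your own Opal service endpoint.
port = locator.getAppServicePortType(
    portAddress="https://example.org:8443/axis/services/AutogridServicePort")

status = port.queryStatus("some-job-id")   # returns a queryStatusResponse
outputs = port.getOutputs("some-job-id")   # returns a getOutputsResponse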
examples/pytorch/swin/checkpoint_quantization.py
hieuhoang/FasterTransformer
0
4686
# Copyright (c) 2020-2022, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import argparse import re import numpy as np import torch ACTIVATION_AMAX_NUM = 72 INT8O_KERNEL_NUM = 5 INT8O_GEMM_NUM = 7 TRT_FUSED_MHA_AMAX_NUM = 3 SCALE_RESERVE_NUM = 8 def extract_amaxlist(init_dict, depths, ths_path='../lib/libpyt_swintransformer.so', verbose=True): # print("Quantizing checkpoint ...") torch.classes.load_library(ths_path) weight_quantize = torch.ops.fastertransformer.swin_weight_quantize layer_num = len(depths) amaxTotalNum = ACTIVATION_AMAX_NUM + INT8O_KERNEL_NUM + INT8O_GEMM_NUM + 1 + TRT_FUSED_MHA_AMAX_NUM + SCALE_RESERVE_NUM kernel_name_list = ["attn.qkv", "attn.proj", "mlp.fc1", "mlp.fc2"] amax_name_list = ["attn.qkv._input_quantizer", "attn.qkv._aftergemm_quantizer", "attn.proj._input_quantizer", "attn.proj._aftergemm_quantizer", "attn.matmul_q_input_quantizer", "attn.matmul_k_input_quantizer", "attn.matmul_v_input_quantizer", "attn.matmul_a_input_quantizer", "attn.softmax_input_quantizer", "mlp.fc1._input_quantizer", "mlp.fc1._aftergemm_quantizer", "mlp.fc2._input_quantizer", "mlp.fc2._aftergemm_quantizer", "add1_residual_input_quantizer", "add2_residual_input_quantizer" ] int8O_gemm_weight_amax_list = [0 for i in range(INT8O_GEMM_NUM)] int8O_gemm_weight_list = ["attn.qkv", "attn.proj", "mlp.fc1", "mlp.fc2", "attn.matmul_k_input_quantizer", "attn.matmul_v_input_quantizer"] int8O_gemm_input_amax_list = [0 for i in range(INT8O_GEMM_NUM)] int8O_gemm_input_list = ["attn.qkv._input_quantizer", "attn.proj._input_quantizer", "mlp.fc1._input_quantizer", "mlp.fc2._input_quantizer", "attn.matmul_q_input_quantizer", "attn.matmul_a_input_quantizer"] int8O_gemm_output_amax_list = [0 for i in range(INT8O_GEMM_NUM)] int8O_gemm_output_list = ["attn.qkv._aftergemm_quantizer", "attn.proj._aftergemm_quantizer", "mlp.fc1._aftergemm_quantizer", "mlp.fc2._aftergemm_quantizer", "attn.softmax_input_quantizer", "attn.proj._input_quantizer"] downsample_input = "downsample.reduction._input_quantizer" downsample_weight = "downsample.reduction._weight_quantizer" downsample_out = "downsample.reduction._aftergemm_quantizer" factor = 1000000.0 for i in range(layer_num): for depth in range(depths[i]): amaxList = np.zeros([amaxTotalNum]).astype(np.float32) amax_id = 0 for amax_name in amax_name_list: quant_max = init_dict["layers.{}.blocks.{}.{}._amax".format(i, depth, amax_name)].item() amax = abs(quant_max)#round(abs(quant_max)*factor)/factor if amax_name in int8O_gemm_input_list: int8O_gemm_input_amax_list[int8O_gemm_input_list.index(amax_name)] = amax if amax_name in int8O_gemm_output_list: int8O_gemm_output_amax_list[int8O_gemm_output_list.index(amax_name)] = amax if amax_name in int8O_gemm_weight_list: int8O_gemm_weight_amax_list[int8O_gemm_weight_list.index(amax_name)] = amax amaxList[amax_id] = amax amax_id += 1 amaxList[amax_id] = amax/127.0 amax_id += 1 amaxList[amax_id] = amax/127.0/127.0 amax_id += 1 amaxList[amax_id] = 127.0/amax amax_id += 1 # if verbose: # print(i, 
amax_name) # print('quant_max:', quant_max) # print('amax:', amax) if i != layer_num - 1: amax = init_dict["layers.{}.{}._amax".format(i, downsample_input)].item() amaxList[amax_id] = amax amax_id += 1 amaxList[amax_id] = amax/127.0 amax_id += 1 amaxList[amax_id] = amax/127.0/127.0 amax_id += 1 amaxList[amax_id] = 127.0/amax amax_id += 1 amax = init_dict["layers.{}.{}._amax".format(i, downsample_out)].item() amaxList[amax_id] = amax amax_id += 1 amaxList[amax_id] = amax/127.0 amax_id += 1 amaxList[amax_id] = amax/127.0/127.0 amax_id += 1 amaxList[amax_id] = 127.0/amax amax_id += 1 else: amax_id += 8 if verbose: print("done process layer_{} block_{} activation amax".format(i, depth)) #kernel amax starts from ACTIVATION_AMAX_NUM assert amax_id == 68 amax_id = ACTIVATION_AMAX_NUM for kernel_id, kernel_name in enumerate(kernel_name_list): kernel = init_dict["layers.{}.blocks.{}.{}.weight".format(i, depth, kernel_name)].transpose(-1, -2).contiguous() quant_max2 = init_dict["layers.{}.blocks.{}.{}._weight_quantizer._amax".format(i, depth, kernel_name)] amax2 = abs(quant_max2) # if (amax2.dim() == 0): # quant_max_processed = torch.full((kernel.size(1),), amax2.item(), dtype=amax2.dtype, device=amax2.device) # else: # quant_max_processed = amax2.view(-1) kernel_processed = weight_quantize(kernel, amax2.cuda()) init_dict["layers.{}.blocks.{}.{}.weight".format(i, depth, kernel_name)] = kernel_processed if kernel_name in int8O_gemm_weight_list: int8O_gemm_weight_amax_list[int8O_gemm_weight_list.index(kernel_name)] = amax2.item() amaxList[amax_id] = amax2 amax_id += 1 # if verbose: # print(i, kernel_name) # print('kernel:', kernel) # print('quant_max2:', quant_max2) # print('quant_max_processed_:', quant_max_processed) if i != layer_num - 1: amaxList[amax_id] = init_dict["layers.{}.downsample.reduction._weight_quantizer._amax".format(i)].item() amax_id += 1 assert amax_id == ACTIVATION_AMAX_NUM + INT8O_KERNEL_NUM #for int8O gemm deQuant for j in range(INT8O_GEMM_NUM - 1): amaxList[amax_id] = (int8O_gemm_input_amax_list[j]*int8O_gemm_weight_amax_list[j])/(127.0*int8O_gemm_output_amax_list[j]) # print('layernum:', i, 'j:', j, ' gemm_int8IO_scale:',amaxList[amax_id]) # print(int8O_gemm_input_amax_list[j], int8O_gemm_weight_amax_list[j], int8O_gemm_output_amax_list[j]) amax_id += 1 if i != layer_num - 1: patchMerge_i = init_dict["layers.{}.{}._amax".format(i, downsample_input)].item() patchMerge_w = init_dict["layers.{}.{}._amax".format(i, downsample_weight)].item() patchMerge_o = init_dict["layers.{}.{}._amax".format(i, downsample_out)].item() amaxList[amax_id] = (patchMerge_i * patchMerge_w) / (127 * patchMerge_o) amax_id += 1 assert amax_id == ACTIVATION_AMAX_NUM + INT8O_KERNEL_NUM + INT8O_GEMM_NUM amax_id += 1 #for trt fused MHA amax #### QKV_addBias_amax # amaxList[amax_id] = np.maximum(np.maximum(amaxList[16],amaxList[20]), amaxList[24]) # amax_id += 1 # #### softmax amax # amaxList[amax_id] = amaxList[28] # amax_id += 1 # #### bmm2 amax # amaxList[amax_id] = amaxList[8] # amax_id += 1 qkvMax = np.maximum(np.maximum(amaxList[16],amaxList[20]), amaxList[24]) amaxList[amax_id] = amaxList[16] * amaxList[20] / (127.0 * 127.0) amax_id += 1 amaxList[amax_id] = 127.0 / amaxList[28] amax_id += 1 amaxList[amax_id] = amaxList[24] * amaxList[28] / (127.0 * amaxList[8]) amax_id += 1 init_dict["layers.{}.blocks.{}.amaxList".format(i, depth)] = torch.tensor(amaxList, dtype=torch.float32) if verbose: print("done process layer_{} block_{} kernel weight".format(i, depth)) if i != layer_num - 1: kernel = 
init_dict["layers.{}.downsample.reduction.weight".format(i)] quant_max2 = init_dict["layers.{}.downsample.reduction._weight_quantizer._amax".format(i)] amax2 = abs(quant_max2) kernel = kernel.transpose(-1, -2).contiguous() kernel_processed = weight_quantize(kernel, amax2.cuda()) init_dict["layers.{}.downsample.reduction.weight".format(i)] = kernel_processed # print("Quantizing checkpoint done.") return init_dict if __name__ == '__main__': weights = torch.load('pytorch_model.bin') extract_amaxlist(weights, [2, 2, 6, 2])
1.554688
2
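A standalone sketch of the four-slot scale layout the extraction loop above writes for each activation amax (the value here is illustrative; the real code reads amax from the calibrated checkpoint):

import numpy as np

# For every activation amax, the extractor stores four derived values in
# amaxList: amax itself, the int8 quantization step amax/127, the product
# step amax/127/127 used for int8 GEMM outputs, and the inverse scale 127/amax.
def amax_slots(amax):
    return np.array([amax, amax / 127.0, amax / 127.0 / 127.0, 127.0 / amax],
                    dtype=np.float32)

print(amax_slots(2.5))  # [2.5, 0.0196..., 0.000155..., 50.8]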
mars/deploy/kubernetes/core.py
tomzhang/mars-1
0
4687
# -*- coding: utf-8 -*- # Copyright 1999-2020 Alibaba Group Holding Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import os import random import time from ...actors import new_client, FunctionActor logger = logging.getLogger(__name__) class K8SPodsIPWatcher(object): """ Pods watcher class, compatible with SchedulerDiscoverer """ dynamic = True def __init__(self, k8s_config=None, k8s_namespace=None, label_selector=None): from kubernetes import config, client from gevent.threadpool import ThreadPool if k8s_config is not None: self._k8s_config = k8s_config elif os.environ.get('KUBE_API_ADDRESS'): self._k8s_config = client.Configuration() self._k8s_config.host = os.environ['KUBE_API_ADDRESS'] else: self._k8s_config = config.load_incluster_config() self._k8s_namespace = k8s_namespace or os.environ.get('MARS_K8S_POD_NAMESPACE') or 'default' self._label_selector = label_selector self._client = client.CoreV1Api(client.ApiClient(self._k8s_config)) self._pool = ThreadPool(1) self._pod_to_ep = None def __reduce__(self): return type(self), (self._k8s_config, self._k8s_namespace, self._label_selector) def _extract_pod_name_ep(self, pod_data): svc_port = pod_data['spec']['containers'][0]['ports'][0]['container_port'] return pod_data['metadata']['name'], '%s:%s' % (pod_data['status']['pod_ip'], svc_port) @staticmethod def _extract_pod_ready(obj_data): # if conditions not supported, always return True if 'status' not in obj_data or 'conditions' not in obj_data['status']: return True return any(cond['type'] == 'Ready' and cond['status'] == 'True' for cond in obj_data['status']['conditions']) def _get_pod_to_ep(self): query = self._pool.spawn(self._client.list_namespaced_pod, namespace=self._k8s_namespace, label_selector=self._label_selector).result().to_dict() result = dict() for el in query['items']: name, pod_ep = self._extract_pod_name_ep(el) if pod_ep is not None and not self._extract_pod_ready(el): pod_ep = None result[name] = pod_ep return result def get(self, update=False): if self._pod_to_ep is None or update: self._pod_to_ep = self._get_pod_to_ep() return sorted(a for a in self._pod_to_ep.values() if a is not None) def is_all_ready(self): self.get(True) return all(a is not None for a in self._pod_to_ep.values()) def watch(self): from urllib3.exceptions import ReadTimeoutError from kubernetes import watch cur_pods = set(self.get(True)) w = watch.Watch() while True: # when some schedulers are not ready, we refresh faster linger = 10 if self.is_all_ready() else 1 streamer = w.stream(self._client.list_namespaced_pod, namespace=self._k8s_namespace, label_selector=self._label_selector, timeout_seconds=linger) while True: try: event = self._pool.spawn(next, streamer, StopIteration).result() if event is StopIteration: raise StopIteration except (ReadTimeoutError, StopIteration): new_pods = set(self.get(True)) if new_pods != cur_pods: cur_pods = new_pods yield self.get(False) break except: # noqa: E722 logger.exception('Unexpected error when watching on kubernetes') break obj_dict = 
event['object'].to_dict() pod_name, endpoint = self._extract_pod_name_ep(obj_dict) self._pod_to_ep[pod_name] = endpoint \ if endpoint and self._extract_pod_ready(obj_dict) else None yield self.get(False) class ReadinessActor(FunctionActor): """ Dummy actor indicating service start """ @classmethod def default_uid(cls): return 'k:0:%s' % cls.__name__ class K8SServiceMixin: @staticmethod def write_pid_file(): with open('/tmp/mars-service.pid', 'w') as pid_file: pid_file.write(str(os.getpid())) def wait_all_schedulers_ready(self): """ Wait till all containers are ready, both in kubernetes and in ClusterInfoActor """ from ...scheduler.utils import SchedulerClusterInfoActor # check if all schedulers are ready using Kubernetes API sleep_fun = (getattr(self, 'pool', None) or time).sleep while not self.scheduler_discoverer.is_all_ready(): sleep_fun(1) kube_schedulers = self.scheduler_discoverer.get() logger.debug('Schedulers all ready in kubernetes, waiting ClusterInfoActor to be ready') # check if all schedulers are registered in ClusterInfoActor actor_client = new_client() while True: cluster_info = actor_client.actor_ref( SchedulerClusterInfoActor.default_uid(), address=random.choice(kube_schedulers)) cluster_info_schedulers = cluster_info.get_schedulers() if set(cluster_info_schedulers) == set(kube_schedulers): from ...cluster_info import INITIAL_SCHEDULER_FILE with open(INITIAL_SCHEDULER_FILE, 'w') as scheduler_file: scheduler_file.write(','.join(cluster_info_schedulers)) logger.debug('Scheduler detection finished. Result: %r', kube_schedulers) break sleep_fun(1) # pragma: no cover def create_scheduler_discoverer(self): self.scheduler_discoverer = K8SPodsIPWatcher(label_selector='name=marsscheduler')
1.90625
2
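The readiness predicate in K8SPodsIPWatcher is self-contained; a small sketch of the same logic running against pod dicts of the shape the kubernetes client returns:

# Mirrors K8SPodsIPWatcher._extract_pod_ready above: a pod counts as ready
# when any status condition has type 'Ready' with status 'True'; pods whose
# API objects carry no conditions at all are treated as ready.
def pod_ready(obj_data):
    if 'status' not in obj_data or 'conditions' not in obj_data['status']:
        return True
    return any(cond['type'] == 'Ready' and cond['status'] == 'True'
               for cond in obj_data['status']['conditions'])

print(pod_ready({'status': {'conditions': [{'type': 'Ready', 'status': 'True'}]}}))   # True
print(pod_ready({'status': {'conditions': [{'type': 'Ready', 'status': 'False'}]}}))  # False
print(pod_ready({}))  # True (no conditions reported)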
tests/tests_model/tests_bert_model.py
elangovana/gene_normalisation
1
4688
from unittest import TestCase

import torch
import transformers

from model.bert_model import BertModel


class TestBertModel(TestCase):

    def test_forward(self):
        # Bert Config
        vocab_size = 10
        sequence_len = 20
        batch = 32
        num_classes = 3
        expected_shape = (batch, sequence_len, num_classes)
        input_batch = torch.randint(low=0, high=vocab_size - 1, size=(batch, sequence_len))
        config = transformers.BertConfig(vocab_size=vocab_size, hidden_size=10,
                                         num_hidden_layers=1, num_attention_heads=1,
                                         num_labels=num_classes)
        sut = BertModel(None, None, bert_config=config)

        # Act
        actual = sut.forward(input_batch)[0]

        # Assert
        self.assertEqual(expected_shape, actual.shape)
2.96875
3
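The repo's custom BertModel is not shown here, but the shape contract the test asserts matches a stock transformers token-classification head; a sketch under that assumption:

# Sketch of the (batch, sequence_len, num_classes) logits contract using a
# stock transformers head; the repo's BertModel wrapper is assumed to expose
# the same tuple-style output that the test indexes with [0].
import torch
import transformers

config = transformers.BertConfig(vocab_size=10, hidden_size=10, num_hidden_layers=1,
                                 num_attention_heads=1, num_labels=3)
model = transformers.BertForTokenClassification(config)
logits = model(torch.randint(0, 10, (32, 20)))[0]
print(logits.shape)  # torch.Size([32, 20, 3])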
tests/renderer_test.py
tmcclintock/PyDonJuan
2
4689
import json
import os
import tempfile
from unittest import TestCase

import pytest

from donjuan import Dungeon, DungeonRandomizer, Renderer


class RendererTest(TestCase):
    def setUp(self):
        super().setUp()
        self.TEMP_DIR = tempfile.mkdtemp()

    def test_smoke(self):
        r = Renderer()
        assert r is not None

    def test_scale(self):
        r = Renderer(scale=3)
        assert r.scale == 3

    @pytest.mark.slow
    def test_render_dummy_dungeon(self):
        inpath = os.path.abspath(os.path.dirname(__file__))
        inpath = os.path.join(inpath, "fixtures/dummy_dungeon.json")
        with open(inpath, "r") as f:
            darr = json.load(f)["dungeon"]
        n_rows = len(darr)
        n_cols = len(darr)
        dungeon = Dungeon(n_rows=n_rows, n_cols=n_cols)
        for i in range(n_rows):
            for j in range(n_cols):
                dungeon.grid.cells[i][j].filled = bool(darr[i][j])

        # Render and check for the file
        fp = os.path.join(self.TEMP_DIR, "rendered_dungeon.png")
        r = Renderer()
        r.render(dungeon, file_path=fp)
        assert os.path.exists(fp)

    @pytest.mark.slow
    def test_render_dungeon_with_rooms(self):
        randomizer = DungeonRandomizer()
        dungeon = Dungeon(10, 10, randomizers=[randomizer])
        dungeon.randomize()
        dungeon.emplace_rooms()
        renderer = Renderer()

        # Render and check for the file
        fp = os.path.join(self.TEMP_DIR, "rendered_dungeon.png")
        renderer.render(dungeon, file_path=fp)
        assert os.path.exists(fp)
2.5
2
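The @pytest.mark.slow marker above is custom; unless the project already registers it, pytest warns about unknown marks. A minimal conftest.py sketch (an assumption, not taken from the repo):

# conftest.py sketch: register the custom "slow" marker so pytest accepts
# @pytest.mark.slow without warnings; `pytest -m "not slow"` then skips them.
def pytest_configure(config):
    config.addinivalue_line("markers", "slow: marks a test as slow to run")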
src/foremast/validate.py
dnava013/foremast
157
4690
<gh_stars>100-1000
"""Spinnaker validate functions."""
import logging

from .consts import API_URL
from .utils.credentials import get_env_credential

LOG = logging.getLogger(__name__)


def validate_gate():
    """Check Gate connection."""
    try:
        credentials = get_env_credential()
        LOG.debug('Found credentials: %s', credentials)
        LOG.info('Gate working.')
    except TypeError:
        LOG.fatal('Gate connection not valid: API_URL = %s', API_URL)


def validate_all(args):
    """Run all validate steps."""
    LOG.debug('Args: %s', args)
    LOG.info('Running all validate steps.')
    validate_gate()
2.71875
3
constellation_forms/migrations/0001_initial.py
ConstellationApps/Forms
2
4691
<filename>constellation_forms/migrations/0001_initial.py
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-15 00:56
from __future__ import unicode_literals

from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Form',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('form_id', models.IntegerField()),
                ('version', models.IntegerField()),
                ('name', models.TextField()),
                ('description', models.TextField(blank=True)),
                ('elements', django.contrib.postgres.fields.jsonb.JSONField()),
            ],
            options={
                'ordering': ('-version',),
                'db_table': 'form',
            },
        ),
        migrations.CreateModel(
            name='FormSubmission',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('state', models.IntegerField(choices=[(0, 'draft'), (1, 'submitted'),
                                                       (2, 'approved'), (3, 'denied')])),
                ('modified', models.DateField()),
                ('submission', django.contrib.postgres.fields.jsonb.JSONField()),
                ('form', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                           to='constellation_forms.Form')),
                ('owner', models.ForeignKey(blank=True, null=True,
                                            on_delete=django.db.models.deletion.CASCADE,
                                            to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'db_table': 'form_submission',
            },
        ),
        migrations.CreateModel(
            name='Validator',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('name', models.TextField()),
                ('regex', models.TextField()),
            ],
            options={
                'db_table': 'validators',
            },
        ),
        migrations.AlterUniqueTogether(
            name='form',
            unique_together=set([('form_id', 'version')]),
        ),
    ]
1.625
2
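The migration above stores form elements and submissions as JSONB; a sketch of querying those columns through the ORM, assuming the models live at constellation_forms.models and a PostgreSQL backend:

# Hypothetical query sketch against the tables created by the migration;
# the import path is an assumption. state=0 corresponds to 'draft' per the
# IntegerField choices, and Form's Meta ordering is ('-version',).
from constellation_forms.models import Form, FormSubmission

drafts = FormSubmission.objects.filter(state=0)        # all draft submissions
latest_form = Form.objects.filter(form_id=1).first()   # highest version first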
src/webpy1/src/borough/dbsqli.py
ptphp/PyLib
1
4692
<gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sqlite3 as sqlite
import os.path as osp
import sys


class Sqli(object):
    conn = ''
    cursor = ''

    def __init__(self, dbname):
        try:
            self.conn = sqlite.connect(osp.abspath(dbname))
        except Exception, what:
            print what
            sys.exit()
        self.conn.row_factory = sqlite.Row
        self.cursor = self.conn.cursor()

    def createTable(self):
        self.cursor.execute('''
            CREATE TABLE IF NOT EXISTS [website](
                [id] INTEGER PRIMARY KEY,
                [siteName] TEXT,
                [loginUrl] TEXT,
                [loginQuery] TEXT,
                [postUrl] TEXT,
                [postQuery] TEXT,
                UNIQUE([siteName]));
        ''')
        print "create table website"
        self.cursor.execute('''
            CREATE INDEX IF NOT EXISTS [website_idx_siteName]
            ON [website]([siteName]);
        ''')
        print 'create website index'
        self.conn.commit()

    def createTable_com(self):
        self.cursor.execute('''
            CREATE TABLE IF NOT EXISTS [com](
                [id] INTEGER PRIMARY KEY,
                [title] TEXT,
                [city] TEXT,
                [url] TEXT,
                UNIQUE([url]));
        ''')
        print "create table com"
        self.cursor.execute('''
            CREATE INDEX IF NOT EXISTS [website_idx_url]
            ON [com]([url]);
        ''')
        print 'create map index'
        self.conn.commit()

    def createTable_58(self):
        # Identical schema to createTable_com; the original duplicated the body.
        self.createTable_com()

    def query(self, sql):
        try:
            self.cursor.execute(sql)
            self.conn.commit()
        except Exception, what:
            print what

    def show(self):
        r = self.cursor.fetchall()
        return r

    def showone(self):
        return self.cursor.fetchone()

    def __del__(self):
        self.cursor.close()
        self.conn.close()
3.5
4
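A short usage sketch for the Sqli wrapper above (Python 2, to match the module); the database file name is arbitrary:

db = Sqli('borough.db')
db.createTable_com()
db.query("INSERT OR IGNORE INTO com (title, city, url) "
         "VALUES ('demo', 'beijing', 'http://example.com')")
db.query("SELECT * FROM com")
for row in db.show():
    # row_factory = sqlite.Row allows access by column name
    print row['title'], row['url']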
losses/all_lost.py
Liudzz/loss-chapter
2
4693
""" easy way to use losses """ from center_loss import Centerloss import torch.nn as nn from FocalLoss import FocalLoss def center_loss(pred,label,num_calss,feature): loss = Centerloss(num_calss,feature) return loss(pred,label) def Focal_loss(pred,label,num_calss,alaph=None, gamma): loss = Centerloss(num_calss,gamma) return loss(pred,label) def L1_loss(pred,label): loss = nn.L1Loss(pred,label) return loss def L2_loss(pred,label): loss = nn.MSELoss(pred,label) return loss def SmoothL1_loss(pred,label): loss = nn.SmoothL1Loss(pred,label) return loss
2.734375
3
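A usage sketch for the wrappers above; the elementwise losses are self-contained, while center_loss and Focal_loss depend on the repo's Centerloss and FocalLoss classes, whose exact signatures are not shown here. The import path is an assumption.

import torch

from all_lost import L1_loss, L2_loss, SmoothL1_loss  # assumed module path

pred = torch.randn(8, 10)    # e.g. a batch of 8 predictions
target = torch.randn(8, 10)

print(L1_loss(pred, target))        # mean absolute error
print(L2_loss(pred, target))        # mean squared error
print(SmoothL1_loss(pred, target))  # Huber-style loss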
nova/tests/functional/test_metadata.py
Nexenta/nova
1
4694
# Copyright 2016 Rackspace Australia # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import fixtures import jsonschema import os import requests from oslo_serialization import jsonutils from oslo_utils import uuidutils from nova import test from nova.tests import fixtures as nova_fixtures from nova.tests.functional import fixtures as func_fixtures from nova.tests.functional import integrated_helpers from nova.tests.unit.image import fake as fake_image class fake_result(object): def __init__(self, result): self.status_code = 200 self.text = jsonutils.dumps(result) real_request = requests.request def fake_request(obj, url, method, **kwargs): if url.startswith('http://127.0.0.1:123'): return fake_result({'a': 1, 'b': 'foo'}) if url.startswith('http://127.0.0.1:124'): return fake_result({'c': 3}) if url.startswith('http://127.0.0.1:125'): return fake_result(jsonutils.loads(kwargs.get('data', '{}'))) return real_request(method, url, **kwargs) class MetadataTest(test.TestCase, integrated_helpers.InstanceHelperMixin): def setUp(self): super(MetadataTest, self).setUp() fake_image.stub_out_image_service(self) self.addCleanup(fake_image.FakeImageService_reset) self.useFixture(nova_fixtures.NeutronFixture(self)) self.useFixture(func_fixtures.PlacementFixture()) self.start_service('conductor') self.start_service('scheduler') self.api = self.useFixture( nova_fixtures.OSAPIFixture(api_version='v2.1')).api self.start_service('compute') # create a server for the tests server = self._build_server(name='test') server = self.api.post_server({'server': server}) self.server = self._wait_for_state_change(server, 'ACTIVE') self.api_fixture = self.useFixture(nova_fixtures.OSMetadataServer()) self.md_url = self.api_fixture.md_url # make sure that the metadata service returns information about the # server we created above def fake_get_fixed_ip_by_address(self, ctxt, address): return {'instance_uuid': server['id']} self.useFixture( fixtures.MonkeyPatch( 'nova.network.neutron.API.get_fixed_ip_by_address', fake_get_fixed_ip_by_address)) def test_lookup_metadata_root_url(self): res = requests.request('GET', self.md_url, timeout=5) self.assertEqual(200, res.status_code) def test_lookup_metadata_openstack_url(self): url = '%sopenstack' % self.md_url res = requests.request('GET', url, timeout=5, headers={'X-Forwarded-For': '127.0.0.2'}) self.assertEqual(200, res.status_code) def test_lookup_metadata_data_url(self): url = '%sopenstack/latest/meta_data.json' % self.md_url res = requests.request('GET', url, timeout=5) self.assertEqual(200, res.status_code) j = jsonutils.loads(res.text) self.assertIn('hostname', j) self.assertEqual('test.novalocal', j['hostname']) def test_lookup_external_service(self): self.flags( vendordata_providers=['StaticJSON', 'DynamicJSON'], vendordata_dynamic_targets=[ 'testing@http://127.0.0.1:123', 'hamster@http://127.0.0.1:123' ], group='api' ) self.useFixture(fixtures.MonkeyPatch( 'keystoneauth1.session.Session.request', fake_request)) url = '%sopenstack/2016-10-06/vendor_data2.json' % 
self.md_url res = requests.request('GET', url, timeout=5) self.assertEqual(200, res.status_code) j = jsonutils.loads(res.text) self.assertEqual({}, j['static']) self.assertEqual(1, j['testing']['a']) self.assertEqual('foo', j['testing']['b']) self.assertEqual(1, j['hamster']['a']) self.assertEqual('foo', j['hamster']['b']) def test_lookup_external_service_no_overwrite(self): self.flags( vendordata_providers=['DynamicJSON'], vendordata_dynamic_targets=[ 'testing@http://127.0.0.1:123', 'testing@http://127.0.0.1:124' ], group='api' ) self.useFixture(fixtures.MonkeyPatch( 'keystoneauth1.session.Session.request', fake_request)) url = '%sopenstack/2016-10-06/vendor_data2.json' % self.md_url res = requests.request('GET', url, timeout=5) self.assertEqual(200, res.status_code) j = jsonutils.loads(res.text) self.assertNotIn('static', j) self.assertEqual(1, j['testing']['a']) self.assertEqual('foo', j['testing']['b']) self.assertNotIn('c', j['testing']) def test_lookup_external_service_passes_data(self): # Much of the data we pass to the REST service is missing because of # the way we've created the fake instance, but we should at least try # and ensure we're passing _some_ data through to the external REST # service. self.flags( vendordata_providers=['DynamicJSON'], vendordata_dynamic_targets=[ 'testing@http://127.0.0.1:125' ], group='api' ) self.useFixture(fixtures.MonkeyPatch( 'keystoneauth1.session.Session.request', fake_request)) url = '%sopenstack/2016-10-06/vendor_data2.json' % self.md_url res = requests.request('GET', url, timeout=5) self.assertEqual(200, res.status_code) j = jsonutils.loads(res.text) self.assertIn('instance-id', j['testing']) self.assertTrue(uuidutils.is_uuid_like(j['testing']['instance-id'])) self.assertIn('hostname', j['testing']) self.assertEqual(self.server['tenant_id'], j['testing']['project-id']) self.assertIn('metadata', j['testing']) self.assertIn('image-id', j['testing']) self.assertIn('user-data', j['testing']) def test_network_data_matches_schema(self): self.useFixture(fixtures.MonkeyPatch( 'keystoneauth1.session.Session.request', fake_request)) url = '%sopenstack/latest/network_data.json' % self.md_url res = requests.request('GET', url, timeout=5) self.assertEqual(200, res.status_code) # load the jsonschema for network_data schema_file = os.path.normpath(os.path.join( os.path.dirname(os.path.abspath(__file__)), "../../../doc/api_schemas/network_data.json")) with open(schema_file, 'rb') as f: schema = jsonutils.load(f) jsonschema.validate(res.json(), schema)
1.828125
2
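The vendordata_dynamic_targets entries above follow a name@url convention; a tiny sketch of splitting one such entry (illustrative only, not nova's actual parser):

def split_target(target):
    # 'testing@http://127.0.0.1:123' -> ('testing', 'http://127.0.0.1:123');
    # split once from the left so the URL itself is kept intact.
    name, url = target.split('@', 1)
    return name, url

print(split_target('testing@http://127.0.0.1:123'))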
openue/sequence_labeling/subject_labeling_data_manager.py
zxlzr/OpenUE
8
4695
import os
import sys
import json

sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../../bert")))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../../")))
import tokenization
from config import config


class Model_data_preparation(object):

    def __init__(self, DATA_INPUT_DIR="raw_data", DATA_OUTPUT_DIR="SKE_2019_tokened_labeling",
                 vocab_file_path="vocab.txt", do_lower_case=True, General_Mode=False):
        # Initialize the BERT tokenizer
        self.bert_tokenizer = tokenization.FullTokenizer(
            vocab_file=self.get_vocab_file_path(vocab_file_path), do_lower_case=do_lower_case)
        self.DATA_INPUT_DIR = self.get_data_input_dir(DATA_INPUT_DIR)
        self.DATA_OUTPUT_DIR = os.path.join(os.path.dirname(__file__), DATA_OUTPUT_DIR)
        self.General_Mode = General_Mode

    def get_data_input_dir(self, DATA_INPUT_DIR):
        # Fixed: the original assigned the joined path to a misspelled
        # "DATAself_INPUT_DIR" variable and returned the argument unchanged.
        DATA_INPUT_DIR = os.path.join(
            os.path.abspath(os.path.join(os.path.dirname(__file__), "../../")), DATA_INPUT_DIR)
        return DATA_INPUT_DIR

    def get_vocab_file_path(self, vocab_file_path):
        print(vocab_file_path)
        return vocab_file_path

    def subject_object_labeling(self, spo_list, text):
        def _spo_list_to_spo_predicate_dict(spo_list):
            spo_predicate_dict = dict()
            for spo_item in spo_list:
                predicate = spo_item["predicate"]
                subject = spo_item["subject"]
                object = spo_item["object"]
                spo_predicate_dict.setdefault(predicate, []).append((subject, object))
            return spo_predicate_dict

        def _gen_event_dic(spo_list):
            res = []
            res_d = {}
            predicate = ""
            for spo_item in spo_list:
                predicate = spo_item["event"]
                if 'time' in spo_item:
                    time = spo_item["time"]
                    res.append(('time', time))
                if 'location' in spo_item:
                    location = spo_item["location"]
                    res.append(('location', location))
                if 'participant' in spo_item:
                    participant = spo_item["participant"]
                    res.append(('participant', participant))
                if 'denoter' in spo_item:
                    denoter = spo_item["denoter"]
                    res.append(('denoter', denoter))
                if 'object' in spo_item:
                    object = spo_item["object"]
                    res.append(('object', object))
            res_d[predicate] = res
            return res_d

        def _index_q_list_in_k_list(q_list, k_list):
            """Known q_list in k_list, find index (first time) of q_list in k_list"""
            q_list_length = len(q_list)
            k_list_length = len(k_list)
            for idx in range(k_list_length - q_list_length + 1):
                t = [q == k for q, k in zip(q_list, k_list[idx: idx + q_list_length])]
                if all(t):
                    idx_start = idx
                    return idx_start

        def _labeling_type(subject_object, so_type):
            tokener_error_flag = False
            so_tokened = self.bert_tokenizer.tokenize(subject_object)
            so_tokened_length = len(so_tokened)
            idx_start = _index_q_list_in_k_list(q_list=so_tokened, k_list=text_tokened)
            if idx_start is None:
                tokener_error_flag = True
                '''
                Entity: "1981年"
                Sentence: "●1981年2月27日,中国人口学会成立"
                so_tokened   ['1981', '年']
                text_tokened ['●', '##19', '##81', '年', '2', '月', '27', '日', ',', '中', '国', '人', '口', '学', '会', '成', '立']
                so_tokened cannot be found in text_tokened! The cause is the "##"
                prefix that bert_tokenizer.tokenize adds to WordPiece pieces.
                '''
                self.bert_tokener_error_log_f.write(subject_object + " @@ " + text + "\n")
                self.bert_tokener_error_log_f.write(str(so_tokened) + " @@ " + str(text_tokened) + "\n")
            else:
                # Label the entity start with B and every following position with I
                labeling_list[idx_start] = "B-" + so_type
                if so_tokened_length == 2:
                    labeling_list[idx_start + 1] = "I-" + so_type
                elif so_tokened_length >= 3:
                    labeling_list[idx_start + 1: idx_start + so_tokened_length] = \
                        ["I-" + so_type] * (so_tokened_length - 1)
            return tokener_error_flag

        text_tokened = self.bert_tokenizer.tokenize(text)
        text_tokened_not_UNK = self.bert_tokenizer.tokenize_not_UNK(text)
        if not self.General_Mode:
            spo_predicate_dict = _spo_list_to_spo_predicate_dict(spo_list)
        else:
            spo_predicate_dict = _gen_event_dic(spo_list)
        for predicate, spo_list_form in spo_predicate_dict.items():
            tokener_error_flag = False
            labeling_list = ["O"] * len(text_tokened)
            if not self.General_Mode:
                for (spo_subject, spo_object) in spo_list_form:
                    flag_A = _labeling_type(spo_subject, "SUB")
                    # Fixed: this call was commented out in the original even
                    # though flag_B is referenced on the next line.
                    flag_B = _labeling_type(spo_object, "OBJ")
                    if flag_A or flag_B:
                        tokener_error_flag = True
            else:
                for item in spo_list_form:
                    if item[1] is None:
                        continue
                    flag_A = _labeling_type(item[1], item[0])
                    if flag_A:
                        tokener_error_flag = True
            # Tag tokens that bert_tokenizer.tokenize split into WordPieces
            # with the special label [##WordPiece]
            for idx, token in enumerate(text_tokened):
                if token.startswith("##"):
                    labeling_list[idx] = "[##WordPiece]"
            if not tokener_error_flag:
                self.token_label_and_one_prdicate_out_f.write(" ".join(labeling_list) + "\t" + predicate + "\n")
                self.text_f.write(text + "\n")
                self.token_in_f.write(" ".join(text_tokened) + "\t" + predicate + "\n")
                self.token_in_not_UNK_f.write(" ".join(text_tokened_not_UNK) + "\n")

    def separate_raw_data_and_token_labeling(self):
        if not os.path.exists(self.DATA_OUTPUT_DIR):
            os.makedirs(os.path.join(self.DATA_OUTPUT_DIR, "train"))
            os.makedirs(os.path.join(self.DATA_OUTPUT_DIR, "valid"))
            os.makedirs(os.path.join(self.DATA_OUTPUT_DIR, "test"))
        for file_set_type in ["train", "valid"]:
            out_dir = os.path.join(self.DATA_OUTPUT_DIR, file_set_type)
            print(out_dir)
            self.token_label_and_one_prdicate_out_f = open(
                os.path.join(out_dir, "token_label_and_one_prdicate_out.txt"), "w", encoding='utf-8')
            self.bert_tokener_error_log_f = open(
                os.path.join(out_dir, "bert_tokener_error_log.txt"), "w", encoding='utf-8')
            self.text_f = open(os.path.join(out_dir, "text.txt"), "w", encoding='utf-8')
            self.token_in_f = open(os.path.join(out_dir, "token_in.txt"), "w", encoding='utf-8')
            self.token_in_not_UNK_f = open(
                os.path.join(out_dir, "token_in_not_UNK.txt"), "w", encoding='utf-8')
            if file_set_type == "train":
                path_to_raw_data_file = "train.json"
            elif file_set_type == "valid":
                path_to_raw_data_file = "valid.json"
            else:
                pass
            with open(os.path.join(self.DATA_INPUT_DIR, path_to_raw_data_file), 'r', encoding='utf-8') as f:
                count_numbers = 0
                while True:
                    line = f.readline()
                    if line:
                        count_numbers += 1
                        r = json.loads(line)
                        text = r["text"]
                        spo_list = r["spo_list"]
                        self.subject_object_labeling(spo_list=spo_list, text=text)
                    else:
                        break
            print("all numbers", count_numbers)
            self.text_f.close()
            self.token_in_f.close()
            self.token_in_not_UNK_f.close()
            self.token_label_and_one_prdicate_out_f.close()
            self.bert_tokener_error_log_f.close()


if __name__ == "__main__":
    DATA_INPUT_DIR = config.data_dir
    DATA_OUTPUT_DIR = "sequence_labeling_data"
    Vocab_Path = config.bert_vocab_dir
    General_Mode = False
    model_data = Model_data_preparation(
        General_Mode=General_Mode, DATA_INPUT_DIR=DATA_INPUT_DIR,
        DATA_OUTPUT_DIR=DATA_OUTPUT_DIR, vocab_file_path=Vocab_Path)
    model_data.separate_raw_data_and_token_labeling()
2.46875
2
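The _index_q_list_in_k_list helper above is the heart of the labeling step; a standalone sketch of the same first-occurrence subsequence search:

# Find the first index at which token list q appears inside token list k,
# mirroring _index_q_list_in_k_list above (returns None when absent).
def index_q_in_k(q_list, k_list):
    for idx in range(len(k_list) - len(q_list) + 1):
        if all(q == k for q, k in zip(q_list, k_list[idx: idx + len(q_list)])):
            return idx
    return None

print(index_q_in_k(['1981', '年'], ['●', '1981', '年', '2', '月']))  # 1
print(index_q_in_k(['1981', '年'], ['##19', '##81', '年']))          # None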
clarifai/rest/grpc/custom_converters/custom_message_to_dict.py
Taik/clarifai-python
322
4696
<reponame>Taik/clarifai-python
import typing  # noqa

from google.protobuf import descriptor
from google.protobuf.json_format import _IsMapEntry, _Printer
from google.protobuf.message import Message  # noqa

from clarifai.rest.grpc.proto.clarifai.api.utils import extensions_pb2


def protobuf_to_dict(object_protobuf, use_integers_for_enums=True, ignore_show_empty=False):
  # type: (Message, typing.Optional[bool], typing.Optional[bool]) -> dict
  printer = _CustomPrinter(
      including_default_value_fields=False,
      preserving_proto_field_name=True,
      use_integers_for_enums=use_integers_for_enums,
      ignore_show_empty=ignore_show_empty)
  # pylint: disable=protected-access
  return printer._MessageToJsonObject(object_protobuf)


class _CustomPrinter(_Printer):

  def __init__(self, including_default_value_fields, preserving_proto_field_name,
               use_integers_for_enums, ignore_show_empty):
    super(_CustomPrinter, self).__init__(including_default_value_fields,
                                         preserving_proto_field_name,
                                         use_integers_for_enums)
    self._ignore_show_empty = ignore_show_empty

  def _RegularMessageToJsonObject(self, message, js):
    """
    Because of the fields with the custom extension `cl_show_if_empty`, we need to adjust
    the original method's return JSON object and keep these fields.
    """
    js = super(_CustomPrinter, self)._RegularMessageToJsonObject(message, js)
    message_descriptor = message.DESCRIPTOR
    for field in message_descriptor.fields:
      if (self._ignore_show_empty and
          not field.GetOptions().Extensions[extensions_pb2.cl_default_float]):
        continue
      if not field.GetOptions().Extensions[extensions_pb2.cl_show_if_empty]:
        continue
      # Singular message fields and oneof fields will not be affected.
      if ((field.label != descriptor.FieldDescriptor.LABEL_REPEATED and
           field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE) or
          field.containing_oneof):
        continue
      if self.preserving_proto_field_name:
        name = field.name
      else:
        name = field.json_name
      if name in js:
        # Skip the field which has been serialized already.
        continue
      if _IsMapEntry(field):
        js[name] = {}
      elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
        js[name] = []
      else:
        js[name] = self._FieldToJsonObject(field, field.default_value)
    return js

  def _StructMessageToJsonObject(self, message):
    """
    Converts Struct message according to Proto3 JSON Specification. However, by default,
    empty objects {} get converted to null. We overwrite this behavior so {} get
    converted to {}.
    """
    fields = message.fields
    ret = {}
    for key in fields:
      # When there's a Struct with an empty Struct field, this condition will hold True.
      # As far as I know this is the only case where this condition is true. If not,
      # this condition needs to be amended.
      if fields[key].WhichOneof('kind') is None:
        json_object = {}
      else:
        json_object = self._ValueMessageToJsonObject(fields[key])
      ret[key] = json_object
    return ret
1.976563
2
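A usage sketch for protobuf_to_dict above with a standard Struct message, showing the empty-object behavior the custom printer preserves:

from google.protobuf import struct_pb2

from clarifai.rest.grpc.custom_converters.custom_message_to_dict import protobuf_to_dict

# Struct supports dict-style update; the nested empty dict becomes an empty
# Struct, which the custom printer keeps as {} instead of null.
msg = struct_pb2.Struct()
msg.update({"a": 1, "b": {}})
print(protobuf_to_dict(msg))  # {'a': 1.0, 'b': {}}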
__init__.py
mschrimpf/CapsNetKeras
0
4697
<gh_stars>0
from .capsulenet import *
1.140625
1
gate/mate_ksx3267v2.py
mrchoi87/IRSOSv4
0
4698
<filename>gate/mate_ksx3267v2.py #!/usr/bin/env python # # -*- coding: utf-8 -*- # # Copyright (c) 2018 JiNong, Inc. # All right reserved. # import struct import time import socket import select import traceback import hashlib import json from enum import IntEnum from threading import Thread, Lock from mate import Mate, ThreadMate, DevType from mblock import MBlock, BlkType, StatCode, ResCode, CmdCode, Observation, Request, Response, NotiCode, Notice from pymodbus.client.sync import ModbusSerialClient from pymodbus.client.sync import ModbusTcpClient class NodeType(IntEnum): SENNODE = 1 ACTNODE = 2 INTNODE = 3 NUTNODE = 4 class ProtoVer(IntEnum): KS_X_3267_2020 = 10 KS_X_3267_2018 = 101 TTA_1 = 201 class KSX3267MateV2(ThreadMate): _SLEEP = 0.5 _VERSION = "KSX3267_0.1" _KEYWORDS = {"value" : (2, "float"), "status" : (1, "status"), "opid" : (1, "short"), "state-hold-time" : (2, "int"), "ratio": (1, "short"), "position" : (1, "short"), "remain-time" : (2, "int"), "control": (1, "control"), "area" : (1, "short"), "alert" : (1, "alert"), "hold-time" : (2, "int"), "operation" : (1, "operation"), "time" : (2, "int"), "opentime" : (1, "short"), "closetime" : (1, "short"), "EC": (2, "float"), "pH": (2, "float"), "on-sec" : (1, "short"), "start-area" : (1, "short"), "stop-area": (1, "short"), "epoch" : (2, "int"), "vfloat": (2, "float"), "vint" : (2, "int")} _DEVINFOREG = 2 _DEVCODEREG = 101 def __init__(self, option, devinfo, coupleid, logger): super(KSX3267MateV2, self).__init__(option, devinfo, coupleid, logger) self._timeout = 3 if "timeout" not in option else option["timeout"] self._conn = {} self._tempthd = [] self._isdetecting = False self._detection = {"port": [], "saddr":0, "eaddr":0, "opid":0} #self._nodes = self._devinfo.getgw()["children"] self._lock = Lock() self._logger.info("KSX3267MateV2 Started.") def detect_node(self, conn, unit, registers): print "detect_node", unit, registers compcode = registers[0] nodecode = registers[2] size = registers[4] while True: res = self.readregister(conn, KSX3267MateV2._DEVCODEREG, size, unit) if res is None or res.isError(): self._logger.warn("Fail to get devices from " + str(unit) + " " + str(res)) return None if len(res.registers) != size: self._logger.info("retry to get data since size of data is not matched. " + str(size) + " " + str(len(res.registers))) continue return {"compcode" : compcode, "nodecode" : nodecode, "devcodes": res.registers} def getdk(self, dev, idx): dk = json.loads(dev["dk"]) return dk[idx] def setdetection(self, flag, opid=0): self._isdetecting = flag self._detection["opid"] = opid def startdetection(self, params, opid): if self._detection["opid"] != 0: self._logger.info("detection is processing.... so this command would be ignored.") return ResCode.FAIL self.setdetection(True, opid) if params: self._detection["saddr"] = params['saddr'] self._detection["eaddr"] = params['eaddr'] self._detection["port"] = params['port'] else: self._detection["saddr"] = 1 self._detection["eaddr"] = 12 self._detection["port"] = None return ResCode.OK def readregister(self, conn, addr, count, unit): print "....... before lock for read" with self._lock: time.sleep(KSX3267MateV2._SLEEP) #mrchoi87 self._logger.info("read_holding_registers: " + str(unit) + " " + str(addr) + " " + str(count)) print "read register", unit, addr, count try: return conn.read_holding_registers(addr, count, unit=unit) except Exception as ex: self._logger.warn("fail to read holding registers. 
: " + str(ex)) return None def detect(self): detected = {} for port, conn in self._conn.iteritems(): if self._isdetecting == False or self.isexecuting() == False: self._logger.info("Total detection is canceled.") break info = self.detectone(port, conn) detected[port] = info self._logger.info ("finished to detect devices : " + str(detected)) noti = Notice(None, NotiCode.DETECT_FINISHED) # Detection Started if noti: noti.setkeyvalue("opid", self._detection["opid"]) for port, info in detected.iteritems(): noti.setcontent(port, info) self.writecb(noti) self.setdetection(False) def detectone(self, port, conn): detected = {} if self._detection["port"] is not None and port not in self._detection["port"]: return detected #mrchoi87 #for unit in range(self._detection["saddr"], 12): for unit in range(self._detection["saddr"], self._detection["eaddr"]): if self._isdetecting == False or self.isexecuting() == False: self._logger.info("A port " + str(port) + " detection is canceled.") break tempid = port + "-" + str(unit) noti = Notice(None, NotiCode.DETECT_NODE_STARTED, devid=tempid) # Detection Started if noti: noti.setkeyvalue("opid", self._detection["opid"]) self.writecb(noti) noti = None info = None res = None for _ in range(3): res = self.readregister(conn, KSX3267MateV2._DEVINFOREG, 6, unit) if res is None or res.isError(): continue if len(res.registers) != 6: self._logger.info("retry to get data since size of data is not matched. 6 " + str(len(res.registers))) continue break if res is None or res.isError(): noti = Notice(None, NotiCode.DETECT_NO_NODE, devid=tempid) # Detection Started self._logger.info ("Fail to get information from a node : " + str(unit) + " " + str(res)) elif res.registers[1] in (NodeType.SENNODE, NodeType.ACTNODE, NodeType.INTNODE): # device type if res.registers[3] == ProtoVer.KS_X_3267_2020 or res.registers[3] == ProtoVer.KS_X_3267_2018: info = self.detect_node(conn, unit, res.registers) self._logger.info ("Found a node : " + str(unit) + " " + str(info)) else: noti = Notice(None, NotiCode.DETECT_UNKNOWN_PROTOCOL_VER, devid=tempid) # unknown protocol version elif res.registers[1] == NodeType.NUTNODE: if res.registers[3] == ProtoVer.TTA_1: info = self.detect_node(conn, unit, res.registers) self._logger.info ("Found a nutrient system : " + str(unit) + " " + str(info)) else: noti = Notice(None, NotiCode.DETECT_UNKNOWN_PROTOCOL_VER, devid=tempid) # unknown protocol version else: noti = Notice(unit, NotiCode.DETECT_UNKNOWN_NODE, devid=tempid) # unknown device if noti is None: if info is None: noti = Notice(None, NotiCode.DETECT_WRONG_DEVICE, devid=tempid) # fail to find a node else: noti = Notice(None, NotiCode.DETECT_NODE_DETECTED, devid=port, content={unit : info}) # found a node detected[unit] = info noti.setkeyvalue("opid", self._detection["opid"]) print "noti", noti.stringify() self.writecb(noti) time.sleep(0.1) return detected def canceldetection(self, params): time.sleep(self._timeout) noti = Notice(None, NotiCode.DETECT_CANCELED) # detection is canceled noti.setkeyvalue("opid", self._detection["opid"]) self.writecb(noti) self.setdetection(False) return ResCode.OK def _listen(self, opt): try: servsoc = socket.socket(socket.AF_INET, socket.SOCK_STREAM) servsoc.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) servsoc.bind((opt['host'], opt['port'])) servsoc.listen(1) self._logger.info("listen : " + str(opt)) executing = True while executing: self._logger.info("waiting a client~") rsoc, wsoc, esoc = select.select([servsoc], [], [], 10) for sock in rsoc: if sock == 
                    if sock == servsoc:
                        clisoc, address = servsoc.accept()
                        self._logger.info("client connected from " + str(address))
                        for tmp in self._tempthd:
                            if tmp["port"] == opt["port"]:
                                conn = ModbusTcpClient(timeout=self._timeout)
                                conn.socket = clisoc
                                self._conn[opt["port"]] = conn
                                tmp["status"] = 10   # connected
                                executing = False
        except Exception as ex:
            servsoc.close()
            for tmp in self._tempthd:
                if tmp["port"] == opt["port"]:
                    self._logger.warn(" port [" + str(opt["port"]) + "] exception : " + str(ex))
                    tmp["status"] = 5   # error

    def listen(self, opt):
        # args must be a one-element tuple; (opt) is just opt itself
        tmp = {"thd": Thread(target=self._listen, args=(opt,)), "status": 0, "port": opt['port']}
        self._tempthd.append(tmp)
        tmp["thd"].start()
        return True

    def checktempthreads(self):
        for tmp in self._tempthd:
            if tmp["status"] > 2:
                # threading.Thread has no stop(); _listen terminates on its own
                # once its status is set, so joining is enough here.
                tmp["thd"].join()

    def connectone(self, opt):
        ret = False
        conn = None
        if opt['method'] == 'rtu':
            conn = ModbusSerialClient(method='rtu', port=opt['port'], timeout=self._timeout, baudrate=opt['baudrate'])
            ret = conn.connect()
            msg = "failed to connect with rtu"
            code = NotiCode.RTU_CONNECTED if ret else NotiCode.RTU_FAILED_CONNECTION
        elif opt['method'] == 'tcpc':
            conn = ModbusTcpClient(opt['host'], port=opt['port'], timeout=self._timeout)
            ret = conn.connect()
            msg = "failed to connect with tcp"
            code = NotiCode.TCP_CONNECTED if ret else NotiCode.RTU_FAILED_CONNECTION
        elif opt['method'] == 'tcpcs':
            self._logger.info("Waiting for a client to connect.")
            ret = self.listen(opt)
            msg = "failed to connect with tcp"
            code = NotiCode.TCP_WAITING if ret else NotiCode.RTU_FAILED_CONNECTION
            conn = None
        else:
            msg = "It's a wrong connection method. " + str(opt['method'])

        if ret == False:
            self._logger.warn(msg)
            noti = Notice(None, NotiCode.RTU_FAILED_CONNECTION)   # failed to connect
        else:
            noti = Notice(None, NotiCode.RTU_CONNECTED)   # connected
        self.writecb(noti)
        return conn

    def connect(self):
        ret = False
        for opt in self._option['conn']:
            conn = self.connectone(opt)
            if conn:
                # key by the port name without the '/dev/tty' prefix, e.g. '/dev/ttyJND2' -> 'JND2'
                self._conn[opt["port"][8:]] = conn
                ret = True
        super(KSX3267MateV2, self).connect()
        return ret

    def closeone(self, port):
        self._conn[port].close()

    def close(self):
        for port in self._conn.keys():
            self.closeone(port)
        super(KSX3267MateV2, self).close()

    def readmsg(self):
        self._msgq = []
        for gw in self._devinfo:
            for nd in gw["children"]:
                self._msgq.append(self.readsensornodeinfo(nd))
        if self._isdetecting:
            self.detect()
        self.checktempthreads()

    def processrequest(self, dev, request, node):
        gw = self._devinfo.findgateway(request.getnodeid())
        unit = self.getdk(dev, 0)
        operation = request.getcommand()
        params = request.getparams()
        params["operation"] = operation   # need to convert by case
        params["opid"] = request.getopid()   # need to convert by case
        properparams = CmdCode.getparams(operation) + ["operation", "opid"]
        registers = []
        for key in self.getdk(dev, 4):
            if key not in properparams:
                # this param is not used for the operation,
                # but the register still has to be filled
                val = 0
            elif key in params:
                val = params[key]
            else:
                self._logger.warn("Wrong Keyword : " + str(key))
                return ResCode.FAIL_WRONG_KEYWORD

            if KSX3267MateV2._KEYWORDS[key][0] == 1:
                registers.append(val)
            elif KSX3267MateV2._KEYWORDS[key][1] == "int":
                registers.extend(struct.unpack('HH', struct.pack('i', val)))
            elif KSX3267MateV2._KEYWORDS[key][1] == "float":
                registers.extend(struct.unpack('HH', struct.pack('f', val)))
            #else:
            #    self._logger.warn("This param is needed for this operation. " + str(params['operation']) + ", " + str(key))
            #    return ResCode.FAIL_WRONG_KEYWORD

        print "....... before lock for write"
        with self._lock:
            time.sleep(KSX3267MateV2._SLEEP)
            print "....... lock for write", self.getdk(dev, 3), registers
            res = self._conn[gw["dk"]].write_registers(self.getdk(dev, 3), registers, unit=unit)
            if res.isError():
                self._logger.warn("Fail to write a request to dev." + str(dev) + "," + str(res) + ":" + str(request))
                return ResCode.FAIL_TO_WRITE

        msg = self.readactinfo(node, dev)
        if msg is None:
            self._logger.warn("Fail to read dev status.")
        else:
            self.sendnoticeforactuatorstatus(msg)
        return ResCode.OK

    def writeblk(self, blk):
        print "received message", blk.getdevid(), self._coupleid
        if BlkType.isrequest(blk.gettype()) is False:
            self._logger.warn("The message is not a request. " + str(blk.gettype()))
            return False

        response = Response(blk)
        cmd = blk.getcommand()
        nd = self._devinfo.finddevbyid(blk.getnodeid())
        dev = self._devinfo.finddevbyid(blk.getdevid())

        if blk.getdevid() == self._coupleid:
            params = blk.getparams()
            if cmd == CmdCode.DETECT_DEVICE:
                print "detect device"
                code = self.startdetection(params, blk.getopid())
            elif cmd == CmdCode.CANCEL_DETECT:
                print "cancel to detect device"
                code = self.canceldetection(params)
            else:
                self._logger.warn("Unknown Error. " + str(blk) + ", " + str(dev))
                code = ResCode.FAIL
        elif dev is None:
            self._logger.warn("There is no device. " + str(blk.getdevid()))
            code = ResCode.FAIL_NO_DEVICE
        elif DevType.ispropercommand(dev['dt'], cmd) is False:
            self._logger.warn("The request is not proper. " + str(cmd) + " " + str(dev['dt']))
            code = ResCode.FAIL_NOT_PROPER_COMMAND
        elif DevType.isactuator(dev['dt']) or DevType.isnode(dev['dt']):
            # modbus
            code = self.processrequest(dev, blk, nd)
            self._logger.info("Actuator processed : " + str(code))
        elif DevType.isgateway(dev['dt']):
            self._logger.info("Gateway does not receive a request")
            code = ResCode.FAIL
        else:
            self._logger.warn("Unknown Error. " + str(blk) + ", " + str(dev))
            code = ResCode.FAIL

        response.setresult(code)
        self._logger.info("write response: " + str(response))
        self.writecb(response)
        return True   #if code == ResCode.OK else False

    def parseregisters(self, names, values):
        idx = 0
        ret = {}
        for nm in names:
            (size, vtype) = KSX3267MateV2._KEYWORDS[nm]
            if vtype == "float":
                val = struct.unpack('f', struct.pack('HH', values[idx], values[idx+1]))[0]
            elif vtype == "int":
                val = struct.unpack('i', struct.pack('HH', values[idx], values[idx+1]))[0]
            else:
                val = values[idx]
            ret[nm] = val
            idx = idx + size
        print "parsed", ret
        return ret

    def readinfofromdev(self, conn, dev):
        size = self.getsize(self.getdk(dev, 2))
        #for _ in range(3):
        res = self.readregister(conn, self.getdk(dev, 1), size, self.getdk(dev, 0))
        if res is None:
            self._logger.warn("fail to get status from " + str(dev['dk']))
            # break
        elif res.isError():
            self._logger.info("retry to get status from " + str(dev['dk']) + " " + str(res))
            # continue
        else:
            if len(res.registers) == size:
                return self.parseregisters(self.getdk(dev, 2), res.registers)
            else:
                self._logger.info("retrying since the data size does not match. " + str(size) + " " + str(len(res.registers)))
        return None

    def readnodeinfo(self, node):
        ret = {"id": node["id"], "sen": {}, "act": {}, "nd": {"status": StatCode.ERROR.value}}
        gw = self._devinfo.findgateway(node["id"])
        conn = self._conn[gw["dk"]]
        ret["conn"] = conn
        info = self.readinfofromdev(conn, node)
        if info:
            ret["nd"] = info
        else:
            self._logger.warn("fail to read node info : " + str(node))
        return ret

    def readsensornodeinfo(self, node):
        ret = self.readnodeinfo(node)
        for dev in node['children']:
            if DevType.issensor(dev["dt"]):
                info = self.readinfofromdev(ret["conn"], dev)
                if info:
                    ret["sen"][dev["id"]] = info
                #else:
                #    self._logger.warn("fail to read sensor info : " + str(dev) + " however continue to read other device")
        return ret

    def readactnodeinfo(self, node):
        ret = self.readnodeinfo(node)
        for dev in node['children']:
            if DevType.issensor(dev["dt"]) == False:
                info = self.readinfofromdev(ret["conn"], dev)
                if info:
                    ret["act"][dev["id"]] = info
                else:
                    self._logger.warn("fail to read actuator info : " + str(dev) + " however continue to read other device")
        return ret

    def readactinfo(self, node, act):
        ret = self.readnodeinfo(node)
        info = self.readinfofromdev(ret["conn"], act)
        if info:
            ret["act"][act["id"]] = info
        else:
            self._logger.warn("fail to read actuator info : " + str(act) + " however continue to read other device")
        return ret

    def sendobs(self):
        for msg in self._msgq:
            if msg is None:
                continue
            self.sendobservation(msg)

    def sendnoti(self):
        for gw in self._devinfo:
            for node in gw["children"]:
                ret = self.readnodeinfo(node)
                i = 1
                for dev in node['children']:
                    if DevType.issensor(dev["dt"]) == False:
                        info = self.readinfofromdev(ret["conn"], dev)
                        if info:
                            ret["act"][dev["id"]] = info
                        i = i + 1
                        if i % 3 == 0:
                            self.sendnoticeforactuatorstatus(ret)
                            ret["act"] = {}
                self.sendnoticeforactuatorstatus(ret)

    def sendobservation(self, ndinfo):
        if StatCode.has_value(ndinfo["nd"]["status"]) == False:
            ndinfo["nd"]["status"] = StatCode.ERROR.value

        obsblk = Observation(ndinfo["id"])
        obsblk.setobservation(ndinfo["id"], 0, StatCode(ndinfo["nd"]["status"]))
        for devid, info in ndinfo["sen"].iteritems():
            if StatCode.has_value(info["status"]) == False:
                info["status"] = StatCode.ERROR.value
            obsblk.setobservation(devid, info["value"], StatCode(info["status"]))

        # do not send an observation for an actuator
        #for devid, info in ndinfo["act"].iteritems():
        #    if StatCode.has_value(info["status"]) == False:
        #        info["status"] = StatCode.ERROR.value
        #    obsblk.setobservation(devid, 0, StatCode(info["status"]))

        self.writecb(obsblk)

    def sendnoticeforactuatorstatus(self, ndinfo):
        blk = Notice(ndinfo["id"], NotiCode.ACTUATOR_STATUS, ndinfo["id"], ndinfo["nd"])
        for devid, info in ndinfo["act"].iteritems():
            blk.setcontent(devid, info)
        self.writecb(blk)

    def start(self, writecb):
        super(KSX3267MateV2, self).start(writecb)
        return True

    def stop(self):
        super(KSX3267MateV2, self).stop()
        return True

    def getsize(self, lst):
        size = 0
        for k in lst:
            if k in KSX3267MateV2._KEYWORDS:
                size = size + KSX3267MateV2._KEYWORDS[k][0]
            else:
                self._logger.warn("wrong keyword : " + str(k))
                return -1
        return size

if __name__ == "__main__":
    isnutri = False
    opt = {
        'conn' : [{
            'method': 'rtu',
            'port' : '/dev/ttyJND2',
            'baudrate' : 9600,
            'timeout': 5
        }]
    }

    nutriinfo = [{
        "id" : "1", "dk" : "", "dt": "gw", "children" : [{
            "id" : "101", "dk" : '[1,40201,["status"],45001,["operation","opid"]]', "dt": "nd", "children" : [
                {"id" : "102", "dk" : '[1,40211,["control","status","area","alert","opid"],45001,["operation", "opid", "control","EC","pH", "start-area", "stop-area", "on-sec"]]', "dt": "nutrient-supply/level1"},
                {"id" : "103", "dk" : '[1,40221,["value","status"]]', "dt": "sen"},
                {"id" : "104", "dk" : '[1,40231,["value","status"]]', "dt": "sen"},
                {"id" : "105", "dk" : '[1,40241,["value","status"]]', "dt": "sen"},
                {"id" : "106", "dk" : '[1,40251,["value","status"]]', "dt": "sen"},
                {"id" : "107", "dk" : '[1,40261,["value","status"]]', "dt": "sen"},
                {"id" : "109", "dk" : '[1,40271,["value","status"]]', "dt": "sen"},
                {"id" : "110", "dk" : '[1,40281,["value","status"]]', "dt": "sen"},
                {"id" : "111", "dk" : '[1,40291,["value","status"]]', "dt": "sen"},
                {"id" : "112", "dk" : '[1,40301,["value","status"]]', "dt": "sen"},
                {"id" : "113", "dk" : '[1,40311,["value","status"]]', "dt": "sen"}
            ]}
        ]}
    ]

    devinfo = [{
        "id" : "1", "dk" : "JND2", "dt": "gw", "children" : [
#            {
#                "id" : "101", "dk" : '[1,201,["status"],301,["operation","opid"]]', "dt": "nd", "children" : [
#                    {"id" : "102", "dk" : '[1,210,["value","status"]]', "dt": "sen"},
#                    {"id" : "103", "dk" : '[1,220,["value","status"]]', "dt": "sen"}
#                "id" : "101", "dk" : '[1,40201,["status"],45001,["operation","opid"]]', "dt": "nd", "children" : [
#                    {"id" : "102", "dk" : '[1,41010,["value","status"]]', "dt": "sen"},
#                    {"id" : "103", "dk" : '[1,41020,["value","status"]]', "dt": "sen"}
#                    {"id" : "102", "dk" : '[1,40202,["value","status"]]', "dt": "sen"},
#                    {"id" : "103", "dk" : '[1,40205,["value","status"]]', "dt": "sen"},
#                    {"id" : "104", "dk" : '[1,40208,["value","status"]]', "dt": "sen"},
#                    {"id" : "105", "dk" : '[1,40211,["value","status"]]', "dt": "sen"},
#                    {"id" : "106", "dk" : '[1,40251,["value","status"]]', "dt": "sen"},
#                    {"id" : "107", "dk" : '[1,40261,["value","status"]]', "dt": "sen"},
#                    {"id" : "108", "dk" : '[1,40271,["value","status"]]', "dt": "sen"},
#                    {"id" : "109", "dk" : '[1,40281,["value","status"]]', "dt": "sen"},
#                    {"id" : "110", "dk" : '[1,40291,["value","status"]]', "dt": "sen"}
#                ]
#            }
        ]
    }]
    """
    }, {
        "id" : "201", "dk" : '[2,40201,["status"],45001,["operation","opid"]]', "dt": "nd", "children" : [
            {"id" : "202", "dk" : '[2,40202,["opid","status","state-hold-time","remain-time"],40206,["operation","opid","time"]]', "dt": "act/retractable/level1"},
            {"id" : "202", "dk" : '[2,40209,["opid","status","state-hold-time","remain-time"],40213,["operation","opid","time"]]', "dt": "act/retractable/level1"},
            {"id" : "203", "dk" : '[2,40216,["value","status"]]', "dt": "sen"},
            {"id" : "204", "dk" : '[2,40219,["value","status"]]', "dt": "sen"},
            #{"id" : "203", "dk" : (2,40221,["opid","status"],45021,["operation","opid"]), "dt": "act/switch/level0"},
            #{"id" : "204", "dk" : (2,40231,["opid","status"],45031,["operation","opid"]), "dt": "act/switch/level0"},
            #{"id" : "205", "dk" : (2,40241,["opid","status"],45041,["operation","opid"]), "dt": "act/switch/level0"},
            #{"id" : "206", "dk" : (2,40251,["opid","status"],45051,["operation","opid"]), "dt": "act/switch/level0"},
            #{"id" : "207", "dk" : (2,40261,["opid","status"],45061,["operation","opid"]), "dt": "act/switch/level0"},
            #{"id" : "208", "dk" : (2,40271,["opid","status"],45071,["operation","opid"]), "dt": "act/switch/level0"},
            #{"id" : "209", "dk" : (2,40281,["opid","status"],45081,["operation","opid"]), "dt": "act/switch/level0"}
        ]
    }, {
        "id" : "301", "dk" : (3,40201,["opid","status"],45001,["operation","opid"]), "dt": "nd", "children" : [
            {"id" : "302", "dk" : (3,40211,["opid","status"],45011,["operation","opid"]), "dt": "act/retractable/level0"},
            {"id" : "303", "dk" : (3,40221,["opid","status"],45021,["operation","opid"]), "dt": "act/retractable/level0"},
            {"id" : "304",
"dk" : (3,40231,["opid","status"],45031,["operation","opid"]), "dt": "act/retractable/level0"}, {"id" : "305", "dk" : (3,40241,["opid","status"],45041,["operation","opid"]), "dt": "act/retractable/level0"} ] }] }] """ if isnutri: kdmate = KSX3267MateV2(opt, nutriinfo, "1", None) else: kdmate = KSX3267MateV2(opt, devinfo, "1", None) mate = Mate ({}, [], "1", None) kdmate.start (mate.writeblk) print "mate started" time.sleep(10) req = Request(None) req.setcommand("1", CmdCode.DETECT_DEVICE, None) print "=======================================#1" kdmate.writeblk(req) print "=======================================#1" """ time.sleep(1) req = Request(None) req.setcommand("1", CmdCode.CANCEL_DETECT, {}) print "=======================================#2" kdmate.writeblk(req) print "=======================================#2" time.sleep(1) req = Request(None) req.setcommand("1", CmdCode.DETECT_DEVICE, None) print "=======================================#3" kdmate.writeblk(req) print "=======================================#3" time.sleep(1) req = Request(None) req.setcommand("1", CmdCode.CANCEL_DETECT, {}) print "=======================================#4" kdmate.writeblk(req) print "=======================================#4" time.sleep(10) req = Request(201) req.setcommand(202, CmdCode.OPEN, {}) kdmate.writeblk(req) time.sleep(5) req = Request(201) req.setcommand(202, CmdCode.OFF, {}) kdmate.writeblk(req) time.sleep(10) req = Request(201) req.setcommand(202, CmdCode.TIMED_OPEN, {"time":10}) kdmate.writeblk(req) time.sleep(15) req = Request(201) req.setcommand(202, CmdCode.TIMED_CLOSE, {"time":10}) kdmate.writeblk(req) time.sleep(5) req = Request(201) req.setcommand(202, CmdCode.OFF, {}) kdmate.writeblk(req) """ time.sleep(30) kdmate.stop() print "mate stopped"
1.976563
2
CircleciScripts/run_integrationtests.py
aimalygin/aws-sdk-ios
17
4699
<filename>CircleciScripts/run_integrationtests.py
import demjson
import sys
from subprocess import Popen, PIPE
import subprocess
import xml.etree.ElementTree as ET
import os
from datetime import datetime
from functions import runcommand
#from sets import Set

def getfailedcases(withBundle=True):
    xmlfile = 'build/reports/junit.xml'
    tree = ET.parse(xmlfile)
    root = tree.getroot()
    testbundle = root.get('name')
    testbundle = testbundle[0:len(testbundle) - 7]
    failedtests = set()
    # TODO: we can filter with a condition
    for testsuite in root.findall(".//testsuite"):
        for testcase in testsuite.findall('.//testcase[failure]'):
            suitename = testsuite.get('name')
            casename = testcase.get('name')
            if withBundle:
                failedtests.add(testbundle + '/' + suitename + '/' + casename)
            else:
                failedtests.add(suitename + '/' + casename)
    return failedtests

# run one test pass and return the xcodebuild exit code
def runtest(otherarguments, projectPath, schemeName, projectName, destination, derivedDataPath, timeout=0):
    runcommand("rm raw.log")
    runcommand("rm xcpretty.log")
    testcommand = "xcodebuild test-without-building -project {0} -scheme {1} -sdk iphonesimulator -destination '{2}' -derivedDataPath {3}/{4}".format(projectPath, schemeName, destination, derivedDataPath, projectName)
    testcommand += " " + otherarguments
    rawoutput = open('raw.log', 'w')
    exit_code = runcommand(testcommand, timeout, pipeout=rawoutput)
    rawoutput.close()
    print("Formatting test result .......")
    xcprettycommand = "cat raw.log | xcpretty -r junit | tee xcpretty.log"
    runcommand(xcprettycommand)
    return exit_code

########################## main function ###############################
# the command looks like:
# run_integrationtests.py <configuration json> <test result location> <group name> <destination> <derived data path>
if len(sys.argv) < 6 or sys.argv[1] == '-h' or sys.argv[1] == '--help':
    print("Usage: \r\n {0} <integrationTestsConfiguration json file path> <test result location> <group name> <destination> <derived data path>".format(sys.argv[0]))
    exit(1)

jsonfilename = sys.argv[1]
test_result_folder = sys.argv[2]
group_name = sys.argv[3]
destination = sys.argv[4]
derivedDataPath = sys.argv[5]

with open(jsonfilename, 'r') as jsonfile:
    jsonstring = jsonfile.read()
testConfigure = demjson.decode(jsonstring)
runningConfigure = testConfigure['runningConfigure']
projectName = runningConfigure['projectName']
projectPath = runningConfigure['projectPath']
schemeName = runningConfigure['schemeName']
sdkName = runningConfigure['sdkName']

print("group name:", group_name)
testgroup = testConfigure[group_name]
testlist = testgroup['test_list']
if 'projectName' in testgroup:
    projectName = testgroup['projectName']
if 'projectPath' in testgroup:
    projectPath = testgroup['projectPath']
if 'schemeName' in testgroup:
    schemeName = testgroup['schemeName']
print("projectName, projectPath, schemeName, destination", projectName, projectPath, schemeName, destination)

# testcommandhead = f"xcodebuild test-without-building -project {projectName} -scheme {schemeName} -sdk {sdkName} -destination 'platform={platformName},name={deviceName},OS={osVersion}'"
# testcommandtail = " | tee raw.log | xcpretty -r junit | tee xcpretty.log"

runcommand('echo "export testresult=0" >> $BASH_ENV')
testresult = 0
for testname in testlist:
    print("-------------------------------", testname, "-------------------------------")
    test = testlist[testname]
    testarguments = ' -only-testing:' + testname

    # build the skip-testing parameters
    skippingtests = ""
    if 'excludetests' in test:
        for skippingtest in test['excludetests']:
            skippingtests += ' -skip-testing:' + testname + "/" + skippingtest
        print("excludetests:", skippingtests)

    exit_code = runtest(testarguments + skippingtests, projectPath, schemeName, projectName, destination, derivedDataPath)
    print(testname, "exit code:", exit_code)

    # if the test failed (xcodebuild exits with 65 on test failures),
    # check whether the failed cases can be retried
    if exit_code == 65:
        retriabletimes = 3
        if 'retriabletimes' in test:
            retriabletimes = test['retriabletimes']
        if retriabletimes > 1:
            # get all failed test cases
            faileds = getfailedcases()
            if len(faileds) == 0:
                print("the test command returned an error code, but no failed test case was found")
                print("exit code:", exit_code)
                break
            print("failed tests:", faileds)
            retrytimes = 1
            print('retriabletimes:', retriabletimes)
            while retrytimes <= retriabletimes and exit_code > 0:
                print("retry ", testname, "for ", retrytimes, " times")
                testarguments = ""
                for failed in faileds:
                    testarguments += ' -only-testing:' + failed
                retrytimes += 1
                exit_code = runtest(testarguments, projectPath, schemeName, projectName, destination, derivedDataPath)
                print("retry exit code:", exit_code)
                if exit_code != 0:
                    faileds = getfailedcases()

    if exit_code != 0:
        print("exit code:", exit_code)
        runcommand('mkdir -p {0}/{1}'.format(test_result_folder, testname))
        runcommand('echo "{2}" >> {0}/{1}/exitcode.log'.format(test_result_folder, testname, exit_code))
        runcommand('mv raw.log {0}/{1}/raw.log'.format(test_result_folder, testname))
        runcommand('mv xcpretty.log {0}/{1}/xcpretty.log'.format(test_result_folder, testname))
        runcommand('cp build/reports/junit.xml {0}/{1}/junit.xml'.format(test_result_folder, testname))

        ignorefailure = False
        if exit_code == 65:
            failedtests = getfailedcases(False)
            print("failedtests:", failedtests)
            if 'ignoreFailures' in test and failedtests:
                ignoreFailures = set(test['ignoreFailures'])
                if failedtests.issubset(ignoreFailures):
                    print("There are failed testcases that can be ignored")
                    ignorefailure = True
                else:
                    print("Failed testcases that cannot be ignored: ", failedtests - ignoreFailures)
        if not ignorefailure:
            print("There are failures in the test")
            testresult = 1
    else:
        print("Test succeeded")

print("testresult:", testresult)
runcommand('echo "export testresult={0}" >> $BASH_ENV'.format(testresult))
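# --- Editor's sketch (not part of run_integrationtests.py) ---
# getfailedcases above recovers retryable test identifiers by walking the
# JUnit report that xcpretty writes. This is a minimal, self-contained sketch
# of the same ElementTree walk against an inline report; the XML content is
# invented for illustration, and the 7-character suffix stripped from the
# bundle name is assumed to be '.xctest'.
import xml.etree.ElementTree as ET

SAMPLE_JUNIT = """<?xml version="1.0"?>
<testsuites name="AWSAllTests.xctest">
  <testsuite name="AWSCoreTests">
    <testcase name="testSigner"/>
    <testcase name="testCredentials"><failure message="assertion failed"/></testcase>
  </testsuite>
</testsuites>"""

def failed_cases(xml_text, withBundle=True):
    root = ET.fromstring(xml_text)
    bundle = root.get('name')[:-7]   # strip a 7-char suffix, e.g. '.xctest'
    failed = set()
    # a testcase counts as failed when it has a <failure> child element
    for suite in root.findall('.//testsuite'):
        for case in suite.findall('.//testcase[failure]'):
            name = suite.get('name') + '/' + case.get('name')
            failed.add(bundle + '/' + name if withBundle else name)
    return failed

print(failed_cases(SAMPLE_JUNIT))
# expected: set(['AWSAllTests/AWSCoreTests/testCredentials'])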
2.140625
2